Addition of submodules, update to connectors, base class start
.gitmodules (vendored, new file, +3)
@@ -0,0 +1,3 @@
+[submodule "qualysapi"]
+    path = deps/qualysapi
+    url = git@github.com:austin-taylor/qualysapi.git
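The .gitmodules entry above pins the forked qualysapi under deps/qualysapi. Below is a minimal sketch (not part of this commit) of initializing that submodule after cloning; the git flags are standard, and wrapping the call in Python's subprocess is purely illustrative.

    # Minimal sketch: fetch the vendored qualysapi submodule declared in
    # .gitmodules at the commit pinned by the superproject.
    import subprocess

    subprocess.check_call(['git', 'submodule', 'update', '--init', '--recursive'])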
@@ -1,9 +1,8 @@
-#!/usr/bin/env python
+#!/usr/bin/python
 # -*- coding: utf-8 -*-
 __author__ = 'Austin Taylor'
-#Written by Austin Taylor
-#www.austintaylor.io
+
 
 from vulnwhisp.vulnwhisp import vulnWhisperer
 from vulnwhisp.utils.cli import bcolors
 import os
deps/qualysapi/qualysapi/config.py (vendored, 34 changes)
@@ -58,43 +58,43 @@ class QualysConnectConfig:
         self._cfgparse.read(self._cfgfile)
 
         # if 'info' doesn't exist, create the section.
-        if not self._cfgparse.has_section('info'):
-            self._cfgparse.add_section('info')
+        if not self._cfgparse.has_section('qualys'):
+            self._cfgparse.add_section('qualys')
 
         # Use default hostname (if one isn't provided).
-        if not self._cfgparse.has_option('info', 'hostname'):
+        if not self._cfgparse.has_option('qualys', 'hostname'):
             if self._cfgparse.has_option('DEFAULT', 'hostname'):
                 hostname = self._cfgparse.get('DEFAULT', 'hostname')
-                self._cfgparse.set('info', 'hostname', hostname)
+                self._cfgparse.set('qualys', 'hostname', hostname)
             else:
                 raise Exception("No 'hostname' set. QualysConnect does not know who to connect to.")
 
         # Use default max_retries (if one isn't provided).
-        if not self._cfgparse.has_option('info', 'max_retries'):
+        if not self._cfgparse.has_option('qualys', 'max_retries'):
             self.max_retries = qcs.defaults['max_retries']
         else:
-            self.max_retries = self._cfgparse.get('info', 'max_retries')
+            self.max_retries = self._cfgparse.get('qualys', 'max_retries')
         try:
             self.max_retries = int(self.max_retries)
         except Exception:
             logger.error('Value max_retries must be an integer.')
             print('Value max_retries must be an integer.')
             exit(1)
-        self._cfgparse.set('info', 'max_retries', str(self.max_retries))
+        self._cfgparse.set('qualys', 'max_retries', str(self.max_retries))
         self.max_retries = int(self.max_retries)
 
         #Get template ID... user will need to set this to pull back CSV reports
-        if not self._cfgparse.has_option('report', 'template_id'):
+        if not self._cfgparse.has_option('qualys', 'template_id'):
             self.report_template_id = qcs.defaults['template_id']
         else:
-            self.report_template_id = self._cfgparse.get('report', 'template_id')
+            self.report_template_id = self._cfgparse.get('qualys', 'template_id')
         try:
             self.report_template_id = int(self.report_template_id)
         except Exception:
             logger.error('Report Template ID Must be set and be an integer')
             print('Value template ID must be an integer.')
             exit(1)
-        self._cfgparse.set('report', 'template_id', str(self.max_retries))
+        self._cfgparse.set('qualys', 'template_id', str(self.max_retries))
         self.max_retries = int(self.max_retries)
 
         # Proxy support
@@ -168,18 +168,18 @@ class QualysConnectConfig:
             self.proxies = None
 
         # ask username (if one doesn't exist)
-        if not self._cfgparse.has_option('info', 'username'):
+        if not self._cfgparse.has_option('qualys', 'username'):
             username = input('QualysGuard Username: ')
-            self._cfgparse.set('info', 'username', username)
+            self._cfgparse.set('qualys', 'username', username)
 
         # ask password (if one doesn't exist)
-        if not self._cfgparse.has_option('info', 'password'):
+        if not self._cfgparse.has_option('qualys', 'password'):
             password = getpass.getpass('QualysGuard Password: ')
-            self._cfgparse.set('info', 'password', password)
+            self._cfgparse.set('qualys', 'password', password)
 
 
 
-        logging.debug(self._cfgparse.items('info'))
+        logging.debug(self._cfgparse.items('qualys'))
 
         if remember_me or remember_me_always:
             # Let's create that config file for next time...
@@ -211,8 +211,8 @@ class QualysConnectConfig:
 
     def get_auth(self):
        ''' Returns username from the configfile. '''
-        return (self._cfgparse.get('info', 'username'), self._cfgparse.get('info', 'password'))
+        return (self._cfgparse.get('qualys', 'username'), self._cfgparse.get('qualys', 'password'))
 
     def get_hostname(self):
        ''' Returns hostname. '''
-        return self._cfgparse.get('info', 'hostname')
+        return self._cfgparse.get('qualys', 'hostname')
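The config.py changes above collapse the old 'info' and 'report' sections into a single 'qualys' section. A minimal sketch, assuming a hypothetical config file name and made-up values, of what that section looks like and how ConfigParser reads it, mirroring get_auth() and get_hostname():

    # Sketch only: the file name and values below are illustrative; the
    # option names (hostname, username, password, template_id, max_retries)
    # come from the diff above.
    try:
        from configparser import ConfigParser   # Python 3
    except ImportError:
        from ConfigParser import ConfigParser   # Python 2

    SAMPLE = """
    [qualys]
    hostname = qualysapi.qualys.com
    username = example_user
    password = example_pass
    template_id = 123456
    max_retries = 3
    """

    with open('config.ini.example', 'w') as fh:   # hypothetical path
        fh.write(SAMPLE)

    cfg = ConfigParser()
    cfg.read('config.ini.example')
    auth = (cfg.get('qualys', 'username'), cfg.get('qualys', 'password'))
    print(auth, cfg.get('qualys', 'hostname'), cfg.getint('qualys', 'max_retries'))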
deps/qualysapi/qualysapi/connector.py (vendored, 2 changes)
@@ -252,7 +252,7 @@ class QGConnector(api_actions.QGActions):
         url = self.url_api_version(api_version)
         #
         # Set up headers.
-        headers = {"X-Requested-With": "Parag Baxi QualysAPI (python) v%s" % (qualysapi.version.__version__,)}
+        headers = {"X-Requested-With": "QualysAPI (python) v%s - VulnWhisperer" % (qualysapi.version.__version__,)}
         logger.debug('headers =\n%s' % (str(headers)))
         # Portal API takes in XML text, requiring custom header.
         if api_version in ('am', 'was', 'am2'):
deps/qualysapi/qualysapi/version.py (vendored, 2 changes)
@@ -1,3 +1,3 @@
-__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
+__author__ = 'Austin Taylor'
 __pkgname__ = 'qualysapi'
 __version__ = '4.1.0'
deps/qualysapi/setup.py (vendored, 15 changes)
@@ -2,14 +2,15 @@
 
 from __future__ import absolute_import
 import os
+import setuptools
 import sys
 try:
     from setuptools import setup
 except ImportError:
     from distutils.core import setup
 
-__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
-__copyright__ = 'Copyright 2011-2013, Parag Baxi'
+__author__ = 'Austin Taylor <vulnWhisperer@austintaylor.io>'
+__copyright__ = 'Copyright 2017, Austin Taylor'
 __license__ = 'BSD-new'
 # Make pyflakes happy.
 __pkgname__ = None
@@ -27,14 +28,14 @@ def read(fname):
 
 setup(name=__pkgname__,
       version=__version__,
-      author='Parag Baxi',
-      author_email='parag.baxi@gmail.com',
-      description='QualysGuard(R) Qualys API Package',
+      author='Austin Taylor',
+      author_email='vulnWhisperer@austintaylor.io',
+      description='QualysGuard(R) Qualys API Package modified for VulnWhisperer',
       license='BSD-new',
       keywords='Qualys QualysGuard API helper network security',
-      url='https://github.com/paragbaxi/qualysapi',
+      url='https://github.com/austin-taylor/qualysapi',
       package_dir={'': '.'},
-      packages=['qualysapi', ],
+      packages=setuptools.find_packages(),
       # package_data={'qualysapi':['LICENSE']},
       # scripts=['src/scripts/qhostinfo.py', 'src/scripts/qscanhist.py', 'src/scripts/qreports.py'],
       long_description=read('README.md'),
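The setup.py change above swaps the hard-coded package list for setuptools.find_packages(). A short sketch of what that call resolves to; the directory layout in the comment is assumed from the paths in this diff, not verified:

    # Minimal sketch: what packages=setuptools.find_packages() resolves to.
    import setuptools

    # Given a tree such as:
    #   qualysapi/__init__.py
    #   qualysapi/config.py
    #   qualysapi/connector.py
    #   qualysapi/version.py
    # find_packages() returns every directory containing an __init__.py.
    print(setuptools.find_packages())   # e.g. ['qualysapi']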
@@ -0,0 +1 @@
+from utils.cli import *
@@ -311,9 +311,7 @@ class qualysWebAppReport:
 
     def grab_sections(self, report):
         all_dataframes = []
-        category_list = []
         with open(report, 'rb') as csvfile:
-            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
             all_dataframes.append(pd.DataFrame(self.grab_section(report,
                                                self.WEB_APP_VULN_BLOCK,
                                                end=[self.WEB_APP_SENSITIVE_BLOCK,
@@ -365,7 +363,7 @@ class qualysWebAppReport:
 
         merged_df = pd.concat([dataframes[0], dataframes[1],
                                dataframes[2]], axis=0,
-                              ignore_index=False).fillna('N/A')
+                              ignore_index=False).fillna('')
         merged_df = pd.merge(merged_df, dataframes[3], left_on='QID',
                              right_on='Id')
 
@@ -434,9 +432,10 @@ class qualysWebAppReport:
         report_id: App ID
         updated_date: Last time scan was ran for app_id
         """
+        vuln_ready = None
+
 
         try:
-            vuln_ready = None
             if 'Z' in updated_date:
                 updated_date = self.iso_to_epoch(updated_date)
             report_name = 'qualys_web_' + str(report_id) \
@@ -457,7 +456,7 @@ class qualysWebAppReport:
                            % generated_report_id)
             vuln_ready = self.process_data(generated_report_id)
 
-            vuln_ready.to_csv(report_name, index=False)  # add when timestamp occured
+            vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
             print('[SUCCESS] - Report written to %s' \
                   % report_name)
             print('[ACTION] - Removing report %s' \
@@ -474,4 +473,3 @@ class qualysWebAppReport:
         return vuln_ready
 
 
-
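The qualysWebAppReport changes above concatenate the per-section frames, fill gaps with empty strings instead of 'N/A', join the result to the QID catalogue on QID/Id, and write the CSV with an explicit header. A standalone sketch with toy data; only the QID/Id join keys and the pandas calls are taken from the diff:

    # Toy data; column values are made up for illustration.
    import pandas as pd

    vulns = pd.DataFrame({'QID': [150001, 150002], 'Url': ['/a', '/b']})
    sensitive = pd.DataFrame({'QID': [150003], 'Url': ['/c']})
    info = pd.DataFrame({'QID': [150004], 'Url': ['/d']})
    catalog = pd.DataFrame({'Id': [150001, 150002, 150003, 150004],
                            'Title': ['XSS', 'SQLi', 'Leak', 'Banner']})

    merged = pd.concat([vulns, sensitive, info], axis=0,
                       ignore_index=False).fillna('')
    merged = pd.merge(merged, catalog, left_on='QID', right_on='Id')
    merged.to_csv('qualys_web_example.csv', index=False, header=True)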
@@ -1,3 +1,6 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+__author__ = 'Austin Taylor'
 
 from base.config import vwConfig
 from frameworks.nessus import NessusAPI
@@ -10,87 +13,121 @@ import time
 import sqlite3
 
 # TODO Create logging option which stores data about scan
 
 import logging
 
 
+class vulnWhispererBase(object):
+
+    def __init__(
+        self,
+        config=None,
+        db_name='report_tracker.db',
+        purge=False,
+        verbose=None,
+        debug=False,
+        username=None,
+        password=None,
+        ):
+        pass
+
+
 class vulnWhisperer(object):
 
-    def __init__(self, config=None, db_name='report_tracker.db', purge=False, verbose=None, debug=False, username=None, password=None):
+    def __init__(
+        self,
+        config=None,
+        db_name='report_tracker.db',
+        purge=False,
+        verbose=None,
+        debug=False,
+        username=None,
+        password=None,
+        ):
+
         self.verbose = verbose
         self.nessus_connect = False
         self.develop = True
         self.purge = purge
 
 
         if config is not None:
             try:
                 self.config = vwConfig(config_in=config)
-                self.nessus_enabled = self.config.getbool('nessus', 'enabled')
+                self.nessus_enabled = self.config.getbool('nessus',
+                        'enabled')
 
                 if self.nessus_enabled:
-                    self.nessus_hostname = self.config.get('nessus', 'hostname')
+                    self.nessus_hostname = self.config.get('nessus',
+                            'hostname')
                     self.nessus_port = self.config.get('nessus', 'port')
 
                     if password:
                         self.nessus_password = password
                     else:
-                        self.nessus_password = self.config.get('nessus', 'password')
+                        self.nessus_password = self.config.get('nessus'
+                                , 'password')
 
                     if username:
                         self.nessus_username = username
                     else:
-                        self.nessus_username = self.config.get('nessus', 'username')
+                        self.nessus_username = self.config.get('nessus'
+                                , 'username')
 
-                    self.nessus_writepath = self.config.get('nessus', 'write_path')
-                    self.nessus_dbpath = self.config.get('nessus', 'db_path')
-                    self.nessus_trash = self.config.getbool('nessus', 'trash')
-                    self.verbose = self.config.getbool('nessus', 'verbose')
+                    self.nessus_writepath = self.config.get('nessus',
+                            'write_path')
+                    self.nessus_dbpath = self.config.get('nessus',
+                            'db_path')
+                    self.nessus_trash = self.config.getbool('nessus',
+                            'trash')
+                    self.verbose = self.config.getbool('nessus',
+                            'verbose')
 
                     try:
-                        self.vprint(
-                            '{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
-                        self.nessus = NessusAPI(hostname=self.nessus_hostname,
+                        self.vprint('{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
+                        self.nessus = \
+                            NessusAPI(hostname=self.nessus_hostname,
                                       port=self.nessus_port,
                                       username=self.nessus_username,
                                       password=self.nessus_password)
                         self.nessus_connect = True
-                        self.vprint(
-                            '{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
+                        self.vprint('{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
                                 host=self.nessus_hostname,
                                 port=str(self.nessus_port)))
                     except Exception as e:
                         self.vprint(e)
                         raise Exception(
-                            "{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}".format(config=self.config,
-                                fail=bcolors.FAIL,
-                                e=e))
+                            '{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
+                                config=self.config,
+                                fail=bcolors.FAIL, e=e))
 
             except Exception as e:
 
-                self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL, e=e))
+                self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL,
+                            e=e))
                 sys.exit(0)
 
         if db_name is not None:
             if self.nessus_dbpath:
-                self.database = os.path.join(self.nessus_dbpath, db_name)
+                self.database = os.path.join(self.nessus_dbpath,
+                        db_name)
             else:
-                self.database = os.path.abspath(os.path.join(os.path.dirname( __file__ ), 'database', db_name))
+                self.database = \
+                    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                    'database', db_name))
 
             try:
                 self.conn = sqlite3.connect(self.database)
                 self.cur = self.conn.cursor()
-                self.vprint("{info} Connected to database at {loc}".format(info=bcolors.INFO, loc=self.database))
+                self.vprint('{info} Connected to database at {loc}'.format(info=bcolors.INFO,
+                            loc=self.database))
             except Exception as e:
-                self.vprint("{fail} Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist".format(e=e, fail=bcolors.FAIL, loc=self.database))
+                self.vprint(
+                    '{fail} Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
+                        e=e,
+                        fail=bcolors.FAIL, loc=self.database))
         else:
 
             self.vprint('{fail} Please specify a database to connect to!'.format(fail=bcolors.FAIL))
             exit(0)
 
-        self.table_columns = ['scan_name',
+        self.table_columns = [
+            'scan_name',
             'scan_id',
             'last_modified',
             'filename',
@@ -98,26 +135,26 @@ class vulnWhisperer(object):
             'record_count',
             'source',
             'uuid',
-            'processed']
+            'processed',
+            ]
         self.init()
         self.uuids = self.retrieve_uuids()
        self.processed = 0
         self.skipped = 0
         self.scan_list = []
 
 
 
     def vprint(self, msg):
         if self.verbose:
-            print(msg)
+            print msg
 
 
     def create_table(self):
-        self.cur.execute("create table if not exists scan_history (id integer primary key, scan_name text, scan_id integer, last_modified date, filename text, download_time date, record_count integer, source text, uuid text, processed integer)")
+        self.cur.execute(
+            'CREATE TABLE IF NOT EXISTS scan_history (id INTEGER PRIMARY KEY, scan_name TEXT, scan_id INTEGER, last_modified DATE, filename TEXT, download_time DATE, record_count INTEGER, source TEXT, uuid TEXT, processed INTEGER)'
+            )
         self.conn.commit()
 
     def delete_table(self):
-        self.cur.execute('drop table if exists scan_history')
+        self.cur.execute('DROP TABLE IF EXISTS scan_history')
         self.conn.commit()
 
     def init(self):
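create_table(), record_insert() and retrieve_uuids() above track processed scans in a scan_history table. A standalone sketch of the same bookkeeping against an in-memory SQLite database, with dummy values:

    # Dummy values; only the table name and columns come from the diff above.
    import sqlite3
    import time

    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()
    cur.execute('CREATE TABLE IF NOT EXISTS scan_history ('
                'id INTEGER PRIMARY KEY, scan_name TEXT, scan_id INTEGER, '
                'last_modified DATE, filename TEXT, download_time DATE, '
                'record_count INTEGER, source TEXT, uuid TEXT, processed INTEGER)')

    record = ('weekly_scan', 42, '1487600000', 'weekly_scan_42.csv',
              time.time(), 120, 'nessus', 'example-uuid', 1)
    cur.execute('INSERT INTO scan_history (scan_name, scan_id, last_modified, '
                'filename, download_time, record_count, source, uuid, processed) '
                'VALUES (?,?,?,?,?,?,?,?,?)', record)
    conn.commit()

    cur.execute('SELECT uuid FROM scan_history')
    print(frozenset(r[0] for r in cur.fetchall()))   # frozenset({'example-uuid'})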
@@ -126,7 +163,7 @@ class vulnWhisperer(object):
         self.create_table()
 
     def cleanser(self, _data):
-        repls = ('\n', '|||'), ('\r', '|||'), (',',';')
+        repls = (('\n', '|||'), ('\r', '|||'), (',', ';'))
         data = reduce(lambda a, kv: a.replace(*kv), repls, _data)
         return data
 
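cleanser() above collapses newlines and carriage returns to '|||' and commas to ';' via reduce. A quick standalone check of that replacement chain; note that reduce must be imported from functools on Python 3, while the module above appears to target Python 2, where it is a builtin:

    from functools import reduce

    repls = (('\n', '|||'), ('\r', '|||'), (',', ';'))
    raw = 'line one\r\nline two, with a comma'
    print(reduce(lambda a, kv: a.replace(*kv), repls, raw))
    # -> 'line one||||||line two; with a comma'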
@@ -142,6 +179,7 @@ class vulnWhisperer(object):
         :param completed: Only return completed scans
         :return:
         """
+
         self.vprint('{info} Gathering all scan data... this may take a while...'.format(info=bcolors.INFO))
         scan_records = []
         for s in scans:
@@ -159,24 +197,29 @@ class vulnWhisperer(object):
                     record['uuid'] = h.get('uuid', '')
                     record['status'] = h.get('status', '')
                     record['history_id'] = h.get('history_id', '')
-                    record['last_modification_date'] = h.get('last_modification_date', '')
-                    record['norm_time'] = self.nessus.get_utc_from_local(int(record['last_modification_date']),
-                                                                         local_tz=self.nessus.tz_conv(record['timezone']))
+                    record['last_modification_date'] = \
+                        h.get('last_modification_date', '')
+                    record['norm_time'] = \
+                        self.nessus.get_utc_from_local(int(record['last_modification_date'
+                            ]),
+                            local_tz=self.nessus.tz_conv(record['timezone'
+                            ]))
                     scan_records.append(record.copy())
 
             except Exception as e:
 
                 # print(e)
 
                 pass
 
         if completed:
-            scan_records = [s for s in scan_records if s['status'] == 'completed']
+            scan_records = [s for s in scan_records if s['status']
+                            == 'completed']
         return scan_records
 
 
     def record_insert(self, record):
-        self.cur.execute("insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?)".format(
-            table_columns=', '.join(self.table_columns)), record)
+        self.cur.execute('insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?)'.format(
+            table_columns=', '.join(self.table_columns)),
+            record)
         self.conn.commit()
 
     def retrieve_uuids(self):
@@ -184,12 +227,12 @@ class vulnWhisperer(object):
         Retrieves UUIDs from database and checks list to determine which files need to be processed.
         :return:
         """
 
         self.conn.text_factory = str
-        self.cur.execute('select uuid from scan_history')
+        self.cur.execute('SELECT uuid FROM scan_history')
         results = frozenset([r[0] for r in self.cur.fetchall()])
         return results
 
     def whisper_nessus(self):
         if self.nessus_connect:
             scan_data = self.nessus.get_scans()
@@ -197,16 +240,20 @@ class vulnWhisperer(object):
             scans = scan_data['scans']
             all_scans = self.scan_count(scans)
             if self.uuids:
-                scan_list = [scan for scan in all_scans if (scan['uuid'] not in self.uuids and scan['status']=='completed')]
+                scan_list = [scan for scan in all_scans if scan['uuid']
+                             not in self.uuids and scan['status']
+                             == 'completed']
             else:
                 scan_list = all_scans
-            self.vprint("{info} Identified {new} scans to be processed".format(info=bcolors.INFO, new=len(scan_list)))
+            self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
+                        new=len(scan_list)))
 
             if not scan_list:
-                self.vprint("{info} No new scans to process. Exiting...".format(info=bcolors.INFO))
+                self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
                 exit(0)
 
             # Create scan subfolders
 
             for f in folders:
                 if not os.path.exists(self.path_check(f['name'])):
                     if f['name'] == 'Trash' and self.nessus_trash:
@@ -216,63 +263,136 @@ class vulnWhisperer(object):
                 else:
                     os.path.exists(self.path_check(f['name']))
                     self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
-                        scan=self.path_check(f['name']), info=bcolors.INFO))
+                        scan=self.path_check(f['name'
+                        ]), info=bcolors.INFO))
 
             # try download and save scans into each folder the belong to
 
             scan_count = 0
 
             # TODO Rewrite this part to go through the scans that have aleady been processed
 
             for s in scan_list:
                 scan_count += 1
-                scan_name, scan_id, history_id,\
-                norm_time, status, uuid = s['scan_name'], s['scan_id'], s['history_id'],\
-                s['norm_time'], s['status'], s['uuid']
+                (
+                    scan_name,
+                    scan_id,
+                    history_id,
+                    norm_time,
+                    status,
+                    uuid,
+                ) = (
+                    s['scan_name'],
+                    s['scan_id'],
+                    s['history_id'],
+                    s['norm_time'],
+                    s['status'],
+                    s['uuid'],
+                )
 
                 # TODO Create directory sync function which scans the directory for files that exist already and populates the database
 
                 folder_id = s['folder_id']
                 scan_history = self.nessus.get_scan_history(scan_id)
-                folder_name = next(f['name'] for f in folders if f['id'] == folder_id)
+                folder_name = next(f['name'] for f in folders if f['id'
+                                   ] == folder_id)
                 if status == 'completed':
-                    file_name = '%s_%s_%s_%s.%s' % (scan_name, scan_id, history_id, norm_time, 'csv')
-                    repls = ('\\', '_'), ('/', '_'), ('/', '_'), (' ', '_')
-                    file_name = reduce(lambda a, kv: a.replace(*kv), repls, file_name)
-                    relative_path_name = self.path_check(folder_name + '/' + file_name)
+                    file_name = '%s_%s_%s_%s.%s' % (scan_name, scan_id,
+                            history_id, norm_time, 'csv')
+                    repls = (('\\', '_'), ('/', '_'), ('/', '_'), (' ',
+                             '_'))
+                    file_name = reduce(lambda a, kv: a.replace(*kv),
+                                       repls, file_name)
+                    relative_path_name = self.path_check(folder_name
+                            + '/' + file_name)
 
                     if os.path.isfile(relative_path_name):
                         if self.develop:
                             csv_in = pd.read_csv(relative_path_name)
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), csv_in.shape[0], 'nessus', uuid, 1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                csv_in.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
                             self.vprint(
-                                "{info} File {filename} already exist! Updating database".format(info=bcolors.INFO, filename=relative_path_name))
+                                '{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO,
+                                    filename=relative_path_name))
                     else:
-                        file_req = self.nessus.download_scan(scan_id=scan_id, history=history_id, export_format='csv')
-                        clean_csv = pd.read_csv(io.StringIO(file_req.decode('utf-8')))
+                        file_req = \
+                            self.nessus.download_scan(scan_id=scan_id,
+                                history=history_id, export_format='csv')
+                        clean_csv = \
+                            pd.read_csv(io.StringIO(file_req.decode('utf-8'
+                                        )))
                         if len(clean_csv) > 2:
-                            self.vprint("Processing %s/%s for scan: %s" % (scan_count, len(scan_history), scan_name))
-                            clean_csv['CVSS'] = clean_csv['CVSS'].astype(str).apply(self.cleanser)
-                            clean_csv['CVE'] = clean_csv['CVE'].astype(str).apply(self.cleanser)
-                            clean_csv['Description'] = clean_csv['Description'].astype(str).apply(self.cleanser)
-                            clean_csv['Synopsis'] = clean_csv['Description'].astype(str).apply(self.cleanser)
-                            clean_csv['Solution'] = clean_csv['Solution'].astype(str).apply(self.cleanser)
-                            clean_csv['See Also'] = clean_csv['See Also'].astype(str).apply(self.cleanser)
-                            clean_csv['Plugin Output'] = clean_csv['Plugin Output'].astype(str).apply(self.cleanser)
-                            clean_csv.to_csv(relative_path_name, index=False)
+                            self.vprint('Processing %s/%s for scan: %s'
+                                        % (scan_count, len(scan_history),
+                                        scan_name))
+                            clean_csv['CVSS'] = clean_csv['CVSS'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['CVE'] = clean_csv['CVE'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Description'] = \
+                                clean_csv['Description'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Synopsis'] = \
+                                clean_csv['Description'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Solution'] = clean_csv['Solution'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['See Also'] = clean_csv['See Also'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Plugin Output'] = \
+                                clean_csv['Plugin Output'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv.to_csv(relative_path_name,
+                                    index=False)
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
-                                1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                clean_csv.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
-                            self.vprint("{info} {filename} records written to {path} ".format(info=bcolors.INFO, filename=clean_csv.shape[0], path=file_name))
+                            self.vprint('{info} {filename} records written to {path} '.format(info=bcolors.INFO,
+                                        filename=clean_csv.shape[
+                                        0],
+                                        path=file_name))
                         else:
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
-                                1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                clean_csv.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
-                            self.vprint(file_name + ' has no host available... Updating database and skipping!')
+                            self.vprint(file_name
+                                        + ' has no host available... Updating database and skipping!'
+                                        )
             self.conn.close()
-            "{success} Scan aggregation complete! Connection to database closed.".format(success=bcolors.SUCCESS)
+            '{success} Scan aggregation complete! Connection to database closed.'.format(success=bcolors.SUCCESS)
 
 
         else:
-            self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL, host=self.nessus_hostname+':'+self.nessus_port))
+            self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL,
+                        host=self.nessus_hostname + ':'
+                        + self.nessus_port))
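A hypothetical driver for the class above: construct vulnWhisperer with a config path and call whisper_nessus(). The constructor arguments and method name come from this diff; the config path is an assumption:

    # Sketch only: the config path below is assumed, not part of this commit.
    from vulnwhisp.vulnwhisp import vulnWhisperer

    vw = vulnWhisperer(config='configs/frameworks_example.ini',  # assumed path
                       verbose=True)
    vw.whisper_nessus()   # downloads new Nessus scans and records them in SQLite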