Pycharm indenting PEP8

Author: Quim
Date:   2020-03-03 10:19:00 +01:00
parent 97de805e0c
commit c0e7ab9863

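The hunks below are the output of PyCharm's automatic code reformatting applied for PEP 8 compliance: block comments gain a space after the '#', commas and assignment operators gain surrounding whitespace, and overly long calls are wrapped onto indented continuation lines. A minimal before/after sketch of the kind of change involved (a hypothetical snippet modeled on lines touched by this commit, not code taken from the repository):

import logging

logger = logging.getLogger('example')
source = 'nessus'
scan_name = 'weekly-baseline'

# Before the reformat: no space after '#', missing whitespace after commas, one over-long line.
#vulnerability variables
pair = '{}.{}'.format(source,scan_name)
logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source, scan_name=scan_name))

# After the reformat: '# ' comments, spaces after commas, the long call wrapped with a hanging indent.
# vulnerability variables
pair = '{}.{}'.format(source, scan_name)
logger.info(
    "Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(
        source=source, scan_name=scan_name))

A style checker such as pycodestyle or flake8 would flag the "before" lines (for example E265 for the comment and E231 for the missing whitespace after the comma); PyCharm's reformat fixes these in bulk, which is what every hunk in this diff does.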

@@ -3,6 +3,7 @@
from __future__ import absolute_import
from six.moves import range
from functools import reduce
__author__ = 'Austin Taylor'
from .base.config import vwConfig
@@ -24,7 +25,6 @@ import socket
class vulnWhispererBase(object):
CONFIG_SECTION = None
def __init__(
@@ -38,13 +38,13 @@ class vulnWhispererBase(object):
password=None,
section=None,
develop=False,
):
self.logger = logging.getLogger('vulnWhispererBase')
if debug:
self.logger.setLevel(logging.DEBUG)
if self.CONFIG_SECTION is None:
raise Exception('Implementing class must define CONFIG_SECTION')
self.exit_code = 0
self.db_name = db_name
@@ -68,8 +68,6 @@ class vulnWhispererBase(object):
self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
if self.db_name is not None:
if self.db_path:
self.database = os.path.join(self.db_path,
@@ -91,7 +89,8 @@ class vulnWhispererBase(object):
self.cur = self.conn.cursor()
self.logger.info('Connected to database at {loc}'.format(loc=self.database))
except Exception as e:
-self.logger.error('Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
+self.logger.error(
+    'Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
e=e,
loc=self.database))
else:
@@ -124,7 +123,7 @@ class vulnWhispererBase(object):
' scan_name TEXT, scan_id INTEGER, last_modified DATE, filename TEXT,'
' download_time DATE, record_count INTEGER, source TEXT,'
' uuid TEXT, processed INTEGER, reported INTEGER)'
)
self.conn.commit()
def delete_table(self):
@@ -150,11 +149,11 @@ class vulnWhispererBase(object):
return data
def record_insert(self, record):
-#for backwards compatibility with older versions without "reported" field
+# for backwards compatibility with older versions without "reported" field
try:
-#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
-#TODO delete backward compatibility check after some versions
+# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
+# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if last_column_table == self.table_columns[-1]:
self.cur.execute('insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?,?)'.format(
@@ -169,8 +168,8 @@ class vulnWhispererBase(object):
sys.exit(1)
def set_latest_scan_reported(self, filename):
-#the reason to use the filename instead of the source/scan_name is because the filename already belongs to
-#that latest scan, and we maintain integrity making sure that it is the exact scan we checked
+# the reason to use the filename instead of the source/scan_name is because the filename already belongs to
+# that latest scan, and we maintain integrity making sure that it is the exact scan we checked
try:
self.cur.execute('UPDATE scan_history SET reported = 1 WHERE filename="{}";'.format(filename))
self.conn.commit()
@@ -188,7 +187,8 @@ class vulnWhispererBase(object):
"""
try:
self.conn.text_factory = str
-self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(config_section=self.CONFIG_SECTION))
+self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(
+    config_section=self.CONFIG_SECTION))
results = frozenset([r[0] for r in self.cur.fetchall()])
except:
results = []
@@ -211,18 +211,23 @@ class vulnWhispererBase(object):
try:
self.conn.text_factory = str
-self.cur.execute('SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(source, scan_name))
-#should always return just one filename
+self.cur.execute(
+    'SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(
+        source, scan_name))
+# should always return just one filename
results = [r[0] for r in self.cur.fetchall()][0]
-#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
-#TODO delete backward compatibility check after some versions
+# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
+# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if results and last_column_table == self.table_columns[-1]:
-reported = self.cur.execute('SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
+reported = self.cur.execute(
+    'SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
reported = reported[0][0]
if reported:
-self.logger.debug("Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(source=source, scan_name=scan_name))
+self.logger.debug(
+    "Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(
+        source=source, scan_name=scan_name))
except Exception as e:
self.logger.error("Error when getting latest results from {}.{} : {}".format(source, scan_name, e))
@@ -251,14 +256,14 @@ class vulnWhispererBase(object):
self.cur.execute("SELECT DISTINCT scan_name FROM scan_history WHERE source='{}';".format(source))
scan_names = [r[0] for r in self.cur.fetchall()]
for scan in scan_names:
-results.append('{}.{}'.format(source,scan))
+results.append('{}.{}'.format(source, scan))
except:
scan_names = []
return results
class vulnWhispererNessus(vulnWhispererBase):
CONFIG_SECTION = None
def __init__(
@@ -272,7 +277,7 @@ class vulnWhispererNessus(vulnWhispererBase):
password=None,
profile='nessus'
):
-self.CONFIG_SECTION=profile
+self.CONFIG_SECTION = profile
super(vulnWhispererNessus, self).__init__(config=config)
@@ -294,8 +299,8 @@ class vulnWhispererNessus(vulnWhispererBase):
'trash')
try:
-self.access_key = self.config.get(self.CONFIG_SECTION,'access_key')
-self.secret_key = self.config.get(self.CONFIG_SECTION,'secret_key')
+self.access_key = self.config.get(self.CONFIG_SECTION, 'access_key')
+self.secret_key = self.config.get(self.CONFIG_SECTION, 'secret_key')
except:
pass
@@ -312,7 +317,7 @@ class vulnWhispererNessus(vulnWhispererBase):
)
self.nessus_connect = True
self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
port=str(self.nessus_port)))
except Exception as e:
self.logger.error('Exception: {}'.format(str(e)))
raise Exception(
@@ -323,9 +328,7 @@ class vulnWhispererNessus(vulnWhispererBase):
except Exception as e:
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
return False
-#sys.exit(1)
+# sys.exit(1)
def scan_count(self, scans, completed=False):
"""
@@ -368,7 +371,6 @@ class vulnWhispererNessus(vulnWhispererBase):
scan_records = [s for s in scan_records if s['status'] == 'completed']
return scan_records
def whisper_nessus(self):
if self.nessus_connect:
scan_data = self.nessus.scans
@@ -423,7 +425,8 @@ class vulnWhispererNessus(vulnWhispererBase):
s['uuid'],
)
-# TODO Create directory sync function which scans the directory for files that exist already and populates the database
+# TODO Create directory sync function which scans the directory for files that exist already and
+# populates the database
folder_id = s['folder_id']
if self.CONFIG_SECTION == 'tenable':
@@ -453,22 +456,26 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
-self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))
+self.logger.info(
+    'File {filename} already exist! Updating database'.format(filename=relative_path_name))
else:
try:
file_req = \
self.nessus.download_scan(scan_id=scan_id, history=history_id,
export_format='csv')
except Exception as e:
-self.logger.error('Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
+self.logger.error(
+    'Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
self.exit_code += 1
continue
clean_csv = \
pd.read_csv(io.StringIO(file_req.decode('utf-8')))
if len(clean_csv) > 2:
-self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name.encode('utf8')))
-columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output', 'MAC Address']
+self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list),
+    scan_name.encode('utf8')))
+columns_to_cleanse = ['CVSS', 'CVE', 'Description', 'Synopsis', 'Solution', 'See Also',
+    'Plugin Output', 'MAC Address']
for col in columns_to_cleanse:
if col in clean_csv:
@@ -489,7 +496,8 @@ class vulnWhispererNessus(vulnWhispererBase):
)
self.record_insert(record_meta)
self.logger.info('{filename} records written to {path} '.format(filename=clean_csv.shape[0],
-path=file_name.encode('utf8')))
+path=file_name.encode(
+    'utf8')))
else:
record_meta = (
scan_name,
@@ -504,66 +512,68 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
-self.logger.warn('{} has no host available... Updating database and skipping!'.format(file_name))
+self.logger.warn(
+    '{} has no host available... Updating database and skipping!'.format(file_name))
self.conn.close()
self.logger.info('Scan aggregation complete! Connection to database closed.')
else:
-self.logger.error('Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
+self.logger.error(
+    'Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
self.exit_code += 1
return self.exit_code
class vulnWhispererQualys(vulnWhispererBase):
CONFIG_SECTION = 'qualys_web'
COLUMN_MAPPING = {'Access Path': 'access_path',
'Ajax Request': 'ajax_request',
'Ajax Request ID': 'ajax_request_id',
'Authentication': 'authentication',
'CVSS Base': 'cvss',
'CVSS Temporal': 'cvss_temporal',
'CWE': 'cwe',
'Category': 'category',
'Content': 'content',
'DescriptionSeverity': 'severity_description',
'DescriptionCatSev': 'category_description',
'Detection ID': 'detection_id',
'Evidence #1': 'evidence_1',
'First Time Detected': 'first_time_detected',
'Form Entry Point': 'form_entry_point',
'Function': 'function',
'Groups': 'groups',
'ID': 'id',
'Ignore Comments': 'ignore_comments',
'Ignore Date': 'ignore_date',
'Ignore Reason': 'ignore_reason',
'Ignore User': 'ignore_user',
'Ignored': 'ignored',
'Impact': 'impact',
'Last Time Detected': 'last_time_detected',
'Last Time Tested': 'last_time_tested',
'Level': 'level',
'OWASP': 'owasp',
'Operating System': 'operating_system',
'Owner': 'owner',
'Param': 'param',
'Payload #1': 'payload_1',
'QID': 'plugin_id',
'Request Headers #1': 'request_headers_1',
'Request Method #1': 'request_method_1',
'Request URL #1': 'request_url_1',
'Response #1': 'response_1',
'Scope': 'scope',
'Severity': 'risk',
'Severity Level': 'security_level',
'Solution': 'solution',
'Times Detected': 'times_detected',
'Title': 'plugin_name',
'URL': 'url',
'Url': 'uri',
'Vulnerability Category': 'vulnerability_category',
'WASC': 'wasc',
'Web Application Name': 'web_application_name'}
def __init__(
self,
config=None,
@@ -573,7 +583,7 @@ class vulnWhispererQualys(vulnWhispererBase):
debug=False,
username=None,
password=None,
):
super(vulnWhispererQualys, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererQualys')
@@ -611,7 +621,7 @@ class vulnWhispererQualys(vulnWhispererBase):
relative_path_name = self.path_check(report_name).encode('utf8')
if os.path.isfile(relative_path_name):
-#TODO Possibly make this optional to sync directories
+# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
@@ -663,7 +673,7 @@ class vulnWhispererQualys(vulnWhispererBase):
f.write('\n')
elif output_format == 'csv':
vuln_ready.to_csv(relative_path_name, index=False, header=True) # add when timestamp occured
self.logger.info('Report written to {}'.format(report_name))
@@ -671,7 +681,8 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
cleaning_up = self.qualys_scan.qw.delete_report(generated_report_id)
os.remove(self.path_check(str(generated_report_id) + '.csv'))
-self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
+self.logger.info(
+    'Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
else:
self.logger.error('Could not process report ID: {}'.format(status))
@@ -679,7 +690,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
return vuln_ready
def identify_scans_to_process(self):
if self.uuids:
self.scans_to_process = self.latest_scans[~self.latest_scans['id'].isin(self.uuids)]
@@ -687,7 +697,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))
def process_web_assets(self):
counter = 0
self.identify_scans_to_process()
@@ -768,7 +777,6 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
if report_id:
self.logger.info('Processing report ID: {}'.format(report_id))
scan_name = report_id.replace('-', '')
report_name = 'openvas_scan_{scan_name}_{last_updated}.{extension}'.format(scan_name=scan_name,
last_updated=launched_date,
@@ -836,7 +844,8 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
for scan in self.scans_to_process.iterrows():
counter += 1
info = scan[1]
-self.logger.info('Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
+self.logger.info(
+    'Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.whisper_reports(report_id=info['report_ids'],
launched_date=info['epoch'])
self.logger.info('Processing complete')
@@ -847,15 +856,14 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
class vulnWhispererQualysVuln(vulnWhispererBase):
CONFIG_SECTION = 'qualys_vuln'
COLUMN_MAPPING = {'cvss_base': 'cvss',
'cvss3_base': 'cvss3',
'cve_id': 'cve',
'os': 'operating_system',
'qid': 'plugin_id',
'severity': 'risk',
'title': 'plugin_name'}
def __init__(
self,
@@ -866,7 +874,7 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
debug=False,
username=None,
password=None,
):
super(vulnWhispererQualysVuln, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererQualysVuln')
@@ -888,66 +896,65 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
output_format='json',
cleanup=True):
if 'Z' in launched_date:
launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
-report_name = 'qualys_vuln_' + report_id.replace('/','_') \
+report_name = 'qualys_vuln_' + report_id.replace('/', '_') \
    + '_{last_updated}'.format(last_updated=launched_date) \
    + '.json'
relative_path_name = self.path_check(report_name).encode('utf8')
if os.path.isfile(relative_path_name):
-#TODO Possibly make this optional to sync directories
+# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
scan_reference,
launched_date,
report_name,
time.time(),
file_length,
self.CONFIG_SECTION,
report_id,
1,
0,
)
self.record_insert(record_meta)
self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))
else:
try:
self.logger.info('Processing report ID: {}'.format(report_id))
vuln_ready = self.qualys_scan.process_data(scan_id=report_id)
vuln_ready['scan_name'] = scan_name
vuln_ready['scan_reference'] = report_id
vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
except Exception as e:
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
self.exit_code += 1
return self.exit_code
record_meta = (
scan_name,
scan_reference,
launched_date,
report_name,
time.time(),
vuln_ready.shape[0],
self.CONFIG_SECTION,
report_id,
1,
0,
)
self.record_insert(record_meta)
if output_format == 'json':
with open(relative_path_name, 'w') as f:
f.write(vuln_ready.to_json(orient='records', lines=True))
f.write('\n')
self.logger.info('Report written to {}'.format(report_name))
return self.exit_code
def identify_scans_to_process(self):
self.latest_scans = self.qualys_scan.qw.get_all_scans()
@@ -959,7 +966,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))
def process_vuln_scans(self):
counter = 0
self.identify_scans_to_process()
@@ -969,9 +975,9 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
r = app[1]
self.logger.info('Processing {}/{}'.format(counter, len(self.scans_to_process)))
self.exit_code += self.whisper_reports(report_id=r['id'],
launched_date=r['date'],
scan_name=r['name'],
scan_reference=r['type'])
else:
self.logger.info('No new scans to process. Exiting...')
self.conn.close()
@@ -979,7 +985,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
class vulnWhispererJIRA(vulnWhispererBase):
CONFIG_SECTION = 'jira'
def __init__(
@@ -991,7 +996,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
debug=False,
username=None,
password=None,
):
super(vulnWhispererJIRA, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererJira')
if debug:
@@ -1001,8 +1006,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.host_resolv_cache = {}
self.host_no_resolv = []
self.no_resolv_by_team_dict = {}
-#Save locally those assets without DNS entry for flag to system owners
-self.no_resolv_fname="no_resolv.txt"
+# Save locally those assets without DNS entry for flag to system owners
+self.no_resolv_fname = "no_resolv.txt"
if os.path.isfile(self.no_resolv_fname):
with open(self.no_resolv_fname, "r") as json_file:
self.no_resolv_by_team_dict = json.load(json_file)
@@ -1013,9 +1018,9 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.logger.info('Attempting to connect to jira...')
self.jira = \
JiraAPI(hostname=self.hostname,
username=self.username,
password=self.password,
-path=self.config.get('jira','write_path'))
+path=self.config.get('jira', 'write_path'))
self.jira_connect = True
self.logger.info('Connected to jira on {host}'.format(host=self.hostname))
except Exception as e:
@@ -1024,24 +1029,25 @@ class vulnWhispererJIRA(vulnWhispererBase):
'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
config=self.config.config_in, e=e))
return False
-#sys.exit(1)
+# sys.exit(1)
profiles = []
profiles = self.get_scan_profiles()
if not self.config.exists_jira_profiles(profiles):
self.config.update_jira_profiles(profiles)
-self.logger.info("Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(config=self.config_path))
+self.logger.info(
+    "Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(
+        config=self.config_path))
sys.exit(0)
def get_env_variables(self, source, scan_name):
# function returns an array with [jira_project, jira_components, datafile_path]
-#Jira variables
-jira_section = self.config.normalize_section("{}.{}".format(source,scan_name))
-project = self.config.get(jira_section,'jira_project')
+# Jira variables
+jira_section = self.config.normalize_section("{}.{}".format(source, scan_name))
+project = self.config.get(jira_section, 'jira_project')
if project == "":
self.logger.error('JIRA project is missing on the configuration file!')
sys.exit(0)
@@ -1051,35 +1057,39 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.logger.error("JIRA project '{project}' doesn't exist!".format(project=project))
sys.exit(0)
-components = self.config.get(jira_section,'components').split(',')
-#cleaning empty array from ''
+components = self.config.get(jira_section, 'components').split(',')
+# cleaning empty array from ''
if not components[0]:
components = []
-min_critical = self.config.get(jira_section,'min_critical_to_report')
+min_critical = self.config.get(jira_section, 'min_critical_to_report')
if not min_critical:
self.logger.error('"min_critical_to_report" variable on config file is empty.')
sys.exit(0)
-#datafile path
+# datafile path
filename, reported = self.get_latest_results(source, scan_name)
fullpath = ""
# search data files under user specified directory
-for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source,'write_path')):
+for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source, 'write_path')):
if filename in filenames:
-fullpath = "{}/{}".format(root,filename)
+fullpath = "{}/{}".format(root, filename)
if reported:
-self.logger.warn('Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(scan_name=scan_name, source=source))
+self.logger.warn(
+    'Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(
+        scan_name=scan_name, source=source))
return [False] * 5
if not fullpath:
-self.logger.error('Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(scan_name=scan_name, source=source))
+self.logger.error(
+    'Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(
+        scan_name=scan_name, source=source))
sys.exit(1)
-dns_resolv = self.config.get('jira','dns_resolv')
+dns_resolv = self.config.get('jira', 'dns_resolv')
if dns_resolv in ('False', 'false', ''):
dns_resolv = False
elif dns_resolv in ('True', 'true'):
@@ -1090,36 +1100,36 @@ class vulnWhispererJIRA(vulnWhispererBase):
return project, components, fullpath, min_critical, dns_resolv
def parse_nessus_vulnerabilities(self, fullpath, source, scan_name, min_critical):
vulnerabilities = []
# we need to parse the CSV
risks = ['none', 'low', 'medium', 'high', 'critical']
-min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])
+min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0])
df = pd.read_csv(fullpath, delimiter=',')
-#nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
+# nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
for index in range(len(df)):
# filtering vulnerabilities by criticality, discarding low risk
-to_report = int([i for i,x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
+to_report = int([i for i, x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
if to_report < min_risk:
continue
if not vulnerabilities or df.loc[index]['Name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
-#vulnerabilities should have all the info for creating all JIRA labels
+# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
-#vulnerability variables
+# vulnerability variables
vuln['title'] = df.loc[index]['Name']
-vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n',' ')
-vuln['consequence'] = df.loc[index]['Description'].replace('\\n',' ')
-vuln['solution'] = df.loc[index]['Solution'].replace('\\n',' ')
+vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n', ' ')
+vuln['consequence'] = df.loc[index]['Description'].replace('\\n', ' ')
+vuln['solution'] = df.loc[index]['Solution'].replace('\\n', ' ')
vuln['ips'] = []
-vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
+vuln['ips'].append(
+    "{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['risk'] = df.loc[index]['Risk'].lower()
# Nessus "nan" value gets automatically casted to float by python
@@ -1133,51 +1143,54 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == df.loc[index]['Name']:
-vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
+vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'],
+    df.loc[index]['Port']))
return vulnerabilities
-def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv = False):
-#parsing of the qualys vulnerabilities schema
-#parse json
+def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv=False):
+# parsing of the qualys vulnerabilities schema
+# parse json
vulnerabilities = []
risks = ['info', 'low', 'medium', 'high', 'critical']
# +1 as array is 0-4, but score is 1-5
-min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])+1
+min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0]) + 1
try:
-data=[json.loads(line) for line in open(fullpath).readlines()]
+data = [json.loads(line) for line in open(fullpath).readlines()]
except Exception as e:
self.logger.warn("Scan has no vulnerabilities, skipping.")
return vulnerabilities
-#qualys fields we want - []
+# qualys fields we want - []
for index in range(len(data)):
if int(data[index]['risk']) < min_risk:
continue
elif data[index]['type'] == 'Practice' or data[index]['type'] == 'Ig':
-self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['plugin_name']))
+self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(
+    vuln=data[index]['plugin_name']))
continue
if not vulnerabilities or data[index]['plugin_name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
-#vulnerabilities should have all the info for creating all JIRA labels
+# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
-#vulnerability variables
+# vulnerability variables
vuln['title'] = data[index]['plugin_name']
-vuln['diagnosis'] = data[index]['threat'].replace('\\n',' ')
-vuln['consequence'] = data[index]['impact'].replace('\\n',' ')
-vuln['solution'] = data[index]['solution'].replace('\\n',' ')
+vuln['diagnosis'] = data[index]['threat'].replace('\\n', ' ')
+vuln['consequence'] = data[index]['impact'].replace('\\n', ' ')
+vuln['solution'] = data[index]['solution'].replace('\\n', ' ')
vuln['ips'] = []
-#TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!
-vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
+# TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!
+vuln['ips'].append(
+    "{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
-#different risk system than Nessus!
-vuln['risk'] = risks[int(data[index]['risk'])-1]
+# different risk system than Nessus!
+vuln['risk'] = risks[int(data[index]['risk']) - 1]
# Nessus "nan" value gets automatically casted to float by python
if not (type(data[index]['vendor_reference']) is float or data[index]['vendor_reference'] == None):
@@ -1189,7 +1202,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == data[index]['plugin_name']:
-vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
+vuln['ips'].append(
+    "{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
return vulnerabilities
@@ -1224,49 +1238,54 @@ class vulnWhispererJIRA(vulnWhispererBase):
return values
def parse_vulnerabilities(self, fullpath, source, scan_name, min_critical):
-#TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
-#JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']
+# TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
+# JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']
return 0
def jira_sync(self, source, scan_name):
-self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source, scan_name=scan_name))
+self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source,
+    scan_name=scan_name))
project, components, fullpath, min_critical, dns_resolv = self.get_env_variables(source, scan_name)
if not project:
-self.logger.debug("Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(source=source, scan_name=scan_name))
+self.logger.debug(
+    "Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(
+        source=source, scan_name=scan_name))
return False
vulnerabilities = []
-#***Nessus parsing***
+# ***Nessus parsing***
if source == "nessus":
vulnerabilities = self.parse_nessus_vulnerabilities(fullpath, source, scan_name, min_critical)
-#***Qualys VM parsing***
+# ***Qualys VM parsing***
if source == "qualys_vuln":
-vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
+vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical,
+    dns_resolv)
-#***JIRA sync***
+# ***JIRA sync***
if vulnerabilities:
self.logger.info('{source} data has been successfuly parsed'.format(source=source.upper()))
self.logger.info('Starting JIRA sync')
self.jira.sync(vulnerabilities, project, components)
else:
-self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
+self.logger.info(
+    "[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source,
+        scan_name=scan_name))
self.set_latest_scan_reported(fullpath.split("/")[-1])
return False
-#writing to file those assets without DNS resolution
-#if its not empty
+# writing to file those assets without DNS resolution
+# if its not empty
if self.host_no_resolv:
-#we will replace old list of non resolved for the new one or create if it doesn't exist already
+# we will replace old list of non resolved for the new one or create if it doesn't exist already
self.no_resolv_by_team_dict[scan_name] = self.host_no_resolv
with open(self.no_resolv_fname, 'w') as outfile:
json.dump(self.no_resolv_by_team_dict, outfile)
self.set_latest_scan_reported(fullpath.split("/")[-1])
return True
@@ -1279,10 +1298,13 @@ class vulnWhispererJIRA(vulnWhispererBase):
try:
self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
except Exception as e:
-self.logger.error("VulnWhisperer wasn't able to report the vulnerabilities from the '{}'s source".format(self.config.get(scan, 'source')))
+self.logger.error(
+    "VulnWhisperer wasn't able to report the vulnerabilities from the '{}'s source".format(
+        self.config.get(scan, 'source')))
return True
return False
class vulnWhisperer(object):
def __init__(self,
@@ -1306,7 +1328,6 @@ class vulnWhisperer(object):
self.scanname = scanname
self.exit_code = 0
def whisper_vulnerabilities(self):
if self.profile == 'nessus':
@@ -1337,7 +1358,7 @@ class vulnWhisperer(object):
self.exit_code += vw.process_vuln_scans()
elif self.profile == 'jira':
-#first we check config fields are created, otherwise we create them
+# first we check config fields are created, otherwise we create them
vw = vulnWhispererJIRA(config=self.config)
if vw:
if not (self.source and self.scanname):