Add @timestamps to findings, map more fields

pemontto committed 2019-04-12 01:49:15 +10:00
parent bd1430ebbf
commit 5b45da69a8
3 changed files with 47 additions and 20 deletions
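The change stamps every exported finding with an @timestamp in epoch milliseconds, derived from the scan's launch or last-modification time, which the scanners report in epoch seconds. A minimal sketch of that conversion (the function and field names here are illustrative, not part of the project's API):

    import pandas as pd

    def add_timestamp(findings, epoch_seconds):
        # Elasticsearch/Logstash expect @timestamp in epoch milliseconds,
        # so the scanner's epoch-seconds value is multiplied by 1000.
        findings['@timestamp'] = int(epoch_seconds) * 1000
        return findings

    # e.g. 2019-04-11 15:49:15 UTC (this commit's own timestamp) as epoch seconds
    df = add_timestamp(pd.DataFrame([{'asset': '10.0.0.5'}]), 1554997755)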

View File

@@ -35,6 +35,7 @@ class NessusAPI(object):
         'host': 'asset',
         'name': 'plugin_name',
         'os': 'operating_system',
+        'see also': 'exploitability',
         'system type': 'category',
         'vulnerability state': 'state'
     }
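The first two hunks extend the scanner-to-common-schema column mappings: Nessus's 'see also' column now feeds exploitability, and Qualys's 'impact' feeds synopsis below. Applying such a mapping is a plain pandas column rename, as the later vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True) calls show; a rough standalone sketch using only the entries visible above:

    import pandas as pd

    COLUMN_MAPPING = {
        'host': 'asset',
        'name': 'plugin_name',
        'os': 'operating_system',
        'see also': 'exploitability',  # added by this commit
    }

    raw = pd.DataFrame([{
        'host': 'db01',
        'name': 'SSL Self-Signed Certificate',
        'os': 'Linux',
        'see also': 'https://example.com/advisory',
    }])
    # Unmapped columns pass through unchanged.
    normalised = raw.rename(columns=COLUMN_MAPPING)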

View File

@@ -83,6 +83,7 @@ class qualysVulnScan:
         'cve_id': 'cve',
         'cvss_base': 'cvss',
         'cvss3_base': 'cvss3',
+        'impact': 'synopsis',
         'ip_status': 'state',
         'os': 'operating_system',
         'qid': 'plugin_id',

View File

@ -326,13 +326,13 @@ class vulnWhispererNessus(vulnWhispererBase):
record['uuid'] = h.get('uuid', '') record['uuid'] = h.get('uuid', '')
record['status'] = h.get('status', '') record['status'] = h.get('status', '')
record['history_id'] = h.get('history_id', '') record['history_id'] = h.get('history_id', '')
record['last_modification_date'] = \ record["last_modification_date"] = h.get(
h.get('last_modification_date', '') "last_modification_date", ""
record['norm_time'] = \ )
self.nessus.get_utc_from_local(int(record['last_modification_date' record["norm_time"] = self.nessus.get_utc_from_local(
]), int(record["last_modification_date"]),
local_tz=self.nessus.tz_conv(record['timezone' local_tz=self.nessus.tz_conv(record["timezone"]),
])) )
scan_records.append(record.copy()) scan_records.append(record.copy())
except Exception as e: except Exception as e:
# Generates error each time nonetype is encountered. # Generates error each time nonetype is encountered.
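The reshaped call above is behaviour-preserving: get_utc_from_local and tz_conv still come from the Nessus API wrapper and turn the scan's last_modification_date, reported against the scanner's own timezone, into the UTC value that becomes norm_time (and, further down, the @timestamp). Purely as an illustration of that idea, and not the wrapper's actual code, a hand-rolled equivalent might look like:

    import calendar
    from datetime import datetime
    import pytz

    def utc_from_local(local_epoch, tz_name='UTC'):
        # Treat the scanner-supplied value as wall-clock time in tz_name,
        # shift it to UTC, and hand back epoch seconds again.
        tz = pytz.timezone(tz_name)
        naive = datetime.utcfromtimestamp(int(local_epoch))
        utc_dt = tz.localize(naive).astimezone(pytz.utc)
        return calendar.timegm(utc_dt.timetuple())

    utc_from_local(1554997755, 'Australia/Sydney')  # 10 hours earlier than the input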
@@ -350,14 +350,20 @@ class vulnWhispererNessus(vulnWhispererBase):
         scans = scan_data['scans'] if scan_data['scans'] else []
         all_scans = self.scan_count(scans)
         if self.uuids:
-            scan_list = [scan for scan in all_scans if scan['uuid']
-                         not in self.uuids and scan['status'] in ['completed', 'imported']]
+            scan_list = [
+                scan
+                for scan in all_scans
+                if scan["uuid"] not in self.uuids
+                and scan["status"] in ["completed", "imported"]
+            ]
         else:
             scan_list = all_scans
-        self.logger.info('Identified {new} scans to be processed'.format(new=len(scan_list)))
+        self.logger.info(
+            "Identified {new} scans to be processed".format(new=len(scan_list))
+        )
 
         if not scan_list:
-            self.logger.warn('No new scans to process. Exiting...')
+            self.logger.warn("No new scans to process. Exiting...")
             return self.exit_code
 
         # Create scan subfolders
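The reflowed comprehension keeps the original logic: a scan is queued only when its uuid has not been whispered before and its status is completed or imported. A tiny worked example with made-up records:

    uuids = {'abc-1'}   # already processed on a previous run
    all_scans = [
        {'uuid': 'abc-1', 'status': 'completed'},   # skipped: seen before
        {'uuid': 'def-2', 'status': 'running'},     # skipped: not finished
        {'uuid': 'ghi-3', 'status': 'imported'},    # kept
    ]
    scan_list = [
        scan
        for scan in all_scans
        if scan['uuid'] not in uuids
        and scan['status'] in ['completed', 'imported']
    ]
    assert scan_list == [{'uuid': 'ghi-3', 'status': 'imported'}]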
@@ -445,9 +451,13 @@ class vulnWhispererNessus(vulnWhispererBase):
 
                 # Map and transform fields
                 clean_csv = self.nessus.normalise(clean_csv)
+
+                # Set common fields
                 clean_csv['scan_name'] = scan_name.encode('utf8')
                 clean_csv['scan_id'] = uuid
+                # Add @timestamp and convert to milliseconds
+                clean_csv['@timestamp'] = int(norm_time) * 1000
 
                 clean_csv.to_json(relative_path_name, orient='records', lines=True)
 
                 record_meta = (
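With the new field in place, every line of the report written by to_json(orient='records', lines=True) carries an @timestamp in epoch milliseconds next to the scan metadata, so downstream ingestion can use it directly as the event time. A toy sketch of the output shape (the finding columns here are illustrative):

    import pandas as pd

    clean_csv = pd.DataFrame([{'asset': '10.0.0.5', 'plugin_name': 'SSL Self-Signed Certificate'}])
    clean_csv['scan_name'] = 'weekly-dmz'
    clean_csv['scan_id'] = 'abc-123'
    clean_csv['@timestamp'] = 1554997755 * 1000

    print(clean_csv.to_json(orient='records', lines=True))
    # {"asset":"10.0.0.5","plugin_name":"SSL Self-Signed Certificate","scan_name":"weekly-dmz","scan_id":"abc-123","@timestamp":1554997755000}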
@@ -610,14 +620,17 @@ class vulnWhispererQualys(vulnWhispererBase):
                 self.logger.info('New Report ID: {}'.format(generated_report_id))
                 vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id))
-                vuln_ready['scan_name'] = scan_name.encode('utf8')
-                vuln_ready['scan_id'] = report_id
 
                 # Map and transform fields
                 vuln_ready = self.qualys_scan.normalise(vuln_ready)
                 # TODO remove the line below once normalising complete
                 vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
 
+                # Set common fields
+                vuln_ready['scan_name'] = scan_name.encode('utf8')
+                vuln_ready['scan_id'] = report_id
+                # Add @timestamp and convert to milliseconds
+                vuln_ready['@timestamp'] = int(launched_date) * 1000
 
                 record_meta = (
                     scan_name,
                     scan_reference,
@ -778,13 +791,19 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
else: else:
vuln_ready = self.openvas_api.process_report(report_id=report_id) vuln_ready = self.openvas_api.process_report(report_id=report_id)
vuln_ready['scan_name'] = scan_name.encode('utf8') # Map and transform fields
vuln_ready['scan_id'] = report_id vuln_ready = self.openvas_api.normalise(vuln_ready)
# TODO move the following to the openvas_api.transform_values
vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True) vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
vuln_ready.port = vuln_ready.port.fillna(0).astype(int) vuln_ready.port = vuln_ready.port.fillna(0).astype(int)
vuln_ready.fillna('', inplace=True) vuln_ready.fillna('', inplace=True)
# Map and transform fields
vuln_ready = self.openvas_api.normalise(vuln_ready) # Set common fields
vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_id'] = report_id
# Add @timestamp and convert to milliseconds
vuln_ready['@timestamp'] = int(launched_date) * 1000
vuln_ready.to_json(relative_path_name, orient='records', lines=True) vuln_ready.to_json(relative_path_name, orient='records', lines=True)
self.logger.info('Report written to {}'.format(report_name)) self.logger.info('Report written to {}'.format(report_name))
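The OpenVAS path is reordered to match the other scanners (normalise first, then the rename and port cleanup, then the common fields); the port coercion itself is untouched. As a reminder of why fillna(0) has to run before astype(int) (toy data, not a real report):

    import pandas as pd

    vuln_ready = pd.DataFrame({'port': [443, None, 22]})
    # Missing ports are NaN (a float), so casting straight to int would raise;
    # fill them with 0 first, then cast.
    vuln_ready.port = vuln_ready.port.fillna(0).astype(int)
    assert vuln_ready.port.tolist() == [443, 0, 22]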
@@ -876,10 +895,16 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
             try:
                 self.logger.info('Processing report ID: {}'.format(report_id))
                 vuln_ready = self.qualys_scan.process_data(scan_id=report_id)
-                vuln_ready['scan_name'] = scan_name.encode('utf8')
-                vuln_ready['scan_id'] = report_id
+
                 # Map and transform fields
                 vuln_ready = self.qualys_scan.normalise(vuln_ready)
+
+                # Set common fields
+                vuln_ready['scan_name'] = scan_name.encode('utf8')
+                vuln_ready['scan_id'] = report_id
+                # Add @timestamp and convert to milliseconds
+                vuln_ready['@timestamp'] = int(launched_date) * 1000
+
             except Exception as e:
                 self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
                 self.exit_code += 1