From 5b6a51f02ceea685b43dbab57927862332894f2d Mon Sep 17 00:00:00 2001 From: pemontto Date: Wed, 1 May 2019 17:51:46 +0100 Subject: [PATCH] add unique document id --- .../pipeline/1000_nessus_process_file.conf | 21 ++++++++++++--- .../elk6/pipeline/2000_qualys_web_scans.conf | 24 +++++++++++++---- resources/elk6/pipeline/3000_openvas.conf | 24 +++++++++++++---- vulnwhisp/frameworks/openvas.py | 1 + vulnwhisp/frameworks/qualys_vuln.py | 2 ++ vulnwhisp/frameworks/qualys_web.py | 3 +++ vulnwhisp/vulnwhisp.py | 26 ++++++++++++++----- 7 files changed, 81 insertions(+), 20 deletions(-) diff --git a/resources/elk6/pipeline/1000_nessus_process_file.conf b/resources/elk6/pipeline/1000_nessus_process_file.conf index e344183..d575581 100644 --- a/resources/elk6/pipeline/1000_nessus_process_file.conf +++ b/resources/elk6/pipeline/1000_nessus_process_file.conf @@ -43,14 +43,29 @@ filter { convert => { "risk_number" => "integer"} convert => { "total_times_detected" => "integer"} } + + if [_unique] { + # Set document ID from _unique + mutate { + rename => { "_unique" => "[@metadata][id]" } + } + } } } output { if "nessus" in [tags] or "tenable" in [tags]{ - elasticsearch { - hosts => [ "elasticsearch:9200" ] - index => "logstash-vulnwhisperer-%{+YYYY.MM}" + if [@metadata][id] { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + document_id => "%{[@metadata][id]}" + } + } else { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + } } } } diff --git a/resources/elk6/pipeline/2000_qualys_web_scans.conf b/resources/elk6/pipeline/2000_qualys_web_scans.conf index d074b8a..e19f6f3 100644 --- a/resources/elk6/pipeline/2000_qualys_web_scans.conf +++ b/resources/elk6/pipeline/2000_qualys_web_scans.conf @@ -6,7 +6,7 @@ input { file { - path => [ "/opt/VulnWhisperer/data/qualys_vuln/*.json" ] + path => [ "/opt/VulnWhisperer/data/qualys_vuln/*.json" ] codec => json start_position => 
"beginning" tags => [ "qualys_vuln" ] @@ -15,7 +15,7 @@ input { file_completed_action => "delete" } file { - path => [ "/opt/VulnWhisperer/data/qualys_web/*.json" ] + path => [ "/opt/VulnWhisperer/data/qualys_web/*.json" ] codec => json start_position => "beginning" tags => [ "qualys_web" ] @@ -79,13 +79,27 @@ filter { # add_tag => [ "critical_asset" ] # } # } + if [_unique] { + # Set document ID from _unique + mutate { + rename => { "_unique" => "[@metadata][id]" } + } + } } } output { if "qualys_vuln" in [tags] or "qualys_web" in [tags] { - elasticsearch { - hosts => [ "elasticsearch:9200" ] - index => "logstash-vulnwhisperer-%{+YYYY.MM}" + if [@metadata][id] { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + document_id => "%{[@metadata][id]}" + } + } else { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + } } } } diff --git a/resources/elk6/pipeline/3000_openvas.conf b/resources/elk6/pipeline/3000_openvas.conf index 539475c..6cc35cc 100644 --- a/resources/elk6/pipeline/3000_openvas.conf +++ b/resources/elk6/pipeline/3000_openvas.conf @@ -100,18 +100,32 @@ filter { } # Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break. - if [asset] =~ "^10\.0\.100\." { + # if [asset] =~ "^10\.0\.100\." 
{ + # mutate { + # add_tag => [ "critical_asset" ] + # } + # } + if [_unique] { + # Set document ID from _unique mutate { - add_tag => [ "critical_asset" ] + rename => { "_unique" => "[@metadata][id]" } } } } } output { if "openvas" in [tags] { - elasticsearch { - hosts => [ "elasticsearch:9200" ] - index => "logstash-vulnwhisperer-%{+YYYY.MM}" + if [@metadata][id] { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + document_id => "%{[@metadata][id]}" + } + } else { + elasticsearch { + hosts => [ "elasticsearch:9200" ] + index => "logstash-vulnwhisperer-%{+YYYY.MM}" + } } } } diff --git a/vulnwhisp/frameworks/openvas.py b/vulnwhisp/frameworks/openvas.py index 14f6393..c411f7d 100644 --- a/vulnwhisp/frameworks/openvas.py +++ b/vulnwhisp/frameworks/openvas.py @@ -110,6 +110,7 @@ class OpenVAS_API(object): ] token = requests.post(self.base + self.OMP, data=data, verify=False) return token + def get_report_formats(self): params = ( ('cmd', 'get_report_formats'), diff --git a/vulnwhisp/frameworks/qualys_vuln.py b/vulnwhisp/frameworks/qualys_vuln.py index 4592c7a..9b33986 100644 --- a/vulnwhisp/frameworks/qualys_vuln.py +++ b/vulnwhisp/frameworks/qualys_vuln.py @@ -169,6 +169,8 @@ class qualysVulnScan: df['cvss_temporal_vector'] = df['cvss_temporal'].str.extract('\((.*)\)', expand=False) df['cvss_temporal'] = df['cvss_temporal'].str.extract('^(\d+(?:\.\d+)?)', expand=False) + # Set asset to ip + df['asset'] = df['ip'] # Convert Qualys severity to standardised risk number df['risk_number'] = df['severity'].astype(int)-1 diff --git a/vulnwhisp/frameworks/qualys_web.py b/vulnwhisp/frameworks/qualys_web.py index 270d7b8..6905074 100644 --- a/vulnwhisp/frameworks/qualys_web.py +++ b/vulnwhisp/frameworks/qualys_web.py @@ -527,5 +527,8 @@ class qualysScanReport: df['dns'] = df['url'].str.extract('https?://([^/]+)', expand=False) df.loc[df['uri'] != '','dns'] = df.loc[df['uri'] != '','uri'].str.extract('https?://([^/]+)', 
expand=False) + # Set asset to dns + df['asset'] = df['dns'] + df.fillna('', inplace=True) return df diff --git a/vulnwhisp/vulnwhisp.py b/vulnwhisp/vulnwhisp.py index 78d750b..6ee8d79 100755 --- a/vulnwhisp/vulnwhisp.py +++ b/vulnwhisp/vulnwhisp.py @@ -275,7 +275,7 @@ class vulnWhispererBase(object): if cvss_version in df: self.logger.info('Normalising {} severity'.format(cvss_version)) # Map CVSS to severity name - df.loc[df[cvss_version] == '', cvss_version] = None + df.loc[df[cvss_version].astype(str) == '', cvss_version] = None df[cvss_version] = df[cvss_version].astype('float') # df.loc[df[cvss_version].isnull(), cvss_version + '_severity'] = 'info' df.loc[df[cvss_version] == 0, cvss_version + '_severity'] = 'info' @@ -284,6 +284,13 @@ class vulnWhispererBase(object): df.loc[(df[cvss_version] >= 6) & (df[cvss_version] < 9), cvss_version + '_severity'] = 'high' df.loc[(df[cvss_version] > 9) & (df[cvss_version].notnull()), cvss_version + '_severity'] = 'critical' + self.logger.info('Creating Unique Document ID') + df['_unique'] = df.index.values + if 'history_id' in df: + df['_unique'] = df[['scan_id', 'history_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1) + else: + df['_unique'] = df[['scan_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1) + # Rename cvss to cvss2 # Make cvss with no suffix == cvss3 else cvss2 # cvss = cvss3 if cvss3 else cvss2 @@ -510,7 +517,6 @@ class vulnWhispererNessus(vulnWhispererBase): # Map and transform fields vuln_ready = self.nessus.normalise(vuln_ready) - vuln_ready = self.common_normalise(vuln_ready) # Set common fields vuln_ready['history_id'] = history_id @@ -519,6 +525,8 @@ class vulnWhispererNessus(vulnWhispererBase): vuln_ready['scan_source'] = self.CONFIG_SECTION vuln_ready['scan_time'] = norm_time + vuln_ready = self.common_normalise(vuln_ready) + vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True) os.rename(relative_path_name + '.tmp', relative_path_name) 
self.logger.info('{records} records written to {path} '.format(records=vuln_ready.shape[0], @@ -621,7 +629,6 @@ class vulnWhispererQualys(vulnWhispererBase): vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id)) # Map and transform fields vuln_ready = self.qualys_scan.normalise(vuln_ready) - vuln_ready = self.common_normalise(vuln_ready) # Set common fields vuln_ready['app_id'] = report_id @@ -630,6 +637,8 @@ vuln_ready['scan_source'] = self.CONFIG_SECTION vuln_ready['scan_time'] = launched_date + vuln_ready = self.common_normalise(vuln_ready) + if output_format == 'json': vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True) elif output_format == 'csv': @@ -699,6 +708,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase): 'Hostname': 'hostname', 'Port': 'port', 'Port Protocol': 'protocol', + 'CVEs': 'cve', 'CVSS': 'cvss', 'Severity': 'severity', 'Solution Type': 'category', @@ -782,11 +792,10 @@ class vulnWhispererOpenVAS(vulnWhispererBase): vuln_ready = self.openvas_api.process_report(report_id=report_id) # Map and transform fields vuln_ready = self.openvas_api.normalise(vuln_ready) - vuln_ready = self.common_normalise(vuln_ready) + # TODO move the following to the openvas_api.transform_values vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True) - vuln_ready.port = vuln_ready.port.fillna(0).astype(int) - vuln_ready.fillna('', inplace=True) + vuln_ready.port = vuln_ready.port.replace('', 0).fillna(0).astype(int) # Set common fields vuln_ready['scan_name'] = scan_name.encode('utf8') @@ -794,6 +803,8 @@ vuln_ready['scan_time'] = launched_date vuln_ready['scan_source'] = self.CONFIG_SECTION + vuln_ready = self.common_normalise(vuln_ready) + vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True) os.rename(relative_path_name + '.tmp', relative_path_name) self.logger.info('{records} records 
written to {path} '.format(records=vuln_ready.shape[0], @@ -904,7 +915,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase): vuln_ready = self.qualys_scan.process_data(scan_id=report_id) # Map and transform fields vuln_ready = self.qualys_scan.normalise(vuln_ready) - vuln_ready = self.common_normalise(vuln_ready) # Set common fields vuln_ready['scan_name'] = scan_name.encode('utf8') @@ -912,6 +922,8 @@ class vulnWhispererQualysVuln(vulnWhispererBase): vuln_ready['scan_time'] = launched_date vuln_ready['scan_source'] = self.CONFIG_SECTION + vuln_ready = self.common_normalise(vuln_ready) + except Exception as e: self.logger.error('Could not process {}: {}'.format(report_id, str(e))) self.exit_code += 1