From 97d2a2606c77df5688273edec207954c7b0330a2 Mon Sep 17 00:00:00 2001 From: pemontto Date: Mon, 15 Apr 2019 20:10:49 +1000 Subject: [PATCH] Cleanup logstash configs --- resources/elk6/init_kibana.sh | 2 +- .../pipeline/1000_nessus_process_file.conf | 106 ++++---------- .../elk6/pipeline/2000_qualys_web_scans.conf | 134 +++++++----------- resources/elk6/pipeline/3000_openvas.conf | 64 +++++---- vulnwhisp/vulnwhisp.py | 20 +-- 5 files changed, 138 insertions(+), 188 deletions(-) diff --git a/resources/elk6/init_kibana.sh b/resources/elk6/init_kibana.sh index 797fa93..9d2cbc3 100755 --- a/resources/elk6/init_kibana.sh +++ b/resources/elk6/init_kibana.sh @@ -10,7 +10,7 @@ saved_objects_file="kibana_APIonly.json" #if [ `curl -I localhost:5601/status | head -n1 |cut -d$' ' -f2` -eq '200' ]; then echo "Loading VulnWhisperer Saved Objects"; eval $(echo $add_saved_objects$saved_objects_file); else echo "waiting for kibana"; fi -until curl -s "$elasticsearch_url/_cluster/health?pretty" | grep '"status"' | grep -q green; do +until curl -s "$elasticsearch_url/_cluster/health?pretty" | grep '"status"' | grep -qE "green|yellow"; do curl -s "$elasticsearch_url/_cluster/health?pretty" echo "Waiting for Elasticsearch" sleep 5 diff --git a/resources/elk6/pipeline/1000_nessus_process_file.conf b/resources/elk6/pipeline/1000_nessus_process_file.conf index 122f1f0..1462ee3 100644 --- a/resources/elk6/pipeline/1000_nessus_process_file.conf +++ b/resources/elk6/pipeline/1000_nessus_process_file.conf @@ -27,95 +27,49 @@ input { filter { if "nessus" in [tags] or "tenable" in [tags] { + date { + match => [ "_timestamp", "UNIX" ] + target => "@timestamp" + remove_field => ["timestamp"] + } + #If using filebeats as your source, you will need to replace the "path" field to "source" # Remove when scan name is included in event (current method is error prone) grok { - match => { "path" => "(?[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.(csv|json)$" } + match => { 
"path" => "([a-zA-Z0-9_.\-]+)_%{INT}_%{INT:history_id}_%{INT}.json$" } tag_on_failure => [] } - # TODO remove when @timestamp is included in event - date { - match => [ "last_updated", "UNIX" ] - target => "@timestamp" - remove_field => ["last_updated"] - } - - if [risk] == "None" { - mutate { add_field => { "risk_number" => 0 }} - } - if [risk] == "Low" { - mutate { add_field => { "risk_number" => 1 }} - } - if [risk] == "Medium" { - mutate { add_field => { "risk_number" => 2 }} - } - if [risk] == "High" { - mutate { add_field => { "risk_number" => 3 }} - } - if [risk] == "Critical" { - mutate { add_field => { "risk_number" => 4 }} - } - - if ![cve] or [cve] == "nan" { - mutate { remove_field => [ "cve" ] } - } - if ![cvss] or [cvss] == "nan" { - mutate { remove_field => [ "cvss" ] } - } - if ![cvss_base] or [cvss_base] == "nan" { - mutate { remove_field => [ "cvss_base" ] } - } - if ![cvss_temporal] or [cvss_temporal] == "nan" { - mutate { remove_field => [ "cvss_temporal" ] } - } - if ![cvss_temporal_vector] or [cvss_temporal_vector] == "nan" { - mutate { remove_field => [ "cvss_temporal_vector" ] } - } - if ![cvss_vector] or [cvss_vector] == "nan" { - mutate { remove_field => [ "cvss_vector" ] } - } - if ![cvss3_base] or [cvss3_base] == "nan" { - mutate { remove_field => [ "cvss3_base" ] } - } - if ![cvss3_temporal] or [cvss3_temporal] == "nan" { - mutate { remove_field => [ "cvss3_temporal" ] } - } - if ![cvss3_temporal_vector] or [cvss3_temporal_vector] == "nan" { - mutate { remove_field => [ "cvss3_temporal_vector" ] } - } - if ![description] or [description] == "nan" { - mutate { remove_field => [ "description" ] } - } - if ![mac_address] or [mac_address] == "nan" { - mutate { remove_field => [ "mac_address" ] } - } - if ![netbios] or [netbios] == "nan" { - mutate { remove_field => [ "netbios" ] } - } - if ![operating_system] or [operating_system] == "nan" { - mutate { remove_field => [ "operating_system" ] } - } - if ![plugin_output] or [plugin_output] == 
"nan" { - mutate { remove_field => [ "plugin_output" ] } - } - if ![see_also] or [see_also] == "nan" { - mutate { remove_field => [ "see_also" ] } - } - if ![synopsis] or [synopsis] == "nan" { - mutate { remove_field => [ "synopsis" ] } - } - if ![system_type] or [system_type] == "nan" { - mutate { remove_field => [ "system_type" ] } + translate { + field => "[risk]" + destination => "[risk_number]" + dictionary => { + "None" => 0 + "Low" => 1 + "Medium" => 2 + "High" => 3 + "Critical" => 4 + } } mutate { - remove_field => [ "message" ] add_field => { "risk_score" => "%{cvss}" } } + mutate { - convert => { "risk_score" => "float" } + convert => { "cvss_base" => "float"} + convert => { "cvss_temporal" => "float"} + convert => { "cvss" => "float"} + convert => { "cvss3_base" => "float"} + convert => { "cvss3_temporal" => "float"} + convert => { "cvss3" => "float"} + convert => { "id" => "integer"} + convert => { "plugin_id" => "integer"} + convert => { "risk_number" => "integer"} + convert => { "risk_score" => "float"} + convert => { "total_times_detected" => "integer"} } + if [risk_score] == 0 { mutate { add_field => { "risk_score_name" => "info" } diff --git a/resources/elk6/pipeline/2000_qualys_web_scans.conf b/resources/elk6/pipeline/2000_qualys_web_scans.conf index 7c207c7..0ee2522 100644 --- a/resources/elk6/pipeline/2000_qualys_web_scans.conf +++ b/resources/elk6/pipeline/2000_qualys_web_scans.conf @@ -19,101 +19,53 @@ input { filter { if "qualys" in [tags] { + date { + match => [ "_timestamp", "UNIX" ] + target => "@timestamp" + remove_field => ["timestamp"] + } + grok { - match => { "path" => [ "(?qualys_vuln)_scan_%{DATA}_%{INT:last_updated}.json$", "(?qualys_web)_%{INT:app_id}_%{INT:last_updated}.json$" ] } + match => { "path" => [ "(?qualys_vuln)_scan_%{DATA}_%{INT}.json$", "(?qualys_web)_%{INT:app_id}_%{INT}.json$" ] } tag_on_failure => [] } - - mutate { - replace => [ "message", "%{message}" ] - #gsub => [ - # "message", "\|\|\|", " ", - # "message", 
"\t\t", " ", - # "message", " ", " ", - # "message", " ", " ", - # "message", " ", " ", - # "message", "nan", " ", - # "message",'\n','' - #] + + translate { + field => "[risk_number]" + destination => "[risk]" + dictionary => { + "0" => "Info" + "1" => "Low" + "2" => "Medium" + "3" => "High" + "4" => "Critical" + } } if "qualys_web" in [tags] { - mutate { - add_field => { "asset" => "%{web_application_name}" } - add_field => { "risk_score" => "%{cvss}" } - } - } else if "qualys_vuln" in [tags] { mutate { - add_field => { "asset" => "%{ip}" } - add_field => { "risk_score" => "%{cvss}" } + add_field => { "asset" => "%{web_application_name}" } } } - if [risk] == "1" { - mutate { add_field => { "risk_number" => 0 }} - mutate { replace => { "risk" => "info" }} - } - if [risk] == "2" { - mutate { add_field => { "risk_number" => 1 }} - mutate { replace => { "risk" => "low" }} - } - if [risk] == "3" { - mutate { add_field => { "risk_number" => 2 }} - mutate { replace => { "risk" => "medium" }} - } - if [risk] == "4" { - mutate { add_field => { "risk_number" => 3 }} - mutate { replace => { "risk" => "high" }} - } - if [risk] == "5" { - mutate { add_field => { "risk_number" => 4 }} - mutate { replace => { "risk" => "critical" }} - } - mutate { - remove_field => "message" + add_field => { "risk_score" => "%{cvss}" } } - if [first_time_detected] { - date { - match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] - target => "first_time_detected" - } - } - if [first_time_tested] { - date { - match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] - target => "first_time_tested" - } - } - if [last_time_detected] { - date { - match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] - target => "last_time_detected" - } - } - if [last_time_tested] { - date { - match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] - target => 
"last_time_tested" - } - } - - # TODO remove when @timestamp is included in event - date { - match => [ "last_updated", "UNIX" ] - target => "@timestamp" - remove_field => "last_updated" - } mutate { - convert => { "plugin_id" => "integer"} + convert => { "cvss_base" => "float"} + convert => { "cvss_temporal" => "float"} + convert => { "cvss" => "float"} + convert => { "cvss3_base" => "float"} + convert => { "cvss3_temporal" => "float"} + convert => { "cvss3" => "float"} convert => { "id" => "integer"} + convert => { "plugin_id" => "integer"} convert => { "risk_number" => "integer"} convert => { "risk_score" => "float"} convert => { "total_times_detected" => "integer"} - convert => { "cvss_temporal" => "float"} - convert => { "cvss" => "float"} } + if [risk_score] == 0 { mutate { add_field => { "risk_score_name" => "info" } @@ -140,11 +92,35 @@ filter { } } - if [asset] =~ "\.yourdomain\.(com|net)$" { - mutate { - add_tag => [ "critical_asset" ] + if [first_time_detected] { + date { + match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] + target => "first_time_detected" } } + if [first_time_tested] { + date { + match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] + target => "first_time_tested" + } + } + if [last_time_detected] { + date { + match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] + target => "last_time_detected" + } + } + if [last_time_tested] { + date { + match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ] + target => "last_time_tested" + } + } + # if [asset] =~ "\.yourdomain\.(com|net)$" { + # mutate { + # add_tag => [ "critical_asset" ] + # } + # } } } output { diff --git a/resources/elk6/pipeline/3000_openvas.conf b/resources/elk6/pipeline/3000_openvas.conf index 4a96ca3..cb1a00c 100644 --- a/resources/elk6/pipeline/3000_openvas.conf +++ b/resources/elk6/pipeline/3000_openvas.conf @@ -20,27 
+20,27 @@ input { filter { if "openvas_scan" in [tags] { - mutate { - replace => [ "message", "%{message}" ] - gsub => [ - "message", "\|\|\|", " ", - "message", "\t\t", " ", - "message", " ", " ", - "message", " ", " ", - "message", " ", " ", - "message", "nan", " ", - "message",'\n','' - ] + date { + match => [ "_timestamp", "UNIX" ] + target => "@timestamp" + remove_field => ["timestamp"] } - grok { - match => { "path" => "openvas_scan_%{DATA:scan_id}_%{INT:last_updated}.json$" } + match => { "path" => "openvas_scan_%{DATA}_%{INT}.json$" } tag_on_failure => [] } - mutate { - add_field => { "risk_score" => "%{cvss}" } + translate { + field => "[risk_number]" + destination => "[risk]" + dictionary => { + "0" => "Info" + "1" => "Low" + "2" => "Medium" + "3" => "High" + "4" => "Critical" + } } if [risk] == "1" { @@ -93,21 +93,24 @@ filter { } } - # TODO remove when @timestamp is included in event - date { - match => [ "last_updated", "UNIX" ] - target => "@timestamp" - remove_field => "last_updated" - } mutate { - convert => { "plugin_id" => "integer"} + add_field => { "risk_score" => "%{cvss}" } + } + + mutate { + convert => { "cvss_base" => "float"} + convert => { "cvss_temporal" => "float"} + convert => { "cvss" => "float"} + convert => { "cvss3_base" => "float"} + convert => { "cvss3_temporal" => "float"} + convert => { "cvss3" => "float"} convert => { "id" => "integer"} + convert => { "plugin_id" => "integer"} convert => { "risk_number" => "integer"} convert => { "risk_score" => "float"} convert => { "total_times_detected" => "integer"} - convert => { "cvss_temporal" => "float"} - convert => { "cvss" => "float"} } + if [risk_score] == 0 { mutate { add_field => { "risk_score_name" => "info" } @@ -139,6 +142,19 @@ filter { add_tag => [ "critical_asset" ] } } + mutate { + convert => { "plugin_id" => "integer"} + convert => { "id" => "integer"} + convert => { "risk_number" => "integer"} + convert => { "risk_score" => "float"} + convert => { "total_times_detected" 
=> "integer"} + convert => { "cvss" => "float"} + convert => { "cvss_base" => "float"} + convert => { "cvss_temporal" => "float"} + convert => { "cvss3" => "float"} + convert => { "cvss3_base" => "float"} + convert => { "cvss3_temporal" => "float"} + } } } output { diff --git a/vulnwhisp/vulnwhisp.py b/vulnwhisp/vulnwhisp.py index 289eaf8..cd73320 100755 --- a/vulnwhisp/vulnwhisp.py +++ b/vulnwhisp/vulnwhisp.py @@ -455,8 +455,9 @@ class vulnWhispererNessus(vulnWhispererBase): clean_csv['scan_name'] = scan_name.encode('utf8') clean_csv['scan_id'] = uuid - # Add @timestamp and convert to milliseconds - clean_csv['@timestamp'] = int(norm_time) * 1000 + # Add timestamp and convert to milliseconds + clean_csv['_timestamp'] = norm_time + clean_csv['scan_source'] = self.CONFIG_SECTION clean_csv.to_json(relative_path_name, orient='records', lines=True) @@ -628,8 +629,9 @@ class vulnWhispererQualys(vulnWhispererBase): # Set common fields vuln_ready['scan_name'] = scan_name.encode('utf8') vuln_ready['scan_id'] = report_id - # Add @timestamp and convert to milliseconds - vuln_ready['@timestamp'] = int(launched_date) * 1000 + # Add timestamp and convert to milliseconds + vuln_ready['_timestamp'] = launched_date + vuln_ready['scan_source'] = self.CONFIG_SECTION record_meta = ( scan_name, @@ -801,8 +803,9 @@ class vulnWhispererOpenVAS(vulnWhispererBase): # Set common fields vuln_ready['scan_name'] = scan_name.encode('utf8') vuln_ready['scan_id'] = report_id - # Add @timestamp and convert to milliseconds - vuln_ready['@timestamp'] = int(launched_date) * 1000 + # Add _timestamp and convert to milliseconds + vuln_ready['_timestamp'] = launched_date + vuln_ready['scan_source'] = self.CONFIG_SECTION vuln_ready.to_json(relative_path_name, orient='records', lines=True) self.logger.info('Report written to {}'.format(report_name)) @@ -902,8 +905,9 @@ class vulnWhispererQualysVuln(vulnWhispererBase): vuln_ready['scan_name'] = scan_name.encode('utf8') vuln_ready['scan_id'] = report_id - # 
Add @timestamp and convert to milliseconds - vuln_ready['@timestamp'] = int(launched_date) * 1000 + # Add timestamp as epoch seconds (logstash date filter converts it) + vuln_ready['_timestamp'] = launched_date + vuln_ready['scan_source'] = self.CONFIG_SECTION except Exception as e: self.logger.error('Could not process {}: {}'.format(report_id, str(e)))