Unified Logstash config

This commit is contained in:
pemontto
2019-05-07 17:20:14 +01:00
parent 836515f6d1
commit 488bd09dad
4 changed files with 38 additions and 255 deletions

View File

@ -1,49 +1,59 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 12/30/2017
# Version 0.3
# Description: Takes in VulnWhisperer scan reports (Nessus, Tenable, Qualys VM/WAS, OpenVAS) and ships them to Elasticsearch
# Input stage: tail VulnWhisperer JSON report files.
# NOTE(review): this stanza appears to contain unresolved merge/diff residue —
# `path`, `start_position`, `codec`, and `tags` each appear twice within one
# file {} block, which Logstash rejects as duplicate settings. Presumably the
# unified five-path list is the intended `path`; confirm and drop stale lines.
input {
file {
path => [ "/opt/VulnWhisperer/data/qualys_vm/*.json" ]
codec => json
start_position => "beginning"
tags => [ "qualys_vm" ]
# "read" mode consumes whole files as finite input and enables
# file_completed_action below.
mode => "read"
path => ["/opt/VulnWhisperer/data/nessus/**/*.json", "/opt/VulnWhisperer/data/openvas/*.json", "/opt/VulnWhisperer/data/qualys_vm/*.json", "/opt/VulnWhisperer/data/qualys_was/*.json", "/opt/VulnWhisperer/data/tenable/*.json"]
start_position => "beginning"
# Delete each report file once it has been fully ingested.
file_completed_action => "delete"
}
file {
path => [ "/opt/VulnWhisperer/data/qualys_was/*.json" ]
codec => json
start_position => "beginning"
tags => [ "qualys_was" ]
mode => "read"
# NOTE(review): duplicate `path`/`tags`/`start_position` here as well —
# the jira settings look like the surviving (new) ones.
path => "/opt/VulnWhisperer/data/jira/*.json"
tags => [ "jira" ]
start_position => "beginning"
file_completed_action => "delete"
}
}
# Filter stage: normalise VulnWhisperer scan documents before indexing.
filter {
# NOTE(review): the two stacked conditionals below appear to be old/new diff
# residue — the tag-based test and the [scan_source] test guard the same body.
if "qualys_vm" in [tags] or "qualys_was" in [tags] {
if [scan_source] in ["nessus", "tenable", "qualys_vm", "qualys_was", "openvas"] {
# Parse the date/time from scan_time
# scan_time arrives as a UNIX epoch; promote it to the event timestamp.
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# Add scan_source to tags
mutate {
add_field => { "[tags]" => "%{scan_source}" }
}
# Create a unique document_id if _unique field exists
if [_unique] {
# Set document ID from _unique
# Stashed in @metadata so the output stage can use it without it being
# indexed as a document field.
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
# Do we need this?
# Coerce scoring fields so Elasticsearch maps them as numbers.
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss2" => "float"}
convert => { "cvss2_base" => "float"}
convert => { "cvss2_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
}
# Qualys WAS reports carry human-readable detection timestamps; parse each
# one in place so it indexes as a date.
# NOTE(review): the diff hunk marker below indicates lines elided from this
# view of the file.
if [scan_source] == "qualys_was" {
if [first_time_detected] {
date {
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
@ -68,32 +78,32 @@ filter {
target => "last_time_tested"
}
}
# if [asset] =~ "\.yourdomain\.(com|net)$" {
# mutate {
# add_tag => [ "critical_asset" ]
# }
# }
if [_unique] {
# Set document ID from _unique
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
# Output stage: ship normalised scan documents to Elasticsearch.
# Events that carry a precomputed unique id (the filter stage renames
# _unique into [@metadata][id]) are indexed with an explicit document_id so
# re-importing the same report updates documents instead of duplicating them.
output {
if [scan_source] in ["nessus", "tenable", "qualys_vm", "qualys_was", "openvas"] {
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
manage_template => false
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
manage_template => false
}
}
}
# Should these go to the same index?
if "jira" in [tags] {
# NOTE(review): rubydebug stdout looks like leftover debugging output.
stdout { codec => rubydebug }
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
# Added for consistency with the scan outputs above, which also
# disable index-template management.
manage_template => false
}
}
}

View File

@ -1,71 +0,0 @@
# Author: Austin Taylor and Justin Henderson
# Email: email@austintaylor.io
# Last Update: 12/20/2017
# Version 0.3
# Description: Take in nessus reports from vulnWhisperer and pumps into logstash
# Input stage: consume whole Nessus / Tenable.io JSON reports, then delete them.
input {
file {
path => "/opt/VulnWhisperer/data/nessus/**/*.json"
# "read" mode treats each file as finite input rather than a live log.
mode => "read"
start_position => "beginning"
# Remove each report file after a successful read.
file_completed_action => "delete"
tags => "nessus"
codec => json
}
file {
path => "/opt/VulnWhisperer/data/tenable/*.json"
mode => "read"
start_position => "beginning"
file_completed_action => "delete"
tags => "tenable"
codec => json
}
}
# Filter stage: normalise Nessus/Tenable scan documents.
filter {
if "nessus" in [tags] or "tenable" in [tags] {
# scan_time is a UNIX epoch; use it as the event timestamp.
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# Coerce scoring fields so Elasticsearch maps them numerically.
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
if [_unique] {
# Set document ID from _unique
# Kept in @metadata so the output can deduplicate without indexing it.
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
# Output stage: index into the monthly vulnwhisperer index. When the event
# carries a precomputed id, use it as document_id so re-imports update
# existing documents instead of duplicating them.
output {
if "nessus" in [tags] or "tenable" in [tags]{
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}
}

View File

@ -1,131 +0,0 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
# Description: Take in OpenVAS scan reports from vulnWhisperer and pumps into logstash
# Input stage: consume whole OpenVAS JSON reports, then delete them.
# Fix: `start_position` was declared twice in the original block; Logstash
# rejects duplicate settings within one plugin block, so the extra line is gone.
input {
file {
path => "/opt/VulnWhisperer/data/openvas/*.json"
codec => json
# Read once from the top of each file; "read" mode treats files as
# finite input and enables file_completed_action below.
start_position => "beginning"
tags => [ "openvas_scan", "openvas" ]
mode => "read"
# Remove each report file after a successful read.
file_completed_action => "delete"
}
}
# Filter stage: normalise OpenVAS scan documents.
filter {
if "openvas_scan" in [tags] {
# scan_time is a UNIX epoch; promote it to the event timestamp.
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# TODO - move this mapping into the vulnwhisperer module
# Map numeric risk_number (0-4) to a severity label. NOTE(review): the
# translate filter's `override` defaults to false, so an already-present
# [risk] field is left untouched — the if-chain below then applies instead;
# confirm this is the intended interplay.
translate {
field => "[risk_number]"
destination => "[risk]"
dictionary => {
"0" => "Info"
"1" => "Low"
"2" => "Medium"
"3" => "High"
"4" => "Critical"
}
}
# Fallback remapping for documents that arrived with a numeric [risk] of
# "1".."5": shift onto the 0-4 scale with lowercase labels.
# NOTE(review): add_field on an already-existing risk_number turns the
# field into an array, and these lowercase labels differ from the
# capitalised translate dictionary above — verify which convention wins.
if [risk] == "1" {
mutate { add_field => { "risk_number" => 0 }}
mutate { replace => { "risk" => "info" }}
}
if [risk] == "2" {
mutate { add_field => { "risk_number" => 1 }}
mutate { replace => { "risk" => "low" }}
}
if [risk] == "3" {
mutate { add_field => { "risk_number" => 2 }}
mutate { replace => { "risk" => "medium" }}
}
if [risk] == "4" {
mutate { add_field => { "risk_number" => 3 }}
mutate { replace => { "risk" => "high" }}
}
if [risk] == "5" {
mutate { add_field => { "risk_number" => 4 }}
mutate { replace => { "risk" => "critical" }}
}
# Drop the raw JSON blob once parsed.
mutate {
remove_field => "message"
}
# Parse the human-readable detection/test timestamps in place so they
# index as dates.
if [first_time_detected] {
date {
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "first_time_detected"
}
}
if [first_time_tested] {
date {
match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "first_time_tested"
}
}
if [last_time_detected] {
date {
match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "last_time_detected"
}
}
if [last_time_tested] {
date {
match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "last_time_tested"
}
}
# Coerce scoring fields so Elasticsearch maps them numerically.
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
# Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break.
# if [asset] =~ "^10\.0\.100\." {
# mutate {
# add_tag => [ "critical_asset" ]
# }
# }
if [_unique] {
# Set document ID from _unique
# Kept in @metadata so the output can deduplicate without indexing it.
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
# Output stage: index OpenVAS events into the monthly vulnwhisperer index,
# using the precomputed id (when present) so re-imports are idempotent.
output {
if "openvas" in [tags] {
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}
}

View File

@ -1,25 +0,0 @@
# Description: Take in jira tickets from vulnWhisperer and pumps into logstash
# Input stage: consume whole JIRA ticket JSON exports, then delete them.
# Fix: `start_position` was declared twice in the original block; Logstash
# rejects duplicate settings within one plugin block, so the extra line is gone.
input {
file {
path => "/opt/VulnWhisperer/data/jira/*.json"
type => json
codec => json
# Read once from the top of each file; "read" mode treats files as
# finite input and enables file_completed_action below.
start_position => "beginning"
mode => "read"
file_completed_action => "delete"
tags => [ "jira" ]
}
}
# Output stage: index JIRA ticket events into the monthly vulnwhisperer index.
output {
if "jira" in [tags] {
# NOTE(review): rubydebug stdout looks like leftover debugging output.
stdout { codec => rubydebug }
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}