* Created the version 6 for ELK. Fixed #135
* Needed to make sure all the data volumes were set up properly; some paths were inconsistent (VulnWhisperer, vulnwhisperer, vulnwhisp/data).
* Delete 9998_output_broker_rabbitmq.conf
* Delete 9998_input_broker_rabbitmq.conf
* Delete 0001_input_beats.conf
* Add creds files to .gitignore + correct elk5 docker-compose
* ELK bumped from 6.5.2 to 6.6.0; Logstash output path changed to the Elasticsearch host
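Note on the data-volume fix: the point of normalizing the paths is that VulnWhisperer and Logstash must see the scanner output under one and the same directory. A minimal docker-compose sketch of the idea (service names and host paths are illustrative, not taken from this commit):

    services:
      vulnwhisperer:
        volumes:
          - ./data:/opt/vulnwhisperer/data   # scanners write here
      logstash:
        volumes:
          - ./data:/opt/vulnwhisperer/data   # pipelines read the same tree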
116
elk6/filebeat.yml
Normal file
@@ -0,0 +1,116 @@
###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.full.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html

#=========================== Filebeat prospectors =============================

filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- input_type: log

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    # Linux Example
    #- /var/log/*.log

    # Windows Example
    - c:\nessus\My Scans\*

  # Exclude lines. A list of regular expressions to match. It drops the lines
  # that match any regular expression from the list.
  #exclude_lines: ["^DBG"]

  # Include lines. A list of regular expressions to match. It exports the lines
  # that match any regular expression from the list.
  #include_lines: ["^ERR", "^WARN"]

  # Exclude files. A list of regular expressions to match. Filebeat drops the
  # files that match any regular expression from the list. By default, no files
  # are dropped.
  #exclude_files: [".gz$"]

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering.
  #fields:
  #  level: debug
  #  review: 1

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is
  # common for Java stack traces or C line continuation.

  # The regexp pattern that has to be matched. The example pattern matches all
  # lines starting with [
  #multiline.pattern: ^\[

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  #multiline.negate: false

  # Match can be set to "after" or "before". It is used to define if lines
  # should be appended to a pattern that was (not) matched before or after,
  # or as long as a pattern is not matched, based on negate.
  # Note: "after" is the equivalent of "previous" and "before" is the
  # equivalent of "next" in Logstash.
  #multiline.match: after


#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to
# group all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging

#================================ Outputs =====================================

# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  # hosts: ["logstash01:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["logstashserver1:5044", "logstashserver2:5044", "logstashserver3:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: critical, error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]
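A note on the multiline options in filebeat.yml above: to actually join multi-line records such as Java stack traces into one event, the usual recipe is to treat every line that does not match the pattern as a continuation of the previous one. A hedged sketch (pattern and values are illustrative, not enabled by this commit):

    multiline.pattern: '^\['    # a new record starts with [
    multiline.negate: true      # lines NOT matching the pattern...
    multiline.match: after      # ...are appended to the preceding line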
428
elk6/kibana.json
Normal file
File diff suppressed because one or more lines are too long
9
elk6/logstash.yml
Normal file
@@ -0,0 +1,9 @@
node.name: logstash
path.config: /usr/share/logstash/pipeline/
path.data: /tmp
queue.drain: true
queue.type: persisted
xpack.monitoring.elasticsearch.password: changeme
xpack.monitoring.elasticsearch.url: elasticsearch:9200
xpack.monitoring.elasticsearch.username: elastic
xpack.monitoring.enabled: false
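This file only takes effect once mounted into the Logstash container. A minimal sketch of the corresponding docker-compose service (mount paths are assumptions, not shown in this commit):

    logstash:
      image: docker.elastic.co/logstash/logstash:6.6.0
      volumes:
        - ./elk6/logstash.yml:/usr/share/logstash/config/logstash.yml
        - ./elk6/pipeline:/usr/share/logstash/pipeline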
177
elk6/pipeline/1000_nessus_process_file.conf
Normal file
@@ -0,0 +1,177 @@
# Author: Austin Taylor and Justin Henderson
# Email: email@austintaylor.io
# Last Update: 12/20/2017
# Version 0.3
# Description: Takes in Nessus reports from VulnWhisperer and pumps them into Logstash


input {
  file {
    path => "/opt/vulnwhisperer/data/nessus/**/*"
    mode => "read"
    start_position => "beginning"
    file_completed_action => "delete"
    tags => "nessus"
  }
  file {
    path => "/opt/vulnwhisperer/data/tenable/*.csv"
    mode => "read"
    start_position => "beginning"
    file_completed_action => "delete"
    tags => "tenable"
  }
}

filter {
  if "nessus" in [tags] or "tenable" in [tags] {
    # Drop the header row
    if [message] =~ "^Plugin ID" { drop {} }

    csv {
      # columns => ["plugin_id", "cve", "cvss", "risk", "asset", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output"]
      columns => ["plugin_id", "cve", "cvss", "risk", "asset", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output", "asset_uuid", "vulnerability_state", "ip", "fqdn", "netbios", "operating_system", "mac_address", "plugin_family", "cvss_base", "cvss_temporal", "cvss_temporal_vector", "cvss_vector", "cvss3_base", "cvss3_temporal", "cvss3_temporal_vector", "cvss_vector", "system_type", "host_start", "host_end"]
      separator => ","
      source => "message"
    }

    # Turn the literal two-character escapes "\n" and "\r" in the long text
    # fields into real newline/carriage-return characters (92.chr is a backslash).
    ruby {
      code => "if event.get('description')
                 event.set('description', event.get('description').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
               end
               if event.get('synopsis')
                 event.set('synopsis', event.get('synopsis').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
               end
               if event.get('solution')
                 event.set('solution', event.get('solution').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
               end
               if event.get('see_also')
                 event.set('see_also', event.get('see_also').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
               end
               if event.get('plugin_output')
                 event.set('plugin_output', event.get('plugin_output').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
               end"
    }

    # If using Filebeat as your source, you will need to replace the "path" field with "source"
    grok {
      match => { "path" => "(?<scan_name>[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.csv$" }
      tag_on_failure => []
    }

    date {
      match => [ "last_updated", "UNIX" ]
      target => "@timestamp"
      remove_field => ["last_updated"]
    }

    if [risk] == "None" {
      mutate { add_field => { "risk_number" => 0 }}
    }
    if [risk] == "Low" {
      mutate { add_field => { "risk_number" => 1 }}
    }
    if [risk] == "Medium" {
      mutate { add_field => { "risk_number" => 2 }}
    }
    if [risk] == "High" {
      mutate { add_field => { "risk_number" => 3 }}
    }
    if [risk] == "Critical" {
      mutate { add_field => { "risk_number" => 4 }}
    }

    if ![cve] or [cve] == "nan" {
      mutate { remove_field => [ "cve" ] }
    }
    if ![cvss] or [cvss] == "nan" {
      mutate { remove_field => [ "cvss" ] }
    }
    if ![cvss_base] or [cvss_base] == "nan" {
      mutate { remove_field => [ "cvss_base" ] }
    }
    if ![cvss_temporal] or [cvss_temporal] == "nan" {
      mutate { remove_field => [ "cvss_temporal" ] }
    }
    if ![cvss_temporal_vector] or [cvss_temporal_vector] == "nan" {
      mutate { remove_field => [ "cvss_temporal_vector" ] }
    }
    if ![cvss_vector] or [cvss_vector] == "nan" {
      mutate { remove_field => [ "cvss_vector" ] }
    }
    if ![cvss3_base] or [cvss3_base] == "nan" {
      mutate { remove_field => [ "cvss3_base" ] }
    }
    if ![cvss3_temporal] or [cvss3_temporal] == "nan" {
      mutate { remove_field => [ "cvss3_temporal" ] }
    }
    if ![cvss3_temporal_vector] or [cvss3_temporal_vector] == "nan" {
      mutate { remove_field => [ "cvss3_temporal_vector" ] }
    }
    if ![description] or [description] == "nan" {
      mutate { remove_field => [ "description" ] }
    }
    if ![mac_address] or [mac_address] == "nan" {
      mutate { remove_field => [ "mac_address" ] }
    }
    if ![netbios] or [netbios] == "nan" {
      mutate { remove_field => [ "netbios" ] }
    }
    if ![operating_system] or [operating_system] == "nan" {
      mutate { remove_field => [ "operating_system" ] }
    }
    if ![plugin_output] or [plugin_output] == "nan" {
      mutate { remove_field => [ "plugin_output" ] }
    }
    if ![see_also] or [see_also] == "nan" {
      mutate { remove_field => [ "see_also" ] }
    }
    if ![synopsis] or [synopsis] == "nan" {
      mutate { remove_field => [ "synopsis" ] }
    }
    if ![system_type] or [system_type] == "nan" {
      mutate { remove_field => [ "system_type" ] }
    }

    mutate {
      remove_field => [ "message" ]
      add_field => { "risk_score" => "%{cvss}" }
    }
    mutate {
      convert => { "risk_score" => "float" }
    }
    if [risk_score] == 0 {
      mutate {
        add_field => { "risk_score_name" => "info" }
      }
    }
    if [risk_score] > 0 and [risk_score] < 3 {
      mutate {
        add_field => { "risk_score_name" => "low" }
      }
    }
    if [risk_score] >= 3 and [risk_score] < 6 {
      mutate {
        add_field => { "risk_score_name" => "medium" }
      }
    }
    if [risk_score] >= 6 and [risk_score] < 9 {
      mutate {
        add_field => { "risk_score_name" => "high" }
      }
    }
    if [risk_score] >= 9 {
      mutate {
        add_field => { "risk_score_name" => "critical" }
      }
    }
  }
}

output {
  if "nessus" in [tags] or "tenable" in [tags] {
    elasticsearch {
      hosts => [ "elasticsearch:9200" ]
      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
    }
  }
}
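A worked example of the grok and date filters above, using a hypothetical report name: a file written as My_Scan_17_3_1545310000.csv yields scan_name=My_Scan, scan_id=17, history_id=3 and last_updated=1545310000; the date filter then converts that UNIX epoch into @timestamp and removes last_updated.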
156
elk6/pipeline/2000_qualys_web_scans.conf
Normal file
@@ -0,0 +1,156 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 12/30/2017
# Version 0.3
# Description: Takes in Qualys web and vuln scan reports from VulnWhisperer and pumps them into Logstash

input {
  file {
    path => "/opt/vulnwhisperer/data/qualys/*.json"
    type => json
    codec => json
    start_position => "beginning"
    mode => "read"
    file_completed_action => "delete"
    tags => [ "qualys" ]
  }
}

filter {
  if "qualys" in [tags] {
    grok {
      match => { "path" => [ "(?<tags>qualys_vuln)_scan_%{DATA}_%{INT:last_updated}.json$", "(?<tags>qualys_web)_%{INT:app_id}_%{INT:last_updated}.json$" ] }
      tag_on_failure => []
    }

    mutate {
      replace => [ "message", "%{message}" ]
      #gsub => [
      #  "message", "\|\|\|", " ",
      #  "message", "\t\t", " ",
      #  "message", "    ", " ",
      #  "message", "   ", " ",
      #  "message", "  ", " ",
      #  "message", "nan", " ",
      #  "message", '\n', ''
      #]
    }

    if "qualys_web" in [tags] {
      mutate {
        add_field => { "asset" => "%{web_application_name}" }
        add_field => { "risk_score" => "%{cvss}" }
      }
    } else if "qualys_vuln" in [tags] {
      mutate {
        add_field => { "asset" => "%{ip}" }
        add_field => { "risk_score" => "%{cvss}" }
      }
    }

    if [risk] == "1" {
      mutate { add_field => { "risk_number" => 0 }}
      mutate { replace => { "risk" => "info" }}
    }
    if [risk] == "2" {
      mutate { add_field => { "risk_number" => 1 }}
      mutate { replace => { "risk" => "low" }}
    }
    if [risk] == "3" {
      mutate { add_field => { "risk_number" => 2 }}
      mutate { replace => { "risk" => "medium" }}
    }
    if [risk] == "4" {
      mutate { add_field => { "risk_number" => 3 }}
      mutate { replace => { "risk" => "high" }}
    }
    if [risk] == "5" {
      mutate { add_field => { "risk_number" => 4 }}
      mutate { replace => { "risk" => "critical" }}
    }

    mutate {
      remove_field => "message"
    }

    if [first_time_detected] {
      date {
        match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_detected"
      }
    }
    if [first_time_tested] {
      date {
        match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_tested"
      }
    }
    if [last_time_detected] {
      date {
        match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_detected"
      }
    }
    if [last_time_tested] {
      date {
        match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_tested"
      }
    }
    date {
      match => [ "last_updated", "UNIX" ]
      target => "@timestamp"
      remove_field => "last_updated"
    }
    mutate {
      convert => { "plugin_id" => "integer" }
      convert => { "id" => "integer" }
      convert => { "risk_number" => "integer" }
      convert => { "risk_score" => "float" }
      convert => { "total_times_detected" => "integer" }
      convert => { "cvss_temporal" => "float" }
      convert => { "cvss" => "float" }
    }
    if [risk_score] == 0 {
      mutate {
        add_field => { "risk_score_name" => "info" }
      }
    }
    if [risk_score] > 0 and [risk_score] < 3 {
      mutate {
        add_field => { "risk_score_name" => "low" }
      }
    }
    if [risk_score] >= 3 and [risk_score] < 6 {
      mutate {
        add_field => { "risk_score_name" => "medium" }
      }
    }
    if [risk_score] >= 6 and [risk_score] < 9 {
      mutate {
        add_field => { "risk_score_name" => "high" }
      }
    }
    if [risk_score] >= 9 {
      mutate {
        add_field => { "risk_score_name" => "critical" }
      }
    }

    if [asset] =~ "\.yourdomain\.(com|net)$" {
      mutate {
        add_tag => [ "critical_asset" ]
      }
    }
  }
}

output {
  if "qualys" in [tags] {
    elasticsearch {
      hosts => [ "elasticsearch:9200" ]
      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
    }
  }
}
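A worked example of the date patterns above (timestamp is illustrative): a Qualys value such as 01 Jan 2018 10:30AM GMT+02:00 is parsed by dd MMM yyyy HH:mma 'GMT'ZZ, while 01 Jan 2018 10:30AM GMT, with no offset, falls through to the second pattern.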
149
elk6/pipeline/3000_openvas.conf
Normal file
@@ -0,0 +1,149 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
# Description: Takes in OpenVAS scan reports from VulnWhisperer and pumps them into Logstash

input {
  file {
    path => "/opt/vulnwhisperer/data/openvas/*.json"
    type => json
    codec => json
    start_position => "beginning"
    mode => "read"
    file_completed_action => "delete"
    tags => [ "openvas_scan", "openvas" ]
  }
}

filter {
  if "openvas_scan" in [tags] {
    # Collapse "|||" separators, tabs and runs of spaces, and blank out literal "nan" values.
    mutate {
      replace => [ "message", "%{message}" ]
      gsub => [
        "message", "\|\|\|", " ",
        "message", "\t\t", " ",
        "message", "    ", " ",
        "message", "   ", " ",
        "message", "  ", " ",
        "message", "nan", " ",
        "message", '\n', ''
      ]
    }

    grok {
      match => { "path" => "openvas_scan_%{DATA:scan_id}_%{INT:last_updated}.json$" }
      tag_on_failure => []
    }

    mutate {
      add_field => { "risk_score" => "%{cvss}" }
    }

    if [risk] == "1" {
      mutate { add_field => { "risk_number" => 0 }}
      mutate { replace => { "risk" => "info" }}
    }
    if [risk] == "2" {
      mutate { add_field => { "risk_number" => 1 }}
      mutate { replace => { "risk" => "low" }}
    }
    if [risk] == "3" {
      mutate { add_field => { "risk_number" => 2 }}
      mutate { replace => { "risk" => "medium" }}
    }
    if [risk] == "4" {
      mutate { add_field => { "risk_number" => 3 }}
      mutate { replace => { "risk" => "high" }}
    }
    if [risk] == "5" {
      mutate { add_field => { "risk_number" => 4 }}
      mutate { replace => { "risk" => "critical" }}
    }

    mutate {
      remove_field => "message"
    }

    if [first_time_detected] {
      date {
        match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_detected"
      }
    }
    if [first_time_tested] {
      date {
        match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_tested"
      }
    }
    if [last_time_detected] {
      date {
        match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_detected"
      }
    }
    if [last_time_tested] {
      date {
        match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_tested"
      }
    }
    date {
      match => [ "last_updated", "UNIX" ]
      target => "@timestamp"
      remove_field => "last_updated"
    }
    mutate {
      convert => { "plugin_id" => "integer" }
      convert => { "id" => "integer" }
      convert => { "risk_number" => "integer" }
      convert => { "risk_score" => "float" }
      convert => { "total_times_detected" => "integer" }
      convert => { "cvss_temporal" => "float" }
      convert => { "cvss" => "float" }
    }
    if [risk_score] == 0 {
      mutate {
        add_field => { "risk_score_name" => "info" }
      }
    }
    if [risk_score] > 0 and [risk_score] < 3 {
      mutate {
        add_field => { "risk_score_name" => "low" }
      }
    }
    if [risk_score] >= 3 and [risk_score] < 6 {
      mutate {
        add_field => { "risk_score_name" => "medium" }
      }
    }
    if [risk_score] >= 6 and [risk_score] < 9 {
      mutate {
        add_field => { "risk_score_name" => "high" }
      }
    }
    if [risk_score] >= 9 {
      mutate {
        add_field => { "risk_score_name" => "critical" }
      }
    }

    # Add your critical assets by subnet or by hostname. You can comment this
    # block out if you don't want to tag any, but the asset panel will break.
    if [asset] =~ "^10\.0\.100\." {
      mutate {
        add_tag => [ "critical_asset" ]
      }
    }
  }
}

output {
  if "openvas" in [tags] {
    elasticsearch {
      hosts => [ "elasticsearch:9200" ]
      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
    }
  }
}
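If more than one range or domain counts as critical, the tagging block above extends naturally with or-ed conditions; a hedged sketch (the second subnet is illustrative, not part of this commit):

    if [asset] =~ "^10\.0\.100\." or [asset] =~ "^192\.168\.1\." {
      mutate { add_tag => [ "critical_asset" ] }
    }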
25
elk6/pipeline/4000_jira.conf
Normal file
@@ -0,0 +1,25 @@
# Description: Takes in Jira tickets from VulnWhisperer and pumps them into Logstash

input {
  file {
    path => "/opt/vulnwhisperer/data/jira/*.json"
    type => json
    codec => json
    start_position => "beginning"
    mode => "read"
    file_completed_action => "delete"
    tags => [ "jira" ]
  }
}

output {
  if "jira" in [tags] {
    stdout { codec => rubydebug }
    elasticsearch {
      hosts => [ "elasticsearch:9200" ]
      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
    }
  }
}
108
elk6/vulnwhisperer.ini
Normal file
@@ -0,0 +1,108 @@
[nessus]
enabled=true
hostname=localhost
port=8834
username=nessus_username
password=nessus_password
write_path=/opt/vulnwhisperer/data/nessus/
db_path=/opt/vulnwhisperer/data/database
trash=false
verbose=true

[tenable]
enabled=true
hostname=cloud.tenable.com
port=443
username=tenable.io_username
password=tenable.io_password
write_path=/opt/vulnwhisperer/data/tenable/
db_path=/opt/vulnwhisperer/data/database
trash=false
verbose=true

[qualys_web]
# Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
enabled = true
hostname = qualysapi.qg2.apps.qualys.com
username = exampleuser
password = examplepass
write_path=/opt/vulnwhisperer/data/qualys/
db_path=/opt/vulnwhisperer/data/database
verbose=true

# Set the maximum number of retries each connection should attempt.
# Note: this applies only to failed connections and timeouts, never to requests where the server returns a response.
max_retries = 10
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
template_id = 126024

[qualys_vuln]
# Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
enabled = true
hostname = qualysapi.qg2.apps.qualys.com
username = exampleuser
password = examplepass
write_path=/opt/vulnwhisperer/data/qualys/
db_path=/opt/vulnwhisperer/data/database
verbose=true

# Set the maximum number of retries each connection should attempt.
# Note: this applies only to failed connections and timeouts, never to requests where the server returns a response.
max_retries = 10
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
template_id = 126024

[detectify]
# Reference https://developer.detectify.com/
enabled = false
hostname = api.detectify.com
# username variable used as apiKey
username = exampleuser
# password variable used as secretKey
password = examplepass
write_path = /opt/vulnwhisperer/data/detectify/
db_path = /opt/vulnwhisperer/data/database
verbose = true

[openvas]
enabled = false
hostname = localhost
port = 4000
username = exampleuser
password = examplepass
write_path=/opt/vulnwhisperer/data/openvas/
db_path=/opt/vulnwhisperer/data/database
verbose=true

#[proxy]
; This section is optional. Leave it out if you're not using a proxy.
; You can use environment variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies

; proxy_protocol defaults to https if not specified.
#proxy_url = proxy.mycorp.com

; proxy_port will override any port specified in proxy_url
#proxy_port = 8080

; proxy authentication
#proxy_username = proxyuser
#proxy_password = proxypass

[jira]
hostname = jira-host
username = username
password = password
write_path = /opt/vulnwhisperer/data/jira/
db_path = /opt/vulnwhisperer/data/database
verbose = true

# Sample Jira report scan; tickets will be created automatically for existing scans
#[jira.qualys_vuln.test_scan]
#source = qualys_vuln
#scan_name = Test Scan
#jira_project = PROJECT
; if multiple components, separate them with "," (default: None)
#components =
; minimum criticality to report (low, medium, high or critical) (default: None)
#min_critical_to_report = high
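Once this file is mounted, VulnWhisperer is typically pointed at it on each run along these lines (the container path is an assumption; -s selects one of the config sections above):

    vuln_whisperer -c /opt/vulnwhisperer/vulnwhisperer.ini -s nessus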