Merge pull request #180 from pemontto/feature-filter-scans

This PR adds two CLI flags to filter which scans get imported/requested and one to list the scans:

    -f, --filter: allows supplying a regex pattern to match a scan name (this can also be specified in the config file)
    --days: the number of days to look back from the current date for scans (not supported on OpenVAS yet)
    --list: lists scans matching the filters and shows their imported/processed status

Other changes:

    combined all Logstash config into a single file
    created `cvss` and `cvss_severity` fields, which will always be populated from either cvss3 or cvss2
    renamed qualys_web -> qualys_was
    renamed qualys_vuln -> qualys_vm
    renamed plugin -> signature in field mappings
    added a helper script to pull Kibana API objects
    updated ES index template
This commit is contained in:
Quim Montal
2019-05-27 18:22:38 +02:00
committed by GitHub
21 changed files with 1389 additions and 1414 deletions

View File

@ -28,16 +28,18 @@ def main():
help='Path of config file', type=lambda x: isFileValid(parser, x.strip()))
parser.add_argument('-s', '--section', dest='section', required=False,
help='Section in config')
parser.add_argument('-f', '--filter', dest='scan_filter', required=False,
help='Retrieve scans matching this regex pattern')
parser.add_argument('--days', dest='days', type=int, required=False,
help='Retrieve scans from this many days ago to now')
parser.add_argument('-l', '--list', dest='list_scans', required=False, action="store_true",
help='List available scans')
parser.add_argument('--source', dest='source', required=False,
help='JIRA required only! Source scanner to report')
parser.add_argument('-n', '--scanname', dest='scanname', required=False,
help='JIRA required only! Scan name from scan to report')
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=True,
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
help='Prints status out to screen (defaults to True)')
parser.add_argument('-u', '--username', dest='username', required=False, default=None,
help='The NESSUS username', type=lambda x: x.strip())
parser.add_argument('-p', '--password', dest='password', required=False, default=None,
help='The NESSUS password', type=lambda x: x.strip())
parser.add_argument('-F', '--fancy', action='store_true',
help='Enable colourful logging output')
parser.add_argument('-d', '--debug', action='store_true',
@ -51,14 +53,14 @@ def main():
# First setup logging
logging.basicConfig(
stream=sys.stdout,
#format only applies when not using -F flag for colouring
# format only applies when not using -F flag for colouring
format='%(levelname)s:%(name)s:%(funcName)s:%(message)s',
level=logging.DEBUG if args.debug else logging.INFO
level=logging.DEBUG if args.debug else logging.INFO if args.verbose else logging.WARNING
)
logger = logging.getLogger()
# we set up the logger to log as well to file
fh = logging.FileHandler('vulnwhisperer.log')
fh.setLevel(logging.DEBUG if args.debug else logging.INFO)
fh.setLevel(logging.DEBUG if args.debug else logging.INFO if args.verbose else logging.WARNING)
fh.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s - %(funcName)s:%(message)s", "%Y-%m-%d %H:%M:%S"))
logger.addHandler(fh)
@ -75,9 +77,11 @@ def main():
try:
if args.config and not args.section:
# this remains a print since we are in the main binary
print('WARNING: {warning}'.format(warning='No section was specified, vulnwhisperer will scrape enabled modules from config file. \
\nPlease specify a section using -s. \
\nExample vuln_whisperer -c config.ini -s nessus'))
print(
"WARNING: No section was specified, vulnwhisperer will scrape enabled modules from config file. \
\nPlease specify a section using -s. \
\nExample vuln_whisperer -c config.ini -s nessus"
)
logger.info('No section was specified, vulnwhisperer will scrape enabled modules from the config file.')
config = vwConfig(config_in=args.config)
@ -87,30 +91,32 @@ def main():
vw = vulnWhisperer(config=args.config,
profile=section,
verbose=args.verbose,
username=args.username,
password=args.password,
debug=args.debug,
source=args.source,
scanname=args.scanname)
scan_filter=args.scan_filter,
days=args.days,
scanname=args.scanname,
list_scans=args.list_scans)
exit_code += vw.whisper_vulnerabilities()
else:
logger.info('Running vulnwhisperer for section {}'.format(args.section))
vw = vulnWhisperer(config=args.config,
profile=args.section,
verbose=args.verbose,
username=args.username,
password=args.password,
debug=args.debug,
source=args.source,
scanname=args.scanname)
scan_filter=args.scan_filter,
days=args.days,
scanname=args.scanname,
list_scans=args.list_scans)
exit_code += vw.whisper_vulnerabilities()
close_logging_handlers(logger)
sys.exit(exit_code)
except Exception as e:
if args.verbose:
# this will remain a print since we are in the main binary
logger.error('{}'.format(str(e)))
print('ERROR: {error}'.format(error=e))
logger.error('{}'.format(str(e)))
print('ERROR: {error}'.format(error=e))
# TODO: fix this to NOT be exit 2 unless in error
close_logging_handlers(logger)
sys.exit(2)

View File

@ -9,7 +9,8 @@ password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/
db_path=/opt/VulnWhisperer/data/database
trash=false
verbose=true
verbose=false
scan_filter=
[tenable]
enabled=true
@ -22,7 +23,8 @@ password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/
db_path=/opt/VulnWhisperer/data/database
trash=false
verbose=true
verbose=false
scan_filter=
[qualys_web]
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
@ -33,6 +35,7 @@ password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_web/
db_path=/opt/VulnWhisperer/data/database
verbose=true
scan_filter=
# Set the maximum number of retries each connection should attempt.
#Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
@ -48,7 +51,8 @@ username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_vuln/
db_path=/opt/VulnWhisperer/data/database
verbose=true
verbose=false
scan_filter=
[detectify]
#Reference https://developer.detectify.com/
@ -61,6 +65,7 @@ password = examplepass
write_path =/opt/VulnWhisperer/data/detectify/
db_path = /opt/VulnWhisperer/data/database
verbose = true
scan_filter=
[openvas]
enabled = false
@ -70,7 +75,8 @@ username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/openvas/
db_path=/opt/VulnWhisperer/data/database
verbose=true
verbose=false
scan_filter=
[jira]
enabled = false
@ -88,7 +94,7 @@ dns_resolv = False
#scan_name = Test Scan
#jira_project = PROJECT
; if multiple components, separate by "," = None
#components =
#components =
; minimum criticality to report (low, medium, high or critical) = None
#min_critical_to_report = high

View File

@ -9,7 +9,8 @@ password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/
db_path=/opt/VulnWhisperer/data/database
trash=false
verbose=true
verbose=false
scan_filter=
[tenable]
enabled=true
@ -22,73 +23,78 @@ password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/
db_path=/opt/VulnWhisperer/data/database
trash=false
verbose=true
verbose=false
scan_filter=
[qualys_web]
[qualys_was]
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
enabled = true
hostname = qualys_web
username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_web/
enabled=true
hostname=qualys_was
username=exampleuser
password=examplepass
write_path=/opt/VulnWhisperer/data/qualys_was/
db_path=/opt/VulnWhisperer/data/database
verbose=true
verbose=false
scan_filter=
# Set the maximum number of retries each connection should attempt.
#Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
max_retries = 10
max_retries=10
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
template_id = 289109
template_id=289109
[qualys_vuln]
[qualys_vm]
#Reference https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf to find your API
enabled = true
hostname = qualys_vuln
username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_vuln/
enabled=true
hostname=qualys_vm
username=exampleuser
password=examplepass
write_path=/opt/VulnWhisperer/data/qualys_vm/
db_path=/opt/VulnWhisperer/data/database
verbose=true
verbose=false
scan_filter=
[detectify]
#Reference https://developer.detectify.com/
enabled = false
hostname = detectify
enabled=false
hostname=detectify
#username variable used as apiKey
username = exampleuser
username=exampleuser
#password variable used as secretKey
password = examplepass
password=examplepass
write_path =/opt/VulnWhisperer/data/detectify/
db_path = /opt/VulnWhisperer/data/database
verbose = true
db_path=/opt/VulnWhisperer/data/database
verbose=false
scan_filter=
[openvas]
enabled = true
hostname = openvas
port = 4000
username = exampleuser
password = examplepass
enabled=true
hostname=openvas
port=4000
username=exampleuser
password=examplepass
write_path=/opt/VulnWhisperer/data/openvas/
db_path=/opt/VulnWhisperer/data/database
verbose=true
verbose=false
scan_filter=
[jira]
enabled = false
hostname = jira-host
username = username
password = password
write_path = /opt/VulnWhisperer/data/jira/
db_path = /opt/VulnWhisperer/data/database
verbose = true
dns_resolv = False
enabled=false
hostname=jira-host
username=username
password=password
write_path=/opt/VulnWhisperer/data/jira/
db_path=/opt/VulnWhisperer/data/database
verbose=false
dns_resolv=False
#Sample jira report scan, will automatically be created for existent scans
#[jira.qualys_vuln.test_scan]
#source = qualys_vuln
#scan_name = Test Scan
#jira_project = PROJECT
; if multiple components, separate by "," = None
#components =
; minimum criticality to report (low, medium, high or critical) = None
#min_critical_to_report = high
#[jira.qualys_vm.test_scan]
#source=qualys_vm
#scan_name=Test Scan
#jira_project=PROJECT
; if multiple components, separate by "," = None
#components =
; minimum criticality to report (low, medium, high or critical) = None
#min_critical_to_report=high

View File

@ -81,12 +81,13 @@ services:
entrypoint: [
"vuln_whisperer",
"-F",
"-v",
"-c",
"/opt/VulnWhisperer/vulnwhisperer.ini",
"--mock",
"--mock_dir",
"/tests/data"
]
]
volumes:
- ./data/vulnwhisperer/:/opt/VulnWhisperer/data
# - ./resources/elk6/vulnwhisperer.ini:/opt/VulnWhisperer/vulnwhisperer.ini

View File

@ -0,0 +1,15 @@
"""Export Kibana saved objects to a JSON file.

Queries the Kibana saved-objects API for each supported object type and
writes the combined, id-sorted list to ``kibana_APIonly.json``.  The
volatile ``updated_at`` field is stripped so repeated exports diff cleanly.
"""
import json

import requests

KIBANA_FIND_URL = 'http://localhost:5601/api/saved_objects/_find?per_page=500&type={}'
OBJECT_TYPES = ['dashboard', 'visualization', 'search', 'index-pattern', 'timelion-sheet']

api_objects = []
for object_type in OBJECT_TYPES:
    response = requests.get(KIBANA_FIND_URL.format(object_type)).json()
    api_objects += response['saved_objects']
    # Progress/summary output per object type (original used py2 print statements).
    print(object_type, len(response['saved_objects']))
print(len(api_objects))

for api_object in api_objects:
    # Drop volatile metadata so the exported file is stable between runs.
    api_object.pop('updated_at', None)

# Use a context manager instead of passing a bare open() into json.dump,
# which leaked the file handle in the original.
with open('kibana_APIonly.json', 'w') as fh:
    json.dump(sorted(api_objects, key=lambda obj: obj['id']), fh, indent=2)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -29,21 +29,24 @@
"cve": {
"type": "keyword"
},
"cvss_base": {
"cvss": {
"type": "float"
},
"cvss_severity": {
"type": "keyword"
},
"cvss_temporal": {
"cvss2_base": {
"type": "float"
},
"cvss_vector": {
"cvss2_severity": {
"type": "keyword"
},
"cvss": {
"cvss2_temporal": {
"type": "float"
},
"cvss2_vector": {
"type": "keyword"
},
"cvss3_base": {
"type": "float"
},
@ -136,10 +139,10 @@
"plugin_family": {
"type": "keyword"
},
"plugin_id": {
"signature_id": {
"type": "keyword"
},
"plugin_name": {
"signature": {
"type": "keyword"
},
"plugin_output": {
@ -168,7 +171,14 @@
"type": "keyword"
},
"scan_name": {
"type": "keyword"
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"norms": false,
"type": "text"
},
"scan_source": {
"type": "keyword"

View File

@ -1,49 +1,60 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 12/30/2017
# Version 0.3
# Description: Take in scan reports from all VulnWhisperer modules (Nessus, Tenable, Qualys VM/WAS, OpenVAS, JIRA) and ship them to Elasticsearch
input {
file {
path => [ "/opt/VulnWhisperer/data/qualys_vuln/*.json" ]
codec => json
start_position => "beginning"
tags => [ "qualys_vuln" ]
mode => "read"
path => ["/opt/VulnWhisperer/data/nessus/**/*.json", "/opt/VulnWhisperer/data/openvas/*.json", "/opt/VulnWhisperer/data/qualys_vm/*.json", "/opt/VulnWhisperer/data/qualys_was/*.json", "/opt/VulnWhisperer/data/tenable/*.json"]
start_position => "beginning"
file_completed_action => "delete"
file_chunk_size => 262144
}
file {
path => [ "/opt/VulnWhisperer/data/qualys_web/*.json" ]
codec => json
start_position => "beginning"
tags => [ "qualys_web" ]
mode => "read"
path => "/opt/VulnWhisperer/data/jira/*.json"
tags => [ "jira" ]
start_position => "beginning"
file_completed_action => "delete"
}
}
filter {
if "qualys_vuln" in [tags] or "qualys_web" in [tags] {
if [scan_source] in ["nessus", "tenable", "qualys_vm", "qualys_was", "openvas"] {
# Parse the date/time from scan_time
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# Add scan_source to tags
mutate {
add_field => { "[tags]" => "%{scan_source}" }
}
# Create a unique document_id if _unique field exists
if [_unique] {
# Set document ID from _unique
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
# Do we need this?
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss2" => "float"}
convert => { "cvss2_base" => "float"}
convert => { "cvss2_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
}
if [scan_source] == "qualys_was" {
if [first_time_detected] {
date {
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
@ -68,32 +79,32 @@ filter {
target => "last_time_tested"
}
}
# if [asset] =~ "\.yourdomain\.(com|net)$" {
# mutate {
# add_tag => [ "critical_asset" ]
# }
# }
if [_unique] {
# Set document ID from _unique
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
output {
if "qualys_vuln" in [tags] or "qualys_web" in [tags] {
if [scan_source] in ["nessus", "tenable", "qualys_vm", "qualys_was", "openvas"] {
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
manage_template => false
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
manage_template => false
}
}
}
}
# Should these go to the same index?
if "jira" in [tags] {
stdout { codec => rubydebug }
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}

View File

@ -1,71 +0,0 @@
# Author: Austin Taylor and Justin Henderson
# Email: email@austintaylor.io
# Last Update: 12/20/2017
# Version 0.3
# Description: Take in nessus reports from vulnWhisperer and pumps into logstash
# Reads Nessus and Tenable.io scan exports written by VulnWhisperer and
# indexes them into Elasticsearch, using the pre-computed _unique field as
# the document id when it is present (makes re-imports idempotent).
input {
# Nessus exports: one JSON event per finding; source files are deleted once fully read.
file {
path => "/opt/VulnWhisperer/data/nessus/**/*.json"
mode => "read"
start_position => "beginning"
file_completed_action => "delete"
tags => "nessus"
codec => json
}
# Tenable.io exports, same layout and lifecycle as the Nessus block above.
file {
path => "/opt/VulnWhisperer/data/tenable/*.json"
mode => "read"
start_position => "beginning"
file_completed_action => "delete"
tags => "tenable"
codec => json
}
}
filter {
if "nessus" in [tags] or "tenable" in [tags] {
# scan_time arrives as a UNIX epoch; promote it to the event timestamp.
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# Coerce numeric score/count fields so ES maps them as numbers, not strings.
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
if [_unique] {
# Set document ID from _unique
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
output {
if "nessus" in [tags] or "tenable" in [tags]{
# With an id: deterministic document_id so re-processing overwrites instead of duplicating.
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}
}

View File

@ -1,131 +0,0 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
# Description: Take in OpenVAS scan reports from vulnWhisperer and pumps into logstash
input {
file {
path => "/opt/VulnWhisperer/data/openvas/*.json"
codec => json
start_position => "beginning"
tags => [ "openvas_scan", "openvas" ]
mode => "read"
# NOTE(review): start_position is declared twice in this block; the
# duplicate is redundant and one of them should be removed.
start_position => "beginning"
file_completed_action => "delete"
}
}
filter {
if "openvas_scan" in [tags] {
# scan_time arrives as a UNIX epoch; promote it to the event timestamp.
date {
match => [ "scan_time", "UNIX" ]
target => "@timestamp"
remove_field => ["scan_time"]
}
# TODO - move this mapping into the vulnwhisperer module
translate {
field => "[risk_number]"
destination => "[risk]"
dictionary => {
"0" => "Info"
"1" => "Low"
"2" => "Medium"
"3" => "High"
"4" => "Critical"
}
}
# NOTE(review): these branches compare [risk] against "1".."5" right
# after translate rewrote it to Info/Low/...; they appear to handle the
# untranslated case, shifting the scale down by one — confirm intent.
if [risk] == "1" {
mutate { add_field => { "risk_number" => 0 }}
mutate { replace => { "risk" => "info" }}
}
if [risk] == "2" {
mutate { add_field => { "risk_number" => 1 }}
mutate { replace => { "risk" => "low" }}
}
if [risk] == "3" {
mutate { add_field => { "risk_number" => 2 }}
mutate { replace => { "risk" => "medium" }}
}
if [risk] == "4" {
mutate { add_field => { "risk_number" => 3 }}
mutate { replace => { "risk" => "high" }}
}
if [risk] == "5" {
mutate { add_field => { "risk_number" => 4 }}
mutate { replace => { "risk" => "critical" }}
}
# The raw JSON payload is not needed once fields are extracted.
mutate {
remove_field => "message"
}
# Parse the four detection/test timestamps from Qualys-style date strings.
if [first_time_detected] {
date {
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "first_time_detected"
}
}
if [first_time_tested] {
date {
match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "first_time_tested"
}
}
if [last_time_detected] {
date {
match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "last_time_detected"
}
}
if [last_time_tested] {
date {
match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
target => "last_time_tested"
}
}
# Coerce numeric score/count fields so ES maps them as numbers, not strings.
mutate {
convert => { "cvss" => "float"}
convert => { "cvss_base" => "float"}
convert => { "cvss_temporal" => "float"}
convert => { "cvss3" => "float"}
convert => { "cvss3_base" => "float"}
convert => { "cvss3_temporal" => "float"}
convert => { "risk_number" => "integer"}
convert => { "total_times_detected" => "integer"}
}
# Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break.
# if [asset] =~ "^10\.0\.100\." {
# mutate {
# add_tag => [ "critical_asset" ]
# }
# }
if [_unique] {
# Set document ID from _unique
mutate {
rename => { "_unique" => "[@metadata][id]" }
}
}
}
}
output {
if "openvas" in [tags] {
# With an id: deterministic document_id so re-processing overwrites instead of duplicating.
if [@metadata][id] {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
document_id => "%{[@metadata][id]}"
}
} else {
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}
}

View File

@ -1,25 +0,0 @@
# Description: Take in jira tickets from vulnWhisperer and pumps into logstash
# Reads JIRA ticket exports written by VulnWhisperer and indexes them into
# Elasticsearch; each event is also echoed to stdout for debugging.
input {
file {
path => "/opt/VulnWhisperer/data/jira/*.json"
type => json
codec => json
start_position => "beginning"
mode => "read"
# NOTE(review): start_position is declared twice in this block; the
# duplicate is redundant and one of them should be removed.
start_position => "beginning"
file_completed_action => "delete"
tags => [ "jira" ]
}
}
output {
if "jira" in [tags] {
# rubydebug stdout output is for interactive pipeline debugging.
stdout { codec => rubydebug }
elasticsearch {
hosts => [ "elasticsearch:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
}

View File

@ -61,8 +61,8 @@ fi
# ((return_code = return_code + 1))
# fi
# Test Nessus plugin_name:Backported Security Patch Detection (FTP)
nessus_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=plugin_name:%22Backported%20Security%20Patch%20Detection%20(FTP)%22%20AND%20asset:176.28.50.164%20AND%20tags:nessus" | jq '.hits.hits[]._source')
# Test Nessus signature:Backported Security Patch Detection (FTP)
nessus_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=signature:%22Backported%20Security%20Patch%20Detection%20(FTP)%22%20AND%20asset:176.28.50.164%20AND%20tags:nessus" | jq '.hits.hits[]._source')
if echo $nessus_doc | jq '.risk' | grep -q "none"; then
green "✅ Passed: Nessus risk == none"
else
@ -70,8 +70,8 @@ else
((return_code = return_code + 1))
fi
# Test Tenable plugin_name:Backported Security Patch Detection (FTP)
tenable_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=plugin_name:%22Backported%20Security%20Patch%20Detection%20(FTP)%22%20AND%20asset:176.28.50.164%20AND%20tags:tenable" | jq '.hits.hits[]._source')
# Test Tenable signature:Backported Security Patch Detection (FTP)
tenable_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=signature:%22Backported%20Security%20Patch%20Detection%20(FTP)%22%20AND%20asset:176.28.50.164%20AND%20tags:tenable" | jq '.hits.hits[]._source')
# Test asset
if echo $tenable_doc | jq .asset | grep -q '176.28.50.164'; then
green "✅ Passed: Tenable asset == 176.28.50.164"
@ -88,21 +88,21 @@ else
((return_code = return_code + 1))
fi
# Test Qualys plugin_name:OpenSSL Multiple Remote Security Vulnerabilities
qualys_vuln_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=tags:qualys_vuln%20AND%20ip:%22176.28.50.164%22%20AND%20plugin_name:%22OpenSSL%20Multiple%20Remote%20Security%20Vulnerabilities%22%20AND%20port:465" | jq '.hits.hits[]._source')
# Test Qualys signature:OpenSSL Multiple Remote Security Vulnerabilities
qualys_vm_doc=$(curl -s "$elasticsearch_url/logstash-vulnwhisperer-*/_search?q=tags:qualys_vm%20AND%20ip:%22176.28.50.164%22%20AND%20signature:%22OpenSSL%20Multiple%20Remote%20Security%20Vulnerabilities%22%20AND%20port:465" | jq '.hits.hits[]._source')
# Test @timestamp
if echo $qualys_vuln_doc | jq '.["@timestamp"]' | grep -q '2019-03-30T10:17:41.000Z'; then
if echo $qualys_vm_doc | jq '.["@timestamp"]' | grep -q '2019-03-30T10:17:41.000Z'; then
green "✅ Passed: Qualys VM @timestamp == 2019-03-30T10:17:41.000Z"
else
red "❌ Failed: Qualys VM @timestamp == 2019-03-30T10:17:41.000Z was: $(echo $qualys_vuln_doc | jq '.["@timestamp"]') instead"
red "❌ Failed: Qualys VM @timestamp == 2019-03-30T10:17:41.000Z was: $(echo $qualys_vm_doc | jq '.["@timestamp"]') instead"
((return_code = return_code + 1))
fi
# Test @XXXX
if echo $qualys_vuln_doc | jq '.cvss' | grep -q '5.6'; then
if echo $qualys_vm_doc | jq '.cvss' | grep -q '5.6'; then
green "✅ Passed: Qualys VM cvss == 5.6"
else
red "❌ Failed: Qualys VM cvss == 5.6 was: $(echo $qualys_vuln_doc | jq '.cvss') instead"
red "❌ Failed: Qualys VM cvss == 5.6 was: $(echo $qualys_vm_doc | jq '.cvss') instead"
((return_code = return_code + 1))
fi

View File

@ -59,8 +59,8 @@ yellow "\n*********************************************"
yellow "* Test two failed scans *"
yellow "*********************************************"
rm -rf /opt/VulnWhisperer/*
yellow "Removing ${TEST_PATH}/qualys_vuln/scan_1553941061.87241"
mv "${TEST_PATH}/qualys_vuln/scan_1553941061.87241"{,.bak}
yellow "Removing ${TEST_PATH}/qualys_vm/scan_1553941061.87241"
mv "${TEST_PATH}/qualys_vm/scan_1553941061.87241"{,.bak}
if vuln_whisperer -F -c configs/test.ini --mock --mock_dir "${TEST_PATH}"; [[ $? -eq 2 ]]; then
green "\n✅ Passed: Test two failed scans"
else
@ -83,7 +83,7 @@ yellow "\n*********************************************"
yellow "* Test only Qualys VM with one failed scan *"
yellow "*********************************************"
rm -rf /opt/VulnWhisperer/*
if vuln_whisperer -F -c configs/test.ini -s qualys_vuln --mock --mock_dir "${TEST_PATH}"; [[ $? -eq 1 ]]; then
if vuln_whisperer -F -c configs/test.ini -s qualys_vm --mock --mock_dir "${TEST_PATH}"; [[ $? -eq 1 ]]; then
green "\n✅ Passed: Test only Qualys VM with one failed scan"
else
red "\n❌ Failed: Test only Qualys VM with one failed scan"
@ -91,7 +91,7 @@ else
fi
# Restore the removed files
mv "${TEST_PATH}/qualys_vuln/scan_1553941061.87241.bak" "${TEST_PATH}/qualys_vuln/scan_1553941061.87241"
mv "${TEST_PATH}/qualys_vm/scan_1553941061.87241.bak" "${TEST_PATH}/qualys_vm/scan_1553941061.87241"
mv "${TEST_PATH}/nessus/GET_scans_exports_164_download.bak" "${TEST_PATH}/nessus/GET_scans_exports_164_download"
exit $return_code

View File

@ -2,7 +2,7 @@ import json
import logging
import sys
import time
from datetime import datetime
from datetime import datetime, timedelta
import pytz
import requests
@ -17,7 +17,7 @@ class NessusAPI(object):
SCANS = '/scans'
SCAN_ID = SCANS + '/{scan_id}'
HOST_VULN = SCAN_ID + '/hosts/{host_id}'
PLUGINS = HOST_VULN + '/plugins/{plugin_id}'
PLUGINS = HOST_VULN + '/plugins/{signature_id}'
EXPORT = SCAN_ID + '/export'
EXPORT_TOKEN_DOWNLOAD = '/scans/exports/{token_id}/download'
EXPORT_FILE_DOWNLOAD = EXPORT + '/{file_id}/download'
@ -25,27 +25,27 @@ class NessusAPI(object):
EXPORT_HISTORY = EXPORT + '?history_id={history_id}'
# All column mappings should be lowercase
COLUMN_MAPPING = {
'cvss base score': 'cvss_base',
'cvss temporal score': 'cvss_temporal',
'cvss temporal vector': 'cvss_temporal_vector',
'cvss base score': 'cvss2_base',
'cvss temporal score': 'cvss2_temporal',
'cvss temporal vector': 'cvss2_temporal_vector',
'cvss vector': 'cvss2_vector',
'cvss3 base score': 'cvss3_base',
'cvss3 temporal score': 'cvss3_temporal',
'cvss3 temporal vector': 'cvss3_temporal_vector',
'fqdn': 'dns',
'host': 'asset',
'ip address': 'ip',
'name': 'plugin_name',
'name': 'signature',
'os': 'operating_system',
'plugin id': 'signature_id',
'see also': 'exploitability',
'system type': 'category',
'vulnerability state': 'state'
}
SEVERITY_MAPPING = {'none': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4}
def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True, profile=None, access_key=None, secret_key=None):
self.logger = logging.getLogger('NessusAPI')
if verbose:
self.logger.setLevel(logging.DEBUG)
self.logger.setLevel(logging.DEBUG if verbose else logging.INFO)
if not all((username, password)) and not all((access_key, secret_key)):
raise Exception('ERROR: Missing username, password or API keys.')
@ -81,9 +81,6 @@ class NessusAPI(object):
else:
self.login()
self.scans = self.get_scans()
self.scan_ids = self.get_scan_ids()
def login(self):
auth = '{"username":"%s", "password":"%s"}' % (self.user, self.password)
resp = self.request(self.SESSION, data=auth, json_output=False)
@ -92,7 +89,7 @@ class NessusAPI(object):
else:
raise Exception('[FAIL] Could not login to Nessus')
def request(self, url, data=None, headers=None, method='POST', download=False, json_output=False):
def request(self, url, data=None, headers=None, method='POST', download=False, json_output=False, params=None):
timeout = 0
success = False
@ -101,7 +98,7 @@ class NessusAPI(object):
self.logger.debug('Requesting to url {}'.format(url))
while (timeout <= 10) and (not success):
response = getattr(self.session, method)(url, data=data)
response = getattr(self.session, method)(url, data=data, params=params)
if response.status_code == 401:
if url == self.base + self.SESSION:
break
@ -130,12 +127,16 @@ class NessusAPI(object):
return response_data
return response
def get_scans(self, days=None):
    """Fetch the scan listing from the Nessus ``/scans`` endpoint.

    :param days: optional look-back window; when given, only scans
        modified within the last *days* days are requested via the API's
        ``last_modification_date`` parameter (epoch seconds).
    :returns: parsed JSON response from the API.
    """
    parameters = {}
    if days is not None:
        # strftime('%s') is a glibc extension and is not portable;
        # compute the epoch timestamp explicitly instead.
        cutoff = datetime.now() - timedelta(days=days)
        parameters = {'last_modification_date': str(int(time.mktime(cutoff.timetuple())))}
    return self.request(self.SCANS, method='GET', params=parameters, json_output=True)
def get_scan_ids(self):
scans = self.scans
def get_scan_ids(self, scans):
scan_ids = [scan_id['id'] for scan_id in scans['scans']] if scans['scans'] else []
self.logger.debug('Found {} scan_ids'.format(len(scan_ids)))
return scan_ids
@ -165,8 +166,6 @@ class NessusAPI(object):
report_status = self.request(self.EXPORT_STATUS.format(scan_id=scan_id, file_id=file_id), method='GET',
json_output=True)
running = report_status['status'] != 'ready'
sys.stdout.write('.')
sys.stdout.flush()
if self.profile == 'tenable' or self.api_keys:
content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
else:
@ -208,10 +207,6 @@ class NessusAPI(object):
self.logger.debug('Dropping redundant tenable fields')
df.drop('CVSS', axis=1, inplace=True, errors='ignore')
if self.profile == 'nessus':
# Set IP from Host field
df['ip'] = df['Host']
# Lowercase and map fields from COLUMN_MAPPING
df.columns = [x.lower() for x in df.columns]
df.rename(columns=self.COLUMN_MAPPING, inplace=True)
@ -224,16 +219,16 @@ class NessusAPI(object):
df.fillna('', inplace=True)
if self.profile == 'nessus':
# Set IP from asset field
df["ip"] = df.loc[df["asset"].str.match("^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$"), "asset"]
# upper/lowercase fields
self.logger.debug('Changing case of fields')
df['cve'] = df['cve'].str.upper()
df['protocol'] = df['protocol'].str.lower()
df['risk'] = df['risk'].str.lower()
# Map risk to a SEVERITY MAPPING value
self.logger.debug('Mapping risk to severity number')
df['risk_number'] = df['risk'].map(self.SEVERITY_MAPPING)
df.fillna('', inplace=True)
return df

View File

@ -13,6 +13,20 @@ from bs4 import BeautifulSoup
class OpenVAS_API(object):
OMP = '/omp'
# Maps raw OpenVAS report column headers (after lower-casing) to
# VulnWhisperer's normalised field names; applied by map_fields().
COLUMN_MAPPING = {
'affected software/os': 'affected_software',
'cves': 'cve',
'impact': 'description',
'nvt name': 'signature',
'nvt oid': 'signature_id',
'other references': 'exploitability',
'port protocol': 'protocol',
'severity': 'risk',
'solution type': 'category',
'task name': 'scan_name',
'specific result': 'plugin_output',
'summary': 'synopsis',
}
def __init__(self,
hostname=None,
@ -200,9 +214,16 @@ class OpenVAS_API(object):
def map_fields(self, df):
self.logger.debug('Mapping fields')
# Lowercase and map fields from COLUMN_MAPPING
df.columns = [x.lower() for x in df.columns]
df.rename(columns=self.COLUMN_MAPPING, inplace=True)
df.columns = [x.replace(' ', '_') for x in df.columns]
return df
def transform_values(self, df):
self.logger.debug('Transforming values')
df['port'].fillna(0).astype(int)
df['risk'] = df['risk'].str.lower()
df['asset'] = df['ip']
df.fillna('', inplace=True)
return df

View File

@ -5,6 +5,7 @@ __author__ = 'Nathan Young'
import logging
import sys
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta
import dateutil.parser as dp
import pandas as pd
@ -18,7 +19,7 @@ class qualysWhisperAPI(object):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
self.qgc = qualysapi.connect(config, 'qualys_vuln')
self.qgc = qualysapi.connect(config, 'qualys_vm')
# Fail early if we can't make a request or auth is incorrect
self.qgc.request('about.php')
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
@ -29,6 +30,8 @@ class qualysWhisperAPI(object):
def scan_xml_parser(self, xml):
all_records = []
root = ET.XML(xml.encode('utf-8'))
if len(root.find('.//SCAN_LIST')) == 0:
return pd.DataFrame(columns=['id', 'status'])
for child in root.find('.//SCAN_LIST'):
all_records.append({
'name': child.find('TITLE').text,
@ -40,12 +43,17 @@ class qualysWhisperAPI(object):
})
return pd.DataFrame(all_records)
def get_all_scans(self, days=None):
    """List finished Qualys VM scans, optionally limited to recent ones.

    :param days: optional look-back window; when given, only scans
        launched within the last *days* days are listed.
    :returns: DataFrame produced by ``scan_xml_parser``.
    """
    if days is None:
        # Effectively "no lower bound": list every scan ever launched.
        self.launched_date = '0001-01-01'
    else:
        self.launched_date = (datetime.now() - timedelta(days=days)).strftime('%Y-%m-%d')
    parameters = {
        'action': 'list',
        'echo_request': 0,
        'show_op': 0,
        # Only finished scans have downloadable results.
        'state': 'Finished',
        'launched_after_datetime': self.launched_date,
    }
    scans_xml = self.qgc.request(self.SCANS, parameters)
    return self.scan_xml_parser(scans_xml)
@ -83,14 +91,12 @@ class qualysVulnScan:
'impact': 'synopsis',
'ip_status': 'state',
'os': 'operating_system',
'qid': 'plugin_id',
'qid': 'signature_id',
'results': 'plugin_output',
'threat': 'description',
'title': 'plugin_name'
'title': 'signature'
}
SEVERITY_MAPPING = {0: 'none', 1: 'low', 2: 'medium', 3: 'high',4: 'critical'}
def __init__(
self,
config=None,
@ -164,17 +170,21 @@ class qualysVulnScan:
# Contruct the CVSS vector
self.logger.info('Extracting CVSS components')
df['cvss_vector'] = df['cvss_base'].str.extract('\((.*)\)', expand=False)
df['cvss_base'] = df['cvss_base'].str.extract('^(\d+(?:\.\d+)?)', expand=False)
df['cvss_temporal_vector'] = df['cvss_temporal'].str.extract('\((.*)\)', expand=False)
df['cvss_temporal'] = df['cvss_temporal'].str.extract('^(\d+(?:\.\d+)?)', expand=False)
df['cvss2_vector'] = df['cvss_base'].str.extract('\((.*)\)', expand=False)
df['cvss2_base'] = df['cvss_base'].str.extract('^(\d+(?:\.\d+)?)', expand=False)
df['cvss2_temporal_vector'] = df['cvss_temporal'].str.extract('\((.*)\)', expand=False)
df['cvss2_temporal'] = df['cvss_temporal'].str.extract('^(\d+(?:\.\d+)?)', expand=False)
df.drop('cvss_base', axis=1, inplace=True, errors='ignore')
df.drop('cvss_temporal', axis=1, inplace=True, errors='ignore')
# Set asset to ip
df['asset'] = df['ip']
# Set dns to fqdn if missing
df.loc[df['dns'] == '', 'dns'] = df['fqdn']
# Convert Qualys severity to standardised risk number
df['risk_number'] = df['severity'].astype(int)-1
df['risk'] = df['risk_number'].map(self.SEVERITY_MAPPING)
df['risk_number'] = df['severity'].astype(int)-1
df.fillna('', inplace=True)

View File

@ -7,6 +7,7 @@ import logging
import os
import sys
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta
import dateutil.parser as dp
import pandas as pd
@ -38,7 +39,7 @@ class qualysWhisperAPI(object):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
self.qgc = qualysapi.connect(config, 'qualys_web')
self.qgc = qualysapi.connect(config, 'qualys_was')
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
@ -46,7 +47,7 @@ class qualysWhisperAPI(object):
#"content-type": "text/xml"}
"Accept" : "application/json",
"Content-Type": "application/json"}
self.config_parse = qcconf.QualysConnectConfig(config, 'qualys_web')
self.config_parse = qcconf.QualysConnectConfig(config, 'qualys_was')
try:
self.template_id = self.config_parse.get_template_id()
except:
@ -60,10 +61,12 @@ class qualysWhisperAPI(object):
"""
Checks number of scans, used to control the api limits
"""
parameters = (
E.ServiceRequest(
parameters = E.ServiceRequest(
E.filters(
E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status))))
E.Criteria({"field": "status", "operator": "EQUALS"}, status),
E.Criteria({"field": "launchedDate", "operator": "GREATER"}, self.launched_date)
)
)
xml_output = self.qgc.request(self.COUNT_WASSCAN, parameters)
root = objectify.fromstring(xml_output.encode('utf-8'))
return root.count.text
@ -71,8 +74,8 @@ class qualysWhisperAPI(object):
def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
report_xml = E.ServiceRequest(
E.filters(
E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status
),
E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status),
E.Criteria({"field": "launchedDate", "operator": "GREATER"}, self.launched_date)
),
E.preferences(
E.startFromOffset(str(offset)),
@ -104,7 +107,12 @@ class qualysWhisperAPI(object):
all_records.append(record)
return pd.DataFrame(all_records)
def get_all_scans(self, limit=1000, offset=1, status='FINISHED'):
def get_all_scans(self, limit=1000, offset=1, status='FINISHED', days=None):
if days == None:
self.launched_date = '0001-01-01'
else:
self.launched_date = (datetime.now() - timedelta(days=days)).strftime('%Y-%m-%d')
qualys_api_limit = limit
dataframes = []
_records = []
@ -120,6 +128,8 @@ class qualysWhisperAPI(object):
_records.append(scan_info)
self.logger.debug('Converting XML to DataFrame')
dataframes = [self.xml_parser(xml) for xml in _records]
if not dataframes:
return pd.DataFrame(columns=['id'])
except Exception as e:
self.logger.error("Couldn't process all scans: {}".format(e))
@ -285,23 +295,23 @@ class qualysUtils:
class qualysScanReport:
COLUMN_MAPPING = {
'CVSS Base': 'cvss2_base',
'CVSS Temporal': 'cvss2_temporal',
'DescriptionCatSev': 'category_description',
'DescriptionSeverity': 'synopsis',
'Evidence #1': 'evidence',
'Payload #1': 'payload',
'QID': 'plugin_id',
'QID': 'signature_id',
'Request Headers #1': 'request_headers',
'Request Method #1': 'request_method',
'Request URL #1': 'request_url',
'Response #1': 'plugin_output',
'Title': 'plugin_name',
'Title': 'signature',
'Url': 'uri',
'URL': 'url',
'Vulnerability Category': 'type',
}
SEVERITY_MAPPING = {0: 'none', 1: 'low', 2: 'medium', 3: 'high', 4: 'critical'}
# URL Vulnerability Information
WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')
@ -521,11 +531,10 @@ class qualysScanReport:
# Convert Qualys severity to standardised risk number
df['risk_number'] = df['severity'].astype(int)-1
df['risk'] = df['risk_number'].map(self.SEVERITY_MAPPING)
# Extract dns field from URL
df['dns'] = df['url'].str.extract('https?://([^/]+)', expand=False)
df.loc[df['uri'] != '','dns'] = df.loc[df['uri'] != '','uri'].str.extract('https?://([^/]+)', expand=False)
df['dns'] = df.loc[df['uri'] != '','uri'].str.extract('https?://([^/]+)', expand=False)
# Set asset to web_application_name
df['asset'] = df['web_application_name']

View File

@ -31,22 +31,22 @@ class mockAPI(object):
for filename in self.get_files('{}/{}'.format(self.mock_dir, framework)):
method, resource = filename.split('_', 1)
resource = resource.replace('_', '/')
self.logger.debug('Adding mocked {} endpoint {} {}'.format(framework, method, resource))
self.logger.info('Adding mocked {} endpoint {} {}'.format(framework, method, resource))
httpretty.register_uri(
getattr(httpretty, method), 'https://{}:443/{}'.format(framework, resource),
body=open('{}/{}/{}'.format(self.mock_dir, framework, filename)).read()
)
def qualys_vuln_callback(self, request, uri, response_headers):
self.logger.debug('Simulating response for {} ({})'.format(uri, request.body))
def qualys_vm_callback(self, request, uri, response_headers):
self.logger.info('Simulating response for {} ({})'.format(uri, request.body))
if 'list' in request.parsed_body['action']:
return [200,
response_headers,
open(self.qualys_vuln_path + '/scans').read()]
open(self.qualys_vm_path + '/scans').read()]
elif 'fetch' in request.parsed_body['action']:
try:
response_body = open('{}/{}'.format(
self.qualys_vuln_path,
self.qualys_vm_path,
request.parsed_body['scan_ref'][0].replace('/', '_'))
).read()
except:
@ -54,43 +54,43 @@ class mockAPI(object):
response_body = ''
return [200, response_headers, response_body]
def create_qualys_vuln_resource(self, framework):
def create_qualys_vm_resource(self, framework):
# Create health check endpoint
self.logger.debug('Adding mocked {} endpoint GET msp/about.php'.format(framework))
self.logger.info('Adding mocked {} endpoint GET msp/about.php'.format(framework))
httpretty.register_uri(
httpretty.GET,
'https://{}:443/msp/about.php'.format(framework),
body='')
self.logger.debug('Adding mocked {} endpoint {} {}'.format(framework, 'POST', 'api/2.0/fo/scan'))
self.logger.info('Adding mocked {} endpoint {} {}'.format(framework, 'POST', 'api/2.0/fo/scan'))
httpretty.register_uri(
httpretty.POST, 'https://{}:443/api/2.0/fo/scan/'.format(framework),
body=self.qualys_vuln_callback)
body=self.qualys_vm_callback)
def qualys_web_callback(self, request, uri, response_headers):
self.logger.debug('Simulating response for {} ({})'.format(uri, request.body))
def qualys_was_callback(self, request, uri, response_headers):
self.logger.info('Simulating response for {} ({})'.format(uri, request.body))
report_id = request.parsed_body.split('<WasScan><id>')[1].split('<')[0]
response_body = open('{}/create_{}'.format(self.qualys_web_path, report_id)).read()
response_body = open('{}/create_{}'.format(self.qualys_was_path, report_id)).read()
return [200, response_headers, response_body]
def create_qualys_web_resource(self, framework):
def create_qualys_was_resource(self, framework):
for filename in self.get_files('{}/{}'.format(self.mock_dir, framework)):
if filename.startswith('POST') or filename.startswith('GET'):
method, resource = filename.split('_', 1)
resource = resource.replace('_', '/')
self.logger.debug('Adding mocked {} endpoint {} {}'.format(framework, method, resource))
self.logger.info('Adding mocked {} endpoint {} {}'.format(framework, method, resource))
httpretty.register_uri(
getattr(httpretty, method), 'https://{}:443/{}'.format(framework, resource),
body=open('{}/{}/{}'.format(self.mock_dir, framework, filename)).read()
)
self.logger.debug('Adding mocked {} endpoint {} {}'.format(framework, 'POST', 'qps/rest/3.0/create/was/report'))
self.logger.info('Adding mocked {} endpoint {} {}'.format(framework, 'POST', 'qps/rest/3.0/create/was/report'))
httpretty.register_uri(
httpretty.POST, 'https://{}:443/qps/rest/3.0/create/was/report'.format(framework),
body=self.qualys_web_callback)
body=self.qualys_was_callback)
def openvas_callback(self, request, uri, response_headers):
self.logger.debug('Simulating response for {} ({})'.format(uri, request.body))
self.logger.info('Simulating response for {} ({})'.format(uri, request.body))
if request.querystring['cmd'][0] in ['get_reports', 'get_report_formats']:
response_body = open('{}/{}'.format(self.openvas_path, request.querystring['cmd'][0])).read()
@ -116,12 +116,12 @@ class mockAPI(object):
for framework in self.get_directories(self.mock_dir):
if framework in ['nessus', 'tenable']:
self.create_nessus_resource(framework)
elif framework == 'qualys_vuln':
self.qualys_vuln_path = self.mock_dir + '/' + framework
self.create_qualys_vuln_resource(framework)
elif framework == 'qualys_web':
self.qualys_web_path = self.mock_dir + '/' + framework
self.create_qualys_web_resource(framework)
elif framework == 'qualys_vm':
self.qualys_vm_path = self.mock_dir + '/' + framework
self.create_qualys_vm_resource(framework)
elif framework == 'qualys_was':
self.qualys_was_path = self.mock_dir + '/' + framework
self.create_qualys_was_resource(framework)
elif framework == 'openvas':
self.openvas_path = self.mock_dir + '/' + framework
self.create_openvas_resource(framework)

View File

@ -6,10 +6,13 @@ import io
import json
import logging
import os
import re
import socket
import sqlite3
import sys
import time
import warnings
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
@ -18,30 +21,31 @@ from lxml import objectify
from base.config import vwConfig
from frameworks.nessus import NessusAPI
from frameworks.openvas import OpenVAS_API
from frameworks.qualys_vuln import qualysVulnScan
from frameworks.qualys_web import qualysScanReport
from frameworks.qualys_vm import qualysVulnScan
from frameworks.qualys_was import qualysScanReport
from reporting.jira_api import JiraAPI
# Don't warn about capturing groups in regex filter
warnings.filterwarnings("ignore", 'This pattern has match groups')
class vulnWhispererBase(object):
CONFIG_SECTION = None
SEVERITY_NAME_MAPPING = {'none': 0, 'low': 1, 'medium': 2, 'high': 3, 'critical': 4}
SEVERITY_NUMBER_MAPPING = {0: 'none', 1: 'low', 2: 'medium', 3: 'high', 4: 'critical'}
def __init__(
self,
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
section=None,
scan_filter=None,
days=None,
develop=False,
):
self.logger = logging.getLogger('vulnWhispererBase')
if debug:
self.logger.setLevel(logging.DEBUG)
if self.CONFIG_SECTION is None:
raise Exception('Implementing class must define CONFIG_SECTION')
@ -50,6 +54,7 @@ class vulnWhispererBase(object):
self.db_name = db_name
self.purge = purge
self.develop = develop
self.days = days
if config is not None:
self.config = vwConfig(config_in=config)
@ -64,11 +69,28 @@ class vulnWhispererBase(object):
except:
self.username = None
self.password = None
try:
self.scan_filter = self.config.get(self.CONFIG_SECTION, 'scan_filter')
except:
self.scan_filter = scan_filter
self.write_path = self.config.get(self.CONFIG_SECTION, 'write_path')
self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger = logging.getLogger('vulnWhispererBase')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
# Preference command line argument over config file
if scan_filter:
self.scan_filter = scan_filter
if self.scan_filter:
self.logger.info('Filtering for scan names matching "{}"'.format(self.scan_filter))
# self.scan_filter = re.compile(scan_filter)
if self.days != None:
self.logger.info('Searching for scans within {} days to {}'.format(self.days, (datetime.now() - timedelta(days=days)).isoformat()))
# self.days = dp.parse(days)
# self.logger.info('Searching for scans after {}'.format(self.days))
if self.db_name is not None:
if self.db_path:
@ -254,17 +276,29 @@ class vulnWhispererBase(object):
"""Map and transform common data values"""
self.logger.info('Start common normalisation')
self.logger.info('Normalising CVSS')
for cvss_version in ['cvss', 'cvss3']:
if cvss_version + '_base' in df:
self.logger.info('Normalising {} base'.format(cvss_version))
# CVSS = cvss_temporal or cvss_base
df[cvss_version] = df[cvss_version + '_base']
df.loc[df[cvss_version + '_temporal'] != '', cvss_version] = df[cvss_version + '_temporal']
df.replace({'': np.nan}, inplace=True)
# Combine CVSS and CVSS3 vectors
# Map risk name to a risk value
if 'risk' in df and not 'risk_number' in df:
self.logger.debug('Mapping risk name to risk number')
df['risk_number'] = df['risk'].map(self.SEVERITY_NAME_MAPPING)
# Map risk value to a risk name
if 'risk_number' in df and not 'risk' in df:
self.logger.debug('Mapping risk number to risk name')
df['risk'] = df['risk_number'].map(self.SEVERITY_NUMBER_MAPPING)
self.logger.debug('Normalising CVSS')
for cvss_version in ['cvss', 'cvss2', 'cvss3']:
# cvssX = cvssX_temporal else cvssX_base
if cvss_version + '_base' in df:
self.logger.debug('Normalising {} base'.format(cvss_version))
df[cvss_version] = df[cvss_version + '_base']
df[cvss_version] = df[cvss_version + '_temporal'].fillna(df[cvss_version])
# Combine cvssX temporal and base vectors
if cvss_version + '_vector' in df and cvss_version + '_temporal_vector' in df:
self.logger.info('Normalising {} vector'.format(cvss_version))
self.logger.debug('Normalising {} vector'.format(cvss_version))
df[cvss_version + '_vector'] = (
df[[cvss_version + '_vector', cvss_version + '_temporal_vector']]
.apply(lambda x: '{}/{}'.format(x[0], x[1]), axis=1)
@ -272,33 +306,53 @@ class vulnWhispererBase(object):
)
df.drop(cvss_version + '_temporal_vector', axis=1, inplace=True)
# Map cvssX to severity name
if cvss_version in df:
self.logger.info('Normalising {} severity'.format(cvss_version))
# Map CVSS to severity name
df.loc[df[cvss_version].astype(str) == '', cvss_version] = None
self.logger.debug('Normalising {} severity'.format(cvss_version))
df[cvss_version] = df[cvss_version].astype('float')
# df.loc[df[cvss_version].isnull(), cvss_version + '_severity'] = 'info'
df.loc[df[cvss_version] == 0, cvss_version + '_severity'] = 'info'
# df[cvss_version + '_severity'] = 'informational'
df.loc[(df[cvss_version] > 0) & (df[cvss_version] < 3), cvss_version + '_severity'] = 'low'
df.loc[(df[cvss_version] >= 3) & (df[cvss_version] < 6), cvss_version + '_severity'] = 'medium'
df.loc[(df[cvss_version] >= 6) & (df[cvss_version] < 9), cvss_version + '_severity'] = 'high'
df.loc[(df[cvss_version] > 9) & (df[cvss_version].notnull()), cvss_version + '_severity'] = 'critical'
df.loc[(df[cvss_version] >= 9) & (df[cvss_version].notnull()), cvss_version + '_severity'] = 'critical'
self.logger.info('Creating Unique Document ID')
# Get a single cvss score derived from cvss3 else cvss2
if not 'cvss' in df:
if 'cvss2' in df:
df.loc[df['cvss2'].notnull(), 'cvss'] = df.loc[df['cvss2'].notnull(), 'cvss2']
df.loc[df['cvss2'].notnull(), 'cvss_severity'] = df.loc[df['cvss2'].notnull(), 'cvss2_severity']
if 'cvss3' in df:
df.loc[df['cvss3'].notnull(), 'cvss'] = df.loc[df['cvss3'].notnull(), 'cvss3']
df.loc[df['cvss3'].notnull(), 'cvss_severity'] = df.loc[df['cvss3'].notnull(), 'cvss3_severity']
df['cvss_severity'].fillna('informational', inplace=True)
self.logger.debug('Creating Unique Document ID')
df['_unique'] = df.index.values
if 'history_id' in df:
df['_unique'] = df[['scan_id', 'history_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
else:
df['_unique'] = df[['scan_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
# Rename cvss to cvss2
# Make cvss with no suffix == cvss3 else cvss2
# cvss = cvss3 if cvss3 else cvss2
# cvss_severity = cvss3_severity if cvss3_severity else cvss2_severity
df.replace({'': np.nan}, inplace=True)
return df
def print_available_scans(self, scan_list):
    """Print a fixed-width table of available scans and return 0.

    ``scan_list`` is a list of dicts, each carrying 'time', 'scan_name',
    'imported' and 'status' keys. Rows are printed newest-first (sorted
    on 'time', descending).

    NOTE: the row dicts are mutated in place (values are truncated and
    padded for display).
    """
    output_string = '| {time} | {scan_name} | {imported} | {status} |'
    # Parenthesised single-argument ``print`` behaves identically as a
    # Python 2 statement and a Python 3 function call, unlike the bare
    # ``print x`` statements this replaces (a SyntaxError under py3).
    print('-' * 110)
    print(output_string.format(time='Time'.ljust(19), scan_name='Scan Name'.ljust(60), imported='Imported'.ljust(8), status='Status'.ljust(10)))
    print('-' * 110)
    for scan in sorted(scan_list, key=lambda k: k['time'], reverse=True):
        scan['imported'] = scan['imported'].ljust(8)
        # encode() keeps Python 2's print from choking on non-ASCII names
        scan['scan_name'] = scan['scan_name'][:60].ljust(60).encode('utf-8')
        scan['time'] = scan['time'][:19].ljust(19)
        scan['status'] = scan['status'][:10].ljust(10)
        print(output_string.format(**scan))
    print('{}\n'.format('-' * 110))
    return 0
class vulnWhispererNessus(vulnWhispererBase):
@ -309,63 +363,62 @@ class vulnWhispererNessus(vulnWhispererBase):
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
profile='nessus'
profile='nessus',
scan_filter=None,
days=None,
list_scans=None,
):
self.CONFIG_SECTION=profile
super(vulnWhispererNessus, self).__init__(config=config)
super(vulnWhispererNessus, self).__init__(config=config, verbose=verbose, debug=debug, scan_filter=scan_filter, days=days)
self.logger = logging.getLogger('vulnWhispererNessus')
if debug:
self.logger.setLevel(logging.DEBUG)
self.port = int(self.config.get(self.CONFIG_SECTION, 'port'))
self.logger = logging.getLogger('vulnWhisperer{}'.format(self.CONFIG_SECTION))
if not verbose:
verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
self.develop = True
self.purge = purge
self.access_key = None
self.secret_key = None
self.list_scans = list_scans
try:
self.nessus_port = self.config.get(self.CONFIG_SECTION, 'port')
self.nessus_trash = self.config.getbool(self.CONFIG_SECTION, 'trash')
if config is not None:
try:
self.nessus_port = self.config.get(self.CONFIG_SECTION, 'port')
self.access_key = self.config.get(self.CONFIG_SECTION,'access_key')
self.secret_key = self.config.get(self.CONFIG_SECTION,'secret_key')
except:
self.access_key = None
self.secret_key = None
self.nessus_trash = self.config.getbool(self.CONFIG_SECTION,
'trash')
try:
self.access_key = self.config.get(self.CONFIG_SECTION,'access_key')
self.secret_key = self.config.get(self.CONFIG_SECTION,'secret_key')
except:
pass
try:
self.logger.info('Attempting to connect to {}...'.format(self.CONFIG_SECTION))
self.nessus = \
NessusAPI(hostname=self.hostname,
port=self.nessus_port,
username=self.username,
password=self.password,
profile=self.CONFIG_SECTION,
access_key=self.access_key,
secret_key=self.secret_key
)
self.nessus_connect = True
self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
port=str(self.nessus_port)))
except Exception as e:
self.logger.error('Exception: {}'.format(str(e)))
raise Exception(
'Could not connect to {} -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
self.CONFIG_SECTION,
config=self.config.config_in,
e=e))
try:
self.logger.info('Attempting to connect to {}...'.format(self.CONFIG_SECTION))
self.nessus = \
NessusAPI(hostname=self.hostname,
port=self.nessus_port,
username=self.username,
password=self.password,
profile=self.CONFIG_SECTION,
access_key=self.access_key,
secret_key=self.secret_key,
verbose=verbose,
)
self.nessus_connect = True
self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
port=str(self.nessus_port)))
except Exception as e:
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
sys.exit(1)
self.logger.error('Exception: {}'.format(str(e)))
raise Exception(
'Could not connect to {} -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
self.CONFIG_SECTION,
config=self.config.config_in,
e=e))
except Exception as e:
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
sys.exit(1)
@ -379,6 +432,8 @@ class vulnWhispererNessus(vulnWhispererBase):
self.logger.info('Gathering all scan data... this may take a while...')
scan_records = []
if self.days:
earliest_time = int((datetime.now() - timedelta(days=self.days)).strftime("%s"))
for s in scans:
if s:
record = {}
@ -401,6 +456,8 @@ class vulnWhispererNessus(vulnWhispererBase):
int(record["last_modification_date"]),
local_tz=self.nessus.tz_conv(record["timezone"]),
)
if self.days and record["norm_time"] < earliest_time:
continue
scan_records.append(record.copy())
except Exception as e:
# Generates error each time nonetype is encountered.
@ -417,10 +474,28 @@ class vulnWhispererNessus(vulnWhispererBase):
self.exit_code += 1
return self.exit_code
scan_data = self.nessus.scans
scan_data = self.nessus.get_scans(self.days)
folders = scan_data['folders']
scans = scan_data['scans'] if scan_data['scans'] else []
all_scans = self.scan_count(scans)
if self.scan_filter:
self.logger.info('Filtering scans that match "{}"'.format(self.scan_filter))
all_scans = [
x for x in all_scans
if re.findall(self.scan_filter, x["scan_name"], re.IGNORECASE)
]
if self.list_scans:
scan_list = []
for scan in all_scans:
scan['imported'] = 'Yes' if scan['uuid'] in self.uuids else 'No'
scan['time'] = datetime.utcfromtimestamp(scan['norm_time']).isoformat()
scan_list.append(scan)
print 'Available {} scans:'.format(self.CONFIG_SECTION)
self.print_available_scans(scan_list)
return 0
if self.uuids:
scan_list = [
scan for scan in all_scans
@ -429,6 +504,7 @@ class vulnWhispererNessus(vulnWhispererBase):
]
else:
scan_list = all_scans
self.logger.info(
"Identified {new} scans to be processed".format(new=len(scan_list))
)
@ -524,6 +600,7 @@ class vulnWhispererNessus(vulnWhispererBase):
vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_source'] = self.CONFIG_SECTION
vuln_ready['scan_time'] = norm_time
vuln_ready['vendor'] = 'Tenable'
vuln_ready = self.common_normalise(vuln_ready)
@ -553,29 +630,32 @@ class vulnWhispererNessus(vulnWhispererBase):
return self.exit_code
class vulnWhispererQualys(vulnWhispererBase):
class vulnWhispererQualysWAS(vulnWhispererBase):
CONFIG_SECTION = 'qualys_web'
CONFIG_SECTION = 'qualys_was'
def __init__(
self,
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
scan_filter=None,
days=None,
list_scans=None,
):
super(vulnWhispererQualys, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererQualys')
if debug:
self.logger.setLevel(logging.DEBUG)
super(vulnWhispererQualysWAS, self).__init__(config=config, verbose=verbose, debug=debug, scan_filter=scan_filter, days=days)
self.logger = logging.getLogger('vulnWhispererQualysWAS')
if not verbose:
verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
self.qualys_scan = qualysScanReport(config=config)
self.latest_scans = self.qualys_scan.qw.get_all_scans()
self.latest_scans = self.qualys_scan.qw.get_all_scans(days=self.days)
self.directory_check()
self.scans_to_process = None
self.list_scans = list_scans
def whisper_reports(self,
report_id=None,
@ -593,7 +673,7 @@ class vulnWhispererQualys(vulnWhispererBase):
try:
if 'Z' in launched_date:
launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
report_name = 'qualys_web_' + str(report_id) \
report_name = 'qualys_was_' + str(report_id) \
+ '_{last_updated}'.format(last_updated=launched_date) \
+ '.{extension}'.format(extension=output_format)
@ -636,6 +716,7 @@ class vulnWhispererQualys(vulnWhispererBase):
vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_source'] = self.CONFIG_SECTION
vuln_ready['scan_time'] = launched_date
vuln_ready['vendor'] = 'Qualys'
vuln_ready = self.common_normalise(vuln_ready)
@ -686,6 +767,24 @@ class vulnWhispererQualys(vulnWhispererBase):
def process_web_assets(self):
counter = 0
if self.scan_filter:
self.logger.info('Filtering scans that match "{}"'.format(self.scan_filter))
self.latest_scans = self.latest_scans.loc[
self.latest_scans["name"].str.contains(self.scan_filter, case=False)
]
if self.list_scans:
if self.uuids and len(self.latest_scans) > 0:
self.latest_scans.loc[self.latest_scans['id'].isin(self.uuids), 'imported'] = 'Yes'
else:
self.latest_scans['imported'] = 'No'
self.latest_scans['imported'].fillna('No', inplace=True)
self.latest_scans.rename(columns={'launchedDate': 'time', 'name': 'scan_name'}, inplace=True)
print 'Available {} scans:'.format(self.CONFIG_SECTION)
self.print_available_scans(self.latest_scans[['time', 'scan_name', 'imported', 'status']].to_dict(orient='records'))
return 0
self.identify_scans_to_process()
if self.scans_to_process.shape[0]:
for app in self.scans_to_process.iterrows():
@ -704,53 +803,30 @@ class vulnWhispererQualys(vulnWhispererBase):
class vulnWhispererOpenVAS(vulnWhispererBase):
CONFIG_SECTION = 'openvas'
# Maps OpenVAS CSV report headers to the normalised field names used by
# the rest of the pipeline (applied via DataFrame.rename).
COLUMN_MAPPING = {'IP': 'asset',
'Hostname': 'hostname',
'Port': 'port',
'Port Protocol': 'protocol',
'CVEs': 'cve',
'CVSS': 'cvss',
'Severity': 'severity',
'Solution Type': 'category',
'NVT Name': 'plugin_name',
'Summary': 'synopsis',
'Specific Result': 'plugin_output',
'NVT OID': 'nvt_oid',
'Task ID': 'task_id',
'Task Name': 'scan_name',
'Timestamp': 'timestamp',
'Result ID': 'result_id',
'Impact': 'description',
'Solution': 'solution',
'Affected Software/OS': 'affected_software',
'Vulnerability Insight': 'vulnerability_insight',
'Vulnerability Detection Method': 'vulnerability_detection_method',
'Product Detection Result': 'product_detection_result',
'BIDs': 'bids',
'CERTs': 'certs',
'Other References': 'see_also'
}
def __init__(
self,
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
scan_filter=None,
days=None,
list_scans=None,
):
super(vulnWhispererOpenVAS, self).__init__(config=config)
super(vulnWhispererOpenVAS, self).__init__(config=config, verbose=verbose, debug=debug, scan_filter=scan_filter, days=days)
self.logger = logging.getLogger('vulnWhispererOpenVAS')
if debug:
self.logger.setLevel(logging.DEBUG)
if not verbose:
verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
self.directory_check()
self.port = int(self.config.get(self.CONFIG_SECTION, 'port'))
self.develop = True
self.purge = purge
self.scans_to_process = None
self.list_scans = list_scans
self.openvas_api = OpenVAS_API(hostname=self.hostname,
port=self.port,
username=self.username,
@ -793,15 +869,11 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
# Map and transform fields
vuln_ready = self.openvas_api.normalise(vuln_ready)
# TODO move the following to the openvas_api.transform_values
vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
vuln_ready.port = vuln_ready.port.replace('', 0).astype(int)
# Set common fields
# vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_id'] = report_id
vuln_ready['scan_time'] = launched_date
vuln_ready['scan_source'] = self.CONFIG_SECTION
vuln_ready['vendor'] = 'Greenbone'
vuln_ready = self.common_normalise(vuln_ready)
@ -829,14 +901,33 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
def identify_scans_to_process(self):
if self.uuids:
self.scans_to_process = self.openvas_api.openvas_reports[
~self.openvas_api.openvas_reports.report_ids.isin(self.uuids)]
else:
self.scans_to_process = self.openvas_api.openvas_reports
self.scans_to_process = self.scans_to_process[
~self.scans_to_process.report_ids.isin(self.uuids)]
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))
def process_openvas_scans(self):
counter = 0
self.scans_to_process = self.openvas_api.openvas_reports.copy()
if self.scan_filter:
self.logger.info('Filtering scans that match "{}"'.format(self.scan_filter))
self.scans_to_process = self.scans_to_process.loc[
self.scans_to_process["task"].str.contains(self.scan_filter, case=False)
]
if self.list_scans:
if self.uuids and len(self.scans_to_process) > 0:
self.scans_to_process.loc[self.scans_to_process['report_ids'].isin(self.uuids), 'imported'] = 'Yes'
else:
self.scans_to_process['imported'] = 'No'
self.scans_to_process['imported'].fillna('No', inplace=True)
self.scans_to_process['time'] = pd.to_datetime(self.scans_to_process['epoch'], unit='s').astype(str)
self.scans_to_process.rename(columns={'task': 'scan_name'}, inplace=True)
print 'Available {} scans:'.format(self.CONFIG_SECTION)
self.print_available_scans(self.scans_to_process[['time', 'scan_name', 'imported', 'status']].to_dict(orient='records'))
return self.exit_code
self.identify_scans_to_process()
if self.scans_to_process.shape[0]:
for scan in self.scans_to_process.iterrows():
@ -852,29 +943,33 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
return self.exit_code
class vulnWhispererQualysVuln(vulnWhispererBase):
class vulnWhispererQualysVM(vulnWhispererBase):
CONFIG_SECTION = 'qualys_vuln'
CONFIG_SECTION = 'qualys_vm'
def __init__(
self,
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
scan_filter=None,
days=None,
list_scans=None,
):
super(vulnWhispererQualysVuln, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererQualysVuln')
if debug:
self.logger.setLevel(logging.DEBUG)
super(vulnWhispererQualysVM, self).__init__(config=config, verbose=verbose, debug=debug, scan_filter=scan_filter, days=days)
self.logger = logging.getLogger('vulnWhispererQualysVM')
if not verbose:
verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
self.qualys_scan = qualysVulnScan(config=config)
self.directory_check()
self.scans_to_process = None
self.list_scans = list_scans
self.latest_scans = self.qualys_scan.qw.get_all_scans(days=self.days)
def whisper_reports(self,
report_id=None,
@ -885,7 +980,7 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
cleanup=True):
if 'Z' in launched_date:
launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
report_name = 'qualys_vuln_' + report_id.replace('/','_') \
report_name = 'qualys_vm_' + report_id.replace('/','_') \
+ '_{last_updated}'.format(last_updated=launched_date) \
+ '.{extension}'.format(extension=output_format)
@ -911,31 +1006,35 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
else:
try:
self.logger.info('Processing {}: {}'.format(report_id, scan_name.encode('utf8')))
self.logger.info('Processing {} ({})'.format(scan_name.encode('utf8'), report_id))
vuln_ready = self.qualys_scan.process_data(scan_id=report_id)
# Map and transform fields
vuln_ready = self.qualys_scan.normalise(vuln_ready)
# Set common fields
vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_id'] = report_id
vuln_ready['scan_time'] = launched_date
vuln_ready['scan_source'] = self.CONFIG_SECTION
if len(vuln_ready) != 0:
# Map and transform fields
vuln_ready = self.qualys_scan.normalise(vuln_ready)
vuln_ready = self.common_normalise(vuln_ready)
# Set common fields
vuln_ready['scan_name'] = scan_name.encode('utf8')
vuln_ready['scan_id'] = report_id
vuln_ready['scan_time'] = launched_date
vuln_ready['scan_source'] = self.CONFIG_SECTION
vuln_ready['vendor'] = 'Qualys'
vuln_ready = self.common_normalise(vuln_ready)
except Exception as e:
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
self.exit_code += 1
return self.exit_code
if output_format == 'json':
vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True)
elif output_format == 'csv':
vuln_ready.to_csv(relative_path_name + '.tmp', index=False, header=True)
os.rename(relative_path_name + '.tmp', relative_path_name)
self.logger.info('{records} records written to {path} '.format(records=vuln_ready.shape[0],
path=relative_path_name))
if len(vuln_ready) != 0:
if output_format == 'json':
vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True)
elif output_format == 'csv':
vuln_ready.to_csv(relative_path_name + '.tmp', index=False, header=True)
os.rename(relative_path_name + '.tmp', relative_path_name)
self.logger.info('{records} records written to {path} '.format(records=vuln_ready.shape[0],
path=relative_path_name))
record_meta = (
scan_name,
@ -954,20 +1053,36 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
return self.exit_code
def identify_scans_to_process(self):
    """Select finished, not-yet-imported scans and queue them oldest-first.

    Filters ``self.latest_scans`` (prefetched DataFrame of Qualys scans)
    against ``self.uuids`` — the ids already recorded as imported — keeping
    only rows whose status is 'Finished'. The result is stored as a copy in
    ``self.scans_to_process`` (``.copy()`` avoids pandas SettingWithCopy
    warnings on the later in-place sort) and sorted ascending by launch date
    so downloads happen in chronological order.

    NOTE(review): this span was diff residue — the old-side
    ``get_all_scans()`` refetch and the pre-``.copy()`` assignments were
    interleaved with the post-merge lines; resolved to the post-merge version.
    """
    if self.uuids:
        self.scans_to_process = self.latest_scans.loc[
            (~self.latest_scans['id'].isin(self.uuids))
            & (self.latest_scans['status'] == 'Finished')].copy()
    else:
        self.scans_to_process = self.latest_scans.copy()
    self.scans_to_process.sort_values(by='date', inplace=True)
    self.logger.info('Identified {new} scans to be processed'.format(
        new=len(self.scans_to_process)))
def process_vuln_scans(self):
counter = 0
if self.scan_filter:
self.logger.info('Filtering scans that match "{}"'.format(self.scan_filter))
self.latest_scans = self.latest_scans.loc[
self.latest_scans["name"].str.contains(self.scan_filter, case=False)
]
if self.list_scans:
if self.uuids and len(self.latest_scans) > 0:
self.latest_scans.loc[self.latest_scans['id'].isin(self.uuids), 'imported'] = 'Yes'
else:
self.latest_scans['imported'] = 'No'
self.latest_scans['imported'].fillna('No', inplace=True)
self.latest_scans.rename(columns={'date': 'time', 'name': 'scan_name'}, inplace=True)
print 'Available {} scans:'.format(self.CONFIG_SECTION)
self.print_available_scans(self.latest_scans[['time', 'scan_name', 'imported', 'status']].to_dict(orient='records'))
return self.exit_code
self.identify_scans_to_process()
if self.scans_to_process.shape[0]:
for app in self.scans_to_process.iterrows():
@ -993,17 +1108,16 @@ class vulnWhispererJIRA(vulnWhispererBase):
config=None,
db_name='report_tracker.db',
purge=False,
verbose=None,
verbose=False,
debug=False,
username=None,
password=None,
):
super(vulnWhispererJIRA, self).__init__(config=config)
super(vulnWhispererJIRA, self).__init__(config=config, verbose=verbose, debug=debug)
self.logger = logging.getLogger('vulnWhispererJira')
if debug:
self.logger.setLevel(logging.DEBUG)
self.config_path = config
self.config = vwConfig(config)
if not verbose:
verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)
self.host_resolv_cache = {}
self.directory_check()
@ -1135,7 +1249,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
return vulnerabilities
def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv = False):
def parse_qualys_vm_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv = False):
#parsing of the qualys vulnerabilities schema
#parse json
vulnerabilities = []
@ -1156,16 +1270,16 @@ class vulnWhispererJIRA(vulnWhispererBase):
continue
elif data[index]['type'] == 'Practice' or data[index]['type'] == 'Ig':
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['plugin_name']))
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['signature']))
continue
if not vulnerabilities or data[index]['plugin_name'] not in [entry['title'] for entry in vulnerabilities]:
if not vulnerabilities or data[index]['signature'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
#vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
#vulnerability variables
vuln['title'] = data[index]['plugin_name']
vuln['title'] = data[index]['signature']
vuln['diagnosis'] = data[index]['threat'].replace('\\n',' ')
vuln['consequence'] = data[index]['impact'].replace('\\n',' ')
vuln['solution'] = data[index]['solution'].replace('\\n',' ')
@ -1186,7 +1300,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
else:
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == data[index]['plugin_name']:
if vuln['title'] == data[index]['signature']:
vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
return vulnerabilities
@ -1243,8 +1357,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
vulnerabilities = self.parse_nessus_vulnerabilities(fullpath, source, scan_name, min_critical)
#***Qualys VM parsing***
if source == "qualys_vuln":
vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
if source == "qualys_vm":
vulnerabilities = self.parse_qualys_vm_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
#***JIRA sync***
if vulnerabilities:
@ -1273,53 +1387,83 @@ class vulnWhisperer(object):
def __init__(self,
             profile=None,
             username=None,
             password=None,
             verbose=False,
             debug=False,
             config=None,
             source=None,
             scan_filter=None,
             days=None,
             scanname=None,
             list_scans=None):
    """Record the CLI/run options and configure top-level logging.

    Args:
        profile: which scanner backend to run (nessus, tenable, qualys_vm,
            qualys_was, openvas, jira).
        username/password: optional credentials forwarded to backends.
        verbose: INFO-level logging when True.
        debug: DEBUG-level logging when True (overrides verbose).
        config: path to the vulnWhisperer ini config file.
        source: scanner source, used by the JIRA profile only.
        scan_filter: regex restricting which scan names are processed.
        days: look-back window in days for scan retrieval.
        scanname: scan name to report, used by the JIRA profile only.
        list_scans: when True, list matching scans instead of importing.

    NOTE(review): this span was diff residue — ``verbose=None,`` vs
    ``verbose=False,`` and a duplicated ``self.config = config`` from the
    pre-merge side were interleaved; resolved to the post-merge version.
    """
    self.logger = logging.getLogger('vulnWhisperer')
    self.logger.setLevel(logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING)

    self.profile = profile
    self.username = username
    self.password = password
    self.verbose = verbose
    self.debug = debug
    self.config = config
    self.source = source
    self.scan_filter = scan_filter
    self.list_scans = list_scans
    self.days = days
    self.scanname = scanname
    self.exit_code = 0
def whisper_vulnerabilities(self):
self.logger.setLevel(logging.INFO)
self.logger.info('Running {} framwork'.format(self.profile))
if self.profile == 'nessus':
vw = vulnWhispererNessus(config=self.config,
profile=self.profile)
profile=self.profile,
scan_filter=self.scan_filter,
days=self.days,
verbose=self.verbose,
debug=self.debug,
list_scans=self.list_scans)
self.exit_code += vw.whisper_nessus()
elif self.profile == 'qualys_web':
vw = vulnWhispererQualys(config=self.config)
elif self.profile == 'qualys_was':
vw = vulnWhispererQualysWAS(config=self.config,
scan_filter=self.scan_filter,
days=self.days,
verbose=self.verbose,
debug=self.debug,
list_scans=self.list_scans)
self.exit_code += vw.process_web_assets()
elif self.profile == 'openvas':
vw_openvas = vulnWhispererOpenVAS(config=self.config)
vw_openvas = vulnWhispererOpenVAS(config=self.config,
scan_filter=self.scan_filter,
days=self.days,
verbose=self.verbose,
debug=self.debug,
list_scans=self.list_scans)
self.exit_code += vw_openvas.process_openvas_scans()
elif self.profile == 'tenable':
vw = vulnWhispererNessus(config=self.config,
profile=self.profile)
profile=self.profile,
scan_filter=self.scan_filter,
days=self.days,
verbose=self.verbose,
debug=self.debug,
list_scans=self.list_scans)
self.exit_code += vw.whisper_nessus()
elif self.profile == 'qualys_vuln':
vw = vulnWhispererQualysVuln(config=self.config)
elif self.profile == 'qualys_vm':
vw = vulnWhispererQualysVM(config=self.config,
scan_filter=self.scan_filter,
days=self.days,
verbose=self.verbose,
debug=self.debug,
list_scans=self.list_scans)
self.exit_code += vw.process_vuln_scans()
elif self.profile == 'jira':
#first we check config fields are created, otherwise we create them
vw = vulnWhispererJIRA(config=self.config)
vw = vulnWhispererJIRA(config=self.config,
verbose=self.verbose,
debug=self.debug)
if not (self.source and self.scanname):
self.logger.info('No source/scan_name selected, all enabled scans will be synced')
success = vw.sync_all()