Compare commits
36 Commits
SHA1
53d70ab0db
54fa0ace8a
273b17009a
ff5f4cb331
61539afa4d
742a645190
51234a569f
5dad1ceb10
3db931f3eb
649ecd431b
13a52a3e08
8403b35199
68519d5648
73342fdeb8
183e3b3e72
e25141261c
8743b59147
c0e7ab9863
97de805e0c
4974be02b4
7fe2f9a5c1
f4634d03bd
e1ca9fadcd
adb7700300
ced0d4c2fc
f483c76638
f65116aec8
bdcb6de4b2
af8e27d075
accf926ff7
acf387bd0e
ab7a91e020
a1a0d6b757
2fb089805c
6cf2a94431
162636e60f
@@ -1,4 +1,4 @@
-FROM centos:latest
+FROM centos:7
 
 MAINTAINER Justin Henderson justin@hasecuritysolutions.com
@@ -30,7 +30,7 @@ Currently Supports
 
 ### Reporting Frameworks
 
-- [X] [ELK](https://www.elastic.co/elk-stack)
+- [X] [ELK (**v6**/**v7**)](https://www.elastic.co/elk-stack)
 - [X] [Jira](https://www.atlassian.com/software/jira)
 - [ ] [Splunk](https://www.splunk.com/)
@@ -83,6 +83,7 @@ def main():
         enabled_sections = config.get_sections_with_attribute('enabled')
 
         for section in enabled_sections:
+            try:
                 vw = vulnWhisperer(config=args.config,
                                    profile=section,
                                    verbose=args.verbose,
@@ -91,6 +92,8 @@ def main():
                                    source=args.source,
                                    scanname=args.scanname)
                 exit_code += vw.whisper_vulnerabilities()
+            except Exception as e:
+                logger.error("VulnWhisperer was unable to perform the processing on '{}'".format(section))
     else:
         logger.info('Running vulnwhisperer for section {}'.format(args.section))
         vw = vulnWhisperer(config=args.config,
@@ -2,6 +2,8 @@
 enabled=true
 hostname=localhost
 port=8834
+access_key=
+secret_key=
 username=nessus_username
 password=nessus_password
 write_path=/opt/VulnWhisperer/data/nessus/
@@ -13,6 +15,8 @@ verbose=true
 enabled=true
 hostname=cloud.tenable.com
 port=443
+access_key=
+secret_key=
 username=tenable.io_username
 password=tenable.io_password
 write_path=/opt/VulnWhisperer/data/tenable/
@@ -37,7 +41,7 @@ max_retries = 10
 template_id = 126024
 
 [qualys_vuln]
-#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
+#Reference https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf to find your API
 enabled = true
 hostname = qualysapi.qg2.apps.qualys.com
 username = exampleuser
@@ -2,10 +2,12 @@
 enabled=true
 hostname=nessus
 port=443
+access_key=
+secret_key=
 username=nessus_username
 password=nessus_password
-write_path=/opt/VulnWhisperer/data/nessus/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/nessus/
+db_path=/tmp/VulnWhisperer/data/database
 trash=false
 verbose=true
 
@@ -13,10 +15,12 @@ verbose=true
 enabled=true
 hostname=tenable
 port=443
+access_key=
+secret_key=
 username=tenable.io_username
 password=tenable.io_password
-write_path=/opt/VulnWhisperer/data/tenable/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/tenable/
+db_path=/tmp/VulnWhisperer/data/database
 trash=false
 verbose=true
 
@@ -26,8 +30,8 @@ enabled = false
 hostname = qualys_web
 username = exampleuser
 password = examplepass
-write_path=/opt/VulnWhisperer/data/qualys_web/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/qualys_web/
+db_path=/tmp/VulnWhisperer/data/database
 verbose=true
 
 # Set the maximum number of retries each connection should attempt.
@@ -42,8 +46,8 @@ enabled = true
 hostname = qualys_vuln
 username = exampleuser
 password = examplepass
-write_path=/opt/VulnWhisperer/data/qualys_vuln/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/qualys_vuln/
+db_path=/tmp/VulnWhisperer/data/database
 verbose=true
 
 [detectify]
@@ -54,8 +58,8 @@ hostname = detectify
 username = exampleuser
 #password variable used as secretKey
 password = examplepass
-write_path =/opt/VulnWhisperer/data/detectify/
-db_path = /opt/VulnWhisperer/data/database
+write_path =/tmp/VulnWhisperer/data/detectify/
+db_path = /tmp/VulnWhisperer/data/database
 verbose = true
 
 [openvas]
@@ -64,8 +68,8 @@ hostname = openvas
 port = 4000
 username = exampleuser
 password = examplepass
-write_path=/opt/VulnWhisperer/data/openvas/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/openvas/
+db_path=/tmp/VulnWhisperer/data/database
 verbose=true
 
 [jira]
@@ -73,8 +77,8 @@ enabled = false
 hostname = jira-host
 username = username
 password = password
-write_path = /opt/VulnWhisperer/data/jira/
-db_path = /opt/VulnWhisperer/data/database
+write_path = /tmp/VulnWhisperer/data/jira/
+db_path = /tmp/VulnWhisperer/data/database
 verbose = true
 dns_resolv = False
@@ -1,12 +1,12 @@
 pandas==0.20.3
 setuptools==40.4.3
 pytz==2017.2
-Requests==2.18.3
+Requests==2.20.0
 lxml==4.1.1
 future-fstrings
 bs4
 jira
 bottle
 coloredlogs
-qualysapi>=5.1.0
+qualysapi==6.0.0
 httpretty
231 resources/elk6/logstash-vulnwhisperer-template_elk7.json (Executable file)
@@ -0,0 +1,231 @@
+{
+  "index_patterns": "logstash-vulnwhisperer-*",
+  "mappings": {
+    "properties": {
+      "@timestamp": {
+        "type": "date"
+      },
+      "@version": {
+        "type": "keyword"
+      },
+      "asset": {
+        "type": "text",
+        "norms": false,
+        "fields": {
+          "keyword": {
+            "type": "keyword",
+            "ignore_above": 256
+          }
+        }
+      },
+      "asset_uuid": {
+        "type": "keyword"
+      },
+      "assign_ip": {
+        "type": "ip"
+      },
+      "category": {
+        "type": "keyword"
+      },
+      "cve": {
+        "type": "keyword"
+      },
+      "cvss_base": {
+        "type": "float"
+      },
+      "cvss_temporal_vector": {
+        "type": "keyword"
+      },
+      "cvss_temporal": {
+        "type": "float"
+      },
+      "cvss_vector": {
+        "type": "keyword"
+      },
+      "cvss": {
+        "type": "float"
+      },
+      "cvss3_base": {
+        "type": "float"
+      },
+      "cvss3_temporal_vector": {
+        "type": "keyword"
+      },
+      "cvss3_temporal": {
+        "type": "float"
+      },
+      "cvss3_vector": {
+        "type": "keyword"
+      },
+      "cvss3": {
+        "type": "float"
+      },
+      "description": {
+        "fields": {
+          "keyword": {
+            "ignore_above": 256,
+            "type": "keyword"
+          }
+        },
+        "norms": false,
+        "type": "text"
+      },
+      "dns": {
+        "type": "keyword"
+      },
+      "exploitability": {
+        "fields": {
+          "keyword": {
+            "ignore_above": 256,
+            "type": "keyword"
+          }
+        },
+        "norms": false,
+        "type": "text"
+      },
+      "fqdn": {
+        "type": "keyword"
+      },
+      "geoip": {
+        "dynamic": true,
+        "type": "object",
+        "properties": {
+          "ip": {
+            "type": "ip"
+          },
+          "latitude": {
+            "type": "float"
+          },
+          "location": {
+            "type": "geo_point"
+          },
+          "longitude": {
+            "type": "float"
+          }
+        }
+      },
+      "history_id": {
+        "type": "keyword"
+      },
+      "host": {
+        "type": "keyword"
+      },
+      "host_end": {
+        "type": "date"
+      },
+      "host_start": {
+        "type": "date"
+      },
+      "impact": {
+        "fields": {
+          "keyword": {
+            "ignore_above": 256,
+            "type": "keyword"
+          }
+        },
+        "norms": false,
+        "type": "text"
+      },
+      "ip_status": {
+        "type": "keyword"
+      },
+      "ip": {
+        "type": "ip"
+      },
+      "last_updated": {
+        "type": "date"
+      },
+      "operating_system": {
+        "type": "keyword"
+      },
+      "path": {
+        "type": "keyword"
+      },
+      "pci_vuln": {
+        "type": "keyword"
+      },
+      "plugin_family": {
+        "type": "keyword"
+      },
+      "plugin_id": {
+        "type": "keyword"
+      },
+      "plugin_name": {
+        "type": "keyword"
+      },
+      "plugin_output": {
+        "fields": {
+          "keyword": {
+            "ignore_above": 256,
+            "type": "keyword"
+          }
+        },
+        "norms": false,
+        "type": "text"
+      },
+      "port": {
+        "type": "integer"
+      },
+      "protocol": {
+        "type": "keyword"
+      },
+      "results": {
+        "type": "text"
+      },
+      "risk_number": {
+        "type": "integer"
+      },
+      "risk_score_name": {
+        "type": "keyword"
+      },
+      "risk_score": {
+        "type": "float"
+      },
+      "risk": {
+        "type": "keyword"
+      },
+      "scan_id": {
+        "type": "keyword"
+      },
+      "scan_name": {
+        "type": "keyword"
+      },
+      "scan_reference": {
+        "type": "keyword"
+      },
+      "see_also": {
+        "type": "keyword"
+      },
+      "solution": {
+        "type": "keyword"
+      },
+      "source": {
+        "type": "keyword"
+      },
+      "ssl": {
+        "type": "keyword"
+      },
+      "synopsis": {
+        "type": "keyword"
+      },
+      "system_type": {
+        "type": "keyword"
+      },
+      "tags": {
+        "type": "keyword"
+      },
+      "threat": {
+        "type": "text"
+      },
+      "type": {
+        "type": "keyword"
+      },
+      "vendor_reference": {
+        "type": "keyword"
+      },
+      "vulnerability_state": {
+        "type": "keyword"
+      }
+    }
+  }
+}
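Note: this new file is the Elasticsearch 7 variant of the logstash-vulnwhisperer mapping; `properties` sits directly under `mappings`, as required since Elasticsearch 7 removed mapping types. As a minimal sketch of how it could be installed — assuming an unsecured Elasticsearch 7 node at localhost:9200; the URL, template name, and invocation are illustrative and not part of this changeset:

    import json
    import requests  # already pinned in requirements.txt

    ES_URL = 'http://localhost:9200'  # assumed local node, adjust for your cluster
    TEMPLATE_FILE = 'resources/elk6/logstash-vulnwhisperer-template_elk7.json'

    with open(TEMPLATE_FILE) as f:
        template = json.load(f)

    # Legacy index-template API; per its "index_patterns" the template applies
    # to any future logstash-vulnwhisperer-* index.
    resp = requests.put(ES_URL + '/_template/logstash-vulnwhisperer', json=template)
    resp.raise_for_status()
    print(resp.json())  # {"acknowledged": true} on success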
1 setup.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from __future__ import absolute_import
 from setuptools import setup, find_packages
 
 setup(
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import sys
 import logging
 
@@ -5,7 +6,7 @@ import logging
 if sys.version_info > (3, 0):
     import configparser as cp
 else:
-    import ConfigParser as cp
+    import six.moves.configparser as cp
 
 
 class vwConfig(object):
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import json
 import logging
 import sys
@@ -24,15 +25,19 @@ class NessusAPI(object):
     EXPORT_STATUS = EXPORT + '/{file_id}/status'
     EXPORT_HISTORY = EXPORT + '?history_id={history_id}'
 
-    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
+    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True, profile=None, access_key=None, secret_key=None):
         self.logger = logging.getLogger('NessusAPI')
         if verbose:
             self.logger.setLevel(logging.DEBUG)
-        if username is None or password is None:
-            raise Exception('ERROR: Missing username or password.')
+        if not all((username, password)) and not all((access_key, secret_key)):
+            raise Exception('ERROR: Missing username, password or API keys.')
 
+        self.profile = profile
         self.user = username
         self.password = password
+        self.api_keys = False
+        self.access_key = access_key
+        self.secret_key = secret_key
         self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
         self.verbose = verbose
@@ -52,7 +57,13 @@ class NessusAPI(object):
             'X-Cookie': None
         }
 
-        self.login()
+        if all((self.access_key, self.secret_key)):
+            self.logger.debug('Using {} API keys'.format(self.profile))
+            self.api_keys = True
+            self.session.headers['X-ApiKeys'] = 'accessKey={}; secretKey={}'.format(self.access_key, self.secret_key)
+        else:
+            self.login()
 
         self.scans = self.get_scans()
        self.scan_ids = self.get_scan_ids()
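Taken together, the changes above let NessusAPI authenticate either with the existing username/password session login or with API keys. A hypothetical usage sketch (hostnames and keys are placeholders; the constructor arguments are the ones introduced in this diff):

    from vulnwhisp.frameworks.nessus import NessusAPI

    # Classic flow: login() is called and a session cookie ('X-Cookie')
    # is attached to subsequent requests.
    nessus = NessusAPI(hostname='nessus', port=443,
                       username='nessus_username', password='nessus_password')

    # Key-based flow: login() is skipped and every request instead carries
    # the 'X-ApiKeys: accessKey=...; secretKey=...' header; the keys map to
    # the new access_key=/secret_key= options in the example configs.
    tenable = NessusAPI(hostname='cloud.tenable.com', port=443, profile='tenable',
                        access_key='ACCESS_KEY', secret_key='SECRET_KEY')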
@@ -78,8 +89,10 @@ class NessusAPI(object):
             if url == self.base + self.SESSION:
                 break
             try:
-                self.login()
                 timeout += 1
+                if self.api_keys:
+                    continue
+                self.login()
                 self.logger.info('Token refreshed')
             except Exception as e:
                 self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
@@ -114,7 +127,7 @@ class NessusAPI(object):
         data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json_output=True)
         return data['history']
 
-    def download_scan(self, scan_id=None, history=None, export_format="", profile=""):
+    def download_scan(self, scan_id=None, history=None, export_format=""):
         running = True
         counter = 0
@@ -127,6 +140,7 @@ class NessusAPI(object):
         req = self.request(query, data=json.dumps(data), method='POST', json_output=True)
         try:
             file_id = req['file']
+            if self.profile == 'nessus':
                 token_id = req['token'] if 'token' in req else req['temp_token']
         except Exception as e:
             self.logger.error('{}'.format(str(e)))
@@ -143,7 +157,7 @@ class NessusAPI(object):
             if counter % 60 == 0:
-                self.logger.info("Completed: {}".format(counter))
+                self.logger.info("Done: {}".format(counter))
-        if profile == 'tenable':
+        if self.profile == 'tenable' or self.api_keys:
             content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
         else:
             content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)
@@ -152,7 +166,7 @@ class NessusAPI(object):
     def get_utc_from_local(self, date_time, local_tz=None, epoch=True):
         date_time = datetime.fromtimestamp(date_time)
         if local_tz is None:
-            local_tz = pytz.timezone('US/Central')
+            local_tz = pytz.timezone('UTC')
         else:
             local_tz = pytz.timezone(local_tz)
         local_time = local_tz.normalize(local_tz.localize(date_time))
@@ -1,5 +1,6 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import
 __author__ = 'Austin Taylor'
 
 import datetime as dt
@@ -1,5 +1,6 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import
 __author__ = 'Nathan Young'
 
 import logging
@@ -18,9 +19,9 @@ class qualysWhisperAPI(object):
         self.logger = logging.getLogger('qualysWhisperAPI')
         self.config = config
         try:
-            self.qgc = qualysapi.connect(config, 'qualys_vuln')
+            self.qgc = qualysapi.connect(config_file=config, section='qualys_vuln')
             # Fail early if we can't make a request or auth is incorrect
-            self.qgc.request('about.php')
+            # self.qgc.request('about.php')
             self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
         except Exception as e:
             self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
@@ -1,5 +1,8 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from six.moves import range
+from functools import reduce
 __author__ = 'Austin Taylor'
 
 from lxml import objectify
@@ -14,24 +17,16 @@ import os
 import csv
 import logging
 import dateutil.parser as dp
 csv.field_size_limit(sys.maxsize)
 
 
 class qualysWhisperAPI(object):
     COUNT_WEBAPP = '/count/was/webapp'
     COUNT_WASSCAN = '/count/was/wasscan'
     DELETE_REPORT = '/delete/was/report/{report_id}'
     GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
     QPS_REST_3 = '/qps/rest/3.0'
     REPORT_DETAILS = '/get/was/report/{report_id}'
     REPORT_STATUS = '/status/was/report/{report_id}'
     REPORT_CREATE = '/create/was/report'
     REPORT_DOWNLOAD = '/download/was/report/{report_id}'
     SCAN_DETAILS = '/get/was/wasscan/{scan_id}'
     SCAN_DOWNLOAD = '/download/was/wasscan/{scan_id}'
     SEARCH_REPORTS = '/search/was/report'
     SEARCH_WEB_APPS = '/search/was/webapp'
     SEARCH_WAS_SCAN = '/search/was/wasscan'
     VERSION = '/qps/rest/portal/version'
 
     def __init__(self, config=None):
         self.logger = logging.getLogger('qualysWhisperAPI')
@@ -41,10 +36,6 @@ class qualysWhisperAPI(object):
             self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
         except Exception as e:
             self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
-        self.headers = {
-            #"content-type": "text/xml"}
-            "Accept" : "application/json",
-            "Content-Type": "application/json"}
         self.config_parse = qcconf.QualysConnectConfig(config, 'qualys_web')
         try:
             self.template_id = self.config_parse.get_template_id()
@@ -69,14 +60,8 @@ class qualysWhisperAPI(object):
 
     def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
         report_xml = E.ServiceRequest(
-            E.filters(
-                E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status
-                ),
-            ),
-            E.preferences(
-                E.startFromOffset(str(offset)),
-                E.limitResults(str(limit))
-            ),
+            E.filters(E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status)),
+            E.preferences(E.startFromOffset(str(offset)), E.limitResults(str(limit))),
         )
         return report_xml
@@ -115,8 +100,10 @@ class qualysWhisperAPI(object):
             if i % limit == 0:
                 if (total - i) < limit:
                     qualys_api_limit = total - i
-                self.logger.info('Making a request with a limit of {} at offset {}'.format((str(qualys_api_limit)), str(i + 1)))
-                scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
+                self.logger.info('Making a request with a limit of {} at offset {}'
+                                 .format((str(qualys_api_limit)), str(i + 1)))
+                scan_info = self.get_scan_info(
+                    limit=qualys_api_limit, offset=i + 1, status=status)
                 _records.append(scan_info)
         self.logger.debug('Converting XML to DataFrame')
         dataframes = [self.xml_parser(xml) for xml in _records]
@@ -133,7 +120,8 @@ class qualysWhisperAPI(object):
         return self.qgc.request(self.REPORT_STATUS.format(report_id=report_id))
 
     def download_report(self, report_id):
-        return self.qgc.request(self.REPORT_DOWNLOAD.format(report_id=report_id))
+        return self.qgc.request(
+            self.REPORT_DOWNLOAD.format(report_id=report_id), http_method='get')
 
     def generate_scan_report_XML(self, scan_id):
         """Generates a CSV report for an asset based on template defined in .ini file"""
@@ -145,20 +133,8 @@ class qualysWhisperAPI(object):
                     E.format('CSV'),
                     #type is not needed, as the template already has it
                     E.type('WAS_SCAN_REPORT'),
-                    E.template(
-                        E.id(self.template_id)
-                    ),
-                    E.config(
-                        E.scanReport(
-                            E.target(
-                                E.scans(
-                                    E.WasScan(
-                                        E.id(scan_id)
-                                    )
-                                ),
-                            ),
-                        ),
-                    )
+                    E.template(E.id(self.template_id)),
+                    E.config(E.scanReport(E.target(E.scans(E.WasScan(E.id(scan_id))))))
                 )
             )
         )
@@ -175,95 +151,14 @@ class qualysWhisperAPI(object):
     def delete_report(self, report_id):
         return self.qgc.request(self.DELETE_REPORT.format(report_id=report_id))
 
 
-class qualysReportFields:
-    CATEGORIES = ['VULNERABILITY',
-                  'SENSITIVECONTENT',
-                  'INFORMATION_GATHERED']
-
-    # URL Vulnerability Information
-
-    VULN_BLOCK = [
-        CATEGORIES[0],
-        'ID',
-        'QID',
-        'Url',
-        'Param',
-        'Function',
-        'Form Entry Point',
-        'Access Path',
-        'Authentication',
-        'Ajax Request',
-        'Ajax Request ID',
-        'Ignored',
-        'Ignore Reason',
-        'Ignore Date',
-        'Ignore User',
-        'Ignore Comments',
-        'First Time Detected',
-        'Last Time Detected',
-        'Last Time Tested',
-        'Times Detected',
-        'Payload #1',
-        'Request Method #1',
-        'Request URL #1',
-        'Request Headers #1',
-        'Response #1',
-        'Evidence #1',
-    ]
-
-    INFO_HEADER = [
-        'Vulnerability Category',
-        'ID',
-        'QID',
-        'Response #1',
-        'Last Time Detected',
-    ]
-    INFO_BLOCK = [
-        CATEGORIES[2],
-        'ID',
-        'QID',
-        'Results',
-        'Detection Date',
-    ]
-
-    QID_HEADER = [
-        'QID',
-        'Id',
-        'Title',
-        'Category',
-        'Severity Level',
-        'Groups',
-        'OWASP',
-        'WASC',
-        'CWE',
-        'CVSS Base',
-        'CVSS Temporal',
-        'Description',
-        'Impact',
-        'Solution',
-    ]
-    GROUP_HEADER = ['GROUP', 'Name', 'Category']
-    OWASP_HEADER = ['OWASP', 'Code', 'Name']
-    WASC_HEADER = ['WASC', 'Code', 'Name']
-    SCAN_META = ['Web Application Name', 'URL', 'Owner', 'Scope', 'Operating System']
-    CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']
-
-
 class qualysUtils:
     def __init__(self):
         self.logger = logging.getLogger('qualysUtils')
 
-    def grab_section(
-            self,
-            report,
-            section,
-            end=[],
-            pop_last=False,
-    ):
+    def grab_section(self, report, section, end=[], pop_last=False):
         temp_list = []
         max_col_count = 0
-        with open(report, 'rb') as csvfile:
+        with open(report, 'rt') as csvfile:
             q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
             for line in q_report:
                 if set(line) == set(section):
@@ -289,44 +184,53 @@ class qualysUtils:
         return _data
 
 class qualysScanReport:
-    # URL Vulnerability Information
-    WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
-    WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')
+    CATEGORIES = ['VULNERABILITY', 'SENSITIVECONTENT', 'INFORMATION_GATHERED']
 
-    WEB_SCAN_VULN_HEADER = list(WEB_SCAN_VULN_BLOCK)
-    WEB_SCAN_VULN_HEADER[WEB_SCAN_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
-        'Vulnerability Category'
+    WEB_SCAN_BLOCK = [
+        "ID", "Detection ID", "QID", "Url", "Param/Cookie", "Function",
+        "Form Entry Point", "Access Path", "Authentication", "Ajax Request",
+        "Ajax Request ID", "Ignored", "Ignore Reason", "Ignore Date", "Ignore User",
+        "Ignore Comments", "Detection Date", "Payload #1", "Request Method #1",
+        "Request URL #1", "Request Headers #1", "Response #1", "Evidence #1",
+        "Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+        "Info#1", "CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector",
+        "Request Body #1"
+    ]
+    WEB_SCAN_VULN_BLOCK = [CATEGORIES[0]] + WEB_SCAN_BLOCK
+    WEB_SCAN_SENSITIVE_BLOCK = [CATEGORIES[1]] + WEB_SCAN_BLOCK
 
-    WEB_SCAN_SENSITIVE_HEADER = list(WEB_SCAN_VULN_HEADER)
-    WEB_SCAN_SENSITIVE_HEADER.insert(WEB_SCAN_SENSITIVE_HEADER.index('Url'
-                                     ), 'Content')
+    WEB_SCAN_HEADER = ["Vulnerability Category"] + WEB_SCAN_BLOCK
+    WEB_SCAN_HEADER[WEB_SCAN_HEADER.index("Detection Date")] = "Last Time Detected"
 
-    WEB_SCAN_SENSITIVE_BLOCK = list(WEB_SCAN_SENSITIVE_HEADER)
-    WEB_SCAN_SENSITIVE_BLOCK.insert(WEB_SCAN_SENSITIVE_BLOCK.index('QID'), 'Detection ID')
-    WEB_SCAN_SENSITIVE_BLOCK[WEB_SCAN_SENSITIVE_BLOCK.index('Vulnerability Category'
-                                                            )] = qualysReportFields.CATEGORIES[1]
-
-    WEB_SCAN_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
-    WEB_SCAN_INFO_HEADER.insert(WEB_SCAN_INFO_HEADER.index('QID'), 'Detection ID')
+    WEB_SCAN_INFO_BLOCK = [
+        "INFORMATION_GATHERED", "ID", "Detection ID", "QID", "Results", "Detection Date",
+        "Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+        "Info#1"
+    ]
 
-    WEB_SCAN_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
-    WEB_SCAN_INFO_BLOCK.insert(WEB_SCAN_INFO_BLOCK.index('QID'), 'Detection ID')
+    WEB_SCAN_INFO_HEADER = [
+        "Vulnerability Category", "ID", "Detection ID", "QID", "Results", "Last Time Detected",
+        "Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+        "Info#1"
+    ]
 
-    QID_HEADER = list(qualysReportFields.QID_HEADER)
-    GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
-    OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
-    WASC_HEADER = list(qualysReportFields.WASC_HEADER)
-    SCAN_META = list(qualysReportFields.SCAN_META)
-    CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)
+    QID_HEADER = [
+        "QID", "Id", "Title", "Category", "Severity Level", "Groups", "OWASP", "WASC",
+        "CWE", "CVSS Base", "CVSS Temporal", "Description", "Impact", "Solution",
+        "CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector"
+    ]
+    GROUP_HEADER = ['GROUP', 'Name', 'Category']
+    OWASP_HEADER = ['OWASP', 'Code', 'Name']
+    WASC_HEADER = ['WASC', 'Code', 'Name']
+    SCAN_META = [
+        "Web Application Name", "URL", "Owner", "Scope", "ID", "Tags",
+        "Custom Attributes"
+    ]
+    CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']
 
-    def __init__(
-            self,
-            config=None,
-            file_in=None,
-            file_stream=False,
-            delimiter=',',
-            quotechar='"',
-    ):
+    def __init__(self, config=None, file_in=None,
+                 file_stream=False, delimiter=',', quotechar='"'):
         self.logger = logging.getLogger('qualysScanReport')
         self.file_in = file_in
         self.file_stream = file_stream
@@ -337,71 +241,79 @@ class qualysScanReport:
         try:
             self.qw = qualysWhisperAPI(config=config)
         except Exception as e:
-            self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
+            self.logger.error(
+                'Could not load config! Please check settings. Error: {}'.format(
+                    str(e)))
 
         if file_stream:
             self.open_file = file_in.splitlines()
         elif file_in:
             self.open_file = open(file_in, 'rb')
 
         self.downloaded_file = None
 
     def grab_sections(self, report):
-        all_dataframes = []
-        dict_tracker = {}
-        with open(report, 'rb') as csvfile:
-            dict_tracker['WEB_SCAN_VULN_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
+        return {
+            'WEB_SCAN_VULN_BLOCK': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
                     self.WEB_SCAN_VULN_BLOCK,
-                    end=[
-                        self.WEB_SCAN_SENSITIVE_BLOCK,
-                        self.WEB_SCAN_INFO_BLOCK],
+                    end=[self.WEB_SCAN_SENSITIVE_BLOCK, self.WEB_SCAN_INFO_BLOCK],
                     pop_last=True),
-                    columns=self.WEB_SCAN_VULN_HEADER)
-            dict_tracker['WEB_SCAN_SENSITIVE_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.WEB_SCAN_HEADER),
+            'WEB_SCAN_SENSITIVE_BLOCK': pd.DataFrame(
+                self.utils.grab_section(report,
                     self.WEB_SCAN_SENSITIVE_BLOCK,
-                    end=[
-                        self.WEB_SCAN_INFO_BLOCK,
-                        self.WEB_SCAN_SENSITIVE_BLOCK],
+                    end=[self.WEB_SCAN_INFO_BLOCK, self.WEB_SCAN_SENSITIVE_BLOCK],
                    pop_last=True),
-                    columns=self.WEB_SCAN_SENSITIVE_HEADER)
-            dict_tracker['WEB_SCAN_INFO_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.WEB_SCAN_HEADER),
+            'WEB_SCAN_INFO_BLOCK': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
                    self.WEB_SCAN_INFO_BLOCK,
                    end=[self.QID_HEADER],
                    pop_last=True),
-                    columns=self.WEB_SCAN_INFO_HEADER)
-            dict_tracker['QID_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.WEB_SCAN_INFO_HEADER),
+
+            'QID_HEADER': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
                    self.QID_HEADER,
                    end=[self.GROUP_HEADER],
                    pop_last=True),
-                    columns=self.QID_HEADER)
-            dict_tracker['GROUP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.QID_HEADER),
+            'GROUP_HEADER': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
                    self.GROUP_HEADER,
                    end=[self.OWASP_HEADER],
                    pop_last=True),
-                    columns=self.GROUP_HEADER)
-            dict_tracker['OWASP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.GROUP_HEADER),
+            'OWASP_HEADER': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
                    self.OWASP_HEADER,
                    end=[self.WASC_HEADER],
                    pop_last=True),
-                    columns=self.OWASP_HEADER)
-            dict_tracker['WASC_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
-                    self.WASC_HEADER, end=[['APPENDIX']],
+                columns=self.OWASP_HEADER),
+            'WASC_HEADER': pd.DataFrame(
+                self.utils.grab_section(
+                    report,
+                    self.WASC_HEADER,
+                    end=[['APPENDIX']],
                    pop_last=True),
-                    columns=self.WASC_HEADER)
-
-            dict_tracker['SCAN_META'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.WASC_HEADER),
+            'SCAN_META': pd.DataFrame(
+                self.utils.grab_section(report,
                    self.SCAN_META,
                    end=[self.CATEGORY_HEADER],
                    pop_last=True),
-                    columns=self.SCAN_META)
-
-            dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
+                columns=self.SCAN_META),
+            'CATEGORY_HEADER': pd.DataFrame(
+                self.utils.grab_section(report,
                    self.CATEGORY_HEADER),
-                    columns=self.CATEGORY_HEADER)
-        all_dataframes.append(dict_tracker)
-
-        return all_dataframes
+                columns=self.CATEGORY_HEADER)
+        }
 
     def data_normalizer(self, dataframes):
         """
@@ -409,12 +321,21 @@ class qualysScanReport:
         :param dataframes:
         :return:
         """
-        df_dict = dataframes[0]
-        merged_df = pd.concat([df_dict['WEB_SCAN_VULN_BLOCK'], df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
-                               df_dict['WEB_SCAN_INFO_BLOCK']], axis=0,
-                               ignore_index=False)
-        merged_df = pd.merge(merged_df, df_dict['QID_HEADER'], left_on='QID',
-                             right_on='Id')
+        df_dict = dataframes
+        merged_df = pd.concat([
+            df_dict['WEB_SCAN_VULN_BLOCK'],
+            df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
+            df_dict['WEB_SCAN_INFO_BLOCK']
+        ], axis=0, ignore_index=False)
+
+        merged_df = pd.merge(
+            merged_df,
+            df_dict['QID_HEADER'].drop(
+                #these columns always seem to be the same as what we're merging into
+                ['CVSS V3 Attack Vector', 'CVSS V3 Base', 'CVSS V3 Temporal'],
+                axis=1),
+            left_on='QID', right_on='Id'
+        )
 
         if 'Content' not in merged_df:
             merged_df['Content'] = ''
@@ -431,8 +352,11 @@ class qualysScanReport:
 
         merged_df = merged_df.assign(**df_dict['SCAN_META'].to_dict(orient='records')[0])
 
-        merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'], how='left', left_on=['Category', 'Severity Level'],
-                             right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev'))
+        merged_df = pd.merge(
+            merged_df, df_dict['CATEGORY_HEADER'],
+            how='left', left_on=['Category', 'Severity Level'],
+            right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev')
+        )
 
         merged_df = merged_df.replace('N/A', '').fillna('')
@@ -1,15 +1,18 @@
+from __future__ import absolute_import
 import json
 import os
-from datetime import datetime, date, timedelta
+from datetime import datetime, date
 
 from jira import JIRA
 import requests
 import logging
 from bottle import template
 import re
+from six.moves import range
 
 
 class JiraAPI(object):
-    def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True, max_time_window=12, decommission_time_window=3):
+    def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True,
+                 max_time_window=12, decommission_time_window=3):
         self.logger = logging.getLogger('JiraAPI')
         if debug:
             self.logger.setLevel(logging.DEBUG)
@@ -41,10 +44,15 @@ class JiraAPI(object):
             # deletes the tag "server_decommission" from those tickets closed <=3 months ago
             self.decommission_cleanup()
 
-        self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known vulnerability might be the one reported).
-        - In the case of the team accepting the risk and wanting to close the ticket, please add the label "*risk_accepted*" to the ticket before closing it.
-        - If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing it.
-        - If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add the label "*false_positive*" before closing it; we will review it and report it to the vendor.
+        self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been \
+fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known \
+vulnerability might be the one reported).
+        - In the case of the team accepting the risk and wanting to close the ticket, please add the label \
+"*risk_accepted*" to the ticket before closing it.
+        - If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing \
+it.
+        - If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add \
+the label "*false_positive*" before closing it; we will review it and report it to the vendor.
 
         If you have further doubts, please contact the Security Team.'''
@@ -91,13 +99,15 @@ class JiraAPI(object):
         return len(self.jira.search_issues(jql, maxResults=0))
 
     def metrics_closed_tickets(self, project=None):
-        jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(self.max_time_tracking)
+        jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(
+            self.max_time_tracking)
         if project:
             jql += " and (project='{}')".format(project)
         return len(self.jira.search_issues(jql, maxResults=0))
 
     def sync(self, vulnerabilities, project, components=[]):
-        #JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution, ips, risk, references]
+        # JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution,
+        # ips, risk, references]
         self.logger.info("JIRA Sync started")
 
         for vuln in vulnerabilities:
@@ -106,7 +116,8 @@ class JiraAPI(object):
             if " " in vuln['scan_name']:
                 vuln['scan_name'] = "_".join(vuln['scan_name'].split(" "))
 
-            # we exclude from the vulnerabilities to report those assets that already exist with *risk_accepted*/*server_decommission*
+            # we exclude from the vulnerabilities to report those assets that already exist
+            # with *risk_accepted*/*server_decommission*
             vuln = self.exclude_accepted_assets(vuln)
 
             # make sure after exclusion of risk_accepted assets there are still assets
@@ -131,13 +142,17 @@ class JiraAPI(object):
                 # create local text file with assets, attach it to ticket
                 if len(vuln['ips']) > self.max_ips_ticket:
                     attachment_contents = vuln['ips']
-                    vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
+                    vuln['ips'] = [
+                        "Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
+                            assets=len(attachment_contents))]
                 try:
                     tpl = template(self.template_path, vuln)
                 except Exception as e:
                     self.logger.error('Exception templating: {}'.format(str(e)))
                     return 0
-                self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components, tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']], attachment_contents = attachment_contents)
+                self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components,
+                                   tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']],
+                                   attachment_contents=attachment_contents)
             else:
                 self.logger.info("Ignoring vulnerability as all assets are already reported in a risk_accepted ticket")
@@ -153,7 +168,8 @@ class JiraAPI(object):
         labels = [vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability']
 
         if not self.excluded_tickets:
-            jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
+            jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+                " AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
             self.excluded_tickets = self.jira.search_issues(jql, maxResults=0)
 
         title = vuln['title']
@@ -163,7 +179,8 @@ class JiraAPI(object):
         assets_to_exclude = []
         tickets_excluded_assets = []
         for index in range(len(self.excluded_tickets)):
-            checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.excluded_tickets[index])
+            checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(
+                self.excluded_tickets[index])
             if title.encode('ascii') == checking_title.encode('ascii'):
                 if checking_assets:
                     # checking_assets is a list, we add to our full list for later delete all assets
@@ -172,7 +189,8 @@ class JiraAPI(object):
 
         if assets_to_exclude:
             assets_to_remove = []
-            self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(', '.join(tickets_excluded_assets)))
+            self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(
+                ', '.join(tickets_excluded_assets)))
             self.logger.debug("Original assets: {}".format(vuln['ips']))
             # assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial
             for exclusion in assets_to_exclude:
@@ -180,7 +198,9 @@ class JiraAPI(object):
                 # and we don't want it to affect the rest of the processing (otherwise, it would miss the asset right after the removed one)
                 for index in range(len(vuln['ips']))[::-1]:
                     if exclusion == vuln['ips'][index].split(" - ")[0]:
-                        self.logger.debug("Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index], title))
+                        self.logger.debug(
+                            "Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index],
+                                                                                                     title))
                         vuln['ips'].pop(index)
             self.logger.debug("Modified assets: {}".format(vuln['ips']))
@@ -202,7 +222,8 @@ class JiraAPI(object):
         self.logger.info("Retrieving all JIRA tickets with the following tags {}".format(labels))
         # we want to check all JIRA tickets, to include tickets moved to other queues
         # will exclude tickets older than 12 months, old tickets will get closed for higiene and recreated if still vulnerable
-        jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
+        jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+            " AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
 
         self.all_tickets = self.jira.search_issues(jql, maxResults=0)
@@ -212,7 +233,8 @@ class JiraAPI(object):
         for index in range(len(self.all_tickets)):
             checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.all_tickets[index])
             # added "not risk_accepted", as if it is risk_accepted, we will create a new ticket excluding the accepted assets
-            if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(self.jira.issue(checking_ticketid)):
+            if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(
+                    self.jira.issue(checking_ticketid)):
                 difference = list(set(assets).symmetric_difference(checking_assets))
                 # to check intersection - set(assets) & set(checking_assets)
                 if difference:
@@ -226,32 +248,47 @@ class JiraAPI(object):
     def ticket_get_unique_fields(self, ticket):
         title = ticket.raw.get('fields', {}).get('summary').encode("ascii").strip()
         ticketid = ticket.key.encode("ascii")
-        assets = []
-        try:
-            affected_assets_section = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0]
-            assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
-        except Exception as e:
-            self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
-            assets = []
 
-        try:
-            if not assets:
-                # check if attachment, if so, get assets from attachment
-                affected_assets_section = self.check_ips_attachment(ticket)
-                if affected_assets_section:
-                    assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
-        except Exception as e:
-            self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
+        assets = self.get_assets_from_description(ticket)
+        if not assets:
+            # check if attachment, if so, get assets from attachment
+            assets = self.get_assets_from_attachment(ticket)
 
         return ticketid, title, assets
 
-    def check_ips_attachment(self, ticket):
-        affected_assets_section = []
+    def get_assets_from_description(self, ticket, _raw=False):
+        # Get the assets as a string "host - protocol/port - hostname" separated by "\n"
+        # structure the text to have the same structure as the assets from the attachment
+        affected_assets = ""
+        try:
+            affected_assets = \
+                ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[
+                    1].split("{panel}")[0].replace('\n', '').replace(' * ', '\n').replace('\n', '', 1)
+        except Exception as e:
+            self.logger.error(
+                "Unable to process the Ticket's 'Affected Assets'. Ticket ID: {}. Reason: {}".format(ticket, e))
+
+        if affected_assets:
+            if _raw:
+                # from line 406 check if the text in the panel corresponds to having added an attachment
+                if "added as an attachment" in affected_assets:
+                    return False
+                return affected_assets
+
+            try:
+                # if _raw is not true, we return only the IPs of the affected assets
+                return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
+            except Exception as e:
+                self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
+        return False
+
+    def get_assets_from_attachment(self, ticket, _raw=False):
+        # Get the assets as a string "host - protocol/port - hostname" separated by "\n"
+        affected_assets = []
         try:
             fields = self.jira.issue(ticket.key).raw.get('fields', {})
             attachments = fields.get('attachment', {})
-            affected_assets_section = ""
+            affected_assets = ""
             # we will make sure we get the latest version of the file
             latest = ''
             attachment_id = ''
@@ -265,12 +302,45 @@ class JiraAPI(object):
                 if latest < item.get('created'):
                     latest = item.get('created')
                     attachment_id = item.get('id')
-            affected_assets_section = self.jira.attachment(attachment_id).get()
+            affected_assets = self.jira.attachment(attachment_id).get()
 
         except Exception as e:
-            self.logger.error("Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))
+            self.logger.error(
+                "Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))
 
-        return affected_assets_section
+        if affected_assets:
+            if _raw:
+                return affected_assets
+
+            try:
+                # if _raw is not true, we return only the IPs of the affected assets
+                affected_assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
+                return affected_assets
+            except Exception as e:
+                self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
+
+        return False
+
+    def parse_asset_to_json(self, asset):
+        hostname, protocol, port = "", "", ""
+        asset_info = asset.split(" - ")
+        ip = asset_info[0]
+        proto_port = asset_info[1]
+        # in case there is some case where hostname is not reported at all
+        if len(asset_info) == 3:
+            hostname = asset_info[2]
+        if proto_port != "N/A/N/A":
+            protocol, port = proto_port.split("/")
+            port = int(float(port))
+
+        asset_dict = {
+            "host": ip,
+            "protocol": protocol,
+            "port": port,
+            "hostname": hostname
+        }
+
+        return asset_dict
 
     def clean_old_attachments(self, ticket):
         fields = ticket.raw.get('fields')
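For reference, a hypothetical input/output pair for the new parse_asset_to_json() above; the "ip - protocol/port - hostname" string format is the one used in a ticket's Affected Assets section, and the host values here are illustrative only:

    asset = "10.0.0.5 - tcp/443 - web01.example.com"
    parse_asset_to_json(asset)
    # => {'host': '10.0.0.5', 'protocol': 'tcp', 'port': 443, 'hostname': 'web01.example.com'}

    # When no service was detected the middle field is "N/A/N/A", so
    # protocol and port stay empty strings:
    parse_asset_to_json("10.0.0.6 - N/A/N/A - db01.example.com")
    # => {'host': '10.0.0.6', 'protocol': '', 'port': '', 'hostname': 'db01.example.com'}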
@@ -309,8 +379,10 @@ class JiraAPI(object):
         if self.is_ticket_resolved(ticket_obj):
             ticket_data = ticket_obj.raw.get('fields')
             # dates follow format '2018-11-06T10:36:13.849+0100'
-            created = [int(x) for x in ticket_data['created'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
-            resolved =[int(x) for x in ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
+            created = [int(x) for x in
+                       ticket_data['created'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]
+            resolved = [int(x) for x in
+                        ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]
 
             start = datetime(created[0], created[1], created[2], created[3], created[4], created[5])
             end = datetime(resolved[0], resolved[1], resolved[2], resolved[3], resolved[4], resolved[5])
@@ -361,7 +433,9 @@ class JiraAPI(object):
         attachment_contents = []
         if len(vuln['ips']) > self.max_ips_ticket:
             attachment_contents = vuln['ips']
-            vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
+            vuln['ips'] = [
+                "Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
+                    assets=len(attachment_contents))]
 
         # fill the ticket description template
         try:
@@ -381,7 +455,8 @@ class JiraAPI(object):
             self.logger.info("Ticket {} updated successfully".format(ticketid))
             self.add_label(ticketid, 'updated')
         except Exception as e:
-            self.logger.error("Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid = ticketid, e=e))
+            self.logger.error(
+                "Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid=ticketid, e=e))
         return 0
 
     def add_label(self, ticketid, label):
@@ -393,8 +468,9 @@ class JiraAPI(object):
         try:
             ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
             self.logger.info("Added label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
-        except:
-            self.logger.error("Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
+        except Exception as e:
+            self.logger.error(
+                "Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
 
         return 0
@@ -407,8 +483,9 @@ class JiraAPI(object):
         try:
             ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
             self.logger.info("Removed label {label} from ticket {ticket}".format(label=label, ticket=ticketid))
-        except:
-            self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
+        except Exception as e:
+            self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label,
+                                                                                                      ticket=ticketid))
         else:
             self.logger.error("Error: label {label} not in ticket {ticket}".format(label=label, ticket=ticketid))
@@ -434,7 +511,6 @@ class JiraAPI(object):
                 self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
         return 0
 
-
     def is_ticket_reopenable(self, ticket_obj):
         transitions = self.jira.transitions(ticket_obj)
         for transition in transitions:
@@ -463,7 +539,6 @@ class JiraAPI(object):
             self.logger.debug("Checked ticket {} is already open".format(ticket_obj))
         return False
 
-
     def is_risk_accepted(self, ticket_obj):
         if ticket_obj is not None:
             if ticket_obj.raw['fields'].get('labels') is not None:
@@ -489,7 +564,8 @@ class JiraAPI(object):
         if (not self.is_risk_accepted(ticket_obj) or ignore_labels):
             try:
                 if self.is_ticket_reopenable(ticket_obj):
-                    error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE, comment = comment)
+                    error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE,
+                                                       comment=comment)
                     self.logger.info("Ticket {} reopened successfully".format(ticketid))
                     if not ignore_labels:
                         self.add_label(ticketid, 'reopened')
@@ -509,7 +585,8 @@ class JiraAPI(object):
             if self.is_ticket_closeable(ticket_obj):
                 # need to add the label before closing the ticket
                 self.add_label(ticketid, 'closed')
-                error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE, comment = comment, resolution = {"name": resolution })
+                error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE,
+                                                   comment=comment, resolution={"name": resolution})
                 self.logger.info("Ticket {} closed successfully".format(ticketid))
                 return 1
         except Exception as e:
@@ -522,7 +599,8 @@ class JiraAPI(object):
     def close_obsolete_tickets(self):
         # Close tickets older than 12 months, vulnerabilities not solved will get created a new ticket
         self.logger.info("Closing obsolete tickets older than {} months".format(self.max_time_tracking))
-        jql = "labels=vulnerability_management AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
+        jql = "labels=vulnerability_management AND NOT labels=advisory AND created <startOfMonth(-{}) and resolution=Unresolved".format(
+            self.max_time_tracking)
         tickets_to_close = self.jira.search_issues(jql, maxResults=0)
 
         comment = '''This ticket is being closed for hygiene, as it is more than {} months old.
@@ -553,9 +631,36 @@ class JiraAPI(object):
             return True
         try:
             self.logger.info("Saving locally tickets from the last {} months".format(self.max_time_tracking))
-            jql = "labels=vulnerability_management AND created >=startOfMonth(-{})".format(self.max_time_tracking)
+            jql = "labels=vulnerability_management AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+                self.max_time_tracking)
             tickets_data = self.jira.search_issues(jql, maxResults=0)
 
             # TODO process tickets, creating a new field called "_metadata" with all the affected assets well structured
             # for future processing in ELK/Splunk; this includes downloading attachments with assets and processing them
 
+            processed_tickets = []
+
+            for ticket in tickets_data:
+                assets = self.get_assets_from_description(ticket, _raw=True)
+                if not assets:
+                    # check if attachment, if so, get assets from attachment
+                    assets = self.get_assets_from_attachment(ticket, _raw=True)
+                # process the affected assets to save them as json structure on a new field from the JSON
+                _metadata = {"affected_hosts": []}
+                if assets:
+                    if "\n" in assets:
+                        for asset in assets.split("\n"):
+                            assets_json = self.parse_asset_to_json(asset)
+                            _metadata["affected_hosts"].append(assets_json)
+                    else:
+                        assets_json = self.parse_asset_to_json(assets)
+                        _metadata["affected_hosts"].append(assets_json)
+
+                temp_ticket = ticket.raw.get('fields')
+                temp_ticket['_metadata'] = _metadata
+
+                processed_tickets.append(temp_ticket)
 
             # end of line needed, as writelines() doesn't add it automatically, otherwise one big line
             to_save = [json.dumps(ticket.raw.get('fields')) + "\n" for ticket in tickets_data]
             with open(fname, 'w') as outfile:
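The loop above flattens each ticket's affected assets into a new _metadata field built from parse_asset_to_json(). A hypothetical example of the resulting structure (host values illustrative):

    # temp_ticket['_metadata'] ends up shaped like:
    {
        "affected_hosts": [
            {"host": "10.0.0.5", "protocol": "tcp", "port": 443, "hostname": "web01.example.com"},
            {"host": "10.0.0.6", "protocol": "", "port": "", "hostname": "db01.example.com"}
        ]
    }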
@@ -575,13 +680,16 @@ class JiraAPI(object):
        closed already for more than x months (default is 3 months) in order to clean solved issues
        for statistics purposes
        '''
-        self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(self.max_decommission_time))
+        self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(
+            self.max_decommission_time))
 
-        jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(self.max_decommission_time)
+        jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(
+            self.max_decommission_time)
         decommissioned_tickets = self.jira.search_issues(jql, maxResults=0)
 
         comment = '''This ticket is having deleted the *server_decommission* tag, as it is more than {} months old and is expected to already have been decommissioned.
-        If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(self.max_decommission_time)
+        If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(
+            self.max_decommission_time)
 
         for ticket in decommissioned_tickets:
             # we open first the ticket, as we want to make sure the process is not blocked due to
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import os
 import logging
 import httpretty
@@ -20,10 +21,12 @@ class mockAPI(object):
 
     def get_directories(self, path):
         dir, subdirs, files = next(os.walk(path))
+        self.logger.debug('Subdirectories found: {}'.format(subdirs))
         return subdirs
 
     def get_files(self, path):
         dir, subdirs, files = next(os.walk(path))
+        self.logger.debug('Files found: {}'.format(files))
         return files
 
     def qualys_vuln_callback(self, request, uri, response_headers):
@ -1,13 +1,17 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from six.moves import range
from functools import reduce

__author__ = 'Austin Taylor'

from base.config import vwConfig
from frameworks.nessus import NessusAPI
from frameworks.qualys_web import qualysScanReport
from frameworks.qualys_vuln import qualysVulnScan
from frameworks.openvas import OpenVAS_API
from reporting.jira_api import JiraAPI
from .base.config import vwConfig
from .frameworks.nessus import NessusAPI
from .frameworks.qualys_web import qualysScanReport
from .frameworks.qualys_vuln import qualysVulnScan
from .frameworks.openvas import OpenVAS_API
from .reporting.jira_api import JiraAPI
import pandas as pd
from lxml import objectify
import sys
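The import rewrite pairs with `from __future__ import absolute_import` at the top of the file: on Python 2, `from base.config import ...` was resolved relative to the package implicitly, which breaks on Python 3; the future import makes Python 2 behave like Python 3, so intra-package imports must carry the leading dot. A layout sketch, with hypothetical package and module names:

```python
# mypkg/core.py -- hypothetical package layout:
#   mypkg/__init__.py
#   mypkg/core.py
#   mypkg/base/config.py
from __future__ import absolute_import

# a bare name now always means a top-level module (stdlib or site-packages):
import json

# package-internal modules must be addressed explicitly, exactly as the diff does:
from .base.config import vwConfig
```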
@ -21,7 +25,6 @@ import socket


class vulnWhispererBase(object):

    CONFIG_SECTION = None

    def __init__(
@ -55,14 +58,16 @@ class vulnWhispererBase(object):
        except:
            self.enabled = False
        self.hostname = self.config.get(self.CONFIG_SECTION, 'hostname')
        try:
            self.username = self.config.get(self.CONFIG_SECTION, 'username')
            self.password = self.config.get(self.CONFIG_SECTION, 'password')
        except:
            self.username = None
            self.password = None
        self.write_path = self.config.get(self.CONFIG_SECTION, 'write_path')
        self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
        self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')

        if self.db_name is not None:
            if self.db_path:
                self.database = os.path.join(self.db_path,
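Username and password become optional here because API-key authentication (added below for Nessus/Tenable.io) can replace them. The bare `try/except` works but swallows every error; a sketch of the same idea using configparser's narrower exceptions (the INI filename is a placeholder, section and option names follow the project's config layout):

```python
from six.moves import configparser

config = configparser.RawConfigParser()
config.read('vulnwhisperer.ini')  # placeholder path

try:
    username = config.get('nessus', 'username')
    password = config.get('nessus', 'password')
except (configparser.NoSectionError, configparser.NoOptionError):
    # credentials may legitimately be absent when API keys are used instead
    username = None
    password = None

print(username, password)
```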
@ -84,7 +89,8 @@ class vulnWhispererBase(object):
            self.cur = self.conn.cursor()
            self.logger.info('Connected to database at {loc}'.format(loc=self.database))
        except Exception as e:
            self.logger.error('Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
            self.logger.error(
                'Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
                    e=e,
                    loc=self.database))
        else:
@ -181,7 +187,8 @@ class vulnWhispererBase(object):
        """
        try:
            self.conn.text_factory = str
            self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(config_section=self.CONFIG_SECTION))
            self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(
                config_section=self.CONFIG_SECTION))
            results = frozenset([r[0] for r in self.cur.fetchall()])
        except:
            results = []
@ -200,10 +207,13 @@ class vulnWhispererBase(object):
    def get_latest_results(self, source, scan_name):
        processed = 0
        results = []
        reported = ""

        try:
            self.conn.text_factory = str
            self.cur.execute('SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(source, scan_name))
            self.cur.execute(
                'SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(
                    source, scan_name))
            # should always return just one filename
            results = [r[0] for r in self.cur.fetchall()][0]

@ -211,13 +221,17 @@ class vulnWhispererBase(object):
            # TODO delete backward compatibility check after some versions
            last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
            if results and last_column_table == self.table_columns[-1]:
                reported = self.cur.execute('SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
                reported = self.cur.execute(
                    'SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
                reported = reported[0][0]
                if reported:
                    self.logger.debug("Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(source=source, scan_name=scan_name))
                    self.logger.debug(
                        "Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(
                            source=source, scan_name=scan_name))

        except Exception as e:
            self.logger.error("Error when getting latest results from {}.{} : {}".format(source, scan_name, e))

        return results, reported

    def get_scan_profiles(self):
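These queries interpolate values straight into the SQL string; sqlite3's parameter binding is the safer equivalent. A small runnable sketch against a reduced, assumed subset of the `scan_history` schema (column names inferred from the queries above):

```python
import sqlite3

# in-memory stand-in for the scan-history database
conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE scan_history '
            '(id INTEGER PRIMARY KEY, source TEXT, scan_name TEXT, '
            'last_modified DATE, filename TEXT, reported INTEGER)')
cur.execute('INSERT INTO scan_history VALUES '
            '(1, "nessus", "Weekly External", "2019-01-07", "nessus_scan.csv", 0)')

# ? placeholders let sqlite3 quote values safely, unlike str.format() interpolation
cur.execute('SELECT filename FROM scan_history '
            'WHERE source=? AND scan_name=? ORDER BY last_modified DESC LIMIT 1;',
            ('nessus', 'Weekly External'))
row = cur.fetchone()
print(row[0] if row else None)  # nessus_scan.csv
```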
@ -248,8 +262,8 @@ class vulnWhispererBase(object):

        return results

class vulnWhispererNessus(vulnWhispererBase):

class vulnWhispererNessus(vulnWhispererBase):
    CONFIG_SECTION = None

    def __init__(
@ -274,6 +288,8 @@ class vulnWhispererNessus(vulnWhispererBase):

        self.develop = True
        self.purge = purge
        self.access_key = None
        self.secret_key = None

        if config is not None:
            try:
@ -283,26 +299,36 @@ class vulnWhispererNessus(vulnWhispererBase):
                    'trash')

                try:
                    self.logger.info('Attempting to connect to nessus...')
                    self.access_key = self.config.get(self.CONFIG_SECTION, 'access_key')
                    self.secret_key = self.config.get(self.CONFIG_SECTION, 'secret_key')
                except:
                    pass

                try:
                    self.logger.info('Attempting to connect to {}...'.format(self.CONFIG_SECTION))
                    self.nessus = \
                        NessusAPI(hostname=self.hostname,
                                  port=self.nessus_port,
                                  username=self.username,
                                  password=self.password)
                                  password=self.password,
                                  profile=self.CONFIG_SECTION,
                                  access_key=self.access_key,
                                  secret_key=self.secret_key
                                  )
                    self.nessus_connect = True
                    self.logger.info('Connected to nessus on {host}:{port}'.format(host=self.hostname,
                    self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
                                                                                port=str(self.nessus_port)))
                except Exception as e:
                    self.logger.error('Exception: {}'.format(str(e)))
                    raise Exception(
                        'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                        'Could not connect to {} -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                            self.CONFIG_SECTION,
                            config=self.config.config_in,
                            e=e))
            except Exception as e:
                self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
                sys.exit(1)

                return False
                # sys.exit(1)

    def scan_count(self, scans, completed=False):
        """
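The new access_key/secret_key pair maps to the X-ApiKeys header that both Nessus and Tenable.io accept, which avoids a session-based username/password login. A hedged sketch of that request style (the key values are placeholders, and this is not the project's NessusAPI class itself):

```python
import requests

ACCESS_KEY = "your-access-key"      # placeholder
SECRET_KEY = "your-secret-key"      # placeholder
BASE_URL = "https://cloud.tenable.com"

headers = {
    # API-key auth: no /session login round-trip needed
    "X-ApiKeys": "accessKey={}; secretKey={}".format(ACCESS_KEY, SECRET_KEY),
    "Content-Type": "application/json",
}

resp = requests.get(BASE_URL + "/scans", headers=headers)
resp.raise_for_status()
print(resp.json().get("scans", []))
```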
@ -345,7 +371,6 @@ class vulnWhispererNessus(vulnWhispererBase):
            scan_records = [s for s in scan_records if s['status'] == 'completed']
        return scan_records

    def whisper_nessus(self):
        if self.nessus_connect:
            scan_data = self.nessus.scans
@ -400,7 +425,8 @@ class vulnWhispererNessus(vulnWhispererBase):
                    s['uuid'],
                )

                # TODO Create directory sync function which scans the directory for files that exist already and populates the database
                # TODO Create directory sync function which scans the directory for files that exist already and
                # populates the database

                folder_id = s['folder_id']
                if self.CONFIG_SECTION == 'tenable':
@ -430,24 +456,29 @@ class vulnWhispererNessus(vulnWhispererBase):
                        0,
                    )
                    self.record_insert(record_meta)
                    self.logger.info('File {filename} already exists! Updating database'.format(filename=relative_path_name))
                    self.logger.info(
                        'File {filename} already exists! Updating database'.format(filename=relative_path_name))
                else:
                    try:
                        file_req = \
                            self.nessus.download_scan(scan_id=scan_id, history=history_id,
                                                      export_format='csv', profile=self.CONFIG_SECTION)
                                                      export_format='csv')
                    except Exception as e:
                        self.logger.error('Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
                        self.logger.error(
                            'Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
                        self.exit_code += 1
                        continue

                    clean_csv = \
                        pd.read_csv(io.StringIO(file_req.decode('utf-8')))
                    if len(clean_csv) > 2:
                        self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name.encode('utf8')))
                        columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output']
                        self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list),
                                                                                scan_name.encode('utf8')))
                        columns_to_cleanse = ['CVSS', 'CVE', 'Description', 'Synopsis', 'Solution', 'See Also',
                                              'Plugin Output', 'MAC Address']

                        for col in columns_to_cleanse:
                            if col in clean_csv:
                                clean_csv[col] = clean_csv[col].astype(str).apply(self.cleanser)

                        clean_csv.to_csv(relative_path_name, index=False)
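The cleansing loop is plain pandas: cast each column to string, then run a cleanser over every value. A self-contained sketch with a stand-in cleanser (the real `self.cleanser` lives elsewhere in this class):

```python
import pandas as pd

def cleanser(value):
    # stand-in cleanser: strip newlines and commas that would break CSV/ELK parsing
    return value.replace('\n', ' ').replace(',', ';')

df = pd.DataFrame({'CVE': ['CVE-2021-1234\nCVE-2021-5678'], 'Port': [443]})
columns_to_cleanse = ['CVSS', 'CVE', 'Description', 'Plugin Output']

for col in columns_to_cleanse:
    if col in df:
        # astype(str) first, since numeric/NaN cells have no .replace()
        df[col] = df[col].astype(str).apply(cleanser)

print(df.loc[0, 'CVE'])  # "CVE-2021-1234 CVE-2021-5678"
```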
@ -465,7 +496,8 @@ class vulnWhispererNessus(vulnWhispererBase):
                        )
                        self.record_insert(record_meta)
                        self.logger.info('{filename} records written to {path} '.format(filename=clean_csv.shape[0],
                                                                                        path=file_name.encode('utf8')))
                                                                                        path=file_name.encode(
                                                                                            'utf8')))
                    else:
                        record_meta = (
                            scan_name,
@ -480,27 +512,32 @@ class vulnWhispererNessus(vulnWhispererBase):
                            0,
                        )
                        self.record_insert(record_meta)
                        self.logger.warn('{} has no host available... Updating database and skipping!'.format(file_name))
                        self.logger.warn(
                            '{} has no host available... Updating database and skipping!'.format(file_name))
            self.conn.close()
            self.logger.info('Scan aggregation complete! Connection to database closed.')
        else:
            self.logger.error('Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
            self.logger.error(
                'Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
            self.exit_code += 1
        return self.exit_code


class vulnWhispererQualys(vulnWhispererBase):

    CONFIG_SECTION = 'qualys_web'
    COLUMN_MAPPING = {'Access Path': 'access_path',
                      'Ajax Request': 'ajax_request',
                      'Ajax Request ID': 'ajax_request_id',
                      'Authentication': 'authentication',
                      'CVSS Base': 'cvss',
                      'CVSS V3 Attack Vector': 'cvss_v3_attack_vector',
                      'CVSS V3 Base': 'cvss_v3_base',
                      'CVSS V3 Temporal': 'cvss_v3_temporal',
                      'CVSS Temporal': 'cvss_temporal',
                      'CWE': 'cwe',
                      'Category': 'category',
                      'Content': 'content',
                      'Custom Attributes': 'custom_attributes',
                      'DescriptionSeverity': 'severity_description',
                      'DescriptionCatSev': 'category_description',
                      'Detection ID': 'detection_id',
@ -516,15 +553,19 @@ class vulnWhispererQualys(vulnWhispererBase):
                      'Ignore User': 'ignore_user',
                      'Ignored': 'ignored',
                      'Impact': 'impact',
                      'Info#1': 'info_1',
                      'Last Time Detected': 'last_time_detected',
                      'Last Time Tested': 'last_time_tested',
                      'Level': 'level',
                      'OWASP': 'owasp',
                      'Operating System': 'operating_system',
                      'Owner': 'owner',
                      'Param': 'param',
                      'Param/Cookie': 'param',
                      'Payload #1': 'payload_1',
                      'Port': 'port',
                      'Protocol': 'protocol',
                      'QID': 'plugin_id',
                      'Request Body #1': 'request_body_1',
                      'Request Headers #1': 'request_headers_1',
                      'Request Method #1': 'request_method_1',
                      'Request URL #1': 'request_url_1',
@ -533,13 +574,17 @@ class vulnWhispererQualys(vulnWhispererBase):
                      'Severity': 'risk',
                      'Severity Level': 'security_level',
                      'Solution': 'solution',
                      'Tags': 'tags',
                      'Times Detected': 'times_detected',
                      'Title': 'plugin_name',
                      'URL': 'url',
                      'Unique ID': 'unique_id',
                      'Url': 'uri',
                      'Vulnerability Category': 'vulnerability_category',
                      'Virtual Host': 'virutal_host',
                      'WASC': 'wasc',
                      'Web Application Name': 'web_application_name'}

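COLUMN_MAPPING exists so Qualys WAS report headers land as consistent field names in ELK. Renaming with it is one pandas call; a short sketch using toy data and a subset of the real mapping keys:

```python
import pandas as pd

COLUMN_MAPPING = {'QID': 'plugin_id', 'Title': 'plugin_name', 'Severity': 'risk'}

report = pd.DataFrame([{'QID': 150001, 'Title': 'Reflected XSS', 'Severity': 3}])
# rename() only touches columns present in the mapping; extras pass through untouched
normalized = report.rename(columns=COLUMN_MAPPING)
print(list(normalized.columns))  # ['plugin_id', 'plugin_name', 'risk']
```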
    def __init__(
            self,
            config=None,
@ -555,8 +600,11 @@ class vulnWhispererQualys(vulnWhispererBase):
        self.logger = logging.getLogger('vulnWhispererQualys')
        if debug:
            self.logger.setLevel(logging.DEBUG)

        try:
            self.qualys_scan = qualysScanReport(config=config)
        except Exception as e:
            self.logger.error("Unable to establish connection with Qualys scanner. Reason: {}".format(e))
            return False
        self.latest_scans = self.qualys_scan.qw.get_all_scans()
        self.directory_check()
        self.scans_to_process = None
@ -642,10 +690,10 @@ class vulnWhispererQualys(vulnWhispererBase):

                if cleanup:
                    self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
                    cleaning_up = \
                        self.qualys_scan.qw.delete_report(generated_report_id)
                    cleaning_up = self.qualys_scan.qw.delete_report(generated_report_id)
                    os.remove(self.path_check(str(generated_report_id) + '.csv'))
                    self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
                    self.logger.info(
                        'Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
                else:
                    self.logger.error('Could not process report ID: {}'.format(status))

@ -653,7 +701,6 @@ class vulnWhispererQualys(vulnWhispererBase):
            self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
        return vuln_ready

    def identify_scans_to_process(self):
        if self.uuids:
            self.scans_to_process = self.latest_scans[~self.latest_scans['id'].isin(self.uuids)]
@ -661,7 +708,6 @@ class vulnWhispererQualys(vulnWhispererBase):
            self.scans_to_process = self.latest_scans
        self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))

    def process_web_assets(self):
        counter = 0
        self.identify_scans_to_process()
@ -728,17 +774,20 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
        self.develop = True
        self.purge = purge
        self.scans_to_process = None
        try:
            self.openvas_api = OpenVAS_API(hostname=self.hostname,
                                           port=self.port,
                                           username=self.username,
                                           password=self.password)
        except Exception as e:
            self.logger.error("Unable to establish connection with OpenVAS scanner. Reason: {}".format(e))
            return False

    def whisper_reports(self, output_format='json', launched_date=None, report_id=None, cleanup=True):
        report = None
        if report_id:
            self.logger.info('Processing report ID: {}'.format(report_id))

            scan_name = report_id.replace('-', '')
            report_name = 'openvas_scan_{scan_name}_{last_updated}.{extension}'.format(scan_name=scan_name,
                                                                                       last_updated=launched_date,
@ -806,7 +855,8 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
        for scan in self.scans_to_process.iterrows():
            counter += 1
            info = scan[1]
            self.logger.info('Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
            self.logger.info(
                'Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
            self.whisper_reports(report_id=info['report_ids'],
                                 launched_date=info['epoch'])
        self.logger.info('Processing complete')
@ -817,7 +867,6 @@ class vulnWhispererOpenVAS(vulnWhispererBase):


class vulnWhispererQualysVuln(vulnWhispererBase):

    CONFIG_SECTION = 'qualys_vuln'
    COLUMN_MAPPING = {'cvss_base': 'cvss',
                      'cvss3_base': 'cvss3',
@ -842,8 +891,11 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
        self.logger = logging.getLogger('vulnWhispererQualysVuln')
        if debug:
            self.logger.setLevel(logging.DEBUG)

        try:
            self.qualys_scan = qualysVulnScan(config=config)
        except Exception as e:
            self.logger.error("Unable to create connection with Qualys. Reason: {}".format(e))
            return False
        self.directory_check()
        self.scans_to_process = None

@ -854,7 +906,7 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
                        scan_reference=None,
                        output_format='json',
                        cleanup=True):
        launched_date
        if 'Z' in launched_date:
            launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
        report_name = 'qualys_vuln_' + report_id.replace('/', '_') \
@ -915,7 +967,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
            self.logger.info('Report written to {}'.format(report_name))
            return self.exit_code

    def identify_scans_to_process(self):
        self.latest_scans = self.qualys_scan.qw.get_all_scans()
        if self.uuids:
@ -926,7 +977,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
            self.scans_to_process = self.latest_scans
        self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))

    def process_vuln_scans(self):
        counter = 0
        self.identify_scans_to_process()
@ -946,7 +996,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):


class vulnWhispererJIRA(vulnWhispererBase):

    CONFIG_SECTION = 'jira'

    def __init__(
@ -966,6 +1015,13 @@ class vulnWhispererJIRA(vulnWhispererBase):
        self.config_path = config
        self.config = vwConfig(config)
        self.host_resolv_cache = {}
        self.host_no_resolv = []
        self.no_resolv_by_team_dict = {}
        # save locally those assets without a DNS entry, so they can be flagged to system owners
        self.no_resolv_fname = "no_resolv.txt"
        if os.path.isfile(self.no_resolv_fname):
            with open(self.no_resolv_fname, "r") as json_file:
                self.no_resolv_by_team_dict = json.load(json_file)
        self.directory_check()

        if config is not None:
@ -983,17 +1039,19 @@ class vulnWhispererJIRA(vulnWhispererBase):
                raise Exception(
                    'Could not connect to Jira -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                        config=self.config.config_in, e=e))
                sys.exit(1)
                return False
                # sys.exit(1)

        profiles = []
        profiles = self.get_scan_profiles()

        if not self.config.exists_jira_profiles(profiles):
            self.config.update_jira_profiles(profiles)
            self.logger.info("Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(config=self.config_path))
            self.logger.info(
                "Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(
                    config=self.config_path))
            sys.exit(0)

    def get_env_variables(self, source, scan_name):
        # function returns an array with [jira_project, jira_components, datafile_path]
@ -1031,11 +1089,15 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    fullpath = "{}/{}".format(root, filename)

        if reported:
            self.logger.warn('Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(scan_name=scan_name, source=source))
            self.logger.warn(
                'Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(
                    scan_name=scan_name, source=source))
            return [False] * 5

        if not fullpath:
            self.logger.error('Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(scan_name=scan_name, source=source))
            self.logger.error(
                'Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(
                    scan_name=scan_name, source=source))
            sys.exit(1)

        dns_resolv = self.config.get('jira', 'dns_resolv')
@ -1049,7 +1111,6 @@ class vulnWhispererJIRA(vulnWhispererBase):

        return project, components, fullpath, min_critical, dns_resolv

    def parse_nessus_vulnerabilities(self, fullpath, source, scan_name, min_critical):

        vulnerabilities = []
@ -1078,7 +1139,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                vuln['consequence'] = df.loc[index]['Description'].replace('\\n', ' ')
                vuln['solution'] = df.loc[index]['Solution'].replace('\\n', ' ')
                vuln['ips'] = []
                vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
                vuln['ips'].append(
                    "{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
                vuln['risk'] = df.loc[index]['Risk'].lower()

                # Nessus "nan" value gets automatically cast to float by Python
@ -1092,7 +1154,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                # grouping assets by vulnerability to open a single ticket, as each asset has its own Nessus entry
                for vuln in vulnerabilities:
                    if vuln['title'] == df.loc[index]['Name']:
                        vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
                        vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'],
                                                               df.loc[index]['Port']))

        return vulnerabilities

@ -1117,7 +1180,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    continue

                elif data[index]['type'] == 'Practice' or data[index]['type'] == 'Ig':
                    self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['plugin_name']))
                    self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(
                        vuln=data[index]['plugin_name']))
                    continue

                if not vulnerabilities or data[index]['plugin_name'] not in [entry['title'] for entry in vulnerabilities]:
@ -1133,7 +1197,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    vuln['ips'] = []
                    # TODO: DNS resolution added from Qualys; note it uses \n separators instead of \\n!
                    vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
                    vuln['ips'].append(
                        "{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))

                    # different risk system than Nessus!
                    vuln['risk'] = risks[int(data[index]['risk']) - 1]
@ -1148,7 +1213,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    # grouping assets by vulnerability to open a single ticket, as each asset has its own entry
                    for vuln in vulnerabilities:
                        if vuln['title'] == data[index]['plugin_name']:
                            vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
                            vuln['ips'].append(
                                "{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))

        return vulnerabilities

@ -1162,7 +1228,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
        if vuln['dns']:
            values['dns'] = vuln['dns']
        else:
            if values['ip'] in self.host_resolv_cache.keys():
            if values['ip'] in list(self.host_resolv_cache.keys()):
                self.logger.debug("Hostname from {ip} cached, retrieving from cache.".format(ip=values['ip']))
                values['dns'] = self.host_resolv_cache[values['ip']]
            else:
@ -1173,6 +1239,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    self.logger.debug("Hostname found: {hostname}.".format(hostname=values['dns']))
                except:
                    self.host_resolv_cache[values['ip']] = ''
                    self.host_no_resolv.append(values['ip'])
                    self.logger.debug("Hostname not found for: {ip}.".format(ip=values['ip']))

        for key in values.keys():
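The resolver cache avoids repeating reverse lookups per asset and keeps a list of hosts that never resolved. A standalone sketch of the same pattern, stdlib only:

```python
import socket

resolv_cache = {}
no_resolv = []

def resolve(ip):
    # return the cached answer (including cached failures, stored as '')
    if ip in resolv_cache:
        return resolv_cache[ip]
    try:
        hostname = socket.gethostbyaddr(ip)[0]
    except (socket.herror, socket.gaierror):
        hostname = ''
        no_resolv.append(ip)
    resolv_cache[ip] = hostname
    return hostname

print(resolve('127.0.0.1'))  # usually 'localhost'
```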
@ -1187,14 +1254,16 @@ class vulnWhispererJIRA(vulnWhispererBase):

        return 0

    def jira_sync(self, source, scan_name):
        self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source, scan_name=scan_name))
        self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source,
                                                                                                   scan_name=scan_name))

        project, components, fullpath, min_critical, dns_resolv = self.get_env_variables(source, scan_name)

        if not project:
            self.logger.debug("Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(source=source, scan_name=scan_name))
            self.logger.debug(
                "Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(
                    source=source, scan_name=scan_name))
            return False

        vulnerabilities = []
@ -1205,7 +1274,8 @@ class vulnWhispererJIRA(vulnWhispererBase):

        # ***Qualys VM parsing***
        if source == "qualys_vuln":
            vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
            vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical,
                                                                     dns_resolv)

        # ***JIRA sync***
        if vulnerabilities:
@ -1214,10 +1284,20 @@ class vulnWhispererJIRA(vulnWhispererBase):

            self.jira.sync(vulnerabilities, project, components)
        else:
            self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
            self.logger.info(
                "[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source,
                                                                                                  scan_name=scan_name))
            self.set_latest_scan_reported(fullpath.split("/")[-1])
            return False

        # write to file those assets without DNS resolution,
        # if the list is not empty
        if self.host_no_resolv:
            # replace the old list of unresolved hosts with the new one, or create it if it doesn't exist yet
            self.no_resolv_by_team_dict[scan_name] = self.host_no_resolv
            with open(self.no_resolv_fname, 'w') as outfile:
                json.dump(self.no_resolv_by_team_dict, outfile)

        self.set_latest_scan_reported(fullpath.split("/")[-1])
        return True

@ -1226,10 +1306,16 @@ class vulnWhispererJIRA(vulnWhispererBase):

        if autoreport_sections:
            for scan in autoreport_sections:
                try:
                    self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
                except Exception as e:
                    self.logger.error(
                        "VulnWhisperer wasn't able to report the vulnerabilities from the '{}' source".format(
                            self.config.get(scan, 'source')))
            return True
        return False
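sync_all now wraps each autoreport section in its own try/except, so one broken source no longer aborts the rest of the run. The shape of that pattern in isolation (a sketch; `sync_one` is a hypothetical stand-in for jira_sync):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('sync_all_sketch')

def sync_one(section):
    if section == 'qualys_vuln.broken_scan':
        raise RuntimeError('scanner unreachable')
    logger.info('synced %s', section)

sections = ['nessus.weekly', 'qualys_vuln.broken_scan', 'nessus.monthly']
for section in sections:
    try:
        sync_one(section)
    except Exception:
        # log and continue: the remaining sections still get processed
        logger.exception("unable to report vulnerabilities from '%s'", section)
```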


class vulnWhisperer(object):

    def __init__(self,
@ -1253,40 +1339,39 @@ class vulnWhisperer(object):
        self.scanname = scanname
        self.exit_code = 0

    def whisper_vulnerabilities(self):

        if self.profile == 'nessus':
            vw = vulnWhispererNessus(config=self.config,
                                     username=self.username,
                                     password=self.password,
                                     verbose=self.verbose,
                                     profile=self.profile)
            if vw:
                self.exit_code += vw.whisper_nessus()

        elif self.profile == 'qualys_web':
            vw = vulnWhispererQualys(config=self.config)
            if vw:
                self.exit_code += vw.process_web_assets()

        elif self.profile == 'openvas':
            vw_openvas = vulnWhispererOpenVAS(config=self.config)
            if vw_openvas:
                self.exit_code += vw_openvas.process_openvas_scans()

        elif self.profile == 'tenable':
            vw = vulnWhispererNessus(config=self.config,
                                     username=self.username,
                                     password=self.password,
                                     verbose=self.verbose,
                                     profile=self.profile)
            if vw:
                self.exit_code += vw.whisper_nessus()

        elif self.profile == 'qualys_vuln':
            vw = vulnWhispererQualysVuln(config=self.config)
            if vw:
                self.exit_code += vw.process_vuln_scans()

        elif self.profile == 'jira':
            # first check that the config fields exist; otherwise, create them
            vw = vulnWhispererJIRA(config=self.config)
            if vw:
                if not (self.source and self.scanname):
                    self.logger.info('No source/scan_name selected, all enabled scans will be synced')
                    success = vw.sync_all()
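One note on this guard: `return False` from an `__init__` cannot actually make the instance falsy (Python raises TypeError when `__init__` returns a non-None value), so `if vw:` passes whenever construction succeeds. A dict-based dispatch is a common alternative to the elif chain; a hedged sketch of that shape, where the handler names mirror the methods above and the classes are assumed importable from this module:

```python
# sketch: map each profile to a (class, method-name) pair instead of chained elifs
DISPATCH = {
    'nessus':      (vulnWhispererNessus,     'whisper_nessus'),
    'tenable':     (vulnWhispererNessus,     'whisper_nessus'),
    'qualys_web':  (vulnWhispererQualys,     'process_web_assets'),
    'qualys_vuln': (vulnWhispererQualysVuln, 'process_vuln_scans'),
    'openvas':     (vulnWhispererOpenVAS,    'process_openvas_scans'),
}

def whisper(profile, config):
    try:
        cls, method = DISPATCH[profile]
    except KeyError:
        raise ValueError('unknown profile: {}'.format(profile))
    scanner = cls(config=config)
    # look up the per-scanner entry point by name and run it
    return getattr(scanner, method)()
```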