36 Commits
1.8.0 ... 2to3

Author SHA1 Message Date
53d70ab0db Merge pull request #230 from nfalke-/2to3
2to3
2022-06-11 20:40:31 -05:00
54fa0ace8a formatting 2021-08-03 16:40:14 -05:00
273b17009a renamed detection date to last time detected 2021-08-03 16:39:58 -05:00
ff5f4cb331 renamed and cleaned columns 2021-08-03 16:39:24 -05:00
61539afa4d headers is unused 2021-08-03 16:26:37 -05:00
742a645190 moved dict_tracker assignments into creation 2021-08-03 14:29:00 -05:00
51234a569f cleaned newline formatting 2021-08-03 14:28:13 -05:00
5dad1ceb10 removed commented code 2021-08-03 14:25:15 -05:00
3db931f3eb removed unused constants 2021-08-03 14:07:34 -05:00
649ecd431b moved qualysReportFields class into qualysScanReport; it only consists of constants and they are unused outside of qualysScanReport 2021-08-03 14:01:38 -05:00
13a52a3e08 formatting 2021-08-03 13:59:37 -05:00
8403b35199 increased size to sys max size 2021-08-03 13:58:24 -05:00
68519d5648 fixed formatting 2021-08-03 13:15:14 -05:00
73342fdeb8 use get method for downloading report 2021-08-03 13:14:51 -05:00
183e3b3e72 removed useless open 2021-08-03 13:01:22 -05:00
e25141261c qualys 'about.php' query made mock tests fail, added a bit of logging to mock 2020-03-03 11:33:03 +01:00
8743b59147 modify /opt to /tmp, since /opt is usually root-owned, to avoid permission issues 2020-03-03 10:23:40 +01:00
c0e7ab9863 Pycharm indenting PEP8 2020-03-03 10:19:00 +01:00
97de805e0c modernize python2 to python3 applied 2020-03-03 08:48:00 +01:00
4974be02b4 fix of fix... 2020-02-21 16:17:00 +01:00
7fe2f9a5c1 casting port from jira local download to an int 2020-02-21 16:09:25 +01:00
f4634d03bd Merge pull request #206 from HASecuritySolutions/jira_ticket_download_attachment_data
Jira ticket download attachment data
2020-02-21 15:58:05 +01:00
e1ca9fadcd fixed issue where, when running all actions, a single failure exited the program 2020-02-21 15:50:14 +01:00
adb7700300 added an extra field to the Jira local download with affected assets in JSON format, for further processing in Splunk/ELK 2020-02-21 11:00:07 +01:00
ced0d4c2fc Hotfix #190 2020-02-04 16:47:37 +01:00
f483c76638 latest qualysapi version that supports python 2 is 6.0.0 2020-01-13 11:34:21 +01:00
f65116aec8 fix requirements issue, new version of qualysapi to be reviewed 2020-01-13 11:03:04 +01:00
bdcb6de4b2 Target CentOS 7 (issue #199) (#200) 2019-12-03 16:21:48 +01:00
af8e27d075 Bump requests from 2.18.3 to 2.20.0 (#196)
Bumps [requests](https://github.com/requests/requests) from 2.18.3 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v2.18.3...v2.20.0)

Signed-off-by: dependabot[bot] <support@github.com>
2019-12-03 16:20:36 +01:00
accf926ff7 fixed ELK7 logstash compatibility, #187 2019-09-16 15:35:34 +02:00
acf387bd0e added ELK versions supported (6 and 7) 2019-08-24 15:06:33 +02:00
ab7a91e020 Update frameworks_example.ini (#186) 2019-08-10 05:32:19 +02:00
a1a0d6b757 Merge pull request #182 from HASecuritySolutions/save_assets_no_DNS_record
[JIRA] added local file save with assets not resolving hostname
2019-06-18 12:05:49 +02:00
2fb089805c [JIRA] added local file save with assets not resolving hostname 2019-06-18 10:53:55 +02:00
6cf2a94431 Support tenable API keys (#176)
* support tenable API keys

* more flexible config support

* add nessus API key support

* fix whitespace
2019-05-02 10:26:51 +02:00
162636e60f Fix newlines in MAC Address field output (#178)
* fix newlines in all MAC Address field

* remove newline

* only cleanse if col exists
2019-05-02 08:58:18 +02:00
16 changed files with 2104 additions and 1724 deletions

View File

@ -1,4 +1,4 @@
FROM centos:latest
FROM centos:7
MAINTAINER Justin Henderson justin@hasecuritysolutions.com

View File

@ -30,7 +30,7 @@ Currently Supports
### Reporting Frameworks
- [X] [ELK](https://www.elastic.co/elk-stack)
- [X] [ELK (**v6**/**v7**)](https://www.elastic.co/elk-stack)
- [X] [Jira](https://www.atlassian.com/software/jira)
- [ ] [Splunk](https://www.splunk.com/)

View File

@ -83,6 +83,7 @@ def main():
enabled_sections = config.get_sections_with_attribute('enabled')
for section in enabled_sections:
try:
vw = vulnWhisperer(config=args.config,
profile=section,
verbose=args.verbose,
@ -91,6 +92,8 @@ def main():
source=args.source,
scanname=args.scanname)
exit_code += vw.whisper_vulnerabilities()
except Exception as e:
logger.error("VulnWhisperer was unable to perform the processing on '{}'".format(section))
else:
logger.info('Running vulnwhisperer for section {}'.format(args.section))
vw = vulnWhisperer(config=args.config,

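The hunk above is the behavioural change behind commit e1ca9fadcd: each enabled section now runs inside try/except, so one failing scanner no longer aborts the rest of the run. A minimal sketch of the pattern, with a hypothetical process_section() standing in for vulnWhisperer(...).whisper_vulnerabilities():

```python
# Sketch only: process_section() and the section names are stand-ins.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('vulnwhisperer')

def process_section(section):
    if section == 'qualys_web':
        raise RuntimeError('simulated scanner failure')
    logger.info('processed %s', section)
    return 0  # per-section exit code

def main(enabled_sections):
    exit_code = 0
    for section in enabled_sections:
        try:
            exit_code += process_section(section)
        except Exception:
            # log and move on to the next section instead of exiting
            logger.error("unable to perform the processing on '%s'", section)
    return exit_code

if __name__ == '__main__':
    raise SystemExit(main(['nessus', 'qualys_web', 'jira']))
```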
View File

@ -2,6 +2,8 @@
enabled=true
hostname=localhost
port=8834
access_key=
secret_key=
username=nessus_username
password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/
@ -13,6 +15,8 @@ verbose=true
enabled=true
hostname=cloud.tenable.com
port=443
access_key=
secret_key=
username=tenable.io_username
password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/
@ -37,7 +41,7 @@ max_retries = 10
template_id = 126024
[qualys_vuln]
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
#Reference https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf to find your API
enabled = true
hostname = qualysapi.qg2.apps.qualys.com
username = exampleuser

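The new access_key/secret_key fields sit alongside username/password, so a consumer must pick one credential pair. A sketch of that selection, inlining the [nessus] section from the example file above:

```python
# Sketch: prefer the API key pair, fall back to username/password.
import configparser

config = configparser.ConfigParser()
config.read_string("""
[nessus]
enabled=true
hostname=localhost
port=8834
access_key=
secret_key=
username=nessus_username
password=nessus_password
""")
section = config['nessus']

access_key = section.get('access_key', '').strip()
secret_key = section.get('secret_key', '').strip()

if access_key and secret_key:
    credentials = {'access_key': access_key, 'secret_key': secret_key}
else:
    # keys left empty, as in the template -> password auth
    credentials = {'username': section['username'],
                   'password': section['password']}
print(sorted(credentials))  # ['password', 'username']
```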
View File

@ -2,10 +2,12 @@
enabled=true
hostname=nessus
port=443
access_key=
secret_key=
username=nessus_username
password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/
db_path=/opt/VulnWhisperer/data/database
write_path=/tmp/VulnWhisperer/data/nessus/
db_path=/tmp/VulnWhisperer/data/database
trash=false
verbose=true
@ -13,10 +15,12 @@ verbose=true
enabled=true
hostname=tenable
port=443
access_key=
secret_key=
username=tenable.io_username
password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/
db_path=/opt/VulnWhisperer/data/database
write_path=/tmp/VulnWhisperer/data/tenable/
db_path=/tmp/VulnWhisperer/data/database
trash=false
verbose=true
@ -26,8 +30,8 @@ enabled = false
hostname = qualys_web
username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_web/
db_path=/opt/VulnWhisperer/data/database
write_path=/tmp/VulnWhisperer/data/qualys_web/
db_path=/tmp/VulnWhisperer/data/database
verbose=true
# Set the maximum number of retries each connection should attempt.
@ -42,8 +46,8 @@ enabled = true
hostname = qualys_vuln
username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/qualys_vuln/
db_path=/opt/VulnWhisperer/data/database
write_path=/tmp/VulnWhisperer/data/qualys_vuln/
db_path=/tmp/VulnWhisperer/data/database
verbose=true
[detectify]
@ -54,8 +58,8 @@ hostname = detectify
username = exampleuser
#password variable used as secretKey
password = examplepass
write_path =/opt/VulnWhisperer/data/detectify/
db_path = /opt/VulnWhisperer/data/database
write_path =/tmp/VulnWhisperer/data/detectify/
db_path = /tmp/VulnWhisperer/data/database
verbose = true
[openvas]
@ -64,8 +68,8 @@ hostname = openvas
port = 4000
username = exampleuser
password = examplepass
write_path=/opt/VulnWhisperer/data/openvas/
db_path=/opt/VulnWhisperer/data/database
write_path=/tmp/VulnWhisperer/data/openvas/
db_path=/tmp/VulnWhisperer/data/database
verbose=true
[jira]
@ -73,8 +77,8 @@ enabled = false
hostname = jira-host
username = username
password = password
write_path = /opt/VulnWhisperer/data/jira/
db_path = /opt/VulnWhisperer/data/database
write_path = /tmp/VulnWhisperer/data/jira/
db_path = /tmp/VulnWhisperer/data/database
verbose = true
dns_resolv = False

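All write_path/db_path values move from /opt to /tmp, matching commit 8743b59147: /opt is typically root-owned, so an unprivileged test run could not create its data directories. A quick Python 3 check of that assumption, reusing a path from the config above:

```python
import os

write_path = '/tmp/VulnWhisperer/data/nessus/'
os.makedirs(write_path, exist_ok=True)  # succeeds without root under /tmp
print(os.access(write_path, os.W_OK))   # True for the current user
```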
View File

@ -1,12 +1,12 @@
pandas==0.20.3
setuptools==40.4.3
pytz==2017.2
Requests==2.18.3
Requests==2.20.0
lxml==4.1.1
future-fstrings
bs4
jira
bottle
coloredlogs
qualysapi>=5.1.0
qualysapi==6.0.0
httpretty

View File

@ -0,0 +1,231 @@
{
"index_patterns": "logstash-vulnwhisperer-*",
"mappings": {
"properties": {
"@timestamp": {
"type": "date"
},
"@version": {
"type": "keyword"
},
"asset": {
"type": "text",
"norms": false,
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"asset_uuid": {
"type": "keyword"
},
"assign_ip": {
"type": "ip"
},
"category": {
"type": "keyword"
},
"cve": {
"type": "keyword"
},
"cvss_base": {
"type": "float"
},
"cvss_temporal_vector": {
"type": "keyword"
},
"cvss_temporal": {
"type": "float"
},
"cvss_vector": {
"type": "keyword"
},
"cvss": {
"type": "float"
},
"cvss3_base": {
"type": "float"
},
"cvss3_temporal_vector": {
"type": "keyword"
},
"cvss3_temporal": {
"type": "float"
},
"cvss3_vector": {
"type": "keyword"
},
"cvss3": {
"type": "float"
},
"description": {
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"norms": false,
"type": "text"
},
"dns": {
"type": "keyword"
},
"exploitability": {
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"norms": false,
"type": "text"
},
"fqdn": {
"type": "keyword"
},
"geoip": {
"dynamic": true,
"type": "object",
"properties": {
"ip": {
"type": "ip"
},
"latitude": {
"type": "float"
},
"location": {
"type": "geo_point"
},
"longitude": {
"type": "float"
}
}
},
"history_id": {
"type": "keyword"
},
"host": {
"type": "keyword"
},
"host_end": {
"type": "date"
},
"host_start": {
"type": "date"
},
"impact": {
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"norms": false,
"type": "text"
},
"ip_status": {
"type": "keyword"
},
"ip": {
"type": "ip"
},
"last_updated": {
"type": "date"
},
"operating_system": {
"type": "keyword"
},
"path": {
"type": "keyword"
},
"pci_vuln": {
"type": "keyword"
},
"plugin_family": {
"type": "keyword"
},
"plugin_id": {
"type": "keyword"
},
"plugin_name": {
"type": "keyword"
},
"plugin_output": {
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"norms": false,
"type": "text"
},
"port": {
"type": "integer"
},
"protocol": {
"type": "keyword"
},
"results": {
"type": "text"
},
"risk_number": {
"type": "integer"
},
"risk_score_name": {
"type": "keyword"
},
"risk_score": {
"type": "float"
},
"risk": {
"type": "keyword"
},
"scan_id": {
"type": "keyword"
},
"scan_name": {
"type": "keyword"
},
"scan_reference": {
"type": "keyword"
},
"see_also": {
"type": "keyword"
},
"solution": {
"type": "keyword"
},
"source": {
"type": "keyword"
},
"ssl": {
"type": "keyword"
},
"synopsis": {
"type": "keyword"
},
"system_type": {
"type": "keyword"
},
"tags": {
"type": "keyword"
},
"threat": {
"type": "text"
},
"type": {
"type": "keyword"
},
"vendor_reference": {
"type": "keyword"
},
"vulnerability_state": {
"type": "keyword"
}
}
}
}

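The new file above is an Elasticsearch index template in the typeless (ELK 7) mapping format, matching the ELK v6/v7 support noted in the commit log. A sketch of installing it through the legacy _template endpoint; the file name and host are assumptions:

```python
import json
import requests

with open('logstash-vulnwhisperer-template_elk7.json') as f:  # assumed name
    template = json.load(f)  # {"index_patterns": ..., "mappings": ...}

resp = requests.put(
    'http://localhost:9200/_template/logstash-vulnwhisperer',
    json=template,
    timeout=10,
)
resp.raise_for_status()
print(resp.json())  # {"acknowledged": true} on success
```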
View File

@ -1,5 +1,6 @@
#!/usr/bin/env python
from __future__ import absolute_import
from setuptools import setup, find_packages
setup(

View File

@ -1,3 +1,4 @@
from __future__ import absolute_import
import sys
import logging
@ -5,7 +6,7 @@ import logging
if sys.version_info > (3, 0):
import configparser as cp
else:
import ConfigParser as cp
import six.moves.configparser as cp
class vwConfig(object):

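The Python 2 branch now reaches ConfigParser through six.moves, which aliases the stdlib module renamed in Python 3, so the same file imports cleanly under both interpreters. A self-contained illustration of the pattern:

```python
from __future__ import absolute_import
import sys

if sys.version_info > (3, 0):
    import configparser as cp            # Python 3 name
else:
    import six.moves.configparser as cp  # resolves to ConfigParser on 2

parser = cp.RawConfigParser()
parser.add_section('nessus')
parser.set('nessus', 'enabled', 'true')
print(parser.get('nessus', 'enabled'))  # -> true
```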
View File

@ -1,3 +1,4 @@
from __future__ import absolute_import
import json
import logging
import sys
@ -24,15 +25,19 @@ class NessusAPI(object):
EXPORT_STATUS = EXPORT + '/{file_id}/status'
EXPORT_HISTORY = EXPORT + '?history_id={history_id}'
def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True, profile=None, access_key=None, secret_key=None):
self.logger = logging.getLogger('NessusAPI')
if verbose:
self.logger.setLevel(logging.DEBUG)
if username is None or password is None:
raise Exception('ERROR: Missing username or password.')
if not all((username, password)) and not all((access_key, secret_key)):
raise Exception('ERROR: Missing username, password or API keys.')
self.profile = profile
self.user = username
self.password = password
self.api_keys = False
self.access_key = access_key
self.secret_key = secret_key
self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
self.verbose = verbose
@ -52,7 +57,13 @@ class NessusAPI(object):
'X-Cookie': None
}
if all((self.access_key, self.secret_key)):
self.logger.debug('Using {} API keys'.format(self.profile))
self.api_keys = True
self.session.headers['X-ApiKeys'] = 'accessKey={}; secretKey={}'.format(self.access_key, self.secret_key)
else:
self.login()
self.scans = self.get_scans()
self.scan_ids = self.get_scan_ids()
@ -78,8 +89,10 @@ class NessusAPI(object):
if url == self.base + self.SESSION:
break
try:
self.login()
timeout += 1
if self.api_keys:
continue
self.login()
self.logger.info('Token refreshed')
except Exception as e:
self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
@ -114,7 +127,7 @@ class NessusAPI(object):
data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json_output=True)
return data['history']
def download_scan(self, scan_id=None, history=None, export_format="", profile=""):
def download_scan(self, scan_id=None, history=None, export_format=""):
running = True
counter = 0
@ -127,6 +140,7 @@ class NessusAPI(object):
req = self.request(query, data=json.dumps(data), method='POST', json_output=True)
try:
file_id = req['file']
if self.profile == 'nessus':
token_id = req['token'] if 'token' in req else req['temp_token']
except Exception as e:
self.logger.error('{}'.format(str(e)))
@ -143,7 +157,7 @@ class NessusAPI(object):
if counter % 60 == 0:
self.logger.info("Completed: {}".format(counter))
self.logger.info("Done: {}".format(counter))
if profile == 'tenable':
if self.profile == 'tenable' or self.api_keys:
content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
else:
content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)
@ -152,7 +166,7 @@ class NessusAPI(object):
def get_utc_from_local(self, date_time, local_tz=None, epoch=True):
date_time = datetime.fromtimestamp(date_time)
if local_tz is None:
local_tz = pytz.timezone('US/Central')
local_tz = pytz.timezone('UTC')
else:
local_tz = pytz.timezone(local_tz)
local_time = local_tz.normalize(local_tz.localize(date_time))

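Three behavioural changes land in this file: API-key authentication via an X-ApiKeys header (no session login), a token-refresh loop that skips re-login when keys are in use (a 401 then means the keys themselves are bad), and a default timezone of UTC rather than US/Central. A sketch of the header-based auth with placeholder keys and host; verify=False assumes the usual self-signed Nessus certificate:

```python
import requests

access_key = 'ACCESS_KEY_HERE'   # placeholder
secret_key = 'SECRET_KEY_HERE'   # placeholder
base = 'https://localhost:8834'  # placeholder

session = requests.Session()
session.headers.update({
    'Accept': 'application/json',
    # same header format the diff builds above
    'X-ApiKeys': 'accessKey={}; secretKey={}'.format(access_key, secret_key),
})

resp = session.get(base + '/scans', verify=False)
print(resp.status_code)
```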
View File

@ -1,5 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
__author__ = 'Austin Taylor'
import datetime as dt

View File

@ -1,5 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
__author__ = 'Nathan Young'
import logging
@ -18,9 +19,9 @@ class qualysWhisperAPI(object):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
self.qgc = qualysapi.connect(config, 'qualys_vuln')
self.qgc = qualysapi.connect(config_file=config, section='qualys_vuln')
# Fail early if we can't make a request or auth is incorrect
self.qgc.request('about.php')
# self.qgc.request('about.php')
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))

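This pairs with the requirements.txt pin above: qualysapi 6.0.0 takes keyword arguments on connect(), and the eager about.php health check is commented out because it broke the mocked tests, so bad credentials now surface on the first real request instead. The connect call in isolation (config path is a placeholder):

```python
import qualysapi

qgc = qualysapi.connect(config_file='config.ini', section='qualys_vuln')
print(qgc.server)  # auth errors appear later, on the first real request
```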
View File

@ -1,5 +1,8 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from six.moves import range
from functools import reduce
__author__ = 'Austin Taylor'
from lxml import objectify
@ -14,24 +17,16 @@ import os
import csv
import logging
import dateutil.parser as dp
csv.field_size_limit(sys.maxsize)
class qualysWhisperAPI(object):
COUNT_WEBAPP = '/count/was/webapp'
COUNT_WASSCAN = '/count/was/wasscan'
DELETE_REPORT = '/delete/was/report/{report_id}'
GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
QPS_REST_3 = '/qps/rest/3.0'
REPORT_DETAILS = '/get/was/report/{report_id}'
REPORT_STATUS = '/status/was/report/{report_id}'
REPORT_CREATE = '/create/was/report'
REPORT_DOWNLOAD = '/download/was/report/{report_id}'
SCAN_DETAILS = '/get/was/wasscan/{scan_id}'
SCAN_DOWNLOAD = '/download/was/wasscan/{scan_id}'
SEARCH_REPORTS = '/search/was/report'
SEARCH_WEB_APPS = '/search/was/webapp'
SEARCH_WAS_SCAN = '/search/was/wasscan'
VERSION = '/qps/rest/portal/version'
def __init__(self, config=None):
self.logger = logging.getLogger('qualysWhisperAPI')
@ -41,10 +36,6 @@ class qualysWhisperAPI(object):
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
self.headers = {
#"content-type": "text/xml"}
"Accept" : "application/json",
"Content-Type": "application/json"}
self.config_parse = qcconf.QualysConnectConfig(config, 'qualys_web')
try:
self.template_id = self.config_parse.get_template_id()
@ -69,14 +60,8 @@ class qualysWhisperAPI(object):
def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
report_xml = E.ServiceRequest(
E.filters(
E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status
),
),
E.preferences(
E.startFromOffset(str(offset)),
E.limitResults(str(limit))
),
E.filters(E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status)),
E.preferences(E.startFromOffset(str(offset)), E.limitResults(str(limit))),
)
return report_xml
@ -115,8 +100,10 @@ class qualysWhisperAPI(object):
if i % limit == 0:
if (total - i) < limit:
qualys_api_limit = total - i
self.logger.info('Making a request with a limit of {} at offset {}'.format((str(qualys_api_limit)), str(i + 1)))
scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
self.logger.info('Making a request with a limit of {} at offset {}'
.format((str(qualys_api_limit)), str(i + 1)))
scan_info = self.get_scan_info(
limit=qualys_api_limit, offset=i + 1, status=status)
_records.append(scan_info)
self.logger.debug('Converting XML to DataFrame')
dataframes = [self.xml_parser(xml) for xml in _records]
@ -133,7 +120,8 @@ class qualysWhisperAPI(object):
return self.qgc.request(self.REPORT_STATUS.format(report_id=report_id))
def download_report(self, report_id):
return self.qgc.request(self.REPORT_DOWNLOAD.format(report_id=report_id))
return self.qgc.request(
self.REPORT_DOWNLOAD.format(report_id=report_id), http_method='get')
def generate_scan_report_XML(self, scan_id):
"""Generates a CSV report for an asset based on template defined in .ini file"""
@ -145,20 +133,8 @@ class qualysWhisperAPI(object):
E.format('CSV'),
#type is not needed, as the template already has it
E.type('WAS_SCAN_REPORT'),
E.template(
E.id(self.template_id)
),
E.config(
E.scanReport(
E.target(
E.scans(
E.WasScan(
E.id(scan_id)
)
),
),
),
)
E.template(E.id(self.template_id)),
E.config(E.scanReport(E.target(E.scans(E.WasScan(E.id(scan_id))))))
)
)
)
@ -175,95 +151,14 @@ class qualysWhisperAPI(object):
def delete_report(self, report_id):
return self.qgc.request(self.DELETE_REPORT.format(report_id=report_id))
class qualysReportFields:
CATEGORIES = ['VULNERABILITY',
'SENSITIVECONTENT',
'INFORMATION_GATHERED']
# URL Vulnerability Information
VULN_BLOCK = [
CATEGORIES[0],
'ID',
'QID',
'Url',
'Param',
'Function',
'Form Entry Point',
'Access Path',
'Authentication',
'Ajax Request',
'Ajax Request ID',
'Ignored',
'Ignore Reason',
'Ignore Date',
'Ignore User',
'Ignore Comments',
'First Time Detected',
'Last Time Detected',
'Last Time Tested',
'Times Detected',
'Payload #1',
'Request Method #1',
'Request URL #1',
'Request Headers #1',
'Response #1',
'Evidence #1',
]
INFO_HEADER = [
'Vulnerability Category',
'ID',
'QID',
'Response #1',
'Last Time Detected',
]
INFO_BLOCK = [
CATEGORIES[2],
'ID',
'QID',
'Results',
'Detection Date',
]
QID_HEADER = [
'QID',
'Id',
'Title',
'Category',
'Severity Level',
'Groups',
'OWASP',
'WASC',
'CWE',
'CVSS Base',
'CVSS Temporal',
'Description',
'Impact',
'Solution',
]
GROUP_HEADER = ['GROUP', 'Name', 'Category']
OWASP_HEADER = ['OWASP', 'Code', 'Name']
WASC_HEADER = ['WASC', 'Code', 'Name']
SCAN_META = ['Web Application Name', 'URL', 'Owner', 'Scope', 'Operating System']
CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']
class qualysUtils:
def __init__(self):
self.logger = logging.getLogger('qualysUtils')
def grab_section(
self,
report,
section,
end=[],
pop_last=False,
):
def grab_section(self, report, section, end=[], pop_last=False):
temp_list = []
max_col_count = 0
with open(report, 'rb') as csvfile:
with open(report, 'rt') as csvfile:
q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
for line in q_report:
if set(line) == set(section):
@ -289,44 +184,53 @@ class qualysUtils:
return _data
class qualysScanReport:
# URL Vulnerability Information
WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')
CATEGORIES = ['VULNERABILITY', 'SENSITIVECONTENT', 'INFORMATION_GATHERED']
WEB_SCAN_VULN_HEADER = list(WEB_SCAN_VULN_BLOCK)
WEB_SCAN_VULN_HEADER[WEB_SCAN_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
'Vulnerability Category'
WEB_SCAN_BLOCK = [
"ID", "Detection ID", "QID", "Url", "Param/Cookie", "Function",
"Form Entry Point", "Access Path", "Authentication", "Ajax Request",
"Ajax Request ID", "Ignored", "Ignore Reason", "Ignore Date", "Ignore User",
"Ignore Comments", "Detection Date", "Payload #1", "Request Method #1",
"Request URL #1", "Request Headers #1", "Response #1", "Evidence #1",
"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
"Info#1", "CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector",
"Request Body #1"
]
WEB_SCAN_VULN_BLOCK = [CATEGORIES[0]] + WEB_SCAN_BLOCK
WEB_SCAN_SENSITIVE_BLOCK = [CATEGORIES[1]] + WEB_SCAN_BLOCK
WEB_SCAN_SENSITIVE_HEADER = list(WEB_SCAN_VULN_HEADER)
WEB_SCAN_SENSITIVE_HEADER.insert(WEB_SCAN_SENSITIVE_HEADER.index('Url'
), 'Content')
WEB_SCAN_HEADER = ["Vulnerability Category"] + WEB_SCAN_BLOCK
WEB_SCAN_HEADER[WEB_SCAN_HEADER.index("Detection Date")] = "Last Time Detected"
WEB_SCAN_SENSITIVE_BLOCK = list(WEB_SCAN_SENSITIVE_HEADER)
WEB_SCAN_SENSITIVE_BLOCK.insert(WEB_SCAN_SENSITIVE_BLOCK.index('QID'), 'Detection ID')
WEB_SCAN_SENSITIVE_BLOCK[WEB_SCAN_SENSITIVE_BLOCK.index('Vulnerability Category'
)] = qualysReportFields.CATEGORIES[1]
WEB_SCAN_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
WEB_SCAN_INFO_HEADER.insert(WEB_SCAN_INFO_HEADER.index('QID'), 'Detection ID')
WEB_SCAN_INFO_BLOCK = [
"INFORMATION_GATHERED", "ID", "Detection ID", "QID", "Results", "Detection Date",
"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
"Info#1"
]
WEB_SCAN_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
WEB_SCAN_INFO_BLOCK.insert(WEB_SCAN_INFO_BLOCK.index('QID'), 'Detection ID')
WEB_SCAN_INFO_HEADER = [
"Vulnerability Category", "ID", "Detection ID", "QID", "Results", "Last Time Detected",
"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
"Info#1"
]
QID_HEADER = list(qualysReportFields.QID_HEADER)
GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
WASC_HEADER = list(qualysReportFields.WASC_HEADER)
SCAN_META = list(qualysReportFields.SCAN_META)
CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)
QID_HEADER = [
"QID", "Id", "Title", "Category", "Severity Level", "Groups", "OWASP", "WASC",
"CWE", "CVSS Base", "CVSS Temporal", "Description", "Impact", "Solution",
"CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector"
]
GROUP_HEADER = ['GROUP', 'Name', 'Category']
OWASP_HEADER = ['OWASP', 'Code', 'Name']
WASC_HEADER = ['WASC', 'Code', 'Name']
SCAN_META = [
"Web Application Name", "URL", "Owner", "Scope", "ID", "Tags",
"Custom Attributes"
]
CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']
def __init__(
self,
config=None,
file_in=None,
file_stream=False,
delimiter=',',
quotechar='"',
):
def __init__(self, config=None, file_in=None,
file_stream=False, delimiter=',', quotechar='"'):
self.logger = logging.getLogger('qualysScanReport')
self.file_in = file_in
self.file_stream = file_stream
@ -337,71 +241,79 @@ class qualysScanReport:
try:
self.qw = qualysWhisperAPI(config=config)
except Exception as e:
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
self.logger.error(
'Could not load config! Please check settings. Error: {}'.format(
str(e)))
if file_stream:
self.open_file = file_in.splitlines()
elif file_in:
self.open_file = open(file_in, 'rb')
self.downloaded_file = None
def grab_sections(self, report):
all_dataframes = []
dict_tracker = {}
with open(report, 'rb') as csvfile:
dict_tracker['WEB_SCAN_VULN_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
return {
'WEB_SCAN_VULN_BLOCK': pd.DataFrame(
self.utils.grab_section(
report,
self.WEB_SCAN_VULN_BLOCK,
end=[
self.WEB_SCAN_SENSITIVE_BLOCK,
self.WEB_SCAN_INFO_BLOCK],
end=[self.WEB_SCAN_SENSITIVE_BLOCK, self.WEB_SCAN_INFO_BLOCK],
pop_last=True),
columns=self.WEB_SCAN_VULN_HEADER)
dict_tracker['WEB_SCAN_SENSITIVE_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_HEADER),
'WEB_SCAN_SENSITIVE_BLOCK': pd.DataFrame(
self.utils.grab_section(report,
self.WEB_SCAN_SENSITIVE_BLOCK,
end=[
self.WEB_SCAN_INFO_BLOCK,
self.WEB_SCAN_SENSITIVE_BLOCK],
end=[self.WEB_SCAN_INFO_BLOCK, self.WEB_SCAN_SENSITIVE_BLOCK],
pop_last=True),
columns=self.WEB_SCAN_SENSITIVE_HEADER)
dict_tracker['WEB_SCAN_INFO_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_HEADER),
'WEB_SCAN_INFO_BLOCK': pd.DataFrame(
self.utils.grab_section(
report,
self.WEB_SCAN_INFO_BLOCK,
end=[self.QID_HEADER],
pop_last=True),
columns=self.WEB_SCAN_INFO_HEADER)
dict_tracker['QID_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_INFO_HEADER),
'QID_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.QID_HEADER,
end=[self.GROUP_HEADER],
pop_last=True),
columns=self.QID_HEADER)
dict_tracker['GROUP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.QID_HEADER),
'GROUP_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.GROUP_HEADER,
end=[self.OWASP_HEADER],
pop_last=True),
columns=self.GROUP_HEADER)
dict_tracker['OWASP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.GROUP_HEADER),
'OWASP_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.OWASP_HEADER,
end=[self.WASC_HEADER],
pop_last=True),
columns=self.OWASP_HEADER)
dict_tracker['WASC_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
self.WASC_HEADER, end=[['APPENDIX']],
columns=self.OWASP_HEADER),
'WASC_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.WASC_HEADER,
end=[['APPENDIX']],
pop_last=True),
columns=self.WASC_HEADER)
dict_tracker['SCAN_META'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WASC_HEADER),
'SCAN_META': pd.DataFrame(
self.utils.grab_section(report,
self.SCAN_META,
end=[self.CATEGORY_HEADER],
pop_last=True),
columns=self.SCAN_META)
dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.SCAN_META),
'CATEGORY_HEADER': pd.DataFrame(
self.utils.grab_section(report,
self.CATEGORY_HEADER),
columns=self.CATEGORY_HEADER)
all_dataframes.append(dict_tracker)
return all_dataframes
}
def data_normalizer(self, dataframes):
"""
@ -409,12 +321,21 @@ class qualysScanReport:
:param dataframes:
:return:
"""
df_dict = dataframes[0]
merged_df = pd.concat([df_dict['WEB_SCAN_VULN_BLOCK'], df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
df_dict['WEB_SCAN_INFO_BLOCK']], axis=0,
ignore_index=False)
merged_df = pd.merge(merged_df, df_dict['QID_HEADER'], left_on='QID',
right_on='Id')
df_dict = dataframes
merged_df = pd.concat([
df_dict['WEB_SCAN_VULN_BLOCK'],
df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
df_dict['WEB_SCAN_INFO_BLOCK']
], axis=0, ignore_index=False)
merged_df = pd.merge(
merged_df,
df_dict['QID_HEADER'].drop(
#these columns always seem to be the same as what we're merging into
['CVSS V3 Attack Vector', 'CVSS V3 Base', 'CVSS V3 Temporal'],
axis=1),
left_on='QID', right_on='Id'
)
if 'Content' not in merged_df:
merged_df['Content'] = ''
@ -431,8 +352,11 @@ class qualysScanReport:
merged_df = merged_df.assign(**df_dict['SCAN_META'].to_dict(orient='records')[0])
merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'], how='left', left_on=['Category', 'Severity Level'],
right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev'))
merged_df = pd.merge(
merged_df, df_dict['CATEGORY_HEADER'],
how='left', left_on=['Category', 'Severity Level'],
right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev')
)
merged_df = merged_df.replace('N/A', '').fillna('')

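grab_sections() now returns the dict directly instead of a single-element list, and data_normalizer() stacks the three detection blocks before enriching each row from the QID table (dropping the lookup's duplicate CVSS V3 columns first). The core concat-then-merge on toy frames with made-up values:

```python
import pandas as pd

vulns = pd.DataFrame({'QID': ['150001'], 'Url': ['https://app.example/login']})
info = pd.DataFrame({'QID': ['150002'], 'Url': ['https://app.example/']})
qid_lookup = pd.DataFrame({
    'Id': ['150001', '150002'],
    'Title': ['Reflected XSS', 'Server banner'],
    'CVSS Base': ['7.5', ''],
})

# stack the per-category blocks, then enrich each row from the QID table
merged = pd.concat([vulns, info], axis=0, ignore_index=False)
merged = pd.merge(merged, qid_lookup, left_on='QID', right_on='Id')
merged = merged.replace('N/A', '').fillna('')
print(merged[['QID', 'Url', 'Title', 'CVSS Base']])
```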
View File

@ -1,15 +1,18 @@
from __future__ import absolute_import
import json
import os
from datetime import datetime, date, timedelta
from datetime import datetime, date
from jira import JIRA
import requests
import logging
from bottle import template
import re
from six.moves import range
class JiraAPI(object):
def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True, max_time_window=12, decommission_time_window=3):
def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True,
max_time_window=12, decommission_time_window=3):
self.logger = logging.getLogger('JiraAPI')
if debug:
self.logger.setLevel(logging.DEBUG)
@ -29,26 +32,31 @@ class JiraAPI(object):
self.template_path = 'vulnwhisp/reporting/resources/ticket.tpl'
self.max_ips_ticket = 30
self.attachment_filename = "vulnerable_assets.txt"
self.max_time_tracking = max_time_window #in months
self.max_time_tracking = max_time_window # in months
if path:
self.download_tickets(path)
else:
self.logger.warn("No local path specified, skipping Jira ticket download.")
self.max_decommission_time = decommission_time_window #in months
self.max_decommission_time = decommission_time_window # in months
# [HIGIENE] close tickets older than 12 months as obsolete (max_time_window defined)
if clean_obsolete:
self.close_obsolete_tickets()
# deletes the tag "server_decommission" from those tickets closed <=3 months ago
self.decommission_cleanup()
self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known vulnerability might be the one reported).
- In the case of the team accepting the risk and wanting to close the ticket, please add the label "*risk_accepted*" to the ticket before closing it.
- If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing it.
- If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add the label "*false_positive*" before closing it; we will review it and report it to the vendor.
self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been \
fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known \
vulnerability might be the one reported).
- In the case of the team accepting the risk and wanting to close the ticket, please add the label \
"*risk_accepted*" to the ticket before closing it.
- If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing \
it.
- If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add \
the label "*false_positive*" before closing it; we will review it and report it to the vendor.
If you have further doubts, please contact the Security Team.'''
def create_ticket(self, title, desc, project="IS", components=[], tags=[], attachment_contents = []):
def create_ticket(self, title, desc, project="IS", components=[], tags=[], attachment_contents=[]):
labels = ['vulnerability_management']
for tag in tags:
labels.append(str(tag))
@ -62,8 +70,8 @@ class JiraAPI(object):
for c in project_obj.components:
if component == c.name:
self.logger.debug("resolved component name {} to id {}".format(c.name, c.id))
components_ticket.append({ "id": c.id })
exists=True
components_ticket.append({"id": c.id})
exists = True
if not exists:
self.logger.error("Error creating Ticket: component {} not found".format(component))
return 0
@ -82,7 +90,7 @@ class JiraAPI(object):
return new_issue
#Basic JIRA Metrics
# Basic JIRA Metrics
def metrics_open_tickets(self, project=None):
jql = "labels= vulnerability_management and resolution = Unresolved"
if project:
@ -91,13 +99,15 @@ class JiraAPI(object):
return len(self.jira.search_issues(jql, maxResults=0))
def metrics_closed_tickets(self, project=None):
jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(self.max_time_tracking)
jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(
self.max_time_tracking)
if project:
jql += " and (project='{}')".format(project)
return len(self.jira.search_issues(jql, maxResults=0))
def sync(self, vulnerabilities, project, components=[]):
#JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution, ips, risk, references]
# JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution,
# ips, risk, references]
self.logger.info("JIRA Sync started")
for vuln in vulnerabilities:
@ -106,7 +116,8 @@ class JiraAPI(object):
if " " in vuln['scan_name']:
vuln['scan_name'] = "_".join(vuln['scan_name'].split(" "))
# we exclude from the vulnerabilities to report those assets that already exist with *risk_accepted*/*server_decommission*
# we exclude from the vulnerabilities to report those assets that already exist
# with *risk_accepted*/*server_decommission*
vuln = self.exclude_accepted_assets(vuln)
# make sure after exclusion of risk_accepted assets there are still assets
@ -131,13 +142,17 @@ class JiraAPI(object):
# create local text file with assets, attach it to ticket
if len(vuln['ips']) > self.max_ips_ticket:
attachment_contents = vuln['ips']
vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
vuln['ips'] = [
"Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
assets=len(attachment_contents))]
try:
tpl = template(self.template_path, vuln)
except Exception as e:
self.logger.error('Exception templating: {}'.format(str(e)))
return 0
self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components, tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']], attachment_contents = attachment_contents)
self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components,
tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']],
attachment_contents=attachment_contents)
else:
self.logger.info("Ignoring vulnerability as all assets are already reported in a risk_accepted ticket")
@ -153,34 +168,39 @@ class JiraAPI(object):
labels = [vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability']
if not self.excluded_tickets:
jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
self.excluded_tickets = self.jira.search_issues(jql, maxResults=0)
title = vuln['title']
#WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
#it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
# WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
# it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
self.logger.info("Comparing vulnerability to risk_accepted tickets")
assets_to_exclude = []
tickets_excluded_assets = []
for index in range(len(self.excluded_tickets)):
checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.excluded_tickets[index])
checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(
self.excluded_tickets[index])
if title.encode('ascii') == checking_title.encode('ascii'):
if checking_assets:
#checking_assets is a list, we add to our full list for later delete all assets
assets_to_exclude+=checking_assets
# checking_assets is a list, we add to our full list for later delete all assets
assets_to_exclude += checking_assets
tickets_excluded_assets.append(checking_ticketid)
if assets_to_exclude:
assets_to_remove = []
self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(', '.join(tickets_excluded_assets)))
self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(
', '.join(tickets_excluded_assets)))
self.logger.debug("Original assets: {}".format(vuln['ips']))
#assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial
# assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial
for exclusion in assets_to_exclude:
# for efficiency, we walk the backwards the array of ips from the scanners, as we will be popping out the matches
# and we don't want it to affect the rest of the processing (otherwise, it would miss the asset right after the removed one)
for index in range(len(vuln['ips']))[::-1]:
if exclusion == vuln['ips'][index].split(" - ")[0]:
self.logger.debug("Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index], title))
self.logger.debug(
"Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index],
title))
vuln['ips'].pop(index)
self.logger.debug("Modified assets: {}".format(vuln['ips']))
@ -192,67 +212,84 @@ class JiraAPI(object):
Returns [exists (bool), is equal (bool), ticketid (str), assets (array)]
'''
# we need to return if the vulnerability has already been reported and the ID of the ticket for further processing
#function returns array [duplicated(bool), update(bool), ticketid, ticket_assets]
# function returns array [duplicated(bool), update(bool), ticketid, ticket_assets]
title = vuln['title']
labels = [vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability']
#list(set()) to remove duplicates
# list(set()) to remove duplicates
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips']))))
if not self.all_tickets:
self.logger.info("Retrieving all JIRA tickets with the following tags {}".format(labels))
# we want to check all JIRA tickets, to include tickets moved to other queues
# will exclude tickets older than 12 months, old tickets will get closed for higiene and recreated if still vulnerable
jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
self.all_tickets = self.jira.search_issues(jql, maxResults=0)
#WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
#it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
# WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
# it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
self.logger.info("Comparing Vulnerabilities to created tickets")
for index in range(len(self.all_tickets)):
checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.all_tickets[index])
# added "not risk_accepted", as if it is risk_accepted, we will create a new ticket excluding the accepted assets
if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(self.jira.issue(checking_ticketid)):
if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(
self.jira.issue(checking_ticketid)):
difference = list(set(assets).symmetric_difference(checking_assets))
#to check intersection - set(assets) & set(checking_assets)
# to check intersection - set(assets) & set(checking_assets)
if difference:
self.logger.info("Asset mismatch, ticket to update. Ticket ID: {}".format(checking_ticketid))
return False, True, checking_ticketid, checking_assets #this will automatically validate
return False, True, checking_ticketid, checking_assets # this will automatically validate
else:
self.logger.info("Confirmed duplicated. TickedID: {}".format(checking_ticketid))
return True, False, checking_ticketid, [] #this will automatically validate
return True, False, checking_ticketid, [] # this will automatically validate
return False, False, "", []
def ticket_get_unique_fields(self, ticket):
title = ticket.raw.get('fields', {}).get('summary').encode("ascii").strip()
ticketid = ticket.key.encode("ascii")
assets = []
try:
affected_assets_section = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0]
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
except Exception as e:
self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
assets = []
try:
assets = self.get_assets_from_description(ticket)
if not assets:
#check if attachment, if so, get assets from attachment
affected_assets_section = self.check_ips_attachment(ticket)
if affected_assets_section:
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
except Exception as e:
self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
# check if attachment, if so, get assets from attachment
assets = self.get_assets_from_attachment(ticket)
return ticketid, title, assets
def check_ips_attachment(self, ticket):
affected_assets_section = []
def get_assets_from_description(self, ticket, _raw=False):
# Get the assets as a string "host - protocol/port - hostname" separated by "\n"
# structure the text to have the same structure as the assets from the attachment
affected_assets = ""
try:
affected_assets = \
ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[
1].split("{panel}")[0].replace('\n', '').replace(' * ', '\n').replace('\n', '', 1)
except Exception as e:
self.logger.error(
"Unable to process the Ticket's 'Affected Assets'. Ticket ID: {}. Reason: {}".format(ticket, e))
if affected_assets:
if _raw:
# from line 406 check if the text in the panel corresponds to having added an attachment
if "added as an attachment" in affected_assets:
return False
return affected_assets
try:
# if _raw is not true, we return only the IPs of the affected assets
return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
except Exception as e:
self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
return False
def get_assets_from_attachment(self, ticket, _raw=False):
# Get the assets as a string "host - protocol/port - hostname" separated by "\n"
affected_assets = []
try:
fields = self.jira.issue(ticket.key).raw.get('fields', {})
attachments = fields.get('attachment', {})
affected_assets_section = ""
#we will make sure we get the latest version of the file
affected_assets = ""
# we will make sure we get the latest version of the file
latest = ''
attachment_id = ''
if attachments:
@ -265,12 +302,45 @@ class JiraAPI(object):
if latest < item.get('created'):
latest = item.get('created')
attachment_id = item.get('id')
affected_assets_section = self.jira.attachment(attachment_id).get()
affected_assets = self.jira.attachment(attachment_id).get()
except Exception as e:
self.logger.error("Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))
self.logger.error(
"Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))
return affected_assets_section
if affected_assets:
if _raw:
return affected_assets
try:
# if _raw is not true, we return only the IPs of the affected assets
affected_assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
return affected_assets
except Exception as e:
self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
return False
def parse_asset_to_json(self, asset):
hostname, protocol, port = "", "", ""
asset_info = asset.split(" - ")
ip = asset_info[0]
proto_port = asset_info[1]
# in case there is some case where hostname is not reported at all
if len(asset_info) == 3:
hostname = asset_info[2]
if proto_port != "N/A/N/A":
protocol, port = proto_port.split("/")
port = int(float(port))
asset_dict = {
"host": ip,
"protocol": protocol,
"port": port,
"hostname": hostname
}
return asset_dict
def clean_old_attachments(self, ticket):
fields = ticket.raw.get('fields')
@ -282,15 +352,15 @@ class JiraAPI(object):
def add_content_as_attachment(self, issue, contents):
try:
#Create the file locally with the data
# Create the file locally with the data
attachment_file = open(self.attachment_filename, "w")
attachment_file.write("\n".join(contents))
attachment_file.close()
#Push the created file to the ticket
# Push the created file to the ticket
attachment_file = open(self.attachment_filename, "rb")
self.jira.add_attachment(issue, attachment_file, self.attachment_filename)
attachment_file.close()
#remove the temp file
# remove the temp file
os.remove(self.attachment_filename)
self.logger.info("Added attachment successfully.")
except:
@ -300,21 +370,23 @@ class JiraAPI(object):
return True
def get_ticket_reported_assets(self, ticket):
#[METRICS] return a list with all the affected assets for that vulnerability (including already resolved ones)
return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b",str(self.jira.issue(ticket).raw))))
# [METRICS] return a list with all the affected assets for that vulnerability (including already resolved ones)
return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", str(self.jira.issue(ticket).raw))))
def get_resolution_time(self, ticket):
#get time a ticket took to be resolved
# get time a ticket took to be resolved
ticket_obj = self.jira.issue(ticket)
if self.is_ticket_resolved(ticket_obj):
ticket_data = ticket_obj.raw.get('fields')
#dates follow format '2018-11-06T10:36:13.849+0100'
created = [int(x) for x in ticket_data['created'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
resolved =[int(x) for x in ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
# dates follow format '2018-11-06T10:36:13.849+0100'
created = [int(x) for x in
ticket_data['created'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]
resolved = [int(x) for x in
ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]
start = datetime(created[0],created[1],created[2],created[3],created[4],created[5])
end = datetime(resolved[0],resolved[1],resolved[2],resolved[3],resolved[4],resolved[5])
return (end-start).days
start = datetime(created[0], created[1], created[2], created[3], created[4], created[5])
end = datetime(resolved[0], resolved[1], resolved[2], resolved[3], resolved[4], resolved[5])
return (end - start).days
else:
self.logger.error("Ticket {ticket} is not resolved, can't calculate resolution time".format(ticket=ticket))
@ -324,11 +396,11 @@ class JiraAPI(object):
# correct description will always be in the vulnerability to report, only needed to update description to new one
self.logger.info("Ticket {} exists, UPDATE requested".format(ticketid))
#for now, if a vulnerability has been accepted ('accepted_risk'), ticket is completely ignored and not updated (no new assets)
# for now, if a vulnerability has been accepted ('accepted_risk'), ticket is completely ignored and not updated (no new assets)
#TODO when vulnerability accepted, create a new ticket with only the non-accepted vulnerable assets
#this would require go through the downloaded tickets, check duplicates/accepted ones, and if so,
#check on their assets to exclude them from the new ticket
# TODO when vulnerability accepted, create a new ticket with only the non-accepted vulnerable assets
# this would require go through the downloaded tickets, check duplicates/accepted ones, and if so,
# check on their assets to exclude them from the new ticket
risk_accepted = False
ticket_obj = self.jira.issue(ticketid)
if self.is_ticket_resolved(ticket_obj):
@ -336,7 +408,7 @@ class JiraAPI(object):
return 0
self.reopen_ticket(ticketid=ticketid, comment=self.jira_still_vulnerable_comment)
#First will do the comparison of assets
# First will do the comparison of assets
ticket_obj.update()
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips']))))
difference = list(set(assets).symmetric_difference(ticket_assets))
@ -344,7 +416,7 @@ class JiraAPI(object):
comment = ''
added = ''
removed = ''
#put a comment with the assets that have been added/removed
# put a comment with the assets that have been added/removed
for asset in difference:
if asset in assets:
if not added:
@ -352,36 +424,39 @@ class JiraAPI(object):
added += '* {}\n'.format(asset)
elif asset in ticket_assets:
if not removed:
removed= '\nThe following assets *have been resolved*:\n'
removed = '\nThe following assets *have been resolved*:\n'
removed += '* {}\n'.format(asset)
comment = added + removed
#then will check if assets are too many that need to be added as an attachment
# then will check if assets are too many that need to be added as an attachment
attachment_contents = []
if len(vuln['ips']) > self.max_ips_ticket:
attachment_contents = vuln['ips']
vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
vuln['ips'] = [
"Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
assets=len(attachment_contents))]
#fill the ticket description template
# fill the ticket description template
try:
tpl = template(self.template_path, vuln)
except Exception as e:
self.logger.error('Exception updating assets: {}'.format(str(e)))
return 0
#proceed checking if it requires adding as an attachment
# proceed checking if it requires adding as an attachment
try:
#update attachment with hosts and delete the old versions
# update attachment with hosts and delete the old versions
if attachment_contents:
self.clean_old_attachments(ticket_obj)
self.add_content_as_attachment(ticket_obj, attachment_contents)
ticket_obj.update(description=tpl, comment=comment, fields={"labels":ticket_obj.fields.labels})
ticket_obj.update(description=tpl, comment=comment, fields={"labels": ticket_obj.fields.labels})
self.logger.info("Ticket {} updated successfully".format(ticketid))
self.add_label(ticketid, 'updated')
except Exception as e:
self.logger.error("Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid = ticketid, e=e))
self.logger.error(
"Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid=ticketid, e=e))
return 0
def add_label(self, ticketid, label):
@ -391,10 +466,11 @@ class JiraAPI(object):
ticket_obj.fields.labels.append(label)
try:
ticket_obj.update(fields={"labels":ticket_obj.fields.labels})
ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
self.logger.info("Added label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
except:
self.logger.error("Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
except Exception as e:
self.logger.error(
"Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
return 0
@ -405,10 +481,11 @@ class JiraAPI(object):
ticket_obj.fields.labels.remove(label)
try:
ticket_obj.update(fields={"labels":ticket_obj.fields.labels})
ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
self.logger.info("Removed label {label} from ticket {ticket}".format(label=label, ticket=ticketid))
except:
self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
except Exception as e:
self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label,
ticket=ticketid))
else:
self.logger.error("Error: label {label} not in ticket {ticket}".format(label=label, ticket=ticketid))
@ -434,7 +511,6 @@ class JiraAPI(object):
self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
return 0
def is_ticket_reopenable(self, ticket_obj):
transitions = self.jira.transitions(ticket_obj)
for transition in transitions:
@ -453,7 +529,7 @@ class JiraAPI(object):
return False
def is_ticket_resolved(self, ticket_obj):
#Checks if a ticket is resolved or not
# Checks if a ticket is resolved or not
if ticket_obj is not None:
if ticket_obj.raw['fields'].get('resolution') is not None:
if ticket_obj.raw['fields'].get('resolution').get('name') != 'Unresolved':
@ -463,7 +539,6 @@ class JiraAPI(object):
self.logger.debug("Checked ticket {} is already open".format(ticket_obj))
return False
def is_risk_accepted(self, ticket_obj):
if ticket_obj is not None:
if ticket_obj.raw['fields'].get('labels') is not None:
@ -489,7 +564,8 @@ class JiraAPI(object):
if (not self.is_risk_accepted(ticket_obj) or ignore_labels):
try:
if self.is_ticket_reopenable(ticket_obj):
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE, comment = comment)
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE,
comment=comment)
self.logger.info("Ticket {} reopened successfully".format(ticketid))
if not ignore_labels:
self.add_label(ticketid, 'reopened')
@ -507,9 +583,10 @@ class JiraAPI(object):
if not self.is_ticket_resolved(ticket_obj):
try:
if self.is_ticket_closeable(ticket_obj):
#need to add the label before closing the ticket
# need to add the label before closing the ticket
self.add_label(ticketid, 'closed')
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE, comment = comment, resolution = {"name": resolution })
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE,
comment=comment, resolution={"name": resolution})
self.logger.info("Ticket {} closed successfully".format(ticketid))
return 1
except Exception as e:
@ -522,7 +599,8 @@ class JiraAPI(object):
def close_obsolete_tickets(self):
# Close tickets older than 12 months, vulnerabilities not solved will get created a new ticket
self.logger.info("Closing obsolete tickets older than {} months".format(self.max_time_tracking))
jql = "labels=vulnerability_management AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
jql = "labels=vulnerability_management AND NOT labels=advisory AND created <startOfMonth(-{}) and resolution=Unresolved".format(
self.max_time_tracking)
tickets_to_close = self.jira.search_issues(jql, maxResults=0)
comment = '''This ticket is being closed for hygiene, as it is more than {} months old.
@ -545,7 +623,7 @@ class JiraAPI(object):
'''
saves all tickets locally, local snapshot of vulnerability_management ticktes
'''
#check if file already exists
# check if file already exists
check_date = str(date.today())
fname = '{}jira_{}.json'.format(path, check_date)
if os.path.isfile(fname):
@ -553,11 +631,38 @@ class JiraAPI(object):
return True
try:
self.logger.info("Saving locally tickets from the last {} months".format(self.max_time_tracking))
jql = "labels=vulnerability_management AND created >=startOfMonth(-{})".format(self.max_time_tracking)
jql = "labels=vulnerability_management AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
self.max_time_tracking)
tickets_data = self.jira.search_issues(jql, maxResults=0)
#end of line needed, as writelines() doesn't add it automatically, otherwise one big line
to_save = [json.dumps(ticket.raw.get('fields'))+"\n" for ticket in tickets_data]
# TODO process tickets, creating a new field called "_metadata" with all the affected assets well structured
# for future processing in ELK/Splunk; this includes downloading attachments with assets and processing them
processed_tickets = []
for ticket in tickets_data:
assets = self.get_assets_from_description(ticket, _raw=True)
if not assets:
# check if attachment, if so, get assets from attachment
assets = self.get_assets_from_attachment(ticket, _raw=True)
# process the affected assets to save them as json structure on a new field from the JSON
_metadata = {"affected_hosts": []}
if assets:
if "\n" in assets:
for asset in assets.split("\n"):
assets_json = self.parse_asset_to_json(asset)
_metadata["affected_hosts"].append(assets_json)
else:
assets_json = self.parse_asset_to_json(assets)
_metadata["affected_hosts"].append(assets_json)
temp_ticket = ticket.raw.get('fields')
temp_ticket['_metadata'] = _metadata
processed_tickets.append(temp_ticket)
# end of line needed, as writelines() doesn't add it automatically, otherwise one big line
to_save = [json.dumps(ticket.raw.get('fields')) + "\n" for ticket in tickets_data]
with open(fname, 'w') as outfile:
outfile.writelines(to_save)
self.logger.info("Tickets saved succesfully.")
@@ -575,17 +680,20 @@ class JiraAPI(object):
closed already for more than x months (default is 3 months) in order to clean solved issues
for statistics purposes
'''
self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(self.max_decommission_time))
self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(
self.max_decommission_time))
jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(self.max_decommission_time)
jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(
self.max_decommission_time)
decommissioned_tickets = self.jira.search_issues(jql, maxResults=0)
comment = '''The *server_decommission* tag is being removed from this ticket, as it is more than {} months old and the server is expected to have been decommissioned already.
If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(self.max_decommission_time)
If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(
self.max_decommission_time)
for ticket in decommissioned_tickets:
#we reopen the ticket first, as we want to make sure the process is not blocked due to
#a nonexistent jira workflow or a disallowed edit of closed tickets
# we reopen the ticket first, as we want to make sure the process is not blocked due to
# a nonexistent jira workflow or a disallowed edit of closed tickets
self.reopen_ticket(ticketid=ticket, ignore_labels=True)
self.remove_label(ticket, 'server_decommission')
self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
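Both housekeeping routines above build their queries from JQL label filters and the startOfMonth() relative-date function. A minimal sketch of the same pattern with the jira client library; the server URL and credentials are placeholders:

from jira import JIRA

# placeholder server and credentials
jira = JIRA(server="https://jira.example.com", basic_auth=("user", "secret"))

months = 12  # mirrors max_time_tracking
jql = ("labels=vulnerability_management AND NOT labels=advisory "
       "AND created <startOfMonth(-{}) AND resolution=Unresolved".format(months))

# maxResults=0 makes the client page through every match, as in the code above
for issue in jira.search_issues(jql, maxResults=0):
    print(issue.key)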

@@ -1,3 +1,4 @@
from __future__ import absolute_import
import os
import logging
import httpretty
@@ -20,10 +21,12 @@ class mockAPI(object):
def get_directories(self, path):
dir, subdirs, files = next(os.walk(path))
self.logger.debug('Subdirectories found: {}'.format(subdirs))
return subdirs
def get_files(self, path):
dir, subdirs, files = next(os.walk(path))
self.logger.debug('Files found: {}'.format(files))
return files
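Both helpers rely on os.walk() yielding one (dirpath, dirnames, filenames) tuple per directory, top-down, so next() grabs just the immediate children without recursing. A quick illustration with a hypothetical path:

import os

dirpath, subdirs, files = next(os.walk("/tmp"))  # hypothetical path
print(subdirs)  # immediate subdirectories only
print(files)    # files in /tmp itself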
def qualys_vuln_callback(self, request, uri, response_headers):

@@ -1,13 +1,17 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from six.moves import range
from functools import reduce
__author__ = 'Austin Taylor'
from base.config import vwConfig
from frameworks.nessus import NessusAPI
from frameworks.qualys_web import qualysScanReport
from frameworks.qualys_vuln import qualysVulnScan
from frameworks.openvas import OpenVAS_API
from reporting.jira_api import JiraAPI
from .base.config import vwConfig
from .frameworks.nessus import NessusAPI
from .frameworks.qualys_web import qualysScanReport
from .frameworks.qualys_vuln import qualysVulnScan
from .frameworks.openvas import OpenVAS_API
from .reporting.jira_api import JiraAPI
import pandas as pd
from lxml import objectify
import sys
@@ -21,7 +25,6 @@ import socket
class vulnWhispererBase(object):
CONFIG_SECTION = None
def __init__(
@@ -55,14 +58,16 @@ class vulnWhispererBase(object):
except:
self.enabled = False
self.hostname = self.config.get(self.CONFIG_SECTION, 'hostname')
try:
self.username = self.config.get(self.CONFIG_SECTION, 'username')
self.password = self.config.get(self.CONFIG_SECTION, 'password')
except:
self.username = None
self.password = None
self.write_path = self.config.get(self.CONFIG_SECTION, 'write_path')
self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')
if self.db_name is not None:
if self.db_path:
self.database = os.path.join(self.db_path,
@@ -84,7 +89,8 @@ class vulnWhispererBase(object):
self.cur = self.conn.cursor()
self.logger.info('Connected to database at {loc}'.format(loc=self.database))
except Exception as e:
self.logger.error('Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
self.logger.error(
'Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
e=e,
loc=self.database))
else:
@@ -143,11 +149,11 @@ class vulnWhispererBase(object):
return data
def record_insert(self, record):
#for backwards compatibility with older versions without "reported" field
# for backwards compatibility with older versions without "reported" field
try:
#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
#TODO delete backward compatibility check after some versions
# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if last_column_table == self.table_columns[-1]:
self.cur.execute('insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?,?)'.format(
@@ -162,8 +168,8 @@ class vulnWhispererBase(object):
sys.exit(1)
def set_latest_scan_reported(self, filename):
#the reason to use the filename instead of the source/scan_name is because the filename already belongs to
#that latest scan, and we maintain integrity making sure that it is the exact scan we checked
# the reason to use the filename instead of the source/scan_name is because the filename already belongs to
# that latest scan, and we maintain integrity making sure that it is the exact scan we checked
try:
self.cur.execute('UPDATE scan_history SET reported = 1 WHERE filename="{}";'.format(filename))
self.conn.commit()
@@ -181,7 +187,8 @@ class vulnWhispererBase(object):
"""
try:
self.conn.text_factory = str
self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(config_section=self.CONFIG_SECTION))
self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(
config_section=self.CONFIG_SECTION))
results = frozenset([r[0] for r in self.cur.fetchall()])
except:
results = []
@@ -200,24 +207,31 @@ class vulnWhispererBase(object):
def get_latest_results(self, source, scan_name):
processed = 0
results = []
reported = ""
try:
self.conn.text_factory = str
self.cur.execute('SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(source, scan_name))
#should always return just one filename
self.cur.execute(
'SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(
source, scan_name))
# should always return just one filename
results = [r[0] for r in self.cur.fetchall()][0]
#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
#TODO delete backward compatibility check after some versions
# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if results and last_column_table == self.table_columns[-1]:
reported = self.cur.execute('SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
reported = self.cur.execute(
'SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
reported = reported[0][0]
if reported:
self.logger.debug("Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(source=source, scan_name=scan_name))
self.logger.debug(
"Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(
source=source, scan_name=scan_name))
except Exception as e:
self.logger.error("Error when getting latest results from {}.{} : {}".format(source, scan_name, e))
return results, reported
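The SELECT statements above interpolate values into SQL via str.format; sqlite3's parameter binding is the safer equivalent wherever the values are not trusted. A minimal sketch against a hypothetical copy of the scan_history table:

import sqlite3

conn = sqlite3.connect("report_tracker.db")  # hypothetical database path
cur = conn.cursor()

source, scan_name = "nessus", "weekly_scan"  # example values
cur.execute(
    "SELECT filename FROM scan_history WHERE source=? AND scan_name=? "
    "ORDER BY last_modified DESC LIMIT 1;",
    (source, scan_name),
)
row = cur.fetchone()  # at most one filename, as the comment above expects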
def get_scan_profiles(self):
@@ -242,14 +256,14 @@ class vulnWhispererBase(object):
self.cur.execute("SELECT DISTINCT scan_name FROM scan_history WHERE source='{}';".format(source))
scan_names = [r[0] for r in self.cur.fetchall()]
for scan in scan_names:
results.append('{}.{}'.format(source,scan))
results.append('{}.{}'.format(source, scan))
except:
scan_names = []
return results
class vulnWhispererNessus(vulnWhispererBase):
CONFIG_SECTION = None
def __init__(
@@ -263,7 +277,7 @@ class vulnWhispererNessus(vulnWhispererBase):
password=None,
profile='nessus'
):
self.CONFIG_SECTION=profile
self.CONFIG_SECTION = profile
super(vulnWhispererNessus, self).__init__(config=config)
@@ -274,6 +288,8 @@ class vulnWhispererNessus(vulnWhispererBase):
self.develop = True
self.purge = purge
self.access_key = None
self.secret_key = None
if config is not None:
try:
@@ -283,26 +299,36 @@ class vulnWhispererNessus(vulnWhispererBase):
'trash')
try:
self.logger.info('Attempting to connect to nessus...')
self.access_key = self.config.get(self.CONFIG_SECTION, 'access_key')
self.secret_key = self.config.get(self.CONFIG_SECTION, 'secret_key')
except:
pass
try:
self.logger.info('Attempting to connect to {}...'.format(self.CONFIG_SECTION))
self.nessus = \
NessusAPI(hostname=self.hostname,
port=self.nessus_port,
username=self.username,
password=self.password)
password=self.password,
profile=self.CONFIG_SECTION,
access_key=self.access_key,
secret_key=self.secret_key
)
self.nessus_connect = True
self.logger.info('Connected to nessus on {host}:{port}'.format(host=self.hostname,
self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
port=str(self.nessus_port)))
except Exception as e:
self.logger.error('Exception: {}'.format(str(e)))
raise Exception(
'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
'Could not connect to {} -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
self.CONFIG_SECTION,
config=self.config.config_in,
e=e))
except Exception as e:
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
sys.exit(1)
return False
# sys.exit(1)
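One caveat with the new fallback: an instance is always truthy, so the later "if vw:" guards cannot observe a failed constructor, and returning a non-None value from __init__ raises a TypeError at instantiation anyway. A minimal sketch of the conventional alternative (names are illustrative); the same pattern recurs in the other scanner classes below:

class Scanner(object):
    def __init__(self, connected):
        # "return False" here would raise
        # "TypeError: __init__() should return None, not 'bool'"
        # when the class is instantiated, so signal failure by raising
        # (or by setting a flag the caller checks)
        if not connected:
            raise RuntimeError("could not connect to scanner")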
def scan_count(self, scans, completed=False):
"""
@@ -345,7 +371,6 @@ class vulnWhispererNessus(vulnWhispererBase):
scan_records = [s for s in scan_records if s['status'] == 'completed']
return scan_records
def whisper_nessus(self):
if self.nessus_connect:
scan_data = self.nessus.scans
@@ -400,7 +425,8 @@ class vulnWhispererNessus(vulnWhispererBase):
s['uuid'],
)
# TODO Create directory sync function which scans the directory for files that exist already and populates the database
# TODO Create directory sync function which scans the directory for files that exist already and
# populates the database
folder_id = s['folder_id']
if self.CONFIG_SECTION == 'tenable':
@@ -430,24 +456,29 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
self.logger.info('File {filename} already exists! Updating database'.format(filename=relative_path_name))
self.logger.info(
'File {filename} already exists! Updating database'.format(filename=relative_path_name))
else:
try:
file_req = \
self.nessus.download_scan(scan_id=scan_id, history=history_id,
export_format='csv', profile=self.CONFIG_SECTION)
export_format='csv')
except Exception as e:
self.logger.error('Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
self.logger.error(
'Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
self.exit_code += 1
continue
clean_csv = \
pd.read_csv(io.StringIO(file_req.decode('utf-8')))
if len(clean_csv) > 2:
self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name.encode('utf8')))
columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output']
self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list),
scan_name.encode('utf8')))
columns_to_cleanse = ['CVSS', 'CVE', 'Description', 'Synopsis', 'Solution', 'See Also',
'Plugin Output', 'MAC Address']
for col in columns_to_cleanse:
if col in clean_csv:
clean_csv[col] = clean_csv[col].astype(str).apply(self.cleanser)
clean_csv.to_csv(relative_path_name, index=False)
@@ -465,7 +496,8 @@ class vulnWhispererNessus(vulnWhispererBase):
)
self.record_insert(record_meta)
self.logger.info('{filename} records written to {path} '.format(filename=clean_csv.shape[0],
path=file_name.encode('utf8')))
path=file_name.encode(
'utf8')))
else:
record_meta = (
scan_name,
@@ -480,27 +512,32 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
self.logger.warn('{} has no host available... Updating database and skipping!'.format(file_name))
self.logger.warn(
'{} has no host available... Updating database and skipping!'.format(file_name))
self.conn.close()
self.logger.info('Scan aggregation complete! Connection to database closed.')
else:
self.logger.error('Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
self.logger.error(
'Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
self.exit_code += 1
return self.exit_code
class vulnWhispererQualys(vulnWhispererBase):
CONFIG_SECTION = 'qualys_web'
COLUMN_MAPPING = {'Access Path': 'access_path',
'Ajax Request': 'ajax_request',
'Ajax Request ID': 'ajax_request_id',
'Authentication': 'authentication',
'CVSS Base': 'cvss',
'CVSS V3 Attack Vector': 'cvss_v3_attack_vector',
'CVSS V3 Base': 'cvss_v3_base',
'CVSS V3 Temporal': 'cvss_v3_temporal',
'CVSS Temporal': 'cvss_temporal',
'CWE': 'cwe',
'Category': 'category',
'Content': 'content',
'Custom Attributes': 'custom_attributes',
'DescriptionSeverity': 'severity_description',
'DescriptionCatSev': 'category_description',
'Detection ID': 'detection_id',
@@ -516,15 +553,19 @@ class vulnWhispererQualys(vulnWhispererBase):
'Ignore User': 'ignore_user',
'Ignored': 'ignored',
'Impact': 'impact',
'Info#1': 'info_1',
'Last Time Detected': 'last_time_detected',
'Last Time Tested': 'last_time_tested',
'Level': 'level',
'OWASP': 'owasp',
'Operating System': 'operating_system',
'Owner': 'owner',
'Param': 'param',
'Param/Cookie': 'param',
'Payload #1': 'payload_1',
'Port': 'port',
'Protocol': 'protocol',
'QID': 'plugin_id',
'Request Body #1': 'request_body_1',
'Request Headers #1': 'request_headers_1',
'Request Method #1': 'request_method_1',
'Request URL #1': 'request_url_1',
@@ -533,13 +574,17 @@ class vulnWhispererQualys(vulnWhispererBase):
'Severity': 'risk',
'Severity Level': 'security_level',
'Solution': 'solution',
'Tags': 'tags',
'Times Detected': 'times_detected',
'Title': 'plugin_name',
'URL': 'url',
'Unique ID': 'unique_id',
'Url': 'uri',
'Vulnerability Category': 'vulnerability_category',
'Virtual Host': 'virtual_host',
'WASC': 'wasc',
'Web Application Name': 'web_application_name'}
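COLUMN_MAPPING presumably drives a pandas column rename when normalizing the Qualys report headers; a small sketch with an excerpt of the mapping and a fabricated row:

import pandas as pd

COLUMN_MAPPING = {"QID": "plugin_id", "Title": "plugin_name", "Severity": "risk"}  # excerpt

df = pd.DataFrame([{"QID": 150004, "Title": "Predictable Resource Location", "Severity": 2}])
df = df.rename(columns=COLUMN_MAPPING)  # unmapped columns pass through unchanged
print(df.columns.tolist())  # ['plugin_id', 'plugin_name', 'risk']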
def __init__(
self,
config=None,
@@ -555,8 +600,11 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger = logging.getLogger('vulnWhispererQualys')
if debug:
self.logger.setLevel(logging.DEBUG)
try:
self.qualys_scan = qualysScanReport(config=config)
except Exception as e:
self.logger.error("Unable to establish connection with Qualys scanner. Reason: {}".format(e))
return False
self.latest_scans = self.qualys_scan.qw.get_all_scans()
self.directory_check()
self.scans_to_process = None
@@ -584,7 +632,7 @@ class vulnWhispererQualys(vulnWhispererBase):
relative_path_name = self.path_check(report_name).encode('utf8')
if os.path.isfile(relative_path_name):
#TODO Possibly make this optional to sync directories
# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
@@ -642,10 +690,10 @@ class vulnWhispererQualys(vulnWhispererBase):
if cleanup:
self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
cleaning_up = \
self.qualys_scan.qw.delete_report(generated_report_id)
cleaning_up = self.qualys_scan.qw.delete_report(generated_report_id)
os.remove(self.path_check(str(generated_report_id) + '.csv'))
self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
self.logger.info(
'Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
else:
self.logger.error('Could not process report ID: {}'.format(status))
@@ -653,7 +701,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
return vuln_ready
def identify_scans_to_process(self):
if self.uuids:
self.scans_to_process = self.latest_scans[~self.latest_scans['id'].isin(self.uuids)]
@@ -661,7 +708,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))
def process_web_assets(self):
counter = 0
self.identify_scans_to_process()
@@ -728,17 +774,20 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
self.develop = True
self.purge = purge
self.scans_to_process = None
try:
self.openvas_api = OpenVAS_API(hostname=self.hostname,
port=self.port,
username=self.username,
password=self.password)
except Exception as e:
self.logger.error("Unable to establish connection with OpenVAS scanner. Reason: {}".format(e))
return False
def whisper_reports(self, output_format='json', launched_date=None, report_id=None, cleanup=True):
report = None
if report_id:
self.logger.info('Processing report ID: {}'.format(report_id))
scan_name = report_id.replace('-', '')
report_name = 'openvas_scan_{scan_name}_{last_updated}.{extension}'.format(scan_name=scan_name,
last_updated=launched_date,
@@ -806,7 +855,8 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
for scan in self.scans_to_process.iterrows():
counter += 1
info = scan[1]
self.logger.info('Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.logger.info(
'Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.whisper_reports(report_id=info['report_ids'],
launched_date=info['epoch'])
self.logger.info('Processing complete')
@@ -817,7 +867,6 @@
class vulnWhispererQualysVuln(vulnWhispererBase):
CONFIG_SECTION = 'qualys_vuln'
COLUMN_MAPPING = {'cvss_base': 'cvss',
'cvss3_base': 'cvss3',
@@ -842,8 +891,11 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.logger = logging.getLogger('vulnWhispererQualysVuln')
if debug:
self.logger.setLevel(logging.DEBUG)
try:
self.qualys_scan = qualysVulnScan(config=config)
except Exception as e:
self.logger.error("Unable to create connection with Qualys. Reason: {}".format(e))
return False
self.directory_check()
self.scans_to_process = None
@@ -854,17 +906,17 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
scan_reference=None,
output_format='json',
cleanup=True):
launched_date
if 'Z' in launched_date:
launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
report_name = 'qualys_vuln_' + report_id.replace('/','_') \
report_name = 'qualys_vuln_' + report_id.replace('/', '_') \
+ '_{last_updated}'.format(last_updated=launched_date) \
+ '.json'
relative_path_name = self.path_check(report_name).encode('utf8')
if os.path.isfile(relative_path_name):
#TODO Possibly make this optional to sync directories
# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
@@ -915,7 +967,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.logger.info('Report written to {}'.format(report_name))
return self.exit_code
def identify_scans_to_process(self):
self.latest_scans = self.qualys_scan.qw.get_all_scans()
if self.uuids:
@@ -926,7 +977,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))
def process_vuln_scans(self):
counter = 0
self.identify_scans_to_process()
@@ -946,7 +996,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
class vulnWhispererJIRA(vulnWhispererBase):
CONFIG_SECTION = 'jira'
def __init__(
@@ -966,6 +1015,13 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.config_path = config
self.config = vwConfig(config)
self.host_resolv_cache = {}
self.host_no_resolv = []
self.no_resolv_by_team_dict = {}
# Save locally the assets without a DNS entry, to flag them to the system owners
self.no_resolv_fname = "no_resolv.txt"
if os.path.isfile(self.no_resolv_fname):
with open(self.no_resolv_fname, "r") as json_file:
self.no_resolv_by_team_dict = json.load(json_file)
self.directory_check()
if config is not None:
@@ -975,7 +1031,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
JiraAPI(hostname=self.hostname,
username=self.username,
password=self.password,
path=self.config.get('jira','write_path'))
path=self.config.get('jira', 'write_path'))
self.jira_connect = True
self.logger.info('Connected to jira on {host}'.format(host=self.hostname))
except Exception as e:
@@ -983,24 +1039,26 @@ class vulnWhispererJIRA(vulnWhispererBase):
raise Exception(
'Could not connect to jira -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
config=self.config.config_in, e=e))
sys.exit(1)
return False
# sys.exit(1)
profiles = []
profiles = self.get_scan_profiles()
if not self.config.exists_jira_profiles(profiles):
self.config.update_jira_profiles(profiles)
self.logger.info("Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(config=self.config_path))
self.logger.info(
"Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(
config=self.config_path))
sys.exit(0)
def get_env_variables(self, source, scan_name):
# function returns an array with [jira_project, jira_components, datafile_path]
#Jira variables
jira_section = self.config.normalize_section("{}.{}".format(source,scan_name))
# Jira variables
jira_section = self.config.normalize_section("{}.{}".format(source, scan_name))
project = self.config.get(jira_section,'jira_project')
project = self.config.get(jira_section, 'jira_project')
if project == "":
self.logger.error('JIRA project is missing on the configuration file!')
sys.exit(0)
@@ -1010,35 +1068,39 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.logger.error("JIRA project '{project}' doesn't exist!".format(project=project))
sys.exit(0)
components = self.config.get(jira_section,'components').split(',')
components = self.config.get(jira_section, 'components').split(',')
#clean out the empty entry produced when the setting is blank
# clean out the empty entry produced when the setting is blank
if not components[0]:
components = []
min_critical = self.config.get(jira_section,'min_critical_to_report')
min_critical = self.config.get(jira_section, 'min_critical_to_report')
if not min_critical:
self.logger.error('"min_critical_to_report" variable on config file is empty.')
sys.exit(0)
#datafile path
# datafile path
filename, reported = self.get_latest_results(source, scan_name)
fullpath = ""
# search data files under user specified directory
for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source,'write_path')):
for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source, 'write_path')):
if filename in filenames:
fullpath = "{}/{}".format(root,filename)
fullpath = "{}/{}".format(root, filename)
if reported:
self.logger.warn('Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(scan_name=scan_name, source=source))
self.logger.warn(
'Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(
scan_name=scan_name, source=source))
return [False] * 5
if not fullpath:
self.logger.error('Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(scan_name=scan_name, source=source))
self.logger.error(
'Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(
scan_name=scan_name, source=source))
sys.exit(1)
dns_resolv = self.config.get('jira','dns_resolv')
dns_resolv = self.config.get('jira', 'dns_resolv')
if dns_resolv in ('False', 'false', ''):
dns_resolv = False
elif dns_resolv in ('True', 'true'):
@@ -1049,36 +1111,36 @@ class vulnWhispererJIRA(vulnWhispererBase):
return project, components, fullpath, min_critical, dns_resolv
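The manual string check for dns_resolv could also lean on configparser's boolean parsing, assuming the ini-backed configuration the module already uses; the file name is a placeholder:

import configparser

config = configparser.ConfigParser()
config.read("frameworks.ini")  # placeholder config path

# getboolean() accepts 1/0, yes/no, true/false, on/off (case-insensitive)
dns_resolv = config.getboolean("jira", "dns_resolv", fallback=False)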
def parse_nessus_vulnerabilities(self, fullpath, source, scan_name, min_critical):
vulnerabilities = []
# we need to parse the CSV
risks = ['none', 'low', 'medium', 'high', 'critical']
min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])
min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0])
df = pd.read_csv(fullpath, delimiter=',')
#nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
# nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
for index in range(len(df)):
# filtering vulnerabilities by criticality, discarding low risk
to_report = int([i for i,x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
to_report = int([i for i, x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
if to_report < min_risk:
continue
if not vulnerabilities or df.loc[index]['Name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
#vulnerabilities should have all the info for creating all JIRA labels
# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
#vulnerability variables
# vulnerability variables
vuln['title'] = df.loc[index]['Name']
vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n',' ')
vuln['consequence'] = df.loc[index]['Description'].replace('\\n',' ')
vuln['solution'] = df.loc[index]['Solution'].replace('\\n',' ')
vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n', ' ')
vuln['consequence'] = df.loc[index]['Description'].replace('\\n', ' ')
vuln['solution'] = df.loc[index]['Solution'].replace('\\n', ' ')
vuln['ips'] = []
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['ips'].append(
"{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['risk'] = df.loc[index]['Risk'].lower()
# Nessus "nan" value gets automatically casted to float by python
@@ -1092,51 +1154,54 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == df.loc[index]['Name']:
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'],
df.loc[index]['Port']))
return vulnerabilities
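The enumerate comprehensions that map a risk label to its position are equivalent to list.index(); a tiny check:

risks = ["none", "low", "medium", "high", "critical"]
min_critical = "high"  # example threshold

min_risk = risks.index(min_critical)
assert min_risk == int([i for i, x in enumerate(risks) if x == min_critical][0])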
def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv = False):
#parsing of the qualys vulnerabilities schema
#parse json
def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv=False):
# parsing of the qualys vulnerabilities schema
# parse json
vulnerabilities = []
risks = ['info', 'low', 'medium', 'high', 'critical']
# +1 as array is 0-4, but score is 1-5
min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])+1
min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0]) + 1
try:
data=[json.loads(line) for line in open(fullpath).readlines()]
data = [json.loads(line) for line in open(fullpath).readlines()]
except Exception as e:
self.logger.warn("Scan has no vulnerabilities, skipping.")
return vulnerabilities
#qualys fields we want - []
# qualys fields we want - []
for index in range(len(data)):
if int(data[index]['risk']) < min_risk:
continue
elif data[index]['type'] == 'Practice' or data[index]['type'] == 'Ig':
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['plugin_name']))
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(
vuln=data[index]['plugin_name']))
continue
if not vulnerabilities or data[index]['plugin_name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
#vulnerabilities should have all the info for creating all JIRA labels
# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
#vulnerability variables
# vulnerability variables
vuln['title'] = data[index]['plugin_name']
vuln['diagnosis'] = data[index]['threat'].replace('\\n',' ')
vuln['consequence'] = data[index]['impact'].replace('\\n',' ')
vuln['solution'] = data[index]['solution'].replace('\\n',' ')
vuln['diagnosis'] = data[index]['threat'].replace('\\n', ' ')
vuln['consequence'] = data[index]['impact'].replace('\\n', ' ')
vuln['solution'] = data[index]['solution'].replace('\\n', ' ')
vuln['ips'] = []
#TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!
# TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!
vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
vuln['ips'].append(
"{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
#different risk system than Nessus!
vuln['risk'] = risks[int(data[index]['risk'])-1]
# different risk system than Nessus!
vuln['risk'] = risks[int(data[index]['risk']) - 1]
# Nessus "nan" value gets automatically casted to float by python
if not (type(data[index]['vendor_reference']) is float or data[index]['vendor_reference'] == None):
@@ -1148,7 +1213,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == data[index]['plugin_name']:
vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
vuln['ips'].append(
"{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
return vulnerabilities
@@ -1162,7 +1228,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
if vuln['dns']:
values['dns'] = vuln['dns']
else:
if values['ip'] in self.host_resolv_cache.keys():
if values['ip'] in list(self.host_resolv_cache.keys()):
self.logger.debug("Hostname from {ip} cached, retrieving from cache.".format(ip=values['ip']))
values['dns'] = self.host_resolv_cache[values['ip']]
else:
@@ -1173,6 +1239,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.logger.debug("Hostname found: {hostname}.".format(hostname=values['dns']))
except:
self.host_resolv_cache[values['ip']] = ''
self.host_no_resolv.append(values['ip'])
self.logger.debug("Hostname not found for: {ip}.".format(ip=values['ip']))
for key in values.keys():
@@ -1182,42 +1249,55 @@ class vulnWhispererJIRA(vulnWhispererBase):
return values
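get_asset_fields caches one reverse lookup per IP and stores '' on failure so the same miss is not retried during the run. A standalone sketch, assuming a socket.gethostbyaddr() lookup as the surrounding code suggests:

import socket

host_resolv_cache = {}

def resolve(ip):
    # one reverse lookup per IP; failures are cached as '' so they
    # are not retried within the run
    if ip not in host_resolv_cache:
        try:
            host_resolv_cache[ip] = socket.gethostbyaddr(ip)[0]
        except (socket.herror, socket.gaierror):
            host_resolv_cache[ip] = ""
    return host_resolv_cache[ip]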
def parse_vulnerabilities(self, fullpath, source, scan_name, min_critical):
#TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
#JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']
# TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
# JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']
return 0
def jira_sync(self, source, scan_name):
self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source, scan_name=scan_name))
self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source,
scan_name=scan_name))
project, components, fullpath, min_critical, dns_resolv = self.get_env_variables(source, scan_name)
if not project:
self.logger.debug("Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(source=source, scan_name=scan_name))
self.logger.debug(
"Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(
source=source, scan_name=scan_name))
return False
vulnerabilities = []
#***Nessus parsing***
# ***Nessus parsing***
if source == "nessus":
vulnerabilities = self.parse_nessus_vulnerabilities(fullpath, source, scan_name, min_critical)
#***Qualys VM parsing***
# ***Qualys VM parsing***
if source == "qualys_vuln":
vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical,
dns_resolv)
#***JIRA sync***
# ***JIRA sync***
if vulnerabilities:
self.logger.info('{source} data has been successfully parsed'.format(source=source.upper()))
self.logger.info('Starting JIRA sync')
self.jira.sync(vulnerabilities, project, components)
else:
self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
self.logger.info(
"[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source,
scan_name=scan_name))
self.set_latest_scan_reported(fullpath.split("/")[-1])
return False
# write the assets without DNS resolution to a file,
# if the list is not empty
if self.host_no_resolv:
# we replace the old list of unresolved hosts with the new one, or create it if it doesn't already exist
self.no_resolv_by_team_dict[scan_name] = self.host_no_resolv
with open(self.no_resolv_fname, 'w') as outfile:
json.dump(self.no_resolv_by_team_dict, outfile)
self.set_latest_scan_reported(fullpath.split("/")[-1])
return True
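The unresolved-host bookkeeping added here is a plain JSON round-trip keyed by scan name: load no_resolv.txt at startup, overwrite the entry for the current scan, persist. A self-contained sketch with placeholder values:

import json
import os

fname = "no_resolv.txt"

# load the previous snapshot if one exists
no_resolv_by_team = {}
if os.path.isfile(fname):
    with open(fname) as fh:
        no_resolv_by_team = json.load(fh)

# replace (or create) the entry for this scan, then persist
no_resolv_by_team["example_scan"] = ["10.0.0.5"]  # placeholder scan name / IPs
with open(fname, "w") as fh:
    json.dump(no_resolv_by_team, fh)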
@@ -1226,10 +1306,16 @@ class vulnWhispererJIRA(vulnWhispererBase):
if autoreport_sections:
for scan in autoreport_sections:
try:
self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
except Exception as e:
self.logger.error(
"VulnWhisperer wasn't able to report the vulnerabilities from the '{}'s source".format(
self.config.get(scan, 'source')))
return True
return False
class vulnWhisperer(object):
def __init__(self,
@@ -1253,40 +1339,39 @@ class vulnWhisperer(object):
self.scanname = scanname
self.exit_code = 0
def whisper_vulnerabilities(self):
if self.profile == 'nessus':
vw = vulnWhispererNessus(config=self.config,
username=self.username,
password=self.password,
verbose=self.verbose,
profile=self.profile)
if vw:
self.exit_code += vw.whisper_nessus()
elif self.profile == 'qualys_web':
vw = vulnWhispererQualys(config=self.config)
if vw:
self.exit_code += vw.process_web_assets()
elif self.profile == 'openvas':
vw_openvas = vulnWhispererOpenVAS(config=self.config)
if vw_openvas:
self.exit_code += vw_openvas.process_openvas_scans()
elif self.profile == 'tenable':
vw = vulnWhispererNessus(config=self.config,
username=self.username,
password=self.password,
verbose=self.verbose,
profile=self.profile)
if vw:
self.exit_code += vw.whisper_nessus()
elif self.profile == 'qualys_vuln':
vw = vulnWhispererQualysVuln(config=self.config)
if vw:
self.exit_code += vw.process_vuln_scans()
elif self.profile == 'jira':
#first we check config fields are created, otherwise we create them
# first we check config fields are created, otherwise we create them
vw = vulnWhispererJIRA(config=self.config)
if vw:
if not (self.source and self.scanname):
self.logger.info('No source/scan_name selected, all enabled scans will be synced')
success = vw.sync_all()