Compare commits
19 Commits: dependabot...2to3

| SHA1 |
|---|
| 53d70ab0db |
| 54fa0ace8a |
| 273b17009a |
| ff5f4cb331 |
| 61539afa4d |
| 742a645190 |
| 51234a569f |
| 5dad1ceb10 |
| 3db931f3eb |
| 649ecd431b |
| 13a52a3e08 |
| 8403b35199 |
| 68519d5648 |
| 73342fdeb8 |
| 183e3b3e72 |
| e25141261c |
| 8743b59147 |
| c0e7ab9863 |
| 97de805e0c |
@@ -6,10 +6,8 @@

VulnWhisperer is a vulnerability management tool and report aggregator. VulnWhisperer pulls all the reports from the different vulnerability scanners and creates a file with a unique filename for each one, using that data later to sync with Jira and feed Logstash. Jira does a closed-cycle full sync with the data provided by the scanners, while Logstash indexes and tags all of the information inside the report (see the Logstash files at /resources/elk6/pipeline/). Data is then shipped to Elasticsearch to be indexed, and ends up in a visual and searchable format in Kibana with predefined dashboards.

VulnWhisperer is an open-source, community-funded project. VulnWhisperer currently works but is due for a documentation overhaul and code review. This is on the roadmap for the next month or two (February or March of 2022, hopefully). Please note, crowdfunding is an option. If you would like help getting VulnWhisperer up and running, are interested in new features, or are looking for paid support (for those of you that require commercial support contracts to implement open-source solutions), please reach out to **info@hasecuritysolutions.com**.

[](https://travis-ci.org/HASecuritySolutions/VulnWhisperer)
[](https://github.com/HASecuritySolutions/VulnWhisperer/blob/master/LICENSE)
[](http://choosealicense.com/licenses/mit/)
[](https://twitter.com/VulnWhisperer)

Currently Supports

@@ -32,8 +30,7 @@ Currently Supports

### Reporting Frameworks

- [X] [Elastic Stack (**v6**/**v7**)](https://www.elastic.co/elk-stack)
- [ ] [OpenSearch - Being considered for next update](https://opensearch.org/)
- [X] [ELK (**v6**/**v7**)](https://www.elastic.co/elk-stack)
- [X] [Jira](https://www.atlassian.com/software/jira)
- [ ] [Splunk](https://www.splunk.com/)
@@ -93,7 +93,7 @@ def main():
scanname=args.scanname)
exit_code += vw.whisper_vulnerabilities()
except Exception as e:
-logger.error("VulnWhisperer was unable to perform the processing on '{}'".format(args.source))
+logger.error("VulnWhisperer was unable to perform the processing on '{}'".format(section))
else:
logger.info('Running vulnwhisperer for section {}'.format(args.section))
vw = vulnWhisperer(config=args.config,
@@ -6,8 +6,8 @@ access_key=
secret_key=
username=nessus_username
password=nessus_password
-write_path=/opt/VulnWhisperer/data/nessus/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/nessus/
+db_path=/tmp/VulnWhisperer/data/database
trash=false
verbose=true

@@ -19,8 +19,8 @@ access_key=
secret_key=
username=tenable.io_username
password=tenable.io_password
-write_path=/opt/VulnWhisperer/data/tenable/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/tenable/
+db_path=/tmp/VulnWhisperer/data/database
trash=false
verbose=true
@@ -30,8 +30,8 @@ enabled = false
hostname = qualys_web
username = exampleuser
password = examplepass
-write_path=/opt/VulnWhisperer/data/qualys_web/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/qualys_web/
+db_path=/tmp/VulnWhisperer/data/database
verbose=true

# Set the maximum number of retries each connection should attempt.

@@ -46,8 +46,8 @@ enabled = true
hostname = qualys_vuln
username = exampleuser
password = examplepass
-write_path=/opt/VulnWhisperer/data/qualys_vuln/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/qualys_vuln/
+db_path=/tmp/VulnWhisperer/data/database
verbose=true

[detectify]

@@ -58,8 +58,8 @@ hostname = detectify
username = exampleuser
#password variable used as secretKey
password = examplepass
-write_path =/opt/VulnWhisperer/data/detectify/
-db_path = /opt/VulnWhisperer/data/database
+write_path =/tmp/VulnWhisperer/data/detectify/
+db_path = /tmp/VulnWhisperer/data/database
verbose = true

[openvas]

@@ -68,8 +68,8 @@ hostname = openvas
port = 4000
username = exampleuser
password = examplepass
-write_path=/opt/VulnWhisperer/data/openvas/
-db_path=/opt/VulnWhisperer/data/database
+write_path=/tmp/VulnWhisperer/data/openvas/
+db_path=/tmp/VulnWhisperer/data/database
verbose=true

[jira]

@@ -77,8 +77,8 @@ enabled = false
hostname = jira-host
username = username
password = password
-write_path = /opt/VulnWhisperer/data/jira/
-db_path = /opt/VulnWhisperer/data/database
+write_path = /tmp/VulnWhisperer/data/jira/
+db_path = /tmp/VulnWhisperer/data/database
verbose = true
dns_resolv = False
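Every scanner section in this config follows the same INI shape: connection credentials plus `write_path`, `db_path`, and `verbose`. As a quick, hedged illustration (not part of the diff; the file path is a placeholder), such a section can be read with Python's stdlib configparser:

```python
# Illustrative only: reads a [nessus] section shaped like the ones in the
# diff above. 'frameworks.ini' is a placeholder path, not a repo file name.
import configparser

config = configparser.ConfigParser()
config.read('frameworks.ini')

write_path = config.get('nessus', 'write_path')   # e.g. /tmp/VulnWhisperer/data/nessus/
db_path = config.get('nessus', 'db_path')         # e.g. /tmp/VulnWhisperer/data/database
verbose = config.getboolean('nessus', 'verbose')  # the string 'true' becomes True
print(write_path, db_path, verbose)
```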
@@ -1,8 +1,8 @@
pandas==0.20.3
-setuptools==65.5.1
+setuptools==40.4.3
pytz==2017.2
Requests==2.20.0
-lxml==4.6.5
+lxml==4.1.1
future-fstrings
bs4
jira
@@ -2,7 +2,7 @@
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
-# Description: Take in Openvas web scan reports from vulnWhisperer and pumps into logstash
+# Description: Take in qualys web scan reports from vulnWhisperer and pumps into logstash

input {
file {
setup.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python

+from __future__ import absolute_import
from setuptools import setup, find_packages

setup(
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import sys
import logging

@@ -5,7 +6,7 @@ import logging
if sys.version_info > (3, 0):
import configparser as cp
else:
-import ConfigParser as cp
+import six.moves.configparser as cp


class vwConfig(object):
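For context on the hunk above: `six.moves.configparser` resolves to the `ConfigParser` module on Python 2 and to `configparser` on Python 3, so one import covers both interpreters. A minimal sketch under that assumption:

```python
# Minimal sketch: the same import works on Python 2 and 3, which is what
# makes the sys.version_info branch above unnecessary once six is in use.
import six.moves.configparser as cp

parser = cp.RawConfigParser()
parser.add_section('example')
parser.set('example', 'enabled', 'true')
print(parser.getboolean('example', 'enabled'))  # True on both interpreters
```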
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import json
import logging
import sys
@@ -1,5 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
__author__ = 'Austin Taylor'

import datetime as dt
@@ -1,5 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
__author__ = 'Nathan Young'

import logging

@@ -18,9 +19,9 @@ class qualysWhisperAPI(object):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
-self.qgc = qualysapi.connect(config, 'qualys_vuln')
+self.qgc = qualysapi.connect(config_file=config, section='qualys_vuln')
# Fail early if we can't make a request or auth is incorrect
-self.qgc.request('about.php')
+# self.qgc.request('about.php')
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
@@ -1,5 +1,8 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from six.moves import range
+from functools import reduce
__author__ = 'Austin Taylor'

from lxml import objectify

@@ -14,24 +17,16 @@ import os
import csv
import logging
import dateutil.parser as dp
csv.field_size_limit(sys.maxsize)


class qualysWhisperAPI(object):
COUNT_WEBAPP = '/count/was/webapp'
COUNT_WASSCAN = '/count/was/wasscan'
DELETE_REPORT = '/delete/was/report/{report_id}'
GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
QPS_REST_3 = '/qps/rest/3.0'
REPORT_DETAILS = '/get/was/report/{report_id}'
REPORT_STATUS = '/status/was/report/{report_id}'
REPORT_CREATE = '/create/was/report'
REPORT_DOWNLOAD = '/download/was/report/{report_id}'
SCAN_DETAILS = '/get/was/wasscan/{scan_id}'
SCAN_DOWNLOAD = '/download/was/wasscan/{scan_id}'
SEARCH_REPORTS = '/search/was/report'
SEARCH_WEB_APPS = '/search/was/webapp'
SEARCH_WAS_SCAN = '/search/was/wasscan'
VERSION = '/qps/rest/portal/version'

def __init__(self, config=None):
self.logger = logging.getLogger('qualysWhisperAPI')
@@ -41,10 +36,6 @@ class qualysWhisperAPI(object):
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
self.headers = {
#"content-type": "text/xml"}
"Accept" : "application/json",
"Content-Type": "application/json"}
self.config_parse = qcconf.QualysConnectConfig(config, 'qualys_web')
try:
self.template_id = self.config_parse.get_template_id()

@@ -69,14 +60,8 @@ class qualysWhisperAPI(object):

def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
report_xml = E.ServiceRequest(
-E.filters(
-E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status
-),
-),
-E.preferences(
-E.startFromOffset(str(offset)),
-E.limitResults(str(limit))
-),
+E.filters(E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status)),
+E.preferences(E.startFromOffset(str(offset)), E.limitResults(str(limit))),
)
return report_xml
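The compacted builder call above is behavior-preserving; the nested calls were only reflowed. A self-contained sketch of the same pattern using lxml's ElementMaker (the `E` factory), serialized so the resulting ServiceRequest payload is visible:

```python
# Standalone reconstruction of the builder pattern above; not the repo's code.
from lxml import etree
from lxml.builder import E

def scan_result_xml(limit=1000, offset=1, status='FINISHED'):
    return E.ServiceRequest(
        E.filters(E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status)),
        E.preferences(E.startFromOffset(str(offset)), E.limitResults(str(limit))),
    )

print(etree.tostring(scan_result_xml(limit=5), pretty_print=True).decode())
```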
@@ -115,8 +100,10 @@ class qualysWhisperAPI(object):
if i % limit == 0:
if (total - i) < limit:
qualys_api_limit = total - i
-self.logger.info('Making a request with a limit of {} at offset {}'.format((str(qualys_api_limit)), str(i + 1)))
-scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
+self.logger.info('Making a request with a limit of {} at offset {}'
+.format((str(qualys_api_limit)), str(i + 1)))
+scan_info = self.get_scan_info(
+limit=qualys_api_limit, offset=i + 1, status=status)
_records.append(scan_info)
self.logger.debug('Converting XML to DataFrame')
dataframes = [self.xml_parser(xml) for xml in _records]
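The wrapped calls above implement offset/limit paging. A tiny sketch with illustrative numbers (the loop is simplified relative to the surrounding code) shows how the final window is shrunk to the remaining record count:

```python
# Illustrative paging arithmetic; 'total' and 'limit' are made-up values.
total, limit = 2350, 1000
qualys_api_limit = limit
for i in range(0, total, limit):
    if (total - i) < limit:
        qualys_api_limit = total - i  # shrink the last request window
    print('request limit={} offset={}'.format(qualys_api_limit, i + 1))
# request limit=1000 offset=1
# request limit=1000 offset=1001
# request limit=350 offset=2001
```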
@@ -133,7 +120,8 @@ class qualysWhisperAPI(object):
return self.qgc.request(self.REPORT_STATUS.format(report_id=report_id))

def download_report(self, report_id):
-return self.qgc.request(self.REPORT_DOWNLOAD.format(report_id=report_id))
+return self.qgc.request(
+self.REPORT_DOWNLOAD.format(report_id=report_id), http_method='get')

def generate_scan_report_XML(self, scan_id):
"""Generates a CSV report for an asset based on template defined in .ini file"""
@@ -145,20 +133,8 @@ class qualysWhisperAPI(object):
E.format('CSV'),
#type is not needed, as the template already has it
E.type('WAS_SCAN_REPORT'),
-E.template(
-E.id(self.template_id)
-),
-E.config(
-E.scanReport(
-E.target(
-E.scans(
-E.WasScan(
-E.id(scan_id)
-)
-),
-),
-),
-)
+E.template(E.id(self.template_id)),
+E.config(E.scanReport(E.target(E.scans(E.WasScan(E.id(scan_id))))))
)
)
)
@@ -175,95 +151,14 @@
def delete_report(self, report_id):
return self.qgc.request(self.DELETE_REPORT.format(report_id=report_id))


-class qualysReportFields:
-CATEGORIES = ['VULNERABILITY',
-'SENSITIVECONTENT',
-'INFORMATION_GATHERED']
-
-# URL Vulnerability Information
-
-VULN_BLOCK = [
-CATEGORIES[0],
-'ID',
-'QID',
-'Url',
-'Param',
-'Function',
-'Form Entry Point',
-'Access Path',
-'Authentication',
-'Ajax Request',
-'Ajax Request ID',
-'Ignored',
-'Ignore Reason',
-'Ignore Date',
-'Ignore User',
-'Ignore Comments',
-'First Time Detected',
-'Last Time Detected',
-'Last Time Tested',
-'Times Detected',
-'Payload #1',
-'Request Method #1',
-'Request URL #1',
-'Request Headers #1',
-'Response #1',
-'Evidence #1',
-]
-
-INFO_HEADER = [
-'Vulnerability Category',
-'ID',
-'QID',
-'Response #1',
-'Last Time Detected',
-]
-INFO_BLOCK = [
-CATEGORIES[2],
-'ID',
-'QID',
-'Results',
-'Detection Date',
-]
-
-QID_HEADER = [
-'QID',
-'Id',
-'Title',
-'Category',
-'Severity Level',
-'Groups',
-'OWASP',
-'WASC',
-'CWE',
-'CVSS Base',
-'CVSS Temporal',
-'Description',
-'Impact',
-'Solution',
-]
-GROUP_HEADER = ['GROUP', 'Name', 'Category']
-OWASP_HEADER = ['OWASP', 'Code', 'Name']
-WASC_HEADER = ['WASC', 'Code', 'Name']
-SCAN_META = ['Web Application Name', 'URL', 'Owner', 'Scope', 'Operating System']
-CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']


class qualysUtils:
def __init__(self):
self.logger = logging.getLogger('qualysUtils')

-def grab_section(
-self,
-report,
-section,
-end=[],
-pop_last=False,
-):
+def grab_section(self, report, section, end=[], pop_last=False):
temp_list = []
max_col_count = 0
-with open(report, 'rb') as csvfile:
+with open(report, 'rt') as csvfile:
q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
for line in q_report:
if set(line) == set(section):
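The `'rb'` to `'rt'` change above matters because Python 3's csv module expects text-mode files. A self-contained sketch of the same section-scanning idea, matching a header row by set equality over inline sample data:

```python
# Hedged miniature of grab_section: sample CSV data replaces the real report.
import csv
import io

section = ['QID', 'Title', 'Severity Level']
sample = 'junk,row\nQID,Title,Severity Level\n42,Example finding,3\n'

in_section = False
for line in csv.reader(io.StringIO(sample), delimiter=',', quotechar='"'):
    if set(line) == set(section):
        in_section = True  # the header row marks the start of the section
        continue
    if in_section:
        print(line)  # ['42', 'Example finding', '3']
```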
@@ -289,44 +184,53 @@ class qualysUtils:
return _data

class qualysScanReport:
# URL Vulnerability Information
-WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
-WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')
+CATEGORIES = ['VULNERABILITY', 'SENSITIVECONTENT', 'INFORMATION_GATHERED']

-WEB_SCAN_VULN_HEADER = list(WEB_SCAN_VULN_BLOCK)
-WEB_SCAN_VULN_HEADER[WEB_SCAN_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
-'Vulnerability Category'
+WEB_SCAN_BLOCK = [
+"ID", "Detection ID", "QID", "Url", "Param/Cookie", "Function",
+"Form Entry Point", "Access Path", "Authentication", "Ajax Request",
+"Ajax Request ID", "Ignored", "Ignore Reason", "Ignore Date", "Ignore User",
+"Ignore Comments", "Detection Date", "Payload #1", "Request Method #1",
+"Request URL #1", "Request Headers #1", "Response #1", "Evidence #1",
+"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+"Info#1", "CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector",
+"Request Body #1"
+]
+WEB_SCAN_VULN_BLOCK = [CATEGORIES[0]] + WEB_SCAN_BLOCK
+WEB_SCAN_SENSITIVE_BLOCK = [CATEGORIES[1]] + WEB_SCAN_BLOCK

-WEB_SCAN_SENSITIVE_HEADER = list(WEB_SCAN_VULN_HEADER)
-WEB_SCAN_SENSITIVE_HEADER.insert(WEB_SCAN_SENSITIVE_HEADER.index('Url'
-), 'Content')
+WEB_SCAN_HEADER = ["Vulnerability Category"] + WEB_SCAN_BLOCK
+WEB_SCAN_HEADER[WEB_SCAN_HEADER.index("Detection Date")] = "Last Time Detected"

-WEB_SCAN_SENSITIVE_BLOCK = list(WEB_SCAN_SENSITIVE_HEADER)
-WEB_SCAN_SENSITIVE_BLOCK.insert(WEB_SCAN_SENSITIVE_BLOCK.index('QID'), 'Detection ID')
-WEB_SCAN_SENSITIVE_BLOCK[WEB_SCAN_SENSITIVE_BLOCK.index('Vulnerability Category'
-)] = qualysReportFields.CATEGORIES[1]

-WEB_SCAN_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
-WEB_SCAN_INFO_HEADER.insert(WEB_SCAN_INFO_HEADER.index('QID'), 'Detection ID')
+WEB_SCAN_INFO_BLOCK = [
+"INFORMATION_GATHERED", "ID", "Detection ID", "QID", "Results", "Detection Date",
+"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+"Info#1"
+]

-WEB_SCAN_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
-WEB_SCAN_INFO_BLOCK.insert(WEB_SCAN_INFO_BLOCK.index('QID'), 'Detection ID')
+WEB_SCAN_INFO_HEADER = [
+"Vulnerability Category", "ID", "Detection ID", "QID", "Results", "Last Time Detected",
+"Unique ID", "Flags", "Protocol", "Virtual Host", "IP", "Port", "Result",
+"Info#1"
+]

-QID_HEADER = list(qualysReportFields.QID_HEADER)
-GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
-OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
-WASC_HEADER = list(qualysReportFields.WASC_HEADER)
-SCAN_META = list(qualysReportFields.SCAN_META)
-CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)
+QID_HEADER = [
+"QID", "Id", "Title", "Category", "Severity Level", "Groups", "OWASP", "WASC",
+"CWE", "CVSS Base", "CVSS Temporal", "Description", "Impact", "Solution",
+"CVSS V3 Base", "CVSS V3 Temporal", "CVSS V3 Attack Vector"
+]
+GROUP_HEADER = ['GROUP', 'Name', 'Category']
+OWASP_HEADER = ['OWASP', 'Code', 'Name']
+WASC_HEADER = ['WASC', 'Code', 'Name']
+SCAN_META = [
+"Web Application Name", "URL", "Owner", "Scope", "ID", "Tags",
+"Custom Attributes"
+]
+CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']

-def __init__(
-self,
-config=None,
-file_in=None,
-file_stream=False,
-delimiter=',',
-quotechar='"',
-):
+def __init__(self, config=None, file_in=None,
+file_stream=False, delimiter=',', quotechar='"'):
self.logger = logging.getLogger('qualysScanReport')
self.file_in = file_in
self.file_stream = file_stream
@@ -337,71 +241,79 @@ class qualysScanReport:
try:
self.qw = qualysWhisperAPI(config=config)
except Exception as e:
-self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
+self.logger.error(
+'Could not load config! Please check settings. Error: {}'.format(
+str(e)))

if file_stream:
self.open_file = file_in.splitlines()
elif file_in:

self.open_file = open(file_in, 'rb')

self.downloaded_file = None

def grab_sections(self, report):
all_dataframes = []
dict_tracker = {}
with open(report, 'rb') as csvfile:
dict_tracker['WEB_SCAN_VULN_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
return {
'WEB_SCAN_VULN_BLOCK': pd.DataFrame(
self.utils.grab_section(
report,
self.WEB_SCAN_VULN_BLOCK,
end=[
self.WEB_SCAN_SENSITIVE_BLOCK,
self.WEB_SCAN_INFO_BLOCK],
end=[self.WEB_SCAN_SENSITIVE_BLOCK, self.WEB_SCAN_INFO_BLOCK],
pop_last=True),
columns=self.WEB_SCAN_VULN_HEADER)
dict_tracker['WEB_SCAN_SENSITIVE_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_HEADER),
'WEB_SCAN_SENSITIVE_BLOCK': pd.DataFrame(
self.utils.grab_section(report,
self.WEB_SCAN_SENSITIVE_BLOCK,
end=[
self.WEB_SCAN_INFO_BLOCK,
self.WEB_SCAN_SENSITIVE_BLOCK],
end=[self.WEB_SCAN_INFO_BLOCK, self.WEB_SCAN_SENSITIVE_BLOCK],
pop_last=True),
columns=self.WEB_SCAN_SENSITIVE_HEADER)
dict_tracker['WEB_SCAN_INFO_BLOCK'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_HEADER),
'WEB_SCAN_INFO_BLOCK': pd.DataFrame(
self.utils.grab_section(
report,
self.WEB_SCAN_INFO_BLOCK,
end=[self.QID_HEADER],
pop_last=True),
columns=self.WEB_SCAN_INFO_HEADER)
dict_tracker['QID_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WEB_SCAN_INFO_HEADER),

'QID_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.QID_HEADER,
end=[self.GROUP_HEADER],
pop_last=True),
columns=self.QID_HEADER)
dict_tracker['GROUP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.QID_HEADER),
'GROUP_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.GROUP_HEADER,
end=[self.OWASP_HEADER],
pop_last=True),
columns=self.GROUP_HEADER)
dict_tracker['OWASP_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.GROUP_HEADER),
'OWASP_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.OWASP_HEADER,
end=[self.WASC_HEADER],
pop_last=True),
columns=self.OWASP_HEADER)
dict_tracker['WASC_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
self.WASC_HEADER, end=[['APPENDIX']],
columns=self.OWASP_HEADER),
'WASC_HEADER': pd.DataFrame(
self.utils.grab_section(
report,
self.WASC_HEADER,
end=[['APPENDIX']],
pop_last=True),
columns=self.WASC_HEADER)

dict_tracker['SCAN_META'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.WASC_HEADER),
'SCAN_META': pd.DataFrame(
self.utils.grab_section(report,
self.SCAN_META,
end=[self.CATEGORY_HEADER],
pop_last=True),
columns=self.SCAN_META)

dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(self.utils.grab_section(report,
columns=self.SCAN_META),
'CATEGORY_HEADER': pd.DataFrame(
self.utils.grab_section(report,
self.CATEGORY_HEADER),
columns=self.CATEGORY_HEADER)
all_dataframes.append(dict_tracker)

return all_dataframes
}
def data_normalizer(self, dataframes):
"""

@@ -409,12 +321,21 @@ class qualysScanReport:
:param dataframes:
:return:
"""
-df_dict = dataframes[0]
-merged_df = pd.concat([df_dict['WEB_SCAN_VULN_BLOCK'], df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
-df_dict['WEB_SCAN_INFO_BLOCK']], axis=0,
-ignore_index=False)
-merged_df = pd.merge(merged_df, df_dict['QID_HEADER'], left_on='QID',
-right_on='Id')
+df_dict = dataframes
+merged_df = pd.concat([
+df_dict['WEB_SCAN_VULN_BLOCK'],
+df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
+df_dict['WEB_SCAN_INFO_BLOCK']
+], axis=0, ignore_index=False)
+
+merged_df = pd.merge(
+merged_df,
+df_dict['QID_HEADER'].drop(
+#these columns always seem to be the same as what we're merging into
+['CVSS V3 Attack Vector', 'CVSS V3 Base', 'CVSS V3 Temporal'],
+axis=1),
+left_on='QID', right_on='Id'
+)

if 'Content' not in merged_df:
merged_df['Content'] = ''

@@ -431,8 +352,11 @@ class qualysScanReport:

merged_df = merged_df.assign(**df_dict['SCAN_META'].to_dict(orient='records')[0])

-merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'], how='left', left_on=['Category', 'Severity Level'],
-right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev'))
+merged_df = pd.merge(
+merged_df, df_dict['CATEGORY_HEADER'],
+how='left', left_on=['Category', 'Severity Level'],
+right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev')
+)

merged_df = merged_df.replace('N/A', '').fillna('')
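The reshaped merge above drops the lookup table's duplicated CVSS V3 columns before joining detections to QID metadata. A toy version with invented rows, assuming nothing beyond the column names visible in the hunk:

```python
# Toy data; the real frames come from grab_sections above.
import pandas as pd

detections = pd.DataFrame({'QID': ['150001'], 'Url': ['http://example.com']})
qid_info = pd.DataFrame({'Id': ['150001'], 'Title': ['Reflected XSS'],
                         'CVSS V3 Base': ['6.1']})

merged = pd.merge(detections,
                  qid_info.drop(['CVSS V3 Base'], axis=1),  # avoid duplicate columns
                  left_on='QID', right_on='Id')
print(merged[['QID', 'Url', 'Title']])
```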
@@ -1,15 +1,18 @@
+from __future__ import absolute_import
import json
import os
-from datetime import datetime, date, timedelta
+from datetime import datetime, date

from jira import JIRA
import requests
import logging
from bottle import template
import re
+from six.moves import range


class JiraAPI(object):
-def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True, max_time_window=12, decommission_time_window=3):
+def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True,
+max_time_window=12, decommission_time_window=3):
self.logger = logging.getLogger('JiraAPI')
if debug:
self.logger.setLevel(logging.DEBUG)
@@ -29,26 +32,31 @@ class JiraAPI(object):
self.template_path = 'vulnwhisp/reporting/resources/ticket.tpl'
self.max_ips_ticket = 30
self.attachment_filename = "vulnerable_assets.txt"
-self.max_time_tracking = max_time_window #in months
+self.max_time_tracking = max_time_window # in months
if path:
self.download_tickets(path)
else:
self.logger.warn("No local path specified, skipping Jira ticket download.")
-self.max_decommission_time = decommission_time_window #in months
+self.max_decommission_time = decommission_time_window # in months
# [HIGIENE] close tickets older than 12 months as obsolete (max_time_window defined)
if clean_obsolete:
self.close_obsolete_tickets()
# deletes the tag "server_decommission" from those tickets closed <=3 months ago
self.decommission_cleanup()

-self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known vulnerability might be the one reported).
-- In the case of the team accepting the risk and wanting to close the ticket, please add the label "*risk_accepted*" to the ticket before closing it.
-- If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing it.
-- If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add the label "*false_positive*" before closing it; we will review it and report it to the vendor.
+self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been \
+fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known \
+vulnerability might be the one reported).
+- In the case of the team accepting the risk and wanting to close the ticket, please add the label \
+"*risk_accepted*" to the ticket before closing it.
+- If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing \
+it.
+- If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add \
+the label "*false_positive*" before closing it; we will review it and report it to the vendor.

If you have further doubts, please contact the Security Team.'''

-def create_ticket(self, title, desc, project="IS", components=[], tags=[], attachment_contents = []):
+def create_ticket(self, title, desc, project="IS", components=[], tags=[], attachment_contents=[]):
labels = ['vulnerability_management']
for tag in tags:
labels.append(str(tag))
@@ -62,13 +70,12 @@ class JiraAPI(object):
for c in project_obj.components:
if component == c.name:
self.logger.debug("resolved component name {} to id {}".format(c.name, c.id))
-components_ticket.append({ "id": c.id })
-exists=True
+components_ticket.append({"id": c.id})
+exists = True
if not exists:
self.logger.error("Error creating Ticket: component {} not found".format(component))
return 0

try:
new_issue = self.jira.create_issue(project=project,
summary=title,
description=desc,

@@ -81,13 +88,9 @@ class JiraAPI(object):
if attachment_contents:
self.add_content_as_attachment(new_issue, attachment_contents)

except Exception as e:
self.logger.error("Failed to create ticket on Jira Project '{}'. Error: {}".format(project, e))
new_issue = False

return new_issue

-#Basic JIRA Metrics
+# Basic JIRA Metrics
def metrics_open_tickets(self, project=None):
jql = "labels= vulnerability_management and resolution = Unresolved"
if project:

@@ -96,13 +99,15 @@ class JiraAPI(object):
return len(self.jira.search_issues(jql, maxResults=0))

def metrics_closed_tickets(self, project=None):
-jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(self.max_time_tracking)
+jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format(
+self.max_time_tracking)
if project:
jql += " and (project='{}')".format(project)
return len(self.jira.search_issues(jql, maxResults=0))

def sync(self, vulnerabilities, project, components=[]):
-#JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution, ips, risk, references]
+# JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution,
+# ips, risk, references]
self.logger.info("JIRA Sync started")

for vuln in vulnerabilities:
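The metrics methods above are thin wrappers around JQL searches. A hedged, standalone sketch using the jira client directly; the server URL and credentials are placeholders, and `maxResults=0` asks the library to fetch every match:

```python
# Placeholders throughout; the JQL mirrors metrics_closed_tickets above
# with max_time_tracking already substituted as 12 months.
from jira import JIRA

jira = JIRA('https://jira.example.com', basic_auth=('username', 'password'))
jql = ("labels= vulnerability_management and NOT resolution = Unresolved "
       "AND created >=startOfMonth(-12)")
print(len(jira.search_issues(jql, maxResults=0)))
```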
@@ -111,7 +116,8 @@
if " " in vuln['scan_name']:
vuln['scan_name'] = "_".join(vuln['scan_name'].split(" "))

-# we exclude from the vulnerabilities to report those assets that already exist with *risk_accepted*/*server_decommission*
+# we exclude from the vulnerabilities to report those assets that already exist
+# with *risk_accepted*/*server_decommission*
vuln = self.exclude_accepted_assets(vuln)

# make sure after exclusion of risk_accepted assets there are still assets

@@ -136,13 +142,17 @@
# create local text file with assets, attach it to ticket
if len(vuln['ips']) > self.max_ips_ticket:
attachment_contents = vuln['ips']
-vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
+vuln['ips'] = [
+"Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
+assets=len(attachment_contents))]
try:
tpl = template(self.template_path, vuln)
except Exception as e:
self.logger.error('Exception templating: {}'.format(str(e)))
return 0
-self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components, tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']], attachment_contents = attachment_contents)
+self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components,
+tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']],
+attachment_contents=attachment_contents)
else:
self.logger.info("Ignoring vulnerability as all assets are already reported in a risk_accepted ticket")
@@ -158,34 +168,39 @@ class JiraAPI(object):
labels = [vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability']

if not self.excluded_tickets:
-jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
+jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
self.excluded_tickets = self.jira.search_issues(jql, maxResults=0)

title = vuln['title']
-#WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
-#it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
+# WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
+# it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
self.logger.info("Comparing vulnerability to risk_accepted tickets")
assets_to_exclude = []
tickets_excluded_assets = []
for index in range(len(self.excluded_tickets)):
-checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.excluded_tickets[index])
+checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(
+self.excluded_tickets[index])
if title.encode('ascii') == checking_title.encode('ascii'):
if checking_assets:
-#checking_assets is a list, we add to our full list for later delete all assets
-assets_to_exclude+=checking_assets
+# checking_assets is a list, we add to our full list for later delete all assets
+assets_to_exclude += checking_assets
tickets_excluded_assets.append(checking_ticketid)

if assets_to_exclude:
assets_to_remove = []
-self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(', '.join(tickets_excluded_assets)))
+self.logger.warn("Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}".format(
+', '.join(tickets_excluded_assets)))
self.logger.debug("Original assets: {}".format(vuln['ips']))
-#assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial
+# assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial
for exclusion in assets_to_exclude:
# for efficiency, we walk the backwards the array of ips from the scanners, as we will be popping out the matches
# and we don't want it to affect the rest of the processing (otherwise, it would miss the asset right after the removed one)
for index in range(len(vuln['ips']))[::-1]:
if exclusion == vuln['ips'][index].split(" - ")[0]:
-self.logger.debug("Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index], title))
+self.logger.debug(
+"Deleting asset {} from vulnerability {}, seen in risk_accepted.".format(vuln['ips'][index],
+title))
vuln['ips'].pop(index)
self.logger.debug("Modified assets: {}".format(vuln['ips']))
@@ -197,35 +212,37 @@ class JiraAPI(object):
Returns [exists (bool), is equal (bool), ticketid (str), assets (array)]
'''
# we need to return if the vulnerability has already been reported and the ID of the ticket for further processing
-#function returns array [duplicated(bool), update(bool), ticketid, ticket_assets]
+# function returns array [duplicated(bool), update(bool), ticketid, ticket_assets]
title = vuln['title']
labels = [vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability']
-#list(set()) to remove duplicates
+# list(set()) to remove duplicates
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips']))))

if not self.all_tickets:
self.logger.info("Retrieving all JIRA tickets with the following tags {}".format(labels))
# we want to check all JIRA tickets, to include tickets moved to other queues
# will exclude tickets older than 12 months, old tickets will get closed for higiene and recreated if still vulnerable
-jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
+jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)

self.all_tickets = self.jira.search_issues(jql, maxResults=0)

-#WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
-#it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
+# WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
+# it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
self.logger.info("Comparing Vulnerabilities to created tickets")
for index in range(len(self.all_tickets)):
checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.all_tickets[index])
# added "not risk_accepted", as if it is risk_accepted, we will create a new ticket excluding the accepted assets
-if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(self.jira.issue(checking_ticketid)):
+if title.encode('ascii') == checking_title.encode('ascii') and not self.is_risk_accepted(
+self.jira.issue(checking_ticketid)):
difference = list(set(assets).symmetric_difference(checking_assets))
-#to check intersection - set(assets) & set(checking_assets)
+# to check intersection - set(assets) & set(checking_assets)
if difference:
self.logger.info("Asset mismatch, ticket to update. Ticket ID: {}".format(checking_ticketid))
-return False, True, checking_ticketid, checking_assets #this will automatically validate
+return False, True, checking_ticketid, checking_assets # this will automatically validate
else:
self.logger.info("Confirmed duplicated. TickedID: {}".format(checking_ticketid))
-return True, False, checking_ticketid, [] #this will automatically validate
+return True, False, checking_ticketid, [] # this will automatically validate
return False, False, "", []
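The duplicate/update decision above hinges on `set.symmetric_difference`: any difference between the scanner's assets and the ticket's assets means the existing ticket needs an update, while an empty difference means a confirmed duplicate. A short illustration with invented addresses:

```python
# Invented asset lists; same decision rule as the hunk above.
assets = {'10.0.0.1', '10.0.0.2'}
checking_assets = {'10.0.0.2', '10.0.0.3'}

difference = assets.symmetric_difference(checking_assets)
print(sorted(difference))  # ['10.0.0.1', '10.0.0.3'] -> ticket needs an update
```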

def ticket_get_unique_fields(self, ticket):

@@ -234,19 +251,22 @@ class JiraAPI(object):

assets = self.get_assets_from_description(ticket)
if not assets:
-#check if attachment, if so, get assets from attachment
+# check if attachment, if so, get assets from attachment
assets = self.get_assets_from_attachment(ticket)

return ticketid, title, assets

-def get_assets_from_description(self, ticket, _raw = False):
+def get_assets_from_description(self, ticket, _raw=False):
# Get the assets as a string "host - protocol/port - hostname" separated by "\n"
# structure the text to have the same structure as the assets from the attachment
affected_assets = ""
try:
-affected_assets = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0].replace('\n','').replace(' * ','\n').replace('\n', '', 1)
+affected_assets = \
+ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[
+1].split("{panel}")[0].replace('\n', '').replace(' * ', '\n').replace('\n', '', 1)
except Exception as e:
-self.logger.error("Unable to process the Ticket's 'Affected Assets'. Ticket ID: {}. Reason: {}".format(ticket, e))
+self.logger.error(
+"Unable to process the Ticket's 'Affected Assets'. Ticket ID: {}. Reason: {}".format(ticket, e))

if affected_assets:
if _raw:

@@ -262,14 +282,14 @@ class JiraAPI(object):
self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
return False

-def get_assets_from_attachment(self, ticket, _raw = False):
+def get_assets_from_attachment(self, ticket, _raw=False):
# Get the assets as a string "host - protocol/port - hostname" separated by "\n"
affected_assets = []
try:
fields = self.jira.issue(ticket.key).raw.get('fields', {})
attachments = fields.get('attachment', {})
affected_assets = ""
-#we will make sure we get the latest version of the file
+# we will make sure we get the latest version of the file
latest = ''
attachment_id = ''
if attachments:
@@ -285,7 +305,8 @@ class JiraAPI(object):
affected_assets = self.jira.attachment(attachment_id).get()

except Exception as e:
-self.logger.error("Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))
+self.logger.error(
+"Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))

if affected_assets:
if _raw:

@@ -331,15 +352,15 @@ class JiraAPI(object):

def add_content_as_attachment(self, issue, contents):
try:
-#Create the file locally with the data
+# Create the file locally with the data
attachment_file = open(self.attachment_filename, "w")
attachment_file.write("\n".join(contents))
attachment_file.close()
-#Push the created file to the ticket
+# Push the created file to the ticket
attachment_file = open(self.attachment_filename, "rb")
self.jira.add_attachment(issue, attachment_file, self.attachment_filename)
attachment_file.close()
-#remove the temp file
+# remove the temp file
os.remove(self.attachment_filename)
self.logger.info("Added attachment successfully.")
except:
@@ -349,21 +370,23 @@ class JiraAPI(object):
return True

def get_ticket_reported_assets(self, ticket):
-#[METRICS] return a list with all the affected assets for that vulnerability (including already resolved ones)
-return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b",str(self.jira.issue(ticket).raw))))
+# [METRICS] return a list with all the affected assets for that vulnerability (including already resolved ones)
+return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", str(self.jira.issue(ticket).raw))))

def get_resolution_time(self, ticket):
-#get time a ticket took to be resolved
+# get time a ticket took to be resolved
ticket_obj = self.jira.issue(ticket)
if self.is_ticket_resolved(ticket_obj):
ticket_data = ticket_obj.raw.get('fields')
-#dates follow format '2018-11-06T10:36:13.849+0100'
-created = [int(x) for x in ticket_data['created'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
-resolved =[int(x) for x in ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':','-').split('-')]
+# dates follow format '2018-11-06T10:36:13.849+0100'
+created = [int(x) for x in
+ticket_data['created'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]
+resolved = [int(x) for x in
+ticket_data['resolutiondate'].split('.')[0].replace('T', '-').replace(':', '-').split('-')]

-start = datetime(created[0],created[1],created[2],created[3],created[4],created[5])
-end = datetime(resolved[0],resolved[1],resolved[2],resolved[3],resolved[4],resolved[5])
-return (end-start).days
+start = datetime(created[0], created[1], created[2], created[3], created[4], created[5])
+end = datetime(resolved[0], resolved[1], resolved[2], resolved[3], resolved[4], resolved[5])
+return (end - start).days
else:
self.logger.error("Ticket {ticket} is not resolved, can't calculate resolution time".format(ticket=ticket))
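get_resolution_time reduces Jira's timestamp format to integer components instead of parsing the timezone. A standalone sketch of that exact transformation, with illustrative timestamps:

```python
# Illustrative timestamps in the '2018-11-06T10:36:13.849+0100' shape the
# comment above documents; the split/replace chain matches the hunk.
from datetime import datetime

def to_dt(raw):
    parts = [int(x) for x in
             raw.split('.')[0].replace('T', '-').replace(':', '-').split('-')]
    return datetime(*parts)

created = to_dt('2018-11-06T10:36:13.849+0100')
resolved = to_dt('2018-11-09T09:00:00.000+0100')
print((resolved - created).days)  # 2
```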
@@ -373,11 +396,11 @@ class JiraAPI(object):
# correct description will always be in the vulnerability to report, only needed to update description to new one
self.logger.info("Ticket {} exists, UPDATE requested".format(ticketid))

-#for now, if a vulnerability has been accepted ('accepted_risk'), ticket is completely ignored and not updated (no new assets)
+# for now, if a vulnerability has been accepted ('accepted_risk'), ticket is completely ignored and not updated (no new assets)

-#TODO when vulnerability accepted, create a new ticket with only the non-accepted vulnerable assets
-#this would require go through the downloaded tickets, check duplicates/accepted ones, and if so,
-#check on their assets to exclude them from the new ticket
+# TODO when vulnerability accepted, create a new ticket with only the non-accepted vulnerable assets
+# this would require go through the downloaded tickets, check duplicates/accepted ones, and if so,
+# check on their assets to exclude them from the new ticket
risk_accepted = False
ticket_obj = self.jira.issue(ticketid)
if self.is_ticket_resolved(ticket_obj):

@@ -385,7 +408,7 @@ class JiraAPI(object):
return 0
self.reopen_ticket(ticketid=ticketid, comment=self.jira_still_vulnerable_comment)

-#First will do the comparison of assets
+# First will do the comparison of assets
ticket_obj.update()
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips']))))
difference = list(set(assets).symmetric_difference(ticket_assets))

@@ -393,7 +416,7 @@ class JiraAPI(object):
comment = ''
added = ''
removed = ''
-#put a comment with the assets that have been added/removed
+# put a comment with the assets that have been added/removed
for asset in difference:
if asset in assets:
if not added:
@@ -401,36 +424,39 @@ class JiraAPI(object):
added += '* {}\n'.format(asset)
elif asset in ticket_assets:
if not removed:
-removed= '\nThe following assets *have been resolved*:\n'
+removed = '\nThe following assets *have been resolved*:\n'
removed += '* {}\n'.format(asset)

comment = added + removed

-#then will check if assets are too many that need to be added as an attachment
+# then will check if assets are too many that need to be added as an attachment
attachment_contents = []
if len(vuln['ips']) > self.max_ips_ticket:
attachment_contents = vuln['ips']
-vuln['ips'] = ["Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(assets = len(attachment_contents))]
+vuln['ips'] = [
+"Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment.".format(
+assets=len(attachment_contents))]

-#fill the ticket description template
+# fill the ticket description template
try:
tpl = template(self.template_path, vuln)
except Exception as e:
self.logger.error('Exception updating assets: {}'.format(str(e)))
return 0

-#proceed checking if it requires adding as an attachment
+# proceed checking if it requires adding as an attachment
try:
-#update attachment with hosts and delete the old versions
+# update attachment with hosts and delete the old versions
if attachment_contents:
self.clean_old_attachments(ticket_obj)
self.add_content_as_attachment(ticket_obj, attachment_contents)

-ticket_obj.update(description=tpl, comment=comment, fields={"labels":ticket_obj.fields.labels})
+ticket_obj.update(description=tpl, comment=comment, fields={"labels": ticket_obj.fields.labels})
self.logger.info("Ticket {} updated successfully".format(ticketid))
self.add_label(ticketid, 'updated')
except Exception as e:
-self.logger.error("Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid = ticketid, e=e))
+self.logger.error(
+"Error while trying up update ticket {ticketid}.\nReason: {e}".format(ticketid=ticketid, e=e))
return 0

def add_label(self, ticketid, label):
@@ -440,10 +466,11 @@ class JiraAPI(object):
ticket_obj.fields.labels.append(label)

try:
-ticket_obj.update(fields={"labels":ticket_obj.fields.labels})
+ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
self.logger.info("Added label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
-except:
-self.logger.error("Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
+except Exception as e:
+self.logger.error(
+"Error while trying to add label {label} to ticket {ticket}".format(label=label, ticket=ticketid))

return 0

@@ -454,10 +481,11 @@ class JiraAPI(object):
ticket_obj.fields.labels.remove(label)

try:
-ticket_obj.update(fields={"labels":ticket_obj.fields.labels})
+ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
self.logger.info("Removed label {label} from ticket {ticket}".format(label=label, ticket=ticketid))
-except:
-self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label, ticket=ticketid))
+except Exception as e:
+self.logger.error("Error while trying to remove label {label} to ticket {ticket}".format(label=label,
+ticket=ticketid))
else:
self.logger.error("Error: label {label} not in ticket {ticket}".format(label=label, ticket=ticketid))
@@ -483,14 +511,13 @@ class JiraAPI(object):
self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
return 0


def is_ticket_reopenable(self, ticket_obj):
transitions = self.jira.transitions(ticket_obj)
for transition in transitions:
if transition.get('name') == self.JIRA_REOPEN_ISSUE:
self.logger.debug("Ticket is reopenable")
return True
-self.logger.error("Ticket {} can't be opened. Check Jira transitions.".format(ticket_obj))
+self.logger.warn("Ticket can't be opened. Check Jira transitions.")
return False

def is_ticket_closeable(self, ticket_obj):
@@ -498,11 +525,11 @@ class JiraAPI(object):
transitions = self.jira.transitions(ticket_obj)
for transition in transitions:
if transition.get('name') == self.JIRA_CLOSE_ISSUE:
return True
-self.logger.error("Ticket {} can't closed. Check Jira transitions.".format(ticket_obj))
+self.logger.warn("Ticket can't closed. Check Jira transitions.")
return False

def is_ticket_resolved(self, ticket_obj):
-#Checks if a ticket is resolved or not
+# Checks if a ticket is resolved or not
if ticket_obj is not None:
if ticket_obj.raw['fields'].get('resolution') is not None:
if ticket_obj.raw['fields'].get('resolution').get('name') != 'Unresolved':
@@ -512,7 +539,6 @@ class JiraAPI(object):
self.logger.debug("Checked ticket {} is already open".format(ticket_obj))
return False


def is_risk_accepted(self, ticket_obj):
if ticket_obj is not None:
if ticket_obj.raw['fields'].get('labels') is not None:
@@ -538,7 +564,8 @@ class JiraAPI(object):
if (not self.is_risk_accepted(ticket_obj) or ignore_labels):
try:
if self.is_ticket_reopenable(ticket_obj):
-error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE, comment = comment)
+error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE,
+comment=comment)
self.logger.info("Ticket {} reopened successfully".format(ticketid))
if not ignore_labels:
self.add_label(ticketid, 'reopened')

@@ -556,9 +583,10 @@ class JiraAPI(object):
if not self.is_ticket_resolved(ticket_obj):
try:
if self.is_ticket_closeable(ticket_obj):
-#need to add the label before closing the ticket
+# need to add the label before closing the ticket
self.add_label(ticketid, 'closed')
-error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE, comment = comment, resolution = {"name": resolution })
+error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE,
+comment=comment, resolution={"name": resolution})
self.logger.info("Ticket {} closed successfully".format(ticketid))
return 1
except Exception as e:
@@ -571,7 +599,8 @@ class JiraAPI(object):
def close_obsolete_tickets(self):
# Close tickets older than 12 months, vulnerabilities not solved will get created a new ticket
self.logger.info("Closing obsolete tickets older than {} months".format(self.max_time_tracking))
-jql = "labels=vulnerability_management AND NOT labels=advisory AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
+jql = "labels=vulnerability_management AND NOT labels=advisory AND created <startOfMonth(-{}) and resolution=Unresolved".format(
+self.max_time_tracking)
tickets_to_close = self.jira.search_issues(jql, maxResults=0)

comment = '''This ticket is being closed for hygiene, as it is more than {} months old.

@@ -594,7 +623,7 @@ class JiraAPI(object):
'''
saves all tickets locally, local snapshot of vulnerability_management ticktes
'''
-#check if file already exists
+# check if file already exists
check_date = str(date.today())
fname = '{}jira_{}.json'.format(path, check_date)
if os.path.isfile(fname):
@@ -602,10 +631,11 @@ class JiraAPI(object):
return True
try:
self.logger.info("Saving locally tickets from the last {} months".format(self.max_time_tracking))
-jql = "labels=vulnerability_management AND NOT labels=advisory AND created >=startOfMonth(-{})".format(self.max_time_tracking)
+jql = "labels=vulnerability_management AND NOT labels=advisory AND created >=startOfMonth(-{})".format(
+self.max_time_tracking)
tickets_data = self.jira.search_issues(jql, maxResults=0)

-#TODO process tickets, creating a new field called "_metadata" with all the affected assets well structured
+# TODO process tickets, creating a new field called "_metadata" with all the affected assets well structured
# for future processing in ELK/Splunk; this includes downloading attachments with assets and processing them

processed_tickets = []

@@ -626,14 +656,13 @@ class JiraAPI(object):
assets_json = self.parse_asset_to_json(assets)
_metadata["affected_hosts"].append(assets_json)


temp_ticket = ticket.raw.get('fields')
temp_ticket['_metadata'] = _metadata

processed_tickets.append(temp_ticket)

-#end of line needed, as writelines() doesn't add it automatically, otherwise one big line
-to_save = [json.dumps(ticket.raw.get('fields'))+"\n" for ticket in tickets_data]
+# end of line needed, as writelines() doesn't add it automatically, otherwise one big line
+to_save = [json.dumps(ticket.raw.get('fields')) + "\n" for ticket in tickets_data]
with open(fname, 'w') as outfile:
outfile.writelines(to_save)
self.logger.info("Tickets saved succesfully.")
@@ -651,17 +680,20 @@ class JiraAPI(object):
closed already for more than x months (default is 3 months) in order to clean solved issues
for statistics purposes
'''
-self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(self.max_decommission_time))
+self.logger.info("Deleting 'server_decommission' tag from tickets closed more than {} months ago".format(
+self.max_decommission_time))

-jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(self.max_decommission_time)
+jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(
+self.max_decommission_time)
decommissioned_tickets = self.jira.search_issues(jql, maxResults=0)

comment = '''This ticket is having deleted the *server_decommission* tag, as it is more than {} months old and is expected to already have been decommissioned.
-If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(self.max_decommission_time)
+If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(
+self.max_decommission_time)

for ticket in decommissioned_tickets:
-#we open first the ticket, as we want to make sure the process is not blocked due to
-#an unexisting jira workflow or unallowed edit from closed tickets
+# we open first the ticket, as we want to make sure the process is not blocked due to
+# an unexisting jira workflow or unallowed edit from closed tickets
self.reopen_ticket(ticketid=ticket, ignore_labels=True)
self.remove_label(ticket, 'server_decommission')
self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
@@ -1,3 +1,4 @@
from __future__ import absolute_import
import os
import logging
import httpretty
@@ -20,10 +21,12 @@ class mockAPI(object):

def get_directories(self, path):
dir, subdirs, files = next(os.walk(path))
self.logger.debug('Subdirectories found: {}'.format(subdirs))
return subdirs

def get_files(self, path):
dir, subdirs, files = next(os.walk(path))
self.logger.debug('Files found: {}'.format(files))
return files

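These helpers lean on a specific property of `os.walk`; a standalone sketch:

```python
import os

# next(os.walk(path)) yields a single (dirpath, dirnames, filenames) triple
# for the top level only, so the helpers above list immediate children
# without recursing into the whole tree.
dirpath, subdirs, files = next(os.walk("."))
print("dirs:", subdirs)
print("files:", files)
```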
def qualys_vuln_callback(self, request, uri, response_headers):
@@ -1,13 +1,17 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from six.moves import range
from functools import reduce

__author__ = 'Austin Taylor'

from base.config import vwConfig
from frameworks.nessus import NessusAPI
from frameworks.qualys_web import qualysScanReport
from frameworks.qualys_vuln import qualysVulnScan
from frameworks.openvas import OpenVAS_API
from reporting.jira_api import JiraAPI
from .base.config import vwConfig
from .frameworks.nessus import NessusAPI
from .frameworks.qualys_web import qualysScanReport
from .frameworks.qualys_vuln import qualysVulnScan
from .frameworks.openvas import OpenVAS_API
from .reporting.jira_api import JiraAPI
import pandas as pd
from lxml import objectify
import sys
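The dotted imports are the heart of this 2to3 hunk. A sketch of why they are needed, assuming the modules live inside the vulnwhisperer package:

```python
# Python 2 resolved "from base.config import vwConfig" inside a package by
# implicit relative lookup; Python 3 treats it as absolute and raises
# ModuleNotFoundError. The explicit leading dot restores the intra-package
# lookup on both interpreters.
from __future__ import absolute_import  # opts Python 2 into the Python 3 rule

# Python 2 only (implicit relative):
#   from base.config import vwConfig
# Python 2 and 3 (explicit relative):
#   from .base.config import vwConfig

# six.moves bridges renamed builtins the same way: this "range" is the lazy
# xrange on Python 2 and the built-in range on Python 3.
from six.moves import range
```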
@@ -21,7 +25,6 @@ import socket


class vulnWhispererBase(object):

CONFIG_SECTION = None

def __init__(
@@ -65,8 +68,6 @@ class vulnWhispererBase(object):
self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')



if self.db_name is not None:
if self.db_path:
self.database = os.path.join(self.db_path,
@@ -88,7 +89,8 @@ class vulnWhispererBase(object):
self.cur = self.conn.cursor()
self.logger.info('Connected to database at {loc}'.format(loc=self.database))
except Exception as e:
self.logger.error('Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
self.logger.error(
'Could not connect to database at {loc}\nReason: {e} - Please ensure the path exists'.format(
e=e,
loc=self.database))
else:
@@ -147,11 +149,11 @@ class vulnWhispererBase(object):
return data

def record_insert(self, record):
#for backwards compatibility with older versions without "reported" field
# for backwards compatibility with older versions without "reported" field

try:
#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
#TODO delete backward compatibility check after some versions
# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if last_column_table == self.table_columns[-1]:
self.cur.execute('insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?,?)'.format(
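The PRAGMA-based schema probe above is compact but easy to misread; a self-contained sketch of what it inspects:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE scan_history (id INTEGER, filename TEXT, reported INTEGER)")

# PRAGMA table_info returns one row per column:
#   (cid, name, type, notnull, dflt_value, pk)
# so [-1][1] is the *name of the last column*, which is how record_insert
# tells the old schema ("processed") from the new one ("reported").
last_column = conn.execute("PRAGMA table_info(scan_history)").fetchall()[-1][1]
print(last_column)  # -> 'reported'
```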
@@ -166,8 +168,8 @@ class vulnWhispererBase(object):
sys.exit(1)

def set_latest_scan_reported(self, filename):
#the reason to use the filename instead of the source/scan_name is because the filename already belongs to
#that latest scan, and we maintain integrity making sure that it is the exact scan we checked
# the reason to use the filename instead of the source/scan_name is because the filename already belongs to
# that latest scan, and we maintain integrity making sure that it is the exact scan we checked
try:
self.cur.execute('UPDATE scan_history SET reported = 1 WHERE filename="{}";'.format(filename))
self.conn.commit()
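As an aside, the UPDATE above splices the filename straight into the SQL string. A sketch of the bound-parameter alternative (not what this changeset does), which sidesteps quoting surprises:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE scan_history (filename TEXT, reported INTEGER DEFAULT 0)")
conn.execute("INSERT INTO scan_history (filename) VALUES ('nessus_scan_1.csv')")

filename = "nessus_scan_1.csv"
# "?" placeholders let sqlite3 handle escaping, so a filename containing
# quotes cannot break (or alter) the statement.
conn.execute("UPDATE scan_history SET reported = 1 WHERE filename = ?", (filename,))
conn.commit()
```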
@@ -185,7 +187,8 @@ class vulnWhispererBase(object):
"""
try:
self.conn.text_factory = str
self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(config_section=self.CONFIG_SECTION))
self.cur.execute('SELECT uuid FROM scan_history where source = "{config_section}"'.format(
config_section=self.CONFIG_SECTION))
results = frozenset([r[0] for r in self.cur.fetchall()])
except:
results = []
@@ -208,18 +211,23 @@ class vulnWhispererBase(object):

try:
self.conn.text_factory = str
self.cur.execute('SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(source, scan_name))
#should always return just one filename
self.cur.execute(
'SELECT filename FROM scan_history WHERE source="{}" AND scan_name="{}" ORDER BY last_modified DESC LIMIT 1;'.format(
source, scan_name))
# should always return just one filename
results = [r[0] for r in self.cur.fetchall()][0]

#-1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
#TODO delete backward compatibility check after some versions
# -1 to get the latest column, 1 to get the column name (old version would be "processed", new "reported")
# TODO delete backward compatibility check after some versions
last_column_table = self.cur.execute('PRAGMA table_info(scan_history)').fetchall()[-1][1]
if results and last_column_table == self.table_columns[-1]:
reported = self.cur.execute('SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
reported = self.cur.execute(
'SELECT reported FROM scan_history WHERE filename="{}"'.format(results)).fetchall()
reported = reported[0][0]
if reported:
self.logger.debug("Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(source=source, scan_name=scan_name))
self.logger.debug(
"Last downloaded scan from source {source} scan_name {scan_name} has already been reported".format(
source=source, scan_name=scan_name))

except Exception as e:
self.logger.error("Error when getting latest results from {}.{} : {}".format(source, scan_name, e))
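A compact sketch of what this lookup does against scan_history, with the table reduced to the columns used here:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE scan_history "
             "(source TEXT, scan_name TEXT, filename TEXT, last_modified INTEGER, reported INTEGER)")
conn.executemany("INSERT INTO scan_history VALUES (?,?,?,?,?)", [
    ("nessus", "weekly", "nessus_scan_1.csv", 1, 1),
    ("nessus", "weekly", "nessus_scan_2.csv", 2, 0),
])

# Newest file for a (source, scan_name) pair, then its reported flag:
# the same two-step lookup as the method above.
filename = conn.execute(
    "SELECT filename FROM scan_history WHERE source=? AND scan_name=? "
    "ORDER BY last_modified DESC LIMIT 1", ("nessus", "weekly")).fetchone()[0]
reported = conn.execute(
    "SELECT reported FROM scan_history WHERE filename=?", (filename,)).fetchone()[0]
print(filename, reported)  # -> nessus_scan_2.csv 0 (not yet reported)
```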
@@ -248,14 +256,14 @@ class vulnWhispererBase(object):
self.cur.execute("SELECT DISTINCT scan_name FROM scan_history WHERE source='{}';".format(source))
scan_names = [r[0] for r in self.cur.fetchall()]
for scan in scan_names:
results.append('{}.{}'.format(source,scan))
results.append('{}.{}'.format(source, scan))
except:
scan_names = []

return results

class vulnWhispererNessus(vulnWhispererBase):

class vulnWhispererNessus(vulnWhispererBase):
CONFIG_SECTION = None

def __init__(
@@ -269,7 +277,7 @@ class vulnWhispererNessus(vulnWhispererBase):
password=None,
profile='nessus'
):
self.CONFIG_SECTION=profile
self.CONFIG_SECTION = profile

super(vulnWhispererNessus, self).__init__(config=config)

@@ -291,8 +299,8 @@ class vulnWhispererNessus(vulnWhispererBase):
'trash')

try:
self.access_key = self.config.get(self.CONFIG_SECTION,'access_key')
self.secret_key = self.config.get(self.CONFIG_SECTION,'secret_key')
self.access_key = self.config.get(self.CONFIG_SECTION, 'access_key')
self.secret_key = self.config.get(self.CONFIG_SECTION, 'secret_key')
except:
pass

@@ -320,9 +328,7 @@ class vulnWhispererNessus(vulnWhispererBase):
except Exception as e:
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
return False
#sys.exit(1)


# sys.exit(1)

def scan_count(self, scans, completed=False):
"""
@@ -365,7 +371,6 @@ class vulnWhispererNessus(vulnWhispererBase):
scan_records = [s for s in scan_records if s['status'] == 'completed']
return scan_records


def whisper_nessus(self):
if self.nessus_connect:
scan_data = self.nessus.scans
@@ -420,7 +425,8 @@ class vulnWhispererNessus(vulnWhispererBase):
s['uuid'],
)

# TODO Create directory sync function which scans the directory for files that exist already and populates the database
# TODO Create directory sync function which scans the directory for files that exist already and
# populates the database

folder_id = s['folder_id']
if self.CONFIG_SECTION == 'tenable':
@@ -450,22 +456,26 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
self.logger.info('File {filename} already exists! Updating database'.format(filename=relative_path_name))
self.logger.info(
'File {filename} already exists! Updating database'.format(filename=relative_path_name))
else:
try:
file_req = \
self.nessus.download_scan(scan_id=scan_id, history=history_id,
export_format='csv')
except Exception as e:
self.logger.error('Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
self.logger.error(
'Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
self.exit_code += 1
continue

clean_csv = \
pd.read_csv(io.StringIO(file_req.decode('utf-8')))
if len(clean_csv) > 2:
self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name.encode('utf8')))
columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output', 'MAC Address']
self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list),
scan_name.encode('utf8')))
columns_to_cleanse = ['CVSS', 'CVE', 'Description', 'Synopsis', 'Solution', 'See Also',
'Plugin Output', 'MAC Address']

for col in columns_to_cleanse:
if col in clean_csv:
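The download-then-parse step above never touches disk; a sketch with a canned CSV payload standing in for the scanner response:

```python
import io
import pandas as pd

# Stand-in for file_req: the raw bytes returned by the scanner's CSV export.
file_req = b"Host,Risk,Name\n10.0.0.1,Critical,Example finding\n"

# Decode and wrap in StringIO so pandas can parse the export in memory,
# exactly as the hunk above does after download_scan().
clean_csv = pd.read_csv(io.StringIO(file_req.decode("utf-8")))
print(clean_csv.shape[0], "row(s) parsed")
```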
@@ -486,7 +496,8 @@ class vulnWhispererNessus(vulnWhispererBase):
)
self.record_insert(record_meta)
self.logger.info('{filename} records written to {path} '.format(filename=clean_csv.shape[0],
path=file_name.encode('utf8')))
path=file_name.encode(
'utf8')))
else:
record_meta = (
scan_name,
@@ -501,27 +512,32 @@ class vulnWhispererNessus(vulnWhispererBase):
0,
)
self.record_insert(record_meta)
self.logger.warn('{} has no host available... Updating database and skipping!'.format(file_name))
self.logger.warn(
'{} has no host available... Updating database and skipping!'.format(file_name))
self.conn.close()
self.logger.info('Scan aggregation complete! Connection to database closed.')
else:
self.logger.error('Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
self.logger.error(
'Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
self.exit_code += 1
return self.exit_code


class vulnWhispererQualys(vulnWhispererBase):

CONFIG_SECTION = 'qualys_web'
COLUMN_MAPPING = {'Access Path': 'access_path',
'Ajax Request': 'ajax_request',
'Ajax Request ID': 'ajax_request_id',
'Authentication': 'authentication',
'CVSS Base': 'cvss',
'CVSS V3 Attack Vector': 'cvss_v3_attack_vector',
'CVSS V3 Base': 'cvss_v3_base',
'CVSS V3 Temporal': 'cvss_v3_temporal',
'CVSS Temporal': 'cvss_temporal',
'CWE': 'cwe',
'Category': 'category',
'Content': 'content',
'Custom Attributes': 'custom_attributes',
'DescriptionSeverity': 'severity_description',
'DescriptionCatSev': 'category_description',
'Detection ID': 'detection_id',
@@ -537,15 +553,19 @@ class vulnWhispererQualys(vulnWhispererBase):
'Ignore User': 'ignore_user',
'Ignored': 'ignored',
'Impact': 'impact',
'Info#1': 'info_1',
'Last Time Detected': 'last_time_detected',
'Last Time Tested': 'last_time_tested',
'Level': 'level',
'OWASP': 'owasp',
'Operating System': 'operating_system',
'Owner': 'owner',
'Param': 'param',
'Param/Cookie': 'param',
'Payload #1': 'payload_1',
'Port': 'port',
'Protocol': 'protocol',
'QID': 'plugin_id',
'Request Body #1': 'request_body_1',
'Request Headers #1': 'request_headers_1',
'Request Method #1': 'request_method_1',
'Request URL #1': 'request_url_1',
@@ -554,13 +574,17 @@ class vulnWhispererQualys(vulnWhispererBase):
'Severity': 'risk',
'Severity Level': 'security_level',
'Solution': 'solution',
'Tags': 'tags',
'Times Detected': 'times_detected',
'Title': 'plugin_name',
'URL': 'url',
'Unique ID': 'unique_id',
'Url': 'uri',
'Vulnerability Category': 'vulnerability_category',
'Virtual Host': 'virutal_host',
'WASC': 'wasc',
'Web Application Name': 'web_application_name'}

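COLUMN_MAPPING exists to flatten the Qualys report headers into the snake_case field names the downstream Logstash/Elasticsearch pipeline indexes. A sketch with a reduced, illustrative subset of the mapping:

```python
import pandas as pd

# Illustrative subset of COLUMN_MAPPING.
mapping = {"QID": "plugin_id", "Severity": "risk", "Title": "plugin_name"}

report = pd.DataFrame([{"QID": 11827, "Severity": 3, "Title": "HTTP TRACE enabled"}])
normalized = report.rename(columns=mapping)
print(list(normalized.columns))  # -> ['plugin_id', 'risk', 'plugin_name']
```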
def __init__(
self,
config=None,
@@ -608,7 +632,7 @@ class vulnWhispererQualys(vulnWhispererBase):
relative_path_name = self.path_check(report_name).encode('utf8')

if os.path.isfile(relative_path_name):
#TODO Possibly make this optional to sync directories
# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
@@ -668,7 +692,8 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
cleaning_up = self.qualys_scan.qw.delete_report(generated_report_id)
os.remove(self.path_check(str(generated_report_id) + '.csv'))
self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
self.logger.info(
'Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
else:
self.logger.error('Could not process report ID: {}'.format(status))

@@ -676,7 +701,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
return vuln_ready


def identify_scans_to_process(self):
if self.uuids:
self.scans_to_process = self.latest_scans[~self.latest_scans['id'].isin(self.uuids)]
@@ -684,7 +708,6 @@ class vulnWhispererQualys(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))


def process_web_assets(self):
counter = 0
self.identify_scans_to_process()
@@ -765,7 +788,6 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
if report_id:
self.logger.info('Processing report ID: {}'.format(report_id))


scan_name = report_id.replace('-', '')
report_name = 'openvas_scan_{scan_name}_{last_updated}.{extension}'.format(scan_name=scan_name,
last_updated=launched_date,
@@ -833,7 +855,8 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
for scan in self.scans_to_process.iterrows():
counter += 1
info = scan[1]
self.logger.info('Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.logger.info(
'Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.whisper_reports(report_id=info['report_ids'],
launched_date=info['epoch'])
self.logger.info('Processing complete')
@@ -844,7 +867,6 @@ class vulnWhispererOpenVAS(vulnWhispererBase):


class vulnWhispererQualysVuln(vulnWhispererBase):

CONFIG_SECTION = 'qualys_vuln'
COLUMN_MAPPING = {'cvss_base': 'cvss',
'cvss3_base': 'cvss3',
@@ -887,14 +909,14 @@ class vulnWhispererQualysVuln(vulnWhispererBase):

if 'Z' in launched_date:
launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
report_name = 'qualys_vuln_' + report_id.replace('/','_') \
report_name = 'qualys_vuln_' + report_id.replace('/', '_') \
+ '_{last_updated}'.format(last_updated=launched_date) \
+ '.json'

relative_path_name = self.path_check(report_name).encode('utf8')

if os.path.isfile(relative_path_name):
#TODO Possibly make this optional to sync directories
# TODO Possibly make this optional to sync directories
file_length = len(open(relative_path_name).readlines())
record_meta = (
scan_name,
@@ -945,7 +967,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.logger.info('Report written to {}'.format(report_name))
return self.exit_code


def identify_scans_to_process(self):
self.latest_scans = self.qualys_scan.qw.get_all_scans()
if self.uuids:
@@ -956,7 +977,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
self.scans_to_process = self.latest_scans
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))


def process_vuln_scans(self):
counter = 0
self.identify_scans_to_process()
@@ -976,7 +996,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):


class vulnWhispererJIRA(vulnWhispererBase):

CONFIG_SECTION = 'jira'

def __init__(
@@ -998,8 +1017,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.host_resolv_cache = {}
self.host_no_resolv = []
self.no_resolv_by_team_dict = {}
#Save locally those assets without DNS entry for flag to system owners
self.no_resolv_fname="no_resolv.txt"
# Save locally those assets without DNS entry for flag to system owners
self.no_resolv_fname = "no_resolv.txt"
if os.path.isfile(self.no_resolv_fname):
with open(self.no_resolv_fname, "r") as json_file:
self.no_resolv_by_team_dict = json.load(json_file)
@@ -1012,7 +1031,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
JiraAPI(hostname=self.hostname,
username=self.username,
password=self.password,
path=self.config.get('jira','write_path'))
path=self.config.get('jira', 'write_path'))
self.jira_connect = True
self.logger.info('Connected to jira on {host}'.format(host=self.hostname))
except Exception as e:
@@ -1021,24 +1040,25 @@ class vulnWhispererJIRA(vulnWhispererBase):
'Could not connect to Jira -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
config=self.config.config_in, e=e))
return False
#sys.exit(1)
# sys.exit(1)

profiles = []
profiles = self.get_scan_profiles()

if not self.config.exists_jira_profiles(profiles):
self.config.update_jira_profiles(profiles)
self.logger.info("Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(config=self.config_path))
self.logger.info(
"Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(
config=self.config_path))
sys.exit(0)


def get_env_variables(self, source, scan_name):
# function returns an array with [jira_project, jira_components, datafile_path]

#Jira variables
jira_section = self.config.normalize_section("{}.{}".format(source,scan_name))
# Jira variables
jira_section = self.config.normalize_section("{}.{}".format(source, scan_name))

project = self.config.get(jira_section,'jira_project')
project = self.config.get(jira_section, 'jira_project')
if project == "":
self.logger.error('JIRA project is missing from the configuration file!')
sys.exit(0)
@@ -1048,35 +1068,39 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.logger.error("JIRA project '{project}' doesn't exist!".format(project=project))
sys.exit(0)

components = self.config.get(jira_section,'components').split(',')
components = self.config.get(jira_section, 'components').split(',')

#cleaning empty array from ''
# cleaning empty array from ''
if not components[0]:
components = []

min_critical = self.config.get(jira_section,'min_critical_to_report')
min_critical = self.config.get(jira_section, 'min_critical_to_report')
if not min_critical:
self.logger.error('"min_critical_to_report" variable in the config file is empty.')
sys.exit(0)

#datafile path
# datafile path
filename, reported = self.get_latest_results(source, scan_name)
fullpath = ""

# search data files under user specified directory
for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source,'write_path')):
for root, dirnames, filenames in os.walk(vwConfig(self.config_path).get(source, 'write_path')):
if filename in filenames:
fullpath = "{}/{}".format(root,filename)
fullpath = "{}/{}".format(root, filename)

if reported:
self.logger.warn('Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(scan_name=scan_name, source=source))
self.logger.warn(
'Last Scan of "{scan_name}" for source "{source}" has already been reported; will be skipped.'.format(
scan_name=scan_name, source=source))
return [False] * 5

if not fullpath:
self.logger.error('Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(scan_name=scan_name, source=source))
self.logger.error(
'Scan of "{scan_name}" for source "{source}" has not been found. Please check that the scanner data files are in place.'.format(
scan_name=scan_name, source=source))
sys.exit(1)

dns_resolv = self.config.get('jira','dns_resolv')
dns_resolv = self.config.get('jira', 'dns_resolv')
if dns_resolv in ('False', 'false', ''):
dns_resolv = False
elif dns_resolv in ('True', 'true'):
@@ -1087,36 +1111,36 @@ class vulnWhispererJIRA(vulnWhispererBase):

return project, components, fullpath, min_critical, dns_resolv


def parse_nessus_vulnerabilities(self, fullpath, source, scan_name, min_critical):

vulnerabilities = []

# we need to parse the CSV
risks = ['none', 'low', 'medium', 'high', 'critical']
min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])
min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0])

df = pd.read_csv(fullpath, delimiter=',')

#nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
# nessus fields we want - ['Host','Protocol','Port', 'Name', 'Synopsis', 'Description', 'Solution', 'See Also']
for index in range(len(df)):
# filtering vulnerabilities by criticality, discarding low risk
to_report = int([i for i,x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
to_report = int([i for i, x in enumerate(risks) if x == df.loc[index]['Risk'].lower()][0])
if to_report < min_risk:
continue

if not vulnerabilities or df.loc[index]['Name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
#vulnerabilities should have all the info for creating all JIRA labels
# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
#vulnerability variables
# vulnerability variables
vuln['title'] = df.loc[index]['Name']
vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n',' ')
vuln['consequence'] = df.loc[index]['Description'].replace('\\n',' ')
vuln['solution'] = df.loc[index]['Solution'].replace('\\n',' ')
vuln['diagnosis'] = df.loc[index]['Synopsis'].replace('\\n', ' ')
vuln['consequence'] = df.loc[index]['Description'].replace('\\n', ' ')
vuln['solution'] = df.loc[index]['Solution'].replace('\\n', ' ')
vuln['ips'] = []
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['ips'].append(
"{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['risk'] = df.loc[index]['Risk'].lower()

# Nessus "nan" value gets automatically casted to float by python
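The enumerate/filter one-liner above is just an index lookup into the ordered risks list; a sketch of the thresholding it implements:

```python
# min_critical comes from the per-scan config (e.g. 'high'); anything that
# ranks below it in the ordered list is skipped.
risks = ['none', 'low', 'medium', 'high', 'critical']
min_critical = 'high'                 # assumed config value
min_risk = risks.index(min_critical)  # equivalent to the enumerate one-liner

findings = ['low', 'critical', 'medium', 'high']
print([f for f in findings if risks.index(f) >= min_risk])  # -> ['critical', 'high']
```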
@@ -1130,51 +1154,54 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == df.loc[index]['Name']:
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'], df.loc[index]['Port']))
vuln['ips'].append("{} - {}/{}".format(df.loc[index]['Host'], df.loc[index]['Protocol'],
df.loc[index]['Port']))

return vulnerabilities

def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv = False):
#parsing of the qualys vulnerabilities schema
#parse json
def parse_qualys_vuln_vulnerabilities(self, fullpath, source, scan_name, min_critical, dns_resolv=False):
# parsing of the qualys vulnerabilities schema
# parse json
vulnerabilities = []

risks = ['info', 'low', 'medium', 'high', 'critical']
# +1 as array is 0-4, but score is 1-5
min_risk = int([i for i,x in enumerate(risks) if x == min_critical][0])+1
min_risk = int([i for i, x in enumerate(risks) if x == min_critical][0]) + 1

try:
data=[json.loads(line) for line in open(fullpath).readlines()]
data = [json.loads(line) for line in open(fullpath).readlines()]
except Exception as e:
self.logger.warn("Scan has no vulnerabilities, skipping.")
return vulnerabilities

#qualys fields we want - []
# qualys fields we want - []
for index in range(len(data)):
if int(data[index]['risk']) < min_risk:
continue

elif data[index]['type'] == 'Practice' or data[index]['type'] == 'Ig':
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(vuln=data[index]['plugin_name']))
self.logger.debug("Vulnerability '{vuln}' ignored, as it is 'Practice/Potential', not verified.".format(
vuln=data[index]['plugin_name']))
continue

if not vulnerabilities or data[index]['plugin_name'] not in [entry['title'] for entry in vulnerabilities]:
vuln = {}
#vulnerabilities should have all the info for creating all JIRA labels
# vulnerabilities should have all the info for creating all JIRA labels
vuln['source'] = source
vuln['scan_name'] = scan_name
#vulnerability variables
# vulnerability variables
vuln['title'] = data[index]['plugin_name']
vuln['diagnosis'] = data[index]['threat'].replace('\\n',' ')
vuln['consequence'] = data[index]['impact'].replace('\\n',' ')
vuln['solution'] = data[index]['solution'].replace('\\n',' ')
vuln['diagnosis'] = data[index]['threat'].replace('\\n', ' ')
vuln['consequence'] = data[index]['impact'].replace('\\n', ' ')
vuln['solution'] = data[index]['solution'].replace('\\n', ' ')
vuln['ips'] = []
#TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!
# TODO ADDED DNS RESOLUTION FROM QUALYS! \n SEPARATORS INSTEAD OF \\n!

vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
vuln['ips'].append(
"{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))

#different risk system than Nessus!
vuln['risk'] = risks[int(data[index]['risk'])-1]
# different risk system than Nessus!
vuln['risk'] = risks[int(data[index]['risk']) - 1]

# Nessus "nan" value gets automatically casted to float by python
if not (type(data[index]['vendor_reference']) is float or data[index]['vendor_reference'] == None):
@@ -1186,7 +1213,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
# grouping assets by vulnerability to open on single ticket, as each asset has its own nessus entry
for vuln in vulnerabilities:
if vuln['title'] == data[index]['plugin_name']:
vuln['ips'].append("{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))
vuln['ips'].append(
"{ip} - {protocol}/{port} - {dns}".format(**self.get_asset_fields(data[index], dns_resolv)))

return vulnerabilities

@@ -1200,7 +1228,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
if vuln['dns']:
values['dns'] = vuln['dns']
else:
if values['ip'] in self.host_resolv_cache.keys():
if values['ip'] in list(self.host_resolv_cache.keys()):
self.logger.debug("Hostname from {ip} cached, retrieving from cache.".format(ip=values['ip']))
values['dns'] = self.host_resolv_cache[values['ip']]
else:
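A standalone sketch of the cache-then-resolve pattern this hunk touches; the reverse-lookup call is an assumption here, not necessarily the exact one the project uses:

```python
import socket

host_resolv_cache = {}  # ip -> hostname, mirroring self.host_resolv_cache

def resolve(ip):
    # Scanner exports repeat the same IPs many times; caching keeps one
    # reverse lookup per host instead of one per finding.
    if ip in host_resolv_cache:
        return host_resolv_cache[ip]
    try:
        dns = socket.gethostbyaddr(ip)[0]
    except (socket.herror, socket.gaierror):
        dns = ""  # left empty so unresolved assets can be flagged to owners
    host_resolv_cache[ip] = dns
    return dns
```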
@@ -1221,51 +1249,51 @@ class vulnWhispererJIRA(vulnWhispererBase):
return values

def parse_vulnerabilities(self, fullpath, source, scan_name, min_critical):
#TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
#JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']
# TODO: SINGLE LOCAL SAVE FORMAT FOR ALL SCANNERS
# JIRA standard vuln format - ['source', 'scan_name', 'title', 'diagnosis', 'consequence', 'solution', 'ips', 'references']

return 0


def jira_sync(self, source, scan_name):
self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source, scan_name=scan_name))
self.logger.info("Jira Sync triggered for source '{source}' and scan '{scan_name}'".format(source=source,
scan_name=scan_name))

project, components, fullpath, min_critical, dns_resolv = self.get_env_variables(source, scan_name)

if not project:
self.logger.debug("Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(source=source, scan_name=scan_name))
self.logger.debug(
"Skipping scan for source '{source}' and scan '{scan_name}': vulnerabilities have already been reported.".format(
source=source, scan_name=scan_name))
return False

vulnerabilities = []

#***Nessus parsing***
# ***Nessus parsing***
if source == "nessus":
vulnerabilities = self.parse_nessus_vulnerabilities(fullpath, source, scan_name, min_critical)

#***Qualys VM parsing***
# ***Qualys VM parsing***
if source == "qualys_vuln":
vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)
vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical,
dns_resolv)

#***JIRA sync***
try:
# ***JIRA sync***
if vulnerabilities:
self.logger.info('{source} data has been successfully parsed'.format(source=source.upper()))
self.logger.info('Starting JIRA sync')

self.jira.sync(vulnerabilities, project, components)
else:
self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
self.logger.info(
"[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source,
scan_name=scan_name))
self.set_latest_scan_reported(fullpath.split("/")[-1])
return False
except Exception as e:
self.logger.error("Error: {}".format(e))
return False


#writing to file those assets without DNS resolution
#if it's not empty
# writing to file those assets without DNS resolution
# if it's not empty
if self.host_no_resolv:
#we will replace the old list of non-resolved hosts with the new one, or create it if it doesn't exist already
# we will replace the old list of non-resolved hosts with the new one, or create it if it doesn't exist already
self.no_resolv_by_team_dict[scan_name] = self.host_no_resolv
with open(self.no_resolv_fname, 'w') as outfile:
json.dump(self.no_resolv_by_team_dict, outfile)
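For reference, a sketch of the shape no_resolv.txt ends up with, as implied by the code above: one JSON object keyed by scan name, each listing the assets that failed DNS resolution on the latest sync (the names and IPs here are made up):

```python
import json

# Illustrative content only.
no_resolv_by_team = {"nessus-weekly-prod": ["10.0.0.5", "10.0.0.9"]}

with open("no_resolv.txt", "w") as outfile:
    json.dump(no_resolv_by_team, outfile)

# Reading it back on the next run, as the vulnWhispererJIRA __init__ does:
with open("no_resolv.txt") as json_file:
    print(json.load(json_file))
```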
@@ -1282,12 +1310,12 @@ class vulnWhispererJIRA(vulnWhispererBase):
self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
except Exception as e:
self.logger.error(
"VulnWhisperer wasn't able to report the vulnerabilities from the '{}' source, section {}.\
\nError: {}".format(
self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'), e))
"VulnWhisperer wasn't able to report the vulnerabilities from the '{}' source".format(
self.config.get(scan, 'source')))
return True
return False


class vulnWhisperer(object):

def __init__(self,
@@ -1311,7 +1339,6 @@ class vulnWhisperer(object):
self.scanname = scanname
self.exit_code = 0


def whisper_vulnerabilities(self):

if self.profile == 'nessus':
@@ -1326,9 +1353,9 @@ class vulnWhisperer(object):
self.exit_code += vw.process_web_assets()

elif self.profile == 'openvas':
vw = vulnWhispererOpenVAS(config=self.config)
vw_openvas = vulnWhispererOpenVAS(config=self.config)
if vw_openvas:
self.exit_code += vw.process_openvas_scans()
self.exit_code += vw_openvas.process_openvas_scans()

elif self.profile == 'tenable':
vw = vulnWhispererNessus(config=self.config,
@@ -1342,7 +1369,7 @@ class vulnWhisperer(object):
self.exit_code += vw.process_vuln_scans()

elif self.profile == 'jira':
#first we check config fields are created, otherwise we create them
# first we check config fields are created, otherwise we create them
vw = vulnWhispererJIRA(config=self.config)
if vw:
if not (self.source and self.scanname):