Compare commits

28 Commits: 1.8.0...dependabot

SHA1:

dfb5c98cf6
691f45a1dc
80197454a3
841cd09f2d
e7183864d0
12ac3dbf62
e41ec93058
8a86e3142a
9d003d12b4
63c638751b
a3e85b7207
4974be02b4
7fe2f9a5c1
f4634d03bd
e1ca9fadcd
adb7700300
ced0d4c2fc
f483c76638
f65116aec8
bdcb6de4b2
af8e27d075
accf926ff7
acf387bd0e
ab7a91e020
a1a0d6b757
2fb089805c
6cf2a94431
162636e60f
@@ -1,4 +1,4 @@
FROM centos:latest
FROM centos:7

MAINTAINER Justin Henderson justin@hasecuritysolutions.com
@@ -6,8 +6,10 @@

VulnWhisperer is a vulnerability management tool and report aggregator. VulnWhisperer will pull all the reports from the different Vulnerability scanners and create a file with a unique filename for each one, using that data later to sync with Jira and feed Logstash. Jira does a closed cycle full Sync with the data provided by the Scanners, while Logstash indexes and tags all of the information inside the report (see logstash files at /resources/elk6/pipeline/). Data is then shipped to ElasticSearch to be indexed, and ends up in a visual and searchable format in Kibana with already defined dashboards.

VulnWhisperer is an open-source community funded project. VulnWhisperer currently works but is due for a documentation overhaul and code review. This is on the roadmap for the next month or two (February or March of 2022 - hopefully). Please note, crowd funding is an option. If you would like help getting VulnWhisperer up and running, are interested in new features, or are looking for paid support (for those of you that require commercial support contracts to implement open-source solutions), please reach out to **info@hasecuritysolutions.com**.

[](https://travis-ci.org/HASecuritySolutions/VulnWhisperer)
[](http://choosealicense.com/licenses/mit/)
[](https://github.com/HASecuritySolutions/VulnWhisperer/blob/master/LICENSE)
[](https://twitter.com/VulnWhisperer)

Currently Supports
@@ -30,7 +32,8 @@ Currently Supports

### Reporting Frameworks

- [X] [ELK](https://www.elastic.co/elk-stack)
- [X] [Elastic Stack (**v6**/**v7**)](https://www.elastic.co/elk-stack)
- [ ] [OpenSearch - Being considered for next update](https://opensearch.org/)
- [X] [Jira](https://www.atlassian.com/software/jira)
- [ ] [Splunk](https://www.splunk.com/)
@@ -83,14 +83,17 @@ def main():
        enabled_sections = config.get_sections_with_attribute('enabled')

        for section in enabled_sections:
            vw = vulnWhisperer(config=args.config,
                               profile=section,
                               verbose=args.verbose,
                               username=args.username,
                               password=args.password,
                               source=args.source,
                               scanname=args.scanname)
            exit_code += vw.whisper_vulnerabilities()
            try:
                vw = vulnWhisperer(config=args.config,
                                   profile=section,
                                   verbose=args.verbose,
                                   username=args.username,
                                   password=args.password,
                                   source=args.source,
                                   scanname=args.scanname)
                exit_code += vw.whisper_vulnerabilities()
            except Exception as e:
                logger.error("VulnWhisperer was unable to perform the processing on '{}'".format(args.source))
    else:
        logger.info('Running vulnwhisperer for section {}'.format(args.section))
        vw = vulnWhisperer(config=args.config,
@@ -2,6 +2,8 @@
enabled=true
hostname=localhost
port=8834
access_key=
secret_key=
username=nessus_username
password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/

@@ -13,6 +15,8 @@ verbose=true
enabled=true
hostname=cloud.tenable.com
port=443
access_key=
secret_key=
username=tenable.io_username
password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/

@@ -37,7 +41,7 @@ max_retries = 10
template_id = 126024

[qualys_vuln]
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
#Reference https://www.qualys.com/docs/qualys-api-vmpc-user-guide.pdf to find your API
enabled = true
hostname = qualysapi.qg2.apps.qualys.com
username = exampleuser
@@ -2,6 +2,8 @@
enabled=true
hostname=nessus
port=443
access_key=
secret_key=
username=nessus_username
password=nessus_password
write_path=/opt/VulnWhisperer/data/nessus/

@@ -13,6 +15,8 @@ verbose=true
enabled=true
hostname=tenable
port=443
access_key=
secret_key=
username=tenable.io_username
password=tenable.io_password
write_path=/opt/VulnWhisperer/data/tenable/
@@ -1,12 +1,12 @@
pandas==0.20.3
setuptools==40.4.3
setuptools==65.5.1
pytz==2017.2
Requests==2.18.3
lxml==4.1.1
Requests==2.20.0
lxml==4.6.5
future-fstrings
bs4
jira
bottle
coloredlogs
qualysapi>=5.1.0
qualysapi==6.0.0
httpretty
@@ -2,7 +2,7 @@
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
# Description: Take in qualys web scan reports from vulnWhisperer and pumps into logstash
# Description: Take in Openvas web scan reports from vulnWhisperer and pumps into logstash

input {
  file {
resources/elk6/logstash-vulnwhisperer-template_elk7.json (new executable file, 231 lines)

@@ -0,0 +1,231 @@
{
  "index_patterns": "logstash-vulnwhisperer-*",
  "mappings": {
    "properties": {
      "@timestamp": {
        "type": "date"
      },
      "@version": {
        "type": "keyword"
      },
      "asset": {
        "type": "text",
        "norms": false,
        "fields": {
          "keyword": {
            "type": "keyword",
            "ignore_above": 256
          }
        }
      },
      "asset_uuid": {
        "type": "keyword"
      },
      "assign_ip": {
        "type": "ip"
      },
      "category": {
        "type": "keyword"
      },
      "cve": {
        "type": "keyword"
      },
      "cvss_base": {
        "type": "float"
      },
      "cvss_temporal_vector": {
        "type": "keyword"
      },
      "cvss_temporal": {
        "type": "float"
      },
      "cvss_vector": {
        "type": "keyword"
      },
      "cvss": {
        "type": "float"
      },
      "cvss3_base": {
        "type": "float"
      },
      "cvss3_temporal_vector": {
        "type": "keyword"
      },
      "cvss3_temporal": {
        "type": "float"
      },
      "cvss3_vector": {
        "type": "keyword"
      },
      "cvss3": {
        "type": "float"
      },
      "description": {
        "fields": {
          "keyword": {
            "ignore_above": 256,
            "type": "keyword"
          }
        },
        "norms": false,
        "type": "text"
      },
      "dns": {
        "type": "keyword"
      },
      "exploitability": {
        "fields": {
          "keyword": {
            "ignore_above": 256,
            "type": "keyword"
          }
        },
        "norms": false,
        "type": "text"
      },
      "fqdn": {
        "type": "keyword"
      },
      "geoip": {
        "dynamic": true,
        "type": "object",
        "properties": {
          "ip": {
            "type": "ip"
          },
          "latitude": {
            "type": "float"
          },
          "location": {
            "type": "geo_point"
          },
          "longitude": {
            "type": "float"
          }
        }
      },
      "history_id": {
        "type": "keyword"
      },
      "host": {
        "type": "keyword"
      },
      "host_end": {
        "type": "date"
      },
      "host_start": {
        "type": "date"
      },
      "impact": {
        "fields": {
          "keyword": {
            "ignore_above": 256,
            "type": "keyword"
          }
        },
        "norms": false,
        "type": "text"
      },
      "ip_status": {
        "type": "keyword"
      },
      "ip": {
        "type": "ip"
      },
      "last_updated": {
        "type": "date"
      },
      "operating_system": {
        "type": "keyword"
      },
      "path": {
        "type": "keyword"
      },
      "pci_vuln": {
        "type": "keyword"
      },
      "plugin_family": {
        "type": "keyword"
      },
      "plugin_id": {
        "type": "keyword"
      },
      "plugin_name": {
        "type": "keyword"
      },
      "plugin_output": {
        "fields": {
          "keyword": {
            "ignore_above": 256,
            "type": "keyword"
          }
        },
        "norms": false,
        "type": "text"
      },
      "port": {
        "type": "integer"
      },
      "protocol": {
        "type": "keyword"
      },
      "results": {
        "type": "text"
      },
      "risk_number": {
        "type": "integer"
      },
      "risk_score_name": {
        "type": "keyword"
      },
      "risk_score": {
        "type": "float"
      },
      "risk": {
        "type": "keyword"
      },
      "scan_id": {
        "type": "keyword"
      },
      "scan_name": {
        "type": "keyword"
      },
      "scan_reference": {
        "type": "keyword"
      },
      "see_also": {
        "type": "keyword"
      },
      "solution": {
        "type": "keyword"
      },
      "source": {
        "type": "keyword"
      },
      "ssl": {
        "type": "keyword"
      },
      "synopsis": {
        "type": "keyword"
      },
      "system_type": {
        "type": "keyword"
      },
      "tags": {
        "type": "keyword"
      },
      "threat": {
        "type": "text"
      },
      "type": {
        "type": "keyword"
      },
      "vendor_reference": {
        "type": "keyword"
      },
      "vulnerability_state": {
        "type": "keyword"
      }
    }
  }
}
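The template above only takes effect once it is installed in Elasticsearch so that new `logstash-vulnwhisperer-*` indices pick up the mappings. A minimal sketch of loading it with the legacy `_template` API, assuming an Elasticsearch 7.x node on `localhost:9200`; the template name `logstash-vulnwhisperer` and the URL are assumptions, not something this change defines:

```python
import json

import requests  # any HTTP client works; requests is assumed to be available

# Assumed locations; adjust to your environment.
TEMPLATE_FILE = "resources/elk6/logstash-vulnwhisperer-template_elk7.json"
ES_URL = "http://localhost:9200/_template/logstash-vulnwhisperer"

with open(TEMPLATE_FILE) as f:
    template = json.load(f)

# Install (or overwrite) the index template.
resp = requests.put(ES_URL, json=template)
resp.raise_for_status()
print(resp.json())  # expect {"acknowledged": true}
```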
@@ -24,15 +24,19 @@ class NessusAPI(object):
    EXPORT_STATUS = EXPORT + '/{file_id}/status'
    EXPORT_HISTORY = EXPORT + '?history_id={history_id}'

    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True, profile=None, access_key=None, secret_key=None):
        self.logger = logging.getLogger('NessusAPI')
        if verbose:
            self.logger.setLevel(logging.DEBUG)
        if username is None or password is None:
            raise Exception('ERROR: Missing username or password.')
        if not all((username, password)) and not all((access_key, secret_key)):
            raise Exception('ERROR: Missing username, password or API keys.')

        self.profile = profile
        self.user = username
        self.password = password
        self.api_keys = False
        self.access_key = access_key
        self.secret_key = secret_key
        self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
        self.verbose = verbose

@@ -52,7 +56,13 @@ class NessusAPI(object):
            'X-Cookie': None
        }

        self.login()
        if all((self.access_key, self.secret_key)):
            self.logger.debug('Using {} API keys'.format(self.profile))
            self.api_keys = True
            self.session.headers['X-ApiKeys'] = 'accessKey={}; secretKey={}'.format(self.access_key, self.secret_key)
        else:
            self.login()

        self.scans = self.get_scans()
        self.scan_ids = self.get_scan_ids()

@@ -78,8 +88,10 @@ class NessusAPI(object):
                if url == self.base + self.SESSION:
                    break
                try:
                    self.login()
                    timeout += 1
                    if self.api_keys:
                        continue
                    self.login()
                    self.logger.info('Token refreshed')
                except Exception as e:
                    self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))

@@ -114,7 +126,7 @@ class NessusAPI(object):
        data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json_output=True)
        return data['history']

    def download_scan(self, scan_id=None, history=None, export_format="", profile=""):
    def download_scan(self, scan_id=None, history=None, export_format=""):
        running = True
        counter = 0

@@ -127,7 +139,8 @@ class NessusAPI(object):
        req = self.request(query, data=json.dumps(data), method='POST', json_output=True)
        try:
            file_id = req['file']
            token_id = req['token'] if 'token' in req else req['temp_token']
            if self.profile == 'nessus':
                token_id = req['token'] if 'token' in req else req['temp_token']
        except Exception as e:
            self.logger.error('{}'.format(str(e)))
        self.logger.info('Download for file id {}'.format(str(file_id)))

@@ -143,7 +156,7 @@ class NessusAPI(object):
            if counter % 60 == 0:
                self.logger.info("Completed: {}".format(counter))
                self.logger.info("Done: {}".format(counter))
        if profile == 'tenable':
        if self.profile == 'tenable' or self.api_keys:
            content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
        else:
            content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)

@@ -152,7 +165,7 @@ class NessusAPI(object):
    def get_utc_from_local(self, date_time, local_tz=None, epoch=True):
        date_time = datetime.fromtimestamp(date_time)
        if local_tz is None:
            local_tz = pytz.timezone('US/Central')
            local_tz = pytz.timezone('UTC')
        else:
            local_tz = pytz.timezone(local_tz)
        local_time = local_tz.normalize(local_tz.localize(date_time))
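For reference, the API-key path added above amounts to setting one header on the HTTP session instead of performing the username/password login and cookie refresh. A standalone sketch of the same idea, independent of the NessusAPI class; the hostname, port and key values are placeholders:

```python
import requests

# Placeholders: use your scanner host and keys generated in the Nessus/Tenable UI.
hostname, port = "nessus.example.com", 8834
access_key, secret_key = "YOUR_ACCESS_KEY", "YOUR_SECRET_KEY"

session = requests.Session()
session.verify = False  # Nessus commonly runs with a self-signed certificate
# Same header format the patched NessusAPI builds when access/secret keys are configured.
session.headers["X-ApiKeys"] = "accessKey={}; secretKey={}".format(access_key, secret_key)

# With API keys there is no session token to refresh; requests are authorized directly.
scans = session.get("https://{}:{}/scans".format(hostname, port)).json()
print(len(scans.get("scans", [])), "scans visible")
```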
@@ -68,17 +68,22 @@ class JiraAPI(object):
            self.logger.error("Error creating Ticket: component {} not found".format(component))
            return 0

        new_issue = self.jira.create_issue(project=project,
                                           summary=title,
                                           description=desc,
                                           issuetype={'name': 'Bug'},
                                           labels=labels,
                                           components=components_ticket)
        try:
            new_issue = self.jira.create_issue(project=project,
                                               summary=title,
                                               description=desc,
                                               issuetype={'name': 'Bug'},
                                               labels=labels,
                                               components=components_ticket)

        self.logger.info("Ticket {} created successfully".format(new_issue))
            self.logger.info("Ticket {} created successfully".format(new_issue))

        if attachment_contents:
            self.add_content_as_attachment(new_issue, attachment_contents)
            if attachment_contents:
                self.add_content_as_attachment(new_issue, attachment_contents)

        except Exception as e:
            self.logger.error("Failed to create ticket on Jira Project '{}'. Error: {}".format(project, e))
            new_issue = False

        return new_issue

@@ -226,32 +231,44 @@ class JiraAPI(object):
    def ticket_get_unique_fields(self, ticket):
        title = ticket.raw.get('fields', {}).get('summary').encode("ascii").strip()
        ticketid = ticket.key.encode("ascii")
        assets = []
        try:
            affected_assets_section = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0]
            assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))

        except Exception as e:
            self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
            assets = []

        try:
            if not assets:
                #check if attachment, if so, get assets from attachment
                affected_assets_section = self.check_ips_attachment(ticket)
                if affected_assets_section:
                    assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
        except Exception as e:
            self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticketid, e))
        assets = self.get_assets_from_description(ticket)
        if not assets:
            #check if attachment, if so, get assets from attachment
            assets = self.get_assets_from_attachment(ticket)

        return ticketid, title, assets

    def check_ips_attachment(self, ticket):
        affected_assets_section = []
    def get_assets_from_description(self, ticket, _raw = False):
        # Get the assets as a string "host - protocol/port - hostname" separated by "\n"
        # structure the text to have the same structure as the assets from the attachment
        affected_assets = ""
        try:
            affected_assets = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0].replace('\n','').replace(' * ','\n').replace('\n', '', 1)
        except Exception as e:
            self.logger.error("Unable to process the Ticket's 'Affected Assets'. Ticket ID: {}. Reason: {}".format(ticket, e))

        if affected_assets:
            if _raw:
                # from line 406 check if the text in the panel corresponds to having added an attachment
                if "added as an attachment" in affected_assets:
                    return False
                return affected_assets

            try:
                # if _raw is not true, we return only the IPs of the affected assets
                return list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
            except Exception as e:
                self.logger.error("Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))
        return False

    def get_assets_from_attachment(self, ticket, _raw = False):
        # Get the assets as a string "host - protocol/port - hostname" separated by "\n"
        affected_assets = []
        try:
            fields = self.jira.issue(ticket.key).raw.get('fields', {})
            attachments = fields.get('attachment', {})
            affected_assets_section = ""
            affected_assets = ""
            #we will make sure we get the latest version of the file
            latest = ''
            attachment_id = ''

@@ -265,12 +282,44 @@ class JiraAPI(object):
                    if latest < item.get('created'):
                        latest = item.get('created')
                        attachment_id = item.get('id')
            affected_assets_section = self.jira.attachment(attachment_id).get()
            affected_assets = self.jira.attachment(attachment_id).get()

        except Exception as e:
            self.logger.error("Failed to get assets from ticket attachment. Ticket ID: {}. Reason: {}".format(ticket, e))

        return affected_assets_section
        if affected_assets:
            if _raw:
                return affected_assets

            try:
                # if _raw is not true, we return only the IPs of the affected assets
                affected_assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets)))
                return affected_assets
            except Exception as e:
                self.logger.error("Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}".format(ticket, e))

        return False

    def parse_asset_to_json(self, asset):
        hostname, protocol, port = "", "", ""
        asset_info = asset.split(" - ")
        ip = asset_info[0]
        proto_port = asset_info[1]
        # in case there is some case where hostname is not reported at all
        if len(asset_info) == 3:
            hostname = asset_info[2]
        if proto_port != "N/A/N/A":
            protocol, port = proto_port.split("/")
            port = int(float(port))

        asset_dict = {
            "host": ip,
            "protocol": protocol,
            "port": port,
            "hostname": hostname
        }

        return asset_dict

    def clean_old_attachments(self, ticket):
        fields = ticket.raw.get('fields')

@@ -441,7 +490,7 @@ class JiraAPI(object):
            if transition.get('name') == self.JIRA_REOPEN_ISSUE:
                self.logger.debug("Ticket is reopenable")
                return True
        self.logger.warn("Ticket can't be opened. Check Jira transitions.")
        self.logger.error("Ticket {} can't be opened. Check Jira transitions.".format(ticket_obj))
        return False

    def is_ticket_closeable(self, ticket_obj):

@@ -449,7 +498,7 @@ class JiraAPI(object):
        for transition in transitions:
            if transition.get('name') == self.JIRA_CLOSE_ISSUE:
                return True
        self.logger.warn("Ticket can't closed. Check Jira transitions.")
        self.logger.error("Ticket {} can't closed. Check Jira transitions.".format(ticket_obj))
        return False

    def is_ticket_resolved(self, ticket_obj):

@@ -522,7 +571,7 @@ class JiraAPI(object):
    def close_obsolete_tickets(self):
        # Close tickets older than 12 months, vulnerabilities not solved will get created a new ticket
        self.logger.info("Closing obsolete tickets older than {} months".format(self.max_time_tracking))
        jql = "labels=vulnerability_management AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
        jql = "labels=vulnerability_management AND NOT labels=advisory AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
        tickets_to_close = self.jira.search_issues(jql, maxResults=0)

        comment = '''This ticket is being closed for hygiene, as it is more than {} months old.

@@ -553,9 +602,36 @@ class JiraAPI(object):
            return True
        try:
            self.logger.info("Saving locally tickets from the last {} months".format(self.max_time_tracking))
            jql = "labels=vulnerability_management AND created >=startOfMonth(-{})".format(self.max_time_tracking)
            jql = "labels=vulnerability_management AND NOT labels=advisory AND created >=startOfMonth(-{})".format(self.max_time_tracking)
            tickets_data = self.jira.search_issues(jql, maxResults=0)

            #TODO process tickets, creating a new field called "_metadata" with all the affected assets well structured
            # for future processing in ELK/Splunk; this includes downloading attachments with assets and processing them

            processed_tickets = []

            for ticket in tickets_data:
                assets = self.get_assets_from_description(ticket, _raw=True)
                if not assets:
                    # check if attachment, if so, get assets from attachment
                    assets = self.get_assets_from_attachment(ticket, _raw=True)
                # process the affected assets to save them as json structure on a new field from the JSON
                _metadata = {"affected_hosts": []}
                if assets:
                    if "\n" in assets:
                        for asset in assets.split("\n"):
                            assets_json = self.parse_asset_to_json(asset)
                            _metadata["affected_hosts"].append(assets_json)
                    else:
                        assets_json = self.parse_asset_to_json(assets)
                        _metadata["affected_hosts"].append(assets_json)

                temp_ticket = ticket.raw.get('fields')
                temp_ticket['_metadata'] = _metadata

                processed_tickets.append(temp_ticket)

            #end of line needed, as writelines() doesn't add it automatically, otherwise one big line
            to_save = [json.dumps(ticket.raw.get('fields'))+"\n" for ticket in tickets_data]
            with open(fname, 'w') as outfile:
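The new `parse_asset_to_json` helper expects each affected asset as a `host - protocol/port - hostname` string, per the comments in the diff above. A quick illustration of the mapping it produces, using sample values only:

```python
# Sample asset lines in the "host - protocol/port - hostname" format used by the tickets.
assets = "10.0.0.5 - tcp/443 - web01.example.com\n10.0.0.9 - N/A/N/A"

affected_hosts = []
for asset in assets.split("\n"):
    parts = asset.split(" - ")
    entry = {
        "host": parts[0],
        "protocol": "",
        "port": "",
        "hostname": parts[2] if len(parts) == 3 else "",  # hostname may be missing
    }
    if parts[1] != "N/A/N/A":
        proto, port = parts[1].split("/")
        entry.update(protocol=proto, port=int(float(port)))
    affected_hosts.append(entry)

print(affected_hosts)
# [{'host': '10.0.0.5', 'protocol': 'tcp', 'port': 443, 'hostname': 'web01.example.com'},
#  {'host': '10.0.0.9', 'protocol': '', 'port': '', 'hostname': ''}]
```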
@@ -55,8 +55,12 @@ class vulnWhispererBase(object):
        except:
            self.enabled = False
        self.hostname = self.config.get(self.CONFIG_SECTION, 'hostname')
        self.username = self.config.get(self.CONFIG_SECTION, 'username')
        self.password = self.config.get(self.CONFIG_SECTION, 'password')
        try:
            self.username = self.config.get(self.CONFIG_SECTION, 'username')
            self.password = self.config.get(self.CONFIG_SECTION, 'password')
        except:
            self.username = None
            self.password = None
        self.write_path = self.config.get(self.CONFIG_SECTION, 'write_path')
        self.db_path = self.config.get(self.CONFIG_SECTION, 'db_path')
        self.verbose = self.config.getbool(self.CONFIG_SECTION, 'verbose')

@@ -200,6 +204,7 @@ class vulnWhispererBase(object):
    def get_latest_results(self, source, scan_name):
        processed = 0
        results = []
        reported = ""

        try:
            self.conn.text_factory = str

@@ -218,6 +223,7 @@ class vulnWhispererBase(object):

        except Exception as e:
            self.logger.error("Error when getting latest results from {}.{} : {}".format(source, scan_name, e))

        return results, reported

    def get_scan_profiles(self):

@@ -274,6 +280,8 @@ class vulnWhispererNessus(vulnWhispererBase):

        self.develop = True
        self.purge = purge
        self.access_key = None
        self.secret_key = None

        if config is not None:
            try:

@@ -283,24 +291,36 @@ class vulnWhispererNessus(vulnWhispererBase):
                    'trash')

                try:
                    self.logger.info('Attempting to connect to nessus...')
                    self.access_key = self.config.get(self.CONFIG_SECTION,'access_key')
                    self.secret_key = self.config.get(self.CONFIG_SECTION,'secret_key')
                except:
                    pass

                try:
                    self.logger.info('Attempting to connect to {}...'.format(self.CONFIG_SECTION))
                    self.nessus = \
                        NessusAPI(hostname=self.hostname,
                                  port=self.nessus_port,
                                  username=self.username,
                                  password=self.password)
                                  password=self.password,
                                  profile=self.CONFIG_SECTION,
                                  access_key=self.access_key,
                                  secret_key=self.secret_key
                                  )
                    self.nessus_connect = True
                    self.logger.info('Connected to nessus on {host}:{port}'.format(host=self.hostname,
                    self.logger.info('Connected to {} on {host}:{port}'.format(self.CONFIG_SECTION, host=self.hostname,
                                                                                    port=str(self.nessus_port)))
                except Exception as e:
                    self.logger.error('Exception: {}'.format(str(e)))
                    raise Exception(
                        'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                        'Could not connect to {} -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                            self.CONFIG_SECTION,
                            config=self.config.config_in,
                            e=e))
            except Exception as e:
                self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
                sys.exit(1)
                return False
                #sys.exit(1)

@@ -435,7 +455,7 @@ class vulnWhispererNessus(vulnWhispererBase):
                try:
                    file_req = \
                        self.nessus.download_scan(scan_id=scan_id, history=history_id,
                                                  export_format='csv', profile=self.CONFIG_SECTION)
                                                  export_format='csv')
                except Exception as e:
                    self.logger.error('Could not download {} scan {}: {}'.format(self.CONFIG_SECTION, scan_id, str(e)))
                    self.exit_code += 1

@@ -445,10 +465,11 @@ class vulnWhispererNessus(vulnWhispererBase):
                    pd.read_csv(io.StringIO(file_req.decode('utf-8')))
                if len(clean_csv) > 2:
                    self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name.encode('utf8')))
                    columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output']
                    columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output', 'MAC Address']

                    for col in columns_to_cleanse:
                        clean_csv[col] = clean_csv[col].astype(str).apply(self.cleanser)
                        if col in clean_csv:
                            clean_csv[col] = clean_csv[col].astype(str).apply(self.cleanser)

                    clean_csv.to_csv(relative_path_name, index=False)
                    record_meta = (

@@ -555,8 +576,11 @@ class vulnWhispererQualys(vulnWhispererBase):
        self.logger = logging.getLogger('vulnWhispererQualys')
        if debug:
            self.logger.setLevel(logging.DEBUG)

        self.qualys_scan = qualysScanReport(config=config)
        try:
            self.qualys_scan = qualysScanReport(config=config)
        except Exception as e:
            self.logger.error("Unable to establish connection with Qualys scanner. Reason: {}".format(e))
            return False
        self.latest_scans = self.qualys_scan.qw.get_all_scans()
        self.directory_check()
        self.scans_to_process = None

@@ -642,8 +666,7 @@ class vulnWhispererQualys(vulnWhispererBase):

            if cleanup:
                self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
                cleaning_up = \
                    self.qualys_scan.qw.delete_report(generated_report_id)
                cleaning_up = self.qualys_scan.qw.delete_report(generated_report_id)
                os.remove(self.path_check(str(generated_report_id) + '.csv'))
                self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
            else:

@@ -728,10 +751,14 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
        self.develop = True
        self.purge = purge
        self.scans_to_process = None
        self.openvas_api = OpenVAS_API(hostname=self.hostname,
                                       port=self.port,
                                       username=self.username,
                                       password=self.password)
        try:
            self.openvas_api = OpenVAS_API(hostname=self.hostname,
                                           port=self.port,
                                           username=self.username,
                                           password=self.password)
        except Exception as e:
            self.logger.error("Unable to establish connection with OpenVAS scanner. Reason: {}".format(e))
            return False

    def whisper_reports(self, output_format='json', launched_date=None, report_id=None, cleanup=True):
        report = None

@@ -842,8 +869,11 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
        self.logger = logging.getLogger('vulnWhispererQualysVuln')
        if debug:
            self.logger.setLevel(logging.DEBUG)

        self.qualys_scan = qualysVulnScan(config=config)
        try:
            self.qualys_scan = qualysVulnScan(config=config)
        except Exception as e:
            self.logger.error("Unable to create connection with Qualys. Reason: {}".format(e))
            return False
        self.directory_check()
        self.scans_to_process = None

@@ -854,7 +884,7 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
                        scan_reference=None,
                        output_format='json',
                        cleanup=True):
        launched_date

        if 'Z' in launched_date:
            launched_date = self.qualys_scan.utils.iso_to_epoch(launched_date)
        report_name = 'qualys_vuln_' + report_id.replace('/','_') \

@@ -966,6 +996,13 @@ class vulnWhispererJIRA(vulnWhispererBase):
        self.config_path = config
        self.config = vwConfig(config)
        self.host_resolv_cache = {}
        self.host_no_resolv = []
        self.no_resolv_by_team_dict = {}
        #Save locally those assets without DNS entry for flag to system owners
        self.no_resolv_fname="no_resolv.txt"
        if os.path.isfile(self.no_resolv_fname):
            with open(self.no_resolv_fname, "r") as json_file:
                self.no_resolv_by_team_dict = json.load(json_file)
        self.directory_check()

        if config is not None:

@@ -983,7 +1020,8 @@ class vulnWhispererJIRA(vulnWhispererBase):
                raise Exception(
                    'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
                        config=self.config.config_in, e=e))
                sys.exit(1)
                return False
                #sys.exit(1)

        profiles = []
        profiles = self.get_scan_profiles()

@@ -1173,6 +1211,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
                    self.logger.debug("Hostname found: {hostname}.".format(hostname=values['dns']))
                except:
                    self.host_resolv_cache[values['ip']] = ''
                    self.host_no_resolv.append(values['ip'])
                    self.logger.debug("Hostname not found for: {ip}.".format(ip=values['ip']))

            for key in values.keys():

@@ -1208,16 +1247,29 @@ class vulnWhispererJIRA(vulnWhispererBase):
            vulnerabilities = self.parse_qualys_vuln_vulnerabilities(fullpath, source, scan_name, min_critical, dns_resolv)

        #***JIRA sync***
        if vulnerabilities:
            self.logger.info('{source} data has been successfuly parsed'.format(source=source.upper()))
            self.logger.info('Starting JIRA sync')
        try:
            if vulnerabilities:
                self.logger.info('{source} data has been successfuly parsed'.format(source=source.upper()))
                self.logger.info('Starting JIRA sync')

            self.jira.sync(vulnerabilities, project, components)
        else:
            self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
            self.set_latest_scan_reported(fullpath.split("/")[-1])
                self.jira.sync(vulnerabilities, project, components)
            else:
                self.logger.info("[{source}.{scan_name}] No vulnerabilities or vulnerabilities not parsed.".format(source=source, scan_name=scan_name))
                self.set_latest_scan_reported(fullpath.split("/")[-1])
                return False
        except Exception as e:
            self.logger.error("Error: {}".format(e))
            return False

        #writing to file those assets without DNS resolution
        #if its not empty
        if self.host_no_resolv:
            #we will replace old list of non resolved for the new one or create if it doesn't exist already
            self.no_resolv_by_team_dict[scan_name] = self.host_no_resolv
            with open(self.no_resolv_fname, 'w') as outfile:
                json.dump(self.no_resolv_by_team_dict, outfile)

        self.set_latest_scan_reported(fullpath.split("/")[-1])
        return True

@@ -1226,7 +1278,13 @@ class vulnWhispererJIRA(vulnWhispererBase):

        if autoreport_sections:
            for scan in autoreport_sections:
                self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
                try:
                    self.jira_sync(self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'))
                except Exception as e:
                    self.logger.error(
                        "VulnWhisperer wasn't able to report the vulnerabilities from the '{}'s source, section {}.\
                        \nError: {}".format(
                            self.config.get(scan, 'source'), self.config.get(scan, 'scan_name'), e))
            return True
        return False

@@ -1258,43 +1316,43 @@ class vulnWhisperer(object):

        if self.profile == 'nessus':
            vw = vulnWhispererNessus(config=self.config,
                                     username=self.username,
                                     password=self.password,
                                     verbose=self.verbose,
                                     profile=self.profile)
            self.exit_code += vw.whisper_nessus()
            if vw:
                self.exit_code += vw.whisper_nessus()

        elif self.profile == 'qualys_web':
            vw = vulnWhispererQualys(config=self.config)
            self.exit_code += vw.process_web_assets()
            if vw:
                self.exit_code += vw.process_web_assets()

        elif self.profile == 'openvas':
            vw_openvas = vulnWhispererOpenVAS(config=self.config)
            self.exit_code += vw_openvas.process_openvas_scans()
            vw = vulnWhispererOpenVAS(config=self.config)
            if vw:
                self.exit_code += vw.process_openvas_scans()

        elif self.profile == 'tenable':
            vw = vulnWhispererNessus(config=self.config,
                                     username=self.username,
                                     password=self.password,
                                     verbose=self.verbose,
                                     profile=self.profile)
            self.exit_code += vw.whisper_nessus()
            if vw:
                self.exit_code += vw.whisper_nessus()

        elif self.profile == 'qualys_vuln':
            vw = vulnWhispererQualysVuln(config=self.config)
            self.exit_code += vw.process_vuln_scans()
            if vw:
                self.exit_code += vw.process_vuln_scans()

        elif self.profile == 'jira':
            #first we check config fields are created, otherwise we create them
            vw = vulnWhispererJIRA(config=self.config)
            if not (self.source and self.scanname):
                self.logger.info('No source/scan_name selected, all enabled scans will be synced')
                success = vw.sync_all()
                if not success:
                    self.logger.error('All scans sync failed!')
                    self.logger.error('Source scanner and scan name needed!')
                    return 0
            else:
                vw.jira_sync(self.source, self.scanname)
            if vw:
                if not (self.source and self.scanname):
                    self.logger.info('No source/scan_name selected, all enabled scans will be synced')
                    success = vw.sync_all()
                    if not success:
                        self.logger.error('All scans sync failed!')
                        self.logger.error('Source scanner and scan name needed!')
                        return 0
                else:
                    vw.jira_sync(self.source, self.scanname)

        return self.exit_code