Rework logging using the stdlib machinery (#116)
* Rework logging using the stdlib machinery: use the verbose or debug flag to enable/disable logging.DEBUG, remove the vprint function from all classes, remove bcolors from all code, and clean up [INFO], [ERROR], {success} and similar prefixes.
* Fix some errors my local linter missed but Travis caught.
* Add coloredlogs and the --fancy command line flag.
Committed by Austin Taylor
Parent: 46955bff75
Commit: e3e416fe44
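Taken together, the diff below moves every module to the same pattern: the entry point configures the root logger once (optionally colourised via coloredlogs when --fancy is passed), and each class creates its own named logger in place of the old vprint()/bcolors helpers. A minimal sketch of that pattern follows; setup_logging and SomeModule are illustrative names only, not code from this commit.

    import logging
    import sys

    def setup_logging(debug=False, fancy=False):
        # One root configuration for the whole process; DEBUG only when asked for.
        logging.basicConfig(
            stream=sys.stdout,
            level=logging.DEBUG if debug else logging.INFO
        )
        if fancy:
            # coloredlogs swaps in a colourised handler on top of the root logger.
            import coloredlogs
            coloredlogs.install(level='DEBUG' if debug else 'INFO')

    class SomeModule(object):
        def __init__(self, verbose=False):
            # Every class owns a named logger instead of a vprint()/bcolors helper.
            self.logger = logging.getLogger('SomeModule')
            if verbose:
                self.logger.setLevel(logging.DEBUG)

        def run(self):
            self.logger.info('doing work')

    if __name__ == '__main__':
        setup_logging(debug=True)
        SomeModule(verbose=True).run()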
@@ -4,11 +4,11 @@ __author__ = 'Austin Taylor'

from vulnwhisp.vulnwhisp import vulnWhisperer
from vulnwhisp.utils.cli import bcolors
from vulnwhisp.base.config import vwConfig
import os
import argparse
import sys
import logging


def isFileValid(parser, arg):
    if not os.path.exists(arg):
@@ -32,16 +32,28 @@ def main():
                        help='Prints status out to screen (defaults to True)')
    parser.add_argument('-u', '--username', dest='username', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS username')
    parser.add_argument('-p', '--password', dest='password', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS password')
    parser.add_argument('-F', '--fancy', action='store_true', help='Enable colourful logging output')
    parser.add_argument('-d', '--debug', action='store_true', help='Enable debugging messages')
    args = parser.parse_args()

    # First setup logging
    logging.basicConfig(
        stream=sys.stdout,
        level=logging.DEBUG if args.debug else logging.INFO
    )
    logger = logging.getLogger(name='main')
    if args.fancy:
        import coloredlogs
        coloredlogs.install(level='DEBUG' if args.debug else 'INFO')

    try:
        if args.config and not args.section:

            print('{yellow}WARNING: {warning}{endc}'.format(yellow=bcolors.WARNING,
                warning='No section was specified, vulnwhisperer will scrape enabled modules from config file. \
                \nPlease specify a section using -s. \
                \nExample vuln_whisperer -c config.ini -s nessus',
                endc=bcolors.ENDC))
            # this remains a print since we are in the main binary
            print('WARNING: {warning}'.format(warning='No section was specified, vulnwhisperer will scrape enabled modules from config file. \
                \nPlease specify a section using -s. \
                \nExample vuln_whisperer -c config.ini -s nessus'))
            logger.info('No section was specified, vulnwhisperer will scrape enabled modules from the config file.')
            config = vwConfig(config_in=args.config)
            enabled_sections = config.get_enabled()

@@ -55,9 +67,11 @@ def main():
                                   scanname=args.scanname)

                vw.whisper_vulnerabilities()
                # TODO: fix this to NOT be exit 1 unless in error
                sys.exit(1)

        else:
            logger.info('Running vulnwhisperer for section {}'.format(args.section))
            vw = vulnWhisperer(config=args.config,
                               profile=args.section,
                               verbose=args.verbose,
@@ -67,11 +81,15 @@ def main():
                               scanname=args.scanname)

            vw.whisper_vulnerabilities()
            # TODO: fix this to NOT be exit 1 unless in error
            sys.exit(1)

    except Exception as e:
        if args.verbose:
            print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL, error=e, endc=bcolors.ENDC))
        # this will remain a print since we are in the main binary
        logger.error('{}'.format(str(e)))
        print('ERROR: {error}'.format(error=e))
        # TODO: fix this to NOT be exit 2 unless in error
        sys.exit(2)
@@ -7,3 +7,4 @@ lxml==4.1.1
bs4
jira
bottle
coloredlogs
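coloredlogs is the only new entry in requirements.txt, and the main binary only imports it when --fancy is passed, so runs without that flag never touch it. If one wanted the flag itself to degrade gracefully when the package is absent, a guarded import like the following (not part of this commit) would do it:

    import logging
    try:
        import coloredlogs
        coloredlogs.install(level='INFO')
    except ImportError:
        # fall back to plain stdlib logging if coloredlogs is not installed
        logging.basicConfig(level=logging.INFO)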
@@ -1 +0,0 @@
from utils.cli import bcolors
@@ -1,5 +1,6 @@
import os
import sys
import logging

# Support for python3
if (sys.version_info > (3, 0)):
@@ -14,44 +15,49 @@ class vwConfig(object):
        self.config_in = config_in
        self.config = cp.RawConfigParser()
        self.config.read(self.config_in)
        self.logger = logging.getLogger('vwConfig')

    def get(self, section, option):
        self.logger.debug('Calling get for {}:{}'.format(section, option))
        return self.config.get(section, option)

    def getbool(self, section, option):
        self.logger.debug('Calling getbool for {}:{}'.format(section, option))
        return self.config.getboolean(section, option)

    def get_enabled(self):
        enabled = []
        # TODO: does this not also need the "yes" case?
        check = ["true", "True", "1"]
        for section in self.config.sections():
            try:
                if self.get(section, "enabled") in check:
                    enabled.append(section)
            except:
                print "[INFO] Section {} has no option 'enabled'".format(section)
                self.logger.error("Section {} has no option 'enabled'".format(section))
        return enabled

    def exists_jira_profiles(self, profiles):
        # get list of profiles source_scanner.scan_name
        for profile in profiles:
            if not self.config.has_section(self.normalize_section(profile)):
                print "[INFO] JIRA Scan Profile missing"
                self.logger.warn("JIRA Scan Profile missing")
                return False
        return True


    def update_jira_profiles(self, profiles):
        # create JIRA profiles in the ini config file
        self.logger.debug('Updating Jira profiles: {}'.format(str(profiles)))

        for profile in profiles:
            #IMPORTANT profile scans/results will be normalized to lower and "_" instead of spaces for ini file section
            section_name = self.normalize_section(profile)
            try:
                self.get(section_name, "source")
                print "Skipping creating of section '{}'; already exists".format(section_name)
                self.logger.info("Skipping creating of section '{}'; already exists".format(section_name))
            except:
                print "Creating config section for '{}'".format(section_name)
                self.logger.warn("Creating config section for '{}'".format(section_name))
                self.config.add_section(section_name)
                self.config.set(section_name,'source',profile.split('.')[0])
                # in case any scan name contains '.' character
@@ -62,12 +68,16 @@ class vwConfig(object):
                self.config.set(section_name,'; minimum criticality to report (low, medium, high or critical)')
                self.config.set(section_name,'min_critical_to_report', 'high')

        # TODO: try/catch this
        # writing changes back to file
        with open(self.config_in, 'w') as configfile:
            self.config.write(configfile)
        self.logger.debug('Written configuration to {}'.format(self.config_in))

        # FIXME: this is the same as return None, that is the default return for return-less functions
        return

    def normalize_section(self, profile):
        profile = "jira.{}".format(profile.lower().replace(" ","_"))
        self.logger.debug('Normalized profile as: {}'.format(profile))
        return profile
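The TODO in get_enabled() above asks whether the "yes" case also needs handling; one way to cover it without growing the hand-rolled check list is to lean on ConfigParser's own boolean parsing, since getboolean() already accepts 1/yes/true/on (and their negatives) case-insensitively. A possible sketch, not part of this commit:

    def get_enabled(self):
        enabled = []
        for section in self.config.sections():
            try:
                # getboolean() understands "1", "yes", "true" and "on"
                if self.config.getboolean(section, "enabled"):
                    enabled.append(section)
            except Exception:
                self.logger.error("Section {} has no option 'enabled'".format(section))
        return enabled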
@@ -7,6 +7,7 @@ from datetime import datetime
import json
import sys
import time
import logging

from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
@@ -27,6 +28,9 @@ class NessusAPI(object):
    EXPORT_HISTORY = EXPORT + '?history_id={history_id}'

    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
        self.logger = logging.getLogger('NessusAPI')
        if verbose:
            self.logger.setLevel(logging.DEBUG)
        if username is None or password is None:
            raise Exception('ERROR: Missing username or password.')

@@ -51,10 +55,6 @@ class NessusAPI(object):
        self.login()
        self.scan_ids = self.get_scan_ids()

    def vprint(self, msg):
        if self.verbose:
            print(msg)

    def login(self):
        resp = self.get_token()
        if resp.status_code is 200:
@ -69,6 +69,7 @@ class NessusAPI(object):
|
||||
success = False
|
||||
|
||||
url = self.base + url
|
||||
self.logger.debug('Requesting to url {}'.format(url))
|
||||
methods = {'GET': requests.get,
|
||||
'POST': requests.post,
|
||||
'DELETE': requests.delete}
|
||||
@ -81,15 +82,16 @@ class NessusAPI(object):
|
||||
try:
|
||||
self.login()
|
||||
timeout += 1
|
||||
self.vprint('[INFO] Token refreshed')
|
||||
self.logger.info('Token refreshed')
|
||||
except Exception as e:
|
||||
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
|
||||
self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
|
||||
else:
|
||||
success = True
|
||||
|
||||
if json:
|
||||
data = data.json()
|
||||
if download:
|
||||
self.logger.debug('Returning data.content')
|
||||
return data.content
|
||||
return data
|
||||
|
||||
@ -99,6 +101,7 @@ class NessusAPI(object):
|
||||
return token
|
||||
|
||||
def logout(self):
|
||||
self.logger.debug('Logging out')
|
||||
self.request(self.SESSION, method='DELETE')
|
||||
|
||||
def get_folders(self):
|
||||
@ -112,6 +115,7 @@ class NessusAPI(object):
|
||||
def get_scan_ids(self):
|
||||
scans = self.get_scans()
|
||||
scan_ids = [scan_id['id'] for scan_id in scans['scans']] if scans['scans'] else []
|
||||
self.logger.debug('Found {} scan_ids'.format(len(scan_ids)))
|
||||
return scan_ids
|
||||
|
||||
def count_scan(self, scans, folder_id):
|
||||
@ -122,11 +126,10 @@ class NessusAPI(object):
|
||||
|
||||
def print_scans(self, data):
|
||||
for folder in data['folders']:
|
||||
print("\\{0} - ({1})\\".format(folder['name'], self.count_scan(data['scans'], folder['id'])))
|
||||
self.logger.info("\\{0} - ({1})\\".format(folder['name'], self.count_scan(data['scans'], folder['id'])))
|
||||
for scan in data['scans']:
|
||||
if scan['folder_id'] == folder['id']:
|
||||
print(
|
||||
"\t\"{0}\" - sid:{1} - uuid: {2}".format(scan['name'].encode('utf-8'), scan['id'], scan['uuid']))
|
||||
self.logger.info("\t\"{0}\" - sid:{1} - uuid: {2}".format(scan['name'].encode('utf-8'), scan['id'], scan['uuid']))
|
||||
|
||||
def get_scan_details(self, scan_id):
|
||||
data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json=True)
|
||||
@ -171,8 +174,8 @@ class NessusAPI(object):
|
||||
file_id = req['file']
|
||||
token_id = req['token'] if 'token' in req else req['temp_token']
|
||||
except Exception as e:
|
||||
print("[ERROR] %s" % e)
|
||||
print('Download for file id ' + str(file_id) + '.')
|
||||
self.logger.error('{}'.format(str(e)))
|
||||
self.logger.info('Download for file id {}'.format(str(file_id)))
|
||||
while running:
|
||||
time.sleep(2)
|
||||
counter += 2
|
||||
@ -181,10 +184,10 @@ class NessusAPI(object):
|
||||
running = report_status['status'] != 'ready'
|
||||
sys.stdout.write(".")
|
||||
sys.stdout.flush()
|
||||
# FIXME: why? can this be removed in favour of a counter?
|
||||
if counter % 60 == 0:
|
||||
print("")
|
||||
|
||||
print("")
|
||||
self.logger.info("Completed: {}".format(counter))
|
||||
self.logger.info("Done: {}".format(counter))
|
||||
if profile=='tenable':
|
||||
content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
|
||||
else:
|
||||
@ -208,12 +211,12 @@ class NessusAPI(object):
|
||||
local_tz = pytz.timezone('US/Central')
|
||||
else:
|
||||
local_tz = pytz.timezone(local_tz)
|
||||
# print date_time
|
||||
local_time = local_tz.normalize(local_tz.localize(date_time))
|
||||
local_time = local_time.astimezone(pytz.utc)
|
||||
if epoch:
|
||||
naive = local_time.replace(tzinfo=None)
|
||||
local_time = int((naive - datetime(1970, 1, 1)).total_seconds())
|
||||
self.logger.debug('Converted timestamp {} in datetime {}'.format(date_time, local_time))
|
||||
return local_time
|
||||
|
||||
def tz_conv(self, tz):
|
||||
|
@ -4,11 +4,11 @@ __author__ = 'Austin Taylor'
|
||||
|
||||
import datetime as dt
|
||||
import io
|
||||
import logging
|
||||
|
||||
import pandas as pd
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from ..utils.cli import bcolors
|
||||
|
||||
|
||||
class OpenVAS_API(object):
|
||||
@ -21,6 +21,9 @@ class OpenVAS_API(object):
|
||||
password=None,
|
||||
report_format_id=None,
|
||||
verbose=True):
|
||||
self.logger = logging.getLogger('OpenVAS_API')
|
||||
if verbose:
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
if username is None or password is None:
|
||||
raise Exception('ERROR: Missing username or password.')
|
||||
|
||||
@ -49,10 +52,6 @@ class OpenVAS_API(object):
|
||||
self.openvas_reports = self.get_reports()
|
||||
self.report_formats = self.get_report_formats()
|
||||
|
||||
def vprint(self, msg):
|
||||
if self.verbose:
|
||||
print(msg)
|
||||
|
||||
def login(self):
|
||||
resp = self.get_token()
|
||||
if resp.status_code is 200:
|
||||
@ -90,9 +89,9 @@ class OpenVAS_API(object):
|
||||
try:
|
||||
self.login()
|
||||
timeout += 1
|
||||
self.vprint('[INFO] Token refreshed')
|
||||
self.logger.info('Token refreshed')
|
||||
except Exception as e:
|
||||
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
|
||||
self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
|
||||
else:
|
||||
success = True
|
||||
|
||||
@ -116,7 +115,7 @@ class OpenVAS_API(object):
|
||||
('cmd', 'get_report_formats'),
|
||||
('token', self.token)
|
||||
)
|
||||
self.vprint('{info} Retrieving available report formats'.format(info=bcolors.INFO))
|
||||
self.logger.info('Retrieving available report formats')
|
||||
data = self.request(url=self.OMP, method='GET', params=params)
|
||||
|
||||
bs = BeautifulSoup(data.content, "lxml")
|
||||
@ -133,7 +132,7 @@ class OpenVAS_API(object):
|
||||
return format_mapping
|
||||
|
||||
def get_reports(self, complete=True):
|
||||
print('{info} Retreiving OpenVAS report data...'.format(info=bcolors.INFO))
|
||||
self.logger.info('Retreiving OpenVAS report data...')
|
||||
params = (('cmd', 'get_reports'),
|
||||
('token', self.token),
|
||||
('max_results', 1),
|
||||
@ -184,7 +183,7 @@ class OpenVAS_API(object):
|
||||
('report_format_id', '{report_format_id}'.format(report_format_id=self.report_formats['CSV Results'])),
|
||||
('submit', 'Download'),
|
||||
)
|
||||
print('Retrieving %s' % report_id)
|
||||
self.logger.info('Retrieving {}'.format(report_id))
|
||||
req = self.request(self.OMP, params=params, method='GET')
|
||||
report_df = pd.read_csv(io.BytesIO(req.text.encode('utf-8')))
|
||||
report_df['report_ids'] = report_id
|
||||
|
@ -12,6 +12,7 @@ import requests
|
||||
import sys
|
||||
import os
|
||||
import csv
|
||||
import logging
|
||||
import dateutil.parser as dp
|
||||
|
||||
|
||||
@ -33,19 +34,20 @@ class qualysWhisperAPI(object):
|
||||
VERSION = '/qps/rest/portal/version'
|
||||
|
||||
def __init__(self, config=None):
|
||||
self.logger = logging.getLogger('qualysWhisperAPI')
|
||||
self.config = config
|
||||
try:
|
||||
self.qgc = qualysapi.connect(config)
|
||||
print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
|
||||
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
|
||||
except Exception as e:
|
||||
print('[ERROR] Could not connect to Qualys - %s' % e)
|
||||
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
|
||||
self.headers = {
|
||||
"content-type": "text/xml"}
|
||||
self.config_parse = qcconf.QualysConnectConfig(config)
|
||||
try:
|
||||
self.template_id = self.config_parse.get_template_id()
|
||||
except:
|
||||
print('ERROR - Could not retrieve template ID')
|
||||
self.logger.error('Could not retrieve template ID')
|
||||
|
||||
def request(self, path, method='get', data=None):
|
||||
methods = {'get': requests.get,
|
||||
@ -126,15 +128,15 @@ class qualysWhisperAPI(object):
|
||||
dataframes = []
|
||||
_records = []
|
||||
total = int(self.get_was_scan_count(status=status))
|
||||
print('Retrieving information for %s scans' % total)
|
||||
self.logger.info('Retrieving information for {} scans'.format(total))
|
||||
for i in range(0, total):
|
||||
if i % limit == 0:
|
||||
if (total - i) < limit:
|
||||
qualys_api_limit = total - i
|
||||
print('Making a request with a limit of %s at offset %s' % (str(qualys_api_limit), str(i + 1)))
|
||||
self.logger.info('Making a request with a limit of {} at offset {}'.format(str(qualys_api_limit), str(i + 1)))
|
||||
scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
|
||||
_records.append(scan_info)
|
||||
print('Converting XML to DataFrame')
|
||||
self.logger.debug('Converting XML to DataFrame')
|
||||
dataframes = [self.xml_parser(xml) for xml in _records]
|
||||
|
||||
return pd.concat(dataframes, axis=0).reset_index().drop('index', axis=1)
|
||||
@ -235,10 +237,9 @@ class qualysWhisperAPI(object):
|
||||
mapper = {'scan': self.generate_scan_report_XML,
|
||||
'webapp': self.generate_webapp_report_XML}
|
||||
try:
|
||||
# print lxml.etree.tostring(mapper[kind](report_id), pretty_print=True)
|
||||
data = mapper[kind](report_id)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error('Error creating report: {}'.format(str(e)))
|
||||
|
||||
return self.qgc.request(self.REPORT_CREATE, data)
|
||||
|
||||
@ -322,7 +323,7 @@ class qualysReportFields:
|
||||
|
||||
class qualysUtils:
|
||||
def __init__(self):
|
||||
pass
|
||||
self.logger = logging.getLogger('qualysUtils')
|
||||
|
||||
def grab_section(
|
||||
self,
|
||||
@ -398,6 +399,7 @@ class qualysWebAppReport:
|
||||
delimiter=',',
|
||||
quotechar='"',
|
||||
):
|
||||
self.logger = logging.getLogger('qualysWebAppReport')
|
||||
self.file_in = file_in
|
||||
self.file_stream = file_stream
|
||||
self.report = None
|
||||
@ -407,8 +409,7 @@ class qualysWebAppReport:
|
||||
try:
|
||||
self.qw = qualysWhisperAPI(config=config)
|
||||
except Exception as e:
|
||||
print('Could not load config! Please check settings for %s' \
|
||||
% e)
|
||||
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
|
||||
|
||||
if file_stream:
|
||||
self.open_file = file_in.splitlines()
|
||||
@ -517,7 +518,7 @@ class qualysWebAppReport:
|
||||
merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered'
|
||||
)]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error('Error merging df: {}'.format(str(e)))
|
||||
return merged_df
|
||||
|
||||
def download_file(self, file_id):
|
||||
@ -527,7 +528,7 @@ class qualysWebAppReport:
|
||||
for line in report.splitlines():
|
||||
file_out.write(line + '\n')
|
||||
file_out.close()
|
||||
print('[ACTION] - File written to %s' % filename)
|
||||
self.logger.info('File written to {}'.format(filename))
|
||||
return filename
|
||||
|
||||
def remove_file(self, filename):
|
||||
@ -537,7 +538,7 @@ class qualysWebAppReport:
|
||||
"""Downloads a file from qualys and normalizes it"""
|
||||
|
||||
download_file = self.download_file(file_id)
|
||||
print('[ACTION] - Downloading file ID: %s' % file_id)
|
||||
self.logger.info('Downloading file ID: {}'.format(file_id))
|
||||
report_data = self.grab_sections(download_file)
|
||||
merged_data = self.data_normalizer(report_data)
|
||||
if scan:
|
||||
@ -562,35 +563,30 @@ class qualysWebAppReport:
|
||||
+ '_{last_updated}'.format(last_updated=updated_date) \
|
||||
+ '.csv'
|
||||
if os.path.isfile(report_name):
|
||||
print('[ACTION] - File already exist! Skipping...')
|
||||
self.logger.info('File already exists! Skipping...')
|
||||
pass
|
||||
else:
|
||||
print('[ACTION] - Generating report for %s' % report_id)
|
||||
self.logger.info('Generating report for {}'.format(report_id))
|
||||
status = self.qw.create_report(report_id)
|
||||
root = objectify.fromstring(status)
|
||||
if root.responseCode == 'SUCCESS':
|
||||
print('[INFO] - Successfully generated report for webapp: %s' \
|
||||
% report_id)
|
||||
self.logger.info('Successfully generated report for webapp: {}'.format(report_id))
|
||||
generated_report_id = root.data.Report.id
|
||||
print ('[INFO] - New Report ID: %s' \
|
||||
% generated_report_id)
|
||||
self.logger.info('New Report ID: {}'.format(generated_report_id))
|
||||
vuln_ready = self.process_data(generated_report_id)
|
||||
|
||||
vuln_ready.to_csv(report_name, index=False, header=True) # add when timestamp occured
|
||||
print('[SUCCESS] - Report written to %s' \
|
||||
% report_name)
|
||||
self.logger.info('Report written to {}'.format(report_name))
|
||||
if cleanup:
|
||||
print('[ACTION] - Removing report %s' \
|
||||
% generated_report_id)
|
||||
self.logger.info('Removing report {}'.format(generated_report_id))
|
||||
cleaning_up = \
|
||||
self.qw.delete_report(generated_report_id)
|
||||
self.remove_file(str(generated_report_id) + '.csv')
|
||||
print('[ACTION] - Deleted report: %s' \
|
||||
% generated_report_id)
|
||||
self.logger.info('Deleted report: {}'.format(generated_report_id))
|
||||
else:
|
||||
print('Could not process report ID: %s' % status)
|
||||
self.logger.error('Could not process report ID: {}'.format(status))
|
||||
except Exception as e:
|
||||
print('[ERROR] - Could not process %s - %s' % (report_id, e))
|
||||
self.logger.error('Could not process {}: {}'.format(report_id, e))
|
||||
return vuln_ready
|
||||
|
||||
|
||||
@ -633,6 +629,7 @@ class qualysScanReport:
|
||||
delimiter=',',
|
||||
quotechar='"',
|
||||
):
|
||||
self.logger = logging.getLogger('qualysScanReport')
|
||||
self.file_in = file_in
|
||||
self.file_stream = file_stream
|
||||
self.report = None
|
||||
@ -642,8 +639,7 @@ class qualysScanReport:
|
||||
try:
|
||||
self.qw = qualysWhisperAPI(config=config)
|
||||
except Exception as e:
|
||||
print('Could not load config! Please check settings for %s' \
|
||||
% e)
|
||||
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
|
||||
|
||||
if file_stream:
|
||||
self.open_file = file_in.splitlines()
|
||||
@ -746,7 +742,7 @@ class qualysScanReport:
|
||||
merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered'
|
||||
)]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.logger.error('Error normalizing: {}'.format(str(e)))
|
||||
return merged_df
|
||||
|
||||
def download_file(self, path='', file_id=None):
|
||||
@ -756,7 +752,7 @@ class qualysScanReport:
|
||||
for line in report.splitlines():
|
||||
file_out.write(line + '\n')
|
||||
file_out.close()
|
||||
print('[ACTION] - File written to %s' % filename)
|
||||
self.logger.info('File written to {}'.format(filename))
|
||||
return filename
|
||||
|
||||
def remove_file(self, filename):
|
||||
@ -766,7 +762,7 @@ class qualysScanReport:
|
||||
"""Downloads a file from qualys and normalizes it"""
|
||||
|
||||
download_file = self.download_file(path=path, file_id=file_id)
|
||||
print('[ACTION] - Downloading file ID: %s' % file_id)
|
||||
self.logger.info('Downloading file ID: {}'.format(file_id))
|
||||
report_data = self.grab_sections(download_file)
|
||||
merged_data = self.data_normalizer(report_data)
|
||||
merged_data.sort_index(axis=1, inplace=True)
|
||||
@ -788,35 +784,29 @@ class qualysScanReport:
|
||||
+ '_{last_updated}'.format(last_updated=updated_date) \
|
||||
+ '.csv'
|
||||
if os.path.isfile(report_name):
|
||||
print('[ACTION] - File already exist! Skipping...')
|
||||
pass
|
||||
self.logger.info('File already exist! Skipping...')
|
||||
else:
|
||||
print('[ACTION] - Generating report for %s' % report_id)
|
||||
self.logger.info('Generating report for {}'.format(report_id))
|
||||
status = self.qw.create_report(report_id)
|
||||
root = objectify.fromstring(status)
|
||||
if root.responseCode == 'SUCCESS':
|
||||
print('[INFO] - Successfully generated report for webapp: %s' \
|
||||
% report_id)
|
||||
self.logger.info('Successfully generated report for webapp: {}'.format(report_id))
|
||||
generated_report_id = root.data.Report.id
|
||||
print ('[INFO] - New Report ID: %s' \
|
||||
% generated_report_id)
|
||||
self.logger.info('New Report ID: {}'.format(generated_report_id))
|
||||
vuln_ready = self.process_data(generated_report_id)
|
||||
|
||||
vuln_ready.to_csv(report_name, index=False, header=True) # add when timestamp occured
|
||||
print('[SUCCESS] - Report written to %s' \
|
||||
% report_name)
|
||||
self.logger.info('Report written to {}'.format(report_name))
|
||||
if cleanup:
|
||||
print('[ACTION] - Removing report %s from disk' \
|
||||
% generated_report_id)
|
||||
self.logger.info('Removing report {} from disk'.format(generated_report_id))
|
||||
cleaning_up = \
|
||||
self.qw.delete_report(generated_report_id)
|
||||
self.remove_file(str(generated_report_id) + '.csv')
|
||||
print('[ACTION] - Deleted report from Qualys Database: %s' \
|
||||
% generated_report_id)
|
||||
self.logger.info('Deleted report from Qualys Database: {}'.format(generated_report_id))
|
||||
else:
|
||||
print('Could not process report ID: %s' % status)
|
||||
self.logger.error('Could not process report ID: {}'.format(status))
|
||||
except Exception as e:
|
||||
print('[ERROR] - Could not process %s - %s' % (report_id, e))
|
||||
self.logger.error('Could not process {}: {}'.format(report_id, e))
|
||||
return vuln_ready
|
||||
|
||||
|
||||
|
@ -7,6 +7,7 @@ import pandas as pd
|
||||
import qualysapi
|
||||
import requests
|
||||
import sys
|
||||
import logging
|
||||
import os
|
||||
import dateutil.parser as dp
|
||||
|
||||
@ -15,14 +16,16 @@ class qualysWhisperAPI(object):
|
||||
SCANS = 'api/2.0/fo/scan'
|
||||
|
||||
def __init__(self, config=None):
|
||||
self.logger = logging.getLogger('qualysWhisperAPI')
|
||||
self.config = config
|
||||
try:
|
||||
self.qgc = qualysapi.connect(config)
|
||||
# Fail early if we can't make a request or auth is incorrect
|
||||
self.qgc.request('about.php')
|
||||
print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
|
||||
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
|
||||
except Exception as e:
|
||||
print('[ERROR] Could not connect to Qualys - %s' % e)
|
||||
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
|
||||
# FIXME: exit(1) does not exist: either it's exit() or sys.exit(CODE)
|
||||
exit(1)
|
||||
|
||||
def scan_xml_parser(self, xml):
|
||||
@ -66,10 +69,12 @@ class qualysWhisperAPI(object):
|
||||
|
||||
class qualysUtils:
|
||||
def __init__(self):
|
||||
pass
|
||||
self.logger = logging.getLogger('qualysUtils')
|
||||
|
||||
def iso_to_epoch(self, dt):
|
||||
return dp.parse(dt).strftime('%s')
|
||||
out = dp.parse(dt).strftime('%s')
|
||||
self.logger.info('Converted {} to {}'.format(dt, out))
|
||||
return out
|
||||
|
||||
|
||||
class qualysVulnScan:
|
||||
@ -82,6 +87,7 @@ class qualysVulnScan:
|
||||
delimiter=',',
|
||||
quotechar='"',
|
||||
):
|
||||
self.logger = logging.getLogger('qualysVulnScan')
|
||||
self.file_in = file_in
|
||||
self.file_stream = file_stream
|
||||
self.report = None
|
||||
@ -91,8 +97,7 @@ class qualysVulnScan:
|
||||
try:
|
||||
self.qw = qualysWhisperAPI(config=config)
|
||||
except Exception as e:
|
||||
print('Could not load config! Please check settings for %s' \
|
||||
% e)
|
||||
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
|
||||
|
||||
if file_stream:
|
||||
self.open_file = file_in.splitlines()
|
||||
@ -104,7 +109,7 @@ class qualysVulnScan:
|
||||
def process_data(self, scan_id=None):
|
||||
"""Downloads a file from Qualys and normalizes it"""
|
||||
|
||||
print('[ACTION] - Downloading scan ID: %s' % scan_id)
|
||||
self.logger.info('Downloading scan ID: {}'.format(scan_id))
|
||||
scan_report = self.qw.get_scan_details(scan_id=scan_id)
|
||||
keep_columns = ['category', 'cve_id', 'cvss3_base', 'cvss3_temporal', 'cvss_base', 'cvss_temporal', 'dns', 'exploitability', 'fqdn', 'impact', 'ip', 'ip_status', 'netbios', 'os', 'pci_vuln', 'port', 'protocol', 'qid', 'results', 'severity', 'solution', 'ssl', 'threat', 'title', 'type', 'vendor_reference']
|
||||
scan_report = scan_report.filter(keep_columns)
|
||||
|
@ -3,22 +3,22 @@ from datetime import datetime, timedelta
|
||||
|
||||
from jira import JIRA
|
||||
import requests
|
||||
import logging
|
||||
from bottle import template
|
||||
import re
|
||||
|
||||
class JiraAPI(object): #NamedLogger):
|
||||
__logname__="vjira"
|
||||
|
||||
#TODO implement logging
|
||||
|
||||
class JiraAPI(object):
|
||||
def __init__(self, hostname=None, username=None, password=None, debug=False, clean_obsolete=True, max_time_window=6):
|
||||
#self.setup_logger(debug=debug)
|
||||
self.logger = logging.getLogger('JiraAPI')
|
||||
if debug:
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
|
||||
if "https://" not in hostname:
|
||||
hostname = "https://{}".format(hostname)
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.jira = JIRA(options={'server': hostname}, basic_auth=(self.username, self.password))
|
||||
#self.logger.info("Created vjira service for {}".format(server))
|
||||
self.logger.info("Created vjira service for {}".format(hostname))
|
||||
self.all_tickets = []
|
||||
self.JIRA_REOPEN_ISSUE = "Reopen Issue"
|
||||
self.JIRA_CLOSE_ISSUE = "Close Issue"
|
||||
@ -34,19 +34,19 @@ class JiraAPI(object): #NamedLogger):
|
||||
for tag in tags:
|
||||
labels.append(str(tag))
|
||||
|
||||
#self.logger.info("creating ticket for project {} title[20] {}".format(project, title[:20]))
|
||||
#self.logger.info("project {} has a component requirement: {}".format(project, self.PROJECT_COMPONENT_TABLE[project]))
|
||||
self.logger.info("creating ticket for project {} title[20] {}".format(project, title[:20]))
|
||||
self.logger.info("project {} has a component requirement: {}".format(project, self.PROJECT_COMPONENT_TABLE[project]))
|
||||
project_obj = self.jira.project(project)
|
||||
components_ticket = []
|
||||
for component in components:
|
||||
exists = False
|
||||
for c in project_obj.components:
|
||||
if component == c.name:
|
||||
#self.logger.debug("resolved component name {} to id {}".format(component_name, c.id)ra python)
|
||||
self.logger.debug("resolved component name {} to id {}".format(c.name, c.id))
|
||||
components_ticket.append({ "id": c.id })
|
||||
exists=True
|
||||
if not exists:
|
||||
print "[ERROR] Error creating Ticket: component {} not found".format(component)
|
||||
self.logger.error("Error creating Ticket: component {} not found".format(component))
|
||||
return 0
|
||||
|
||||
new_issue = self.jira.create_issue(project=project,
|
||||
@ -56,7 +56,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
labels=labels,
|
||||
components=components_ticket)
|
||||
|
||||
print "[SUCCESS] Ticket {} has been created".format(new_issue)
|
||||
self.logger.info("Ticket {} has been created".format(new_issue))
|
||||
return new_issue
|
||||
|
||||
#Basic JIRA Metrics
|
||||
@ -64,7 +64,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
jql = "labels= vulnerability_management and resolution = Unresolved"
|
||||
if project:
|
||||
jql += " and (project='{}')".format(project)
|
||||
print jql
|
||||
self.logger.debug('Executing: {}'.format(jql))
|
||||
return len(self.jira.search_issues(jql, maxResults=0))
|
||||
|
||||
def metrics_closed_tickets(self, project=None):
|
||||
@ -75,7 +75,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
|
||||
def sync(self, vulnerabilities, project, components=[]):
|
||||
#JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution, ips, risk, references]
|
||||
print "JIRA Sync started"
|
||||
self.logger.info("JIRA Sync started")
|
||||
|
||||
# [HIGIENE] close tickets older than 6 months as obsolete
|
||||
# Higiene clean up affects to all tickets created by the module, filters by label 'vulnerability_management'
|
||||
@ -105,7 +105,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
try:
|
||||
tpl = template(self.template_path, vuln)
|
||||
except Exception as e:
|
||||
print e
|
||||
self.logger.error('Exception templating: {}'.format(str(e)))
|
||||
return 0
|
||||
self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components, tags=[vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk']])
|
||||
|
||||
@ -123,7 +123,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips']))))
|
||||
|
||||
if not self.all_tickets:
|
||||
print "Retrieving all JIRA tickets with the following tags {}".format(labels)
|
||||
self.logger.info("Retrieving all JIRA tickets with the following tags {}".format(labels))
|
||||
# we want to check all JIRA tickets, to include tickets moved to other queues
|
||||
# will exclude tickets older than 6 months, old tickets will get closed for higiene and recreated if still vulnerable
|
||||
jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format(" AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking)
|
||||
@ -132,17 +132,17 @@ class JiraAPI(object): #NamedLogger):
|
||||
|
||||
#WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists
|
||||
#it wont iterate over the rest of tickets looking for other possible duplicates/similar issues
|
||||
print "Comparing Vulnerabilities to created tickets"
|
||||
self.logger.info("Comparing Vulnerabilities to created tickets")
|
||||
for index in range(len(self.all_tickets)-1):
|
||||
checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields(self.all_tickets[index])
|
||||
if title == checking_title:
|
||||
difference = list(set(assets).symmetric_difference(checking_assets))
|
||||
#to check intersection - set(assets) & set(checking_assets)
|
||||
if difference:
|
||||
print "Asset mismatch, ticket to update. TickedID: {}".format(checking_ticketid)
|
||||
self.logger.info("Asset mismatch, ticket to update. TickedID: {}".format(checking_ticketid))
|
||||
return False, True, checking_ticketid, checking_assets #this will automatically validate
|
||||
else:
|
||||
print "Confirmed duplicated. TickedID: {}".format(checking_ticketid)
|
||||
self.logger.info("Confirmed duplicated. TickedID: {}".format(checking_ticketid))
|
||||
return True, False, checking_ticketid, [] #this will automatically validate
|
||||
return False, False, "", []
|
||||
|
||||
@ -153,21 +153,21 @@ class JiraAPI(object): #NamedLogger):
|
||||
affected_assets_section = ticket.raw.get('fields', {}).get('description').encode("ascii").split("{panel:title=Affected Assets}")[1].split("{panel}")[0]
|
||||
assets = list(set(re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section)))
|
||||
except:
|
||||
print "[ERROR] Ticket IPs regex failed. Ticket ID: {}".format(ticketid)
|
||||
self.logger.error("Ticket IPs regex failed. Ticket ID: {}".format(ticketid))
|
||||
assets = []
|
||||
|
||||
return ticketid, title, assets
|
||||
|
||||
def ticket_update_assets(self, vuln, ticketid, ticket_assets):
|
||||
# correct description will always be in the vulnerability to report, only needed to update description to new one
|
||||
print "Ticket {} exists, UPDATE requested".format(ticketid)
|
||||
self.logger.info("Ticket {} exists, UPDATE requested".format(ticketid))
|
||||
|
||||
if self.is_ticket_resolved(self.jira.issue(ticketid)):
|
||||
self.reopen_ticket(ticketid)
|
||||
try:
|
||||
tpl = template(self.template_path, vuln)
|
||||
except Exception as e:
|
||||
print e
|
||||
self.logger.error('Exception updating assets: {}'.format(str(e)))
|
||||
return 0
|
||||
|
||||
ticket_obj = self.jira.issue(ticketid)
|
||||
@ -186,9 +186,9 @@ class JiraAPI(object): #NamedLogger):
|
||||
ticket_obj.fields.labels.append('updated')
|
||||
try:
|
||||
ticket_obj.update(description=tpl, comment=comment, fields={"labels":ticket_obj.fields.labels})
|
||||
print "Ticket {} updated successfully".format(ticketid)
|
||||
self.logger.info("Ticket {} updated successfully".format(ticketid))
|
||||
except:
|
||||
print "[ERROR] Error while trying up update ticket {}".format(ticketid)
|
||||
self.logger.error("Error while trying up update ticket {}".format(ticketid))
|
||||
return 0
|
||||
|
||||
def close_fixed_tickets(self, vulnerabilities):
|
||||
@ -202,9 +202,9 @@ class JiraAPI(object): #NamedLogger):
|
||||
|
||||
for ticket in self.all_tickets:
|
||||
if ticket.raw['fields']['summary'].strip() in found_vulns:
|
||||
print "Ticket {} is still vulnerable".format(ticket)
|
||||
self.logger.info("Ticket {} is still vulnerable".format(ticket))
|
||||
continue
|
||||
print "Ticket {} is no longer vulnerable".format(ticket)
|
||||
self.logger.info("Ticket {} is no longer vulnerable".format(ticket))
|
||||
self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
|
||||
return 0
|
||||
|
||||
@ -213,9 +213,9 @@ class JiraAPI(object): #NamedLogger):
|
||||
transitions = self.jira.transitions(ticket_obj)
|
||||
for transition in transitions:
|
||||
if transition.get('name') == self.JIRA_REOPEN_ISSUE:
|
||||
#print "ticket is reopenable"
|
||||
self.logger.debug("Ticket is reopenable")
|
||||
return True
|
||||
print "[ERROR] Ticket can't be opened. Check Jira transitions."
|
||||
self.logger.warn("Ticket can't be opened. Check Jira transitions.")
|
||||
return False
|
||||
|
||||
def is_ticket_closeable(self, ticket_obj):
|
||||
@ -223,7 +223,7 @@ class JiraAPI(object): #NamedLogger):
|
||||
for transition in transitions:
|
||||
if transition.get('name') == self.JIRA_CLOSE_ISSUE:
|
||||
return True
|
||||
print "[ERROR] Ticket can't closed. Check Jira transitions."
|
||||
self.logger.warn("Ticket can't closed. Check Jira transitions.")
|
||||
return False
|
||||
|
||||
def is_ticket_resolved(self, ticket_obj):
|
||||
@ -231,10 +231,10 @@ class JiraAPI(object): #NamedLogger):
|
||||
if ticket_obj is not None:
|
||||
if ticket_obj.raw['fields'].get('resolution') is not None:
|
||||
if ticket_obj.raw['fields'].get('resolution').get('name') != 'Unresolved':
|
||||
print "Checked ticket {} is already closed".format(ticket_obj)
|
||||
#logger.info("ticket {} is closed".format(ticketid))
|
||||
self.logger.debug("Checked ticket {} is already closed".format(ticket_obj))
|
||||
self.logger.info("ticket {} is closed".format(ticket_obj.id))
|
||||
return True
|
||||
print "Checked ticket {} is already open".format(ticket_obj)
|
||||
self.logger.debug("Checked ticket {} is already open".format(ticket_obj))
|
||||
return False
|
||||
|
||||
|
||||
@ -242,23 +242,21 @@ class JiraAPI(object): #NamedLogger):
|
||||
if ticket_obj is not None:
|
||||
if ticket_obj.raw['fields'].get('labels') is not None:
|
||||
labels = ticket_obj.raw['fields'].get('labels')
|
||||
print labels
|
||||
if "risk_accepted" in labels:
|
||||
print "Ticket {} accepted risk, will be ignored".format(ticket_obj)
|
||||
self.logger.warn("Ticket {} accepted risk, will be ignored".format(ticket_obj))
|
||||
return True
|
||||
elif "server_decomission" in labels:
|
||||
print "Ticket {} server decomissioned, will be ignored".format(ticket_obj)
|
||||
self.logger.warn("Ticket {} server decomissioned, will be ignored".format(ticket_obj))
|
||||
return True
|
||||
print "Ticket {} risk has not been accepted".format(ticket_obj)
|
||||
self.logger.info("Ticket {} risk has not been accepted".format(ticket_obj))
|
||||
return False
|
||||
|
||||
def reopen_ticket(self, ticketid):
|
||||
print "Ticket {} exists, REOPEN requested".format(ticketid)
|
||||
self.logger.debug("Ticket {} exists, REOPEN requested".format(ticketid))
|
||||
# this will reopen a ticket by ticketid
|
||||
ticket_obj = self.jira.issue(ticketid)
|
||||
|
||||
if self.is_ticket_resolved(ticket_obj):
|
||||
#print "ticket is resolved"
|
||||
if not self.is_risk_accepted(ticket_obj):
|
||||
try:
|
||||
if self.is_ticket_reopenable(ticket_obj):
|
||||
@ -267,38 +265,34 @@ class JiraAPI(object): #NamedLogger):
|
||||
If server has been decomissioned, please add the label "*server_decomission*" to the ticket before closing it.
|
||||
If you have further doubts, please contact the Security Team.'''
|
||||
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_REOPEN_ISSUE, comment = comment)
|
||||
print "[SUCCESS] ticket {} reopened successfully".format(ticketid)
|
||||
#logger.info("ticket {} reopened successfully".format(ticketid))
|
||||
self.logger.info("ticket {} reopened successfully".format(ticketid))
|
||||
return 1
|
||||
except Exception as e:
|
||||
# continue with ticket data so that a new ticket is created in place of the "lost" one
|
||||
print "[ERROR] error reopening ticket {}: {}".format(ticketid, e)
|
||||
#logger.error("error reopening ticket {}: {}".format(ticketid, e))
|
||||
self.logger.error("error reopening ticket {}: {}".format(ticketid, e))
|
||||
return 0
|
||||
return 0
|
||||
|
||||
def close_ticket(self, ticketid, resolution, comment):
|
||||
# this will close a ticket by ticketid
|
||||
print "Ticket {} exists, CLOSE requested".format(ticketid)
|
||||
self.logger.debug("Ticket {} exists, CLOSE requested".format(ticketid))
|
||||
ticket_obj = self.jira.issue(ticketid)
|
||||
if not self.is_ticket_resolved(ticket_obj):
|
||||
try:
|
||||
if self.is_ticket_closeable(ticket_obj):
|
||||
error = self.jira.transition_issue(issue=ticketid, transition=self.JIRA_CLOSE_ISSUE, comment = comment, resolution = {"name": resolution })
|
||||
print "[SUCCESS] ticket {} closed successfully".format(ticketid)
|
||||
#logger.info("ticket {} reopened successfully".format(ticketid))
|
||||
self.logger.info("ticket {} reopened successfully".format(ticketid))
|
||||
return 1
|
||||
except Exception as e:
|
||||
# continue with ticket data so that a new ticket is created in place of the "lost" one
|
||||
print "[ERROR] error closing ticket {}: {}".format(ticketid, e)
|
||||
#logger.error("error closing ticket {}: {}".format(ticketid, e))
|
||||
self.logger.error("error closing ticket {}: {}".format(ticketid, e))
|
||||
return 0
|
||||
|
||||
return 0
|
||||
|
||||
def close_obsolete_tickets(self):
|
||||
# Close tickets older than 6 months, vulnerabilities not solved will get created a new ticket
|
||||
print "Closing obsolete tickets older than {} months".format(self.max_time_tracking)
|
||||
self.logger.info("Closing obsolete tickets older than {} months".format(self.max_time_tracking))
|
||||
jql = "labels=vulnerability_management AND created <startOfMonth(-{}) and resolution=Unresolved".format(self.max_time_tracking)
|
||||
tickets_to_close = self.jira.search_issues(jql, maxResults=0)
|
||||
|
||||
|
@@ -1,17 +0,0 @@
class bcolors:
    """
    Utility to add colors to shell for scripts
    """
    HEADERS = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

    INFO = '{info}[INFO]{endc}'.format(info=OKBLUE, endc=ENDC)
    ACTION = '{info}[ACTION]{endc}'.format(info=OKBLUE, endc=ENDC)
    SUCCESS = '{green}[SUCCESS]{endc}'.format(green=OKGREEN, endc=ENDC)
    FAIL = '{red}[FAIL]{endc}'.format(red=FAIL, endc=ENDC)
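With bcolors deleted, colour only enters the output through coloredlogs in the main binary. For reference, the rough equivalent of the old [INFO]/[FAIL] prefixes under the new scheme is just a levelled log call through a colourised handler; a small hedged example (the 'demo' logger name is illustrative):

    import logging
    import coloredlogs

    logger = logging.getLogger('demo')
    coloredlogs.install(level='INFO')
    logger.info('replaces the old bcolors.INFO prefix')
    logger.error('replaces the old bcolors.FAIL prefix')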
@ -8,7 +8,6 @@ from frameworks.qualys import qualysScanReport
|
||||
from frameworks.qualys_vuln import qualysVulnScan
|
||||
from frameworks.openvas import OpenVAS_API
|
||||
from reporting.jira_api import JiraAPI
|
||||
from utils.cli import bcolors
|
||||
import pandas as pd
|
||||
from lxml import objectify
|
||||
import sys
|
||||
@ -17,9 +16,6 @@ import io
|
||||
import time
|
||||
import sqlite3
|
||||
import json
|
||||
|
||||
# TODO Create logging option which stores data about scan
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
@ -39,7 +35,9 @@ class vulnWhispererBase(object):
|
||||
section=None,
|
||||
develop=False,
|
||||
):
|
||||
|
||||
self.logger = logging.getLogger('vulnWhispererBase')
|
||||
if debug:
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
|
||||
if self.CONFIG_SECTION is None:
|
||||
raise Exception('Implementing class must define CONFIG_SECTION')
|
||||
@ -74,26 +72,23 @@ class vulnWhispererBase(object):
|
||||
'database', db_name))
|
||||
if not os.path.exists(self.db_path):
|
||||
os.makedirs(self.db_path)
|
||||
self.vprint('{info} Creating directory {dir}'.format(info=bcolors.INFO, dir=self.db_path))
|
||||
self.logger.info('Creating directory {dir}'.format(dir=self.db_path))
|
||||
|
||||
if not os.path.exists(self.database):
|
||||
with open(self.database, 'w'):
|
||||
self.vprint('{info} Creating file {dir}'.format(info=bcolors.INFO, dir=self.database))
|
||||
pass
|
||||
self.logger.info('Creating file {dir}'.format(dir=self.database))
|
||||
|
||||
try:
|
||||
self.conn = sqlite3.connect(self.database)
|
||||
self.cur = self.conn.cursor()
|
||||
self.vprint('{info} Connected to database at {loc}'.format(info=bcolors.INFO,
|
||||
loc=self.database))
|
||||
self.logger.info('Connected to database at {loc}'.format(loc=self.database))
|
||||
except Exception as e:
|
||||
self.vprint(
|
||||
'{fail} Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
|
||||
self.logger.error('Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
|
||||
e=e,
|
||||
fail=bcolors.FAIL, loc=self.database))
|
||||
loc=self.database))
|
||||
else:
|
||||
|
||||
self.vprint('{fail} Please specify a database to connect to!'.format(fail=bcolors.FAIL))
|
||||
self.logger.error('Please specify a database to connect to!')
|
||||
exit(1)
|
||||
|
||||
self.table_columns = [
|
||||
@ -114,10 +109,6 @@ class vulnWhispererBase(object):
|
||||
self.skipped = 0
|
||||
self.scan_list = []
|
||||
|
||||
def vprint(self, msg):
|
||||
if self.verbose:
|
||||
print(msg)
|
||||
|
||||
def create_table(self):
|
||||
self.cur.execute(
|
||||
'CREATE TABLE IF NOT EXISTS scan_history (id INTEGER PRIMARY KEY,'
|
||||
@ -171,12 +162,12 @@ class vulnWhispererBase(object):
|
||||
def directory_check(self):
|
||||
if not os.path.exists(self.write_path):
|
||||
os.makedirs(self.write_path)
|
||||
self.vprint('{info} Directory created at {scan} - Skipping creation'.format(
|
||||
scan=self.write_path, info=bcolors.INFO))
|
||||
self.logger.info('Directory created at {scan} - Skipping creation'.format(
|
||||
scan=self.write_path))
|
||||
else:
|
||||
os.path.exists(self.write_path)
|
||||
self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
|
||||
scan=self.write_path, info=bcolors.INFO))
|
||||
self.logger.info('Directory already exist for {scan} - Skipping creation'.format(
|
||||
scan=self.write_path))
|
||||
|
||||
def get_latest_results(self, source, scan_name):
|
||||
try:
|
||||
@ -201,7 +192,7 @@ class vulnWhispererBase(object):
|
||||
sources = [r[0] for r in self.cur.fetchall()]
|
||||
except:
|
||||
sources = []
|
||||
self.vprint("{fail} Process failed at executing 'SELECT DISTINCT source FROM scan_history;'".format(fail=bcolors.FAIL))
|
||||
self.logger.error("Process failed at executing 'SELECT DISTINCT source FROM scan_history;'")
|
||||
|
||||
results = []
|
||||
|
||||
@ -238,6 +229,9 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
|
||||
super(vulnWhispererNessus, self).__init__(config=config)
|
||||
|
||||
self.logger = logging.getLogger('vulnWhispererNessus')
|
||||
if debug:
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
self.port = int(self.config.get(self.CONFIG_SECTION, 'port'))
|
||||
|
||||
self.develop = True
|
||||
@ -251,26 +245,23 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
'trash')
|
||||
|
||||
try:
|
||||
self.vprint('{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
|
||||
self.logger.info('Attempting to connect to nessus...')
|
||||
self.nessus = \
|
||||
NessusAPI(hostname=self.hostname,
|
||||
port=self.nessus_port,
|
||||
username=self.username,
|
||||
password=self.password)
|
||||
self.nessus_connect = True
|
||||
self.vprint('{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
|
||||
host=self.hostname,
|
||||
port=str(self.nessus_port)))
|
||||
self.logger.info('Connected to nessus on {host}:{port}'.format(host=self.hostname,
|
||||
port=str(self.nessus_port)))
|
||||
except Exception as e:
|
||||
self.vprint(e)
|
||||
self.logger.error('Exception: {}'.format(str(e)))
|
||||
raise Exception(
|
||||
'{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
|
||||
'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
|
||||
config=self.config.config_in,
|
||||
fail=bcolors.FAIL, e=e))
|
||||
e=e))
|
||||
except Exception as e:
|
||||
|
||||
self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL,
|
||||
e=e))
|
||||
self.logger.error('Could not properly load your config!\nReason: {e}'.format(e=e))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -283,7 +274,7 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
:return:
|
||||
"""
|
||||
|
||||
self.vprint('{info} Gathering all scan data... this may take a while...'.format(info=bcolors.INFO))
|
||||
self.logger.info('Gathering all scan data... this may take a while...')
|
||||
scan_records = []
|
||||
for s in scans:
|
||||
if s:
|
||||
@ -310,7 +301,6 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
scan_records.append(record.copy())
|
||||
except Exception as e:
|
||||
# Generates error each time nonetype is encountered.
|
||||
# print(e)
|
||||
|
||||
pass
|
||||
|
||||
@ -331,11 +321,10 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
== 'completed']
|
||||
else:
|
||||
scan_list = all_scans
|
||||
self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
|
||||
new=len(scan_list)))
|
||||
self.logger.info('Identified {new} scans to be processed'.format(new=len(scan_list)))
|
||||
|
||||
if not scan_list:
|
||||
self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
|
||||
self.logger.warn('No new scans to process. Exiting...')
|
||||
return 0
|
||||
|
||||
# Create scan subfolders
|
||||
@ -348,9 +337,9 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
os.makedirs(self.path_check(f['name']))
|
||||
else:
|
||||
os.path.exists(self.path_check(f['name']))
|
||||
self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
|
||||
self.logger.info('Directory already exist for {scan} - Skipping creation'.format(
|
||||
scan=self.path_check(f['name'
|
||||
]), info=bcolors.INFO))
|
||||
])))
|
||||
|
||||
# try download and save scans into each folder the belong to
|
||||
|
||||
@ -406,9 +395,7 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
1,
|
||||
)
|
||||
self.record_insert(record_meta)
|
||||
self.vprint(
|
||||
'{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO,
|
||||
filename=relative_path_name))
|
||||
self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))
|
||||
else:
|
||||
file_req = \
|
||||
self.nessus.download_scan(scan_id=scan_id, history=history_id,
|
||||
@ -417,9 +404,7 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
pd.read_csv(io.StringIO(file_req.decode('utf-8'
|
||||
)))
|
||||
if len(clean_csv) > 2:
|
||||
self.vprint('Processing %s/%s for scan: %s'
|
||||
% (scan_count, len(scan_list),
|
||||
scan_name))
|
||||
self.logger.info('Processing {}/{} for scan: {}'.format(scan_count, len(scan_list), scan_name))
|
||||
columns_to_cleanse = ['CVSS','CVE','Description','Synopsis','Solution','See Also','Plugin Output']
|
||||
|
||||
for col in columns_to_cleanse:
|
||||
@ -438,10 +423,8 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
1,
|
||||
)
|
||||
self.record_insert(record_meta)
|
||||
self.vprint('{info} {filename} records written to {path} '.format(info=bcolors.INFO,
|
||||
filename=clean_csv.shape[
|
||||
0],
|
||||
path=file_name))
|
||||
self.logger.info('{filename} records written to {path} '.format(filename=clean_csv.shape[0],
|
||||
path=file_name))
|
||||
else:
|
||||
record_meta = (
|
||||
scan_name,
|
||||
@ -455,16 +438,12 @@ class vulnWhispererNessus(vulnWhispererBase):
|
||||
1,
|
||||
)
|
||||
self.record_insert(record_meta)
|
||||
self.vprint(file_name
|
||||
+ ' has no host available... Updating database and skipping!'
|
||||
)
|
||||
self.logger.warn('{} has no host available... Updating database and skipping!'.format(file_name))
|
||||
self.conn.close()
|
||||
'{success} Scan aggregation complete! Connection to database closed.'.format(success=bcolors.SUCCESS)
|
||||
self.logger.info('Scan aggregation complete! Connection to database closed.')
|
||||
else:
|
||||
|
||||
self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL,
|
||||
host=self.hostname + ':'
|
||||
+ self.nessus_port))
|
||||
self.logger.error('Failed to use scanner at {host}:{port}'.format(host=self.hostname, port=self.nessus_port))
|
||||
|
||||
|
||||
class vulnWhispererQualys(vulnWhispererBase):
|
||||
@ -530,6 +509,10 @@ class vulnWhispererQualys(vulnWhispererBase):
|
||||
):
|
||||
|
||||
super(vulnWhispererQualys, self).__init__(config=config)
|
||||
self.logger = logging.getLogger('vulnWhispererQualys')
|
||||
if debug:
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
self.qualys_scan = qualysScanReport(config=config)
|
||||
self.latest_scans = self.qualys_scan.qw.get_all_scans()
|
||||
@ -573,18 +556,16 @@ class vulnWhispererQualys(vulnWhispererBase):
|
||||
1,
|
||||
)
|
||||
self.record_insert(record_meta)
|
||||
self.vprint('{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO, filename=relative_path_name))
|
||||
self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))
|
||||
|
||||
else:
|
||||
print('{action} - Generating report for %s'.format(action=bcolors.ACTION) % report_id)
|
||||
self.logger.info('Generating report for {}'.format(report_id))
|
||||
status = self.qualys_scan.qw.create_report(report_id)
|
||||
root = objectify.fromstring(status)
|
||||
if root.responseCode == 'SUCCESS':
|
||||
print('{info} - Successfully generated report! ID: %s'.format(info=bcolors.INFO) \
|
||||
% report_id)
|
||||
self.logger.info('Successfully generated report! ID: {}'.format(report_id))
|
||||
generated_report_id = root.data.Report.id
|
||||
print('{info} - New Report ID: %s'.format(info=bcolors.INFO) \
|
||||
% generated_report_id)
|
||||
self.logger.info('New Report ID: {}'.format(generated_report_id))
|
||||
|
||||
vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id))
|
||||
|
||||
@ -613,22 +594,19 @@ class vulnWhispererQualys(vulnWhispererBase):
|
||||
elif output_format == 'csv':
|
||||
vuln_ready.to_csv(relative_path_name, index=False, header=True) # add when timestamp occured
|
||||
|
||||
print('{success} - Report written to %s'.format(success=bcolors.SUCCESS) \
|
||||
% report_name)
|
||||
self.logger.info('Report written to {}'.format(report_name))
|
||||
|
||||
if cleanup:
|
||||
print('{action} - Removing report %s from Qualys Database'.format(action=bcolors.ACTION) \
|
||||
% generated_report_id)
|
||||
self.logger.info('Removing report {} from Qualys Database'.format(generated_report_id))
|
||||
cleaning_up = \
|
||||
self.qualys_scan.qw.delete_report(generated_report_id)
|
||||
os.remove(self.path_check(str(generated_report_id) + '.csv'))
|
||||
print('{action} - Deleted report from local disk: %s'.format(action=bcolors.ACTION) \
|
||||
% self.path_check(str(generated_report_id)))
|
||||
self.logger.info('Deleted report from local disk: {}'.format(self.path_check(str(generated_report_id))))
|
||||
else:
|
||||
print('{error} Could not process report ID: %s'.format(error=bcolors.FAIL) % status)
|
||||
self.logger.error('Could not process report ID: {}'.format(status))
|
||||
|
||||
except Exception as e:
|
||||
print('{error} - Could not process %s - %s'.format(error=bcolors.FAIL) % (report_id, e))
|
||||
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
|
||||
return vuln_ready
|
||||
|
||||
|
||||
@ -637,8 +615,7 @@ class vulnWhispererQualys(vulnWhispererBase):
self.scans_to_process = self.latest_scans[~self.latest_scans['id'].isin(self.uuids)]
else:
self.scans_to_process = self.latest_scans
self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
new=len(self.scans_to_process)))
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))


def process_web_assets(self):
@ -648,13 +625,13 @@ class vulnWhispererQualys(vulnWhispererBase):
for app in self.scans_to_process.iterrows():
counter += 1
r = app[1]
print('Processing %s/%s' % (counter, len(self.scans_to_process)))
self.logger.debug('Processing {}/{}'.format(counter, len(self.scans_to_process)))
self.whisper_reports(report_id=r['id'],
launched_date=r['launchedDate'],
scan_name=r['name'],
scan_reference=r['reference'])
else:
self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
self.logger.info('No new scans to process. Exiting...')
self.conn.close()
return 0

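Note that per-scan progress now goes to logger.debug, so it only shows up when debug-level logging is enabled. A small sketch of that loop, with a placeholder DataFrame standing in for self.scans_to_process:

import logging
import pandas as pd

logging.basicConfig(level=logging.DEBUG)  # roughly what the debug flag enables
logger = logging.getLogger('vulnWhispererQualys')

# Placeholder data; the real frame comes from the Qualys API.
scans_to_process = pd.DataFrame([
    {'id': 1, 'launchedDate': '2018-01-01', 'name': 'app-a', 'reference': 'ref-a'},
    {'id': 2, 'launchedDate': '2018-01-02', 'name': 'app-b', 'reference': 'ref-b'},
])

counter = 0
for _, r in scans_to_process.iterrows():
    counter += 1
    # Progress is DEBUG so normal runs stay quiet.
    logger.debug('Processing {}/{}'.format(counter, len(scans_to_process)))
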
@ -698,6 +675,9 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
password=None,
):
super(vulnWhispererOpenVAS, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererOpenVAS')
if debug:
self.logger.setLevel(logging.DEBUG)

self.port = int(self.config.get(self.CONFIG_SECTION, 'port'))
self.develop = True
@ -711,7 +691,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
def whisper_reports(self, output_format='json', launched_date=None, report_id=None, cleanup=True):
report = None
if report_id:
print('Processing report ID: %s' % report_id)
self.logger.info('Processing report ID: {}'.format(report_id))


scan_name = report_id.replace('-', '')
@ -720,7 +700,6 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
extension=output_format)
relative_path_name = self.path_check(report_name)
scan_reference = report_id
print relative_path_name

if os.path.isfile(relative_path_name):
# TODO Possibly make this optional to sync directories
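
The bare print relative_path_name dropped above was a Python 2 print statement left over from debugging. If that path still needs to be surfaced, it would fit the new scheme as a debug-level record — a suggestion only, not something this commit adds:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('vulnWhispererOpenVAS')

relative_path_name = '/opt/VulnWhisperer/openvas/example_report.json'  # placeholder path
logger.debug('Report will be written to {}'.format(relative_path_name))
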
@ -737,8 +716,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
1,
)
self.record_insert(record_meta)
self.vprint('{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO,
filename=relative_path_name))
self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))

record_meta = (
scan_name,
@ -763,8 +741,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
with open(relative_path_name, 'w') as f:
f.write(vuln_ready.to_json(orient='records', lines=True))
f.write('\n')
print('{success} - Report written to %s'.format(success=bcolors.SUCCESS) \
% report_name)
self.logger.info('Report written to {}'.format(report_name))

return report

@ -774,8 +751,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
~self.openvas_api.openvas_reports.report_ids.isin(self.uuids)]
else:
self.scans_to_process = self.openvas_api.openvas_reports
self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
new=len(self.scans_to_process)))
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))

def process_openvas_scans(self):
counter = 0
@ -784,13 +760,12 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
for scan in self.scans_to_process.iterrows():
counter += 1
info = scan[1]
print(
'[INFO] Processing %s/%s - Report ID: %s' % (counter, len(self.scans_to_process), info['report_ids']))
self.logger.info('Processing {}/{} - Report ID: {}'.format(counter, len(self.scans_to_process), info['report_ids']))
self.whisper_reports(report_id=info['report_ids'],
launched_date=info['epoch'])
self.vprint('{info} Processing complete!'.format(info=bcolors.INFO))
self.logger.info('Processing complete')
else:
self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
self.logger.info('No new scans to process. Exiting...')
self.conn.close()
return 0

@ -818,6 +793,9 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
):

super(vulnWhispererQualysVuln, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererQualysVuln')
if debug:
self.logger.setLevel(logging.DEBUG)

self.qualys_scan = qualysVulnScan(config=config)
self.directory_check()
@ -855,10 +833,10 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
1,
)
self.record_insert(record_meta)
self.vprint('{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO, filename=relative_path_name))
self.logger.info('File {filename} already exist! Updating database'.format(filename=relative_path_name))

else:
print('Processing report ID: %s' % report_id)
self.logger.info('Processing report ID: {}'.format(report_id))
vuln_ready = self.qualys_scan.process_data(scan_id=report_id)
vuln_ready['scan_name'] = scan_name
vuln_ready['scan_reference'] = report_id
@ -882,11 +860,10 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
f.write(vuln_ready.to_json(orient='records', lines=True))
f.write('\n')

print('{success} - Report written to %s'.format(success=bcolors.SUCCESS) \
% report_name)
self.logger.info('Report written to {}'.format(report_name))

except Exception as e:
print('{error} - Could not process %s - %s'.format(error=bcolors.FAIL) % (report_id, e))
self.logger.error('Could not process {}: {}'.format(report_id, str(e)))


def identify_scans_to_process(self):
@ -897,8 +874,7 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
& (self.latest_scans['status'] == 'Finished')]
else:
self.scans_to_process = self.latest_scans
self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
new=len(self.scans_to_process)))
self.logger.info('Identified {new} scans to be processed'.format(new=len(self.scans_to_process)))


def process_vuln_scans(self):
@ -908,13 +884,13 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
for app in self.scans_to_process.iterrows():
counter += 1
r = app[1]
print('Processing %s/%s' % (counter, len(self.scans_to_process)))
self.logger.debug('Processing {}/{}'.format(counter, len(self.scans_to_process)))
self.whisper_reports(report_id=r['id'],
launched_date=r['date'],
scan_name=r['name'],
scan_reference=r['type'])
else:
self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
self.logger.info('No new scans to process. Exiting...')
self.conn.close()
return 0

@ -934,26 +910,27 @@ class vulnWhispererJIRA(vulnWhispererBase):
password=None,
):
super(vulnWhispererJIRA, self).__init__(config=config)
self.logger = logging.getLogger('vulnWhispererJira')
if debug:
self.logger.setLevel(logging.DEBUG)
self.config_path = config
self.config = vwConfig(config)


if config is not None:
try:
self.vprint('{info} Attempting to connect to jira...'.format(info=bcolors.INFO))
self.logger.info('Attempting to connect to jira...')
self.jira = \
JiraAPI(hostname=self.hostname,
username=self.username,
password=self.password)
self.jira_connect = True
self.vprint('{success} Connected to jira on {host}'.format(success=bcolors.SUCCESS,
host=self.hostname))
self.logger.info('Connected to jira on {host}'.format(host=self.hostname))
except Exception as e:
self.vprint(e)
self.logger.error('Exception: {}'.format(str(e)))
raise Exception(
'{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
config=self.config.config_in,
fail=bcolors.FAIL, e=e))
'Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
config=self.config.config_in, e=e))
sys.exit(1)

profiles = []
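
The JIRA constructor now follows the same connect-and-log shape as the other modules: announce the attempt at INFO, log the underlying exception at ERROR, then fail with an uncoloured message. A self-contained sketch of that flow — JiraAPI is replaced by a stub that always fails, and the hostname is a placeholder:

import logging
import sys

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('vulnWhispererJira')

def connect_to_jira(hostname, username, password):
    # Stub standing in for JiraAPI(...); always fails in this sketch.
    raise IOError('connection refused')

try:
    logger.info('Attempting to connect to jira...')
    jira = connect_to_jira('jira.example.com', 'user', 'secret')
    logger.info('Connected to jira on {host}'.format(host='jira.example.com'))
except Exception as e:
    # Record the underlying error, then exit without colour codes.
    logger.error('Exception: {}'.format(str(e)))
    sys.exit(1)
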
@ -961,7 +938,7 @@ class vulnWhispererJIRA(vulnWhispererBase):

if not self.config.exists_jira_profiles(profiles):
self.config.update_jira_profiles(profiles)
self.vprint("{info} Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(info=bcolors.INFO ,config=self.config_path))
self.logger.info("Jira profiles have been created in {config}, please fill the variables before rerunning the module.".format(config=self.config_path))
sys.exit(0)


@ -973,12 +950,12 @@ class vulnWhispererJIRA(vulnWhispererBase):

project = self.config.get(jira_section,'jira_project')
if project == "":
self.vprint('{fail} JIRA project is missing on the configuration file!'.format(fail=bcolors.FAIL))
self.logger.error('JIRA project is missing on the configuration file!')
sys.exit(0)

# check that project actually exists
if not self.jira.project_exists(project):
self.vprint("{fail} JIRA project '{project}' doesn't exist!".format(fail=bcolors.FAIL, project=project))
self.logger.error("JIRA project '{project}' doesn't exist!".format(project=project))
sys.exit(0)

components = self.config.get(jira_section,'components').split(',')
@ -989,7 +966,7 @@ class vulnWhispererJIRA(vulnWhispererBase):

min_critical = self.config.get(jira_section,'min_critical_to_report')
if not min_critical:
self.vprint('{error} - "min_critical_to_report" variable on config file is empty.'.format(error=bcolors.FAIL))
self.logger.error('"min_critical_to_report" variable on config file is empty.')
sys.exit(0)

#datafile path
@ -1001,7 +978,7 @@ class vulnWhispererJIRA(vulnWhispererBase):
fullpath = "{}/{}".format(root,filename)

if not fullpath:
self.vprint('{error} - Scan file path "{scan_name}" for source "{source}" has not been found.'.format(error=bcolors.FAIL, scan_name=scan_name, source=source))
self.logger.error('Scan file path "{scan_name}" for source "{source}" has not been found.'.format(scan_name=scan_name, source=source))
return 0

return project, components, fullpath, min_critical
@ -1136,12 +1113,12 @@ class vulnWhispererJIRA(vulnWhispererBase):

#***JIRA sync***
if vulnerabilities:
self.vprint('{info} {source} data has been successfuly parsed'.format(info=bcolors.INFO, source=source.upper()))
self.vprint('{info} Starting JIRA sync'.format(info=bcolors.INFO))
self.logger.info('{source} data has been successfuly parsed'.format(source=source.upper()))
self.logger.info('Starting JIRA sync')

self.jira.sync(vulnerabilities, project, components)
else:
self.vprint("{fail} Vulnerabilities from {source} has not been parsed! Exiting...".format(fail=bcolors.FAIL, source=source))
self.logger.info("Vulnerabilities from {source} has not been parsed! Exiting...".format(source=source))
sys.exit(0)

return True
@ -1158,6 +1135,9 @@ class vulnWhisperer(object):
source=None,
scanname=None):

self.logger = logging.getLogger('vulnWhisperer')
if verbose:
self.logger.setLevel(logging.DEBUG)
self.profile = profile
self.config = config
self.username = username
@ -1201,6 +1181,6 @@ class vulnWhisperer(object):
#first we check config fields are created, otherwise we create them
vw = vulnWhispererJIRA(config=self.config)
if not (self.source and self.scanname):
print('{error} - Source scanner and scan name needed!'.format(error=bcolors.FAIL))
self.logger.error('Source scanner and scan name needed!')
return 0
vw.jira_sync(self.source, self.scanname)