Rework logging using the stdlib machinery (#116)

* Rework logging using the stdlib machinery
Use the verbose or debug flag to enable/disable logging.DEBUG
Remove the vprint function from all classes
Remove bcolors from all code
Cleanup [INFO], [ERROR], {success} and similar

* fix some errors my local linter missed but Travis caught

* add coloredlogs and --fancy command line flag
This commit is contained in:
Andrea Lusuardi
2018-11-04 12:39:27 +01:00
committed by Austin Taylor
parent 46955bff75
commit e3e416fe44
12 changed files with 251 additions and 269 deletions

View File

@ -7,6 +7,7 @@ from datetime import datetime
import json
import sys
import time
import logging
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
@ -27,6 +28,9 @@ class NessusAPI(object):
EXPORT_HISTORY = EXPORT + '?history_id={history_id}'
def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
self.logger = logging.getLogger('NessusAPI')
if verbose:
self.logger.setLevel(logging.DEBUG)
if username is None or password is None:
raise Exception('ERROR: Missing username or password.')
@ -51,10 +55,6 @@ class NessusAPI(object):
self.login()
self.scan_ids = self.get_scan_ids()
def vprint(self, msg):
if self.verbose:
print(msg)
def login(self):
resp = self.get_token()
if resp.status_code is 200:
@ -69,6 +69,7 @@ class NessusAPI(object):
success = False
url = self.base + url
self.logging.debug('Requesting to url {}'.format(url))
methods = {'GET': requests.get,
'POST': requests.post,
'DELETE': requests.delete}
@ -81,15 +82,16 @@ class NessusAPI(object):
try:
self.login()
timeout += 1
self.vprint('[INFO] Token refreshed')
self.logger.info('Token refreshed')
except Exception as e:
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
else:
success = True
if json:
data = data.json()
if download:
self.logger.debug('Returning data.content')
return data.content
return data
@ -99,6 +101,7 @@ class NessusAPI(object):
return token
def logout(self):
self.logger.debug('Logging out')
self.request(self.SESSION, method='DELETE')
def get_folders(self):
@ -112,6 +115,7 @@ class NessusAPI(object):
def get_scan_ids(self):
scans = self.get_scans()
scan_ids = [scan_id['id'] for scan_id in scans['scans']] if scans['scans'] else []
self.logger.debug('Found {} scan_ids'.format(len(scan_ids)))
return scan_ids
def count_scan(self, scans, folder_id):
@ -122,11 +126,10 @@ class NessusAPI(object):
def print_scans(self, data):
for folder in data['folders']:
print("\\{0} - ({1})\\".format(folder['name'], self.count_scan(data['scans'], folder['id'])))
self.logger.info("\\{0} - ({1})\\".format(folder['name'], self.count_scan(data['scans'], folder['id'])))
for scan in data['scans']:
if scan['folder_id'] == folder['id']:
print(
"\t\"{0}\" - sid:{1} - uuid: {2}".format(scan['name'].encode('utf-8'), scan['id'], scan['uuid']))
self.logger.info("\t\"{0}\" - sid:{1} - uuid: {2}".format(scan['name'].encode('utf-8'), scan['id'], scan['uuid']))
def get_scan_details(self, scan_id):
data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json=True)
@ -171,8 +174,8 @@ class NessusAPI(object):
file_id = req['file']
token_id = req['token'] if 'token' in req else req['temp_token']
except Exception as e:
print("[ERROR] %s" % e)
print('Download for file id ' + str(file_id) + '.')
self.logger.error('{}'.format(str(e)))
self.logger.info('Download for file id {}'.format(str(file_id)))
while running:
time.sleep(2)
counter += 2
@ -181,10 +184,10 @@ class NessusAPI(object):
running = report_status['status'] != 'ready'
sys.stdout.write(".")
sys.stdout.flush()
# FIXME: why? can this be removed in favour of a counter?
if counter % 60 == 0:
print("")
print("")
self.logger.info("Completed: {}".format(counter))
self.logger.info("Done: {}".format(counter))
if profile=='tenable':
content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
else:
@ -208,12 +211,12 @@ class NessusAPI(object):
local_tz = pytz.timezone('US/Central')
else:
local_tz = pytz.timezone(local_tz)
# print date_time
local_time = local_tz.normalize(local_tz.localize(date_time))
local_time = local_time.astimezone(pytz.utc)
if epoch:
naive = local_time.replace(tzinfo=None)
local_time = int((naive - datetime(1970, 1, 1)).total_seconds())
self.logger.debug('Converted timestamp {} in datetime {}'.format(date_time, local_time))
return local_time
def tz_conv(self, tz):

View File

@ -4,11 +4,11 @@ __author__ = 'Austin Taylor'
import datetime as dt
import io
import logging
import pandas as pd
import requests
from bs4 import BeautifulSoup
from ..utils.cli import bcolors
class OpenVAS_API(object):
@ -21,6 +21,9 @@ class OpenVAS_API(object):
password=None,
report_format_id=None,
verbose=True):
self.logger = logging.getLogger('OpenVAS_API')
if verbose:
self.logger.setLevel(logging.DEBUG)
if username is None or password is None:
raise Exception('ERROR: Missing username or password.')
@ -49,10 +52,6 @@ class OpenVAS_API(object):
self.openvas_reports = self.get_reports()
self.report_formats = self.get_report_formats()
def vprint(self, msg):
if self.verbose:
print(msg)
def login(self):
resp = self.get_token()
if resp.status_code is 200:
@ -90,9 +89,9 @@ class OpenVAS_API(object):
try:
self.login()
timeout += 1
self.vprint('[INFO] Token refreshed')
self.logger.info(' Token refreshed')
except Exception as e:
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
self.logger.error('Could not refresh token\nReason: {}'.format(str(e)))
else:
success = True
@ -116,7 +115,7 @@ class OpenVAS_API(object):
('cmd', 'get_report_formats'),
('token', self.token)
)
self.vprint('{info} Retrieving available report formats'.format(info=bcolors.INFO))
self.logger.info('Retrieving available report formats')
data = self.request(url=self.OMP, method='GET', params=params)
bs = BeautifulSoup(data.content, "lxml")
@ -133,7 +132,7 @@ class OpenVAS_API(object):
return format_mapping
def get_reports(self, complete=True):
print('{info} Retreiving OpenVAS report data...'.format(info=bcolors.INFO))
self.logger.info('Retreiving OpenVAS report data...')
params = (('cmd', 'get_reports'),
('token', self.token),
('max_results', 1),
@ -184,7 +183,7 @@ class OpenVAS_API(object):
('report_format_id', '{report_format_id}'.format(report_format_id=self.report_formats['CSV Results'])),
('submit', 'Download'),
)
print('Retrieving %s' % report_id)
self.logger.info('Retrieving {}'.format(report_id))
req = self.request(self.OMP, params=params, method='GET')
report_df = pd.read_csv(io.BytesIO(req.text.encode('utf-8')))
report_df['report_ids'] = report_id

View File

@ -12,6 +12,7 @@ import requests
import sys
import os
import csv
import logging
import dateutil.parser as dp
@ -33,19 +34,20 @@ class qualysWhisperAPI(object):
VERSION = '/qps/rest/portal/version'
def __init__(self, config=None):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
self.qgc = qualysapi.connect(config)
print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
print('[ERROR] Could not connect to Qualys - %s' % e)
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
self.headers = {
"content-type": "text/xml"}
self.config_parse = qcconf.QualysConnectConfig(config)
try:
self.template_id = self.config_parse.get_template_id()
except:
print('ERROR - Could not retrieve template ID')
self.logger.error('Could not retrieve template ID')
def request(self, path, method='get', data=None):
methods = {'get': requests.get,
@ -126,15 +128,15 @@ class qualysWhisperAPI(object):
dataframes = []
_records = []
total = int(self.get_was_scan_count(status=status))
print('Retrieving information for %s scans' % total)
self.logger.info('Retrieving information for {} scans'.format(total))
for i in range(0, total):
if i % limit == 0:
if (total - i) < limit:
qualys_api_limit = total - i
print('Making a request with a limit of %s at offset %s' % (str(qualys_api_limit), str(i + 1)))
self.logger.info('Making a request with a limit of {} at offset {}'.format((str(qualys_api_limit), str(i + 1))))
scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
_records.append(scan_info)
print('Converting XML to DataFrame')
self.logger.debug('Converting XML to DataFrame')
dataframes = [self.xml_parser(xml) for xml in _records]
return pd.concat(dataframes, axis=0).reset_index().drop('index', axis=1)
@ -235,10 +237,9 @@ class qualysWhisperAPI(object):
mapper = {'scan': self.generate_scan_report_XML,
'webapp': self.generate_webapp_report_XML}
try:
# print lxml.etree.tostring(mapper[kind](report_id), pretty_print=True)
data = mapper[kind](report_id)
except Exception as e:
print(e)
self.logger.error('Error creating report: {}'.format(str(e)))
return self.qgc.request(self.REPORT_CREATE, data)
@ -322,7 +323,7 @@ class qualysReportFields:
class qualysUtils:
def __init__(self):
pass
self.logger = logging.getLogger('qualysUtils')
def grab_section(
self,
@ -398,6 +399,7 @@ class qualysWebAppReport:
delimiter=',',
quotechar='"',
):
self.logger = logging.getLogger('qualysWebAppReport')
self.file_in = file_in
self.file_stream = file_stream
self.report = None
@ -407,8 +409,7 @@ class qualysWebAppReport:
try:
self.qw = qualysWhisperAPI(config=config)
except Exception as e:
print('Could not load config! Please check settings for %s' \
% e)
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
if file_stream:
self.open_file = file_in.splitlines()
@ -517,7 +518,7 @@ class qualysWebAppReport:
merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered'
)]
except Exception as e:
print(e)
self.logger.error('Error merging df: {}'.format(str(e)))
return merged_df
def download_file(self, file_id):
@ -527,7 +528,7 @@ class qualysWebAppReport:
for line in report.splitlines():
file_out.write(line + '\n')
file_out.close()
print('[ACTION] - File written to %s' % filename)
self.logger.info('File written to {}'.format(filename))
return filename
def remove_file(self, filename):
@ -537,7 +538,7 @@ class qualysWebAppReport:
"""Downloads a file from qualys and normalizes it"""
download_file = self.download_file(file_id)
print('[ACTION] - Downloading file ID: %s' % file_id)
self.logger.info('Downloading file ID: {}'.format(file_id))
report_data = self.grab_sections(download_file)
merged_data = self.data_normalizer(report_data)
if scan:
@ -562,35 +563,30 @@ class qualysWebAppReport:
+ '_{last_updated}'.format(last_updated=updated_date) \
+ '.csv'
if os.path.isfile(report_name):
print('[ACTION] - File already exist! Skipping...')
self.logger.info('File already exists! Skipping...')
pass
else:
print('[ACTION] - Generating report for %s' % report_id)
self.logger.info('Generating report for {}'.format(report_id))
status = self.qw.create_report(report_id)
root = objectify.fromstring(status)
if root.responseCode == 'SUCCESS':
print('[INFO] - Successfully generated report for webapp: %s' \
% report_id)
self.logger.info('Successfully generated report for webapp: {}'.format(report_id))
generated_report_id = root.data.Report.id
print ('[INFO] - New Report ID: %s' \
% generated_report_id)
self.logger.info('New Report ID: {}'.format(generated_report_id))
vuln_ready = self.process_data(generated_report_id)
vuln_ready.to_csv(report_name, index=False, header=True) # add when timestamp occured
print('[SUCCESS] - Report written to %s' \
% report_name)
self.logger.info('Report written to {}'.format(report_name))
if cleanup:
print('[ACTION] - Removing report %s' \
% generated_report_id)
self.logger.info('Removing report {}'.format(generated_report_id))
cleaning_up = \
self.qw.delete_report(generated_report_id)
self.remove_file(str(generated_report_id) + '.csv')
print('[ACTION] - Deleted report: %s' \
% generated_report_id)
self.logger.info('Deleted report: {}'.format(generated_report_id))
else:
print('Could not process report ID: %s' % status)
self.logger.error('Could not process report ID: {}'.format(status))
except Exception as e:
print('[ERROR] - Could not process %s - %s' % (report_id, e))
self.logger.error('Could not process {}: {}'.format(report_id, e))
return vuln_ready
@ -633,6 +629,7 @@ class qualysScanReport:
delimiter=',',
quotechar='"',
):
self.logger = logging.getLogger('qualysScanReport')
self.file_in = file_in
self.file_stream = file_stream
self.report = None
@ -642,8 +639,7 @@ class qualysScanReport:
try:
self.qw = qualysWhisperAPI(config=config)
except Exception as e:
print('Could not load config! Please check settings for %s' \
% e)
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
if file_stream:
self.open_file = file_in.splitlines()
@ -746,7 +742,7 @@ class qualysScanReport:
merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered'
)]
except Exception as e:
print(e)
self.logger.error('Error normalizing: {}'.format(str(e)))
return merged_df
def download_file(self, path='', file_id=None):
@ -756,7 +752,7 @@ class qualysScanReport:
for line in report.splitlines():
file_out.write(line + '\n')
file_out.close()
print('[ACTION] - File written to %s' % filename)
self.logger.info('File written to {}'.format(filename))
return filename
def remove_file(self, filename):
@ -766,7 +762,7 @@ class qualysScanReport:
"""Downloads a file from qualys and normalizes it"""
download_file = self.download_file(path=path, file_id=file_id)
print('[ACTION] - Downloading file ID: %s' % file_id)
self.logger.info('Downloading file ID: {}'.format(file_id))
report_data = self.grab_sections(download_file)
merged_data = self.data_normalizer(report_data)
merged_data.sort_index(axis=1, inplace=True)
@ -788,35 +784,29 @@ class qualysScanReport:
+ '_{last_updated}'.format(last_updated=updated_date) \
+ '.csv'
if os.path.isfile(report_name):
print('[ACTION] - File already exist! Skipping...')
pass
self.logger.info('File already exist! Skipping...')
else:
print('[ACTION] - Generating report for %s' % report_id)
self.logger.info('Generating report for {}'.format(report_id))
status = self.qw.create_report(report_id)
root = objectify.fromstring(status)
if root.responseCode == 'SUCCESS':
print('[INFO] - Successfully generated report for webapp: %s' \
% report_id)
self.logger.info('Successfully generated report for webapp: {}'.format(report_id))
generated_report_id = root.data.Report.id
print ('[INFO] - New Report ID: %s' \
% generated_report_id)
self.logger.info('New Report ID: {}'.format(generated_report_id))
vuln_ready = self.process_data(generated_report_id)
vuln_ready.to_csv(report_name, index=False, header=True) # add when timestamp occured
print('[SUCCESS] - Report written to %s' \
% report_name)
self.logger.info('Report written to {}'.format(report_name))
if cleanup:
print('[ACTION] - Removing report %s from disk' \
% generated_report_id)
self.logger.info('Removing report {} from disk'.format(generated_report_id))
cleaning_up = \
self.qw.delete_report(generated_report_id)
self.remove_file(str(generated_report_id) + '.csv')
print('[ACTION] - Deleted report from Qualys Database: %s' \
% generated_report_id)
self.logger.info('Deleted report from Qualys Database: {}'.format(generated_report_id))
else:
print('Could not process report ID: %s' % status)
self.logger.error('Could not process report ID: {}'.format(status))
except Exception as e:
print('[ERROR] - Could not process %s - %s' % (report_id, e))
self.logger.error('Could not process {}: {}'.format(report_id, e))
return vuln_ready

View File

@ -7,6 +7,7 @@ import pandas as pd
import qualysapi
import requests
import sys
import logging
import os
import dateutil.parser as dp
@ -15,14 +16,16 @@ class qualysWhisperAPI(object):
SCANS = 'api/2.0/fo/scan'
def __init__(self, config=None):
self.logger = logging.getLogger('qualysWhisperAPI')
self.config = config
try:
self.qgc = qualysapi.connect(config)
# Fail early if we can't make a request or auth is incorrect
self.qgc.request('about.php')
print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
self.logger.info('Connected to Qualys at {}'.format(self.qgc.server))
except Exception as e:
print('[ERROR] Could not connect to Qualys - %s' % e)
self.logger.error('Could not connect to Qualys: {}'.format(str(e)))
# FIXME: exit(1) does not exist: either it's exit() or sys.exit(CODE)
exit(1)
def scan_xml_parser(self, xml):
@ -66,10 +69,12 @@ class qualysWhisperAPI(object):
class qualysUtils:
def __init__(self):
pass
self.logger = logging.getLogger('qualysUtils')
def iso_to_epoch(self, dt):
return dp.parse(dt).strftime('%s')
out = dp.parse(dt).strftime('%s')
self.logger.info('Converted {} to {}'.format(dt, out))
return out
class qualysVulnScan:
@ -82,6 +87,7 @@ class qualysVulnScan:
delimiter=',',
quotechar='"',
):
self.logger = logging.getLogger('qualysVulnScan')
self.file_in = file_in
self.file_stream = file_stream
self.report = None
@ -91,8 +97,7 @@ class qualysVulnScan:
try:
self.qw = qualysWhisperAPI(config=config)
except Exception as e:
print('Could not load config! Please check settings for %s' \
% e)
self.logger.error('Could not load config! Please check settings. Error: {}'.format(str(e)))
if file_stream:
self.open_file = file_in.splitlines()
@ -104,7 +109,7 @@ class qualysVulnScan:
def process_data(self, scan_id=None):
"""Downloads a file from Qualys and normalizes it"""
print('[ACTION] - Downloading scan ID: %s' % scan_id)
self.logger.info('Downloading scan ID: {}'.format(scan_id))
scan_report = self.qw.get_scan_details(scan_id=scan_id)
keep_columns = ['category', 'cve_id', 'cvss3_base', 'cvss3_temporal', 'cvss_base', 'cvss_temporal', 'dns', 'exploitability', 'fqdn', 'impact', 'ip', 'ip_status', 'netbios', 'os', 'pci_vuln', 'port', 'protocol', 'qid', 'results', 'severity', 'solution', 'ssl', 'threat', 'title', 'type', 'vendor_reference']
scan_report = scan_report.filter(keep_columns)