Added a category class and a dedicated class for Qualys scan reports; also added further enrichments to the reports.

Austin Taylor
2017-12-28 21:57:21 -05:00
parent a274341d23
commit d03ba15772
3 changed files with 594 additions and 154 deletions
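
For orientation, a minimal sketch of how the new scan-driven flow is meant to be exercised. The config path below is an illustrative assumption; the class, method, and column names come from this commit.

# Illustrative sketch only -- 'config.ini' is a placeholder for a VulnWhisperer
# config file that contains a Qualys section.
from frameworks.qualys import qualysScanReport   # import path as used in this commit

scan_reporter = qualysScanReport(config='config.ini')        # wraps qualysWhisperAPI
latest = scan_reporter.qw.get_all_scans(status='FINISHED')   # DataFrame of finished WAS scans
for _, scan in latest.iterrows():
    # create_report(kind='scan') -> download CSV -> grab_sections/data_normalizer -> CSV on disk
    scan_reporter.whisper_reports(scan['id'], scan['launchedDate'])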

View File

@@ -29,7 +29,14 @@ def main():
    parser.add_argument('-p', '--password', dest='password', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS password')
    args = parser.parse_args()

    vw = vulnWhisperer(config=args.config,
                       profile=args.section,
                       verbose=args.verbose,
                       username=args.username,
                       password=args.password)

    vw.whisper_vulnerabilities()
    '''
    try:
        vw = vulnWhisperer(config=args.config,
@@ -45,7 +52,7 @@ def main():
        if args.verbose:
            print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL, error=e, endc=bcolors.ENDC))
        sys.exit(2)
    '''

if __name__ == '__main__':
    main()

View File

@@ -18,8 +18,10 @@ import os
import csv
import dateutil.parser as dp


class qualysWhisperAPI(object):
    COUNT_WEBAPP = '/count/was/webapp'
    COUNT_WASSCAN = '/count/was/wasscan'
    DELETE_REPORT = '/delete/was/report/{report_id}'
    GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
    QPS_REST_3 = '/qps/rest/3.0'
@@ -28,6 +30,8 @@ class qualysWhisper(object):
    REPORT_STATUS = '/status/was/report/{report_id}'
    REPORT_CREATE = '/create/was/report'
    REPORT_DOWNLOAD = '/download/was/report/{report_id}'
    SCAN_DETAILS = '/get/was/wasscan/{scan_id}'
    SCAN_DOWNLOAD = '/download/was/wasscan/{scan_id}'
    SEARCH_REPORTS = '/search/was/report'
    SEARCH_WEB_APPS = '/search/was/webapp'
    SEARCH_WAS_SCAN = '/search/was/wasscan'
@@ -37,37 +41,42 @@ class qualysWhisper(object):
        self.config = config
        try:
            self.qgc = qualysapi.connect(config)
            print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
        except Exception as e:
            print('[ERROR] Could not connect to Qualys - %s' % e)
        self.headers = {
            "content-type": "text/xml"}
        self.config_parse = qcconf.QualysConnectConfig(config)
        try:
            self.template_id = self.config_parse.get_template_id()
        except:
            print('ERROR - Could not retrieve template ID')
            sys.exit(2)

    def request(self, path, method='get', data=None):
        methods = {'get': requests.get,
                   'post': requests.post}
        base = 'https://' + self.qgc.server + path
        req = methods[method](base, auth=self.qgc.auth, data=data, headers=self.headers).content
        return req

    def get_version(self):
        return self.request(self.VERSION)

    def get_scan_count(self, scan_name):
        parameters = (
            E.ServiceRequest(
                E.filters(
                    E.Criteria({'field': 'name', 'operator': 'CONTAINS'}, scan_name))))
        xml_output = self.qgc.request(self.COUNT_WEBAPP, parameters)
        root = objectify.fromstring(xml_output)
        return root.count.text

    def get_was_scan_count(self, status):
        parameters = (
            E.ServiceRequest(
                E.filters(
                    E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status))))
        xml_output = self.qgc.request(self.COUNT_WASSCAN, parameters)
        root = objectify.fromstring(xml_output)
        return root.count.text
@@ -77,14 +86,16 @@ class qualysWhisper(object):
    def xml_parser(self, xml, dupfield=None):
        all_records = []
        root = ET.XML(xml)
        for i, child in enumerate(root):
            for subchild in child:
                record = {}
                dup_tracker = 0
                for p in subchild:
                    record[p.tag] = p.text
                    for o in p:
                        if o.tag in record:
                            dup_tracker += 1
                            record[o.tag + '_%s' % dup_tracker] = o.text
                        else:
                            record[o.tag] = o.text
                all_records.append(record)
@@ -92,28 +103,50 @@ class qualysWhisper(object):
    def get_report_list(self):
        """Returns a dataframe of reports"""
        return self.xml_parser(self.get_reports(), dupfield='user_id')

    def get_web_apps(self):
        """Returns webapps available for account"""
        return self.qgc.request(self.SEARCH_WEB_APPS)

    def get_web_app_list(self):
        """Returns dataframe of webapps"""
        return self.xml_parser(self.get_web_apps(), dupfield='user_id')

    def get_web_app_details(self, was_id):
        """Get webapp details - use to retrieve app ID tag"""
        return self.qgc.request(self.GET_WEBAPP_DETAILS.format(was_id=was_id))

    def get_scans_by_app_id(self, app_id):
        data = self.generate_app_id_scan_XML(app_id)
        return self.qgc.request(self.SEARCH_WAS_SCAN, data)

    def get_scan_info(self, limit=1000, offset=1, status='FINISHED'):
        """Returns XML of ALL WAS Scans"""
        data = self.generate_scan_result_XML(limit=limit, offset=offset, status=status)
        return self.qgc.request(self.SEARCH_WAS_SCAN, data)

    def get_all_scans(self, limit=1000, offset=1, status='FINISHED'):
        qualys_api_limit = limit
        dataframes = []
        _records = []
        total = int(self.get_was_scan_count(status=status))
        print('Processing %s total scans' % total)
        for i in range(0, total):
            if i % limit == 0:
                if (total - i) < limit:
                    qualys_api_limit = total - i
                print('Making a request with a limit of %s at offset %s' % (str(qualys_api_limit), str(i + 1)))
                scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
                _records.append(scan_info)
        print('Converting XML to DataFrame')
        dataframes = [self.xml_parser(xml) for xml in _records]
        return pd.concat(dataframes, axis=0).reset_index().drop('index', axis=1)

    def get_scan_details(self, scan_id):
        return self.qgc.request(self.SCAN_DETAILS.format(scan_id=scan_id))

    def get_report_details(self, report_id):
        return self.qgc.request(self.REPORT_DETAILS.format(report_id=report_id))
@@ -123,43 +156,109 @@ class qualysWhisper(object):
    def download_report(self, report_id):
        return self.qgc.request(self.REPORT_DOWNLOAD.format(report_id=report_id))

    def download_scan_results(self, scan_id):
        return self.qgc.request(self.SCAN_DOWNLOAD.format(scan_id=scan_id))

    def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
        report_xml = E.ServiceRequest(
            E.filters(
                E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status),
            ),
            E.preferences(
                E.startFromOffset(str(offset)),
                E.limitResults(str(limit))
            ),
        )
        return report_xml

    def generate_scan_report_XML(self, scan_id):
        """Generates a CSV report for a scan based on the template defined in the .ini file"""
        report_xml = E.ServiceRequest(
            E.data(
                E.Report(
                    E.name('<![CDATA[API Scan Report generated by VulnWhisperer]]>'),
                    E.description('<![CDATA[CSV Scanning report for VulnWhisperer]]>'),
                    E.format('CSV'),
                    E.type('WAS_SCAN_REPORT'),
                    E.template(
                        E.id(self.template_id)
                    ),
                    E.config(
                        E.scanReport(
                            E.target(
                                E.scans(
                                    E.WasScan(
                                        E.id(scan_id)
                                    )
                                ),
                            ),
                        ),
                    )
                )
            )
        )
        return report_xml

    def generate_webapp_report_XML(self, app_id):
        """Generates a CSV report for an asset based on the template defined in the .ini file"""
        report_xml = E.ServiceRequest(
            E.data(
                E.Report(
                    E.name('<![CDATA[API Web Application Report generated by VulnWhisperer]]>'),
                    E.description('<![CDATA[CSV WebApp report for VulnWhisperer]]>'),
                    E.format('CSV'),
                    E.template(
                        E.id(self.template_id)
                    ),
                    E.config(
                        E.webAppReport(
                            E.target(
                                E.webapps(
                                    E.WebApp(
                                        E.id(app_id)
                                    )
                                ),
                            ),
                        ),
                    )
                )
            )
        )
        return report_xml

    def generate_app_id_scan_XML(self, app_id):
        report_xml = E.ServiceRequest(
            E.filters(
                E.Criteria({'field': 'webApp.id', 'operator': 'EQUALS'}, app_id),
            ),
        )
        return report_xml

    def create_report(self, report_id, kind='scan'):
        mapper = {'scan': self.generate_scan_report_XML,
                  'webapp': self.generate_webapp_report_XML}
        try:
            # print lxml.etree.tostring(mapper[kind](report_id), pretty_print=True)
            data = mapper[kind](report_id)
        except Exception as e:
            print(e)
        return self.qgc.request(self.REPORT_CREATE, data)

    def delete_report(self, report_id):
        return self.qgc.request(self.DELETE_REPORT.format(report_id=report_id))


class qualysReportFields:
    CATEGORIES = ['VULNERABILITY',
                  'SENSITIVECONTENT',
                  'INFORMATION_GATHERED']

    # URL Vulnerability Information
    VULN_BLOCK = [
        CATEGORIES[0],
        'ID',
        'QID',
@@ -171,7 +270,6 @@ class qualysWebAppReport:
        'Authentication',
        'Ajax Request',
        'Ajax Request ID',
        'Ignored',
        'Ignore Reason',
        'Ignore Date',
@@ -189,29 +287,14 @@ class qualysWebAppReport:
        'Evidence #1',
    ]

    INFO_HEADER = [
        'Vulnerability Category',
        'ID',
        'QID',
        'Response #1',
        'Last Time Detected',
    ]
    INFO_BLOCK = [
        CATEGORIES[2],
        'ID',
        'QID',
@@ -238,43 +321,13 @@ class qualysWebAppReport:
    GROUP_HEADER = ['GROUP', 'Name', 'Category']
    OWASP_HEADER = ['OWASP', 'Code', 'Name']
    WASC_HEADER = ['WASC', 'Code', 'Name']
    SCAN_META = ['Web Application Name', 'URL', 'Owner', 'Operating System', 'Scope']
    CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']


class qualysUtils:
    def __init__(self):
        pass

    def grab_section(
            self,
@@ -311,47 +364,127 @@ class qualysWebAppReport:
        _data = reduce(lambda a, kv: a.replace(*kv), repls, _data)
        return _data

class qualysWebAppReport:
    # URL Vulnerability Information
    WEB_APP_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
    WEB_APP_VULN_BLOCK.insert(0, 'Web Application Name')
    WEB_APP_VULN_BLOCK.insert(WEB_APP_VULN_BLOCK.index('Ignored'), 'Status')

    WEB_APP_VULN_HEADER = list(WEB_APP_VULN_BLOCK)
    WEB_APP_VULN_HEADER[WEB_APP_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
        'Vulnerability Category'

    WEB_APP_SENSITIVE_HEADER = list(WEB_APP_VULN_HEADER)
    WEB_APP_SENSITIVE_HEADER.insert(WEB_APP_SENSITIVE_HEADER.index('Url'), 'Content')

    WEB_APP_SENSITIVE_BLOCK = list(WEB_APP_SENSITIVE_HEADER)
    WEB_APP_SENSITIVE_BLOCK[WEB_APP_SENSITIVE_BLOCK.index('Vulnerability Category')] = \
        qualysReportFields.CATEGORIES[1]

    WEB_APP_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
    WEB_APP_INFO_HEADER.insert(0, 'Web Application Name')

    WEB_APP_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
    WEB_APP_INFO_BLOCK.insert(0, 'Web Application Name')

    QID_HEADER = list(qualysReportFields.QID_HEADER)
    GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
    OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
    WASC_HEADER = list(qualysReportFields.WASC_HEADER)
    SCAN_META = list(qualysReportFields.SCAN_META)
    CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)

    def __init__(
            self,
            config=None,
            file_in=None,
            file_stream=False,
            delimiter=',',
            quotechar='"',
    ):
        self.file_in = file_in
        self.file_stream = file_stream
        self.report = None
        self.utils = qualysUtils()

        if config:
            try:
                self.qw = qualysWhisperAPI(config=config)
            except Exception as e:
                print('Could not load config! Please check settings for %s' % e)

        if file_stream:
            self.open_file = file_in.splitlines()
        elif file_in:
            self.open_file = open(file_in, 'rb')
        self.downloaded_file = None

    def get_hostname(self, report):
        host = ''
        with open(report, 'rb') as csvfile:
            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
            for x in q_report:
                if 'Web Application Name' in x[0]:
                    host = q_report.next()[0]
        return host

    def get_scanreport_name(self, report):
        scan_name = ''
        with open(report, 'rb') as csvfile:
            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
            for x in q_report:
                if 'Scans' in x[0]:
                    scan_name = x[1]
        return scan_name

    def grab_sections(self, report):
        all_dataframes = []
        with open(report, 'rb') as csvfile:
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_APP_VULN_BLOCK,
                                        end=[self.WEB_APP_SENSITIVE_BLOCK, self.WEB_APP_INFO_BLOCK],
                                        pop_last=True),
                columns=self.WEB_APP_VULN_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_APP_SENSITIVE_BLOCK,
                                        end=[self.WEB_APP_INFO_BLOCK, self.WEB_APP_SENSITIVE_BLOCK],
                                        pop_last=True),
                columns=self.WEB_APP_SENSITIVE_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_APP_INFO_BLOCK,
                                        end=[self.QID_HEADER],
                                        pop_last=True),
                columns=self.WEB_APP_INFO_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.QID_HEADER,
                                        end=[self.GROUP_HEADER],
                                        pop_last=True),
                columns=self.QID_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.GROUP_HEADER,
                                        end=[self.OWASP_HEADER],
                                        pop_last=True),
                columns=self.GROUP_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.OWASP_HEADER,
                                        end=[self.WASC_HEADER],
                                        pop_last=True),
                columns=self.OWASP_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WASC_HEADER, end=[['APPENDIX']],
                                        pop_last=True),
                columns=self.WASC_HEADER))
            all_dataframes.append(pd.DataFrame(
                self.utils.grab_section(report, self.CATEGORY_HEADER),
                columns=self.CATEGORY_HEADER))

        return all_dataframes
@@ -372,13 +505,12 @@ class qualysWebAppReport:
        if 'Content' not in merged_df:
            merged_df['Content'] = ''

        columns_to_cleanse = ['Payload #1', 'Request Method #1', 'Request URL #1',
                              'Request Headers #1', 'Response #1', 'Evidence #1',
                              'Description', 'Impact', 'Solution', 'Url', 'Content']

        for col in columns_to_cleanse:
            merged_df[col] = merged_df[col].apply(self.utils.cleanser)

        merged_df = merged_df.drop(['QID_y', 'QID_x'], axis=1)
        merged_df = merged_df.rename(columns={'Id': 'QID'})
@@ -404,6 +536,233 @@ class qualysWebAppReport:
    def remove_file(self, filename):
        os.remove(filename)

    def process_data(self, file_id, scan=True, cleanup=True):
        """Downloads a file from qualys and normalizes it"""
        download_file = self.download_file(file_id)
        print('[ACTION] - Downloading file ID: %s' % file_id)
        report_data = self.grab_sections(download_file)
        merged_data = self.data_normalizer(report_data)
        if scan:
            scan_name = self.get_scanreport_name(download_file)
            merged_data['ScanName'] = scan_name

        # TODO cleanup old data (delete)
        return merged_data

    def whisper_reports(self, report_id, updated_date, cleanup=False):
        """
        report_id: App ID
        updated_date: Last time scan was ran for app_id
        """
        vuln_ready = None
        try:
            if 'Z' in updated_date:
                updated_date = self.utils.iso_to_epoch(updated_date)
            report_name = 'qualys_web_' + str(report_id) \
                          + '_{last_updated}'.format(last_updated=updated_date) \
                          + '.csv'
            if os.path.isfile(report_name):
                print('[ACTION] - File already exist! Skipping...')
                pass
            else:
                print('[ACTION] - Generating report for %s' % report_id)
                status = self.qw.create_report(report_id)
                root = objectify.fromstring(status)
                if root.responseCode == 'SUCCESS':
                    print('[INFO] - Successfully generated report for webapp: %s'
                          % report_id)
                    generated_report_id = root.data.Report.id
                    print('[INFO] - New Report ID: %s'
                          % generated_report_id)
                    vuln_ready = self.process_data(generated_report_id)
                    vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
                    print('[SUCCESS] - Report written to %s'
                          % report_name)
                    if cleanup:
                        print('[ACTION] - Removing report %s'
                              % generated_report_id)
                        cleaning_up = \
                            self.qw.delete_report(generated_report_id)
                        self.remove_file(str(generated_report_id) + '.csv')
                        print('[ACTION] - Deleted report: %s'
                              % generated_report_id)
                else:
                    print('Could not process report ID: %s' % status)
        except Exception as e:
            print('[ERROR] - Could not process %s - %s' % (report_id, e))
        return vuln_ready

class qualysScanReport:
    # URL Vulnerability Information
    WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
    WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')

    WEB_SCAN_VULN_HEADER = list(WEB_SCAN_VULN_BLOCK)
    WEB_SCAN_VULN_HEADER[WEB_SCAN_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
        'Vulnerability Category'

    WEB_SCAN_SENSITIVE_HEADER = list(WEB_SCAN_VULN_HEADER)
    WEB_SCAN_SENSITIVE_HEADER.insert(WEB_SCAN_SENSITIVE_HEADER.index('Url'), 'Content')

    WEB_SCAN_SENSITIVE_BLOCK = list(WEB_SCAN_SENSITIVE_HEADER)
    WEB_SCAN_SENSITIVE_BLOCK.insert(WEB_SCAN_SENSITIVE_BLOCK.index('QID'), 'Detection ID')
    WEB_SCAN_SENSITIVE_BLOCK[WEB_SCAN_SENSITIVE_BLOCK.index('Vulnerability Category')] = \
        qualysReportFields.CATEGORIES[1]

    WEB_SCAN_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
    WEB_SCAN_INFO_HEADER.insert(WEB_SCAN_INFO_HEADER.index('QID'), 'Detection ID')

    WEB_SCAN_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
    WEB_SCAN_INFO_BLOCK.insert(WEB_SCAN_INFO_BLOCK.index('QID'), 'Detection ID')

    QID_HEADER = list(qualysReportFields.QID_HEADER)
    GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
    OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
    WASC_HEADER = list(qualysReportFields.WASC_HEADER)
    SCAN_META = list(qualysReportFields.SCAN_META)
    CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)

    def __init__(
            self,
            config=None,
            file_in=None,
            file_stream=False,
            delimiter=',',
            quotechar='"',
    ):
        self.file_in = file_in
        self.file_stream = file_stream
        self.report = None
        self.utils = qualysUtils()

        if config:
            try:
                self.qw = qualysWhisperAPI(config=config)
            except Exception as e:
                print('Could not load config! Please check settings for %s' % e)

        if file_stream:
            self.open_file = file_in.splitlines()
        elif file_in:
            self.open_file = open(file_in, 'rb')
        self.downloaded_file = None

    def grab_sections(self, report):
        all_dataframes = []
        dict_tracker = {}
        with open(report, 'rb') as csvfile:
            dict_tracker['WEB_SCAN_VULN_BLOCK'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_SCAN_VULN_BLOCK,
                                        end=[self.WEB_SCAN_SENSITIVE_BLOCK, self.WEB_SCAN_INFO_BLOCK],
                                        pop_last=True),
                columns=self.WEB_SCAN_VULN_HEADER)
            dict_tracker['WEB_SCAN_SENSITIVE_BLOCK'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_SCAN_SENSITIVE_BLOCK,
                                        end=[self.WEB_SCAN_INFO_BLOCK, self.WEB_SCAN_SENSITIVE_BLOCK],
                                        pop_last=True),
                columns=self.WEB_SCAN_SENSITIVE_HEADER)
            dict_tracker['WEB_SCAN_INFO_BLOCK'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WEB_SCAN_INFO_BLOCK,
                                        end=[self.QID_HEADER],
                                        pop_last=True),
                columns=self.WEB_SCAN_INFO_HEADER)
            dict_tracker['QID_HEADER'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.QID_HEADER,
                                        end=[self.GROUP_HEADER],
                                        pop_last=True),
                columns=self.QID_HEADER)
            dict_tracker['GROUP_HEADER'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.GROUP_HEADER,
                                        end=[self.OWASP_HEADER],
                                        pop_last=True),
                columns=self.GROUP_HEADER)
            dict_tracker['OWASP_HEADER'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.OWASP_HEADER,
                                        end=[self.WASC_HEADER],
                                        pop_last=True),
                columns=self.OWASP_HEADER)
            dict_tracker['WASC_HEADER'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.WASC_HEADER, end=[['APPENDIX']],
                                        pop_last=True),
                columns=self.WASC_HEADER)
            dict_tracker['SCAN_META'] = pd.DataFrame(
                self.utils.grab_section(report,
                                        self.SCAN_META,
                                        end=[self.CATEGORY_HEADER],
                                        pop_last=True),
                columns=self.SCAN_META)
            dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(
                self.utils.grab_section(report, self.CATEGORY_HEADER),
                columns=self.CATEGORY_HEADER)
        all_dataframes.append(dict_tracker)

        return all_dataframes

    def data_normalizer(self, dataframes):
        """
        Merge and clean data
        :param dataframes:
        :return:
        """
        df_dict = dataframes[0]
        merged_df = pd.concat([df_dict['WEB_SCAN_VULN_BLOCK'], df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
                               df_dict['WEB_SCAN_INFO_BLOCK']], axis=0,
                              ignore_index=False).fillna('')
        merged_df = pd.merge(merged_df, df_dict['QID_HEADER'], left_on='QID',
                             right_on='Id')

        if 'Content' not in merged_df:
            merged_df['Content'] = ''

        columns_to_cleanse = ['Payload #1', 'Request Method #1', 'Request URL #1',
                              'Request Headers #1', 'Response #1', 'Evidence #1',
                              'Description', 'Impact', 'Solution', 'Url', 'Content']

        for col in columns_to_cleanse:
            merged_df[col] = merged_df[col].apply(self.utils.cleanser)

        merged_df = merged_df.drop(['QID_y', 'QID_x'], axis=1)
        merged_df = merged_df.rename(columns={'Id': 'QID'})
        merged_df = merged_df.assign(**df_dict['SCAN_META'].to_dict(orient='records')[0])
        merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'], how='left', left_on=['Category', 'Severity Level'],
                             right_on=['Category', 'Severity'], suffixes=('Severity', 'CatSev'))

        try:
            merged_df = \
                merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered')]
        except Exception as e:
            print(e)

        return merged_df

    def download_file(self, file_id):
        report = self.qw.download_report(file_id)
        filename = str(file_id) + '.csv'
        file_out = open(filename, 'w')
        for line in report.splitlines():
            file_out.write(line + '\n')
        file_out.close()
        print('[ACTION] - File written to %s' % filename)
        return filename

    def remove_file(self, filename):
        os.remove(filename)

    def process_data(self, file_id, cleanup=True):
        """Downloads a file from qualys and normalizes it"""
@@ -411,11 +770,55 @@ class qualysWebAppReport:
        print('[ACTION] - Downloading file ID: %s' % file_id)
        report_data = self.grab_sections(download_file)
        merged_data = self.data_normalizer(report_data)
        merged_data.sort_index(axis=1, inplace=True)
        # TODO cleanup old data (delete)

        return merged_data

    def whisper_reports(self, report_id, updated_date, cleanup=False):
        """
        report_id: App ID
        updated_date: Last time scan was ran for app_id
        """
        vuln_ready = None
        try:
            if 'Z' in updated_date:
                updated_date = self.utils.iso_to_epoch(updated_date)
            report_name = 'qualys_web_' + str(report_id) \
                          + '_{last_updated}'.format(last_updated=updated_date) \
                          + '.csv'
            if os.path.isfile(report_name):
                print('[ACTION] - File already exist! Skipping...')
                pass
            else:
                print('[ACTION] - Generating report for %s' % report_id)
                status = self.qw.create_report(report_id)
                root = objectify.fromstring(status)
                if root.responseCode == 'SUCCESS':
                    print('[INFO] - Successfully generated report for webapp: %s'
                          % report_id)
                    generated_report_id = root.data.Report.id
                    print('[INFO] - New Report ID: %s'
                          % generated_report_id)
                    vuln_ready = self.process_data(generated_report_id)
                    vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
                    print('[SUCCESS] - Report written to %s'
                          % report_name)
                    if cleanup:
                        print('[ACTION] - Removing report %s'
                              % generated_report_id)
                        cleaning_up = \
                            self.qw.delete_report(generated_report_id)
                        self.remove_file(str(generated_report_id) + '.csv')
                        print('[ACTION] - Deleted report: %s'
                              % generated_report_id)
                else:
                    print('Could not process report ID: %s' % status)
        except Exception as e:
            print('[ERROR] - Could not process %s - %s' % (report_id, e))
        return vuln_ready


maxInt = sys.maxsize
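
The paging arithmetic in get_all_scans above can be read in isolation. The following is a hedged, standalone restatement of the same offset/limit walk, not code from the commit:

def paged_offsets(total, limit=1000):
    """Yield 1-based (offset, page_size) pairs the way get_all_scans pages the WAS scan
    search: full pages of `limit`, with the final page trimmed to the remaining count."""
    for i in range(0, total, limit):
        yield i + 1, min(limit, total - i)

# For example, 2300 finished scans with the default limit of 1000:
assert list(paged_offsets(2300)) == [(1, 1000), (1001, 1000), (2001, 300)]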

View File

@@ -4,7 +4,7 @@ __author__ = 'Austin Taylor'
from base.config import vwConfig
from frameworks.nessus import NessusAPI
from frameworks.qualys import qualysScanReport
from utils.cli import bcolors
import pandas as pd
from lxml import objectify
@@ -411,11 +411,22 @@ class vulnWhispererQualys(vulnWhispererBase):
    ):
        super(vulnWhispererQualys, self).__init__(config=config, )
        self.qualys_scan = qualysScanReport(config=config)
        self.latest_scans = self.qualys_scan.qw.get_all_scans()
        self.directory_check()

    def directory_check(self):
        if not os.path.exists(self.write_path):
            os.makedirs(self.write_path)
            self.vprint('{info} Directory created at {scan} - Skipping creation'.format(
                scan=self.write_path, info=bcolors.INFO))
        else:
            os.path.exists(self.write_path)
            self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
                scan=self.write_path, info=bcolors.INFO))

    def whisper_reports(self, report_id, updated_date, cleanup=True):
        """
        report_id: App ID
        updated_date: Last time scan was ran for app_id
@@ -424,16 +435,30 @@ class vulnWhispererQualys(vulnWhispererBase):
        try:
            if 'Z' in updated_date:
                updated_date = self.qualys_scan.utils.iso_to_epoch(updated_date)
            report_name = 'qualys_web_' + str(report_id) \
                          + '_{last_updated}'.format(last_updated=updated_date) \
                          + '.csv'
            """
            record_meta = (
                scan_name,
                app_id,
                norm_time,
                report_name,
                time.time(),
                clean_csv.shape[0],
                'qualys',
                uuid,
                1,
            )
            """
            #self.record_insert(record_meta)

            if os.path.isfile(self.path_check(report_name)):
                print('{action} - File already exist! Skipping...'.format(action=bcolors.ACTION))
                pass
            else:
                print('{action} - Generating report for %s'.format(action=bcolors.ACTION) % report_id)
                status = self.qualys_scan.qw.create_report(report_id)
                root = objectify.fromstring(status)
                if root.responseCode == 'SUCCESS':
                    print('{info} - Successfully generated report for webapp: %s'.format(info=bcolors.INFO) \
@@ -441,16 +466,17 @@ class vulnWhispererQualys(vulnWhispererBase):
                    generated_report_id = root.data.Report.id
                    print('{info} - New Report ID: %s'.format(info=bcolors.INFO) \
                          % generated_report_id)
                    vuln_ready = self.qualys_scan.process_data(generated_report_id)
                    vuln_ready.to_csv(self.path_check(report_name), index=False, header=True)  # add when timestamp occured
                    print('{success} - Report written to %s'.format(success=bcolors.SUCCESS) \
                          % report_name)
                    print('{action} - Removing report %s'.format(action=bcolors.ACTION) \
                          % generated_report_id)
                    if cleanup:
                        cleaning_up = \
                            self.qualys_scan.qw.delete_report(generated_report_id)
                        os.remove(self.path_check(str(generated_report_id) + '.csv'))
                        print('{action} - Deleted report: %s'.format(action=bcolors.ACTION) \
                              % generated_report_id)
                else:
@@ -464,7 +490,7 @@ class vulnWhispererQualys(vulnWhispererBase):
        for app in self.latest_scans.iterrows():
            counter += 1
            print('Processing %s/%s' % (counter, len(self.latest_scans)))
            self.whisper_reports(app[1]['id'], app[1]['launchedDate'])
@@ -489,10 +515,14 @@ class vulnWhisperer(object):
    def whisper_vulnerabilities(self):
        if self.profile == 'nessus':
            vw = vulnWhispererNessus(config=self.config,
                                     username=self.username,
                                     password=self.password,
                                     verbose=self.verbose)
            vw.whisper_nessus()

        elif self.profile == 'qualys':
            vw = vulnWhispererQualys(config=self.config)
            vw.process_web_assets()
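
The "additional enrichments" called out in the commit message come from qualysScanReport.data_normalizer in frameworks/qualys.py, which broadcasts the single SCAN_META row onto every finding before merging in the category severities. A self-contained toy illustration of that pandas pattern follows; the data here is invented for the example.

import pandas as pd

findings = pd.DataFrame({'QID': [150001, 150084], 'Title': ['XSS', 'SQL Injection']})
scan_meta = pd.DataFrame([{'Web Application Name': 'demo-app', 'URL': 'http://demo.example'}])

# Same pattern as data_normalizer: one metadata record is applied to every row.
enriched = findings.assign(**scan_meta.to_dict(orient='records')[0])
# enriched now carries 'Web Application Name' and 'URL' on each finding.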