Addition of submodules, update to connectors, base class start
.gitmodules (vendored, new file): 3 additions
@@ -0,0 +1,3 @@
+[submodule "qualysapi"]
+	path = deps/qualysapi
+	url = git@github.com:austin-taylor/qualysapi.git
@@ -1,9 +1,8 @@
-#!/usr/bin/env python
+#!/usr/bin/python
 # -*- coding: utf-8 -*-
 __author__ = 'Austin Taylor'

 #Written by Austin Taylor
 #www.austintaylor.io

 from vulnwhisp.vulnwhisp import vulnWhisperer
 from vulnwhisp.utils.cli import bcolors
 import os
deps/qualysapi/qualysapi/config.py (vendored): 34 changes
@@ -58,43 +58,43 @@ class QualysConnectConfig:
         self._cfgparse.read(self._cfgfile)

         # if 'info' doesn't exist, create the section.
-        if not self._cfgparse.has_section('info'):
-            self._cfgparse.add_section('info')
+        if not self._cfgparse.has_section('qualys'):
+            self._cfgparse.add_section('qualys')

         # Use default hostname (if one isn't provided).
-        if not self._cfgparse.has_option('info', 'hostname'):
+        if not self._cfgparse.has_option('qualys', 'hostname'):
             if self._cfgparse.has_option('DEFAULT', 'hostname'):
                 hostname = self._cfgparse.get('DEFAULT', 'hostname')
-                self._cfgparse.set('info', 'hostname', hostname)
+                self._cfgparse.set('qualys', 'hostname', hostname)
             else:
                 raise Exception("No 'hostname' set. QualysConnect does not know who to connect to.")

         # Use default max_retries (if one isn't provided).
-        if not self._cfgparse.has_option('info', 'max_retries'):
+        if not self._cfgparse.has_option('qualys', 'max_retries'):
             self.max_retries = qcs.defaults['max_retries']
         else:
-            self.max_retries = self._cfgparse.get('info', 'max_retries')
+            self.max_retries = self._cfgparse.get('qualys', 'max_retries')
         try:
             self.max_retries = int(self.max_retries)
         except Exception:
             logger.error('Value max_retries must be an integer.')
             print('Value max_retries must be an integer.')
             exit(1)
-        self._cfgparse.set('info', 'max_retries', str(self.max_retries))
+        self._cfgparse.set('qualys', 'max_retries', str(self.max_retries))
         self.max_retries = int(self.max_retries)

         #Get template ID... user will need to set this to pull back CSV reports
-        if not self._cfgparse.has_option('report', 'template_id'):
+        if not self._cfgparse.has_option('qualys', 'template_id'):
             self.report_template_id = qcs.defaults['template_id']
         else:
-            self.report_template_id = self._cfgparse.get('report', 'template_id')
+            self.report_template_id = self._cfgparse.get('qualys', 'template_id')
         try:
             self.report_template_id = int(self.report_template_id)
         except Exception:
             logger.error('Report Template ID Must be set and be an integer')
             print('Value template ID must be an integer.')
             exit(1)
-        self._cfgparse.set('report', 'template_id', str(self.max_retries))
+        self._cfgparse.set('qualys', 'template_id', str(self.max_retries))
         self.max_retries = int(self.max_retries)

         # Proxy support
@@ -168,18 +168,18 @@ class QualysConnectConfig:
             self.proxies = None

         # ask username (if one doesn't exist)
-        if not self._cfgparse.has_option('info', 'username'):
+        if not self._cfgparse.has_option('qualys', 'username'):
             username = input('QualysGuard Username: ')
-            self._cfgparse.set('info', 'username', username)
+            self._cfgparse.set('qualys', 'username', username)

         # ask password (if one doesn't exist)
-        if not self._cfgparse.has_option('info', 'password'):
+        if not self._cfgparse.has_option('qualys', 'password'):
             password = getpass.getpass('QualysGuard Password: ')
-            self._cfgparse.set('info', 'password', password)
+            self._cfgparse.set('qualys', 'password', password)

-        logging.debug(self._cfgparse.items('info'))
+        logging.debug(self._cfgparse.items('qualys'))

         if remember_me or remember_me_always:
             # Let's create that config file for next time...

@@ -211,8 +211,8 @@ class QualysConnectConfig:

     def get_auth(self):
         ''' Returns username from the configfile. '''
-        return (self._cfgparse.get('info', 'username'), self._cfgparse.get('info', 'password'))
+        return (self._cfgparse.get('qualys', 'username'), self._cfgparse.get('qualys', 'password'))

     def get_hostname(self):
         ''' Returns hostname. '''
-        return self._cfgparse.get('info', 'hostname')
+        return self._cfgparse.get('qualys', 'hostname')
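Note: the 'info' and 'report' lookups above are consolidated into a single 'qualys' section. A minimal standalone sketch of the resulting lookup pattern, using the stdlib parser (section and option names come from the diff; the values are invented examples):

try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2, which qualysapi targets

cfg = configparser.RawConfigParser()
cfg.add_section('qualys')                    # the section the patched code now reads
cfg.set('qualys', 'hostname', 'qualysapi.qualys.com')
cfg.set('qualys', 'username', 'example_user')
cfg.set('qualys', 'password', 'example_password')
cfg.set('qualys', 'template_id', '12345')    # invented example value

# Same call pattern as get_hostname() / get_auth() above.
print(cfg.get('qualys', 'hostname'))
print((cfg.get('qualys', 'username'), cfg.get('qualys', 'password')))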
deps/qualysapi/qualysapi/connector.py (vendored): 2 changes
@@ -252,7 +252,7 @@ class QGConnector(api_actions.QGActions):
         url = self.url_api_version(api_version)
         #
         # Set up headers.
-        headers = {"X-Requested-With": "Parag Baxi QualysAPI (python) v%s" % (qualysapi.version.__version__,)}
+        headers = {"X-Requested-With": "QualysAPI (python) v%s - VulnWhisperer" % (qualysapi.version.__version__,)}
         logger.debug('headers =\n%s' % (str(headers)))
         # Portal API takes in XML text, requiring custom header.
         if api_version in ('am', 'was', 'am2'):
deps/qualysapi/qualysapi/version.py (vendored): 2 changes
@@ -1,3 +1,3 @@
-__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
+__author__ = 'Austin Taylor'
 __pkgname__ = 'qualysapi'
 __version__ = '4.1.0'
deps/qualysapi/setup.py (vendored): 15 changes
@@ -2,14 +2,15 @@

 from __future__ import absolute_import
 import os
+import setuptools
 import sys
 try:
     from setuptools import setup
 except ImportError:
     from distutils.core import setup

-__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
-__copyright__ = 'Copyright 2011-2013, Parag Baxi'
+__author__ = 'Austin Taylor <vulnWhisperer@austintaylor.io>'
+__copyright__ = 'Copyright 2017, Austin Taylor'
 __license__ = 'BSD-new'
 # Make pyflakes happy.
 __pkgname__ = None

@@ -27,14 +28,14 @@ def read(fname):

 setup(name=__pkgname__,
       version=__version__,
-      author='Parag Baxi',
-      author_email='parag.baxi@gmail.com',
-      description='QualysGuard(R) Qualys API Package',
+      author='Austin Taylor',
+      author_email='vulnWhisperer@austintaylor.io',
+      description='QualysGuard(R) Qualys API Package modified for VulnWhisperer',
       license='BSD-new',
       keywords='Qualys QualysGuard API helper network security',
-      url='https://github.com/paragbaxi/qualysapi',
+      url='https://github.com/austin-taylor/qualysapi',
       package_dir={'': '.'},
-      packages=['qualysapi', ],
+      packages=setuptools.find_packages(),
      # package_data={'qualysapi':['LICENSE']},
      # scripts=['src/scripts/qhostinfo.py', 'src/scripts/qscanhist.py', 'src/scripts/qreports.py'],
      long_description=read('README.md'),
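Note: a quick standalone check (not part of the commit) of what the packaging change above does; find_packages() discovers the 'qualysapi' package automatically instead of relying on the hard-coded packages=['qualysapi'] list. Run from the repository root:

import setuptools

# Expected to include 'qualysapi' (plus any subpackages that have an __init__.py).
print(setuptools.find_packages())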
@@ -0,0 +1 @@
+from utils.cli import *
@@ -311,9 +311,7 @@ class qualysWebAppReport:

     def grab_sections(self, report):
         all_dataframes = []
         category_list = []
-        with open(report, 'rb') as csvfile:
-            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
         all_dataframes.append(pd.DataFrame(self.grab_section(report,
                               self.WEB_APP_VULN_BLOCK,
                               end=[self.WEB_APP_SENSITIVE_BLOCK,

@@ -365,7 +363,7 @@ class qualysWebAppReport:

         merged_df = pd.concat([dataframes[0], dataframes[1],
                                dataframes[2]], axis=0,
-                               ignore_index=False).fillna('N/A')
+                               ignore_index=False).fillna('')
         merged_df = pd.merge(merged_df, dataframes[3], left_on='QID',
                              right_on='Id')

@@ -434,9 +432,10 @@ class qualysWebAppReport:
         report_id: App ID
         updated_date: Last time scan was ran for app_id
         """
+        vuln_ready = None

         try:
-            vuln_ready = None

             if 'Z' in updated_date:
                 updated_date = self.iso_to_epoch(updated_date)
             report_name = 'qualys_web_' + str(report_id) \

@@ -457,7 +456,7 @@ class qualysWebAppReport:
                    % generated_report_id)
             vuln_ready = self.process_data(generated_report_id)

-            vuln_ready.to_csv(report_name, index=False)  # add when timestamp occured
+            vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
             print('[SUCCESS] - Report written to %s' \
                   % report_name)
             print('[ACTION] - Removing report %s' \

@@ -474,4 +473,3 @@ class qualysWebAppReport:
         return vuln_ready

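Note: a toy illustration (not from the repository) of the merge pattern touched above; three report sections are stacked, NaN cells become empty strings (the diff switches the filler from 'N/A' to ''), and the result is joined to a lookup frame on QID == Id. Column names other than QID and Id are invented:

import pandas as pd

sections = [
    pd.DataFrame({'QID': [1], 'Finding': ['XSS']}),
    pd.DataFrame({'QID': [2], 'Finding': ['SQLi']}),
    pd.DataFrame({'QID': [3]}),                      # missing column -> NaN
]
merged_df = pd.concat(sections, axis=0, ignore_index=False).fillna('')
lookup = pd.DataFrame({'Id': [1, 2, 3], 'Severity': [3, 5, 2]})
merged_df = pd.merge(merged_df, lookup, left_on='QID', right_on='Id')
print(merged_df)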
@ -1,3 +1,6 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
__author__ = 'Austin Taylor'
|
||||
|
||||
from base.config import vwConfig
|
||||
from frameworks.nessus import NessusAPI
|
||||
@@ -10,114 +13,148 @@ import time
 import sqlite3

 # TODO Create logging option which stores data about scan

+import logging
+
+
+class vulnWhispererBase(object):
+    def __init__(
+        self,
+        config=None,
+        db_name='report_tracker.db',
+        purge=False,
+        verbose=None,
+        debug=False,
+        username=None,
+        password=None,
+    ):
+        pass
+

 class vulnWhisperer(object):

-    def __init__(self, config=None, db_name='report_tracker.db', purge=False, verbose=None, debug=False, username=None, password=None):
+    def __init__(
+        self,
+        config=None,
+        db_name='report_tracker.db',
+        purge=False,
+        verbose=None,
+        debug=False,
+        username=None,
+        password=None,
+    ):

         self.verbose = verbose
         self.nessus_connect = False
         self.develop = True
         self.purge = purge

         if config is not None:
             try:
                 self.config = vwConfig(config_in=config)
-                self.nessus_enabled = self.config.getbool('nessus', 'enabled')
+                self.nessus_enabled = self.config.getbool('nessus',
+                        'enabled')

                 if self.nessus_enabled:
-                    self.nessus_hostname = self.config.get('nessus', 'hostname')
+                    self.nessus_hostname = self.config.get('nessus',
+                            'hostname')
                     self.nessus_port = self.config.get('nessus', 'port')

                     if password:
                         self.nessus_password = password
                     else:
-                        self.nessus_password = self.config.get('nessus', 'password')
+                        self.nessus_password = self.config.get('nessus'
+                                , 'password')

                     if username:
                         self.nessus_username = username
                     else:
-                        self.nessus_username = self.config.get('nessus', 'username')
+                        self.nessus_username = self.config.get('nessus'
+                                , 'username')

-                    self.nessus_writepath = self.config.get('nessus', 'write_path')
-                    self.nessus_dbpath = self.config.get('nessus', 'db_path')
-                    self.nessus_trash = self.config.getbool('nessus', 'trash')
-                    self.verbose = self.config.getbool('nessus', 'verbose')
+                    self.nessus_writepath = self.config.get('nessus',
+                            'write_path')
+                    self.nessus_dbpath = self.config.get('nessus',
+                            'db_path')
+                    self.nessus_trash = self.config.getbool('nessus',
+                            'trash')
+                    self.verbose = self.config.getbool('nessus',
+                            'verbose')

                     try:
-                        self.vprint(
-                            '{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
-                        self.nessus = NessusAPI(hostname=self.nessus_hostname,
-                                                port=self.nessus_port,
-                                                username=self.nessus_username,
-                                                password=self.nessus_password)
+                        self.vprint('{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
+                        self.nessus = \
+                            NessusAPI(hostname=self.nessus_hostname,
+                                      port=self.nessus_port,
+                                      username=self.nessus_username,
+                                      password=self.nessus_password)
                         self.nessus_connect = True
-                        self.vprint(
-                            '{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
-                                                                                    host=self.nessus_hostname,
-                                                                                    port=str(self.nessus_port)))
+                        self.vprint('{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
+                                host=self.nessus_hostname,
+                                port=str(self.nessus_port)))
                     except Exception as e:
                         self.vprint(e)
                         raise Exception(
-                            "{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}".format(config=self.config,
-                                                                                                                                                            fail=bcolors.FAIL,
-                                                                                                                                                            e=e))
+                            '{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}'.format(
+                                config=self.config,
+                                fail=bcolors.FAIL, e=e))
             except Exception as e:

-                self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL, e=e))
+                self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL,
+                            e=e))
                 sys.exit(0)

         if db_name is not None:
             if self.nessus_dbpath:
-                self.database = os.path.join(self.nessus_dbpath, db_name)
+                self.database = os.path.join(self.nessus_dbpath,
+                        db_name)
             else:
-                self.database = os.path.abspath(os.path.join(os.path.dirname( __file__ ), 'database', db_name))
+                self.database = \
+                    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                    'database', db_name))

             try:
                 self.conn = sqlite3.connect(self.database)
                 self.cur = self.conn.cursor()
-                self.vprint("{info} Connected to database at {loc}".format(info=bcolors.INFO, loc=self.database))
+                self.vprint('{info} Connected to database at {loc}'.format(info=bcolors.INFO,
+                            loc=self.database))
             except Exception as e:
-                self.vprint("{fail} Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist".format(e=e, fail=bcolors.FAIL, loc=self.database))
+                self.vprint(
+                    '{fail} Could not connect to database at {loc}\nReason: {e} - Please ensure the path exist'.format(
+                        e=e,
+                        fail=bcolors.FAIL, loc=self.database))
         else:

             self.vprint('{fail} Please specify a database to connect to!'.format(fail=bcolors.FAIL))
             exit(0)

-        self.table_columns = ['scan_name',
-                              'scan_id',
-                              'last_modified',
-                              'filename',
-                              'download_time',
-                              'record_count',
-                              'source',
-                              'uuid',
-                              'processed']
+        self.table_columns = [
+            'scan_name',
+            'scan_id',
+            'last_modified',
+            'filename',
+            'download_time',
+            'record_count',
+            'source',
+            'uuid',
+            'processed',
+        ]
         self.init()
         self.uuids = self.retrieve_uuids()
         self.processed = 0
         self.skipped = 0
         self.scan_list = []

     def vprint(self, msg):
         if self.verbose:
-            print(msg)
+            print msg

     def create_table(self):
-        self.cur.execute("create table if not exists scan_history (id integer primary key, scan_name text, scan_id integer, last_modified date, filename text, download_time date, record_count integer, source text, uuid text, processed integer)")
+        self.cur.execute(
+            'CREATE TABLE IF NOT EXISTS scan_history (id INTEGER PRIMARY KEY, scan_name TEXT, scan_id INTEGER, last_modified DATE, filename TEXT, download_time DATE, record_count INTEGER, source TEXT, uuid TEXT, processed INTEGER)'
+            )
         self.conn.commit()

     def delete_table(self):
-        self.cur.execute('drop table if exists scan_history')
+        self.cur.execute('DROP TABLE IF EXISTS scan_history')
         self.conn.commit()

     def init(self):
@@ -126,7 +163,7 @@ class vulnWhisperer(object):
             self.create_table()

     def cleanser(self, _data):
-        repls = ('\n', '|||'), ('\r', '|||'), (',',';')
+        repls = (('\n', '|||'), ('\r', '|||'), (',', ';'))
         data = reduce(lambda a, kv: a.replace(*kv), repls, _data)
         return data
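Note: a minimal standalone sketch of the cleanser() behaviour above; newlines and carriage returns become '|||' and commas become ';' so a field can sit safely in one CSV cell. (The method relies on the Python 2 builtin reduce; in Python 3 it lives in functools.)

from functools import reduce

repls = (('\n', '|||'), ('\r', '|||'), (',', ';'))
sample = 'line one\nline two, with a comma'
print(reduce(lambda a, kv: a.replace(*kv), repls, sample))
# -> line one|||line two; with a comma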
@@ -142,6 +179,7 @@ class vulnWhisperer(object):
         :param completed: Only return completed scans
         :return:
         """
+
         self.vprint('{info} Gathering all scan data... this may take a while...'.format(info=bcolors.INFO))
         scan_records = []
         for s in scans:
@@ -159,24 +197,29 @@ class vulnWhisperer(object):
                     record['uuid'] = h.get('uuid', '')
                     record['status'] = h.get('status', '')
                     record['history_id'] = h.get('history_id', '')
-                    record['last_modification_date'] = h.get('last_modification_date', '')
-                    record['norm_time'] = self.nessus.get_utc_from_local(int(record['last_modification_date']),
-                                                                         local_tz=self.nessus.tz_conv(record['timezone']))
+                    record['last_modification_date'] = \
+                        h.get('last_modification_date', '')
+                    record['norm_time'] = \
+                        self.nessus.get_utc_from_local(int(record['last_modification_date'
+                            ]),
+                            local_tz=self.nessus.tz_conv(record['timezone'
+                            ]))
                     scan_records.append(record.copy())

             except Exception as e:
-                #print(e)
+
+                # print(e)
+
                 pass

         if completed:
-            scan_records = [s for s in scan_records if s['status'] == 'completed']
+            scan_records = [s for s in scan_records if s['status']
+                            == 'completed']
         return scan_records

     def record_insert(self, record):
-        self.cur.execute("insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?)".format(
-            table_columns=', '.join(self.table_columns)), record)
+        self.cur.execute('insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?)'.format(
+            table_columns=', '.join(self.table_columns)),
+            record)
         self.conn.commit()

     def retrieve_uuids(self):
@@ -184,12 +227,12 @@ class vulnWhisperer(object):
         Retrieves UUIDs from database and checks list to determine which files need to be processed.
         :return:
         """

         self.conn.text_factory = str
-        self.cur.execute('select uuid from scan_history')
+        self.cur.execute('SELECT uuid FROM scan_history')
         results = frozenset([r[0] for r in self.cur.fetchall()])
         return results

     def whisper_nessus(self):
         if self.nessus_connect:
             scan_data = self.nessus.get_scans()
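Note: a standalone sketch (in-memory database, invented values) of the tracking flow implemented above; create_table() defines scan_history, record_insert() writes one row per downloaded scan, and retrieve_uuids() returns the set used to skip scans that were already processed:

import sqlite3
import time

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS scan_history (id INTEGER PRIMARY KEY, '
            'scan_name TEXT, scan_id INTEGER, last_modified DATE, filename TEXT, '
            'download_time DATE, record_count INTEGER, source TEXT, uuid TEXT, '
            'processed INTEGER)')

columns = ['scan_name', 'scan_id', 'last_modified', 'filename', 'download_time',
           'record_count', 'source', 'uuid', 'processed']
record = ('weekly_scan', 42, 1500000000, 'weekly_scan_42.csv', time.time(),
          120, 'nessus', 'example-uuid-0001', 1)       # invented example row
cur.execute('insert into scan_history({0}) values (?,?,?,?,?,?,?,?,?)'.format(
    ', '.join(columns)), record)
conn.commit()

uuids = frozenset(r[0] for r in cur.execute('SELECT uuid FROM scan_history'))
print('example-uuid-0001' in uuids)   # True -> this scan would be skipped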
@@ -197,16 +240,20 @@ class vulnWhisperer(object):
             scans = scan_data['scans']
             all_scans = self.scan_count(scans)
             if self.uuids:
-                scan_list = [scan for scan in all_scans if (scan['uuid'] not in self.uuids and scan['status']=='completed')]
+                scan_list = [scan for scan in all_scans if scan['uuid']
+                             not in self.uuids and scan['status']
+                             == 'completed']
             else:
                 scan_list = all_scans
-            self.vprint("{info} Identified {new} scans to be processed".format(info=bcolors.INFO, new=len(scan_list)))
+            self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
+                        new=len(scan_list)))

             if not scan_list:
-                self.vprint("{info} No new scans to process. Exiting...".format(info=bcolors.INFO))
+                self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
                 exit(0)

             # Create scan subfolders

             for f in folders:
                 if not os.path.exists(self.path_check(f['name'])):
                     if f['name'] == 'Trash' and self.nessus_trash:
@@ -216,63 +263,136 @@ class vulnWhisperer(object):
                 else:
                     os.path.exists(self.path_check(f['name']))
                     self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
-                        scan=self.path_check(f['name']), info=bcolors.INFO))
+                        scan=self.path_check(f['name'
+                        ]), info=bcolors.INFO))

             # try download and save scans into each folder the belong to

             scan_count = 0

             # TODO Rewrite this part to go through the scans that have aleady been processed

             for s in scan_list:
                 scan_count += 1
-                scan_name, scan_id, history_id,\
-                norm_time, status, uuid = s['scan_name'], s['scan_id'], s['history_id'],\
-                s['norm_time'], s['status'], s['uuid']
+                (
+                    scan_name,
+                    scan_id,
+                    history_id,
+                    norm_time,
+                    status,
+                    uuid,
+                ) = (
+                    s['scan_name'],
+                    s['scan_id'],
+                    s['history_id'],
+                    s['norm_time'],
+                    s['status'],
+                    s['uuid'],
+                )

                 # TODO Create directory sync function which scans the directory for files that exist already and populates the database

                 folder_id = s['folder_id']
                 scan_history = self.nessus.get_scan_history(scan_id)
-                folder_name = next(f['name'] for f in folders if f['id'] == folder_id)
+                folder_name = next(f['name'] for f in folders if f['id'
+                                   ] == folder_id)
                 if status == 'completed':
-                    file_name = '%s_%s_%s_%s.%s' % (scan_name, scan_id, history_id, norm_time, 'csv')
-                    repls = ('\\', '_'), ('/', '_'), ('/', '_'), (' ', '_')
-                    file_name = reduce(lambda a, kv: a.replace(*kv), repls, file_name)
-                    relative_path_name = self.path_check(folder_name + '/' + file_name)
+                    file_name = '%s_%s_%s_%s.%s' % (scan_name, scan_id,
+                            history_id, norm_time, 'csv')
+                    repls = (('\\', '_'), ('/', '_'), ('/', '_'), (' ',
+                             '_'))
+                    file_name = reduce(lambda a, kv: a.replace(*kv),
+                            repls, file_name)
+                    relative_path_name = self.path_check(folder_name
+                            + '/' + file_name)

                     if os.path.isfile(relative_path_name):
                         if self.develop:
                             csv_in = pd.read_csv(relative_path_name)
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), csv_in.shape[0], 'nessus', uuid, 1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                csv_in.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
                             self.vprint(
-                                "{info} File {filename} already exist! Updating database".format(info=bcolors.INFO, filename=relative_path_name))
+                                '{info} File {filename} already exist! Updating database'.format(info=bcolors.INFO,
+                                filename=relative_path_name))
                     else:
-                        file_req = self.nessus.download_scan(scan_id=scan_id, history=history_id, export_format='csv')
-                        clean_csv = pd.read_csv(io.StringIO(file_req.decode('utf-8')))
+                        file_req = \
+                            self.nessus.download_scan(scan_id=scan_id,
+                                history=history_id, export_format='csv')
+                        clean_csv = \
+                            pd.read_csv(io.StringIO(file_req.decode('utf-8'
+                            )))
                         if len(clean_csv) > 2:
-                            self.vprint("Processing %s/%s for scan: %s" % (scan_count, len(scan_history), scan_name))
-                            clean_csv['CVSS'] = clean_csv['CVSS'].astype(str).apply(self.cleanser)
-                            clean_csv['CVE'] = clean_csv['CVE'].astype(str).apply(self.cleanser)
-                            clean_csv['Description'] = clean_csv['Description'].astype(str).apply(self.cleanser)
-                            clean_csv['Synopsis'] = clean_csv['Description'].astype(str).apply(self.cleanser)
-                            clean_csv['Solution'] = clean_csv['Solution'].astype(str).apply(self.cleanser)
-                            clean_csv['See Also'] = clean_csv['See Also'].astype(str).apply(self.cleanser)
-                            clean_csv['Plugin Output'] = clean_csv['Plugin Output'].astype(str).apply(self.cleanser)
-                            clean_csv.to_csv(relative_path_name, index=False)
+                            self.vprint('Processing %s/%s for scan: %s'
+                                        % (scan_count, len(scan_history),
+                                        scan_name))
+                            clean_csv['CVSS'] = clean_csv['CVSS'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['CVE'] = clean_csv['CVE'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Description'] = \
+                                clean_csv['Description'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Synopsis'] = \
+                                clean_csv['Description'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Solution'] = clean_csv['Solution'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['See Also'] = clean_csv['See Also'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv['Plugin Output'] = \
+                                clean_csv['Plugin Output'
+                                    ].astype(str).apply(self.cleanser)
+                            clean_csv.to_csv(relative_path_name,
+                                    index=False)
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
-                                1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                clean_csv.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
-                            self.vprint("{info} {filename} records written to {path} ".format(info=bcolors.INFO, filename=clean_csv.shape[0], path=file_name))
+                            self.vprint('{info} {filename} records written to {path} '.format(info=bcolors.INFO,
+                                        filename=clean_csv.shape[
+                                        0],
+                                        path=file_name))
                         else:
                             record_meta = (
-                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
-                                1)
+                                scan_name,
+                                scan_id,
+                                norm_time,
+                                file_name,
+                                time.time(),
+                                clean_csv.shape[0],
+                                'nessus',
+                                uuid,
+                                1,
+                                )
                             self.record_insert(record_meta)
-                            self.vprint(file_name + ' has no host available... Updating database and skipping!')
+                            self.vprint(file_name
+                                    + ' has no host available... Updating database and skipping!'
+                                    )
             self.conn.close()
-            "{success} Scan aggregation complete! Connection to database closed.".format(success=bcolors.SUCCESS)
+            '{success} Scan aggregation complete! Connection to database closed.'.format(success=bcolors.SUCCESS)
         else:
-            self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL, host=self.nessus_hostname+':'+self.nessus_port))
+            self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL,
+                        host=self.nessus_hostname + ':'
+                        + self.nessus_port))