Database Integration
.idea/vcs.xml (new file, 6 lines, generated)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
bin/vuln_whisperer (new file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env python


#Written by Austin Taylor
#www.austintaylor.io

from vulnwhisp.vulnwhisp import vulnWhisperer
from vulnwhisp.utils.cli import bcolors

import argparse
import sys

def main():

    parser = argparse.ArgumentParser(description=""" VulnWhisperer is designed to create actionable data from\
                                     your vulnerability scans through aggregation of historical scans.""")
    parser.add_argument('-c', '--config', dest='config', required=False, default='frameworks.ini',
                        help='Path of config file')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=True,
                        help='Prints status out to screen (defaults to True)')
    args = parser.parse_args()

    try:

        vw = vulnWhisperer(config=args.config,
                           verbose=args.verbose)

        vw.whisper_nessus()

    except Exception as e:
        if args.verbose:
            print('{red}ERROR: {error}{endc}'.format(red=bcolors.FAIL, error=e, endc=bcolors.ENDC))
        sys.exit(0)



if __name__ == '__main__':
    main()
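For local testing without installing the console script, the same flow can be driven straight from Python. This is a minimal sketch; the config path is a placeholder (copy configs/frameworks_example.ini and fill in real values), and it mirrors the commented-out example at the bottom of vulnwhisp/vulnwhisp.py.

from vulnwhisp.vulnwhisp import vulnWhisperer

# 'configs/frameworks.ini' is a placeholder path; copy configs/frameworks_example.ini and edit it.
vw = vulnWhisperer(config='configs/frameworks.ini', verbose=True)
vw.whisper_nessus()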
configs/frameworks_example.ini (new executable file, 10 lines)
@@ -0,0 +1,10 @@
[nessus]
enabled=true
hostname=localhost
port=8834
username=nessus_username
password=nessus_password
write_path=path_to_scans
trash=false
verbose=true
setup.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from setuptools import setup, find_packages

setup(
    name='VulnWhisperer',
    version='1.0a',
    packages=find_packages(),
    url='https://github.com/austin-taylor/vulnwhisperer',
    license="""MIT License
Copyright (c) 2016 Austin Taylor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.""",
    author='Austin Taylor',
    author_email='email@austintaylor.io',
    description='Vulnerability assessment framework aggregator',
    scripts=['bin/vuln_whisperer']
)
vulnwhisp/__init__.py (new executable file, empty)
vulnwhisp/base/__init__.py (new file, empty)
vulnwhisp/base/config.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import os
import sys

# Support for python3
if (sys.version_info > (3, 0)):
    import configparser as cp
else:
    import ConfigParser as cp


class vwConfig(object):

    def __init__(self, config_in=None):
        self.config_in = config_in
        self.config = cp.RawConfigParser()
        self.config.read(self.config_in)

    def get(self, section, option):
        return self.config.get(section, option)

    def getbool(self, section, option):
        return self.config.getboolean(section, option)
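A quick sketch of how this wrapper is exercised, assuming the example settings file shipped in this commit (the path is illustrative):

from vulnwhisp.base.config import vwConfig

config = vwConfig(config_in='configs/frameworks_example.ini')
print(config.get('nessus', 'hostname'))     # 'localhost'
print(config.getbool('nessus', 'enabled'))  # True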
vulnwhisp/database/report_tracker.db (new binary file)
Binary file not shown.
vulnwhisp/frameworks/__init__.py (new executable file, empty)
vulnwhisp/frameworks/nessus.py (new executable file, 219 lines)
@@ -0,0 +1,219 @@
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import pandas as pd
from pandas.io.json import json_normalize
import pytz
from datetime import datetime
import json
import sys
import os
import time
import io


class NessusAPI(object):
    SESSION = '/session'
    FOLDERS = '/folders'
    SCANS = '/scans'
    SCAN_ID = SCANS + '/{scan_id}'
    HOST_VULN = SCAN_ID + '/hosts/{host_id}'
    PLUGINS = HOST_VULN + '/plugins/{plugin_id}'
    EXPORT = SCAN_ID + '/export'
    EXPORT_TOKEN_DOWNLOAD = '/scans/exports/{token_id}/download'
    EXPORT_FILE_DOWNLOAD = EXPORT + '/{file_id}/download'
    EXPORT_STATUS = EXPORT + '/{file_id}/status'
    EXPORT_HISTORY = EXPORT + '?history_id={history_id}'

    def __init__(self, hostname=None, port=None, username=None, password=None, verbose=True):
        if username is None or password is None:
            raise Exception('ERROR: Missing username or password.')

        self.user = username
        self.password = password
        self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
        self.verbose = verbose

        self.headers = {
            'Origin': self.base,
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.8',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.96 Safari/537.36',
            'Content-Type': 'application/json',
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Referer': self.base,
            'X-Requested-With': 'XMLHttpRequest',
            'Connection': 'keep-alive',
            'X-Cookie': None
        }

        self.login()
        self.scan_ids = self.get_scan_ids()

    def vprint(self, msg):
        if self.verbose:
            print(msg)

    def login(self):
        resp = self.get_token()
        if resp.status_code == 200:
            self.headers['X-Cookie'] = 'token={token}'.format(token=resp.json()['token'])
        else:
            raise Exception('[FAIL] Could not login to Nessus')

    def request(self, url, data=None, headers=None, method='POST', download=False, json=False):
        if headers is None:
            headers = self.headers
        timeout = 0
        success = False

        url = self.base + url
        methods = {'GET': requests.get,
                   'POST': requests.post,
                   'DELETE': requests.delete}

        while (timeout <= 10) and (not success):
            data = methods[method](url, data=data, headers=self.headers, verify=False)
            if data.status_code == 401:
                try:
                    self.login()
                    timeout += 1
                    self.vprint('[INFO] Token refreshed')
                except Exception as e:
                    self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
            else:
                success = True

        if json:
            data = data.json()
        if download:
            return data.content
        return data

    def get_token(self):
        auth = '{"username":"%s", "password":"%s"}' % (self.user, self.password)
        token = self.request(self.SESSION, data=auth, json=False)
        return token

    def logout(self):
        self.request(self.SESSION, method='DELETE')

    def get_folders(self):
        folders = self.request(self.FOLDERS, method='GET', json=True)
        return folders

    def get_scans(self):
        scans = self.request(self.SCANS, method='GET', json=True)
        return scans

    def get_scan_ids(self):
        scans = self.get_scans()
        scan_ids = [scan_id['id'] for scan_id in scans['scans']]
        return scan_ids

    def count_scan(self, scans, folder_id):
        count = 0
        for scan in scans:
            if scan['folder_id'] == folder_id: count = count + 1
        return count

    def print_scans(self, data):
        for folder in data['folders']:
            print("\\{0} - ({1})\\".format(folder['name'], self.count_scan(data['scans'], folder['id'])))
            for scan in data['scans']:
                if scan['folder_id'] == folder['id']:
                    print(
                        "\t\"{0}\" - sid:{1} - uuid: {2}".format(scan['name'].encode('utf-8'), scan['id'], scan['uuid']))

    def get_scan_details(self, scan_id):
        data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json=True)
        return data

    def get_scan_history(self, scan_id):
        data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json=True)
        return data['history']

    def get_scan_hosts(self, scan_id):
        data = self.request(self.SCAN_ID.format(scan_id=scan_id), method='GET', json=True)
        return data['hosts']

    def get_host_vulnerabilities(self, scan_id, host_id):
        query = self.HOST_VULN.format(scan_id=scan_id, host_id=host_id)
        data = self.request(query, method='GET', json=True)
        return data

    def get_plugin_info(self, scan_id, host_id, plugin_id):
        query = self.PLUGINS.format(scan_id=scan_id, host_id=host_id, plugin_id=plugin_id)
        data = self.request(query, method='GET', json=True)
        return data

    def export_scan(self, scan_id, history_id):
        data = {'format': 'csv'}
        query = self.EXPORT_REPORT.format(scan_id=scan_id, history_id=history_id)
        req = self.request(query, data=data, method='POST')
        return req

    def download_scan(self, scan_id=None, history=None, export_format="", chapters="", dbpasswd=""):
        running = True
        counter = 0

        data = {'format': export_format}
        if not history:
            query = self.EXPORT.format(scan_id=scan_id)
        else:
            query = self.EXPORT_HISTORY.format(scan_id=scan_id, history_id=history)
        scan_id = str(scan_id)
        req = self.request(query, data=json.dumps(data), method='POST', json=True)
        try:
            file_id = req['file']
            token_id = req['token']
        except Exception as e:
            print("[ERROR] %s" % e)
        print('Download for file id ' + str(file_id) + '.')
        while running:
            time.sleep(2)
            counter += 2
            report_status = self.request(self.EXPORT_STATUS.format(scan_id=scan_id, file_id=file_id), method='GET',
                                         json=True)
            running = report_status['status'] != 'ready'
            sys.stdout.write(".")
            sys.stdout.flush()
            if counter % 60 == 0:
                print("")

        print("")
        content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)
        return content

    @staticmethod
    def merge_dicts(*dict_args):
        """
        Given any number of dicts, shallow copy and merge into a new dict,
        precedence goes to key value pairs in latter dicts.
        """
        result = {}
        for dictionary in dict_args:
            result.update(dictionary)
        return result

    def get_utc_from_local(self, date_time, local_tz=None, epoch=True):
        date_time = datetime.fromtimestamp(date_time)
        if local_tz is None:
            local_tz = pytz.timezone('US/Central')
        else:
            local_tz = pytz.timezone(local_tz)
        # print date_time
        local_time = local_tz.normalize(local_tz.localize(date_time))
        local_time = local_time.astimezone(pytz.utc)
        if epoch:
            naive = local_time.replace(tzinfo=None)
            local_time = int((naive - datetime(1970, 1, 1)).total_seconds())
        return local_time

    def tz_conv(self, tz):
        time_map = {'Eastern Standard Time': 'US/Eastern',
                    'Central Standard Time': 'US/Central',
                    'Pacific Standard Time': 'US/Pacific',
                    'None': 'US/Central'}
        return time_map.get(tz, None)
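A rough usage sketch of this client on its own, assuming a reachable Nessus instance; the hostname, port, credentials and output filenames below are placeholders, not values from this commit:

from vulnwhisp.frameworks.nessus import NessusAPI

# Placeholder connection details for illustration only.
nessus = NessusAPI(hostname='localhost', port=8834,
                   username='nessus_username', password='nessus_password')

# scan_ids is populated at login; download each scan as CSV bytes and save it.
for scan_id in nessus.scan_ids:
    csv_bytes = nessus.download_scan(scan_id=scan_id, export_format='csv')
    with open('scan_{0}.csv'.format(scan_id), 'wb') as f:
        f.write(csv_bytes)
nessus.logout()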
vulnwhisp/utils/__init__.py (new file, empty)
vulnwhisp/utils/cli.py (new file, 16 lines)
@@ -0,0 +1,16 @@
class bcolors:
    """
    Utility to add colors to shell for scripts
    """
    HEADERS = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

    INFO = '{info}[INFO]{endc}'.format(info=OKBLUE, endc=ENDC)
    SUCCESS = '{green}[SUCCESS]{endc}'.format(green=OKGREEN, endc=ENDC)
    FAIL = '{red}[FAIL]{endc}'.format(red=FAIL, endc=ENDC)
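These constants produce the tagged status lines used throughout the commit; a trivial example of the intended output:

from vulnwhisp.utils.cli import bcolors

# Prints "[INFO] Gathering all scan data..." with the tag colored blue on ANSI-capable terminals.
print('{info} Gathering all scan data...'.format(info=bcolors.INFO))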
vulnwhisp/vulnwhisp.py (new executable file, 255 lines)
@@ -0,0 +1,255 @@

from base.config import vwConfig
from frameworks.nessus import NessusAPI
from utils.cli import bcolors
import pandas as pd
import sys
import os
import io
import time
import sqlite3

# TODO Create logging option which stores data about scan
import logging



class vulnWhisperer(object):

    def __init__(self, config=None, db_name='report_tracker.db', purge=False, verbose=None, debug=False):

        self.verbose = verbose
        self.nessus_connect = False
        self.develop = True
        self.purge = purge

        if config is not None:
            try:
                self.config = vwConfig(config_in=config)
                self.nessus_enabled = self.config.getbool('nessus', 'enabled')

                if self.nessus_enabled:
                    self.nessus_hostname = self.config.get('nessus', 'hostname')
                    self.nessus_port = self.config.get('nessus', 'port')
                    self.nessus_username = self.config.get('nessus', 'username')
                    self.nessus_password = self.config.get('nessus', 'password')
                    self.nessus_writepath = self.config.get('nessus', 'write_path')
                    self.nessus_trash = self.config.getbool('nessus', 'trash')
                    self.verbose = self.config.getbool('nessus', 'verbose')

                    try:
                        self.vprint(
                            '{info} Attempting to connect to nessus...'.format(info=bcolors.INFO))
                        self.nessus = NessusAPI(hostname=self.nessus_hostname,
                                                port=self.nessus_port,
                                                username=self.nessus_username,
                                                password=self.nessus_password)
                        self.nessus_connect = True
                        self.vprint(
                            '{success} Connected to nessus on {host}:{port}'.format(success=bcolors.SUCCESS,
                                                                                    host=self.nessus_hostname,
                                                                                    port=str(self.nessus_port)))
                    except Exception as e:
                        self.vprint(e)
                        raise Exception(
                            "{fail} Could not connect to nessus -- Please verify your settings in {config} are correct and try again.\nReason: {e}".format(config=self.config,
                                                                                                                                                            fail=bcolors.FAIL,
                                                                                                                                                            e=e))

            except Exception as e:

                self.vprint('{fail} Could not properly load your config!\nReason: {e}'.format(fail=bcolors.FAIL, e=e))
                sys.exit(0)

        if db_name is not None:
            self.database = os.path.abspath(os.path.join(os.path.dirname( __file__ ), 'database', db_name))
            self.conn = sqlite3.connect(self.database)
            self.cur = self.conn.cursor()
        else:
            self.vprint('{fail} Please specify a database to connect to!'.format(fail=bcolors.FAIL))
            exit(0)

        self.table_columns = ['scan_name',
                              'scan_id',
                              'last_modified',
                              'filename',
                              'download_time',
                              'record_count',
                              'source',
                              'uuid',
                              'processed']
        self.init()
        self.uuids = self.retrieve_uuids()
        self.processed = 0
        self.skipped = 0
        self.scan_list = []



    def vprint(self, msg):
        if self.verbose:
            print(msg)


    def create_table(self):
        self.cur.execute("create table if not exists scan_history (id integer primary key, scan_name text, scan_id integer, last_modified date, filename text, download_time date, record_count integer, source text, uuid text, processed integer)")
        self.conn.commit()

    def delete_table(self):
        self.cur.execute('drop table if exists scan_history')
        self.conn.commit()

    def init(self):
        if self.purge:
            self.delete_table()
        self.create_table()

    def cleanser(self, _data):
        repls = ('\n', '|||'), ('\r', '|||'), (',', ';')
        data = reduce(lambda a, kv: a.replace(*kv), repls, _data)
        return data

    def path_check(self, _data):
        if self.nessus_writepath:
            data = self.nessus_writepath + '/' + _data
        return data

    def scan_count(self, scans, completed=False):
        """

        :param scans: Pulls in available scans
        :param completed: Only return completed scans
        :return:
        """
        self.vprint('{info} Gathering all scan data...'.format(info=bcolors.INFO))
        scan_records = []
        for s in scans:
            if s:
                record = {}
                record['scan_id'] = s['id']
                record['scan_name'] = s.get('name', '')
                record['owner'] = s.get('owner', '')
                record['creation_date'] = s.get('creation_date', '')
                record['starttime'] = s.get('starttime', '')
                record['timezone'] = s.get('timezone', '')
                record['folder_id'] = s.get('folder_id', '')
                try:
                    for h in self.nessus.get_scan_history(s['id']):
                        record['uuid'] = h.get('uuid', '')
                        record['status'] = h.get('status', '')
                        record['history_id'] = h.get('history_id', '')
                        record['last_modification_date'] = h.get('last_modification_date', '')
                        record['norm_time'] = self.nessus.get_utc_from_local(int(record['last_modification_date']),
                                                                             local_tz=self.nessus.tz_conv(record['timezone']))
                        scan_records.append(record.copy())


                except Exception as e:
                    print(e)
                    pass

        if completed:
            scan_records = [s for s in scan_records if s['status'] == 'completed']
        return scan_records


    def record_insert(self, record):
        self.cur.execute("insert into scan_history({table_columns}) values (?,?,?,?,?,?,?,?,?)".format(
            table_columns=', '.join(self.table_columns)), record)

    def retrieve_uuids(self):
        """
        Retrieves UUIDs from database and checks list to determine which files need to be processed.
        :return:
        """
        self.conn.text_factory = str
        self.cur.execute('select uuid from scan_history')
        results = frozenset([r[0] for r in self.cur.fetchall()])
        return results


    def whisper_nessus(self):
        if self.nessus_connect:
            scan_data = self.nessus.get_scans()
            # print scan_data
            folders = scan_data['folders']
            scans = scan_data['scans']
            all_scans = self.scan_count(scans)
            scan_list = [scan for scan in all_scans if scan['uuid'] not in self.uuids]
            print scan_list, self.uuids
            self.vprint("{info} Identified {new} new scans to be processed".format(info=bcolors.INFO, new=len(scan_list)))
            #print scan_list, len(scan_list)
            # create scan subfolders
            for f in folders:
                if not os.path.exists(self.path_check(f['name'])):
                    if f['name'] == 'Trash' and self.nessus_trash:
                        os.makedirs(self.path_check(f['name']))
                    elif f['name'] != 'Trash':
                        os.makedirs(self.path_check(f['name']))
                else:
                    os.path.exists(self.path_check(f['name']))
                    self.vprint('{info} Directory already exists for {scan} - Skipping creation'.format(
                        scan=self.path_check(f['name']), info=bcolors.INFO))

            # try to download and save scans into the folder each belongs to
            scan_count = 0
            # TODO Rewrite this part to go through the scans that have already been processed
            for s in scan_list:
                scan_count += 1
                #self.vprint('%s/%s' % (scan_count, len(scan_list)))
                scan_name, scan_id, history_id,\
                norm_time, status, uuid = s['scan_name'], s['scan_id'], s['history_id'],\
                                          s['norm_time'], s['status'], s['uuid']

                # TODO Create directory sync function which scans the directory for files that exist already and populates the database

                folder_id = s['folder_id']
                scan_history = self.nessus.get_scan_history(scan_id)
                folder_name = next(f['name'] for f in folders if f['id'] == folder_id)
                if status == 'completed':
                    file_name = '%s_%s_%s_%s.%s' % (scan_name, scan_id, history_id, norm_time, 'csv')
                    repls = ('\\', '_'), ('/', '_'), ('/', '_'), (' ', '_')
                    file_name = reduce(lambda a, kv: a.replace(*kv), repls, file_name)
                    relative_path_name = self.path_check(folder_name + '/' + file_name)

                    if os.path.isfile(relative_path_name):
                        if self.develop:
                            csv_in = pd.read_csv(relative_path_name)
                            record_meta = (
                                scan_name, scan_id, norm_time, file_name, time.time(), csv_in.shape[0], 'nessus', uuid, 1)
                            self.record_insert(record_meta)
                            self.vprint(
                                "[INFO] File {filename} already exists! Updating database".format(filename=relative_path_name))
                    else:
                        file_req = self.nessus.download_scan(scan_id=scan_id, history=history_id, export_format='csv')
                        clean_csv = pd.read_csv(io.StringIO(file_req.decode('utf-8')))
                        if len(clean_csv) > 2:
                            self.vprint("Processing %s/%s for scan: %s" % (scan_count, len(scan_history), scan_name))
                            clean_csv['CVSS'] = clean_csv['CVSS'].astype(str).apply(self.cleanser)
                            clean_csv['CVE'] = clean_csv['CVE'].astype(str).apply(self.cleanser)
                            clean_csv['Description'] = clean_csv['Description'].astype(str).apply(self.cleanser)
                            clean_csv['Synopsis'] = clean_csv['Description'].astype(str).apply(self.cleanser)
                            clean_csv['Solution'] = clean_csv['Solution'].astype(str).apply(self.cleanser)
                            clean_csv['See Also'] = clean_csv['See Also'].astype(str).apply(self.cleanser)
                            clean_csv['Plugin Output'] = clean_csv['Plugin Output'].astype(str).apply(self.cleanser)
                            clean_csv.to_csv(relative_path_name, index=False)
                            record_meta = (
                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
                                1)
                            self.record_insert(record_meta)
                            self.vprint("{info} {filename} records written to {path} ".format(info=bcolors.INFO, filename=clean_csv.shape[0], path=file_name))
                        else:
                            record_meta = (
                                scan_name, scan_id, norm_time, file_name, time.time(), clean_csv.shape[0], 'nessus', uuid,
                                1)
                            self.record_insert(record_meta)
                            self.vprint(file_name + ' has no host available... Updating database and skipping!')
            self.conn.commit()
            self.conn.close()

        else:
            self.vprint('{fail} Failed to use scanner at {host}'.format(fail=bcolors.FAIL, host=self.nessus_hostname+':'+self.nessus_port))


#vw = vulnWhisperer(config='../configs/frameworks.ini', purge=False)
#vw.whisper_nessus()
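The scan_history table that whisper_nessus populates can be inspected with the standard sqlite3 module; a minimal sketch, with the database path assumed to be the bundled vulnwhisp/database/report_tracker.db:

import sqlite3

# Adjust the path to wherever report_tracker.db lives in your checkout.
conn = sqlite3.connect('vulnwhisp/database/report_tracker.db')
for row in conn.execute('select scan_name, filename, record_count, processed from scan_history'):
    print(row)
conn.close()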