From 8b63aa4fbca8bb6db9815990136b3e2dbebb220f Mon Sep 17 00:00:00 2001
From: Austin Taylor
Date: Sun, 11 Feb 2018 16:02:16 -0500
Subject: [PATCH] Addition of OpenVAS Connector

---
 configs/frameworks_example.ini  |   9 ++
 vulnwhisp/frameworks/openvas.py | 173 ++++++++++++++++++++++++++++++++
 2 files changed, 182 insertions(+)
 create mode 100644 vulnwhisp/frameworks/openvas.py

diff --git a/configs/frameworks_example.ini b/configs/frameworks_example.ini
index 2b8d7fa..58241ab 100755
--- a/configs/frameworks_example.ini
+++ b/configs/frameworks_example.ini
@@ -24,6 +24,15 @@ verbose=true
 max_retries = 10
 template_id = 126024
 
+[openvas]
+enabled = true
+hostname = localhost
+username = exampleuser
+password = examplepass
+write_path=/opt/vulnwhisp/openvas/
+db_path=/opt/vulnwhisp/database
+verbose=true
+
 #[proxy]
 ; This section is optional. Leave it out if you're not using a proxy.
 ; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies
diff --git a/vulnwhisp/frameworks/openvas.py b/vulnwhisp/frameworks/openvas.py
new file mode 100644
index 0000000..a75b7d8
--- /dev/null
+++ b/vulnwhisp/frameworks/openvas.py
@@ -0,0 +1,173 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+__author__ = 'Austin Taylor'
+
+import datetime as dt
+import io
+import json
+import pandas as pd
+import requests
+from bs4 import BeautifulSoup
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+
+
+class OpenVAS_API(object):
+    OMP = '/omp'
+
+    def __init__(self,
+                 hostname=None,
+                 port=None,
+                 username=None,
+                 password=None,
+                 verbose=True):
+        if username is None or password is None:
+            raise Exception('ERROR: Missing username or password.')
+
+        self.username = username
+        self.password = password
+        self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
+        self.verbose = verbose
+        self.processed_reports = 0
+
+        self.headers = {
+            'Origin': self.base,
+            'Accept-Encoding': 'gzip, deflate, br',
+            'Accept-Language': 'en-US,en;q=0.8',
+            'User-Agent': 'VulnWhisperer for OpenVAS',
+            'Content-Type': 'application/x-www-form-urlencoded',
+            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+            'Cache-Control': 'max-age=0',
+            'Referer': self.base,
+            'X-Requested-With': 'XMLHttpRequest',
+            'Connection': 'keep-alive',
+        }
+
+        self.login()
+
+        self.open_vas_reports = self.get_reports()
+
+    def vprint(self, msg):
+        if self.verbose:
+            print(msg)
+
+    def login(self):
+        resp = self.get_token()
+        if resp.status_code == 200:
+            xml_response = BeautifulSoup(resp.content, 'lxml')
+            self.token = xml_response.find(attrs={'id': 'gsa-token'}).text
+            self.cookies = resp.cookies.get_dict()
+        else:
+            raise Exception('[FAIL] Could not login to OpenVAS')
+
+    def request(self, url, data=None, params=None, headers=None, cookies=None, method='POST', download=False,
+                json=False):
+        if headers is None:
+            headers = self.headers
+        if cookies is None:
+            cookies = self.cookies
+
+        retries = 0
+        success = False
+
+        url = self.base + url
+        methods = {'GET': requests.get,
+                   'POST': requests.post,
+                   'DELETE': requests.delete}
+
+        while (retries <= 10) and (not success):
+            resp = methods[method](url,
+                                   data=data,
+                                   headers=headers,
+                                   params=params,
+                                   cookies=cookies,
+                                   verify=False)
+
+            if resp.status_code == 401:
+                try:
+                    self.login()
+                    retries += 1
+                    self.vprint('[INFO] Token refreshed')
+                except Exception as e:
+                    self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
+            else:
+                success = True
+
+        if json:
+            resp = resp.json()
+        if download:
+            return resp.content
+        return resp
+
+    def get_token(self):
+        data = [
+            ('cmd', 'login'),
+            ('text', '/omp?r=1'),
+            ('login', self.username),
+            ('password', self.password),
+        ]
+        token = requests.post(self.base + self.OMP, data=data, verify=False)
+        return token
+
+    def get_reports(self, complete=True):
+        print('Retrieving OpenVAS report data...')
+        params = (('cmd', 'get_reports'), ('token', self.token))
+        reports = self.request(self.OMP, params=params, method='GET')
+        soup = BeautifulSoup(reports.text, 'lxml')
+        data = []
+        links = []
+        table = soup.find('table', attrs={'class': 'gbntable'})
+        table_body = table.find('tbody')
+
+        rows = table_body.find_all('tr')
+        for row in rows:
+            cols = row.find_all('td')
+            links.extend([a['href'] for a in row.find_all('a', href=True) if 'get_report' in str(a)])
+            cols = [ele.text.strip() for ele in cols]
+            data.append([ele for ele in cols if ele])
+        report = pd.DataFrame(data, columns=['date', 'status', 'task', 'severity', 'high', 'medium', 'low', 'log',
+                                             'false_pos'])
+
+        if report.shape[0] != 0:
+            report['links'] = links
+            report['report_ids'] = report.links.str.extract(r'.*report_id=([a-z-0-9]*)')
+            report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)
+        else:
+            raise Exception("Could not retrieve OpenVAS Reports - Please check your settings and try again")
+
+        if complete:
+            report = report[report.status == 'Done']
+        severity_extraction = report.severity.str.extract(r'([0-9.]*) \(([\w]+)\)')
+        severity_extraction.columns = ['severity', 'severity_rate']
+        report_with_severity = pd.concat([report, severity_extraction], axis=1)
+        return report_with_severity
+
+    def process_report(self, report_id):
+        params = (
+            ('token', self.token),
+            ('cmd', 'get_report'),
+            ('report_id', report_id),
+            ('filter', 'apply_overrides=0 min_qod=70 autofp=0 levels=hml first=1 rows=50 sort-reverse=severity'),
+            ('ignore_pagination', '1'),
+            ('report_format_id', 'c1645568-627a-11e3-a660-406186ea4fc5'),
+            ('submit', 'Download'),
+        )
+        print('Retrieving %s' % report_id)
+        req = self.request(self.OMP, params=params, method='GET')
+        report_df = pd.read_csv(io.BytesIO(req.text.encode('utf-8')))
+        report_df['report_ids'] = report_id
+        self.processed_reports += 1
+        merged_df = pd.merge(report_df, self.open_vas_reports, on='report_ids').drop('index', axis=1, errors='ignore')
+        return merged_df
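
For anyone trying the connector standalone, a minimal usage sketch follows. It is not part of the patch: the hostname, port, and credentials are placeholders mirroring the [openvas] example config, and the port assumes the Greenbone Security Assistant web interface is listening on 9392.

    # Usage sketch only -- connection details below are placeholders,
    # not values shipped with this patch.
    from vulnwhisp.frameworks.openvas import OpenVAS_API

    openvas = OpenVAS_API(hostname='localhost',
                          port=9392,             # assumed GSA web port
                          username='exampleuser',
                          password='examplepass')

    # get_reports() runs during __init__ and its DataFrame is cached on the
    # instance; pull the CSV for each finished report and merge it with the
    # report metadata.
    for report_id in openvas.open_vas_reports['report_ids']:
        merged = openvas.process_report(report_id)
        print('%d rows for report %s' % (len(merged), report_id))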