Merge branch 'master' of github.com:austin-taylor/VulnWhisperer

This commit is contained in:
Austin Taylor
2018-02-25 21:15:54 -05:00
4 changed files with 179 additions and 15 deletions

View File

@ -24,6 +24,15 @@ verbose=true
max_retries = 10
template_id = 126024
[openvas]
enabled = true
hostname = localhost
username = exampleuser
password = examplepass
write_path=/opt/vulnwhisp/openvas/
db_path=/opt/vulnwhisp/database
verbose=true
#[proxy]
; This section is optional. Leave it out if you're not using a proxy.
; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies

View File

@ -4,6 +4,7 @@
# Version 0.3
# Description: Take in nessus reports from vulnWhisperer and pumps into logstash
input {
  file {
    path => "/opt/vulnwhisperer/nessus/**/*"
@ -33,7 +34,8 @@ filter {
separator => ","
source => "message"
}
#If using filebeats as your source, you will need to replace the "path" field to "source"
grok {
match => { "path" => "(?<scan_name>[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.csv$" }
tag_on_failure => []

View File

@ -1,14 +0,0 @@
# Author: Austin Taylor
# Email: email@austintaylor.io
# Last Update: 05/21/2017
# Creates logstash-nessus
output {
if "nessus" in [tags] or [type] == "nessus" {
#stdout { codec => rubydebug }
elasticsearch {
hosts => "localhost:9200"
index => "logstash-nessus-%{+YYYY.MM}"
}
}
}

View File

@ -0,0 +1,167 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'Austin Taylor'
import datetime as dt
import io
import json
import pandas as pd
import requests
import requests
from bs4 import BeautifulSoup
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
class OpenVAS_API(object):
    """Client for the OpenVAS Greenbone Security Assistant (GSA) web UI.

    Authenticates against the ``/omp`` endpoint, scrapes the HTML report
    index into a pandas DataFrame, and downloads individual reports as CSV.
    """

    # Path of the OMP web endpoint on the GSA server.
    OMP = '/omp'

    def __init__(self,
                 hostname=None,
                 port=None,
                 username=None,
                 password=None,
                 verbose=True):
        """Log in to the GSA server and cache the report index.

        :param hostname: GSA host name or IP address.
        :param port: GSA HTTPS port.
        :param username: login user (required).
        :param password: login password (required).
        :param verbose: when True, print progress messages via ``vprint``.
        :raises Exception: if credentials are missing or login fails.
        """
        if username is None or password is None:
            raise Exception('ERROR: Missing username or password.')

        self.username = username
        self.password = password
        self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
        self.verbose = verbose
        # Count of reports fetched through process_report().
        self.processed_reports = 0

        # Browser-like headers sent with every request by default.
        self.headers = {
            'Origin': self.base,
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.8',
            'User-Agent': 'VulnWhisperer for OpenVAS',
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Cache-Control': 'max-age=0',
            'Referer': self.base,
            'X-Requested-With': 'XMLHttpRequest',
            'Connection': 'keep-alive',
        }

        self.login()
        self.open_vas_reports = self.get_reports()

    def vprint(self, msg):
        """Print *msg* only when verbose mode is enabled."""
        if self.verbose:
            print(msg)

    def login(self):
        """Obtain a GSA session token and cookies and store them on the instance.

        :raises Exception: if the server does not answer with HTTP 200.
        """
        resp = self.get_token()
        # BUG FIX: original used `resp.status_code is 200`, an identity check
        # that only worked via CPython's small-int cache. Use equality.
        if resp.status_code == 200:
            xml_response = BeautifulSoup(resp.content, 'lxml')
            self.token = xml_response.find(attrs={'id': 'gsa-token'}).text
            self.cookies = resp.cookies.get_dict()
        else:
            raise Exception('[FAIL] Could not login to OpenVAS')

    def request(self, url, data=None, params=None, headers=None, cookies=None, method='POST', download=False,
                json=False):
        """Issue an HTTP request, transparently re-authenticating on 401.

        :param url: path appended to ``self.base``.
        :param data: form body for POST requests.
        :param params: query-string parameters.
        :param headers: optional header override (defaults to ``self.headers``).
        :param cookies: optional cookie override (defaults to ``self.cookies``).
        :param method: one of 'GET', 'POST', 'DELETE'.
        :param download: when True, return the raw response body (bytes).
        :param json: when True, return the decoded JSON payload.
        :returns: the response object, its content, or its JSON, per flags.
        """
        if headers is None:
            headers = self.headers
        if cookies is None:
            cookies = self.cookies

        retries = 0
        success = False
        url = self.base + url

        methods = {'GET': requests.get,
                   'POST': requests.post,
                   'DELETE': requests.delete}

        while (retries <= 10) and (not success):
            data = methods[method](url,
                                   data=data,
                                   # BUG FIX: honor a caller-supplied headers
                                   # override (original always sent self.headers).
                                   headers=headers,
                                   params=params,
                                   cookies=cookies,
                                   verify=False)
            if data.status_code == 401:
                # Session expired: refresh the token and retry.
                try:
                    self.login()
                    # BUG FIX: pick up the fresh session cookies, otherwise the
                    # retry would resend the stale ones and 401 forever.
                    cookies = self.cookies
                    retries += 1
                    self.vprint('[INFO] Token refreshed')
                except Exception as e:
                    self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
            else:
                success = True

        if json:
            data = data.json()
        if download:
            return data.content
        return data

    def get_token(self):
        """POST the login form to /omp and return the raw response."""
        data = [
            ('cmd', 'login'),
            ('text', '/omp?r=1'),
            ('login', self.username),
            ('password', self.password),
        ]
        token = requests.post(self.base + self.OMP, data=data, verify=False)
        return token

    def get_reports(self, complete=True):
        """Scrape the GSA report index table into a DataFrame.

        :param complete: when True, keep only reports with status 'Done'.
        :returns: DataFrame with one row per report including 'report_ids',
                  'epoch', and parsed 'severity'/'severity_rate' columns.
        :raises Exception: if the index page contains no reports.
        """
        print('Retrieving OpenVAS report data...')
        params = (('cmd', 'get_reports'), ('token', self.token))
        reports = self.request(self.OMP, params=params, method='GET')
        soup = BeautifulSoup(reports.text, 'lxml')

        data = []
        links = []
        table = soup.find('table', attrs={'class': 'gbntable'})
        table_body = table.find('tbody')
        rows = table_body.find_all('tr')
        for row in rows:
            cols = row.find_all('td')
            # Collect per-report download links alongside the cell text.
            links.extend([a['href'] for a in row.find_all('a', href=True) if 'get_report' in str(a)])
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])

        report = pd.DataFrame(data, columns=['date', 'status', 'task', 'severity', 'high', 'medium', 'low', 'log',
                                             'false_pos'])
        if report.shape[0] == 0:
            raise Exception("Could not retrieve OpenVAS Reports - Please check your settings and try again")

        # Derive the report id and a numeric timestamp for each row.
        # (The original computed these columns twice; once is enough.)
        report['links'] = links
        report['report_ids'] = report.links.str.extract(r'.*report_id=([a-z-0-9]*)')
        report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)

        if complete:
            report = report[report.status == 'Done']

        # Split "7.5 (High)" style strings into a score and a rating label.
        severity_extraction = report.severity.str.extract(r'([0-9.]*) \(([\w]+)\)')
        severity_extraction.columns = ['severity', 'severity_rate']
        report_with_severity = pd.concat([report, severity_extraction], axis=1)
        return report_with_severity

    def process_report(self, report_id):
        """Download one report as CSV and join it with its index metadata.

        :param report_id: id taken from the 'report_ids' column of
                          ``self.open_vas_reports``.
        :returns: DataFrame of findings merged with the index row.
        """
        params = (
            ('token', self.token),
            ('cmd', 'get_report'),
            ('report_id', report_id),
            ('filter', 'apply_overrides=0 min_qod=70 autofp=0 levels=hml first=1 rows=50 sort-reverse=severity'),
            ('ignore_pagination', '1'),
            # NOTE(review): format id presumably selects the CSV report
            # format on the server — confirm against the GSA install.
            ('report_format_id', 'c1645568-627a-11e3-a660-406186ea4fc5'),
            ('submit', 'Download'),
        )
        print('Retrieving %s' % report_id)
        req = self.request(self.OMP, params=params, method='GET')
        report_df = pd.read_csv(io.BytesIO(req.text.encode('utf-8')))
        report_df['report_ids'] = report_id
        self.processed_reports += 1
        merged_df = pd.merge(report_df, self.open_vas_reports, on='report_ids').drop('index', axis=1)
        return merged_df