76 Commits
1.2.0 ... 1.5.0

Author SHA1 Message Date
f83a5d89a3 ELK Sample Install (#37)
Updated Readme.md to include a Sample ELK Install guide addressing multiple issues around ELK Cluster/Node Configuration.
2018-03-04 19:14:51 -05:00
1400cacfcb Update README.md 2018-03-04 17:18:34 -05:00
6f96536145 Update setup.py 2018-03-04 17:15:32 -05:00
4a60306bdd Addition of openvas logstash config 2018-03-04 16:06:53 -05:00
d509c03d68 Remove template format 2018-03-04 15:41:23 -05:00
f6745b00fd Completion of OpenVAS module 2018-03-04 15:06:09 -05:00
21b2a03b36 Completion of OpenVAS module 2018-03-04 14:33:18 -05:00
a658b7abab Allow template defined config form IDs 2018-03-04 08:43:35 -05:00
f21d3a3f64 Addition of OpenVas -- ready for alpha 2018-03-03 15:54:24 -05:00
53b0b27cb2 Addition of OpenVas -- ready for alpha 2018-03-03 15:53:23 -05:00
d8e813ff5a Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-02-25 21:15:54 -05:00
a0de072394 Automatically create filepath and directory if it does not exist 2018-02-25 21:15:50 -05:00
13dbc79b27 Update 1000_nessus_process_file.conf 2018-02-17 22:57:32 -05:00
42e72c36dd Delete 9000_output_nessus.conf 2018-02-17 22:30:16 -05:00
554b739146 Update 9000_output_nessus.conf 2018-02-17 22:29:41 -05:00
54337d3bfa Addition of OpenVAS 2018-02-11 16:07:50 -05:00
8b63aa4fbc Addition of OpenVAS Connector 2018-02-11 16:02:16 -05:00
5362d6f9e8 Badge addition 2018-01-31 10:12:47 -05:00
645e5707a4 Badge addition 2018-01-31 10:11:14 -05:00
03a2125dd1 Update to README 2018-01-31 10:04:39 -05:00
8e85eb0981 Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-31 09:51:55 -05:00
136cc3ac61 Update to README 2018-01-31 09:51:51 -05:00
0c6611711c Merge pull request #23 from HASecuritySolutions/master
HA Sync
2018-01-29 22:38:39 -05:00
f3eb2fbda1 Merge pull request #5 from austin-taylor/master
Fork Sync
2018-01-29 22:38:04 -05:00
124cbf2753 Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-29 22:35:55 -05:00
13a01fbfd0 Update to make nessus visualizations consistent with qualys 2018-01-29 22:35:45 -05:00
bbfe7ad71b Merge pull request #22 from austin-taylor/add-license-1
Create LICENSE
2018-01-23 12:07:01 -05:00
330e90c7a0 Create LICENSE 2018-01-23 12:06:48 -05:00
f9af977145 Delete LICENSE 2018-01-23 12:06:03 -05:00
1a2091ac54 Update 1000_nessus_process_file.conf 2018-01-05 09:43:57 -05:00
b2c230f43b Rename logstash-nessus-template.json to logstash-vulnwhisperer-template.json 2018-01-05 07:51:51 -05:00
cdaf743435 Readme update 2018-01-04 18:48:52 -05:00
59b688a117 Update to README 2018-01-04 18:06:43 -05:00
009ccc24f6 Update to README 2018-01-04 18:06:06 -05:00
3141dcabd2 Update to qualys logstash conf to reflect example config 2018-01-04 18:01:01 -05:00
02afd9c24d Update to example logstash config 2018-01-04 17:59:20 -05:00
d70238fbeb Update to README 2018-01-04 17:52:30 -05:00
36b028a78a Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-04 17:51:29 -05:00
16b04d7763 Update to README 2018-01-04 17:51:24 -05:00
4ea72650df Merge pull request #4 from austin-taylor/master
Fork Sync
2018-01-04 16:44:30 -05:00
a1b9ff6273 Update to readme 2018-01-04 13:53:38 -05:00
bbad599a73 Update to readme 2018-01-04 13:49:45 -05:00
882a4be275 Update to readme 2018-01-04 13:49:08 -05:00
2bf8c2be8b Update to readme 2018-01-04 13:36:19 -05:00
2b057f290b Remind user to select section if using a config 2018-01-03 18:33:14 -05:00
4359478e3d Getting started steps 2018-01-02 07:53:33 -05:00
ff50354bf9 Getting started steps 2018-01-02 07:10:57 -05:00
0ab53890ca Merge pull request #3 from austin-taylor/master
sync
2018-01-02 04:15:52 -05:00
ee6d61605b Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-02 03:12:15 -05:00
ada256cc46 README Update 2018-01-02 03:12:11 -05:00
8215f4e938 Set theme jekyll-theme-leap-day 2018-01-02 03:09:49 -05:00
30f966f354 Create CNAME 2018-01-02 02:59:41 -05:00
8af1ddd9e9 README Update 2018-01-02 02:51:34 -05:00
c3850247c9 Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-02 02:51:29 -05:00
745e4b3a0b README Update 2018-01-02 02:48:52 -05:00
c80383aaa6 Merge pull request #18 from cclauss/patch-1
Remove leading slash
2018-01-02 02:34:54 -05:00
e128d8c753 Travis Config update 2018-01-02 02:31:25 -05:00
66987810df Merge branch 'master' of github.com:austin-taylor/VulnWhisperer 2018-01-02 02:29:23 -05:00
6e500a4829 Add build status to README 2018-01-02 02:29:19 -05:00
92d6a7788c Remove leading slash 2018-01-02 08:26:15 +01:00
6ce3a254e4 Merge pull request #17 from cclauss/patch-1
flake8 . --exclude=/deps/qualysapi
2018-01-02 02:24:15 -05:00
9fe048fc5f flake8 . --exclude=/deps/qualysapi 2018-01-02 08:20:37 +01:00
67f9017f92 flake8 --ignore=deps/qualysapi 2018-01-02 08:17:03 +01:00
03d7954da9 Testing build with no submodules 2018-01-02 01:46:03 -05:00
ff02340e32 Merge pull request #16 from cclauss/patch-1
Add free automated flake8 testing of pull requests
2018-01-02 01:39:18 -05:00
45f8ea55d3 Add free automated flake8 testing of pull requests
[Flake8](http://flake8.pycqa.org) tests can help to find Python syntax errors, undefined names, and other code quality issues.  [Travis CI](https://travis-ci.org) is a framework for running flake8 and other tests which is free for open source projects like this one.  The owner of the this repo would need to go to https://travis-ci.org/profile and flip the repository switch __on__ to enable free automated flake8 testing of each pull request.
2018-01-02 07:31:24 +01:00
05608b29bb Update README.md 2018-01-01 14:58:42 -05:00
4d6ad51b50 README Update 2018-01-01 07:28:48 -05:00
b953e1d97b README Update 2018-01-01 07:28:16 -05:00
8f536ed2ac Documentation update 2017-12-31 07:04:57 -05:00
c5115fba00 Update to README 2017-12-31 06:02:48 -05:00
ce529dd4f9 Plugin name converted to scan name 2017-12-30 23:57:58 -05:00
3d34916e4c Adding name of scan and scan reference 2017-12-30 23:54:47 -05:00
690841c4df Readme Update 2017-12-30 23:06:09 -05:00
78d9a077f5 Merge pull request #2 from austin-taylor/master
Sync with master
2017-12-30 21:27:36 -05:00
dd3a8bb649 Merge pull request #1 from austin-taylor/master
Sync with master
2017-12-26 17:10:07 -05:00
24 changed files with 1111 additions and 373 deletions

24
.travis.yml Normal file
View File

@ -0,0 +1,24 @@
group: travis_latest
language: python
cache: pip
python:
- 2.7
# - 3.6
#matrix:
# allow_failures:
# - python: 3.6 - Commenting out testing for Python 3.6 until ready
install:
- pip install -r requirements.txt
- pip install flake8 # pytest # add another testing frameworks later
before_script:
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --exclude=deps/qualysapi --select=E901,E999,F821,F822,F823 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
- flake8 . --count --exit-zero --exclude=deps/qualysapi --max-complexity=10 --max-line-length=127 --statistics
script:
- true # pytest --capture=sys # add other tests here
notifications:
on_success: change
on_failure: change # `always` will be the setting once code changes slow down

1
CNAME Normal file
View File

@ -0,0 +1 @@
www.vulnwhisperer.com

214
LICENSE
View File

@ -1,21 +1,201 @@
MIT License Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
Copyright (c) 2017 Austin Taylor TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
Permission is hereby granted, free of charge, to any person obtaining a copy 1. Definitions.
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all "License" shall mean the terms and conditions for use, reproduction,
copies or substantial portions of the Software. and distribution as defined by Sections 1 through 9 of this document.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR "Licensor" shall mean the copyright owner or entity authorized by
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, the copyright owner that is granting the License.
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER "Legal Entity" shall mean the union of the acting entity and all
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, other entities that control, are controlled by, or are under common
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE control with that entity. For the purposes of this definition,
SOFTWARE. "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

177
README.md
View File

@ -1,51 +1,152 @@
<p align="center"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vuln_whisperer_logo_s.png" width="400px"></p> <p align="center"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vuln_whisperer_logo_s.png" width="400px"></p>
<p align="center"> <i>Create <u><b>actionable data</b></u> from your vulnerability scans </i> </p> <p align="center"> <i>Create <u><b>actionable data</b></u> from your vulnerability scans </i> </p>
<p align="center" style="width:400px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vulnWhispererWebApplications.png" style="width:400px"></p> <p align="center" style="width:400px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vulnWhispererWebApplications.png" style="width:400px"></p>
VulnWhisperer is a vulnerability report aggregator. VulnWhisperer will pull all the reports VulnWhisperer is a vulnerability data and report aggregator. VulnWhisperer will pull all the reports
and create a file with a unique filename which is then fed into logstash. Logstash extracts data from the filename and tags all of the information inside the report (see logstash_vulnwhisp.conf file). Data is then shipped to elasticsearch to be indexed. and create a file with a unique filename which is then fed into logstash. Logstash extracts data from the filename and tags all of the information inside the report (see logstash_vulnwhisp.conf file). Data is then shipped to elasticsearch to be indexed.
[![Build Status](https://travis-ci.org/austin-taylor/VulnWhisperer.svg?branch=master)](https://travis-ci.org/austin-taylor/VulnWhisperer)
[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](http://choosealicense.com/licenses/mit/)
[![Twitter](https://img.shields.io/twitter/follow/VulnWhisperer.svg?style=social&label=Follow)](https://twitter.com/VulnWhisperer)
Currently Supports
-----------------
### Vulnerability Frameworks
- [X] Nessus (v6 & **v7**)
- [X] Qualys Web Applications
- [ ] Qualys Vulnerability Management (Need license)
- [X] OpenVAS
- [ ] Nexpose
- [ ] Insight VM
- [ ] NMAP
- [ ] More to come
Getting Started
===============
1) Follow the [install requirements](#installreq)
2) Fill out the section you want to process in <a href="https://github.com/austin-taylor/VulnWhisperer/blob/master/configs/frameworks_example.ini">example.ini file</a>
3) Modify the IP settings in the <a href="https://github.com/austin-taylor/VulnWhisperer/tree/master/logstash">logstash files to accomodate your environment</a> and import them to your logstash conf directory (default is /etc/logstash/conf.d/)
4) Import the <a href="https://github.com/austin-taylor/VulnWhisperer/tree/master/kibana/vuln_whisp_kibana">kibana visualizations</a>
5) [Run Vulnwhisperer](#run)
Requirements Requirements
------------- -------------
#### ####
* ElasticStack * ElasticStack 5.x
* Python 2.7 * Python 2.7
* Vulnerability Scanner * Vulnerability Scanner
* Optional: Message broker such as Kafka or RabbitMQ * Optional: Message broker such as Kafka or RabbitMQ
Currently Supports <a id="installreq">Install Requirements-VulnWhisperer(may require sudo)</a>
------------- --------------------
####
* Elasticsearch 2.x
* Python 2.7
* Nessus
* Qualys - Web Application Scanner
Setup
===============
**First, install dependant modules**
```python ```python
Install pip:
sudo <pkg-manager> install python-pip
sudo pip install --upgrade pip
Manually install requirements: cd deps/qualysapi
sudo pip install pytz python setup.py install
sudo pip install pandas
Using requirements file:
sudo pip install -r /path/to/VulnWhisperer/requirements.txt
cd /path/to/VulnWhisperer
sudo python setup.py install
``` ```
**Second, install requirements**
```python
pip install -r /path/to/VulnWhisperer/requirements.txt
cd /path/to/VulnWhisperer
python setup.py install
```
Now you're ready to pull down scans. (see <a href="#run">run section</a>)
Install Requirements-ELK Node **\*SAMPLE\***
--------------------
The following instructions should be utilized as a **Sample Guide** in the absence of an existing ELK Cluster/Node. This will cover a Debian example install guide of a stand-alone node of Elasticsearch & Kibana.
While Logstash is included in this install guide, it it recommended that a seperate host pulling the VulnWhisperer data is utilized with Logstash to ship the data to the Elasticsearch node.
*Please note there is a docker-compose.yml available as well.*
**Debian:** *(https://www.elastic.co/guide/en/elasticsearch/reference/5.6/deb.html)*
```shell
sudo apt-get install -y default-jre
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
sudo apt-get install apt-transport-https
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-5.x.list
sudo apt-get update && sudo apt-get install elasticsearch kibana logstash
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable elasticsearch.service
sudo /bin/systemctl enable kibana.service
sudo /bin/systemctl enable logstash.service
```
**Elasticsearch & Kibana Sample Config Notes**
Utilizing your favorite text editor:
* Grab your host IP and change the IP of your /etc/elasticsearch/elasticsearch.yml file. (This defaults to 'localhost')
* Validate Elasticsearch is set to run on port 9200 (Default)
* Grab your host IP and change the IP of your /etc/kibana/kibana.yml file. (This defaults to 'localhost') *Validate that Kibana is pointing to the correct Elasticsearch IP (This was set in the previous step)*
* Validate Kibana is set to run on port 5601 (Default)
*Start elasticsearch and validate they are running/communicating with one another:*
```shell
sudo service elasticsearch start
sudo service kibana start
```
OR
```shell
sudo systemctl start elasticsearch.service
sudo systemctl start kibana.service
```
**Logstash Sample Config Notes**
* Copy/Move the Logstash .conf files from */VulnWhisperer/logstash/* to */etc/logstash/conf.d/*
* Validate the Logstash.conf files *input* contains the correct location of VulnWhisper Scans in the *input.file.path* directory identified below:
```
input {
file {
path => "/opt/vulnwhisperer/nessus/**/*"
start_position => "beginning"
tags => "nessus"
type => "nessus"
}
}
```
* Validate the Logstash.conf files *output* contains the correct Elasticsearch IP set during the previous step above: (This will default to localhost)
```
output {
if "nessus" in [tags] or [type] == "nessus" {
#stdout { codec => rubydebug }
elasticsearch {
hosts => [ "localhost:9200" ]
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
}
```
* Validate logstash has the correct file permissions to read the location of the VulnWhisperer Scans
Once configured run Logstash: (Running Logstash as a service will pick up all the files in */etc/logstash/conf.d/* If you would like to run only one logstash file please reference the command below):
Logstash as a service:
```shell
sudo service logstash start
```
*OR*
```shell
sudo systemctl start logstash.service
```
Single Logstash file:
```shell
sudo /usr/share/logstash/bin/logstash --path.settings /etc/logstash/ -f /etc/logstash/conf.d/1000_nessus_process_file.conf
```
Configuration Configuration
----- -----
@ -55,7 +156,11 @@ There are a few configuration steps to setting up VulnWhisperer:
* Import ElasticSearch Templates * Import ElasticSearch Templates
* Import Kibana Dashboards * Import Kibana Dashboards
Run <a href="https://github.com/austin-taylor/VulnWhisperer/blob/master/configs/frameworks_example.ini">example.ini file</a>
<p align="left" style="width:200px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/config_example.png" style="width:200px"></p>
<a id="run">Run</a>
----- -----
To run, fill out the configuration file with your vulnerability scanner settings. Then you can execute from the command line. To run, fill out the configuration file with your vulnerability scanner settings. Then you can execute from the command line.
```python ```python
@ -68,12 +173,30 @@ vuln_whisperer -c configs/example.ini -s qualys
<p align="center" style="width:300px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/running_vuln_whisperer.png" style="width:400px"></p> <p align="center" style="width:300px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/running_vuln_whisperer.png" style="width:400px"></p>
Next you'll need to import the visualizations into Kibana and setup your logstash config. A more thorough README is underway with setup instructions. Next you'll need to import the visualizations into Kibana and setup your logstash config. A more thorough README is underway with setup instructions.
Running Nightly
---------------
If you're running linux, be sure to setup a cronjob to remove old files that get stored in the database. Be sure to change .csv if you're using json.
Setup crontab -e with the following config (modify to your environment) - this will run vulnwhisperer each night at 0130:
`00 1 * * * /usr/bin/find /opt/vulnwhisp/ -type f -name '*.csv' -ctime +3 -exec rm {} \;`
`30 1 * * * /usr/local/bin/vuln_whisperer -c /opt/vulnwhisp/configs/example.ini`
_For windows, you may need to type the full path of the binary in vulnWhisperer located in the bin directory._ _For windows, you may need to type the full path of the binary in vulnWhisperer located in the bin directory._
Video Walkthrough -- Featured on ElasticWebinar
----------------------------------------------
<a href="http://www.youtube.com/watch?feature=player_embedded&v=zrEuTtRUfNw?start=30
" target="_blank"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/elastic_webinar.png"
alt="Elastic presentation on VulnWhisperer" border="10" /></a>
Credit Credit
------ ------
Big thank you to <a href="https://github.com/SMAPPER">Justin Henderson</a> for his contributions to vulnWhisperer! Big thank you to <a href="https://github.com/SMAPPER">Justin Henderson</a> for his contributions to vulnWhisperer!
AS SEEN ON TV AS SEEN ON TV
------------- -------------
<p align="center" style="width:400px"><a href="https://twitter.com/MalwareJake/status/935654519471353856"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/as_seen_on_tv.png" style="width:400px"></a></p> <p align="center" style="width:400px"><a href="https://twitter.com/MalwareJake/status/935654519471353856"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/as_seen_on_tv.png" style="width:400px"></a></p>

1
_config.yml Normal file
View File

@ -0,0 +1 @@
theme: jekyll-theme-leap-day

View File

@ -29,30 +29,27 @@ def main():
parser.add_argument('-p', '--password', dest='password', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS password') parser.add_argument('-p', '--password', dest='password', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS password')
args = parser.parse_args() args = parser.parse_args()
vw = vulnWhisperer(config=args.config,
profile=args.section,
verbose=args.verbose,
username=args.username,
password=args.password)
vw.whisper_vulnerabilities()
'''
try: try:
if args.config and not args.section:
print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL,
error='Please specify a section using -s. \
\nExample vuln_whisperer -c config.ini -s nessus',
endc=bcolors.ENDC))
else:
vw = vulnWhisperer(config=args.config,
profile=args.section,
verbose=args.verbose,
username=args.username,
password=args.password)
vw = vulnWhisperer(config=args.config, vw.whisper_vulnerabilities()
profile=args.section, sys.exit(1)
verbose=args.verbose,
username=args.username,
password=args.password)
vw.whisper_vulnerabilities()
sys.exit(1)
except Exception as e: except Exception as e:
if args.verbose: if args.verbose:
print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL, error=e, endc=bcolors.ENDC)) print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL, error=e, endc=bcolors.ENDC))
sys.exit(2) sys.exit(2)
'''
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View File

@ -22,8 +22,19 @@ verbose=true
# Set the maximum number of retries each connection should attempt. # Set the maximum number of retries each connection should attempt.
#Note, this applies only to failed connections and timeouts, never to requests where the server returns a response. #Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
max_retries = 10 max_retries = 10
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
template_id = 126024 template_id = 126024
[openvas]
enabled = true
hostname = localhost
port = 4000
username = exampleuser
password = examplepass
write_path=/opt/vulnwhisp/openvas/
db_path=/opt/vulnwhisp/database
verbose=true
#[proxy] #[proxy]
; This section is optional. Leave it out if you're not using a proxy. ; This section is optional. Leave it out if you're not using a proxy.
; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies ; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 81 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 185 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 235 KiB

After

Width:  |  Height:  |  Size: 273 KiB

View File

@ -1,245 +0,0 @@
{
"order": 0,
"template": "logstash-vulnwhisperer-*",
"settings": {
"index": {
"routing": {
"allocation": {
"total_shards_per_node": "2"
}
},
"mapping": {
"total_fields": {
"limit": "3000"
}
},
"refresh_interval": "5s",
"number_of_shards": "1",
"number_of_replicas": "1"
}
},
"mappings": {
"_default_": {
"dynamic_templates": [
{
"message_field": {
"mapping": {
"fielddata": {
"format": "disabled"
},
"index": "analyzed",
"omit_norms": true,
"type": "string"
},
"match_mapping_type": "string",
"match": "message"
}
},
{
"string_fields": {
"mapping": {
"fielddata": {
"format": "disabled"
},
"index": "analyzed",
"omit_norms": true,
"type": "string",
"fields": {
"raw": {
"ignore_above": 256,
"index": "not_analyzed",
"type": "string",
"doc_values": true
}
}
},
"match_mapping_type": "string",
"match": "*"
}
},
{
"ip_address_fields": {
"mapping": {
"type": "ip"
},
"match": "*_ip"
}
},
{
"ipv6_address_fields": {
"mapping": {
"index": "not_analyzed",
"type": "string"
},
"match": "*_ipv6"
}
},
{
"float_fields": {
"mapping": {
"type": "float",
"doc_values": true
},
"match_mapping_type": "float",
"match": "*"
}
},
{
"double_fields": {
"mapping": {
"type": "double",
"doc_values": true
},
"match_mapping_type": "double",
"match": "*"
}
},
{
"byte_fields": {
"mapping": {
"type": "byte",
"doc_values": true
},
"match_mapping_type": "byte",
"match": "*"
}
},
{
"short_fields": {
"mapping": {
"type": "short",
"doc_values": true
},
"match_mapping_type": "short",
"match": "*"
}
},
{
"integer_fields": {
"mapping": {
"type": "integer",
"doc_values": true
},
"match_mapping_type": "integer",
"match": "*"
}
},
{
"long_fields": {
"mapping": {
"type": "long",
"doc_values": true
},
"match_mapping_type": "long",
"match": "*"
}
},
{
"date_fields": {
"mapping": {
"type": "date",
"doc_values": true
},
"match_mapping_type": "date",
"match": "*"
}
},
{
"geo_point_fields": {
"mapping": {
"type": "geo_point",
"doc_values": true
},
"match_mapping_type": "geo_point",
"match": "*"
}
}
],
"_all": {
"omit_norms": true,
"enabled": true
},
"properties": {
"plugin_id": {
"type": "integer"
},
"last_updated": {
"type": "date",
"doc_values": true
},
"geoip": {
"dynamic": true,
"type": "object",
"properties": {
"ip": {
"type": "ip",
"doc_values": true
},
"latitude": {
"type": "float",
"doc_values": true
},
"location": {
"type": "geo_point",
"doc_values": true
},
"longitude": {
"type": "float",
"doc_values": true
}
}
},
"risk_score": {
"type": "float"
},
"source": {
"index": "not_analyzed",
"type": "string"
},
"synopsis": {
"index": "not_analyzed",
"type": "string"
},
"see_also": {
"index": "not_analyzed",
"type": "string"
},
"@timestamp": {
"type": "date",
"doc_values": true
},
"cve": {
"index": "not_analyzed",
"type": "string"
},
"solution": {
"index": "not_analyzed",
"type": "string"
},
"port": {
"index": "not_analyzed",
"type": "integer"
},
"host": {
"type": "string"
},
"@version": {
"index": "not_analyzed",
"type": "string",
"doc_values": true
},
"risk": {
"index": "not_analyzed",
"type": "string"
},
"assign_ip": {
"type": "ip"
},
"cvss": {
"type": "float"
}
}
}
},
"aliases": {}
}

View File

@ -0,0 +1,138 @@
{
"order": 0,
"template": "logstash-vulnwhisperer-*",
"settings": {
"index": {
"routing": {
"allocation": {
"total_shards_per_node": "2"
}
},
"mapping": {
"total_fields": {
"limit": "3000"
}
},
"refresh_interval": "5s",
"number_of_shards": "1",
"number_of_replicas": "0"
}
},
"mappings": {
"_default_": {
"_all": {
"enabled": true,
"norms": false
},
"dynamic_templates": [
{
"message_field": {
"path_match": "message",
"match_mapping_type": "string",
"mapping": {
"type": "text",
"norms": false
}
}
},
{
"string_fields": {
"match": "*",
"match_mapping_type": "string",
"mapping": {
"type": "text",
"norms": false,
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
}
}
}
],
"properties": {
"plugin_id": {
"type": "integer"
},
"last_updated": {
"type": "date",
"doc_values": true
},
"geoip": {
"dynamic": true,
"type": "object",
"properties": {
"ip": {
"type": "ip",
"doc_values": true
},
"latitude": {
"type": "float",
"doc_values": true
},
"location": {
"type": "geo_point",
"doc_values": true
},
"longitude": {
"type": "float",
"doc_values": true
}
}
},
"risk_score": {
"type": "float"
},
"source": {
"index": "not_analyzed",
"type": "string"
},
"synopsis": {
"index": "not_analyzed",
"type": "string"
},
"see_also": {
"index": "not_analyzed",
"type": "string"
},
"@timestamp": {
"type": "date",
"doc_values": true
},
"cve": {
"index": "not_analyzed",
"type": "string"
},
"solution": {
"index": "not_analyzed",
"type": "string"
},
"port": {
"index": "not_analyzed",
"type": "integer"
},
"host": {
"type": "string"
},
"@version": {
"index": "not_analyzed",
"type": "string",
"doc_values": true
},
"risk": {
"index": "not_analyzed",
"type": "string"
},
"assign_ip": {
"type": "ip"
},
"cvss": {
"type": "float"
}
}
}
},
"aliases": {}
}

View File

@ -144,7 +144,7 @@
"_type": "visualization", "_type": "visualization",
"_source": { "_source": {
"title": "VulnWhisperer - ScanName", "title": "VulnWhisperer - ScanName",
"visState": "{\"title\":\"VulnWhisperer - ScanName\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"plugin_name.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Scan Name\"}}],\"listeners\":{}}", "visState": "{\"title\":\"VulnWhisperer - ScanName\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"scan_name.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Scan Name\"}}],\"listeners\":{}}",
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}", "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
"description": "", "description": "",
"version": 1, "version": 1,
@ -340,7 +340,7 @@
"_type": "visualization", "_type": "visualization",
"_source": { "_source": {
"title": "VulnWhisperer - Critical Assets", "title": "VulnWhisperer - Critical Assets",
"visState": "{\"title\":\"VulnWhisperer - Critical Assets\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":true,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Green to Red\",\"setColorRange\":true,\"colorsRange\":[{\"from\":0,\"to\":3},{\"from\":3,\"to\":7},{\"from\":7,\"to\":9},{\"from\":9,\"to\":11}],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"color\":\"white\"}}],\"type\":\"heatmap\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"risk_score\",\"customLabel\":\"Residual Risk Score\"}},{\"id\":\"2\",\"enabled\":false,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"risk_score\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"row\":true}},{\"id\":\"3\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Date\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"asset.keyword\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Critical Asset\"}}],\"listeners\":{}}", "visState": "{\"title\":\"VulnWhisperer - Critical Assets\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":true,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Green to 
Red\",\"setColorRange\":true,\"colorsRange\":[{\"from\":0,\"to\":3},{\"from\":3,\"to\":7},{\"from\":7,\"to\":9},{\"from\":9,\"to\":11}],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":false,\"rotate\":0,\"color\":\"white\"}}],\"type\":\"heatmap\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"risk_score\",\"customLabel\":\"Residual Risk Score\"}},{\"id\":\"2\",\"enabled\":false,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"risk_score\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"row\":true}},{\"id\":\"3\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Date\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"asset.keyword\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Critical Asset\"}}],\"listeners\":{}}",
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"},\"colors\":{\"8 - 10\":\"#BF1B00\",\"9 - 11\":\"#BF1B00\",\"7 - 9\":\"#EF843C\",\"3 - 7\":\"#EAB839\",\"0 - 3\":\"#7EB26D\"},\"legendOpen\":false}}", "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"},\"colors\":{\"8 - 10\":\"#BF1B00\",\"9 - 11\":\"#BF1B00\",\"7 - 9\":\"#EF843C\",\"3 - 7\":\"#EAB839\",\"0 - 3\":\"#7EB26D\"},\"legendOpen\":false}}",
"description": "", "description": "",
"version": 1, "version": 1,
@ -410,7 +410,7 @@
"_type": "visualization", "_type": "visualization",
"_source": { "_source": {
"title": "VulnWhisperer - Risk: Medium", "title": "VulnWhisperer - Risk: Medium",
"visState": "{\"title\":\"VulnWhisperer - Risk: Medium\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Medium Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score:medium\"}}},\"label\":\"Medium Risk\"}]}}],\"listeners\":{}}", "visState": "{\"title\":\"VulnWhisperer - Risk: Medium\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to 
Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false},\"isDisplayWarning\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Medium Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:medium\"}}},\"label\":\"Medium Risk\"}]}}],\"listeners\":{}}",
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EAB839\"}}}", "uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EAB839\"}}}",
"description": "", "description": "",
"version": 1, "version": 1,

View File

@ -19,7 +19,7 @@
"value": 0 "value": 0
}, },
"kibanaSavedObjectMeta": { "kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"-vulnerability_category:\\\"INFORMATION_GATHERED\\\"\"}}}],\"highlightAll\":true,\"version\":true}" "searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
} }
} }
}, },
@ -34,16 +34,9 @@
"optionsJSON": "{\"darkTheme\":false}", "optionsJSON": "{\"darkTheme\":false}",
"uiStateJSON": "{\"P-11\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-27\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-28\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-3\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"asc\"}}}},\"P-30\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-31\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-40\":{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"}}},\"P-41\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-42\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-43\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"}}},\"P-44\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-45\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-46\":{\"vis\":{\"legendOpen\":true}},\"P-47\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-48\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-49\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-5\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-50\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-51\":{\"vis\":{\"defaultColors\":{\"0 - 
10000\":\"rgb(0,104,55)\"}}},\"P-6\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-8\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}", "uiStateJSON": "{\"P-11\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-27\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-28\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-3\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"asc\"}}}},\"P-30\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-31\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-40\":{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"}}},\"P-41\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-42\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-43\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"}}},\"P-44\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-45\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-46\":{\"vis\":{\"legendOpen\":true}},\"P-47\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-48\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-49\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-5\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-50\":{\"vis\":{\"defaultColors\":{\"0 - 
10000\":\"rgb(0,104,55)\"}}},\"P-51\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-6\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-8\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
"version": 1, "version": 1,
"timeRestore": true, "timeRestore": false,
"timeTo": "now",
"timeFrom": "now-30d",
"refreshInterval": {
"display": "Off",
"pause": false,
"value": 0
},
"kibanaSavedObjectMeta": { "kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"-vulnerability_category:\\\"INFORMATION_GATHERED\\\"\"}}}],\"highlightAll\":true,\"version\":true}" "searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
} }
} }
} }

View File

@ -4,9 +4,10 @@
# Version 0.3 # Version 0.3
# Description: Take in nessus reports from vulnWhisperer and pumps into logstash # Description: Take in nessus reports from vulnWhisperer and pumps into logstash
input { input {
file { file {
path => "/opt/vulnwhisp/scans/**/*" path => "/opt/vulnwhisperer/nessus/**/*"
start_position => "beginning" start_position => "beginning"
tags => "nessus" tags => "nessus"
type => "nessus" type => "nessus"
@ -15,6 +16,9 @@ input {
filter { filter {
if "nessus" in [tags]{ if "nessus" in [tags]{
# Drop the header column
if [message] =~ "^Plugin ID" { drop {} }
mutate { mutate {
gsub => [ gsub => [
"message", "\|\|\|", " ", "message", "\|\|\|", " ",
@ -26,11 +30,12 @@ filter {
} }
csv { csv {
columns => ["plugin_id", "cve", "cvss", "risk", "host", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output"] columns => ["plugin_id", "cve", "cvss", "risk", "asset", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output"]
separator => "," separator => ","
source => "message" source => "message"
} }
#If using filebeats as your source, you will need to replace the "path" field to "source"
grok { grok {
match => { "path" => "(?<scan_name>[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.csv$" } match => { "path" => "(?<scan_name>[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.csv$" }
tag_on_failure => [] tag_on_failure => []
@ -61,6 +66,9 @@ filter {
if [cve] == "nan" { if [cve] == "nan" {
mutate { remove_field => [ "cve" ] } mutate { remove_field => [ "cve" ] }
} }
if [cvss] == "nan" {
mutate { remove_field => [ "cvss" ] }
}
if [see_also] == "nan" { if [see_also] == "nan" {
mutate { remove_field => [ "see_also" ] } mutate { remove_field => [ "see_also" ] }
} }
@ -81,6 +89,31 @@ filter {
mutate { mutate {
convert => { "risk_score" => "float" } convert => { "risk_score" => "float" }
} }
if [risk_score] == 0 {
mutate {
add_field => { "risk_score_name" => "info" }
}
}
if [risk_score] > 0 and [risk_score] < 3 {
mutate {
add_field => { "risk_score_name" => "low" }
}
}
if [risk_score] >= 3 and [risk_score] < 6 {
mutate {
add_field => { "risk_score_name" => "medium" }
}
}
if [risk_score] >=6 and [risk_score] < 9 {
mutate {
add_field => { "risk_score_name" => "high" }
}
}
if [risk_score] >= 9 {
mutate {
add_field => { "risk_score_name" => "critical" }
}
}
# Compensating controls - adjust risk_score # Compensating controls - adjust risk_score
# Adobe and Java are not allowed to run in browser unless whitelisted # Adobe and Java are not allowed to run in browser unless whitelisted
@ -100,27 +133,27 @@ filter {
# Add tags for reporting based on assets or criticality # Add tags for reporting based on assets or criticality
#if [host] == "192.168.0.1" or [host] == "192.168.0.50" or [host] =~ "^192\.168\.10\." or [host] =~ "^42.42.42." { if [asset] == "dc01" or [asset] == "dc02" or [asset] == "pki01" or [asset] == "192.168.0.54" or [asset] =~ "^192\.168\.0\." or [asset] =~ "^42.42.42." {
# mutate { mutate {
# add_tag => [ "critical_asset" ] add_tag => [ "critical_asset" ]
# } }
#} }
#if [host] =~ "^192\.168\.[45][0-9][0-9]\.1$" or [host] =~ "^192.168\.[50]\.[0-9]{1,2}\.1$"{ #if [asset] =~ "^192\.168\.[45][0-9][0-9]\.1$" or [asset] =~ "^192.168\.[50]\.[0-9]{1,2}\.1$"{
# mutate { # mutate {
# add_tag => [ "has_hipaa_data" ] # add_tag => [ "has_hipaa_data" ]
# } # }
#} #}
#if [host] =~ "^192\.168\.[45][0-9][0-9]\." { #if [asset] =~ "^192\.168\.[45][0-9][0-9]\." {
# mutate { # mutate {
# add_tag => [ "hipaa_asset" ] # add_tag => [ "hipaa_asset" ]
# } # }
#} #}
#if [host] =~ "^192\.168\.5\." { if [asset] =~ "^hr" {
# mutate { mutate {
# add_tag => [ "pci_asset" ] add_tag => [ "pci_asset" ]
# } }
#} }
#if [host] =~ "^10\.0\.50\." { #if [asset] =~ "^10\.0\.50\." {
# mutate { # mutate {
# add_tag => [ "web_servers" ] # add_tag => [ "web_servers" ]
# } # }
@ -133,7 +166,7 @@ output {
#stdout { codec => rubydebug } #stdout { codec => rubydebug }
elasticsearch { elasticsearch {
hosts => [ "localhost:9200" ] hosts => [ "localhost:9200" ]
index => "logstash-nessus-%{+YYYY.MM}" index => "logstash-vulnwhisperer-%{+YYYY.MM}"
} }
} }
} }

View File

@ -6,7 +6,7 @@
input { input {
file { file {
path => "/opt/vulnwhisperer/scans/**/*.json" path => "/opt/vulnwhisperer/qualys/scans/**/*.json"
type => json type => json
codec => json codec => json
start_position => "beginning" start_position => "beginning"

146
logstash/3000_openvas.conf Normal file
View File

@ -0,0 +1,146 @@
# Author: Austin Taylor and Justin Henderson
# Email: austin@hasecuritysolutions.com
# Last Update: 03/04/2018
# Version 0.3
# Description: Take in OpenVAS scan reports from vulnWhisperer and pumps into logstash
# Ingest OpenVAS scan exports written to disk by the VulnWhisperer OpenVAS module.
input {
  file {
    # One JSON document per exported scan report.
    path => "/opt/vulnwhisperer/openvas/*.json"
    type => json
    codec => json
    # On first run, read pre-existing files from the top instead of tailing.
    start_position => "beginning"
    tags => [ "openvas_scan", "openvas" ]
  }
}
filter {
  if "openvas_scan" in [tags] {
    mutate {
      replace => [ "message", "%{message}" ]
      # Clean the raw event text: drop the "|||" field separator, collapse
      # whitespace, blank out literal "nan" placeholders, and strip newlines.
      gsub => [
        "message", "\|\|\|", " ",
        "message", "\t\t", " ",
        "message", " ", " ",
        "message", " ", " ",
        "message", " ", " ",
        "message", "nan", " ",
        "message",'\n',''
      ]
    }
    # Pull scan_id and the export timestamp out of the source file name.
    grok {
      match => { "path" => "openvas_scan_%{DATA:scan_id}_%{INT:last_updated}.json$" }
      tag_on_failure => []
    }
    # Mirror the CVSS base score into risk_score (converted to float below).
    mutate {
      add_field => { "risk_score" => "%{cvss}" }
    }
    # Map OpenVAS numeric threat levels to names plus a 0-based risk_number.
    # NOTE(review): assumes [risk] arrives as the strings "1".."5" — confirm
    # against the OpenVAS module's JSON output.
    if [risk] == "1" {
      mutate { add_field => { "risk_number" => 0 }}
      mutate { replace => { "risk" => "info" }}
    }
    if [risk] == "2" {
      mutate { add_field => { "risk_number" => 1 }}
      mutate { replace => { "risk" => "low" }}
    }
    if [risk] == "3" {
      mutate { add_field => { "risk_number" => 2 }}
      mutate { replace => { "risk" => "medium" }}
    }
    if [risk] == "4" {
      mutate { add_field => { "risk_number" => 3 }}
      mutate { replace => { "risk" => "high" }}
    }
    if [risk] == "5" {
      mutate { add_field => { "risk_number" => 4 }}
      mutate { replace => { "risk" => "critical" }}
    }
    # The raw message is no longer needed once fields are extracted.
    mutate {
      remove_field => "message"
    }
    # Normalize the optional detection/test timestamps into date fields.
    if [first_time_detected] {
      date {
        match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_detected"
      }
    }
    if [first_time_tested] {
      date {
        match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "first_time_tested"
      }
    }
    if [last_time_detected] {
      date {
        match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_detected"
      }
    }
    if [last_time_tested] {
      date {
        match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
        target => "last_time_tested"
      }
    }
    # Use the export epoch from the file name as the event timestamp.
    date {
      match => [ "last_updated", "UNIX" ]
      target => "@timestamp"
      remove_field => "last_updated"
    }
    mutate {
      convert => { "plugin_id" => "integer"}
      convert => { "id" => "integer"}
      convert => { "risk_number" => "integer"}
      convert => { "risk_score" => "float"}
      convert => { "total_times_detected" => "integer"}
      convert => { "cvss_temporal" => "float"}
      convert => { "cvss" => "float"}
    }
    # Bucket risk_score into the names used by the Kibana dashboards.
    if [risk_score] == 0 {
      mutate {
        add_field => { "risk_score_name" => "info" }
      }
    }
    if [risk_score] > 0 and [risk_score] < 3 {
      mutate {
        add_field => { "risk_score_name" => "low" }
      }
    }
    if [risk_score] >= 3 and [risk_score] < 6 {
      mutate {
        add_field => { "risk_score_name" => "medium" }
      }
    }
    if [risk_score] >=6 and [risk_score] < 9 {
      mutate {
        add_field => { "risk_score_name" => "high" }
      }
    }
    if [risk_score] >= 9 {
      mutate {
        add_field => { "risk_score_name" => "critical" }
      }
    }
    # Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break.
    if [asset] =~ "^10\.0\.100\." {
      mutate {
        add_tag => [ "critical_asset" ]
      }
    }
  }
}
output {
  if "openvas" in [tags] {
    # Debug output to the console; consider commenting out in production.
    stdout { codec => rubydebug }
    elasticsearch {
      hosts => [ "localhost:9200" ]
      # Monthly indices shared by all VulnWhisperer scanner pipelines.
      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
    }
  }
}

View File

@ -1,14 +0,0 @@
# Author: Austin Taylor
# Email: email@austintaylor.io
# Last Update: 05/21/2017
# Creates logstash-nessus
output {
if "nessus" in [tags] or [type] == "nessus" {
#stdout { codec => rubydebug }
elasticsearch {
hosts => "localhost:9200"
index => "logstash-nessus-%{+YYYY.MM}"
}
}
}

View File

@ -4,3 +4,4 @@ pytz==2017.2
Requests==2.18.3 Requests==2.18.3
qualysapi==4.1.0 qualysapi==4.1.0
lxml==4.1.1 lxml==4.1.1
bs4

View File

@ -4,7 +4,7 @@ from setuptools import setup, find_packages
setup( setup(
name='VulnWhisperer', name='VulnWhisperer',
version='1.2.0', version='1.5.0',
packages=find_packages(), packages=find_packages(),
url='https://github.com/austin-taylor/vulnwhisperer', url='https://github.com/austin-taylor/vulnwhisperer',
license="""MIT License license="""MIT License

View File

@ -0,0 +1,196 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'Austin Taylor'
import datetime as dt
import io
import pandas as pd
import requests
from bs4 import BeautifulSoup
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from ..utils.cli import bcolors
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
class OpenVAS_API(object):
OMP = '/omp'
def __init__(self,
hostname=None,
port=None,
username=None,
password=None,
report_format_id=None,
verbose=True):
if username is None or password is None:
raise Exception('ERROR: Missing username or password.')
self.username = username
self.password = password
self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
self.verbose = verbose
self.processed_reports = 0
self.report_format_id = report_format_id
self.headers = {
'Origin': self.base,
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'en-US,en;q=0.8',
'User-Agent': 'VulnWhisperer for OpenVAS',
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Cache-Control': 'max-age=0',
'Referer': self.base,
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive',
}
self.login()
self.openvas_reports = self.get_reports()
self.report_formats = self.get_report_formats()
def vprint(self, msg):
if self.verbose:
print(msg)
def login(self):
resp = self.get_token()
if resp.status_code is 200:
xml_response = BeautifulSoup(resp.content, 'lxml')
self.token = xml_response.find(attrs={'id': 'gsa-token'}).text
self.cookies = resp.cookies.get_dict()
else:
raise Exception('[FAIL] Could not login to OpenVAS')
def request(self, url, data=None, params=None, headers=None, cookies=None, method='POST', download=False,
json=False):
if headers is None:
headers = self.headers
if cookies is None:
cookies = self.cookies
timeout = 0
success = False
url = self.base + url
methods = {'GET': requests.get,
'POST': requests.post,
'DELETE': requests.delete}
while (timeout <= 10) and (not success):
data = methods[method](url,
data=data,
headers=self.headers,
params=params,
cookies=cookies,
verify=False)
if data.status_code == 401:
try:
self.login()
timeout += 1
self.vprint('[INFO] Token refreshed')
except Exception as e:
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
else:
success = True
if json:
data = data.json()
if download:
return data.content
return data
def get_token(self):
data = [
('cmd', 'login'),
('text', '/omp?r=1'),
('login', self.username),
('password', self.password),
]
token = requests.post(self.base + self.OMP, data=data, verify=False)
return token
def get_report_formats(self):
params = (
('cmd', 'get_report_formats'),
('token', self.token)
)
self.vprint('{info} Retrieving available report formats'.format(info=bcolors.INFO))
data = self.request(url=self.OMP, method='GET', params=params)
bs = BeautifulSoup(data.content, "lxml")
table_body = bs.find('tbody')
rows = table_body.find_all('tr')
format_mapping = {}
for row in rows:
cols = row.find_all('td')
for x in cols:
for y in x.find_all('a'):
if y.get_text() != '':
format_mapping[y.get_text()] = \
[h.split('=')[1] for h in y['href'].split('&') if 'report_format_id' in h][0]
return format_mapping
def get_reports(self, complete=True):
print('{info} Retreiving OpenVAS report data...'.format(info=bcolors.INFO))
params = (('cmd', 'get_reports'),
('token', self.token),
('max_results', 1),
('ignore_pagination', 1),
('filter', 'apply_overrides=1 min_qod=70 autofp=0 first=1 rows=0 levels=hml sort-reverse=severity'),
)
reports = self.request(self.OMP, params=params, method='GET')
soup = BeautifulSoup(reports.text, 'lxml')
data = []
links = []
table = soup.find('table', attrs={'class': 'gbntable'})
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
links.extend([a['href'] for a in row.find_all('a', href=True) if 'get_report' in str(a)])
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
report = pd.DataFrame(data, columns=['date', 'status', 'task', 'scan_severity', 'high', 'medium', 'low', 'log',
'false_pos'])
if report.shape[0] != 0:
report['links'] = links
report['report_ids'] = report.links.str.extract('.*report_id=([a-z-0-9]*)', expand=False)
report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)
else:
raise Exception("Could not retrieve OpenVAS Reports - Please check your settings and try again")
report['links'] = links
report['report_ids'] = report.links.str.extract('.*report_id=([a-z-0-9]*)', expand=False)
report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)
if complete:
report = report[report.status == 'Done']
severity_extraction = report.scan_severity.str.extract('([0-9.]*) \(([\w]+)\)', expand=False)
severity_extraction.columns = ['scan_highest_severity', 'severity_rate']
report_with_severity = pd.concat([report, severity_extraction], axis=1)
return report_with_severity
def process_report(self, report_id):
    """Download one OpenVAS report as CSV and join it with the report metadata.

    Fetches the CSV export for *report_id* over OMP, loads it into a
    DataFrame, tags every row with the report id, bumps the processed
    counter, and merges against the previously collected
    ``self.openvas_reports`` frame on 'report_ids'.
    """
    params = (
        ('token', self.token),
        ('cmd', 'get_report'),
        ('report_id', report_id),
        ('filter', 'apply_overrides=0 min_qod=70 autofp=0 levels=hml first=1 rows=0 sort-reverse=severity'),
        ('ignore_pagination', '1'),
        ('report_format_id', '{report_format_id}'.format(report_format_id=self.report_formats['CSV Results'])),
        ('submit', 'Download'),
    )
    print('Retrieving %s' % report_id)
    response = self.request(self.OMP, params=params, method='GET')

    # The download endpoint returns raw CSV text; feed it to pandas as bytes.
    results = pd.read_csv(io.BytesIO(response.text.encode('utf-8')))
    results['report_ids'] = report_id
    self.processed_reports += 1

    merged = pd.merge(results, self.openvas_reports, on='report_ids')
    return merged.reset_index().drop('index', axis=1)

View File

@ -25,7 +25,6 @@ class qualysWhisperAPI(object):
DELETE_REPORT = '/delete/was/report/{report_id}' DELETE_REPORT = '/delete/was/report/{report_id}'
GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}' GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
QPS_REST_3 = '/qps/rest/3.0' QPS_REST_3 = '/qps/rest/3.0'
REPORT_DETAILS = '/get/was/report/{report_id}' REPORT_DETAILS = '/get/was/report/{report_id}'
REPORT_STATUS = '/status/was/report/{report_id}' REPORT_STATUS = '/status/was/report/{report_id}'
REPORT_CREATE = '/create/was/report' REPORT_CREATE = '/create/was/report'
@ -825,7 +824,12 @@ class qualysScanReport:
return vuln_ready return vuln_ready
maxInt = sys.maxsize maxInt = int(4000000)
maxSize = sys.maxsize
if maxSize > maxInt and type(maxSize) == int:
maxInt = maxSize
decrement = True decrement = True
while decrement: while decrement:

View File

@ -5,6 +5,7 @@ __author__ = 'Austin Taylor'
from base.config import vwConfig from base.config import vwConfig
from frameworks.nessus import NessusAPI from frameworks.nessus import NessusAPI
from frameworks.qualys import qualysScanReport from frameworks.qualys import qualysScanReport
from frameworks.openvas import OpenVAS_API
from utils.cli import bcolors from utils.cli import bcolors
import pandas as pd import pandas as pd
from lxml import objectify from lxml import objectify
@ -44,6 +45,7 @@ class vulnWhispererBase(object):
self.purge = purge self.purge = purge
self.develop = develop self.develop = develop
if config is not None: if config is not None:
self.config = vwConfig(config_in=config) self.config = vwConfig(config_in=config)
self.enabled = self.config.get(self.CONFIG_SECTION, 'enabled') self.enabled = self.config.get(self.CONFIG_SECTION, 'enabled')
@ -64,6 +66,14 @@ class vulnWhispererBase(object):
self.database = \ self.database = \
os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.abspath(os.path.join(os.path.dirname(__file__),
'database', db_name)) 'database', db_name))
if not os.path.exists(self.db_path):
os.makedirs(self.db_path)
self.vprint('{info} Creating directory {dir}'.format(info=bcolors.INFO, dir=self.db_path))
if not os.path.exists(self.database):
with open(self.database, 'w'):
self.vprint('{info} Creating file {dir}'.format(info=bcolors.INFO, dir=self.database))
pass
try: try:
self.conn = sqlite3.connect(self.database) self.conn = sqlite3.connect(self.database)
@ -152,6 +162,18 @@ class vulnWhispererBase(object):
results = [] results = []
return results return results
def directory_check(self):
    """Ensure the scan output directory (self.write_path) exists, creating it if missing."""
    if not os.path.exists(self.write_path):
        os.makedirs(self.write_path)
        self.vprint('{info} Directory created at {scan} - Skipping creation'.format(
            scan=self.write_path, info=bcolors.INFO))
    else:
        # Directory already present; nothing to create.
        # (removed a no-op `os.path.exists(self.write_path)` expression
        # statement whose result was discarded)
        self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
            scan=self.write_path, info=bcolors.INFO))
class vulnWhispererNessus(vulnWhispererBase): class vulnWhispererNessus(vulnWhispererBase):
CONFIG_SECTION = 'nessus' CONFIG_SECTION = 'nessus'
@ -461,24 +483,13 @@ class vulnWhispererQualys(vulnWhispererBase):
password=None, password=None,
): ):
super(vulnWhispererQualys, self).__init__(config=config, ) super(vulnWhispererQualys, self).__init__(config=config)
self.qualys_scan = qualysScanReport(config=config) self.qualys_scan = qualysScanReport(config=config)
self.latest_scans = self.qualys_scan.qw.get_all_scans() self.latest_scans = self.qualys_scan.qw.get_all_scans()
self.directory_check() self.directory_check()
self.scans_to_process = None self.scans_to_process = None
def directory_check(self):
if not os.path.exists(self.write_path):
os.makedirs(self.write_path)
self.vprint('{info} Directory created at {scan} - Skipping creation'.format(
scan=self.write_path, info=bcolors.INFO))
else:
os.path.exists(self.write_path)
self.vprint('{info} Directory already exist for {scan} - Skipping creation'.format(
scan=self.write_path, info=bcolors.INFO))
def whisper_reports(self, def whisper_reports(self,
report_id=None, report_id=None,
launched_date=None, launched_date=None,
@ -531,7 +542,8 @@ class vulnWhispererQualys(vulnWhispererBase):
vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id)) vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id))
vuln_ready.to_csv(relative_path_name, index=False, header=True) # add when timestamp occured vuln_ready['scan_name'] = scan_name
vuln_ready['scan_reference'] = scan_reference
vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True) vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
record_meta = ( record_meta = (
@ -600,6 +612,139 @@ class vulnWhispererQualys(vulnWhispererBase):
exit(0) exit(0)
class vulnWhispererOpenVAS(vulnWhispererBase):
    """Pulls completed OpenVAS reports, normalizes their columns to the
    shared VulnWhisperer schema, writes each report to disk, and records
    processed report ids in the tracker database.
    """

    CONFIG_SECTION = 'openvas'

    # Maps OpenVAS CSV export column headers to the normalized field names
    # used by the other scanner modules (nessus/qualys).
    COLUMN_MAPPING = {'IP': 'asset',
                      'Hostname': 'hostname',
                      'Port': 'port',
                      'Port Protocol': 'protocol',
                      'CVSS': 'cvss',
                      'Severity': 'severity',
                      'Solution Type': 'category',
                      'NVT Name': 'plugin_name',
                      'Summary': 'synopsis',
                      'Specific Result': 'plugin_output',
                      'NVT OID': 'nvt_oid',
                      'Task ID': 'task_id',
                      'Task Name': 'task_name',
                      'Timestamp': 'timestamp',
                      'Result ID': 'result_id',
                      'Impact': 'description',
                      'Solution': 'solution',
                      'Affected Software/OS': 'affected_software',
                      'Vulnerability Insight': 'vulnerability_insight',
                      'Vulnerability Detection Method': 'vulnerability_detection_method',
                      'Product Detection Result': 'product_detection_result',
                      'BIDs': 'bids',
                      'CERTs': 'certs',
                      'Other References': 'see_also'
                      }

    def __init__(
            self,
            config=None,
            db_name='report_tracker.db',
            purge=False,
            verbose=None,
            debug=False,
            username=None,
            password=None,
    ):
        # Base class loads config, hostname/username/password and the
        # sqlite tracker DB for this CONFIG_SECTION.
        super(vulnWhispererOpenVAS, self).__init__(config=config)

        self.port = int(self.config.get(self.CONFIG_SECTION, 'port'))
        self.develop = True
        self.purge = purge
        self.scans_to_process = None
        self.openvas_api = OpenVAS_API(hostname=self.hostname,
                                       port=self.port,
                                       username=self.username,
                                       password=self.password)

    def whisper_reports(self, output_format='json', launched_date=None, report_id=None, cleanup=True):
        """Fetch a single OpenVAS report, write it to disk and track it.

        If the output file already exists, only the tracker database is
        updated; otherwise the report is downloaded, normalized via
        COLUMN_MAPPING, and written as line-delimited JSON.

        Returns None (kept for interface compatibility with callers).
        """
        report = None
        if report_id:
            print('Processing report ID: %s' % report_id)

            scan_name = report_id.replace('-', '')
            report_name = 'openvas_scan_{scan_name}_{last_updated}.{extension}'.format(
                scan_name=scan_name,
                last_updated=launched_date,
                extension=output_format)
            relative_path_name = self.path_check(report_name)
            scan_reference = report_id
            # BUGFIX: was a Python 2 print statement (`print relative_path_name`),
            # which is a syntax error on Python 3; parenthesized form works on both.
            print(relative_path_name)

            if os.path.isfile(relative_path_name):
                # TODO Possibly make this optional to sync directories
                file_length = len(open(relative_path_name).readlines())
                record_meta = (
                    scan_name,
                    scan_reference,
                    launched_date,
                    report_name,
                    time.time(),
                    file_length,
                    self.CONFIG_SECTION,
                    report_id,
                    1,
                )
                self.record_insert(record_meta)
                # BUGFIX: the original format string had no {filename}
                # placeholder even though filename= was supplied, so the
                # message printed "(unknown)" instead of the path. Also
                # removed a second, completely unused record_meta tuple
                # that was rebuilt after the insert.
                self.vprint('{info} File {filename} already exist! Updating database'.format(
                    info=bcolors.INFO, filename=relative_path_name))
            else:
                vuln_ready = self.openvas_api.process_report(report_id=report_id)
                vuln_ready['scan_name'] = scan_name
                vuln_ready['scan_reference'] = report_id
                vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
                vuln_ready.port = vuln_ready.port.fillna(0).astype(int)
                vuln_ready.fillna('', inplace=True)
                if output_format == 'json':
                    with open(relative_path_name, 'w') as f:
                        f.write(vuln_ready.to_json(orient='records', lines=True))
                # BUGFIX: record newly processed reports too; otherwise
                # identify_scans_to_process (which filters on self.uuids
                # from the tracker DB) would re-download them on every run.
                record_meta = (
                    scan_name,
                    scan_reference,
                    launched_date,
                    report_name,
                    time.time(),
                    vuln_ready.shape[0],
                    self.CONFIG_SECTION,
                    report_id,
                    1,
                )
                self.record_insert(record_meta)
                print('{success} - Report written to %s'.format(success=bcolors.SUCCESS)
                      % report_name)

        return report

    def identify_scans_to_process(self):
        """Select reports whose ids are not already in the tracker DB (self.uuids)."""
        if self.uuids:
            self.scans_to_process = self.openvas_api.openvas_reports[
                ~self.openvas_api.openvas_reports.report_ids.isin(self.uuids)]
        else:
            self.scans_to_process = self.openvas_api.openvas_reports
        self.vprint('{info} Identified {new} scans to be processed'.format(info=bcolors.INFO,
                                                                           new=len(self.scans_to_process)))

    def process_openvas_scans(self):
        """Process every outstanding OpenVAS report, then close the DB and exit."""
        counter = 0
        self.identify_scans_to_process()
        if self.scans_to_process.shape[0]:
            for scan in self.scans_to_process.iterrows():
                counter += 1
                info = scan[1]
                print(
                    '[INFO] Processing %s/%s - Report ID: %s' % (counter, len(self.scans_to_process), info['report_ids']))
                self.whisper_reports(report_id=info['report_ids'],
                                     launched_date=info['epoch'])
            self.vprint('{info} Processing complete!'.format(info=bcolors.INFO))
        else:
            self.vprint('{info} No new scans to process. Exiting...'.format(info=bcolors.INFO))
        self.conn.close()
        # NOTE: exiting the interpreter here matches the behavior of the
        # other scanner modules in this file (e.g. vulnWhispererQualys).
        exit(0)
@ -631,3 +776,7 @@ class vulnWhisperer(object):
elif self.profile == 'qualys': elif self.profile == 'qualys':
vw = vulnWhispererQualys(config=self.config) vw = vulnWhispererQualys(config=self.config)
vw.process_web_assets() vw.process_web_assets()
elif self.profile == 'openvas':
vw_openvas = vulnWhispererOpenVAS(config=self.config)
vw_openvas.process_openvas_scans()