Compare commits
166 Commits
Author | SHA1 | Date | |
---|---|---|---|
7f2c59f531 | |||
3ac9a8156a | |||
9a08acb2d6 | |||
38d2eec065 | |||
9b10711d34 | |||
9049b1ff0f | |||
d1d679b12f | |||
8ca1c3540d | |||
e4e9ed7f28 | |||
0982e26197 | |||
9fc9af37f7 | |||
3984c879cd | |||
f83a5d89a3 | |||
1400cacfcb | |||
6f96536145 | |||
4a60306bdd | |||
d509c03d68 | |||
f6745b00fd | |||
21b2a03b36 | |||
a658b7abab | |||
f21d3a3f64 | |||
53b0b27cb2 | |||
d8e813ff5a | |||
a0de072394 | |||
13dbc79b27 | |||
42e72c36dd | |||
554b739146 | |||
54337d3bfa | |||
8b63aa4fbc | |||
5362d6f9e8 | |||
645e5707a4 | |||
03a2125dd1 | |||
8e85eb0981 | |||
136cc3ac61 | |||
0c6611711c | |||
f3eb2fbda1 | |||
124cbf2753 | |||
13a01fbfd0 | |||
bbfe7ad71b | |||
330e90c7a0 | |||
f9af977145 | |||
1a2091ac54 | |||
b2c230f43b | |||
cdaf743435 | |||
59b688a117 | |||
009ccc24f6 | |||
3141dcabd2 | |||
02afd9c24d | |||
d70238fbeb | |||
36b028a78a | |||
16b04d7763 | |||
4ea72650df | |||
a1b9ff6273 | |||
bbad599a73 | |||
882a4be275 | |||
2bf8c2be8b | |||
2b057f290b | |||
4359478e3d | |||
ff50354bf9 | |||
0ab53890ca | |||
ee6d61605b | |||
ada256cc46 | |||
8215f4e938 | |||
30f966f354 | |||
8af1ddd9e9 | |||
c3850247c9 | |||
745e4b3a0b | |||
c80383aaa6 | |||
e128d8c753 | |||
66987810df | |||
6e500a4829 | |||
92d6a7788c | |||
6ce3a254e4 | |||
9fe048fc5f | |||
67f9017f92 | |||
03d7954da9 | |||
ff02340e32 | |||
45f8ea55d3 | |||
05608b29bb | |||
4d6ad51b50 | |||
b953e1d97b | |||
8f536ed2ac | |||
c5115fba00 | |||
ce529dd4f9 | |||
3d34916e4c | |||
690841c4df | |||
5f3b02aa10 | |||
646a5f94ba | |||
c33fbb256a | |||
a2a15094b4 | |||
2bd32fd9dc | |||
5c7137a606 | |||
78d9a077f5 | |||
732237ad5a | |||
4a78387ce6 | |||
64751c47dd | |||
bec9cdd4d0 | |||
d7fc63c952 | |||
de62400730 | |||
0ba3cdf579 | |||
e03860d087 | |||
dc7ad082be | |||
0cd2e28ccd | |||
07a99eda54 | |||
469f3fee81 | |||
bb776bd9f2 | |||
55c0713baf | |||
caa64b4ca2 | |||
fb9f86634e | |||
24417cd1bb | |||
34638bcf42 | |||
c041693018 | |||
d03ba15772 | |||
a274341d23 | |||
ee1e79dcd5 | |||
2997e2d2b6 | |||
abe8925ebc | |||
b26ff7d9c9 | |||
cec794daa8 | |||
dd3a8bb649 | |||
bf537df475 | |||
4f6003066e | |||
61ba3f0804 | |||
10f8809723 | |||
796db314f3 | |||
d9ff8532ee | |||
dc2491e8b0 | |||
a9a21c2e90 | |||
16369f0e40 | |||
2d8a50d1ad | |||
4657241b70 | |||
c1c4a45562 | |||
fcd938b75a | |||
f8905e8c4b | |||
ac61390b88 | |||
eb22d9475c | |||
35b7093762 | |||
fedbb18bb2 | |||
8808b9e458 | |||
b108c1fbeb | |||
6ea508503d | |||
39662cc4cc | |||
a63f69b3d4 | |||
8be2527ff4 | |||
d35645363d | |||
df03e7b928 | |||
6a29cb7b84 | |||
dab91faff8 | |||
db5ab0a265 | |||
11d2e91321 | |||
be3938daa5 | |||
e8c7c5e13e | |||
e645c33eea | |||
8cd4e0cc19 | |||
aed171de81 | |||
72e6043b09 | |||
68116635a2 | |||
4df413d11f | |||
3b64f7c27d | |||
034b204255 | |||
ccf774099f | |||
34d4821c24 | |||
0d96664209 | |||
35c5119390 | |||
69230af210 | |||
14a451a492 |
3
.gitmodules
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
[submodule "qualysapi"]
|
||||||
|
path = deps/qualysapi
|
||||||
|
url = git@github.com:austin-taylor/qualysapi.git
|
24
.travis.yml
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
group: travis_latest
|
||||||
|
language: python
|
||||||
|
cache: pip
|
||||||
|
python:
|
||||||
|
- 2.7
|
||||||
|
|
||||||
|
# - 3.6
|
||||||
|
#matrix:
|
||||||
|
# allow_failures:
|
||||||
|
# - python: 3.6 - Commenting out testing for Python 3.6 until ready
|
||||||
|
|
||||||
|
install:
|
||||||
|
- pip install -r requirements.txt
|
||||||
|
- pip install flake8 # pytest # add another testing frameworks later
|
||||||
|
before_script:
|
||||||
|
# stop the build if there are Python syntax errors or undefined names
|
||||||
|
- flake8 . --count --exclude=deps/qualysapi --select=E901,E999,F821,F822,F823 --show-source --statistics
|
||||||
|
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||||
|
- flake8 . --count --exit-zero --exclude=deps/qualysapi --max-complexity=10 --max-line-length=127 --statistics
|
||||||
|
script:
|
||||||
|
- true # pytest --capture=sys # add other tests here
|
||||||
|
notifications:
|
||||||
|
on_success: change
|
||||||
|
on_failure: change # `always` will be the setting once code changes slow down
|
214
LICENSE
@ -1,21 +1,201 @@
|
|||||||
MIT License
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
Copyright (c) 2017 Austin Taylor
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
1. Definitions.
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
copies or substantial portions of the Software.
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
the copyright owner that is granting the License.
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
other entities that control, are controlled by, or are under common
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
control with that entity. For the purposes of this definition,
|
||||||
SOFTWARE.
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
210
README.md
@ -1,2 +1,208 @@
|
|||||||
# VulnWhisperer
|
<p align="center"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vuln_whisperer_logo_s.png" width="400px"></p>
|
||||||
Create actionable data from your Vulnerability Scans
|
<p align="center"> <i>Create <u><b>actionable data</b></u> from your vulnerability scans </i> </p>
|
||||||
|
|
||||||
|
<p align="center" style="width:400px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/vulnWhispererWebApplications.png" style="width:400px"></p>
|
||||||
|
|
||||||
|
|
||||||
|
VulnWhisperer is a vulnerability data and report aggregator. VulnWhisperer will pull all the reports
|
||||||
|
and create a file with a unique filename which is then fed into logstash. Logstash extracts data from the filename and tags all of the information inside the report (see logstash_vulnwhisp.conf file). Data is then shipped to elasticsearch to be indexed.
|
||||||
|
|
||||||
|
[](https://travis-ci.org/austin-taylor/VulnWhisperer)
|
||||||
|
[](http://choosealicense.com/licenses/mit/)
|
||||||
|
[](https://twitter.com/VulnWhisperer)
|
||||||
|
|
||||||
|
|
||||||
|
Currently Supports
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
### Vulnerability Frameworks
|
||||||
|
|
||||||
|
- [X] [Nessus (v6 & **v7**)](https://www.tenable.com/products/nessus/nessus-professional)
|
||||||
|
- [X] [Qualys Web Applications](https://www.qualys.com/apps/web-app-scanning/)
|
||||||
|
- [X] [Qualys Vulnerability Management (Need license)](https://www.qualys.com/apps/vulnerability-management/)
|
||||||
|
- [X] [OpenVAS](http://www.openvas.org/)
|
||||||
|
- [X] [Tenable.io](https://www.tenable.com/products/tenable-io)
|
||||||
|
- [ ] [Nexpose](https://www.rapid7.com/products/nexpose/)
|
||||||
|
- [ ] [Insight VM](https://www.rapid7.com/products/insightvm/)
|
||||||
|
- [ ] [NMAP](https://nmap.org/)
|
||||||
|
- [ ] More to come
|
||||||
|
|
||||||
|
Getting Started
|
||||||
|
===============
|
||||||
|
|
||||||
|
1) Follow the [install requirements](#installreq)
|
||||||
|
2) Fill out the section you want to process in <a href="https://github.com/austin-taylor/VulnWhisperer/blob/master/configs/frameworks_example.ini">example.ini file</a>
|
||||||
|
3) Modify the IP settings in the <a href="https://github.com/austin-taylor/VulnWhisperer/tree/master/logstash">logstash files to accomodate your environment</a> and import them to your logstash conf directory (default is /etc/logstash/conf.d/)
|
||||||
|
4) Import the <a href="https://github.com/austin-taylor/VulnWhisperer/tree/master/kibana/vuln_whisp_kibana">kibana visualizations</a>
|
||||||
|
5) [Run Vulnwhisperer](#run)
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
-------------
|
||||||
|
####
|
||||||
|
* ElasticStack 5.x
|
||||||
|
* Python 2.7
|
||||||
|
* Vulnerability Scanner
|
||||||
|
* Optional: Message broker such as Kafka or RabbitMQ
|
||||||
|
|
||||||
|
<a id="installreq">Install Requirements-VulnWhisperer(may require sudo)</a>
|
||||||
|
--------------------
|
||||||
|
**First, install requirement dependencies**
|
||||||
|
```shell
|
||||||
|
|
||||||
|
sudo apt-get install zlib1g-dev libxml2-dev libxslt1-dev
|
||||||
|
```
|
||||||
|
|
||||||
|
**Second, install dependant modules**
|
||||||
|
```python
|
||||||
|
|
||||||
|
cd deps/qualysapi
|
||||||
|
python setup.py install
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
**Third, install requirements**
|
||||||
|
|
||||||
|
```python
|
||||||
|
pip install -r /path/to/VulnWhisperer/requirements.txt
|
||||||
|
cd /path/to/VulnWhisperer
|
||||||
|
python setup.py install
|
||||||
|
```
|
||||||
|
|
||||||
|
Now you're ready to pull down scans. (see <a href="#run">run section</a>)
|
||||||
|
|
||||||
|
|
||||||
|
Install Requirements-ELK Node **\*SAMPLE\***
|
||||||
|
--------------------
|
||||||
|
The following instructions should be utilized as a **Sample Guide** in the absence of an existing ELK Cluster/Node. This will cover a Debian example install guide of a stand-alone node of Elasticsearch & Kibana.
|
||||||
|
|
||||||
|
While Logstash is included in this install guide, it it recommended that a seperate host pulling the VulnWhisperer data is utilized with Logstash to ship the data to the Elasticsearch node.
|
||||||
|
|
||||||
|
*Please note there is a docker-compose.yml available as well.*
|
||||||
|
|
||||||
|
**Debian:** *(https://www.elastic.co/guide/en/elasticsearch/reference/5.6/deb.html)*
|
||||||
|
```shell
|
||||||
|
sudo apt-get install -y default-jre
|
||||||
|
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
|
||||||
|
sudo apt-get install apt-transport-https
|
||||||
|
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-5.x.list
|
||||||
|
sudo apt-get update && sudo apt-get install elasticsearch kibana logstash
|
||||||
|
sudo /bin/systemctl daemon-reload
|
||||||
|
sudo /bin/systemctl enable elasticsearch.service
|
||||||
|
sudo /bin/systemctl enable kibana.service
|
||||||
|
sudo /bin/systemctl enable logstash.service
|
||||||
|
```
|
||||||
|
|
||||||
|
**Elasticsearch & Kibana Sample Config Notes**
|
||||||
|
|
||||||
|
Utilizing your favorite text editor:
|
||||||
|
* Grab your host IP and change the IP of your /etc/elasticsearch/elasticsearch.yml file. (This defaults to 'localhost')
|
||||||
|
* Validate Elasticsearch is set to run on port 9200 (Default)
|
||||||
|
* Grab your host IP and change the IP of your /etc/kibana/kibana.yml file. (This defaults to 'localhost') *Validate that Kibana is pointing to the correct Elasticsearch IP (This was set in the previous step)*
|
||||||
|
* Validate Kibana is set to run on port 5601 (Default)
|
||||||
|
|
||||||
|
*Start elasticsearch and validate they are running/communicating with one another:*
|
||||||
|
```shell
|
||||||
|
sudo service elasticsearch start
|
||||||
|
sudo service kibana start
|
||||||
|
```
|
||||||
|
OR
|
||||||
|
```shell
|
||||||
|
sudo systemctl start elasticsearch.service
|
||||||
|
sudo systemctl start kibana.service
|
||||||
|
```
|
||||||
|
|
||||||
|
**Logstash Sample Config Notes**
|
||||||
|
|
||||||
|
* Copy/Move the Logstash .conf files from */VulnWhisperer/logstash/* to */etc/logstash/conf.d/*
|
||||||
|
* Validate the Logstash.conf files *input* contains the correct location of VulnWhisper Scans in the *input.file.path* directory identified below:
|
||||||
|
```
|
||||||
|
input {
|
||||||
|
file {
|
||||||
|
path => "/opt/vulnwhisperer/nessus/**/*"
|
||||||
|
start_position => "beginning"
|
||||||
|
tags => "nessus"
|
||||||
|
type => "nessus"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
* Validate the Logstash.conf files *output* contains the correct Elasticsearch IP set during the previous step above: (This will default to localhost)
|
||||||
|
```
|
||||||
|
output {
|
||||||
|
if "nessus" in [tags] or [type] == "nessus" {
|
||||||
|
#stdout { codec => rubydebug }
|
||||||
|
elasticsearch {
|
||||||
|
hosts => [ "localhost:9200" ]
|
||||||
|
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
* Validate logstash has the correct file permissions to read the location of the VulnWhisperer Scans
|
||||||
|
|
||||||
|
Once configured run Logstash: (Running Logstash as a service will pick up all the files in */etc/logstash/conf.d/* If you would like to run only one logstash file please reference the command below):
|
||||||
|
|
||||||
|
Logstash as a service:
|
||||||
|
```shell
|
||||||
|
sudo service logstash start
|
||||||
|
```
|
||||||
|
*OR*
|
||||||
|
```shell
|
||||||
|
sudo systemctl start logstash.service
|
||||||
|
```
|
||||||
|
Single Logstash file:
|
||||||
|
```shell
|
||||||
|
sudo /usr/share/logstash/bin/logstash --path.settings /etc/logstash/ -f /etc/logstash/conf.d/1000_nessus_process_file.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
-----
|
||||||
|
|
||||||
|
There are a few configuration steps to setting up VulnWhisperer:
|
||||||
|
* Configure Ini file
|
||||||
|
* Setup Logstash File
|
||||||
|
* Import ElasticSearch Templates
|
||||||
|
* Import Kibana Dashboards
|
||||||
|
|
||||||
|
<a href="https://github.com/austin-taylor/VulnWhisperer/blob/master/configs/frameworks_example.ini">example.ini file</a>
|
||||||
|
<p align="left" style="width:200px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/config_example.png" style="width:200px"></p>
|
||||||
|
|
||||||
|
|
||||||
|
<a id="run">Run</a>
|
||||||
|
-----
|
||||||
|
To run, fill out the configuration file with your vulnerability scanner settings. Then you can execute from the command line.
|
||||||
|
```python
|
||||||
|
|
||||||
|
vuln_whisperer -c configs/example.ini -s nessus
|
||||||
|
or
|
||||||
|
vuln_whisperer -c configs/example.ini -s qualys
|
||||||
|
|
||||||
|
```
|
||||||
|
<p align="center" style="width:300px"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/running_vuln_whisperer.png" style="width:400px"></p>
|
||||||
|
Next you'll need to import the visualizations into Kibana and setup your logstash config. A more thorough README is underway with setup instructions.
|
||||||
|
|
||||||
|
Running Nightly
|
||||||
|
---------------
|
||||||
|
If you're running linux, be sure to setup a cronjob to remove old files that get stored in the database. Be sure to change .csv if you're using json.
|
||||||
|
|
||||||
|
Setup crontab -e with the following config (modify to your environment) - this will run vulnwhisperer each night at 0130:
|
||||||
|
|
||||||
|
`00 1 * * * /usr/bin/find /opt/vulnwhisp/ -type f -name '*.csv' -ctime +3 -exec rm {} \;`
|
||||||
|
|
||||||
|
`30 1 * * * /usr/local/bin/vuln_whisperer -c /opt/vulnwhisp/configs/example.ini`
|
||||||
|
|
||||||
|
|
||||||
|
_For windows, you may need to type the full path of the binary in vulnWhisperer located in the bin directory._
|
||||||
|
|
||||||
|
Video Walkthrough -- Featured on ElasticWebinar
|
||||||
|
----------------------------------------------
|
||||||
|
<a href="http://www.youtube.com/watch?feature=player_embedded&v=zrEuTtRUfNw?start=30
|
||||||
|
" target="_blank"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/elastic_webinar.png"
|
||||||
|
alt="Elastic presentation on VulnWhisperer" border="10" /></a>
|
||||||
|
|
||||||
|
Credit
|
||||||
|
------
|
||||||
|
Big thank you to <a href="https://github.com/SMAPPER">Justin Henderson</a> for his contributions to vulnWhisperer!
|
||||||
|
|
||||||
|
|
||||||
|
AS SEEN ON TV
|
||||||
|
-------------
|
||||||
|
<p align="center" style="width:400px"><a href="https://twitter.com/MalwareJake/status/935654519471353856"><img src="https://github.com/austin-taylor/vulnwhisperer/blob/master/docs/source/as_seen_on_tv.png" style="width:400px"></a></p>
|
||||||
|
1
_config.yml
Normal file
@ -0,0 +1 @@
|
|||||||
|
theme: jekyll-theme-leap-day
|
@ -1,31 +1,48 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
__author__ = 'Austin Taylor'
|
||||||
|
|
||||||
|
|
||||||
#Written by Austin Taylor
|
|
||||||
#www.austintaylor.io
|
|
||||||
|
|
||||||
from vulnwhisp.vulnwhisp import vulnWhisperer
|
from vulnwhisp.vulnwhisp import vulnWhisperer
|
||||||
from vulnwhisp.utils.cli import bcolors
|
from vulnwhisp.utils.cli import bcolors
|
||||||
|
import os
|
||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
def isFileValid(parser, arg):
|
||||||
|
if not os.path.exists(arg):
|
||||||
|
parser.error("The file %s does not exist!" % arg)
|
||||||
|
else:
|
||||||
|
return arg
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description=""" VulnWhisperer is designed to create actionable data from\
|
parser = argparse.ArgumentParser(description=""" VulnWhisperer is designed to create actionable data from\
|
||||||
your vulnerability scans through aggregation of historical scans.""")
|
your vulnerability scans through aggregation of historical scans.""")
|
||||||
parser.add_argument('-c', '--config', dest='config', required=False, default='frameworks.ini',
|
parser.add_argument('-c', '--config', dest='config', required=False, default='frameworks.ini',
|
||||||
help='Path of config file')
|
help='Path of config file', type=lambda x: isFileValid(parser, x.strip()))
|
||||||
|
parser.add_argument('-s', '--section', dest='section', required=False,
|
||||||
|
help='Section in config')
|
||||||
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=True,
|
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=True,
|
||||||
help='Prints status out to screen (defaults to True)')
|
help='Prints status out to screen (defaults to True)')
|
||||||
|
parser.add_argument('-u', '--username', dest='username', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS username')
|
||||||
|
parser.add_argument('-p', '--password', dest='password', required=False, default=None, type=lambda x: x.strip(), help='The NESSUS password')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if args.config and not args.section:
|
||||||
|
print('{red} ERROR: {error}{endc}'.format(red=bcolors.FAIL,
|
||||||
|
error='Please specify a section using -s. \
|
||||||
|
\nExample vuln_whisperer -c config.ini -s nessus',
|
||||||
|
endc=bcolors.ENDC))
|
||||||
|
else:
|
||||||
vw = vulnWhisperer(config=args.config,
|
vw = vulnWhisperer(config=args.config,
|
||||||
verbose=args.verbose)
|
profile=args.section,
|
||||||
|
verbose=args.verbose,
|
||||||
|
username=args.username,
|
||||||
|
password=args.password)
|
||||||
|
|
||||||
vw.whisper_nessus()
|
vw.whisper_vulnerabilities()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -34,6 +51,5 @@ def main():
|
|||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
@ -4,8 +4,74 @@ hostname=localhost
|
|||||||
port=8834
|
port=8834
|
||||||
username=nessus_username
|
username=nessus_username
|
||||||
password=nessus_password
|
password=nessus_password
|
||||||
write_path=/opt/vulnwhisp/scans
|
write_path=/opt/vulnwhisperer/nessus/
|
||||||
db_path=/opt/vulnwhisp/database
|
db_path=/opt/vulnwhisperer/database
|
||||||
trash=false
|
trash=false
|
||||||
verbose=true
|
verbose=true
|
||||||
|
|
||||||
|
[tenable]
|
||||||
|
enabled=true
|
||||||
|
hostname=cloud.tenable.com
|
||||||
|
port=443
|
||||||
|
username=tenable.io_username
|
||||||
|
password=tenable.io_password
|
||||||
|
write_path=/opt/vulnwhisperer/tenable/
|
||||||
|
db_path=/opt/vulnwhisperer/database
|
||||||
|
trash=false
|
||||||
|
verbose=true
|
||||||
|
|
||||||
|
[qualys]
|
||||||
|
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
|
||||||
|
enabled = true
|
||||||
|
hostname = qualysapi.qg2.apps.qualys.com
|
||||||
|
username = exampleuser
|
||||||
|
password = examplepass
|
||||||
|
write_path=/opt/vulnwhisperer/qualys/
|
||||||
|
db_path=/opt/vulnwhisperer/database
|
||||||
|
verbose=true
|
||||||
|
|
||||||
|
# Set the maximum number of retries each connection should attempt.
|
||||||
|
#Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
|
||||||
|
max_retries = 10
|
||||||
|
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
|
||||||
|
template_id = 126024
|
||||||
|
|
||||||
|
[qualys_vuln]
|
||||||
|
#Reference https://www.qualys.com/docs/qualys-was-api-user-guide.pdf to find your API
|
||||||
|
enabled = true
|
||||||
|
hostname = qualysapi.qg2.apps.qualys.com
|
||||||
|
username = exampleuser
|
||||||
|
password = examplepass
|
||||||
|
write_path=/opt/vulnwhisperer/qualys/
|
||||||
|
db_path=/opt/vulnwhisperer/database
|
||||||
|
verbose=true
|
||||||
|
|
||||||
|
# Set the maximum number of retries each connection should attempt.
|
||||||
|
#Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
|
||||||
|
max_retries = 10
|
||||||
|
# Template ID will need to be retrieved for each document. Please follow the reference guide above for instructions on how to get your template ID.
|
||||||
|
template_id = 126024
|
||||||
|
|
||||||
|
[openvas]
|
||||||
|
enabled = false
|
||||||
|
hostname = localhost
|
||||||
|
port = 4000
|
||||||
|
username = exampleuser
|
||||||
|
password = examplepass
|
||||||
|
write_path=/opt/vulnwhisperer/openvas/
|
||||||
|
db_path=/opt/vulnwhisperer/database
|
||||||
|
verbose=true
|
||||||
|
|
||||||
|
#[proxy]
|
||||||
|
; This section is optional. Leave it out if you're not using a proxy.
|
||||||
|
; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies
|
||||||
|
|
||||||
|
; proxy_protocol set to https, if not specified.
|
||||||
|
#proxy_url = proxy.mycorp.com
|
||||||
|
|
||||||
|
; proxy_port will override any port specified in proxy_url
|
||||||
|
#proxy_port = 8080
|
||||||
|
|
||||||
|
; proxy authentication
|
||||||
|
#proxy_username = proxyuser
|
||||||
|
#proxy_password = proxypass
|
||||||
|
47
deps/qualysapi/.gitignore
vendored
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Packages
|
||||||
|
*.egg
|
||||||
|
*.egg-info
|
||||||
|
dist
|
||||||
|
build
|
||||||
|
eggs
|
||||||
|
parts
|
||||||
|
bin
|
||||||
|
var
|
||||||
|
sdist
|
||||||
|
develop-eggs
|
||||||
|
.installed.cfg
|
||||||
|
lib
|
||||||
|
lib64
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
.coverage
|
||||||
|
.tox
|
||||||
|
nosetests.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
|
||||||
|
# Mr Developer
|
||||||
|
.mr.developer.cfg
|
||||||
|
.project
|
||||||
|
.pydevproject
|
||||||
|
|
||||||
|
# Mac
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
# Authenticatin configuration
|
||||||
|
*.qcrc
|
||||||
|
config.qcrc
|
||||||
|
config.ini
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
.qcrc.swp
|
2
deps/qualysapi/MANIFEST.in
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
include README.md
|
||||||
|
recursive-include examples *.py
|
107
deps/qualysapi/README.md
vendored
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
qualysapi
|
||||||
|
=========
|
||||||
|
|
||||||
|
Python package, qualysapi, that makes calling any Qualys API very simple. Qualys API versions v1, v2, & WAS & AM (asset management) are all supported.
|
||||||
|
|
||||||
|
My focus was making the API super easy to use. The only parameters the user needs to provide is the call, and data (optional). It automates the following:
|
||||||
|
* Automatically identifies API version through the call requested.
|
||||||
|
* Automatically identifies url from the above step.
|
||||||
|
* Automatically identifies http method as POST or GET for the request per Qualys documentation.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
=====
|
||||||
|
|
||||||
|
Check out the example scripts in the [/examples directory](https://github.com/paragbaxi/qualysapi/blob/master/examples/).
|
||||||
|
|
||||||
|
Example
|
||||||
|
-------
|
||||||
|
Detailed example found at [qualysapi-example.py](https://github.com/paragbaxi/qualysapi/blob/master/examples/qualysapi-example.py).
|
||||||
|
|
||||||
|
Sample example below.
|
||||||
|
|
||||||
|
```python
|
||||||
|
>>> import qualysapi
|
||||||
|
>>> a = qualysapi.connect()
|
||||||
|
QualysGuard Username: my_username
|
||||||
|
QualysGuard Password:
|
||||||
|
>>> print a.request('about.php')
|
||||||
|
<?xml version="1.0" encoding="UTF-8" ?>
|
||||||
|
<!DOCTYPE ABOUT SYSTEM "https://qualysapi.qualys.com/about.dtd">
|
||||||
|
<ABOUT>
|
||||||
|
<API-VERSION MAJOR="1" MINOR="4" />
|
||||||
|
<WEB-VERSION>7.10.61-1</WEB-VERSION>
|
||||||
|
<SCANNER-VERSION>7.1.10-1</SCANNER-VERSION>
|
||||||
|
<VULNSIGS-VERSION>2.2.475-2</VULNSIGS-VERSION>
|
||||||
|
</ABOUT>
|
||||||
|
<!-- Generated for username="my_username" date="2013-07-03T10:31:57Z" -->
|
||||||
|
<!-- CONFIDENTIAL AND PROPRIETARY INFORMATION. Qualys provides the QualysGuard Service "As Is," without any warranty of any kind. Qualys makes no warranty that the information contained in this report is complete or error-free. Copyright 2013, Qualys, Inc. //-->
|
||||||
|
```
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
Use pip to install:
|
||||||
|
```Shell
|
||||||
|
pip install qualysapi
|
||||||
|
```
|
||||||
|
|
||||||
|
NOTE: If you would like to experiment without installing globally, look into 'virtualenv'.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
------------
|
||||||
|
|
||||||
|
* requests (http://docs.python-requests.org)
|
||||||
|
* lxml (http://lxml.de/)
|
||||||
|
|
||||||
|
Tested successfully on Python 2.7.
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
=============
|
||||||
|
|
||||||
|
By default, the package will ask at the command prompt for username and password. By default, the package connects to the Qualys documented host (qualysapi.qualys.com).
|
||||||
|
|
||||||
|
You can override these settings and prevent yourself from typing credentials by doing any of the following:
|
||||||
|
|
||||||
|
1. By running the following Python, `qualysapi.connect(remember_me=True)`. This automatically generates a .qcrc file in your current working directory, scoping the configuration to that directory.
|
||||||
|
2. By running the following Python, `qualysapi.connect(remember_me_always=True)`. This automatically generates a .qcrc file in your home directory, scoping the configuratoin to all calls to qualysapi, regardless of the directory.
|
||||||
|
3. By creating a file called '.qcrc' (for Windows, the default filename is 'config.ini') in your home directory or directory of the Python script.
|
||||||
|
4. This supports multiple configuration files. Just add the filename in your call to qualysapi.connect('config.txt').
|
||||||
|
|
||||||
|
Example config file
|
||||||
|
-------------------
|
||||||
|
```INI
|
||||||
|
; Note, it should be possible to omit any of these entries.
|
||||||
|
|
||||||
|
[info]
|
||||||
|
hostname = qualysapi.serviceprovider.com
|
||||||
|
username = jerry
|
||||||
|
password = I<3Elaine
|
||||||
|
|
||||||
|
# Set the maximum number of retries each connection should attempt. Note, this applies only to failed connections and timeouts, never to requests where the server returns a response.
|
||||||
|
max_retries = 10
|
||||||
|
|
||||||
|
[proxy]
|
||||||
|
; This section is optional. Leave it out if you're not using a proxy.
|
||||||
|
; You can use environmental variables as well: http://www.python-requests.org/en/latest/user/advanced/#proxies
|
||||||
|
|
||||||
|
; proxy_protocol set to https, if not specified.
|
||||||
|
proxy_url = proxy.mycorp.com
|
||||||
|
|
||||||
|
; proxy_port will override any port specified in proxy_url
|
||||||
|
proxy_port = 8080
|
||||||
|
|
||||||
|
; proxy authentication
|
||||||
|
proxy_username = kramer
|
||||||
|
proxy_password = giddy up!
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
License
|
||||||
|
=======
|
||||||
|
Apache License, Version 2.0
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
|
||||||
|
Acknowledgements
|
||||||
|
================
|
||||||
|
|
||||||
|
Special thank you to Colin Bell for qualysconnect.
|
12
deps/qualysapi/changelog.txt
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
3.5.0
|
||||||
|
- Retooled authentication.
|
||||||
|
|
||||||
|
3.4.0
|
||||||
|
- Allows choice of configuration filenames. Easy to support those with multiple Qualys accounts, and need to automate tasks.
|
||||||
|
|
||||||
|
3.3.0
|
||||||
|
- Remove curl capability. Requests 2.0 and latest urllib3 can handle https proxy.
|
||||||
|
- Workaround for audience that does not have lxml. Warning: cannot handle lxml.builder E objects for AM & WAS APIs.
|
||||||
|
|
||||||
|
3.0.0
|
||||||
|
Proxy support.
|
1
deps/qualysapi/examples/__init__.py
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
__author__ = 'pbaxi'
|
113
deps/qualysapi/examples/qualysapi-example.py
vendored
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
|
||||||
|
__license__ = 'Apache License 2.0'
|
||||||
|
|
||||||
|
import qualysapi
|
||||||
|
from lxml import objectify
|
||||||
|
from lxml.builder import E
|
||||||
|
|
||||||
|
# Setup connection to QualysGuard API.
|
||||||
|
qgc = qualysapi.connect('config.txt')
|
||||||
|
#
|
||||||
|
# API v1 call: Scan the New York & Las Vegas asset groups
|
||||||
|
# The call is our request's first parameter.
|
||||||
|
call = 'scan.php'
|
||||||
|
# The parameters to append to the url is our request's second parameter.
|
||||||
|
parameters = {'scan_title': 'Go big or go home', 'asset_groups': 'New York&Las Vegas', 'option': 'Initial+Options'}
|
||||||
|
# Note qualysapi will automatically convert spaces into plus signs for API v1 & v2.
|
||||||
|
# Let's call the API and store the result in xml_output.
|
||||||
|
xml_output = qgc.request(call, parameters, concurrent_scans_retries=2, concurrent_scans_retry_delay=600)
|
||||||
|
# concurrent_retries: Retry the call this many times if your subscription hits the concurrent scans limit.
|
||||||
|
# concurrent_retries: Delay in seconds between retrying when subscription hits the concurrent scans limit.
|
||||||
|
# Example XML response when this happens below:
|
||||||
|
# <?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
# <ServiceResponse xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://localhost:50205/qps/rest/app//xsd/3.0/was/wasscan.xsd">
|
||||||
|
# <responseCode>INVALID_REQUEST</responseCode>
|
||||||
|
# <responseErrorDetails>
|
||||||
|
# <errorMessage>You have reached the maximum number of concurrent running scans (10) for your account</errorMessage>
|
||||||
|
# <errorResolution>Please wait until your previous scans have completed</errorResolution>
|
||||||
|
# </responseErrorDetails>
|
||||||
|
#
|
||||||
|
print(xml_output)
|
||||||
|
#
|
||||||
|
# API v1 call: Print out all IPs associated with asset group "Looneyville Texas".
|
||||||
|
# Note that the question mark at the end is optional.
|
||||||
|
call = 'asset_group_list.php?'
|
||||||
|
# We can still use strings for the data (not recommended).
|
||||||
|
parameters = 'title=Looneyville Texas'
|
||||||
|
# Let's call the API and store the result in xml_output.
|
||||||
|
xml_output = qgc.request(call, parameters)
|
||||||
|
# Let's objectify the xml_output string.
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# Print out the IPs.
|
||||||
|
print(root.ASSET_GROUP.SCANIPS.IP.text)
|
||||||
|
# Prints out:
|
||||||
|
# 10.0.0.102
|
||||||
|
#
|
||||||
|
# API v2 call: Print out DNS name for a range of IPs.
|
||||||
|
call = '/api/2.0/fo/asset/host/'
|
||||||
|
parameters = {'action': 'list', 'ips': '10.0.0.10-10.0.0.11'}
|
||||||
|
xml_output = qgc.request(call, parameters)
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# Iterate hosts and print out DNS name.
|
||||||
|
for host in root.RESPONSE.HOST_LIST.HOST:
|
||||||
|
print(host.IP.text, host.DNS.text)
|
||||||
|
# Prints out:
|
||||||
|
# 10.0.0.10 mydns1.qualys.com
|
||||||
|
# 10.0.0.11 mydns2.qualys.com
|
||||||
|
#
|
||||||
|
# API v3 WAS call: Print out number of webapps.
|
||||||
|
call = '/count/was/webapp'
|
||||||
|
# Note that this call does not have a payload so we don't send any data parameters.
|
||||||
|
xml_output = qgc.request(call)
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# Print out count of webapps.
|
||||||
|
print(root.count.text)
|
||||||
|
# Prints out:
|
||||||
|
# 89
|
||||||
|
#
|
||||||
|
# API v3 WAS call: Print out number of webapps containing title 'Supafly'.
|
||||||
|
call = '/count/was/webapp'
|
||||||
|
# We can send a string XML for the data.
|
||||||
|
parameters = '<ServiceRequest><filters><Criteria operator="CONTAINS" field="name">Supafly</Criteria></filters></ServiceRequest>'
|
||||||
|
xml_output = qgc.request(call, parameters)
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# Print out count of webapps.
|
||||||
|
print(root.count.text)
|
||||||
|
# Prints out:
|
||||||
|
# 3
|
||||||
|
#
|
||||||
|
# API v3 WAS call: Print out number of webapps containing title 'Lightsabertooth Tiger'.
|
||||||
|
call = '/count/was/webapp'
|
||||||
|
# We can also send an lxml.builder E object.
|
||||||
|
parameters = (
|
||||||
|
E.ServiceRequest(
|
||||||
|
E.filters(
|
||||||
|
E.Criteria('Lightsabertooth Tiger', field='name',operator='CONTAINS'))))
|
||||||
|
xml_output = qgc.request(call, parameters)
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# Print out count of webapps.
|
||||||
|
print(root.count.text)
|
||||||
|
# Prints out:
|
||||||
|
# 0
|
||||||
|
# Too bad, because that is an awesome webapp name!
|
||||||
|
#
|
||||||
|
# API v3 Asset Management call: Count tags.
|
||||||
|
call = '/count/am/tag'
|
||||||
|
xml_output = qgc.request(call)
|
||||||
|
root = objectify.fromstring(xml_output)
|
||||||
|
# We can use XPATH to find the count.
|
||||||
|
print(root.xpath('count')[0].text)
|
||||||
|
# Prints out:
|
||||||
|
# 840
|
||||||
|
#
|
||||||
|
# API v3 Asset Management call: Find asset by name.
|
||||||
|
call = '/search/am/tag'
|
||||||
|
parameters = '''<ServiceRequest>
|
||||||
|
<preferences>
|
||||||
|
<limitResults>10</limitResults>
|
||||||
|
</preferences>
|
||||||
|
<filters>
|
||||||
|
<Criteria field="name" operator="CONTAINS">PB</Criteria>
|
||||||
|
</filters>
|
||||||
|
</ServiceRequest>'''
|
||||||
|
xml_output = qgc.request(call, parameters)
|
42
deps/qualysapi/examples/qualysapi-simple-v1.py
vendored
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import qualysapi
|
||||||
|
|
||||||
|
# Questions? See:
|
||||||
|
# https://bitbucket.org/uWaterloo_IST_ISS/python-qualysconnect
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
# Basic command line processing.
|
||||||
|
if len(sys.argv) != 2:
|
||||||
|
print('A single IPv4 address is expected as the only argument')
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
# Set the MAXIMUM level of log messages displayed @ runtime.
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
|
# Call helper that creates a connection w/ HTTP-Basic to QualysGuard API.
|
||||||
|
qgs=qualysapi.connect()
|
||||||
|
|
||||||
|
# Logging must be set after instanciation of connector class.
|
||||||
|
logger = logging.getLogger('qualysapi.connector')
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Log to sys.out.
|
||||||
|
logger_console = logging.StreamHandler()
|
||||||
|
logger_console.setLevel(logging.DEBUG)
|
||||||
|
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
|
||||||
|
logging.getLogger(__name__).addHandler(logger)
|
||||||
|
|
||||||
|
# Formulate a request to the QualysGuard V1 API.
|
||||||
|
# docs @
|
||||||
|
# https://community.qualys.com/docs/DOC-1324
|
||||||
|
# http://www.qualys.com/docs/QualysGuard_API_User_Guide.pdf
|
||||||
|
#
|
||||||
|
# Old way still works:
|
||||||
|
# ret = qgs.request(1,'asset_search.php', "target_ips=%s&"%(sys.argv[1]))
|
||||||
|
# New way is cleaner:
|
||||||
|
ret = qgs.request(1,'asset_search.php', {'target_ips': sys.argv[1]})
|
||||||
|
|
||||||
|
print(ret)
|
37
deps/qualysapi/examples/qualysapi-simple-v2-report.py
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import qualysapi
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
# Basic command line processing.
|
||||||
|
if len(sys.argv) != 3:
|
||||||
|
print('A report template and scan reference respectively are expected as the only arguments.')
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
# Set the MAXIMUM level of log messages displayed @ runtime.
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
|
# Call helper that creates a connection w/ HTTP-Basic to QualysGuard v1 API
|
||||||
|
qgs=qualysapi.connect()
|
||||||
|
|
||||||
|
# Logging must be set after instanciation of connector class.
|
||||||
|
logger = logging.getLogger('qualysapi.connector')
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Log to sys.out.
|
||||||
|
logger_console = logging.StreamHandler()
|
||||||
|
logger_console.setLevel(logging.DEBUG)
|
||||||
|
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
|
||||||
|
logging.getLogger(__name__).addHandler(logger)
|
||||||
|
|
||||||
|
|
||||||
|
# Formulate a request to the QualysGuard V1 API
|
||||||
|
# docs @
|
||||||
|
# https://community.qualys.com/docs/DOC-1324
|
||||||
|
# http://www.qualys.com/docs/QualysGuard_API_User_Guide.pdf
|
||||||
|
#
|
||||||
|
ret = qgs.request('/api/2.0/fo/report',{'action': 'launch', 'report_refs': sys.argv[2], 'output_format': 'xml', 'template_id': sys.argv[1], 'report_type': 'Scan'})
|
||||||
|
|
||||||
|
print(ret)
|
43
deps/qualysapi/examples/qualysapi-simple-v2.py
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import qualysapi
|
||||||
|
|
||||||
|
# Questions? See:
|
||||||
|
# https://bitbucket.org/uWaterloo_IST_ISS/python-qualysconnect
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
# Basic command line processing.
|
||||||
|
if len(sys.argv) != 2:
|
||||||
|
print('A single IPv4 address is expected as the only argument.')
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
# Set the MAXIMUM level of log messages displayed @ runtime.
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
|
# Call helper that creates a connection w/ HTTP-Basic to QualysGuard v1 API
|
||||||
|
qgs=qualysapi.connect()
|
||||||
|
|
||||||
|
# Logging must be set after instanciation of connector class.
|
||||||
|
logger = logging.getLogger('qualysapi.connector')
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Log to sys.out.
|
||||||
|
logger_console = logging.StreamHandler()
|
||||||
|
logger_console.setLevel(logging.DEBUG)
|
||||||
|
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
|
||||||
|
logging.getLogger(__name__).addHandler(logger)
|
||||||
|
|
||||||
|
|
||||||
|
# Formulate a request to the QualysGuard V1 API
|
||||||
|
# docs @
|
||||||
|
# https://community.qualys.com/docs/DOC-1324
|
||||||
|
# http://www.qualys.com/docs/QualysGuard_API_User_Guide.pdf
|
||||||
|
#
|
||||||
|
# Old way still works:
|
||||||
|
# ret = qgs.request(2, "asset/host","?action=list&ips=%s&"%(sys.argv[1]))
|
||||||
|
# New way is cleaner:
|
||||||
|
ret = qgs.request('/api/2.0/fo/asset/host',{'action': 'list', 'ips': sys.argv[1]})
|
||||||
|
|
||||||
|
print(ret)
|
201
deps/qualysapi/license
vendored
Normal file
@ -0,0 +1,201 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright 2017 Parag Baxi
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
10
deps/qualysapi/qualysapi/__init__.py
vendored
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
# This is the version string assigned to the entire egg post
|
||||||
|
# setup.py install
|
||||||
|
|
||||||
|
# Ownership and Copyright Information.
|
||||||
|
from __future__ import absolute_import
|
||||||
|
__author__ = "Parag Baxi <parag.baxi@gmail.com>"
|
||||||
|
__copyright__ = "Copyright 2011-2013, Parag Baxi"
|
||||||
|
__license__ = "BSD-new"
|
||||||
|
|
||||||
|
from qualysapi.util import connect
|
181
deps/qualysapi/qualysapi/api_actions.py
vendored
Normal file
@ -0,0 +1,181 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
from lxml import objectify
|
||||||
|
import qualysapi.api_objects
|
||||||
|
from qualysapi.api_objects import *
|
||||||
|
|
||||||
|
|
||||||
|
class QGActions(object):
    """High-level QualysGuard API actions.

    Designed as a mixin: the inheriting class (e.g. QGConnector) must
    provide a ``request(call, parameters)`` method that returns the raw
    XML response body, which these helpers parse with lxml.objectify into
    the wrapper objects from qualysapi.api_objects.
    """

    def getHost(self, host):
        """Look up a single host asset by IP address.

        host -- IP address string.
        Returns a Host; if the host is unknown to QualysGuard, returns a
        placeholder Host whose last-scan field is 'never'.
        (Fix: the original signature omitted ``self`` even though the body
        calls ``self.request``.)
        """
        call = '/api/2.0/fo/asset/host/'
        parameters = {'action': 'list', 'ips': host, 'details': 'All'}
        hostData = objectify.fromstring(self.request(call, parameters)).RESPONSE
        try:
            hostData = hostData.HOST_LIST.HOST
            return Host(hostData.DNS, hostData.ID, hostData.IP, hostData.LAST_VULN_SCAN_DATETIME, hostData.NETBIOS, hostData.OS, hostData.TRACKING_METHOD)
        except AttributeError:
            # No HOST_LIST element in the response: host not in subscription.
            return Host("", "", host, "never", "", "", "")

    def getHostRange(self, start, end):
        """Return a list of Host objects for the IP range start-end."""
        call = '/api/2.0/fo/asset/host/'
        parameters = {'action': 'list', 'ips': start + '-' + end}
        hostData = objectify.fromstring(self.request(call, parameters))
        hostArray = []
        for host in hostData.RESPONSE.HOST_LIST.HOST:
            hostArray.append(Host(host.DNS, host.ID, host.IP, host.LAST_VULN_SCAN_DATETIME, host.NETBIOS, host.OS, host.TRACKING_METHOD))
        return hostArray

    def listAssetGroups(self, groupName=''):
        """Return AssetGroup objects, optionally filtered by title.

        groupName -- asset group title to filter on; '' lists all groups.
        """
        call = 'asset_group_list.php'
        if groupName == '':
            agData = objectify.fromstring(self.request(call))
        else:
            agData = objectify.fromstring(self.request(call, 'title=' + groupName)).RESPONSE

        groupsArray = []
        for group in agData.ASSET_GROUP:
            # Fresh accumulators for every group. The lists are handed to
            # AssetGroup by reference, so reusing one list across iterations
            # (as the original code did) leaked IPs/DNS/scanners between
            # groups and made every group share the same list objects.
            scanipsArray = []
            scandnsArray = []
            scannersArray = []

            try:
                for scanip in group.SCANIPS:
                    scanipsArray.append(scanip.IP)
            except AttributeError:
                scanipsArray = []  # No IPs defined to scan.

            try:
                for scanner in group.SCANNER_APPLIANCES.SCANNER_APPLIANCE:
                    scannersArray.append(scanner.SCANNER_APPLIANCE_NAME)
            except AttributeError:
                scannersArray = []  # No scanner appliances defined for this group.

            try:
                for dnsName in group.SCANDNS:
                    scandnsArray.append(dnsName.DNS)
            except AttributeError:
                scandnsArray = []  # No DNS names assigned to group.

            groupsArray.append(AssetGroup(group.BUSINESS_IMPACT, group.ID, group.LAST_UPDATE, scanipsArray, scandnsArray, scannersArray, group.TITLE))

        return groupsArray

    def listReportTemplates(self):
        """Return all report templates as ReportTemplate objects."""
        call = 'report_template_list.php'
        rtData = objectify.fromstring(self.request(call))
        templatesArray = []
        for template in rtData.REPORT_TEMPLATE:
            templatesArray.append(ReportTemplate(template.GLOBAL, template.ID, template.LAST_UPDATE, template.TEMPLATE_TYPE, template.TITLE, template.TYPE, template.USER))
        return templatesArray

    def listReports(self, id=0):
        """List reports, or fetch one report's metadata.

        id -- report id; 0 (default) lists all reports and returns a list,
              a non-zero id returns the single matching Report.
        """
        call = '/api/2.0/fo/report'

        if id == 0:
            parameters = {'action': 'list'}
            repData = objectify.fromstring(self.request(call, parameters)).RESPONSE
            reportsArray = []
            for report in repData.REPORT_LIST.REPORT:
                reportsArray.append(Report(report.EXPIRATION_DATETIME, report.ID, report.LAUNCH_DATETIME, report.OUTPUT_FORMAT, report.SIZE, report.STATUS, report.TYPE, report.USER_LOGIN))
            return reportsArray
        else:
            parameters = {'action': 'list', 'id': id}
            repData = objectify.fromstring(self.request(call, parameters)).RESPONSE.REPORT_LIST.REPORT
            return Report(repData.EXPIRATION_DATETIME, repData.ID, repData.LAUNCH_DATETIME, repData.OUTPUT_FORMAT, repData.SIZE, repData.STATUS, repData.TYPE, repData.USER_LOGIN)

    def notScannedSince(self, days):
        """Return hosts whose last vuln scan is at least `days` days old."""
        call = '/api/2.0/fo/asset/host/'
        parameters = {'action': 'list', 'details': 'All'}
        hostData = objectify.fromstring(self.request(call, parameters))
        hostArray = []
        today = datetime.date.today()
        for host in hostData.RESPONSE.HOST_LIST.HOST:
            # LAST_VULN_SCAN_DATETIME looks like '2013-01-01T12:00:00Z';
            # only the date part matters for the age comparison.
            last_scan = str(host.LAST_VULN_SCAN_DATETIME).split('T')[0]
            last_scan = datetime.date(int(last_scan.split('-')[0]), int(last_scan.split('-')[1]), int(last_scan.split('-')[2]))
            if (today - last_scan).days >= days:
                hostArray.append(Host(host.DNS, host.ID, host.IP, host.LAST_VULN_SCAN_DATETIME, host.NETBIOS, host.OS, host.TRACKING_METHOD))
        return hostArray

    def addIP(self, ips, vmpc):
        """Add IPs to the subscription.

        ips  -- comma-separated list of IP addresses.
        vmpc -- 'vm', 'pc', or 'both' (Vulnerability Management, Policy
                Compliance, or both).
        """
        call = '/api/2.0/fo/asset/ip/'
        enablevm = 1
        enablepc = 0
        if vmpc == 'pc':
            enablevm = 0
            enablepc = 1
        elif vmpc == 'both':
            enablevm = 1
            enablepc = 1

        parameters = {'action': 'add', 'ips': ips, 'enable_vm': enablevm, 'enable_pc': enablepc}
        self.request(call, parameters)

    def listScans(self, launched_after="", state="", target="", type="", user_login=""):
        """Return Scan objects, filtered by any non-empty argument.

        launched_after -- date string, YYYY-MM-DD.
        state          -- "Running", "Paused", "Canceled", "Finished",
                          "Error", "Queued" or "Loading".
        target         -- scan target filter.
        type           -- "On-Demand" or "Scheduled".
        user_login     -- launching user's login name.
        """
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'list', 'show_ags': 1, 'show_op': 1, 'show_status': 1}
        if launched_after != "":
            parameters['launched_after_datetime'] = launched_after
        if state != "":
            parameters['state'] = state
        if target != "":
            parameters['target'] = target
        if type != "":
            parameters['type'] = type
        if user_login != "":
            parameters['user_login'] = user_login

        scanlist = objectify.fromstring(self.request(call, parameters))
        scanArray = []
        for scan in scanlist.RESPONSE.SCAN_LIST.SCAN:
            try:
                agList = []
                for ag in scan.ASSET_GROUP_TITLE_LIST.ASSET_GROUP_TITLE:
                    agList.append(ag)
            except AttributeError:
                agList = []  # Scan was not launched against asset groups.

            scanArray.append(Scan(agList, scan.DURATION, scan.LAUNCH_DATETIME, scan.OPTION_PROFILE.TITLE, scan.PROCESSED, scan.REF, scan.STATUS, scan.TARGET, scan.TITLE, scan.TYPE, scan.USER_LOGIN))

        return scanArray

    def launchScan(self, title, option_title, iscanner_name, asset_groups="", ip=""):
        """Launch a scan and return its Scan object.

        title         -- scan title.
        option_title  -- option profile title.
        iscanner_name -- scanner appliance name.
        asset_groups  -- optional asset group titles to scan.
        ip            -- optional IPs to scan.
        """
        # TODO: Add ability to scan by tag.
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'launch', 'scan_title': title, 'option_title': option_title, 'iscanner_name': iscanner_name, 'ip': ip, 'asset_groups': asset_groups}
        if ip == "":
            parameters.pop("ip")
        if asset_groups == "":
            parameters.pop("asset_groups")

        # ITEM[1] — assumes the launch response lists the scan reference as
        # the second ITEM element; TODO confirm against the API response.
        scan_ref = objectify.fromstring(self.request(call, parameters)).RESPONSE.ITEM_LIST.ITEM[1].VALUE

        # Immediately list the freshly launched scan to build a Scan object.
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'list', 'scan_ref': scan_ref, 'show_status': 1, 'show_ags': 1, 'show_op': 1}

        scan = objectify.fromstring(self.request(call, parameters)).RESPONSE.SCAN_LIST.SCAN
        try:
            agList = []
            for ag in scan.ASSET_GROUP_TITLE_LIST.ASSET_GROUP_TITLE:
                agList.append(ag)
        except AttributeError:
            agList = []  # No asset groups attached to this scan.

        return Scan(agList, scan.DURATION, scan.LAUNCH_DATETIME, scan.OPTION_PROFILE.TITLE, scan.PROCESSED, scan.REF, scan.STATUS, scan.TARGET, scan.TITLE, scan.TYPE, scan.USER_LOGIN)
|
155
deps/qualysapi/qualysapi/api_methods.py
vendored
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
from __future__ import absolute_import

__author__ = 'pbaxi'

from collections import defaultdict

# Registry of QualysGuard API endpoints, grouped by API family.
# Naming convention: api_methods[api_version optional_qualifier].
api_methods = defaultdict(set)

# API v1 methods (GET unless listed in '1 post' as well).
api_methods['1'] = {
    'about.php',
    'action_log_report.php',
    'asset_data_report.php',
    'asset_domain.php',
    'asset_domain_list.php',
    'asset_group_delete.php',
    'asset_group_list.php',
    'asset_ip_list.php',
    'asset_range_info.php',
    'asset_search.php',
    'get_host_info.php',
    'ignore_vuln.php',
    'iscanner_list.php',
    'knowledgebase_download.php',
    'map-2.php',
    'map.php',
    'map_report.php',
    'map_report_list.php',
    'password_change.php',
    'scan.php',
    'scan_cancel.php',
    'scan_options.php',
    'scan_report.php',
    'scan_report_delete.php',
    'scan_report_list.php',
    'scan_running_list.php',
    'scan_target_history.php',
    'scheduled_scans.php',
    'ticket_delete.php',
    'ticket_edit.php',
    'ticket_list.php',
    'ticket_list_deleted.php',
    'time_zone_code.php',
    'user.php',
    'user_list.php',
}
# API v1 POST methods.
api_methods['1 post'] = {
    'action_log_report.php',
    'asset_group.php',
    'asset_ip.php',
    'ignore_vuln.php',
    'knowledgebase_download.php',
    'map-2.php',
    'map.php',
    'password_change.php',
    'scan.php',
    'scan_report.php',
    'scan_target_history.php',
    'scheduled_scans.php',
    'ticket_delete.php',
    'ticket_edit.php',
    'ticket_list.php',
    'ticket_list_deleted.php',
    'user.php',
    'user_list.php',
}
# API v2 methods (they're all POST).
api_methods['2'] = {
    'api/2.0/fo/appliance/',
    'api/2.0/fo/asset/excluded_ip/',
    'api/2.0/fo/asset/excluded_ip/history/',
    'api/2.0/fo/asset/host/',
    'api/2.0/fo/asset/host/cyberscope/',
    'api/2.0/fo/asset/host/cyberscope/fdcc/policy/',
    'api/2.0/fo/asset/host/cyberscope/fdcc/scan/',
    'api/2.0/fo/asset/host/vm/detection/',
    'api/2.0/fo/asset/ip/',
    'api/2.0/fo/asset/ip/v4_v6/',
    'api/2.0/fo/asset/vhost/',
    'api/2.0/fo/auth/',
    # 'api/2.0/fo/auth/{type}/', # Added below.
    'api/2.0/fo/compliance/',
    'api/2.0/fo/compliance/control',
    'api/2.0/fo/compliance/fdcc/policy',
    'api/2.0/fo/compliance/policy/',
    'api/2.0/fo/compliance/posture/info/',
    'api/2.0/fo/compliance/scap/arf/',
    'api/2.0/fo/knowledge_base/vuln/',
    'api/2.0/fo/report/',
    'api/2.0/fo/report/scorecard/',
    'api/2.0/fo/scan/',
    'api/2.0/fo/scan/compliance/',
    'api/2.0/fo/session/',
    'api/2.0/fo/setup/restricted_ips/',
}
# Expand the per-record-type authentication endpoints.
for auth_type in {
    'ibm_db2',
    'ms_sql',
    'oracle',
    'oracle_listener',
    'snmp',
    'unix',
    'windows',
}:
    api_methods['2'].add('api/2.0/fo/auth/%s/' % auth_type)
# WAS GET methods when no POST data.
api_methods['was no data get'] = {
    'count/was/report',
    'count/was/wasscan',
    'count/was/wasscanschedule',
    'count/was/webapp',
    'download/was/report/',
    'download/was/wasscan/',
}
# WAS GET methods.
api_methods['was get'] = {
    'download/was/report/',
    'download/was/wasscan/',
    'get/was/report/',
    'get/was/wasscan/',
    'get/was/wasscanschedule/',
    'get/was/webapp/',
    'status/was/report/',
    'status/was/wasscan/',
}
# Asset Management GET methods.
api_methods['am get'] = {
    'count/am/asset',
    'count/am/hostasset',
    'count/am/tag',
    'get/am/asset/',
    'get/am/hostasset/',
    'get/am/tag/',
}
# Asset Management v2 GET methods.
api_methods['am2 get'] = {
    'get/am/asset/',
    'get/am/hostasset/',
    'get/am/tag/',
    'get/am/hostinstancevuln/',
    'get/am/assetdataconnector/',
    'get/am/awsassetdataconnector/',
    'get/am/awsauthrecord/',
}
# Keep track of methods with ending slashes to autocorrect user when they forgot slash.
# Fix: iterate the actual api_methods keys. The original iterated the literal
# set {'1', '2', 'was', 'am', 'am2'}, but the dict keys are '1', '1 post',
# '2', 'was get', 'am get', 'am2 get', ... — so the WAS/AM lookups hit the
# defaultdict's empty-set default and the autocorrect table stayed empty for
# every family except v1/v2.
api_methods_with_trailing_slash = defaultdict(set)
for method_group in list(api_methods):
    for method in api_methods[method_group]:
        if method[-1] == '/':
            # Index by API family (first word of the group key) with the
            # trailing slash removed, to match what a user might type.
            # Example: 'was get' group's 'get/was/webapp/' is stored as
            # api_methods_with_trailing_slash['was'] -> 'get/was/webapp'.
            api_methods_with_trailing_slash[method_group.split()[0]].add(method[:-1])
|
120
deps/qualysapi/qualysapi/api_objects.py
vendored
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
import datetime
|
||||||
|
from lxml import objectify
|
||||||
|
|
||||||
|
|
||||||
|
class Host(object):
    """A single QualysGuard host asset.

    Attributes are normalized from lxml.objectify elements to plain
    Python types.
    """

    def __init__(self, dns, id, ip, last_scan, netbios, os, tracking_method):
        """Build a host record.

        last_scan -- API datetime string like '2013-01-01T12:00:00Z'.
        """
        self.dns = str(dns)
        # QGActions.getHost builds a placeholder Host('', '', ip, 'never', ...)
        # for hosts unknown to QualysGuard; tolerate the empty id instead of
        # crashing in int('').
        self.id = int(id) if str(id) else 0
        self.ip = str(ip)
        # Normalize 'YYYY-MM-DDTHH:MM:SSZ' -> ['YYYY-MM-DD', 'HH:MM:SS'].
        last_scan = str(last_scan).replace('T', ' ').replace('Z', '').split(' ')
        try:
            date = last_scan[0].split('-')
            time = last_scan[1].split(':')
            self.last_scan = datetime.datetime(int(date[0]), int(date[1]), int(date[2]), int(time[0]), int(time[1]), int(time[2]))
        except (IndexError, ValueError):
            # Not a parseable datetime (e.g. 'never'): host was never scanned.
            self.last_scan = None
        self.netbios = str(netbios)
        self.os = str(os)
        self.tracking_method = str(tracking_method)
|
||||||
|
|
||||||
|
|
||||||
|
class AssetGroup(object):
    """A QualysGuard asset group and its scan targets."""

    def __init__(self, business_impact, id, last_update, scanips, scandns, scanner_appliances, title):
        self.business_impact = str(business_impact)
        self.id = int(id)
        self.last_update = str(last_update)
        self.scanips = scanips                      # list of IP strings
        self.scandns = scandns                      # list of DNS names
        self.scanner_appliances = scanner_appliances  # list of appliance names
        self.title = str(title)

    def addAsset(self, conn, ip):
        """Add a single IP to this asset group and track it locally.

        conn -- connection object providing request(call, parameters).
        ip   -- IP address string to add.
        (Fix: the original signature was ``addAsset(conn, ip)`` without
        ``self``, so the body's ``self.id`` raised NameError on any call.)
        """
        call = '/api/2.0/fo/asset/group/'
        parameters = {'action': 'edit', 'id': self.id, 'add_ips': ip}
        conn.request(call, parameters)
        self.scanips.append(ip)

    def setAssets(self, conn, ips):
        """Replace this asset group's IP list server-side.

        conn -- connection object providing request(call, parameters).
        ips  -- IPs to set as the group's asset list.
        (Fix: ``self`` was missing here as well.)
        NOTE(review): unlike addAsset, this does not update self.scanips
        locally — preserved as-is; confirm whether callers rely on that.
        """
        call = '/api/2.0/fo/asset/group/'
        parameters = {'action': 'edit', 'id': self.id, 'set_ips': ips}
        conn.request(call, parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class ReportTemplate(object):
    """A QualysGuard report template record."""

    def __init__(self, isGlobal, id, last_update, template_type, title, type, user):
        # Numeric fields arrive as objectify elements or strings; coerce.
        self.isGlobal = int(isGlobal)
        self.id = int(id)
        # 'YYYY-MM-DDTHH:MM:SSZ' -> ['YYYY-MM-DD', 'HH:MM:SS'].
        cleaned = str(last_update).replace('T', ' ').replace('Z', '')
        self.last_update = cleaned.split(' ')
        # Remaining fields are kept as provided by the API response.
        self.template_type = template_type
        self.title = title
        self.type = type
        self.user = user.LOGIN
|
||||||
|
|
||||||
|
|
||||||
|
class Report(object):
    """A generated QualysGuard report entry."""

    def __init__(self, expiration_datetime, id, launch_datetime, output_format, size, status, type, user_login):
        def _split(value):
            # 'YYYY-MM-DDTHH:MM:SSZ' -> ['YYYY-MM-DD', 'HH:MM:SS'].
            return str(value).replace('T', ' ').replace('Z', '').split(' ')

        self.expiration_datetime = _split(expiration_datetime)
        self.id = int(id)
        self.launch_datetime = _split(launch_datetime)
        self.output_format = output_format
        self.size = size
        self.status = status.STATE
        self.type = type
        self.user_login = user_login

    def download(self, conn):
        """Fetch the report body via conn if it has finished generating.

        Returns the raw response for a 'Finished' report, otherwise None.
        """
        if self.status != 'Finished':
            return None
        call = '/api/2.0/fo/report'
        parameters = {'action': 'fetch', 'id': self.id}
        return conn.request(call, parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class Scan(object):
    """A QualysGuard scan and its lifecycle operations (cancel/pause/resume)."""

    def __init__(self, assetgroups, duration, launch_datetime, option_profile, processed, ref, status, target, title, type, user_login):
        self.assetgroups = assetgroups
        self.duration = str(duration)
        # API datetimes look like '2013-01-01T12:00:00Z'.
        launch_datetime = str(launch_datetime).replace('T', ' ').replace('Z', '').split(' ')
        date = launch_datetime[0].split('-')
        time = launch_datetime[1].split(':')
        self.launch_datetime = datetime.datetime(int(date[0]), int(date[1]), int(date[2]), int(time[0]), int(time[1]), int(time[2]))
        self.option_profile = str(option_profile)
        self.processed = int(processed)
        self.ref = str(ref)
        self.status = str(status.STATE)
        self.target = str(target).split(', ')
        self.title = str(title)
        self.type = str(type)
        self.user_login = str(user_login)

    def _refresh_status(self, conn):
        """Re-read this scan's status from the API and cache it."""
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'list', 'scan_ref': self.ref, 'show_status': 1}
        self.status = objectify.fromstring(conn.request(call, parameters)).RESPONSE.SCAN_LIST.SCAN.STATUS.STATE

    def cancel(self, conn):
        """Cancel this scan; raises ValueError if already terminal."""
        # NOTE(review): the API spells this state 'Canceled' elsewhere in this
        # package (see listScans docs) — confirm which spelling the service
        # returns before relying on this list.
        cancelled_statuses = ['Cancelled', 'Finished', 'Error']
        # Fix: the original `any(self.status in s for s in ...)` was a
        # reversed substring test (e.g. a status of 'Fin' would match
        # 'Finished'); a plain membership test expresses the intent.
        if self.status in cancelled_statuses:
            raise ValueError("Scan cannot be cancelled because its status is " + self.status)
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'cancel', 'scan_ref': self.ref}
        conn.request(call, parameters)
        self._refresh_status(conn)

    def pause(self, conn):
        """Pause this scan; raises ValueError unless it is Running."""
        if self.status != "Running":
            raise ValueError("Scan cannot be paused because its status is " + self.status)
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'pause', 'scan_ref': self.ref}
        conn.request(call, parameters)
        self._refresh_status(conn)

    def resume(self, conn):
        """Resume this scan; raises ValueError unless it is Paused."""
        if self.status != "Paused":
            raise ValueError("Scan cannot be resumed because its status is " + self.status)
        call = '/api/2.0/fo/scan/'
        parameters = {'action': 'resume', 'scan_ref': self.ref}
        conn.request(call, parameters)
        self._refresh_status(conn)
|
221
deps/qualysapi/qualysapi/config.py
vendored
Normal file
@ -0,0 +1,221 @@
|
|||||||
|
""" Module providing a single class (QualysConnectConfig) that parses a config
|
||||||
|
file and provides the information required to build QualysGuard sessions.
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import print_function
|
||||||
|
import os
|
||||||
|
import stat
|
||||||
|
import getpass
|
||||||
|
import logging
|
||||||
|
from six.moves import input
|
||||||
|
from six.moves.configparser import *
|
||||||
|
|
||||||
|
import qualysapi.settings as qcs
|
||||||
|
# Setup module level logging.
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# try:
|
||||||
|
# from requests_ntlm import HttpNtlmAuth
|
||||||
|
# except ImportError, e:
|
||||||
|
# logger.warning('Warning: Cannot support NTML authentication.')
|
||||||
|
|
||||||
|
|
||||||
|
__author__ = "Parag Baxi <parag.baxi@gmail.com> & Colin Bell <colin.bell@uwaterloo.ca>"
|
||||||
|
__updated_by__ = "Austin Taylor <vulnWhisperer@austintaylor.io>"
|
||||||
|
__copyright__ = "Copyright 2011-2013, Parag Baxi & University of Waterloo"
|
||||||
|
__license__ = "BSD-new"
|
||||||
|
|
||||||
|
|
||||||
|
class QualysConnectConfig:
    """Parse a qualysapi ini file and expose connection settings.

    Reads (or interactively prompts for) the QualysGuard hostname,
    username, password, retry count, report template id and optional
    proxy settings, optionally persisting them back to disk with
    user-only permissions.
    """

    def __init__(self, filename=qcs.default_filename, remember_me=False, remember_me_always=False):
        """Locate, parse and complete the configuration.

        filename           -- ini file name; looked up first in the current
                              directory, then in the user's home directory.
        remember_me        -- persist entered settings to ./filename.
        remember_me_always -- persist entered settings to ~/filename.
        """
        self._cfgfile = None

        # Prioritize local directory filename; fall back to the home dir.
        if os.path.exists(filename):
            self._cfgfile = filename
        elif os.path.exists(os.path.join(os.path.expanduser("~"), filename)):
            self._cfgfile = os.path.join(os.path.expanduser("~"), filename)

        # Create ConfigParser to combine defaults and input from config file.
        self._cfgparse = ConfigParser(qcs.defaults)

        if self._cfgfile:
            self._cfgfile = os.path.realpath(self._cfgfile)

            mode = stat.S_IMODE(os.stat(self._cfgfile)[stat.ST_MODE])

            # Apply bitmask to current mode to check ONLY user access permissions.
            if (mode & (stat.S_IRWXG | stat.S_IRWXO)) != 0:
                logging.warning('%s permissions allows more than user access.' % (filename,))

            self._cfgparse.read(self._cfgfile)

        # If the 'qualys' section doesn't exist, create it.
        if not self._cfgparse.has_section('qualys'):
            self._cfgparse.add_section('qualys')

        # Use default hostname (if one isn't provided).
        if not self._cfgparse.has_option('qualys', 'hostname'):
            if self._cfgparse.has_option('DEFAULT', 'hostname'):
                hostname = self._cfgparse.get('DEFAULT', 'hostname')
                self._cfgparse.set('qualys', 'hostname', hostname)
            else:
                raise Exception("No 'hostname' set. QualysConnect does not know who to connect to.")

        # Use default max_retries (if one isn't provided); must be an int.
        if not self._cfgparse.has_option('qualys', 'max_retries'):
            self.max_retries = qcs.defaults['max_retries']
        else:
            self.max_retries = self._cfgparse.get('qualys', 'max_retries')
        try:
            self.max_retries = int(self.max_retries)
        except Exception:
            logger.error('Value max_retries must be an integer.')
            print('Value max_retries must be an integer.')
            exit(1)
        self._cfgparse.set('qualys', 'max_retries', str(self.max_retries))

        # Get template ID... user will need to set this to pull back CSV reports.
        if not self._cfgparse.has_option('qualys', 'template_id'):
            self.report_template_id = qcs.defaults['template_id']
        else:
            self.report_template_id = self._cfgparse.get('qualys', 'template_id')
        try:
            self.report_template_id = int(self.report_template_id)
        except Exception:
            logger.error('Report Template ID Must be set and be an integer')
            print('Value template ID must be an integer.')
            exit(1)
        self._cfgparse.set('qualys', 'template_id', str(self.report_template_id))

        # Proxy support. Sample final proxy_config:
        # 'http://user:pass@10.10.1.10:3128'
        proxy_config = proxy_url = proxy_protocol = proxy_port = proxy_username = proxy_password = None
        # User requires proxy?
        if self._cfgparse.has_option('proxy', 'proxy_url'):
            proxy_url = self._cfgparse.get('proxy', 'proxy_url')
            # Remove protocol prefix from url if included.
            for prefix in ('http://', 'https://'):
                if proxy_url.startswith(prefix):
                    proxy_protocol = prefix
                    proxy_url = proxy_url[len(prefix):]
            # Default proxy protocol when none was given in the url.
            if not proxy_protocol:
                proxy_protocol = 'https://'
            # Port embedded in the url?
            if ':' in proxy_url:
                proxy_port = proxy_url[proxy_url.index(':') + 1:]
                proxy_url = proxy_url[:proxy_url.index(':')]
            if self._cfgparse.has_option('proxy', 'proxy_port'):
                # Config-specified port wins over the one from the url.
                if proxy_port:
                    proxy_port_url = proxy_port
                    proxy_port = self._cfgparse.get('proxy', 'proxy_port')
                    logger.warning('Proxy port from url overwritten by specified proxy_port from config:')
                    logger.warning('%s --> %s' % (proxy_port_url, proxy_port))
                else:
                    proxy_port = self._cfgparse.get('proxy', 'proxy_port')
            if not proxy_port:
                # No proxy port specified anywhere: use protocol defaults.
                if proxy_protocol == 'http://':
                    proxy_port = '8080'
                else:
                    proxy_port = '443'

            # Check for proxy authentication request.
            if self._cfgparse.has_option('proxy', 'proxy_username'):
                # Proxy requires username & password.
                proxy_username = self._cfgparse.get('proxy', 'proxy_username')
                proxy_password = self._cfgparse.get('proxy', 'proxy_password')

        if proxy_url:
            # Assemble the final proxy string: protocol + [auth@] + host:port.
            proxy_config = proxy_url
            if proxy_port:
                proxy_config += ':' + proxy_port
            if proxy_username:
                proxy_config = proxy_username + ':' + proxy_password + '@' + proxy_config
            proxy_config = proxy_protocol + proxy_config
        # Set up proxy dict for requests, if applicable.
        if proxy_config:
            self.proxies = {'https': proxy_config}
        else:
            self.proxies = None

        # Ask username (if one doesn't exist).
        if not self._cfgparse.has_option('qualys', 'username'):
            username = input('QualysGuard Username: ')
            self._cfgparse.set('qualys', 'username', username)

        # Ask password (if one doesn't exist).
        if not self._cfgparse.has_option('qualys', 'password'):
            password = getpass.getpass('QualysGuard Password: ')
            self._cfgparse.set('qualys', 'password', password)

        logging.debug(self._cfgparse.items('qualys'))

        if remember_me or remember_me_always:
            # Persist the (possibly prompted-for) settings for next time.
            if remember_me:
                # Store in current working directory.
                config_path = filename
            if remember_me_always:
                # Store in home directory. Fix: the original used the bare
                # home directory path here (os.path.expanduser("~")), which
                # always exists, so the config file was never written.
                config_path = os.path.join(os.path.expanduser("~"), filename)
            if not os.path.exists(config_path):
                # Write file only if it doesn't already exist, with 0600 perms.
                # http://stackoverflow.com/questions/5624359/write-file-with-specific-permissions-in-python
                mode = stat.S_IRUSR | stat.S_IWUSR  # 0o600: user read/write only.
                umask_original = os.umask(0)
                try:
                    config_file = os.fdopen(os.open(config_path, os.O_WRONLY | os.O_CREAT, mode), 'w')
                finally:
                    os.umask(umask_original)
                # Add the settings to the structure of the file and write it out.
                self._cfgparse.write(config_file)
                config_file.close()

    def get_config_filename(self):
        """Return the path of the config file that was read (or None)."""
        return self._cfgfile

    def get_config(self):
        """Return the underlying ConfigParser instance."""
        return self._cfgparse

    def get_auth(self):
        """Return the (username, password) tuple from the config."""
        return (self._cfgparse.get('qualys', 'username'), self._cfgparse.get('qualys', 'password'))

    def get_hostname(self):
        """Return the QualysGuard API hostname."""
        return self._cfgparse.get('qualys', 'hostname')

    def get_template_id(self):
        """Return the report template id as stored in the config (string)."""
        return self._cfgparse.get('qualys', 'template_id')
|
363
deps/qualysapi/qualysapi/connector.py
vendored
Normal file
@ -0,0 +1,363 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import print_function
|
||||||
|
__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
|
||||||
|
__copyright__ = 'Copyright 2013, Parag Baxi'
|
||||||
|
__license__ = 'Apache License 2.0'
|
||||||
|
|
||||||
|
""" Module that contains classes for setting up connections to QualysGuard API
|
||||||
|
and requesting data from it.
|
||||||
|
"""
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
|
||||||
|
try:
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
except ImportError:
|
||||||
|
from urlparse import urlparse
|
||||||
|
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
import qualysapi.version
|
||||||
|
import qualysapi.api_methods
|
||||||
|
|
||||||
|
import qualysapi.api_actions
|
||||||
|
import qualysapi.api_actions as api_actions
|
||||||
|
|
||||||
|
# Setup module level logging.
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from lxml import etree
|
||||||
|
except ImportError as e:
|
||||||
|
logger.warning(
|
||||||
|
'Warning: Cannot consume lxml.builder E objects without lxml. Send XML strings for AM & WAS API calls.')
|
||||||
|
|
||||||
|
|
||||||
|
class QGConnector(api_actions.QGActions):
    """Qualys Connection class which allows requests to the QualysGuard API
    using HTTP-Basic Authentication (over SSL).
    """

    def __init__(self, auth, server='qualysapi.qualys.com', proxies=None, max_retries=3):
        # auth: (username, password) tuple passed straight to requests'
        # HTTP-Basic auth on every call.
        self.auth = auth
        # Remember QualysGuard API server (hostname only; scheme/path are
        # built per-call in url_api_version()).
        self.server = server
        # Remember rate limits per call. defaultdict(int) so unseen calls
        # read as 0 without a KeyError.
        self.rate_limit_remaining = defaultdict(int)
        # api_methods: Define method algorithm in a dict of set.
        # Naming convention: api_methods[api_version optional_blah] due to api_methods_with_trailing_slash testing.
        self.api_methods = qualysapi.api_methods.api_methods
        #
        # Keep track of methods with ending slashes to autocorrect user when they forgot slash.
        self.api_methods_with_trailing_slash = qualysapi.api_methods.api_methods_with_trailing_slash
        # Optional dict of proxies in the format requests expects
        # ({'https': ...}); forwarded on every session call.
        self.proxies = proxies
        logger.debug('proxies = \n%s' % proxies)
        # Set up requests max_retries: mount HTTPAdapters so transient
        # connection failures are retried transparently for both schemes.
        logger.debug('max_retries = \n%s' % max_retries)
        self.session = requests.Session()
        http_max_retries = requests.adapters.HTTPAdapter(max_retries=max_retries)
        https_max_retries = requests.adapters.HTTPAdapter(max_retries=max_retries)
        self.session.mount('http://', http_max_retries)
        self.session.mount('https://', https_max_retries)
|
||||||
|
|
||||||
|
def __call__(self):
    # Calling the connector instance returns the instance itself, so the
    # object can be used where a factory/callable is expected.
    return self
|
||||||
|
|
||||||
|
def format_api_version(self, api_version):
    """Normalize a user-supplied API version into its canonical form.

    Accepts ints (1, 2), 'v'-prefixed strings ('v1', 'v2'), and module
    aliases ('tags', 'webapp', 'pc', ...). Returns an int (1 or 2) for the
    classic APIs or one of the strings 'am', 'am2', 'was' for Portal APIs.

    Raises ValueError if a string input matches no known alias and is not
    an integer.
    """
    if isinstance(api_version, str):
        api_version = api_version.lower()
        if api_version[0] == 'v' and api_version[1].isdigit():
            # Remove first 'v' in case the user typed 'v1' or 'v2', etc.
            api_version = api_version[1:]
        # Check for input matching Qualys modules.
        if api_version in ('asset management', 'assets', 'tag', 'tagging', 'tags', 'am'):
            # Convert to Asset Management API. ('am' added: the old
            # substring bug below mis-routed it to 'am2'.)
            api_version = 'am'
        elif api_version in ('am2',):
            # Bug fix: the original wrote `in ('am2')`, which is a plain
            # string, so `in` did a SUBSTRING test — 'a', 'm', '2', 'am',
            # and 'm2' all wrongly matched. A one-element tuple restores
            # proper membership semantics.
            api_version = 'am2'
        elif api_version in ('webapp', 'web application scanning', 'webapp scanning'):
            # Convert to WAS API.
            api_version = 'was'
        elif api_version in ('pol', 'pc'):
            # Convert PC module to API number 2.
            api_version = 2
        else:
            # Fall back to a plain numeric version string.
            api_version = int(api_version)
    return api_version
|
||||||
|
|
||||||
|
def which_api_version(self, api_call):
    """Infer the QualysGuard API version from the shape of *api_call*.

    Returns 1 or 2 for the classic APIs, 'am' or 'was' for Portal API
    calls, or False when the call cannot be classified.
    """
    # Each API family has a recognizable URL pattern; test them in order.
    classifiers = (
        (lambda call: call.endswith('.php'), 1),       # legacy v1 endpoints
        (lambda call: call.startswith('api/2.0/'), 2),  # API v2
        (lambda call: '/am/' in call, 'am'),            # Asset Management
        (lambda call: '/was/' in call, 'was'),          # Web App Scanning
    )
    for matches, version in classifiers:
        if matches(api_call):
            return version
    # No pattern matched; caller must specify the version explicitly.
    return False
|
||||||
|
|
||||||
|
def url_api_version(self, api_version):
    """Return the base API URL string for *api_version* on this server.

    Raises Exception for an unrecognized version identifier.
    """
    # Path prefix per API family; keys mirror format_api_version() output.
    prefixes = {
        1: "/msp/",                # QualysGuard API v1 url.
        2: "/",                    # QualysGuard API v2 url.
        'was': "/qps/rest/3.0/",   # QualysGuard REST v3 API url (Portal API).
        'am': "/qps/rest/1.0/",    # QualysGuard REST v1 API url (Portal API).
        'am2': "/qps/rest/2.0/",   # QualysGuard REST v2 API url (Portal API).
    }
    if api_version not in prefixes:
        raise Exception("Unknown QualysGuard API Version Number (%s)" % (api_version,))
    url = "https://%s%s" % (self.server, prefixes[api_version])
    logger.debug("Base url =\n%s" % (url))
    return url
|
||||||
|
|
||||||
|
def format_http_method(self, api_version, api_call, data):
    """Pick the HTTP verb ('get' or 'post') for a call; POST is preferred."""
    # All API v2 requests are POST methods.
    if api_version == 2:
        return 'post'
    if api_version == 1:
        # v1 only POSTs for the calls explicitly listed as POST methods.
        return 'post' if api_call in self.api_methods['1 post'] else 'get'
    if api_version == 'was':
        # WAS lets a resource id live in the URI; chop it off so
        # '/download/was/report/18823' matches '/download/was/report/'.
        endpoint = api_call[:api_call.rfind('/') + 1]
        if endpoint in self.api_methods['was get']:
            return 'get'
        # POSTing with no payload yields HTTPError 415 Unsupported Media
        # Type, so some payload-less calls switch to GET.
        if data is None and endpoint in self.api_methods['was no data get']:
            return 'get'
        return 'post'
    # Asset Management API call (am / am2).
    return 'get' if api_call in self.api_methods['am get'] else 'post'
|
||||||
|
|
||||||
|
def preformat_call(self, api_call):
    """Return *api_call* with leading slashes and trailing '?' removed."""
    cleaned = api_call.lstrip('/').rstrip('?')
    if api_call != cleaned:
        # Only log when stripping actually changed something.
        logger.debug('api_call post strip =\n%s' % cleaned)
    return cleaned
|
||||||
|
|
||||||
|
def format_call(self, api_version, api_call):
    """Normalize *api_call* according to *api_version* etiquette."""
    # Drop possible starting slashes and trailing question marks.
    call = api_call.lstrip('/').rstrip('?')
    logger.debug('api_call post strip =\n%s' % call)
    # API v2 calls must always end in a slash.
    if api_version == 2 and call[-1] != '/':
        logger.debug('Adding "/" to api_call.')
        call += '/'
    # Autocorrect endpoints known to require a trailing slash.
    if call in self.api_methods_with_trailing_slash[api_version]:
        logger.debug('Adding "/" to api_call.')
        call += '/'
    return call
|
||||||
|
|
||||||
|
def format_payload(self, api_version, data):
    """Massage *data* into the shape the target API expects.

    For API v1/v2, a query-string payload ('action=list&ids=1') is parsed
    into a parameter dict so requests can form-encode it. For Portal APIs
    ('am', 'was', 'am2'), an lxml element is serialized to an XML string.
    Anything else is returned unchanged.
    """
    # Check if payload is for API v1 or API v2.
    if api_version in (1, 2):
        if isinstance(data, str):
            # Convert to dictionary.
            logger.debug('Converting string to dict:\n%s' % data)
            # Remove possible starting question mark & ending ampersands.
            data = data.lstrip('?').rstrip('&')
            # Bug fix: urlparse() returns a ParseResult (URL components),
            # not a mapping of query parameters, which breaks requests'
            # form encoding. parse_qs() — the call the original left
            # commented out — is the correct query-string parser.
            try:
                from urllib.parse import parse_qs
            except ImportError:  # Python 2 fallback, matching module header.
                from urlparse import parse_qs
            data = parse_qs(data)
            logger.debug('Converted:\n%s' % str(data))
    elif api_version in ('am', 'was', 'am2'):
        # Portal API takes XML text; serialize lxml.builder.E objects.
        if type(data) == etree._Element:
            logger.debug('Converting lxml.builder.E to string')
            data = etree.tostring(data)
            logger.debug('Converted:\n%s' % data)
    return data
|
||||||
|
|
||||||
|
def request(self, api_call, data=None, api_version=None, http_method=None, concurrent_scans_retries=0,
            concurrent_scans_retry_delay=0):
    """ Return QualysGuard API response.

    api_call: endpoint path (leading slashes / trailing '?' tolerated).
    data: payload — query string, dict, or lxml element depending on API.
    api_version: explicit version; inferred from api_call when falsy.
    http_method: 'get'/'post'; chosen automatically when not given.
    concurrent_scans_retries / concurrent_scans_retry_delay: retry count
    and delay (seconds) used only when the concurrent-scan limit is hit.

    Returns the response body as a string, or False when retries are
    exhausted or the account's IP is not authorized. Raises
    requests.HTTPError on 4XX/5XX responses.
    """
    logger.debug('api_call =\n%s' % api_call)
    logger.debug('api_version =\n%s' % api_version)
    logger.debug('data %s =\n %s' % (type(data), str(data)))
    logger.debug('http_method =\n%s' % http_method)
    logger.debug('concurrent_scans_retries =\n%s' % str(concurrent_scans_retries))
    logger.debug('concurrent_scans_retry_delay =\n%s' % str(concurrent_scans_retry_delay))
    # Coerce retry knobs in case they arrive as strings (e.g. from config).
    concurrent_scans_retries = int(concurrent_scans_retries)
    concurrent_scans_retry_delay = int(concurrent_scans_retry_delay)
    #
    # Determine API version.
    # Preformat call.
    api_call = self.preformat_call(api_call)
    if api_version:
        # API version specified, format API version inputted.
        api_version = self.format_api_version(api_version)
    else:
        # API version not specified, determine automatically.
        api_version = self.which_api_version(api_call)
    #
    # Set up base url.
    url = self.url_api_version(api_version)
    #
    # Set up headers. X-Requested-With identifies this client to Qualys.
    headers = {"X-Requested-With": "QualysAPI (python) v%s - VulnWhisperer" % (qualysapi.version.__version__,)}
    logger.debug('headers =\n%s' % (str(headers)))
    # Portal API takes in XML text, requiring custom header.
    if api_version in ('am', 'was', 'am2'):
        headers['Content-type'] = 'text/xml'
    #
    # Set up http request method, if not specified.
    if not http_method:
        http_method = self.format_http_method(api_version, api_call, data)
    logger.debug('http_method =\n%s' % http_method)
    #
    # Format API call.
    api_call = self.format_call(api_version, api_call)
    logger.debug('api_call =\n%s' % (api_call))
    # Append api_call to url.
    url += api_call
    #
    # Format data, if applicable.
    if data is not None:
        data = self.format_payload(api_version, data)
    # Make request at least once (more if concurrent_retry is enabled).
    retries = 0
    #
    # set a warning threshold for the rate limit
    rate_warn_threshold = 10
    while retries <= concurrent_scans_retries:
        # Make request.
        logger.debug('url =\n%s' % (str(url)))
        logger.debug('data =\n%s' % (str(data)))
        logger.debug('headers =\n%s' % (str(headers)))
        if http_method == 'get':
            # GET
            logger.debug('GET request.')
            request = self.session.get(url, params=data, auth=self.auth, headers=headers, proxies=self.proxies)
        else:
            # POST
            logger.debug('POST request.')
            # Make POST request.
            request = self.session.post(url, data=data, auth=self.auth, headers=headers, proxies=self.proxies)
        logger.debug('response headers =\n%s' % (str(request.headers)))
        #
        # Remember how many times left user can make against api_call.
        # Escalate log level as the remaining quota approaches zero.
        try:
            self.rate_limit_remaining[api_call] = int(request.headers['x-ratelimit-remaining'])
            logger.debug('rate limit for api_call, %s = %s' % (api_call, self.rate_limit_remaining[api_call]))
            if (self.rate_limit_remaining[api_call] > rate_warn_threshold):
                logger.debug('rate limit for api_call, %s = %s' % (api_call, self.rate_limit_remaining[api_call]))
            elif (self.rate_limit_remaining[api_call] <= rate_warn_threshold) and (self.rate_limit_remaining[api_call] > 0):
                logger.warning('Rate limit is about to being reached (remaining api calls = %s)' % self.rate_limit_remaining[api_call])
            elif self.rate_limit_remaining[api_call] <= 0:
                logger.critical('ATTENTION! RATE LIMIT HAS BEEN REACHED (remaining api calls = %s)!' % self.rate_limit_remaining[api_call])
        except KeyError as e:
            # Likely a bad api_call (header absent).
            logger.debug(e)
            pass
        except TypeError as e:
            # Likely an asset search api_call.
            logger.debug(e)
            pass
        # Response received. NOTE(review): str(request.content) stringifies
        # a bytes object on Python 3 (yields "b'...'"); downstream substring
        # checks still work, but this looks py2-era — confirm before changing.
        response = str(request.content)
        logger.debug('response text =\n%s' % (response))
        # Keep track of how many retries.
        retries += 1
        # Check for concurrent scans limit.
        if not ('<responseCode>INVALID_REQUEST</responseCode>' in response and
                '<errorMessage>You have reached the maximum number of concurrent running scans' in response and
                '<errorResolution>Please wait until your previous scans have completed</errorResolution>' in response):
            # Did not hit concurrent scan limit.
            break
        else:
            # Hit concurrent scan limit.
            logger.critical(response)
            # If trying again, delay next try by concurrent_scans_retry_delay.
            if retries <= concurrent_scans_retries:
                logger.warning('Waiting %d seconds until next try.' % concurrent_scans_retry_delay)
                time.sleep(concurrent_scans_retry_delay)
                # Inform user of how many retries.
                logger.critical('Retry #%d' % retries)
            else:
                # Ran out of retries. Let user know.
                print('Alert! Ran out of concurrent_scans_retries!')
                logger.critical('Alert! Ran out of concurrent_scans_retries!')
                return False
    # Check to see if there was an error.
    try:
        request.raise_for_status()
    except requests.HTTPError as e:
        # Error: log full context, then re-raise for the caller.
        print('Error! Received a 4XX client error or 5XX server error response.')
        print('Content = \n', response)
        logger.error('Content = \n%s' % response)
        print('Headers = \n', request.headers)
        logger.error('Headers = \n%s' % str(request.headers))
        request.raise_for_status()
    # Qualys-specific failure: caller IP not whitelisted for the account.
    if '<RETURN status="FAILED" number="2007">' in response:
        print('Error! Your IP address is not in the list of secure IPs. Manager must include this IP (QualysGuard VM > Users > Security).')
        print('Content = \n', response)
        logger.error('Content = \n%s' % response)
        print('Headers = \n', request.headers)
        logger.error('Headers = \n%s' % str(request.headers))
        return False
    return response
|
290
deps/qualysapi/qualysapi/contrib.py
vendored
Normal file
@ -0,0 +1,290 @@
|
|||||||
|
# File for 3rd party contributions.
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import print_function
|
||||||
|
import six
|
||||||
|
from six.moves import range
|
||||||
|
|
||||||
|
__author__ = 'Parag Baxi <parag.baxi@gmail.com>'
|
||||||
|
__license__ = 'Apache License 2.0'
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import types
|
||||||
|
import unicodedata
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from lxml import etree, objectify
|
||||||
|
|
||||||
|
|
||||||
|
# Set module level logger.
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_vm_report(self, report_details, startup_delay=60, polling_delay=30, max_checks=10):
    ''' Spool and download QualysGuard VM report.

    self: a QGConnector instance used to issue the API calls.
    report_details: dict of report launch parameters; 'action' is forced
        to 'launch'.
    startup_delay: Time in seconds to wait before initially checking.
    polling_delay: Time in seconds to wait between checks.
    max_checks: Maximum number of times to check for report spooling completion.

    Returns the fetched report XML string (whatever request() returned).
    '''
    # Merge parameters.
    report_details['action'] = 'launch'
    logger.debug(report_details)
    # Bug fix: the original referenced an undefined global
    # `qualysapi_instance` (NameError) and used the stale positional order
    # request(version, call, data). This module's QGConnector.request
    # signature is request(api_call, data=None, api_version=None, ...),
    # and the connector is passed in as `self`.
    xml_output = self.request('report', report_details, api_version=2)
    report_id = etree.XML(xml_output).find('.//VALUE').text
    logger.debug('report_id: %s' % (report_id))
    # Wait for report to finish spooling.
    logger.info('Report sent to spooler. Checking for report in %s seconds.' % (startup_delay))
    time.sleep(startup_delay)
    for n in range(0, max_checks):
        # Check to see if report is done.
        xml_output = self.request('report', {'action': 'list', 'id': report_id}, api_version=2)
        tag_status = etree.XML(xml_output).findtext(".//STATE")
        logger.debug('tag_status: %s' % (tag_status))
        if tag_status is not None:
            # Report is showing up in the Report Center.
            if tag_status == 'Finished':
                # Report creation complete.
                break
        # Report not finished, wait.
        logger.info('Report still spooling. Trying again in %s seconds.' % (polling_delay))
        time.sleep(polling_delay)
    # We now have to fetch the report. Use the report id.
    report_xml = self.request('report', {'action': 'fetch', 'id': report_id}, api_version=2)
    return report_xml
|
||||||
|
|
||||||
|
|
||||||
|
def qg_html_to_ascii(qg_html_text):
    """Convert and return QualysGuard's quasi HTML text to ASCII text.

    NOTE(review): this function depends on `re`, `lxml.html`, and `string`,
    none of which are imported in this module (imports are logging, time,
    types, unicodedata, collections, six, and `from lxml import etree,
    objectify`) — calling it as-is raises NameError/AttributeError. It also
    uses the Python-2-only `string.replace(s, a, b)` form, and mixes bytes
    (from .encode('ascii', 'ignore')) with str operations. Flagged rather
    than rewritten; confirm intended runtime before fixing.
    """
    text = qg_html_text
    # Handle tagged line breaks (<p>, <br>)
    text = re.sub(r'(?i)<br>[ ]*', '\n', text)
    text = re.sub(r'(?i)<p>[ ]*', '\n', text)
    # Remove consecutive line breaks
    text = re.sub(r"^\s+", "", text, flags=re.MULTILINE)
    # Remove empty lines at the end.
    # NOTE(review): replacement is a literal '$', not '' — this inserts a
    # dollar sign in place of trailing newlines; looks like a bug, left
    # unchanged here.
    text = re.sub('[\n]+$', '$', text)
    # Store anchor tags href attribute
    links = list(lxml.html.iterlinks(text))
    # Remove anchor tags
    html_element = lxml.html.fromstring(text)
    # Convert anchor tags to "link_text (link: link_url )".
    logging.debug('Converting anchor tags...')
    text = html_element.text_content().encode('ascii', 'ignore')
    # Convert each link.
    for l in links:
        # Find and replace each link.
        link_text = l[0].text_content().encode('ascii', 'ignore').strip()
        link_url = l[2].strip()
        # Replacing link_text
        if link_text != link_url:
            # Link text is different, most likely a description.
            text = string.replace(text, link_text, '%s (link: %s )' % (link_text, link_url))
        else:
            # Link text is the same as the href. No need to duplicate link.
            text = string.replace(text, link_text, '%s' % (link_url))
    logging.debug('Done.')
    return text
|
||||||
|
|
||||||
|
|
||||||
|
def qg_parse_informational_qids(xml_report):
    """Return vulnerabilities of severity 1 and 2 levels due to a restriction of
    QualysGuard's inability to report them in the internal ticketing system.

    xml_report: XML report string parseable by lxml.objectify, with
    HOST_LIST/HOST entries and a GLOSSARY of VULN_DETAILS.
    Returns a defaultdict mapping QID -> {'hosts': [...], 'title', 'severity',
    'solution', 'threat', 'impact'}.
    """
    # asset_group's vulnerability data map:
    # {'qid_number': {
    #     # CSV info
    #     'hosts': [{'ip': '10.28.0.1', 'dns': 'hostname', 'netbios': 'blah', 'vuln_id': 'remediation_ticket_number'}, {'ip': '10.28.0.3', 'dns': 'hostname2', 'netbios': '', 'vuln_id': 'remediation_ticket_number'}, ...],
    #     'solution': '',
    #     'impact': '',
    #     'threat': '',
    #     'severity': '',
    #     }
    # 'qid_number2': ...
    # }
    # Add all vulnerabilities to list of dictionaries.
    # Use defaultdict in case a new QID is encountered.
    info_vulns = defaultdict(dict)
    # Parse vulnerabilities in xml string.
    tree = objectify.fromstring(xml_report)
    # Write IP, DNS, & Result into each QID CSV file.
    logging.debug('Parsing report...')
    # TODO: Check against c_args.max to prevent creating CSV content for QIDs that we won't use.
    for host in tree.HOST_LIST.HOST:
        # Extract possible extra hostname information; these elements are
        # optional, hence the AttributeError fallbacks.
        try:
            netbios = unicodedata.normalize('NFKD', six.text_type(host.NETBIOS)).encode('ascii', 'ignore').strip()
        except AttributeError:
            netbios = ''
        try:
            dns = unicodedata.normalize('NFKD', six.text_type(host.DNS)).encode('ascii', 'ignore').strip()
        except AttributeError:
            dns = ''
        ip = unicodedata.normalize('NFKD', six.text_type(host.IP)).encode('ascii', 'ignore').strip()
        # Extract vulnerabilities host is affected by.
        for vuln in host.VULN_INFO_LIST.VULN_INFO:
            try:
                result = unicodedata.normalize('NFKD', six.text_type(vuln.RESULT)).encode('ascii', 'ignore').strip()
            except AttributeError:
                result = ''
            qid = unicodedata.normalize('NFKD', six.text_type(vuln.QID)).encode('ascii', 'ignore').strip()
            # Attempt to add host to QID's list of affected hosts (EAFP:
            # KeyError means this QID has not been seen yet).
            try:
                info_vulns[qid]['hosts'].append({'ip': '%s' % (ip),
                                                 'dns': '%s' % (dns),
                                                 'netbios': '%s' % (netbios),
                                                 'vuln_id': '',
                                                 # Informational QIDs do not have vuln_id numbers. This is a flag to write the CSV file.
                                                 'result': '%s' % (result), })
            except KeyError:
                # New QID.
                logging.debug('New QID found: %s' % (qid))
                info_vulns[qid]['hosts'] = []
                info_vulns[qid]['hosts'].append({'ip': '%s' % (ip),
                                                 'dns': '%s' % (dns),
                                                 'netbios': '%s' % (netbios),
                                                 'vuln_id': '',
                                                 # Informational QIDs do not have vuln_id numbers. This is a flag to write the CSV file.
                                                 'result': '%s' % (result), })
    # All vulnerabilities added.
    # Add all vulnerabilty information from the report glossary.
    for vuln_details in tree.GLOSSARY.VULN_DETAILS_LIST.VULN_DETAILS:
        qid = unicodedata.normalize('NFKD', six.text_type(vuln_details.QID)).encode('ascii', 'ignore').strip()
        info_vulns[qid]['title'] = unicodedata.normalize('NFKD', six.text_type(vuln_details.TITLE)).encode('ascii',
                                                                                                           'ignore').strip()
        info_vulns[qid]['severity'] = unicodedata.normalize('NFKD', six.text_type(vuln_details.SEVERITY)).encode('ascii',
                                                                                                                 'ignore').strip()
        # Rich-text fields go through qg_html_to_ascii to strip markup.
        info_vulns[qid]['solution'] = qg_html_to_ascii(
            unicodedata.normalize('NFKD', six.text_type(vuln_details.SOLUTION)).encode('ascii', 'ignore').strip())
        info_vulns[qid]['threat'] = qg_html_to_ascii(
            unicodedata.normalize('NFKD', six.text_type(vuln_details.THREAT)).encode('ascii', 'ignore').strip())
        info_vulns[qid]['impact'] = qg_html_to_ascii(
            unicodedata.normalize('NFKD', six.text_type(vuln_details.IMPACT)).encode('ascii', 'ignore').strip())
    # Ready to report informational vulnerabilities.
    return info_vulns
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Implement required function qg_remediation_tickets(asset_group, status, qids)
|
||||||
|
# TODO: Remove static 'report_template' value. Parameterize and document required report template.
|
||||||
|
def qg_ticket_list(asset_group, severity, qids=None):
    """Return dictionary of each vulnerability reported against asset_group of severity.

    NOTE(review): this contrib function references several names that are
    not defined or imported in this module: qg_remediation_tickets,
    qg_command, reaction_open_issue, asset_group_details, OrderedDict, os,
    and csv. It cannot run as-is; kept verbatim with documentation only.
    """
    global asset_group_details
    # All vulnerabilities imported to list of dictionaries.
    vulns = qg_remediation_tickets(asset_group, 'OPEN', qids)  # vulns now holds all open remediation tickets.
    if not vulns:
        # No tickets to report.
        return False
    #
    # Sort the vulnerabilities in order of prevalence -- number of hosts affected.
    vulns = OrderedDict(sorted(list(vulns.items()), key=lambda t: len(t[1]['hosts'])))
    logging.debug('vulns sorted = %s' % (vulns))
    #
    # Remove QIDs that have duplicate patches.
    #
    # Read in patch report.
    # TODO: Allow for lookup of report_template.
    # Report template is Patch report "Sev 5 confirmed patchable".
    logging.debug('Retrieving patch report from QualysGuard.')
    print('Retrieving patch report from QualysGuard.')
    report_template = '1063695'
    # Call QualysGuard for patch report.
    csv_output = qg_command(2, 'report', {'action': 'launch', 'output_format': 'csv',
                                          'asset_group_ids': asset_group_details['qg_asset_group_id'],
                                          'template_id': report_template,
                                          'report_title': 'QGIR Patch %s' % (asset_group)})
    logging.debug('csv_output =')
    logging.debug(csv_output)
    logging.debug('Improving remediation efficiency by removing unneeded, redundant patches.')
    print('Improving remediation efficiency by removing unneeded, redundant patches.')
    # Find the line for Patches by Host data.
    logging.debug('Header found at %s.' % (csv_output.find('Patch QID, IP, DNS, NetBIOS, OS, Vulnerability Count')))

    # +52 skips past the header row itself (its length).
    starting_pos = csv_output.find('Patch QID, IP, DNS, NetBIOS, OS, Vulnerability Count') + 52
    logging.debug('starting_pos = %s' % str(starting_pos))
    # Data resides between line ending in 'Vulnerability Count' and a blank line.
    patches_by_host = csv_output[starting_pos:csv_output[starting_pos:].find(
        'Host Vulnerabilities Fixed by Patch') + starting_pos - 3]
    logging.debug('patches_by_host =')
    logging.debug(patches_by_host)
    # Read in string patches_by_host csv to a dictionary.
    f = patches_by_host.split(os.linesep)
    reader = csv.DictReader(f, ['Patch QID', 'IP', 'DNS', 'NetBIOS', 'OS', 'Vulnerability Count'], delimiter=',')
    # Mark Patch QIDs that fix multiple vulnerabilities with associated IP addresses.
    redundant_qids = defaultdict(list)
    for row in reader:
        if int(row['Vulnerability Count']) > 1:
            # Add to list of redundant QIDs.
            redundant_qids[row['Patch QID']].append(row['IP'])
            logging.debug('%s, %s, %s, %s' % (
                row['Patch QID'],
                row['IP'],
                int(row['Vulnerability Count']),
                redundant_qids[row['Patch QID']]))
    # Log for debugging.
    logging.debug('len(redundant_qids) = %s, redundant_qids =' % (len(redundant_qids)))
    for patch_qid in list(redundant_qids.keys()):
        logging.debug('%s, %s' % (str(patch_qid), str(redundant_qids[patch_qid])))
    # Extract redundant QIDs with associated IP addresses.
    # Find the line for Patches by Host data. (+66 skips this header row.)
    starting_pos = csv_output.find('Patch QID, IP, QID, Severity, Type, Title, Instance, Last Detected') + 66
    # Data resides between line ending in 'Vulnerability Count' and end of string.
    host_vulnerabilities_fixed_by_patch = csv_output[starting_pos:]
    # Read in string host_vulnerabilities_fixed_by_patch csv to a dictionary.
    f = host_vulnerabilities_fixed_by_patch.split(os.linesep)
    reader = csv.DictReader(f, ['Patch QID', 'IP', 'QID', 'Severity', 'Type', 'Title', 'Instance', 'Last Detected'],
                            delimiter=',')
    # Remove IP addresses associated with redundant QIDs.
    qids_to_remove = defaultdict(list)
    for row in reader:
        # If the row's IP address's Patch QID was found to have multiple vulnerabilities...
        if len(redundant_qids[row['Patch QID']]) > 0 and redundant_qids[row['Patch QID']].count(row['IP']) > 0:
            # Add the QID column to the list of dictionaries {QID: [IP address, IP address, ...], QID2: [IP address], ...}
            qids_to_remove[row['QID']].append(row['IP'])
    # Log for debugging.
    logging.debug('len(qids_to_remove) = %s, qids_to_remove =' % (len(qids_to_remove)))
    for a_qid in list(qids_to_remove.keys()):
        logging.debug('%s, %s' % (str(a_qid), str(qids_to_remove[a_qid])))
    #
    # Diff vulns against qids_to_remove and against open incidents.
    #
    vulns_length = len(vulns)
    # Iterate over list of keys rather than original dictionary as some keys may be deleted changing the size of the dictionary.
    for a_qid in list(vulns.keys()):
        # Debug log original qid's hosts.
        logging.debug('Before diffing vulns[%s] =' % (a_qid))
        logging.debug(vulns[a_qid]['hosts'])
        # Pop each host.
        # The [:] returns a "slice" of x, which happens to contain all its elements, and is thus effectively a copy of x.
        for host in vulns[a_qid]['hosts'][:]:
            # If the QID for the host is a dupe or if a there is an open Reaction incident.
            if qids_to_remove[a_qid].count(host['ip']) > 0 or reaction_open_issue(host['vuln_id']):
                # Remove the host from the QID's list of target hosts.
                logging.debug('Removing remediation ticket %s.' % (host['vuln_id']))
                vulns[a_qid]['hosts'].remove(host)
            else:
                # Do not remove this vuln
                logging.debug('Will report remediation %s.' % (host['vuln_id']))
        # Debug log diff'd qid's hosts.
        logging.debug('After diffing vulns[%s]=' % (a_qid))
        logging.debug(vulns[a_qid]['hosts'])
        # If there are no more hosts left to patch for the qid.
        if len(vulns[a_qid]['hosts']) == 0:
            # Remove the QID.
            logging.debug('Deleting vulns[%s].' % (a_qid))
            del vulns[a_qid]
    # Diff completed
    if not vulns_length == len(vulns):
        print('A count of %s vulnerabilities have been consolidated to %s vulnerabilities, a reduction of %s%%.' % (
            int(vulns_length),
            int(len(vulns)),
            int(round((int(vulns_length) - int(len(vulns))) / float(vulns_length) * 100))))
    # Return vulns to report.
    logging.debug('vulns =')
    logging.debug(vulns)
    return vulns
|
21
deps/qualysapi/qualysapi/settings.py
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
''' Module to hold global settings reused throughout qualysapi. '''

from __future__ import absolute_import

__author__ = "Colin Bell <colin.bell@uwaterloo.ca>"
__copyright__ = "Copyright 2011-2013, University of Waterloo"
__license__ = "BSD-new"

import os

# Idiom fix: the original declared `global defaults` / `global
# default_filename` at module scope, which is a no-op (module-level names
# are already global); the statements were removed.

# Default config filename: hidden rc file on POSIX, plain ini on Windows.
if os.name == 'nt':
    default_filename = "config.ini"
else:
    default_filename = ".qcrc"

# Fallback values used when the config file omits a setting.
defaults = {'hostname': 'qualysapi.qualys.com',
            'max_retries': '3',
            'template_id': '00000'}
|
29
deps/qualysapi/qualysapi/util.py
vendored
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
""" A set of utility functions for QualysConnect module. """
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import qualysapi.config as qcconf
|
||||||
|
import qualysapi.connector as qcconn
|
||||||
|
import qualysapi.settings as qcs
|
||||||
|
|
||||||
|
__author__ = "Parag Baxi <parag.baxi@gmail.com> & Colin Bell <colin.bell@uwaterloo.ca>"
|
||||||
|
__copyright__ = "Copyright 2011-2013, Parag Baxi & University of Waterloo"
|
||||||
|
__license__ = 'Apache License 2.0'
|
||||||
|
|
||||||
|
# Set module level logger.
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def connect(config_file=qcs.default_filename, remember_me=False, remember_me_always=False):
|
||||||
|
""" Return a QGAPIConnect object for v1 API pulling settings from config
|
||||||
|
file.
|
||||||
|
"""
|
||||||
|
# Retrieve login credentials.
|
||||||
|
conf = qcconf.QualysConnectConfig(filename=config_file, remember_me=remember_me,
|
||||||
|
remember_me_always=remember_me_always)
|
||||||
|
connect = qcconn.QGConnector(conf.get_auth(),
|
||||||
|
conf.get_hostname(),
|
||||||
|
conf.proxies,
|
||||||
|
conf.max_retries)
|
||||||
|
logger.info("Finished building connector.")
|
||||||
|
return connect
|
3
deps/qualysapi/qualysapi/version.py
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
__author__ = 'Austin Taylor'
|
||||||
|
__pkgname__ = 'qualysapi'
|
||||||
|
__version__ = '4.1.0'
|
51
deps/qualysapi/setup.py
vendored
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import os
|
||||||
|
import setuptools
|
||||||
|
|
||||||
|
try:
|
||||||
|
from setuptools import setup
|
||||||
|
except ImportError:
|
||||||
|
from distutils.core import setup
|
||||||
|
|
||||||
|
__author__ = 'Austin Taylor <vulnWhisperer@austintaylor.io>'
|
||||||
|
__copyright__ = 'Copyright 2017, Austin Taylor'
|
||||||
|
__license__ = 'BSD-new'
|
||||||
|
# Make pyflakes happy.
|
||||||
|
__pkgname__ = None
|
||||||
|
__version__ = None
|
||||||
|
exec(compile(open('qualysapi/version.py').read(), 'qualysapi/version.py', 'exec'))
|
||||||
|
|
||||||
|
|
||||||
|
# A utility function to read the README file into the long_description field.
|
||||||
|
def read(fname):
|
||||||
|
""" Takes a filename and returns the contents of said file relative to
|
||||||
|
the current directory.
|
||||||
|
"""
|
||||||
|
return open(os.path.join(os.path.dirname(__file__), fname)).read()
|
||||||
|
|
||||||
|
|
||||||
|
setup(name=__pkgname__,
|
||||||
|
version=__version__,
|
||||||
|
author='Austin Taylor',
|
||||||
|
author_email='vulnWhisperer@austintaylor.io',
|
||||||
|
description='QualysGuard(R) Qualys API Package modified for VulnWhisperer',
|
||||||
|
license='BSD-new',
|
||||||
|
keywords='Qualys QualysGuard API helper network security',
|
||||||
|
url='https://github.com/austin-taylor/qualysapi',
|
||||||
|
package_dir={'': '.'},
|
||||||
|
#packages=setuptools.find_packages(),
|
||||||
|
packages=['qualysapi',],
|
||||||
|
# package_data={'qualysapi':['LICENSE']},
|
||||||
|
# scripts=['src/scripts/qhostinfo.py', 'src/scripts/qscanhist.py', 'src/scripts/qreports.py'],
|
||||||
|
long_description=read('README.md'),
|
||||||
|
classifiers=[
|
||||||
|
'Development Status :: 5 - Production/Stable',
|
||||||
|
'Topic :: Utilities',
|
||||||
|
'License :: OSI Approved :: Apache Software License',
|
||||||
|
'Intended Audience :: Developers',
|
||||||
|
],
|
||||||
|
install_requires=[
|
||||||
|
'requests',
|
||||||
|
],
|
||||||
|
)
|
40
docker-compose.yml
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
version: '2'
|
||||||
|
services:
|
||||||
|
vulnwhisp_es1:
|
||||||
|
image: docker.elastic.co/elasticsearch/elasticsearch:5.6.2
|
||||||
|
container_name: vulnwhisp_es1
|
||||||
|
environment:
|
||||||
|
- cluster.name=vulnwhisperer
|
||||||
|
- bootstrap.memory_lock=true
|
||||||
|
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
|
||||||
|
ulimits:
|
||||||
|
memlock:
|
||||||
|
soft: -1
|
||||||
|
hard: -1
|
||||||
|
mem_limit: 1g
|
||||||
|
volumes:
|
||||||
|
- esdata1:/usr/share/elasticsearch/data
|
||||||
|
ports:
|
||||||
|
- 19200:9200
|
||||||
|
networks:
|
||||||
|
- esnet
|
||||||
|
vulnwhisp_ks1:
|
||||||
|
image: docker.elastic.co/kibana/kibana:5.6.2
|
||||||
|
environment:
|
||||||
|
SERVER_NAME: vulnwhisp_ks1
|
||||||
|
ELASTICSEARCH_URL: http://vulnwhisp_es1:9200
|
||||||
|
ports:
|
||||||
|
- 15601:5601
|
||||||
|
networks:
|
||||||
|
- esnet
|
||||||
|
vulnwhisp_ls1:
|
||||||
|
image: docker.elastic.co/logstash/logstash:5.6.2
|
||||||
|
networks:
|
||||||
|
- esnet
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
esdata1:
|
||||||
|
driver: local
|
||||||
|
|
||||||
|
networks:
|
||||||
|
esnet:
|
BIN
docs/source/as_seen_on_tv.png
Normal file
After Width: | Height: | Size: 356 KiB |
BIN
docs/source/config_example.png
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
docs/source/elastic_webinar.png
Normal file
After Width: | Height: | Size: 81 KiB |
BIN
docs/source/running_vuln_whisperer.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
docs/source/vulnWhispFull.png
Normal file
After Width: | Height: | Size: 185 KiB |
BIN
docs/source/vulnWhispererWebApplications.png
Normal file
After Width: | Height: | Size: 273 KiB |
BIN
docs/source/vuln_whisperer_logo_s.png
Normal file
After Width: | Height: | Size: 48 KiB |
122
elasticsearch/logstash-vulnwhisperer-template.json
Executable file
@ -0,0 +1,122 @@
|
|||||||
|
{
|
||||||
|
"order": 0,
|
||||||
|
"template": "logstash-vulnwhisperer-*",
|
||||||
|
"settings": {
|
||||||
|
"index": {
|
||||||
|
"routing": {
|
||||||
|
"allocation": {
|
||||||
|
"total_shards_per_node": "2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mapping": {
|
||||||
|
"total_fields": {
|
||||||
|
"limit": "3000"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"refresh_interval": "5s",
|
||||||
|
"number_of_shards": "1",
|
||||||
|
"number_of_replicas": "0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mappings": {
|
||||||
|
"_default_": {
|
||||||
|
"_all": {
|
||||||
|
"enabled": false
|
||||||
|
},
|
||||||
|
"dynamic_templates": [
|
||||||
|
{
|
||||||
|
"message_field": {
|
||||||
|
"path_match": "message",
|
||||||
|
"match_mapping_type": "string",
|
||||||
|
"mapping": {
|
||||||
|
"type": "text",
|
||||||
|
"norms": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"string_fields": {
|
||||||
|
"match": "*",
|
||||||
|
"match_mapping_type": "string",
|
||||||
|
"mapping": {
|
||||||
|
"type": "text",
|
||||||
|
"norms": false,
|
||||||
|
"fields": {
|
||||||
|
"keyword": {
|
||||||
|
"type": "keyword",
|
||||||
|
"ignore_above": 256
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"plugin_id": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"last_updated": {
|
||||||
|
"type": "date"
|
||||||
|
},
|
||||||
|
"geoip": {
|
||||||
|
"dynamic": true,
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"ip": {
|
||||||
|
"type": "ip"
|
||||||
|
},
|
||||||
|
"latitude": {
|
||||||
|
"type": "float"
|
||||||
|
},
|
||||||
|
"location": {
|
||||||
|
"type": "geo_point"
|
||||||
|
},
|
||||||
|
"longitude": {
|
||||||
|
"type": "float"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"risk_score": {
|
||||||
|
"type": "float"
|
||||||
|
},
|
||||||
|
"source": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"synopsis": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"see_also": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"@timestamp": {
|
||||||
|
"type": "date"
|
||||||
|
},
|
||||||
|
"cve": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"solution": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"port": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"host": {
|
||||||
|
"type": "text"
|
||||||
|
},
|
||||||
|
"@version": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"risk": {
|
||||||
|
"type": "keyword"
|
||||||
|
},
|
||||||
|
"assign_ip": {
|
||||||
|
"type": "ip"
|
||||||
|
},
|
||||||
|
"cvss": {
|
||||||
|
"type": "float"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"aliases": {}
|
||||||
|
}
|
116
filebeat/filebeat.yml
Executable file
@ -0,0 +1,116 @@
|
|||||||
|
###################### Filebeat Configuration Example #########################
|
||||||
|
|
||||||
|
# This file is an example configuration file highlighting only the most common
|
||||||
|
# options. The filebeat.full.yml file from the same directory contains all the
|
||||||
|
# supported options with more comments. You can use it as a reference.
|
||||||
|
#
|
||||||
|
# You can find the full configuration reference here:
|
||||||
|
# https://www.elastic.co/guide/en/beats/filebeat/index.html
|
||||||
|
|
||||||
|
#=========================== Filebeat prospectors =============================
|
||||||
|
|
||||||
|
filebeat.prospectors:
|
||||||
|
|
||||||
|
# Each - is a prospector. Most options can be set at the prospector level, so
|
||||||
|
# you can use different prospectors for various configurations.
|
||||||
|
# Below are the prospector specific configurations.
|
||||||
|
|
||||||
|
- input_type: log
|
||||||
|
# Paths that should be crawled and fetched. Glob based paths.
|
||||||
|
paths:
|
||||||
|
# Linux Example
|
||||||
|
#- /var/log/*.log
|
||||||
|
|
||||||
|
#Windows Example
|
||||||
|
- c:\nessus\My Scans\*
|
||||||
|
|
||||||
|
# Exclude lines. A list of regular expressions to match. It drops the lines that are
|
||||||
|
# matching any regular expression from the list.
|
||||||
|
#exclude_lines: ["^DBG"]
|
||||||
|
|
||||||
|
# Include lines. A list of regular expressions to match. It exports the lines that are
|
||||||
|
# matching any regular expression from the list.
|
||||||
|
#include_lines: ["^ERR", "^WARN"]
|
||||||
|
|
||||||
|
# Exclude files. A list of regular expressions to match. Filebeat drops the files that
|
||||||
|
# are matching any regular expression from the list. By default, no files are dropped.
|
||||||
|
#exclude_files: [".gz$"]
|
||||||
|
|
||||||
|
# Optional additional fields. These field can be freely picked
|
||||||
|
# to add additional information to the crawled log files for filtering
|
||||||
|
#fields:
|
||||||
|
# level: debug
|
||||||
|
# review: 1
|
||||||
|
|
||||||
|
### Multiline options
|
||||||
|
|
||||||
|
# Mutiline can be used for log messages spanning multiple lines. This is common
|
||||||
|
# for Java Stack Traces or C-Line Continuation
|
||||||
|
|
||||||
|
# The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
|
||||||
|
#multiline.pattern: ^\[
|
||||||
|
|
||||||
|
# Defines if the pattern set under pattern should be negated or not. Default is false.
|
||||||
|
#multiline.negate: false
|
||||||
|
|
||||||
|
# Match can be set to "after" or "before". It is used to define if lines should be append to a pattern
|
||||||
|
# that was (not) matched before or after or as long as a pattern is not matched based on negate.
|
||||||
|
# Note: After is the equivalent to previous and before is the equivalent to to next in Logstash
|
||||||
|
#multiline.match: after
|
||||||
|
|
||||||
|
|
||||||
|
#================================ General =====================================
|
||||||
|
|
||||||
|
# The name of the shipper that publishes the network data. It can be used to group
|
||||||
|
# all the transactions sent by a single shipper in the web interface.
|
||||||
|
#name:
|
||||||
|
|
||||||
|
# The tags of the shipper are included in their own field with each
|
||||||
|
# transaction published.
|
||||||
|
#tags: ["service-X", "web-tier"]
|
||||||
|
|
||||||
|
# Optional fields that you can specify to add additional information to the
|
||||||
|
# output.
|
||||||
|
#fields:
|
||||||
|
# env: staging
|
||||||
|
|
||||||
|
#================================ Outputs =====================================
|
||||||
|
|
||||||
|
# Configure what outputs to use when sending the data collected by the beat.
|
||||||
|
# Multiple outputs may be used.
|
||||||
|
|
||||||
|
#-------------------------- Elasticsearch output ------------------------------
|
||||||
|
#output.elasticsearch:
|
||||||
|
# Array of hosts to connect to.
|
||||||
|
# hosts: ["logstash01:9200"]
|
||||||
|
|
||||||
|
# Optional protocol and basic auth credentials.
|
||||||
|
#protocol: "https"
|
||||||
|
#username: "elastic"
|
||||||
|
#password: "changeme"
|
||||||
|
|
||||||
|
#----------------------------- Logstash output --------------------------------
|
||||||
|
output.logstash:
|
||||||
|
# The Logstash hosts
|
||||||
|
hosts: ["logstashserver1:5044", "logstashserver2:5044", "logstashserver3:5044"]
|
||||||
|
|
||||||
|
# Optional SSL. By default is off.
|
||||||
|
# List of root certificates for HTTPS server verifications
|
||||||
|
#ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
|
||||||
|
|
||||||
|
# Certificate for SSL client authentication
|
||||||
|
#ssl.certificate: "/etc/pki/client/cert.pem"
|
||||||
|
|
||||||
|
# Client Certificate Key
|
||||||
|
#ssl.key: "/etc/pki/client/cert.key"
|
||||||
|
|
||||||
|
#================================ Logging =====================================
|
||||||
|
|
||||||
|
# Sets log level. The default log level is info.
|
||||||
|
# Available log levels are: critical, error, warning, info, debug
|
||||||
|
#logging.level: debug
|
||||||
|
|
||||||
|
# At debug level, you can selectively enable logging only for some components.
|
||||||
|
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
|
||||||
|
# "publish", "service".
|
||||||
|
#logging.selectors: ["*"]
|
450
kibana/vuln_whisp_kibana/1000_vulnWhispererBaseVisuals.json
Executable file
@ -0,0 +1,450 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"_id": "80158c90-57c1-11e7-b484-a970fc9d150a",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - HIPAA TL",
|
||||||
|
"visState": "{\"type\":\"timelion\",\"title\":\"VulnWhisperer - HIPAA TL\",\"params\":{\"expression\":\".es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:pci_asset').label(\\\"PCI Assets\\\"),.es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:has_hipaa_data').label(\\\"Has HIPAA Data\\\"),.es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:hipaa_asset').label(\\\"HIPAA Assets\\\")\",\"interval\":\"auto\"}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "479deab0-8a39-11e7-a58a-9bfcb3761a3d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL - TaggedAssetsPluginNames",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL - TaggedAssetsPluginNames\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*', q='tags:critical_asset OR tags:hipaa_asset OR tags:pci_asset', split=\\\"plugin_name.keyword:10\\\").bars(width=4).label(regex=\\\".*:(.+)>.*\\\",label=\\\"$1\\\")\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "84f5c370-8a38-11e7-a58a-9bfcb3761a3d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL - CriticalAssetsPluginNames",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL - CriticalAssetsPluginNames\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*', q='tags:critical_asset', split=\\\"plugin_name.keyword:10\\\").bars(width=4).label(regex=\\\".*:(.+)>.*\\\",label=\\\"$1\\\")\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "307cdae0-8a38-11e7-a58a-9bfcb3761a3d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL - PluginNames",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL - PluginNames\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*', split=\\\"plugin_name.keyword:25\\\").bars(width=4).label(regex=\\\".*:(.+)>.*\\\",label=\\\"$1\\\")\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "5093c620-44e9-11e7-8014-ede06a7e69f8",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Mitigation Readme",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Mitigation Readme\",\"type\":\"markdown\",\"params\":{\"markdown\":\"** Legend **\\n\\n* [Common Vulnerability Scoring System (CVSS)](https://nvd.nist.gov/vuln-metrics/cvss) is the NIST vulnerability scoring system\\n* Risk Number is residual risk score calculated from CVSS, which is adjusted to be specific to Heartland which accounts for services not in use such as Java and Flash\\n* Vulnerabilities by Tag are systems tagged with HIPAA and PCI identification.\\n\\n\\n** Workflow **\\n* Select 10.0 under Risk Number to identify Critical Vulnerabilities. \\n* For more information about a CVE, scroll down and click the CVE link.\\n* To filter by tags, use one of the following filters:\\n** tags:has_hipaa_data, tags:pci_asset, tags:hipaa_asset, tags:critical_asset**\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "7e7fbc90-3df2-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-PluginID",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-PluginID\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"plugin_id\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "5a3c0340-3eb3-11e7-a192-93f36fbd9d05",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-CVSSHeatmap",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-CVSSHeatmap\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":false,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Yellow to Red\",\"setColorRange\":false,\"colorsRange\":[],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":false,\"rotate\":0,\"color\":\"#555\"}}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"host\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"cvss\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"_term\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 3500\":\"rgb(255,255,204)\",\"3500 - 7000\":\"rgb(254,217,118)\",\"7000 - 10500\":\"rgb(253,141,60)\",\"10500 - 14000\":\"rgb(227,27,28)\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "1de9e550-3df1-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-Description",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-Description\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"description.keyword\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Description\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "13c7d4e0-3df3-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-Solution",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-Solution\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"solution.keyword\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Solution\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "297df800-3f7e-11e7-bd24-6903e3283192",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Plugin Name",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Plugin Name\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"plugin_name.keyword\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Plugin Name\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "de1a5f40-3f85-11e7-97f9-3777d794626d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - ScanName",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - ScanName\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"scan_name.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Scan Name\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "ecbb99c0-3f84-11e7-97f9-3777d794626d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Total",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Total\",\"type\":\"metric\",\"params\":{\"handleNoResults\":true,\"fontSize\":60},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Total\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "471a3580-3f6b-11e7-88e7-df1abe6547fb",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Vulnerabilities by Tag",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Vulnerabilities by Tag\",\"type\":\"table\",\"params\":{\"perPage\":3,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"3\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"bucket\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"tags:has_hipaa_data\",\"analyze_wildcard\":true}}},\"label\":\"Systems with HIPAA data\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"tags:pci_asset\",\"analyze_wildcard\":true}}},\"label\":\"PCI Systems\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"tags:hipaa_asset\",\"analyze_wildcard\":true}}},\"label\":\"HIPAA Systems\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "35b6d320-3f7f-11e7-bd24-6903e3283192",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Residual Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Residual Risk\",\"type\":\"table\",\"params\":{\"perPage\":15,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"risk_score\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Risk Number\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "a9225930-3df2-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-Risk\",\"type\":\"table\",\"params\":{\"perPage\":4,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"risk\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Risk Severity\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "2f979030-44b9-11e7-a818-f5f80dfc3590",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - ScanBarChart",
|
||||||
|
"visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customLabel\":\"Scan Name\",\"field\":\"plugin_name.keyword\",\"order\":\"desc\",\"orderBy\":\"1\",\"size\":10},\"schema\":\"segment\",\"type\":\"terms\"}],\"listeners\":{},\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"defaultYExtents\":false,\"legendPosition\":\"right\",\"mode\":\"stacked\",\"scale\":\"linear\",\"setYExtents\":false,\"times\":[]},\"title\":\"VulnWhisperer - ScanBarChart\",\"type\":\"histogram\"}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "a6508640-897a-11e7-bbc0-33592ce0be1e",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Critical Assets Aggregated",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Critical Assets Aggregated\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":true,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Green to Red\",\"setColorRange\":true,\"colorsRange\":[{\"from\":0,\"to\":3},{\"from\":3,\"to\":7},{\"from\":7,\"to\":9},{\"from\":9,\"to\":11}],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"color\":\"white\"}}]},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"risk_score\",\"customLabel\":\"Residual Risk Score\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Date\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"host\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Critical Asset IP\"}},{\"id\":\"5\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"plugin_name.keyword\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"row\":true}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"0 - 3\":\"#7EB26D\",\"3 - 7\":\"#EAB839\",\"7 - 9\":\"#EF843C\",\"8 - 10\":\"#BF1B00\",\"9 - 11\":\"#BF1B00\"},\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"},\"legendOpen\":false}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[{\"$state\":{\"store\":\"appState\"},\"meta\":{\"alias\":\"Critical Asset\",\"disabled\":false,\"index\":\"logstash-vulnwhisperer-*\",\"key\":\"tags\",\"negate\":false,\"type\":\"phrase\",\"value\":\"critical_asset\"},\"query\":{\"match\":{\"tags\":{\"query\":\"critical_asset\",\"type\":\"phrase\"}}}}]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "099a3820-3f68-11e7-a6bd-e764d950e506",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "Timelion VulnWhisperer Example",
|
||||||
|
"visState": "{\"type\":\"timelion\",\"title\":\"Timelion VulnWhisperer Example\",\"params\":{\"expression\":\".es(index=logstash-vulnwhisperer-*,q=risk:high).label(\\\"Current High Risk\\\"),.es(index=logstash-vulnwhisperer-*,q=risk:high,offset=-1y).label(\\\"Last 1 Year High Risk\\\"),.es(index=logstash-vulnwhisperer-*,q=risk:medium).label(\\\"Current Medium Risk\\\"),.es(index=logstash-vulnwhisperer-*,q=risk:medium,offset=-1y).label(\\\"Last 1 Year Medium Risk\\\")\",\"interval\":\"auto\"}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "67d432e0-44ec-11e7-a05f-d9719b331a27",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-Critical Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-Critical Risk\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk_score:>=9 AND risk_score:<=10)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=9 AND risk_score:<=10)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=9 AND risk_score:<=10)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=9 AND risk_score:<=10)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "a91b9fe0-44ec-11e7-a05f-d9719b331a27",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-Medium Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-Medium Risk\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk_score:>=4 AND risk_score:<7)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=4 AND risk_score:<7)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=4 AND risk_score:<7)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=4 AND risk_score:<7)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "8d9592d0-44ec-11e7-a05f-d9719b331a27",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-High Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-High Risk\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk_score:>=7 AND risk_score:<9)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=7 AND risk_score:<9)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=7 AND risk_score:<9)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk_score:>=7 AND risk_score:<9)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "a2d66660-44ec-11e7-a05f-d9719b331a27",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-Low Risk",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-Low Risk\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk_score:>0 AND risk_score:<4)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>0 AND risk_score:<4)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk_score:>0 AND risk_score:<4)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk_score:>0 AND risk_score:<4)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "fb6eb020-49ab-11e7-8f8c-57ad64ec48a6",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Critical Risk Score for Tagged Assets",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Critical Risk Score for Tagged Assets\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:hipaa_asset').label(\\\"HIPAA Assets\\\"),.es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:pci_asset').label(\\\"PCI Systems\\\"),.es(index=logstash-vulnwhisperer-*,q='risk_score:>9 AND tags:has_hipaa_data').label(\\\"Has HIPAA Data\\\")\",\"interval\":\"auto\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "b2f2adb0-897f-11e7-a2d2-c57bca21b3aa",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Total",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Total\",\"type\":\"goal\",\"params\":{\"addLegend\":true,\"addTooltip\":true,\"gauge\":{\"autoExtend\":false,\"backStyle\":\"Full\",\"colorSchema\":\"Green to Red\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"gaugeColorMode\":\"Background\",\"gaugeStyle\":\"Full\",\"gaugeType\":\"Metric\",\"invertColors\":false,\"labels\":{\"color\":\"black\",\"show\":false},\"orientation\":\"vertical\",\"percentageMode\":false,\"scale\":{\"color\":\"#333\",\"labels\":false,\"show\":true,\"width\":2},\"style\":{\"bgColor\":true,\"bgFill\":\"white\",\"fontSize\":\"34\",\"labelColor\":false,\"subText\":\"Risk\"},\"type\":\"simple\",\"useRanges\":false,\"verticalSplit\":false},\"type\":\"gauge\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Total\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}}},\"label\":\"Critical\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"0 - 10000\":\"#64B0C8\"},\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "465c5820-8977-11e7-857e-e1d56b17746d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Critical Assets",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Critical Assets\",\"type\":\"heatmap\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"enableHover\":true,\"legendPosition\":\"right\",\"times\":[],\"colorsNumber\":4,\"colorSchema\":\"Green to Red\",\"setColorRange\":true,\"colorsRange\":[{\"from\":0,\"to\":3},{\"from\":3,\"to\":7},{\"from\":7,\"to\":9},{\"from\":9,\"to\":11}],\"invertColors\":false,\"percentageMode\":false,\"valueAxes\":[{\"show\":false,\"id\":\"ValueAxis-1\",\"type\":\"value\",\"scale\":{\"type\":\"linear\",\"defaultYExtents\":false},\"labels\":{\"show\":false,\"rotate\":0,\"color\":\"white\"}}],\"type\":\"heatmap\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"risk_score\",\"customLabel\":\"Residual Risk Score\"}},{\"id\":\"2\",\"enabled\":false,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"risk_score\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"row\":true}},{\"id\":\"3\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Date\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"asset.keyword\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Critical Asset\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"},\"colors\":{\"8 - 10\":\"#BF1B00\",\"9 - 11\":\"#BF1B00\",\"7 - 9\":\"#EF843C\",\"3 - 7\":\"#EAB839\",\"0 - 3\":\"#7EB26D\"},\"legendOpen\":false}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[{\"meta\":{\"index\":\"logstash-vulnwhisperer-*\",\"negate\":false,\"disabled\":false,\"alias\":\"Critical Asset\",\"type\":\"phrase\",\"key\":\"tags\",\"value\":\"critical_asset\"},\"query\":{\"match\":{\"tags\":{\"query\":\"critical_asset\",\"type\":\"phrase\"}}},\"$state\":{\"store\":\"appState\"}}]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "852816e0-3eb1-11e7-90cb-918f9cb01e3d",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-CVSS",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-CVSS\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"cvss\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"CVSS Score\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"asset.keyword\",\"customLabel\":\"# of Assets\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "d048c220-80b3-11e7-8790-73b60225f736",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: High",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: High\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":1000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":true}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"High Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:high\"}}},\"label\":\"\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EF843C\",\"0 - 1000\":\"#E0752D\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "db55bce0-80b3-11e7-8790-73b60225f736",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Critical",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Critical\",\"type\":\"goal\",\"params\":{\"addLegend\":true,\"addTooltip\":true,\"gauge\":{\"autoExtend\":false,\"backStyle\":\"Full\",\"colorSchema\":\"Green to Red\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"gaugeColorMode\":\"Background\",\"gaugeStyle\":\"Full\",\"gaugeType\":\"Metric\",\"invertColors\":false,\"labels\":{\"color\":\"black\",\"show\":false},\"orientation\":\"vertical\",\"percentageMode\":false,\"scale\":{\"color\":\"#333\",\"labels\":false,\"show\":true,\"width\":2},\"style\":{\"bgColor\":true,\"bgFill\":\"white\",\"fontSize\":\"34\",\"labelColor\":false,\"subText\":\"Risk\"},\"type\":\"simple\",\"useRanges\":false,\"verticalSplit\":false},\"type\":\"gauge\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Critical Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:critical\"}}},\"label\":\"Critical\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"0 - 10000\":\"#BF1B00\"},\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "56f0f5f0-3ebe-11e7-a192-93f36fbd9d05",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-RiskOverTime",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-RiskOverTime\",\"type\":\"line\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"@timestamp per 12 hours\"},\"type\":\"category\"}],\"defaultYExtents\":false,\"drawLinesBetweenPoints\":true,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"},\"valueAxis\":\"ValueAxis-1\"},\"interpolate\":\"linear\",\"legendPosition\":\"right\",\"orderBucketsBySum\":false,\"radiusRatio\":9,\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"normal\",\"show\":\"true\",\"showCircles\":true,\"type\":\"line\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"showCircles\":true,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}],\"type\":\"line\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:info\"}}},\"label\":\"Info\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:low\"}}},\"label\":\"Low\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:medium\"}}},\"label\":\"Medium\"},{\"input\":{\"query\":{\"que
ry_string\":{\"query\":\"risk_score_name:high\"}}},\"label\":\"High\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:critical\"}}},\"label\":\"Critical\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"Critical\":\"#962D82\",\"High\":\"#BF1B00\",\"Low\":\"#629E51\",\"Medium\":\"#EAB839\",\"Info\":\"#65C5DB\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "c1361da0-80b3-11e7-8790-73b60225f736",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Medium",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Medium\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false},\"isDisplayWarning\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Medium Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:medium\"}}},\"label\":\"Medium Risk\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EAB839\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "e46ff7f0-897d-11e7-934b-67cec0a7da65",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Low",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Low\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Low Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk_score_name:low\"}}},\"label\":\"Low Risk\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#629E51\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "995e2280-3df3-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-Asset",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-Asset\",\"type\":\"table\",\"params\":{\"perPage\":15,\"showMeticsAtAllLevels\":false,\"showPartialRows\":false,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"asset.keyword\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Asset\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
@ -0,0 +1,43 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"_id": "72051530-448e-11e7-a818-f5f80dfc3590",
|
||||||
|
"_type": "dashboard",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Reporting",
|
||||||
|
"hits": 0,
|
||||||
|
"description": "",
|
||||||
|
"panelsJSON": "[{\"col\":1,\"id\":\"2f979030-44b9-11e7-a818-f5f80dfc3590\",\"panelIndex\":5,\"row\":12,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"8d9592d0-44ec-11e7-a05f-d9719b331a27\",\"panelIndex\":12,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"67d432e0-44ec-11e7-a05f-d9719b331a27\",\"panelIndex\":14,\"row\":4,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":10,\"id\":\"297df800-3f7e-11e7-bd24-6903e3283192\",\"panelIndex\":15,\"row\":8,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"471a3580-3f6b-11e7-88e7-df1abe6547fb\",\"panelIndex\":20,\"row\":8,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":11,\"id\":\"995e2280-3df3-11e7-a44e-c79ca8efb780\",\"panelIndex\":22,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"b2f2adb0-897f-11e7-a2d2-c57bca21b3aa\",\"panelIndex\":23,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"db55bce0-80b3-11e7-8790-73b60225f736\",\"panelIndex\":25,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"d048c220-80b3-11e7-8790-73b60225f736\",\"panelIndex\":26,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"e46ff7f0-897d-11e7-934b-67cec0a7da65\",\"panelIndex\":27,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":3,\"id\":\"c1361da0-80b3-11e7-8790-73b60225f736\",\"panelIndex\":28,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"479deab0-8a39-11e7-a58a-9bfcb3761a3d\",\"panelIndex\":29,\"row\":4,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"}]",
|
||||||
|
"optionsJSON": "{\"darkTheme\":false}",
|
||||||
|
"uiStateJSON": "{\"P-15\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-22\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-23\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-24\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-25\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-26\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-27\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-28\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-5\":{\"vis\":{\"legendOpen\":false}}}",
|
||||||
|
"version": 1,
|
||||||
|
"timeRestore": true,
|
||||||
|
"timeTo": "now",
|
||||||
|
"timeFrom": "now-1y",
|
||||||
|
"refreshInterval": {
|
||||||
|
"display": "Off",
|
||||||
|
"pause": false,
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUqesWib22Ai8JwW3u",
|
||||||
|
"_type": "dashboard",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk Mitigation",
|
||||||
|
"hits": 0,
|
||||||
|
"description": "",
|
||||||
|
"panelsJSON": "[{\"col\":11,\"id\":\"995e2280-3df3-11e7-a44e-c79ca8efb780\",\"panelIndex\":20,\"row\":8,\"size_x\":2,\"size_y\":6,\"type\":\"visualization\"},{\"col\":1,\"id\":\"852816e0-3eb1-11e7-90cb-918f9cb01e3d\",\"panelIndex\":21,\"row\":10,\"size_x\":3,\"size_y\":5,\"type\":\"visualization\"},{\"col\":4,\"id\":\"297df800-3f7e-11e7-bd24-6903e3283192\",\"panelIndex\":27,\"row\":8,\"size_x\":3,\"size_y\":5,\"type\":\"visualization\"},{\"col\":9,\"id\":\"35b6d320-3f7f-11e7-bd24-6903e3283192\",\"panelIndex\":28,\"row\":8,\"size_x\":2,\"size_y\":6,\"type\":\"visualization\"},{\"col\":11,\"id\":\"471a3580-3f6b-11e7-88e7-df1abe6547fb\",\"panelIndex\":30,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"de1a5f40-3f85-11e7-97f9-3777d794626d\",\"panelIndex\":31,\"row\":8,\"size_x\":2,\"size_y\":5,\"type\":\"visualization\"},{\"col\":10,\"id\":\"5093c620-44e9-11e7-8014-ede06a7e69f8\",\"panelIndex\":37,\"row\":4,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"columns\":[\"host\",\"risk\",\"risk_score\",\"cve\",\"plugin_name\",\"solution\",\"plugin_output\"],\"id\":\"54648700-3f74-11e7-852e-69207a3d0726\",\"panelIndex\":38,\"row\":15,\"size_x\":12,\"size_y\":6,\"sort\":[\"@timestamp\",\"desc\"],\"type\":\"search\"},{\"col\":1,\"id\":\"fb6eb020-49ab-11e7-8f8c-57ad64ec48a6\",\"panelIndex\":39,\"row\":8,\"size_x\":3,\"size_y\":2,\"type\":\"visualization\"},{\"col\":5,\"id\":\"465c5820-8977-11e7-857e-e1d56b17746d\",\"panelIndex\":40,\"row\":4,\"size_x\":5,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"56f0f5f0-3ebe-11e7-a192-93f36fbd9d05\",\"panelIndex\":46,\"row\":4,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"e46ff7f0-897d-11e7-934b-67cec0a7da65\",\"panelIndex\":47,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":3,\"id\":\"c1361da0-80b3-11e7-8790-73b60225f736\",\"panelIndex\":48,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id
\":\"d048c220-80b3-11e7-8790-73b60225f736\",\"panelIndex\":49,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"db55bce0-80b3-11e7-8790-73b60225f736\",\"panelIndex\":50,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"b2f2adb0-897f-11e7-a2d2-c57bca21b3aa\",\"panelIndex\":51,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"}]",
|
||||||
|
"optionsJSON": "{\"darkTheme\":false}",
|
||||||
|
"uiStateJSON": "{\"P-11\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-27\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-28\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-3\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"asc\"}}}},\"P-30\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-31\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-40\":{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"}}},\"P-41\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-42\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-43\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"}}},\"P-44\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-45\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-46\":{\"vis\":{\"legendOpen\":true}},\"P-47\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-48\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-49\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-5\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-50\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-51\":{\"vis\":{\"defaultColors\":{\"0 - 
10000\":\"rgb(0,104,55)\"}}},\"P-6\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-8\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
|
||||||
|
"version": 1,
|
||||||
|
"timeRestore": false,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"filter\":[{\"query\":{\"match_all\":{}}}],\"highlightAll\":true,\"version\":true}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
@ -0,0 +1,170 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"_id": "AWCUo-jRib22Ai8JwW1N",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: High Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: High Qualys Scoring\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":1000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":true}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"High Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:high\"}}},\"label\":\"\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EF843C\",\"0 - 1000\":\"#E0752D\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUozGBib22Ai8JwW1B",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Medium Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Medium Qualys Scoring\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Medium Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:medium\"}}},\"label\":\"Medium Risk\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#EAB839\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUpE3Kib22Ai8JwW1c",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Critical Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Critical Qualys Scoring\",\"type\":\"goal\",\"params\":{\"addLegend\":true,\"addTooltip\":true,\"gauge\":{\"autoExtend\":false,\"backStyle\":\"Full\",\"colorSchema\":\"Green to Red\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"gaugeColorMode\":\"Background\",\"gaugeStyle\":\"Full\",\"gaugeType\":\"Metric\",\"invertColors\":false,\"labels\":{\"color\":\"black\",\"show\":false},\"orientation\":\"vertical\",\"percentageMode\":false,\"scale\":{\"color\":\"#333\",\"labels\":false,\"show\":true,\"width\":2},\"style\":{\"bgColor\":true,\"bgFill\":\"white\",\"fontSize\":\"34\",\"labelColor\":false,\"subText\":\"Risk\"},\"type\":\"simple\",\"useRanges\":false,\"verticalSplit\":false},\"type\":\"gauge\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Critical Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:critical\"}}},\"label\":\"Critical\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"0 - 10000\":\"#BF1B00\"},\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUyeHGib22Ai8JwX62",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer-RiskOverTime Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer-RiskOverTime Qualys Scoring\",\"type\":\"line\",\"params\":{\"addLegend\":true,\"addTimeMarker\":false,\"addTooltip\":true,\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"labels\":{\"show\":true,\"truncate\":100},\"position\":\"bottom\",\"scale\":{\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"@timestamp per 12 hours\"},\"type\":\"category\"}],\"defaultYExtents\":false,\"drawLinesBetweenPoints\":true,\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"},\"valueAxis\":\"ValueAxis-1\"},\"interpolate\":\"linear\",\"legendPosition\":\"right\",\"orderBucketsBySum\":false,\"radiusRatio\":9,\"scale\":\"linear\",\"seriesParams\":[{\"data\":{\"id\":\"1\",\"label\":\"Count\"},\"drawLinesBetweenPoints\":true,\"interpolate\":\"linear\",\"mode\":\"normal\",\"show\":\"true\",\"showCircles\":true,\"type\":\"line\",\"valueAxis\":\"ValueAxis-1\"}],\"setYExtents\":false,\"showCircles\":true,\"times\":[],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"labels\":{\"filter\":false,\"rotate\":0,\"show\":true,\"truncate\":100},\"name\":\"LeftAxis-1\",\"position\":\"left\",\"scale\":{\"mode\":\"normal\",\"type\":\"linear\"},\"show\":true,\"style\":{},\"title\":{\"text\":\"Count\"},\"type\":\"value\"}],\"type\":\"line\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:info\"}}},\"label\":\"Info\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:low\"}}},\"label\":\"Low\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:medium\"}}},\"label\":\"Medium\"},{\"input\":{\"query\":{\"query_string\":{\"que
ry\":\"risk:high\"}}},\"label\":\"High\"},{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:critical\"}}},\"label\":\"Critical\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"colors\":{\"Critical\":\"#962D82\",\"High\":\"#BF1B00\",\"Low\":\"#629E51\",\"Medium\":\"#EAB839\",\"Info\":\"#65C5DB\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUos-Fib22Ai8JwW0y",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk: Low Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Risk: Low Qualys Scoring\",\"type\":\"goal\",\"params\":{\"addTooltip\":true,\"addLegend\":true,\"type\":\"gauge\",\"gauge\":{\"verticalSplit\":false,\"autoExtend\":false,\"percentageMode\":false,\"gaugeType\":\"Metric\",\"gaugeStyle\":\"Full\",\"backStyle\":\"Full\",\"orientation\":\"vertical\",\"useRanges\":false,\"colorSchema\":\"Green to Red\",\"gaugeColorMode\":\"Background\",\"colorsRange\":[{\"from\":0,\"to\":10000}],\"invertColors\":false,\"labels\":{\"show\":false,\"color\":\"black\"},\"scale\":{\"show\":true,\"labels\":false,\"color\":\"#333\",\"width\":2},\"type\":\"simple\",\"style\":{\"bgFill\":\"white\",\"bgColor\":true,\"labelColor\":false,\"subText\":\"\",\"fontSize\":\"34\"},\"extendRange\":false}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Low Risk\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"filters\",\"schema\":\"group\",\"params\":{\"filters\":[{\"input\":{\"query\":{\"query_string\":{\"query\":\"risk:low\"}}},\"label\":\"Low Risk\"}]}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":true,\"colors\":{\"0 - 10000\":\"#629E51\"}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCg9Wsfib22Ai8Jww3v",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Qualys: Category Description",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Qualys: Category Description\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"category_description.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Category Description\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"match_all\":{}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCg88f1ib22Ai8Jww3C",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - QualysOS",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - QualysOS\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"operating_system.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"match_all\":{}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCg9JUAib22Ai8Jww3Y",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - QualysOwner",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - QualysOwner\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"owner.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"match_all\":{}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCg9tE6ib22Ai8Jww4R",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Qualys: Impact",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Qualys: Impact\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"impact.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Impact\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"match_all\":{}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCg9igvib22Ai8Jww36",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Qualys: Level",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - Qualys: Level\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\",\"type\":\"table\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"level.keyword\",\"size\":20,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Level\"}}],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"match_all\":{}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUsp_3ib22Ai8JwW7R",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-Critical Risk Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-Critical Risk Qualys Scoring\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk:critical)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk:critical)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk:critical)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk:critical)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\",\"type\":\"timelion\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "AWCUtHETib22Ai8JwW79",
|
||||||
|
"_type": "visualization",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - TL-High Risk Qualys Scoring",
|
||||||
|
"visState": "{\"title\":\"VulnWhisperer - TL-High Risk Qualys Scoring\",\"type\":\"timelion\",\"params\":{\"expression\":\".es(index='logstash-vulnwhisperer-*',q='(risk:high)').label(\\\"Original\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk:high)',offset=-1w).label(\\\"One week offset\\\"),.es(index='logstash-vulnwhisperer-*',q='(risk:high)').subtract(.es(index='logstash-vulnwhisperer-*',q='(risk:high)',offset=-1w)).label(\\\"Difference\\\").lines(steps=3,fill=2,width=1)\",\"interval\":\"auto\",\"type\":\"timelion\"},\"aggs\":[],\"listeners\":{}}",
|
||||||
|
"uiStateJSON": "{}",
|
||||||
|
"description": "",
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
@ -0,0 +1,50 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"_id": "AWCUrIBqib22Ai8JwW43",
|
||||||
|
"_type": "dashboard",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Reporting Qualys Scoring",
|
||||||
|
"hits": 0,
|
||||||
|
"description": "",
|
||||||
|
"panelsJSON": "[{\"col\":1,\"id\":\"2f979030-44b9-11e7-a818-f5f80dfc3590\",\"panelIndex\":5,\"row\":11,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":10,\"id\":\"297df800-3f7e-11e7-bd24-6903e3283192\",\"panelIndex\":15,\"row\":7,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"471a3580-3f6b-11e7-88e7-df1abe6547fb\",\"panelIndex\":20,\"row\":7,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":11,\"id\":\"995e2280-3df3-11e7-a44e-c79ca8efb780\",\"panelIndex\":22,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"b2f2adb0-897f-11e7-a2d2-c57bca21b3aa\",\"panelIndex\":23,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"479deab0-8a39-11e7-a58a-9bfcb3761a3d\",\"panelIndex\":29,\"row\":4,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"size_x\":6,\"size_y\":3,\"panelIndex\":30,\"type\":\"visualization\",\"id\":\"AWCUtHETib22Ai8JwW79\",\"col\":1,\"row\":8},{\"size_x\":6,\"size_y\":3,\"panelIndex\":31,\"type\":\"visualization\",\"id\":\"AWCUsp_3ib22Ai8JwW7R\",\"col\":7,\"row\":4},{\"size_x\":2,\"size_y\":3,\"panelIndex\":33,\"type\":\"visualization\",\"id\":\"AWCUozGBib22Ai8JwW1B\",\"col\":3,\"row\":1},{\"size_x\":2,\"size_y\":3,\"panelIndex\":34,\"type\":\"visualization\",\"id\":\"AWCUo-jRib22Ai8JwW1N\",\"col\":5,\"row\":1},{\"size_x\":2,\"size_y\":3,\"panelIndex\":35,\"type\":\"visualization\",\"id\":\"AWCUpE3Kib22Ai8JwW1c\",\"col\":7,\"row\":1},{\"size_x\":2,\"size_y\":3,\"panelIndex\":36,\"type\":\"visualization\",\"id\":\"AWCUos-Fib22Ai8JwW0y\",\"col\":1,\"row\":1}]",
|
||||||
|
"optionsJSON": "{\"darkTheme\":false}",
|
||||||
|
"uiStateJSON": "{\"P-15\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"defaultColors\":{\"0 - 100\":\"rgb(0,104,55)\"}}},\"P-22\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-23\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-24\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-5\":{\"vis\":{\"legendOpen\":false}},\"P-33\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-34\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-35\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-27\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-28\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-26\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"}}},\"P-25\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-32\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-36\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}}}",
|
||||||
|
"version": 1,
|
||||||
|
"timeRestore": true,
|
||||||
|
"timeTo": "now",
|
||||||
|
"timeFrom": "now-30d",
|
||||||
|
"refreshInterval": {
|
||||||
|
"display": "Off",
|
||||||
|
"pause": false,
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"-vulnerability_category:\\\"INFORMATION_GATHERED\\\"\"}}}],\"highlightAll\":true,\"version\":true}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"_id": "5dba30c0-3df3-11e7-a44e-c79ca8efb780",
|
||||||
|
"_type": "dashboard",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Risk Mitigation Qualys Web Scoring",
|
||||||
|
"hits": 0,
|
||||||
|
"description": "",
|
||||||
|
"panelsJSON": "[{\"col\":11,\"id\":\"995e2280-3df3-11e7-a44e-c79ca8efb780\",\"panelIndex\":20,\"row\":8,\"size_x\":2,\"size_y\":7,\"type\":\"visualization\"},{\"col\":1,\"id\":\"852816e0-3eb1-11e7-90cb-918f9cb01e3d\",\"panelIndex\":21,\"row\":10,\"size_x\":3,\"size_y\":5,\"type\":\"visualization\"},{\"col\":4,\"id\":\"297df800-3f7e-11e7-bd24-6903e3283192\",\"panelIndex\":27,\"row\":8,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":9,\"id\":\"35b6d320-3f7f-11e7-bd24-6903e3283192\",\"panelIndex\":28,\"row\":8,\"size_x\":2,\"size_y\":7,\"type\":\"visualization\"},{\"col\":11,\"id\":\"471a3580-3f6b-11e7-88e7-df1abe6547fb\",\"panelIndex\":30,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"de1a5f40-3f85-11e7-97f9-3777d794626d\",\"panelIndex\":31,\"row\":8,\"size_x\":2,\"size_y\":4,\"type\":\"visualization\"},{\"col\":10,\"id\":\"5093c620-44e9-11e7-8014-ede06a7e69f8\",\"panelIndex\":37,\"row\":4,\"size_x\":3,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"columns\":[\"host\",\"risk\",\"risk_score\",\"cve\",\"plugin_name\",\"solution\",\"plugin_output\"],\"id\":\"54648700-3f74-11e7-852e-69207a3d0726\",\"panelIndex\":38,\"row\":15,\"size_x\":12,\"size_y\":6,\"sort\":[\"@timestamp\",\"desc\"],\"type\":\"search\"},{\"col\":1,\"id\":\"fb6eb020-49ab-11e7-8f8c-57ad64ec48a6\",\"panelIndex\":39,\"row\":8,\"size_x\":3,\"size_y\":2,\"type\":\"visualization\"},{\"col\":5,\"id\":\"465c5820-8977-11e7-857e-e1d56b17746d\",\"panelIndex\":40,\"row\":4,\"size_x\":5,\"size_y\":4,\"type\":\"visualization\"},{\"col\":9,\"id\":\"b2f2adb0-897f-11e7-a2d2-c57bca21b3aa\",\"panelIndex\":45,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWCUos-Fib22Ai8JwW0y\",\"panelIndex\":47,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":3,\"id\":\"AWCUozGBib22Ai8JwW1B\",\"panelIndex\":48,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"AWCUo-jRib22Ai8JwW1N\",\"pa
nelIndex\":49,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWCUpE3Kib22Ai8JwW1c\",\"panelIndex\":50,\"row\":1,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWCUyeHGib22Ai8JwX62\",\"panelIndex\":51,\"row\":4,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":4,\"id\":\"AWCg88f1ib22Ai8Jww3C\",\"panelIndex\":52,\"row\":12,\"size_x\":3,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWCg9JUAib22Ai8Jww3Y\",\"panelIndex\":53,\"row\":12,\"size_x\":2,\"size_y\":3,\"type\":\"visualization\"}]",
|
||||||
|
"optionsJSON": "{\"darkTheme\":false}",
|
||||||
|
"uiStateJSON": "{\"P-11\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-20\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-21\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-27\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-28\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"desc\"}}}},\"P-3\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":0,\"direction\":\"asc\"}}}},\"P-30\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-31\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-40\":{\"vis\":{\"defaultColors\":{\"0 - 3\":\"rgb(0,104,55)\",\"3 - 7\":\"rgb(135,203,103)\",\"7 - 9\":\"rgb(255,255,190)\",\"9 - 11\":\"rgb(249,142,82)\"}}},\"P-41\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-42\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-43\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"}}},\"P-44\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-45\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"}}},\"P-47\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-48\":{\"vis\":{\"defaultColors\":{\"0 - 10000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-49\":{\"vis\":{\"defaultColors\":{\"0 - 1000\":\"rgb(0,104,55)\"},\"legendOpen\":false}},\"P-5\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-50\":{\"vis\":{\"defaultColors\":{\"0 - 
10000\":\"rgb(0,104,55)\"}}},\"P-6\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-8\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-52\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}},\"P-53\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
|
||||||
|
"version": 1,
|
||||||
|
"timeRestore": true,
|
||||||
|
"timeTo": "now",
|
||||||
|
"timeFrom": "now-30d",
|
||||||
|
"refreshInterval": {
|
||||||
|
"display": "Off",
|
||||||
|
"pause": false,
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"-vulnerability_category:\\\"INFORMATION_GATHERED\\\"\"}}}],\"highlightAll\":true,\"version\":true}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
28
kibana/vuln_whisp_kibana/9000_vulnWhisperer_SavedSearch.json
Executable file
@ -0,0 +1,28 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"_id": "54648700-3f74-11e7-852e-69207a3d0726",
|
||||||
|
"_type": "search",
|
||||||
|
"_source": {
|
||||||
|
"title": "VulnWhisperer - Saved Search",
|
||||||
|
"description": "",
|
||||||
|
"hits": 0,
|
||||||
|
"columns": [
|
||||||
|
"host",
|
||||||
|
"risk",
|
||||||
|
"risk_score",
|
||||||
|
"cve",
|
||||||
|
"plugin_name",
|
||||||
|
"solution",
|
||||||
|
"plugin_output"
|
||||||
|
],
|
||||||
|
"sort": [
|
||||||
|
"@timestamp",
|
||||||
|
"desc"
|
||||||
|
],
|
||||||
|
"version": 1,
|
||||||
|
"kibanaSavedObjectMeta": {
|
||||||
|
"searchSourceJSON": "{\"index\":\"logstash-vulnwhisperer-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
14
logstash/0001_input_beats.conf
Executable file
@ -0,0 +1,14 @@
|
|||||||
|
input {
|
||||||
|
beats {
|
||||||
|
port => 5044
|
||||||
|
tags => "beats"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter {
|
||||||
|
if [beat][hostname] == "filebeathost" {
|
||||||
|
mutate {
|
||||||
|
add_tag => ["nessus"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
220
logstash/1000_nessus_process_file.conf
Normal file
@ -0,0 +1,220 @@
|
|||||||
|
# Author: Austin Taylor and Justin Henderson
|
||||||
|
# Email: email@austintaylor.io
|
||||||
|
# Last Update: 12/20/2017
|
||||||
|
# Version 0.3
|
||||||
|
# Description: Take in nessus reports from vulnWhisperer and pumps into logstash
|
||||||
|
|
||||||
|
|
||||||
|
input {
|
||||||
|
file {
|
||||||
|
path => "/opt/vulnwhisperer/nessus/**/*"
|
||||||
|
start_position => "beginning"
|
||||||
|
tags => "nessus"
|
||||||
|
type => "nessus"
|
||||||
|
}
|
||||||
|
file {
|
||||||
|
path => "/opt/vulnwhisperer/tenable/*.csv"
|
||||||
|
start_position => "beginning"
|
||||||
|
tags => "tenable"
|
||||||
|
type => "tenable"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter {
|
||||||
|
if "nessus" in [tags] or "tenable" in [tags] {
|
||||||
|
# Drop the header column
|
||||||
|
if [message] =~ "^Plugin ID" { drop {} }
|
||||||
|
|
||||||
|
csv {
|
||||||
|
# columns => ["plugin_id", "cve", "cvss", "risk", "asset", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output"]
|
||||||
|
columns => ["plugin_id", "cve", "cvss", "risk", "asset", "protocol", "port", "plugin_name", "synopsis", "description", "solution", "see_also", "plugin_output", "asset_uuid", "vulnerability_state", "ip", "fqdn", "netbios", "operating_system", "mac_address", "plugin_family", "cvss_base", "cvss_temporal", "cvss_temporal_vector", "cvss_vector", "cvss3_base", "cvss3_temporal", "cvss3_temporal_vector", "cvss_vector", "system_type", "host_start", "host_end"]
|
||||||
|
separator => ","
|
||||||
|
source => "message"
|
||||||
|
}
|
||||||
|
|
||||||
|
ruby {
|
||||||
|
code => "if event.get('description')
|
||||||
|
event.set('description', event.get('description').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
|
||||||
|
end
|
||||||
|
if event.get('synopsis')
|
||||||
|
event.set('synopsis', event.get('synopsis').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
|
||||||
|
end
|
||||||
|
if event.get('solution')
|
||||||
|
event.set('solution', event.get('solution').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
|
||||||
|
end
|
||||||
|
if event.get('see_also')
|
||||||
|
event.set('see_also', event.get('see_also').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
|
||||||
|
end
|
||||||
|
if event.get('plugin_output')
|
||||||
|
event.set('plugin_output', event.get('plugin_output').gsub(92.chr + 'n', 10.chr).gsub(92.chr + 'r', 13.chr))
|
||||||
|
end"
|
||||||
|
}
|
||||||
|
|
||||||
|
#If using filebeats as your source, you will need to replace the "path" field to "source"
|
||||||
|
grok {
|
||||||
|
match => { "path" => "(?<scan_name>[a-zA-Z0-9_.\-]+)_%{INT:scan_id}_%{INT:history_id}_%{INT:last_updated}.csv$" }
|
||||||
|
tag_on_failure => []
|
||||||
|
}
|
||||||
|
|
||||||
|
date {
|
||||||
|
match => [ "last_updated", "UNIX" ]
|
||||||
|
target => "@timestamp"
|
||||||
|
remove_field => ["last_updated"]
|
||||||
|
}
|
||||||
|
|
||||||
|
if [risk] == "None" {
|
||||||
|
mutate { add_field => { "risk_number" => 0 }}
|
||||||
|
}
|
||||||
|
if [risk] == "Low" {
|
||||||
|
mutate { add_field => { "risk_number" => 1 }}
|
||||||
|
}
|
||||||
|
if [risk] == "Medium" {
|
||||||
|
mutate { add_field => { "risk_number" => 2 }}
|
||||||
|
}
|
||||||
|
if [risk] == "High" {
|
||||||
|
mutate { add_field => { "risk_number" => 3 }}
|
||||||
|
}
|
||||||
|
if [risk] == "Critical" {
|
||||||
|
mutate { add_field => { "risk_number" => 4 }}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ![cve] or [cve] == "nan" {
|
||||||
|
mutate { remove_field => [ "cve" ] }
|
||||||
|
}
|
||||||
|
if ![cvss] or [cvss] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss" ] }
|
||||||
|
}
|
||||||
|
if ![cvss_base] or [cvss_base] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss_base" ] }
|
||||||
|
}
|
||||||
|
if ![cvss_temporal] or [cvss_temporal] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss_temporal" ] }
|
||||||
|
}
|
||||||
|
if ![cvss_temporal_vector] or [cvss_temporal_vector] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss_temporal_vector" ] }
|
||||||
|
}
|
||||||
|
if ![cvss_vector] or [cvss_vector] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss_vector" ] }
|
||||||
|
}
|
||||||
|
if ![cvss3_base] or [cvss3_base] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss3_base" ] }
|
||||||
|
}
|
||||||
|
if ![cvss3_temporal] or [cvss3_temporal] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss3_temporal" ] }
|
||||||
|
}
|
||||||
|
if ![cvss3_temporal_vector] or [cvss3_temporal_vector] == "nan" {
|
||||||
|
mutate { remove_field => [ "cvss3_temporal_vector" ] }
|
||||||
|
}
|
||||||
|
if ![description] or [description] == "nan" {
|
||||||
|
mutate { remove_field => [ "description" ] }
|
||||||
|
}
|
||||||
|
if ![mac_address] or [mac_address] == "nan" {
|
||||||
|
mutate { remove_field => [ "mac_address" ] }
|
||||||
|
}
|
||||||
|
if ![netbios] or [netbios] == "nan" {
|
||||||
|
mutate { remove_field => [ "netbios" ] }
|
||||||
|
}
|
||||||
|
if ![operating_system] or [operating_system] == "nan" {
|
||||||
|
mutate { remove_field => [ "operating_system" ] }
|
||||||
|
}
|
||||||
|
if ![plugin_output] or [plugin_output] == "nan" {
|
||||||
|
mutate { remove_field => [ "plugin_output" ] }
|
||||||
|
}
|
||||||
|
if ![see_also] or [see_also] == "nan" {
|
||||||
|
mutate { remove_field => [ "see_also" ] }
|
||||||
|
}
|
||||||
|
if ![synopsis] or [synopsis] == "nan" {
|
||||||
|
mutate { remove_field => [ "synopsis" ] }
|
||||||
|
}
|
||||||
|
if ![system_type] or [system_type] == "nan" {
|
||||||
|
mutate { remove_field => [ "system_type" ] }
|
||||||
|
}
|
||||||
|
|
||||||
|
mutate {
|
||||||
|
remove_field => [ "message" ]
|
||||||
|
add_field => { "risk_score" => "%{cvss}" }
|
||||||
|
}
|
||||||
|
mutate {
|
||||||
|
convert => { "risk_score" => "float" }
|
||||||
|
}
|
||||||
|
if [risk_score] == 0 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "info" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] > 0 and [risk_score] < 3 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "low" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 3 and [risk_score] < 6 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "medium" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >=6 and [risk_score] < 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "high" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "critical" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Compensating controls - adjust risk_score
|
||||||
|
# Adobe and Java are not allowed to run in browser unless whitelisted
|
||||||
|
# Therefore, lower score by dividing by 3 (score is subjective to risk)
|
||||||
|
|
||||||
|
#Modify and uncomment when ready to use
|
||||||
|
#if [risk_score] != 0 {
|
||||||
|
# if [plugin_name] =~ "Adobe" and [risk_score] > 6 or [plugin_name] =~ "Java" and [risk_score] > 6 {
|
||||||
|
# ruby {
|
||||||
|
# code => "event.set('risk_score', event.get('risk_score') / 3)"
|
||||||
|
# }
|
||||||
|
# mutate {
|
||||||
|
# add_field => { "compensating_control" => "Adobe and Flash removed from browsers unless whitelisted site." }
|
||||||
|
# }
|
||||||
|
# }
|
||||||
|
#}
|
||||||
|
|
||||||
|
# Add tags for reporting based on assets or criticality
|
||||||
|
|
||||||
|
if [asset] == "dc01" or [asset] == "dc02" or [asset] == "pki01" or [asset] == "192.168.0.54" or [asset] =~ "^192\.168\.0\." or [asset] =~ "^42.42.42." {
|
||||||
|
mutate {
|
||||||
|
add_tag => [ "critical_asset" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#if [asset] =~ "^192\.168\.[45][0-9][0-9]\.1$" or [asset] =~ "^192.168\.[50]\.[0-9]{1,2}\.1$"{
|
||||||
|
# mutate {
|
||||||
|
# add_tag => [ "has_hipaa_data" ]
|
||||||
|
# }
|
||||||
|
#}
|
||||||
|
#if [asset] =~ "^192\.168\.[45][0-9][0-9]\." {
|
||||||
|
# mutate {
|
||||||
|
# add_tag => [ "hipaa_asset" ]
|
||||||
|
# }
|
||||||
|
#}
|
||||||
|
if [asset] =~ "^hr" {
|
||||||
|
mutate {
|
||||||
|
add_tag => [ "pci_asset" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#if [asset] =~ "^10\.0\.50\." {
|
||||||
|
# mutate {
|
||||||
|
# add_tag => [ "web_servers" ]
|
||||||
|
# }
|
||||||
|
#}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output {
|
||||||
|
if "nessus" in [tags] or "tenable" in [tags] or [type] in [ "nessus", "tenable" ] {
|
||||||
|
# stdout { codec => rubydebug }
|
||||||
|
elasticsearch {
|
||||||
|
hosts => [ "localhost:9200" ]
|
||||||
|
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
153
logstash/2000_qualys_web_scans.conf
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
# Author: Austin Taylor and Justin Henderson
|
||||||
|
# Email: austin@hasecuritysolutions.com
|
||||||
|
# Last Update: 12/30/2017
|
||||||
|
# Version 0.3
|
||||||
|
# Description: Take in qualys web scan reports from vulnWhisperer and pumps into logstash
|
||||||
|
|
||||||
|
input {
|
||||||
|
file {
|
||||||
|
path => "/opt/vulnwhisperer/qualys/scans/**/*.json"
|
||||||
|
type => json
|
||||||
|
codec => json
|
||||||
|
start_position => "beginning"
|
||||||
|
tags => [ "qualys" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter {
|
||||||
|
if "qualys" in [tags] {
|
||||||
|
grok {
|
||||||
|
match => { "path" => [ "(?<tags>qualys_vuln)_scan_%{DATA}_%{INT:last_updated}.json$", "(?<tags>qualys_web)_%{INT:app_id}_%{INT:last_updated}.json$" ] }
|
||||||
|
tag_on_failure => []
|
||||||
|
}
|
||||||
|
|
||||||
|
mutate {
|
||||||
|
replace => [ "message", "%{message}" ]
|
||||||
|
#gsub => [
|
||||||
|
# "message", "\|\|\|", " ",
|
||||||
|
# "message", "\t\t", " ",
|
||||||
|
# "message", " ", " ",
|
||||||
|
# "message", " ", " ",
|
||||||
|
# "message", " ", " ",
|
||||||
|
# "message", "nan", " ",
|
||||||
|
# "message",'\n',''
|
||||||
|
#]
|
||||||
|
}
|
||||||
|
|
||||||
|
if "qualys_web" in [tags] {
|
||||||
|
mutate {
|
||||||
|
add_field => { "asset" => "%{web_application_name}" }
|
||||||
|
add_field => { "risk_score" => "%{cvss}" }
|
||||||
|
}
|
||||||
|
} else if "qualys_vuln" in [tags] {
|
||||||
|
mutate {
|
||||||
|
add_field => { "asset" => "%{ip}" }
|
||||||
|
add_field => { "risk_score" => "%{cvss}" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if [risk] == "1" {
|
||||||
|
mutate { add_field => { "risk_number" => 0 }}
|
||||||
|
mutate { replace => { "risk" => "info" }}
|
||||||
|
}
|
||||||
|
if [risk] == "2" {
|
||||||
|
mutate { add_field => { "risk_number" => 1 }}
|
||||||
|
mutate { replace => { "risk" => "low" }}
|
||||||
|
}
|
||||||
|
if [risk] == "3" {
|
||||||
|
mutate { add_field => { "risk_number" => 2 }}
|
||||||
|
mutate { replace => { "risk" => "medium" }}
|
||||||
|
}
|
||||||
|
if [risk] == "4" {
|
||||||
|
mutate { add_field => { "risk_number" => 3 }}
|
||||||
|
mutate { replace => { "risk" => "high" }}
|
||||||
|
}
|
||||||
|
if [risk] == "5" {
|
||||||
|
mutate { add_field => { "risk_number" => 4 }}
|
||||||
|
mutate { replace => { "risk" => "critical" }}
|
||||||
|
}
|
||||||
|
|
||||||
|
mutate {
|
||||||
|
remove_field => "message"
|
||||||
|
}
|
||||||
|
|
||||||
|
if [first_time_detected] {
|
||||||
|
date {
|
||||||
|
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "first_time_detected"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [first_time_tested] {
|
||||||
|
date {
|
||||||
|
match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "first_time_tested"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [last_time_detected] {
|
||||||
|
date {
|
||||||
|
match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "last_time_detected"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [last_time_tested] {
|
||||||
|
date {
|
||||||
|
match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "last_time_tested"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
date {
|
||||||
|
match => [ "last_updated", "UNIX" ]
|
||||||
|
target => "@timestamp"
|
||||||
|
remove_field => "last_updated"
|
||||||
|
}
|
||||||
|
mutate {
|
||||||
|
convert => { "plugin_id" => "integer"}
|
||||||
|
convert => { "id" => "integer"}
|
||||||
|
convert => { "risk_number" => "integer"}
|
||||||
|
convert => { "risk_score" => "float"}
|
||||||
|
convert => { "total_times_detected" => "integer"}
|
||||||
|
convert => { "cvss_temporal" => "float"}
|
||||||
|
convert => { "cvss" => "float"}
|
||||||
|
}
|
||||||
|
if [risk_score] == 0 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "info" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] > 0 and [risk_score] < 3 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "low" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 3 and [risk_score] < 6 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "medium" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >=6 and [risk_score] < 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "high" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "critical" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if [asset] =~ "\.yourdomain\.(com|net)$" {
|
||||||
|
mutate {
|
||||||
|
add_tag => [ "critical_asset" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
output {
|
||||||
|
if "qualys" in [tags] {
|
||||||
|
stdout { codec => rubydebug }
|
||||||
|
elasticsearch {
|
||||||
|
hosts => [ "localhost:9200" ]
|
||||||
|
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
146
logstash/3000_openvas.conf
Normal file
@ -0,0 +1,146 @@
|
|||||||
|
# Author: Austin Taylor and Justin Henderson
|
||||||
|
# Email: austin@hasecuritysolutions.com
|
||||||
|
# Last Update: 03/04/2018
|
||||||
|
# Version 0.3
|
||||||
|
# Description: Take in qualys web scan reports from vulnWhisperer and pumps into logstash
|
||||||
|
|
||||||
|
input {
|
||||||
|
file {
|
||||||
|
path => "/opt/vulnwhisperer/openvas/*.json"
|
||||||
|
type => json
|
||||||
|
codec => json
|
||||||
|
start_position => "beginning"
|
||||||
|
tags => [ "openvas_scan", "openvas" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter {
|
||||||
|
if "openvas_scan" in [tags] {
|
||||||
|
mutate {
|
||||||
|
replace => [ "message", "%{message}" ]
|
||||||
|
gsub => [
|
||||||
|
"message", "\|\|\|", " ",
|
||||||
|
"message", "\t\t", " ",
|
||||||
|
"message", " ", " ",
|
||||||
|
"message", " ", " ",
|
||||||
|
"message", " ", " ",
|
||||||
|
"message", "nan", " ",
|
||||||
|
"message",'\n',''
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
grok {
|
||||||
|
match => { "path" => "openvas_scan_%{DATA:scan_id}_%{INT:last_updated}.json$" }
|
||||||
|
tag_on_failure => []
|
||||||
|
}
|
||||||
|
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score" => "%{cvss}" }
|
||||||
|
}
|
||||||
|
|
||||||
|
if [risk] == "1" {
|
||||||
|
mutate { add_field => { "risk_number" => 0 }}
|
||||||
|
mutate { replace => { "risk" => "info" }}
|
||||||
|
}
|
||||||
|
if [risk] == "2" {
|
||||||
|
mutate { add_field => { "risk_number" => 1 }}
|
||||||
|
mutate { replace => { "risk" => "low" }}
|
||||||
|
}
|
||||||
|
if [risk] == "3" {
|
||||||
|
mutate { add_field => { "risk_number" => 2 }}
|
||||||
|
mutate { replace => { "risk" => "medium" }}
|
||||||
|
}
|
||||||
|
if [risk] == "4" {
|
||||||
|
mutate { add_field => { "risk_number" => 3 }}
|
||||||
|
mutate { replace => { "risk" => "high" }}
|
||||||
|
}
|
||||||
|
if [risk] == "5" {
|
||||||
|
mutate { add_field => { "risk_number" => 4 }}
|
||||||
|
mutate { replace => { "risk" => "critical" }}
|
||||||
|
}
|
||||||
|
|
||||||
|
mutate {
|
||||||
|
remove_field => "message"
|
||||||
|
}
|
||||||
|
|
||||||
|
if [first_time_detected] {
|
||||||
|
date {
|
||||||
|
match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "first_time_detected"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [first_time_tested] {
|
||||||
|
date {
|
||||||
|
match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "first_time_tested"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [last_time_detected] {
|
||||||
|
date {
|
||||||
|
match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "last_time_detected"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [last_time_tested] {
|
||||||
|
date {
|
||||||
|
match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
|
||||||
|
target => "last_time_tested"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
date {
|
||||||
|
match => [ "last_updated", "UNIX" ]
|
||||||
|
target => "@timestamp"
|
||||||
|
remove_field => "last_updated"
|
||||||
|
}
|
||||||
|
mutate {
|
||||||
|
convert => { "plugin_id" => "integer"}
|
||||||
|
convert => { "id" => "integer"}
|
||||||
|
convert => { "risk_number" => "integer"}
|
||||||
|
convert => { "risk_score" => "float"}
|
||||||
|
convert => { "total_times_detected" => "integer"}
|
||||||
|
convert => { "cvss_temporal" => "float"}
|
||||||
|
convert => { "cvss" => "float"}
|
||||||
|
}
|
||||||
|
if [risk_score] == 0 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "info" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] > 0 and [risk_score] < 3 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "low" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 3 and [risk_score] < 6 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "medium" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >=6 and [risk_score] < 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "high" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if [risk_score] >= 9 {
|
||||||
|
mutate {
|
||||||
|
add_field => { "risk_score_name" => "critical" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
# Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break.
|
||||||
|
if [asset] =~ "^10\.0\.100\." {
|
||||||
|
mutate {
|
||||||
|
add_tag => [ "critical_asset" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
output {
|
||||||
|
if "openvas" in [tags] {
|
||||||
|
stdout { codec => rubydebug }
|
||||||
|
elasticsearch {
|
||||||
|
hosts => [ "localhost:9200" ]
|
||||||
|
index => "logstash-vulnwhisperer-%{+YYYY.MM}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
13
logstash/9998_input_broker_rabbitmq.conf
Executable file
@ -0,0 +1,13 @@
|
|||||||
|
input {
|
||||||
|
rabbitmq {
|
||||||
|
key => "nessus"
|
||||||
|
queue => "nessus"
|
||||||
|
durable => true
|
||||||
|
exchange => "nessus"
|
||||||
|
user => "logstash"
|
||||||
|
password => "yourpassword"
|
||||||
|
host => "buffer01"
|
||||||
|
port => 5672
|
||||||
|
tags => [ "queue_nessus", "rabbitmq" ]
|
||||||
|
}
|
||||||
|
}
|
16
logstash/9998_output_broker_rabbitmq.conf
Executable file
@ -0,0 +1,16 @@
|
|||||||
|
output {
|
||||||
|
if "nessus" in [tags]{
|
||||||
|
rabbitmq {
|
||||||
|
key => "nessus"
|
||||||
|
exchange => "nessus"
|
||||||
|
exchange_type => "direct"
|
||||||
|
user => "logstash"
|
||||||
|
password => "yourbufferpassword"
|
||||||
|
host => "buffer01"
|
||||||
|
port => 5672
|
||||||
|
durable => true
|
||||||
|
persistent => true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
7
requirements.txt
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
pandas==0.20.3
|
||||||
|
setuptools==0.9.8
|
||||||
|
pytz==2017.2
|
||||||
|
Requests==2.18.3
|
||||||
|
qualysapi==4.1.0
|
||||||
|
lxml==4.1.1
|
||||||
|
bs4
|
2
setup.py
@ -4,7 +4,7 @@ from setuptools import setup, find_packages
|
|||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='VulnWhisperer',
|
name='VulnWhisperer',
|
||||||
version='1.0.1',
|
version='1.5.0',
|
||||||
packages=find_packages(),
|
packages=find_packages(),
|
||||||
url='https://github.com/austin-taylor/vulnwhisperer',
|
url='https://github.com/austin-taylor/vulnwhisperer',
|
||||||
license="""MIT License
|
license="""MIT License
|
||||||
|
BIN
vulnwhisp/.DS_Store
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
from utils.cli import bcolors
|
@ -1,16 +1,9 @@
|
|||||||
import requests
|
import requests
|
||||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
||||||
|
|
||||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
|
||||||
import pandas as pd
|
|
||||||
from pandas.io.json import json_normalize
|
|
||||||
import pytz
|
import pytz
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
import os
|
|
||||||
import time
|
import time
|
||||||
import io
|
|
||||||
|
|
||||||
|
|
||||||
class NessusAPI(object):
|
class NessusAPI(object):
|
||||||
@ -39,7 +32,7 @@ class NessusAPI(object):
|
|||||||
'Origin': self.base,
|
'Origin': self.base,
|
||||||
'Accept-Encoding': 'gzip, deflate, br',
|
'Accept-Encoding': 'gzip, deflate, br',
|
||||||
'Accept-Language': 'en-US,en;q=0.8',
|
'Accept-Language': 'en-US,en;q=0.8',
|
||||||
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.96 Safari/537.36',
|
'User-Agent': 'VulnWhisperer for Nessus',
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'Accept': 'application/json, text/javascript, */*; q=0.01',
|
'Accept': 'application/json, text/javascript, */*; q=0.01',
|
||||||
'Referer': self.base,
|
'Referer': self.base,
|
||||||
@ -76,6 +69,8 @@ class NessusAPI(object):
|
|||||||
while (timeout <= 10) and (not success):
|
while (timeout <= 10) and (not success):
|
||||||
data = methods[method](url, data=data, headers=self.headers, verify=False)
|
data = methods[method](url, data=data, headers=self.headers, verify=False)
|
||||||
if data.status_code == 401:
|
if data.status_code == 401:
|
||||||
|
if url == self.base + self.SESSION:
|
||||||
|
break
|
||||||
try:
|
try:
|
||||||
self.login()
|
self.login()
|
||||||
timeout += 1
|
timeout += 1
|
||||||
@ -109,7 +104,7 @@ class NessusAPI(object):
|
|||||||
|
|
||||||
def get_scan_ids(self):
|
def get_scan_ids(self):
|
||||||
scans = self.get_scans()
|
scans = self.get_scans()
|
||||||
scan_ids = [scan_id['id'] for scan_id in scans['scans']]
|
scan_ids = [scan_id['id'] for scan_id in scans['scans']] if scans['scans'] else []
|
||||||
return scan_ids
|
return scan_ids
|
||||||
|
|
||||||
def count_scan(self, scans, folder_id):
|
def count_scan(self, scans, folder_id):
|
||||||
@ -154,7 +149,7 @@ class NessusAPI(object):
|
|||||||
req = self.request(query, data=data, method='POST')
|
req = self.request(query, data=data, method='POST')
|
||||||
return req
|
return req
|
||||||
|
|
||||||
def download_scan(self, scan_id=None, history=None, export_format="", chapters="", dbpasswd=""):
|
def download_scan(self, scan_id=None, history=None, export_format="", chapters="", dbpasswd="", profile=""):
|
||||||
running = True
|
running = True
|
||||||
counter = 0
|
counter = 0
|
||||||
|
|
||||||
@ -167,7 +162,7 @@ class NessusAPI(object):
|
|||||||
req = self.request(query, data=json.dumps(data), method='POST', json=True)
|
req = self.request(query, data=json.dumps(data), method='POST', json=True)
|
||||||
try:
|
try:
|
||||||
file_id = req['file']
|
file_id = req['file']
|
||||||
token_id = req['token']
|
token_id = req['token'] if 'token' in req else req['temp_token']
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[ERROR] %s" % e)
|
print("[ERROR] %s" % e)
|
||||||
print('Download for file id ' + str(file_id) + '.')
|
print('Download for file id ' + str(file_id) + '.')
|
||||||
@ -183,6 +178,9 @@ class NessusAPI(object):
|
|||||||
print("")
|
print("")
|
||||||
|
|
||||||
print("")
|
print("")
|
||||||
|
if profile=='tenable':
|
||||||
|
content = self.request(self.EXPORT_FILE_DOWNLOAD.format(scan_id=scan_id, file_id=file_id), method='GET', download=True)
|
||||||
|
else:
|
||||||
content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)
|
content = self.request(self.EXPORT_TOKEN_DOWNLOAD.format(token_id=token_id), method='GET', download=True)
|
||||||
return content
|
return content
|
||||||
|
|
||||||
|
193
vulnwhisp/frameworks/openvas.py
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
__author__ = 'Austin Taylor'
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import io
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import requests
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from ..utils.cli import bcolors
|
||||||
|
|
||||||
|
|
||||||
|
class OpenVAS_API(object):
|
||||||
|
OMP = '/omp'
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
hostname=None,
|
||||||
|
port=None,
|
||||||
|
username=None,
|
||||||
|
password=None,
|
||||||
|
report_format_id=None,
|
||||||
|
verbose=True):
|
||||||
|
if username is None or password is None:
|
||||||
|
raise Exception('ERROR: Missing username or password.')
|
||||||
|
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.base = 'https://{hostname}:{port}'.format(hostname=hostname, port=port)
|
||||||
|
self.verbose = verbose
|
||||||
|
self.processed_reports = 0
|
||||||
|
self.report_format_id = report_format_id
|
||||||
|
|
||||||
|
self.headers = {
|
||||||
|
'Origin': self.base,
|
||||||
|
'Accept-Encoding': 'gzip, deflate, br',
|
||||||
|
'Accept-Language': 'en-US,en;q=0.8',
|
||||||
|
'User-Agent': 'VulnWhisperer for OpenVAS',
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded',
|
||||||
|
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
|
||||||
|
'Cache-Control': 'max-age=0',
|
||||||
|
'Referer': self.base,
|
||||||
|
'X-Requested-With': 'XMLHttpRequest',
|
||||||
|
'Connection': 'keep-alive',
|
||||||
|
}
|
||||||
|
|
||||||
|
self.login()
|
||||||
|
|
||||||
|
self.openvas_reports = self.get_reports()
|
||||||
|
self.report_formats = self.get_report_formats()
|
||||||
|
|
||||||
|
def vprint(self, msg):
|
||||||
|
if self.verbose:
|
||||||
|
print(msg)
|
||||||
|
|
||||||
|
def login(self):
|
||||||
|
resp = self.get_token()
|
||||||
|
if resp.status_code is 200:
|
||||||
|
xml_response = BeautifulSoup(resp.content, 'lxml')
|
||||||
|
self.token = xml_response.find(attrs={'id': 'gsa-token'}).text
|
||||||
|
|
||||||
|
self.cookies = resp.cookies.get_dict()
|
||||||
|
else:
|
||||||
|
raise Exception('[FAIL] Could not login to OpenVAS')
|
||||||
|
|
||||||
|
def request(self, url, data=None, params=None, headers=None, cookies=None, method='POST', download=False,
|
||||||
|
json=False):
|
||||||
|
if headers is None:
|
||||||
|
headers = self.headers
|
||||||
|
if cookies is None:
|
||||||
|
cookies = self.cookies
|
||||||
|
|
||||||
|
timeout = 0
|
||||||
|
success = False
|
||||||
|
|
||||||
|
url = self.base + url
|
||||||
|
methods = {'GET': requests.get,
|
||||||
|
'POST': requests.post,
|
||||||
|
'DELETE': requests.delete}
|
||||||
|
|
||||||
|
while (timeout <= 10) and (not success):
|
||||||
|
data = methods[method](url,
|
||||||
|
data=data,
|
||||||
|
headers=self.headers,
|
||||||
|
params=params,
|
||||||
|
cookies=cookies,
|
||||||
|
verify=False)
|
||||||
|
|
||||||
|
if data.status_code == 401:
|
||||||
|
try:
|
||||||
|
self.login()
|
||||||
|
timeout += 1
|
||||||
|
self.vprint('[INFO] Token refreshed')
|
||||||
|
except Exception as e:
|
||||||
|
self.vprint('[FAIL] Could not refresh token\nReason: %s' % e)
|
||||||
|
else:
|
||||||
|
success = True
|
||||||
|
|
||||||
|
if json:
|
||||||
|
data = data.json()
|
||||||
|
if download:
|
||||||
|
return data.content
|
||||||
|
return data
|
||||||
|
|
||||||
|
def get_token(self):
|
||||||
|
data = [
|
||||||
|
('cmd', 'login'),
|
||||||
|
('text', '/omp?r=1'),
|
||||||
|
('login', self.username),
|
||||||
|
('password', self.password),
|
||||||
|
]
|
||||||
|
token = requests.post(self.base + self.OMP, data=data, verify=False)
|
||||||
|
return token
|
||||||
|
def get_report_formats(self):
|
||||||
|
params = (
|
||||||
|
('cmd', 'get_report_formats'),
|
||||||
|
('token', self.token)
|
||||||
|
)
|
||||||
|
self.vprint('{info} Retrieving available report formats'.format(info=bcolors.INFO))
|
||||||
|
data = self.request(url=self.OMP, method='GET', params=params)
|
||||||
|
|
||||||
|
bs = BeautifulSoup(data.content, "lxml")
|
||||||
|
table_body = bs.find('tbody')
|
||||||
|
rows = table_body.find_all('tr')
|
||||||
|
format_mapping = {}
|
||||||
|
for row in rows:
|
||||||
|
cols = row.find_all('td')
|
||||||
|
for x in cols:
|
||||||
|
for y in x.find_all('a'):
|
||||||
|
if y.get_text() != '':
|
||||||
|
format_mapping[y.get_text()] = \
|
||||||
|
[h.split('=')[1] for h in y['href'].split('&') if 'report_format_id' in h][0]
|
||||||
|
return format_mapping
|
||||||
|
|
||||||
|
def get_reports(self, complete=True):
|
||||||
|
print('{info} Retreiving OpenVAS report data...'.format(info=bcolors.INFO))
|
||||||
|
params = (('cmd', 'get_reports'),
|
||||||
|
('token', self.token),
|
||||||
|
('max_results', 1),
|
||||||
|
('ignore_pagination', 1),
|
||||||
|
('filter', 'apply_overrides=1 min_qod=70 autofp=0 first=1 rows=0 levels=hml sort-reverse=severity'),
|
||||||
|
)
|
||||||
|
reports = self.request(self.OMP, params=params, method='GET')
|
||||||
|
soup = BeautifulSoup(reports.text, 'lxml')
|
||||||
|
data = []
|
||||||
|
links = []
|
||||||
|
table = soup.find('table', attrs={'class': 'gbntable'})
|
||||||
|
table_body = table.find('tbody')
|
||||||
|
|
||||||
|
rows = table_body.find_all('tr')
|
||||||
|
for row in rows:
|
||||||
|
cols = row.find_all('td')
|
||||||
|
links.extend([a['href'] for a in row.find_all('a', href=True) if 'get_report' in str(a)])
|
||||||
|
cols = [ele.text.strip() for ele in cols]
|
||||||
|
data.append([ele for ele in cols if ele])
|
||||||
|
report = pd.DataFrame(data, columns=['date', 'status', 'task', 'scan_severity', 'high', 'medium', 'low', 'log',
|
||||||
|
'false_pos'])
|
||||||
|
|
||||||
|
if report.shape[0] != 0:
|
||||||
|
report['links'] = links
|
||||||
|
report['report_ids'] = report.links.str.extract('.*report_id=([a-z-0-9]*)', expand=False)
|
||||||
|
report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)
|
||||||
|
else:
|
||||||
|
raise Exception("Could not retrieve OpenVAS Reports - Please check your settings and try again")
|
||||||
|
|
||||||
|
report['links'] = links
|
||||||
|
report['report_ids'] = report.links.str.extract('.*report_id=([a-z-0-9]*)', expand=False)
|
||||||
|
report['epoch'] = (pd.to_datetime(report['date']) - dt.datetime(1970, 1, 1)).dt.total_seconds().astype(int)
|
||||||
|
if complete:
|
||||||
|
report = report[report.status == 'Done']
|
||||||
|
severity_extraction = report.scan_severity.str.extract('([0-9.]*) \(([\w]+)\)', expand=False)
|
||||||
|
severity_extraction.columns = ['scan_highest_severity', 'severity_rate']
|
||||||
|
report_with_severity = pd.concat([report, severity_extraction], axis=1)
|
||||||
|
return report_with_severity
|
||||||
|
|
||||||
|
def process_report(self, report_id):
|
||||||
|
|
||||||
|
params = (
|
||||||
|
('token', self.token),
|
||||||
|
('cmd', 'get_report'),
|
||||||
|
('report_id', report_id),
|
||||||
|
('filter', 'apply_overrides=0 min_qod=70 autofp=0 levels=hml first=1 rows=0 sort-reverse=severity'),
|
||||||
|
('ignore_pagination', '1'),
|
||||||
|
('report_format_id', '{report_format_id}'.format(report_format_id=self.report_formats['CSV Results'])),
|
||||||
|
('submit', 'Download'),
|
||||||
|
)
|
||||||
|
print('Retrieving %s' % report_id)
|
||||||
|
req = self.request(self.OMP, params=params, method='GET')
|
||||||
|
report_df = pd.read_csv(io.BytesIO(req.text.encode('utf-8')))
|
||||||
|
report_df['report_ids'] = report_id
|
||||||
|
self.processed_reports += 1
|
||||||
|
merged_df = pd.merge(report_df, self.openvas_reports, on='report_ids').reset_index().drop('index', axis=1)
|
||||||
|
return merged_df
|
837
vulnwhisp/frameworks/qualys.py
Normal file
@ -0,0 +1,837 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
__author__ = 'Austin Taylor'
|
||||||
|
|
||||||
|
from lxml import objectify
|
||||||
|
from lxml.builder import E
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
import pandas as pd
|
||||||
|
import qualysapi
|
||||||
|
import qualysapi.config as qcconf
|
||||||
|
import requests
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import csv
|
||||||
|
import dateutil.parser as dp
|
||||||
|
|
||||||
|
|
||||||
|
class qualysWhisperAPI(object):
    """Wrapper around the Qualys WAS (web application scanning) REST API.

    Endpoint constants are paths relative to the Qualys API server; the
    authenticated connection itself comes from ``qualysapi.connect``.
    """

    COUNT_WEBAPP = '/count/was/webapp'
    COUNT_WASSCAN = '/count/was/wasscan'
    DELETE_REPORT = '/delete/was/report/{report_id}'
    GET_WEBAPP_DETAILS = '/get/was/webapp/{was_id}'
    QPS_REST_3 = '/qps/rest/3.0'
    REPORT_DETAILS = '/get/was/report/{report_id}'
    REPORT_STATUS = '/status/was/report/{report_id}'
    REPORT_CREATE = '/create/was/report'
    REPORT_DOWNLOAD = '/download/was/report/{report_id}'
    SCAN_DETAILS = '/get/was/wasscan/{scan_id}'
    SCAN_DOWNLOAD = '/download/was/wasscan/{scan_id}'
    SEARCH_REPORTS = '/search/was/report'
    SEARCH_WEB_APPS = '/search/was/webapp'
    SEARCH_WAS_SCAN = '/search/was/wasscan'
    VERSION = '/qps/rest/portal/version'

    def __init__(self, config=None):
        """Open a qualysapi connection using the .ini file at *config*."""
        self.config = config
        try:
            self.qgc = qualysapi.connect(config)
            print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
        except Exception as e:
            print('[ERROR] Could not connect to Qualys - %s' % e)
        self.headers = {
            "content-type": "text/xml"}
        self.config_parse = qcconf.QualysConnectConfig(config)
        try:
            self.template_id = self.config_parse.get_template_id()
        except Exception as e:
            # Fixed: was a bare `except:`, which also swallowed SystemExit /
            # KeyboardInterrupt and hid the actual failure reason.
            print('ERROR - Could not retrieve template ID - %s' % e)

    def request(self, path, method='get', data=None):
        """Issue a raw HTTP request against the Qualys server.

        path   : API path appended to ``https://<server>``.
        method : 'get' or 'post'.
        data   : optional request body.
        Returns the raw response content.
        """
        methods = {'get': requests.get,
                   'post': requests.post}
        base = 'https://' + self.qgc.server + path
        req = methods[method](base, auth=self.qgc.auth, data=data, headers=self.headers).content
        return req

    def get_version(self):
        """Return the portal version XML."""
        return self.request(self.VERSION)

    def get_scan_count(self, scan_name):
        """Return the count of webapps whose name contains *scan_name*."""
        parameters = (
            E.ServiceRequest(
                E.filters(
                    E.Criteria({'field': 'name', 'operator': 'CONTAINS'}, scan_name))))
        xml_output = self.qgc.request(self.COUNT_WEBAPP, parameters)
        root = objectify.fromstring(xml_output)
        return root.count.text

    def get_was_scan_count(self, status):
        """Return the count of WAS scans whose status equals *status*."""
        parameters = (
            E.ServiceRequest(
                E.filters(
                    E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status))))
        xml_output = self.qgc.request(self.COUNT_WASSCAN, parameters)
        root = objectify.fromstring(xml_output)
        return root.count.text

    def get_reports(self):
        """Return the raw XML listing of reports."""
        return self.qgc.request(self.SEARCH_REPORTS)

    def xml_parser(self, xml, dupfield=None):
        """Flatten a two-level ServiceResponse XML document into a DataFrame.

        Nested tags that would collide with an already-seen tag are
        suffixed ``_1``, ``_2``, ... instead of overwriting each other.
        *dupfield* is kept for interface compatibility; it is not consulted.
        """
        all_records = []
        root = ET.XML(xml)
        for i, child in enumerate(root):
            for subchild in child:
                record = {}
                dup_tracker = 0
                for p in subchild:
                    record[p.tag] = p.text
                    for o in p:
                        if o.tag in record:
                            dup_tracker += 1
                            record[o.tag + '_%s' % dup_tracker] = o.text
                        else:
                            record[o.tag] = o.text
                all_records.append(record)
        return pd.DataFrame(all_records)

    def get_report_list(self):
        """Returns a dataframe of reports"""
        return self.xml_parser(self.get_reports(), dupfield='user_id')

    def get_web_apps(self):
        """Returns webapps available for account"""
        return self.qgc.request(self.SEARCH_WEB_APPS)

    def get_web_app_list(self):
        """Returns dataframe of webapps"""
        return self.xml_parser(self.get_web_apps(), dupfield='user_id')

    def get_web_app_details(self, was_id):
        """Get webapp details - use to retrieve app ID tag"""
        return self.qgc.request(self.GET_WEBAPP_DETAILS.format(was_id=was_id))

    def get_scans_by_app_id(self, app_id):
        """Return WAS scans belonging to webapp *app_id*."""
        data = self.generate_app_id_scan_XML(app_id)
        return self.qgc.request(self.SEARCH_WAS_SCAN, data)

    def get_scan_info(self, limit=1000, offset=1, status='FINISHED'):
        """ Returns XML of ALL WAS Scans"""
        data = self.generate_scan_result_XML(limit=limit, offset=offset, status=status)
        return self.qgc.request(self.SEARCH_WAS_SCAN, data)

    def get_all_scans(self, limit=1000, offset=1, status='FINISHED'):
        """Page through every WAS scan and return one combined DataFrame.

        limit  : page size per API request.
        offset : starting offset (Qualys offsets are 1-based).
        status : scan status filter, e.g. 'FINISHED'.
        """
        qualys_api_limit = limit
        _records = []
        total = int(self.get_was_scan_count(status=status))
        print('Retrieving information for %s scans' % total)
        for i in range(0, total):
            if i % limit == 0:
                # Shrink the final page so we never request past the end.
                if (total - i) < limit:
                    qualys_api_limit = total - i
                print('Making a request with a limit of %s at offset %s' % (str(qualys_api_limit), str(i + 1)))
                scan_info = self.get_scan_info(limit=qualys_api_limit, offset=i + 1, status=status)
                _records.append(scan_info)
        print('Converting XML to DataFrame')
        dataframes = [self.xml_parser(xml) for xml in _records]
        return pd.concat(dataframes, axis=0).reset_index().drop('index', axis=1)

    def get_scan_details(self, scan_id):
        """Return XML details for one WAS scan."""
        return self.qgc.request(self.SCAN_DETAILS.format(scan_id=scan_id))

    def get_report_details(self, report_id):
        """Return XML details for one report."""
        return self.qgc.request(self.REPORT_DETAILS.format(report_id=report_id))

    def get_report_status(self, report_id):
        """Return the generation status of one report."""
        return self.qgc.request(self.REPORT_STATUS.format(report_id=report_id))

    def download_report(self, report_id):
        """Download the content of a finished report."""
        return self.qgc.request(self.REPORT_DOWNLOAD.format(report_id=report_id))

    def download_scan_results(self, scan_id):
        """Download raw results for one scan."""
        return self.qgc.request(self.SCAN_DOWNLOAD.format(scan_id=scan_id))

    def generate_scan_result_XML(self, limit=1000, offset=1, status='FINISHED'):
        """Build the ServiceRequest XML used to page through WAS scans."""
        report_xml = E.ServiceRequest(
            E.filters(
                E.Criteria({'field': 'status', 'operator': 'EQUALS'}, status),
            ),
            E.preferences(
                E.startFromOffset(str(offset)),
                E.limitResults(str(limit))
            ),
        )
        return report_xml

    def generate_scan_report_XML(self, scan_id):
        """Generates a CSV report for an asset based on template defined in .ini file"""
        report_xml = E.ServiceRequest(
            E.data(
                E.Report(
                    # Fixed: the opening '<' of the CDATA marker was missing
                    # (the description field below already had it).
                    E.name('<![CDATA[API Scan Report generated by VulnWhisperer]]>'),
                    E.description('<![CDATA[CSV Scanning report for VulnWhisperer]]>'),
                    E.format('CSV'),
                    E.type('WAS_SCAN_REPORT'),
                    E.template(
                        E.id(self.template_id)
                    ),
                    E.config(
                        E.scanReport(
                            E.target(
                                E.scans(
                                    E.WasScan(
                                        E.id(scan_id)
                                    )
                                ),
                            ),
                        ),
                    )
                )
            )
        )
        return report_xml

    def generate_webapp_report_XML(self, app_id):
        """Generates a CSV report for an asset based on template defined in .ini file"""
        report_xml = E.ServiceRequest(
            E.data(
                E.Report(
                    # Fixed: the opening '<' of the CDATA marker was missing.
                    E.name('<![CDATA[API Web Application Report generated by VulnWhisperer]]>'),
                    E.description('<![CDATA[CSV WebApp report for VulnWhisperer]]>'),
                    E.format('CSV'),
                    E.template(
                        E.id(self.template_id)
                    ),
                    E.config(
                        E.webAppReport(
                            E.target(
                                E.webapps(
                                    E.WebApp(
                                        E.id(app_id)
                                    )
                                ),
                            ),
                        ),
                    )
                )
            )
        )
        return report_xml

    def generate_app_id_scan_XML(self, app_id):
        """Build the ServiceRequest XML filtering scans by webApp.id."""
        report_xml = E.ServiceRequest(
            E.filters(
                E.Criteria({'field': 'webApp.id', 'operator': 'EQUALS'}, app_id),
            ),
        )
        return report_xml

    def create_report(self, report_id, kind='scan'):
        """Ask Qualys to generate a 'scan' or 'webapp' CSV report.

        Fixed: previously a failure building the request XML was printed
        and then `data` was referenced unbound (NameError masking the real
        error); now the original exception is re-raised.
        """
        mapper = {'scan': self.generate_scan_report_XML,
                  'webapp': self.generate_webapp_report_XML}
        try:
            data = mapper[kind](report_id)
        except Exception as e:
            print(e)
            raise
        return self.qgc.request(self.REPORT_CREATE, data)

    def delete_report(self, report_id):
        """Delete a generated report on the Qualys side."""
        return self.qgc.request(self.DELETE_REPORT.format(report_id=report_id))
|
||||||
|
|
||||||
|
|
||||||
|
class qualysReportFields:
    """Column layouts for the CSV sections of a Qualys WAS report.

    Each list doubles as a section marker (the raw CSV contains a header
    row with exactly these cells) and as the DataFrame column names used
    when the section is parsed.
    """

    # Section categories as they appear in the raw CSV export.
    CATEGORIES = [
        'VULNERABILITY',
        'SENSITIVECONTENT',
        'INFORMATION_GATHERED',
    ]

    # URL vulnerability information block.
    VULN_BLOCK = [
        CATEGORIES[0], 'ID', 'QID', 'Url', 'Param', 'Function',
        'Form Entry Point', 'Access Path', 'Authentication',
        'Ajax Request', 'Ajax Request ID', 'Ignored', 'Ignore Reason',
        'Ignore Date', 'Ignore User', 'Ignore Comments',
        'First Time Detected', 'Last Time Detected', 'Last Time Tested',
        'Times Detected', 'Payload #1', 'Request Method #1',
        'Request URL #1', 'Request Headers #1', 'Response #1',
        'Evidence #1',
    ]

    # Information-gathered section: header (normalized name) vs. block
    # (raw marker) differ only in the first cell.
    INFO_HEADER = [
        'Vulnerability Category', 'ID', 'QID', 'Response #1',
        'Last Time Detected',
    ]
    INFO_BLOCK = [
        CATEGORIES[2], 'ID', 'QID', 'Results', 'Detection Date',
    ]

    # Per-QID knowledge-base details.
    QID_HEADER = [
        'QID', 'Id', 'Title', 'Category', 'Severity Level', 'Groups',
        'OWASP', 'WASC', 'CWE', 'CVSS Base', 'CVSS Temporal',
        'Description', 'Impact', 'Solution',
    ]

    # Small appendix sections.
    GROUP_HEADER = ['GROUP', 'Name', 'Category']
    OWASP_HEADER = ['OWASP', 'Code', 'Name']
    WASC_HEADER = ['WASC', 'Code', 'Name']
    SCAN_META = ['Web Application Name', 'URL', 'Owner', 'Scope', 'Operating System']
    CATEGORY_HEADER = ['Category', 'Severity', 'Level', 'Description']
|
||||||
|
|
||||||
|
|
||||||
|
class qualysUtils:
    """Shared CSV-parsing and text-cleaning helpers."""

    def __init__(self):
        pass

    def grab_section(self, report, section, end=None, pop_last=False):
        """Return the CSV rows of *report* between a section header and *end*.

        report  : path of the CSV report on disk.
        section : list of column names that marks the start of the block
                  (compared as a set, so column order does not matter).
        end     : list of rows, any of which terminates the block.
        pop_last: drop the final (terminator) row from the result.
        """
        # Fixed: `end=[]` was a mutable default argument.
        if end is None:
            end = []
        temp_list = []
        with open(report, 'rb') as csvfile:
            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
            # Skip ahead to the requested section header.
            for line in q_report:
                if set(line) == set(section):
                    break
            # Collect rows until one of the terminator rows is hit.
            for line in q_report:
                temp_list.append(line)
                if line in end:
                    break
        if pop_last and len(temp_list) > 1:
            temp_list.pop(-1)
        return temp_list

    def iso_to_epoch(self, dt):
        """Convert an ISO-8601 timestamp string to a Unix-epoch string."""
        return dp.parse(dt).strftime('%s')

    def cleanser(self, _data):
        """Flatten newlines/tabs to '|||' and commas to ';' so a value
        stays inside a single CSV cell.

        Falsy input (None, '', 0) is returned unchanged.
        """
        repls = (('\n', '|||'), ('\r', '|||'), (',', ';'), ('\t', '|||'))
        if _data:
            # Fixed: previously used the `reduce` builtin, which no longer
            # exists on Python 3 (and functools.reduce was never imported).
            _data = str(_data)
            for old, new in repls:
                _data = _data.replace(old, new)
        return _data
|
||||||
|
|
||||||
|
|
||||||
|
class qualysWebAppReport:
    """Downloads and normalizes Qualys WAS *web application* CSV reports."""

    # URL Vulnerability Information: column layouts derived from the
    # shared qualysReportFields definitions.
    WEB_APP_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
    WEB_APP_VULN_BLOCK.insert(0, 'Web Application Name')
    WEB_APP_VULN_BLOCK.insert(WEB_APP_VULN_BLOCK.index('Ignored'), 'Status')

    WEB_APP_VULN_HEADER = list(WEB_APP_VULN_BLOCK)
    WEB_APP_VULN_HEADER[WEB_APP_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
        'Vulnerability Category'

    WEB_APP_SENSITIVE_HEADER = list(WEB_APP_VULN_HEADER)
    WEB_APP_SENSITIVE_HEADER.insert(WEB_APP_SENSITIVE_HEADER.index('Url'), 'Content')

    WEB_APP_SENSITIVE_BLOCK = list(WEB_APP_SENSITIVE_HEADER)
    WEB_APP_SENSITIVE_BLOCK[WEB_APP_SENSITIVE_BLOCK.index('Vulnerability Category')] = \
        qualysReportFields.CATEGORIES[1]

    WEB_APP_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
    WEB_APP_INFO_HEADER.insert(0, 'Web Application Name')

    WEB_APP_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
    WEB_APP_INFO_BLOCK.insert(0, 'Web Application Name')

    QID_HEADER = list(qualysReportFields.QID_HEADER)
    GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
    OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
    WASC_HEADER = list(qualysReportFields.WASC_HEADER)
    SCAN_META = list(qualysReportFields.SCAN_META)
    CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)

    def __init__(
            self,
            config=None,
            file_in=None,
            file_stream=False,
            delimiter=',',
            quotechar='"',
    ):
        """
        config      : qualysapi .ini path; enables API access when given.
        file_in     : path of a report (or raw text when file_stream=True).
        file_stream : treat *file_in* as report content, not a path.
        """
        self.file_in = file_in
        self.file_stream = file_stream
        self.report = None
        self.utils = qualysUtils()

        if config:
            try:
                self.qw = qualysWhisperAPI(config=config)
            except Exception as e:
                print('Could not load config! Please check settings for %s'
                      % e)

        if file_stream:
            self.open_file = file_in.splitlines()
        elif file_in:
            self.open_file = open(file_in, 'rb')

        self.downloaded_file = None

    def get_hostname(self, report):
        """Return the web application name recorded in the report CSV."""
        host = ''
        with open(report, 'rb') as csvfile:
            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
            for x in q_report:
                # Guard against empty rows before indexing.
                if x and 'Web Application Name' in x[0]:
                    # next() works on Python 2 and 3; reader.next() is 2-only.
                    host = next(q_report)[0]
        return host

    def get_scanreport_name(self, report):
        """Return the scan name recorded in the report CSV."""
        scan_name = ''
        with open(report, 'rb') as csvfile:
            q_report = csv.reader(csvfile, delimiter=',', quotechar='"')
            for x in q_report:
                if len(x) > 1 and 'Scans' in x[0]:
                    scan_name = x[1]
        return scan_name

    def grab_sections(self, report):
        """Split the CSV *report* into per-section DataFrames.

        Returns a single-element list containing a dict keyed by section
        name; data_normalizer expects exactly this shape.
        """
        # Each grab_section call opens the file itself; the previous
        # unused `with open(report)` wrapper here was removed.
        dict_tracker = {}
        dict_tracker['WEB_APP_VULN_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_APP_VULN_BLOCK,
                                    end=[self.WEB_APP_SENSITIVE_BLOCK,
                                         self.WEB_APP_INFO_BLOCK],
                                    pop_last=True),
            columns=self.WEB_APP_VULN_HEADER)
        dict_tracker['WEB_APP_SENSITIVE_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_APP_SENSITIVE_BLOCK,
                                    end=[self.WEB_APP_INFO_BLOCK,
                                         self.WEB_APP_SENSITIVE_BLOCK],
                                    pop_last=True),
            columns=self.WEB_APP_SENSITIVE_HEADER)
        dict_tracker['WEB_APP_INFO_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_APP_INFO_BLOCK,
                                    end=[self.QID_HEADER],
                                    pop_last=True),
            columns=self.WEB_APP_INFO_HEADER)
        dict_tracker['QID_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.QID_HEADER,
                                    end=[self.GROUP_HEADER],
                                    pop_last=True),
            columns=self.QID_HEADER)
        dict_tracker['GROUP_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.GROUP_HEADER,
                                    end=[self.OWASP_HEADER],
                                    pop_last=True),
            columns=self.GROUP_HEADER)
        dict_tracker['OWASP_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.OWASP_HEADER,
                                    end=[self.WASC_HEADER],
                                    pop_last=True),
            columns=self.OWASP_HEADER)
        dict_tracker['WASC_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.WASC_HEADER,
                                    end=[['APPENDIX']],
                                    pop_last=True),
            columns=self.WASC_HEADER)
        dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.CATEGORY_HEADER),
            columns=self.CATEGORY_HEADER)
        return [dict_tracker]

    def data_normalizer(self, dataframes):
        """
        Merge and clean data
        :param dataframes: single-element list holding the grab_sections dict
        :return: merged, cleaned DataFrame
        """
        df_dict = dataframes[0]
        merged_df = pd.concat([df_dict['WEB_APP_VULN_BLOCK'],
                               df_dict['WEB_APP_SENSITIVE_BLOCK'],
                               df_dict['WEB_APP_INFO_BLOCK']],
                              axis=0, ignore_index=False)
        merged_df = pd.merge(merged_df, df_dict['QID_HEADER'],
                             left_on='QID', right_on='Id')
        # Fixed: a second concat/merge here indexed dataframes[1..3], but
        # grab_sections returns a one-element list, so that leftover code
        # always raised IndexError.  It has been removed.

        if 'Content' not in merged_df:
            merged_df['Content'] = ''

        columns_to_cleanse = ['Payload #1', 'Request Method #1', 'Request URL #1',
                              'Request Headers #1', 'Response #1', 'Evidence #1',
                              'Description', 'Impact', 'Solution', 'Url', 'Content']

        for col in columns_to_cleanse:
            merged_df[col] = merged_df[col].astype(str).apply(self.utils.cleanser)

        merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'])
        merged_df = merged_df.drop(['QID_y', 'QID_x'], axis=1)
        merged_df = merged_df.rename(columns={'Id': 'QID'})
        merged_df = merged_df.replace('N/A', '').fillna('')

        try:
            # Drop housekeeping rows that are not real findings.
            merged_df = \
                merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered')]
        except Exception as e:
            print(e)
        return merged_df

    def download_file(self, file_id):
        """Download report *file_id* to <file_id>.csv and return the name."""
        report = self.qw.download_report(file_id)
        filename = str(file_id) + '.csv'
        # with-statement closes the handle even if a write fails.
        with open(filename, 'w') as file_out:
            for line in report.splitlines():
                file_out.write(line + '\n')
        print('[ACTION] - File written to %s' % filename)
        return filename

    def remove_file(self, filename):
        """Delete *filename* from disk."""
        os.remove(filename)

    def process_data(self, file_id, scan=True, cleanup=True):
        """Downloads a file from qualys and normalizes it"""
        download_file = self.download_file(file_id)
        print('[ACTION] - Downloading file ID: %s' % file_id)
        report_data = self.grab_sections(download_file)
        merged_data = self.data_normalizer(report_data)
        if scan:
            scan_name = self.get_scanreport_name(download_file)
            merged_data['ScanName'] = scan_name

        # TODO cleanup old data (delete)
        return merged_data

    def whisper_reports(self, report_id, updated_date, cleanup=False):
        """
        report_id: App ID
        updated_date: Last time scan was ran for app_id
        """
        vuln_ready = None
        try:
            if 'Z' in updated_date:
                updated_date = self.utils.iso_to_epoch(updated_date)
            report_name = 'qualys_web_' + str(report_id) \
                          + '_{last_updated}'.format(last_updated=updated_date) \
                          + '.csv'
            if os.path.isfile(report_name):
                # Already whispered for this (app, last-scan) pair.
                print('[ACTION] - File already exist! Skipping...')
            else:
                print('[ACTION] - Generating report for %s' % report_id)
                status = self.qw.create_report(report_id)
                root = objectify.fromstring(status)
                if root.responseCode == 'SUCCESS':
                    print('[INFO] - Successfully generated report for webapp: %s'
                          % report_id)
                    generated_report_id = root.data.Report.id
                    print('[INFO] - New Report ID: %s'
                          % generated_report_id)
                    vuln_ready = self.process_data(generated_report_id)

                    vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
                    print('[SUCCESS] - Report written to %s'
                          % report_name)
                    if cleanup:
                        print('[ACTION] - Removing report %s'
                              % generated_report_id)
                        cleaning_up = \
                            self.qw.delete_report(generated_report_id)
                        self.remove_file(str(generated_report_id) + '.csv')
                        print('[ACTION] - Deleted report: %s'
                              % generated_report_id)
                else:
                    print('Could not process report ID: %s' % status)
        except Exception as e:
            print('[ERROR] - Could not process %s - %s' % (report_id, e))
        return vuln_ready
|
||||||
|
|
||||||
|
|
||||||
|
class qualysScanReport:
    """Downloads and normalizes Qualys WAS *scan* CSV reports."""

    # URL Vulnerability Information: column layouts derived from the
    # shared qualysReportFields definitions (scan reports add a
    # 'Detection ID' column before 'QID').
    WEB_SCAN_VULN_BLOCK = list(qualysReportFields.VULN_BLOCK)
    WEB_SCAN_VULN_BLOCK.insert(WEB_SCAN_VULN_BLOCK.index('QID'), 'Detection ID')

    WEB_SCAN_VULN_HEADER = list(WEB_SCAN_VULN_BLOCK)
    WEB_SCAN_VULN_HEADER[WEB_SCAN_VULN_BLOCK.index(qualysReportFields.CATEGORIES[0])] = \
        'Vulnerability Category'

    WEB_SCAN_SENSITIVE_HEADER = list(WEB_SCAN_VULN_HEADER)
    WEB_SCAN_SENSITIVE_HEADER.insert(WEB_SCAN_SENSITIVE_HEADER.index('Url'), 'Content')

    WEB_SCAN_SENSITIVE_BLOCK = list(WEB_SCAN_SENSITIVE_HEADER)
    WEB_SCAN_SENSITIVE_BLOCK.insert(WEB_SCAN_SENSITIVE_BLOCK.index('QID'), 'Detection ID')
    WEB_SCAN_SENSITIVE_BLOCK[WEB_SCAN_SENSITIVE_BLOCK.index('Vulnerability Category')] = \
        qualysReportFields.CATEGORIES[1]

    WEB_SCAN_INFO_HEADER = list(qualysReportFields.INFO_HEADER)
    WEB_SCAN_INFO_HEADER.insert(WEB_SCAN_INFO_HEADER.index('QID'), 'Detection ID')

    WEB_SCAN_INFO_BLOCK = list(qualysReportFields.INFO_BLOCK)
    WEB_SCAN_INFO_BLOCK.insert(WEB_SCAN_INFO_BLOCK.index('QID'), 'Detection ID')

    QID_HEADER = list(qualysReportFields.QID_HEADER)
    GROUP_HEADER = list(qualysReportFields.GROUP_HEADER)
    OWASP_HEADER = list(qualysReportFields.OWASP_HEADER)
    WASC_HEADER = list(qualysReportFields.WASC_HEADER)
    SCAN_META = list(qualysReportFields.SCAN_META)
    CATEGORY_HEADER = list(qualysReportFields.CATEGORY_HEADER)

    def __init__(
            self,
            config=None,
            file_in=None,
            file_stream=False,
            delimiter=',',
            quotechar='"',
    ):
        """
        config      : qualysapi .ini path; enables API access when given.
        file_in     : path of a report (or raw text when file_stream=True).
        file_stream : treat *file_in* as report content, not a path.
        """
        self.file_in = file_in
        self.file_stream = file_stream
        self.report = None
        self.utils = qualysUtils()

        if config:
            try:
                self.qw = qualysWhisperAPI(config=config)
            except Exception as e:
                print('Could not load config! Please check settings for %s'
                      % e)

        if file_stream:
            self.open_file = file_in.splitlines()
        elif file_in:
            self.open_file = open(file_in, 'rb')

        self.downloaded_file = None

    def grab_sections(self, report):
        """Split the CSV *report* into per-section DataFrames.

        Returns a single-element list containing a dict keyed by section
        name; data_normalizer expects exactly this shape.
        """
        # Each grab_section call opens the file itself; the previous
        # unused `with open(report)` wrapper here was removed.
        dict_tracker = {}
        dict_tracker['WEB_SCAN_VULN_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_SCAN_VULN_BLOCK,
                                    end=[self.WEB_SCAN_SENSITIVE_BLOCK,
                                         self.WEB_SCAN_INFO_BLOCK],
                                    pop_last=True),
            columns=self.WEB_SCAN_VULN_HEADER)
        dict_tracker['WEB_SCAN_SENSITIVE_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_SCAN_SENSITIVE_BLOCK,
                                    end=[self.WEB_SCAN_INFO_BLOCK,
                                         self.WEB_SCAN_SENSITIVE_BLOCK],
                                    pop_last=True),
            columns=self.WEB_SCAN_SENSITIVE_HEADER)
        dict_tracker['WEB_SCAN_INFO_BLOCK'] = pd.DataFrame(
            self.utils.grab_section(report, self.WEB_SCAN_INFO_BLOCK,
                                    end=[self.QID_HEADER],
                                    pop_last=True),
            columns=self.WEB_SCAN_INFO_HEADER)
        dict_tracker['QID_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.QID_HEADER,
                                    end=[self.GROUP_HEADER],
                                    pop_last=True),
            columns=self.QID_HEADER)
        dict_tracker['GROUP_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.GROUP_HEADER,
                                    end=[self.OWASP_HEADER],
                                    pop_last=True),
            columns=self.GROUP_HEADER)
        dict_tracker['OWASP_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.OWASP_HEADER,
                                    end=[self.WASC_HEADER],
                                    pop_last=True),
            columns=self.OWASP_HEADER)
        dict_tracker['WASC_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.WASC_HEADER,
                                    end=[['APPENDIX']],
                                    pop_last=True),
            columns=self.WASC_HEADER)
        dict_tracker['SCAN_META'] = pd.DataFrame(
            self.utils.grab_section(report, self.SCAN_META,
                                    end=[self.CATEGORY_HEADER],
                                    pop_last=True),
            columns=self.SCAN_META)
        dict_tracker['CATEGORY_HEADER'] = pd.DataFrame(
            self.utils.grab_section(report, self.CATEGORY_HEADER),
            columns=self.CATEGORY_HEADER)
        return [dict_tracker]

    def data_normalizer(self, dataframes):
        """
        Merge and clean data
        :param dataframes: single-element list holding the grab_sections dict
        :return: merged, cleaned DataFrame
        """
        df_dict = dataframes[0]
        merged_df = pd.concat([df_dict['WEB_SCAN_VULN_BLOCK'],
                               df_dict['WEB_SCAN_SENSITIVE_BLOCK'],
                               df_dict['WEB_SCAN_INFO_BLOCK']],
                              axis=0, ignore_index=False)
        merged_df = pd.merge(merged_df, df_dict['QID_HEADER'],
                             left_on='QID', right_on='Id')

        if 'Content' not in merged_df:
            merged_df['Content'] = ''

        columns_to_cleanse = ['Payload #1', 'Request Method #1', 'Request URL #1',
                              'Request Headers #1', 'Response #1', 'Evidence #1',
                              'Description', 'Impact', 'Solution', 'Url', 'Content']

        for col in columns_to_cleanse:
            merged_df[col] = merged_df[col].apply(self.utils.cleanser)

        merged_df = merged_df.drop(['QID_y', 'QID_x'], axis=1)
        merged_df = merged_df.rename(columns={'Id': 'QID'})
        # Stamp every finding with the scan-level metadata row.
        merged_df = merged_df.assign(**df_dict['SCAN_META'].to_dict(orient='records')[0])

        merged_df = pd.merge(merged_df, df_dict['CATEGORY_HEADER'], how='left',
                             left_on=['Category', 'Severity Level'],
                             right_on=['Category', 'Severity'],
                             suffixes=('Severity', 'CatSev'))

        merged_df = merged_df.replace('N/A', '').fillna('')

        try:
            # Drop housekeeping rows that are not real findings.
            merged_df = \
                merged_df[~merged_df.Title.str.contains('Links Crawled|External Links Discovered')]
        except Exception as e:
            print(e)
        return merged_df

    def download_file(self, path='', file_id=None):
        """Download report *file_id* to <path><file_id>.csv, return the name."""
        report = self.qw.download_report(file_id)
        filename = path + str(file_id) + '.csv'
        # with-statement closes the handle even if a write fails.
        with open(filename, 'w') as file_out:
            for line in report.splitlines():
                file_out.write(line + '\n')
        print('[ACTION] - File written to %s' % filename)
        return filename

    def remove_file(self, filename):
        """Delete *filename* from disk."""
        os.remove(filename)

    def process_data(self, path='', file_id=None, cleanup=True):
        """Downloads a file from qualys and normalizes it"""
        download_file = self.download_file(path=path, file_id=file_id)
        print('[ACTION] - Downloading file ID: %s' % file_id)
        report_data = self.grab_sections(download_file)
        merged_data = self.data_normalizer(report_data)
        merged_data.sort_index(axis=1, inplace=True)
        # TODO cleanup old data (delete)
        return merged_data

    def whisper_reports(self, report_id, updated_date, cleanup=False):
        """
        report_id: App ID
        updated_date: Last time scan was ran for app_id
        """
        vuln_ready = None
        try:
            if 'Z' in updated_date:
                updated_date = self.utils.iso_to_epoch(updated_date)
            report_name = 'qualys_web_' + str(report_id) \
                          + '_{last_updated}'.format(last_updated=updated_date) \
                          + '.csv'
            if os.path.isfile(report_name):
                # Already whispered for this (app, last-scan) pair.
                print('[ACTION] - File already exist! Skipping...')
            else:
                print('[ACTION] - Generating report for %s' % report_id)
                status = self.qw.create_report(report_id)
                root = objectify.fromstring(status)
                if root.responseCode == 'SUCCESS':
                    print('[INFO] - Successfully generated report for webapp: %s'
                          % report_id)
                    generated_report_id = root.data.Report.id
                    print('[INFO] - New Report ID: %s'
                          % generated_report_id)
                    # Fixed: was self.process_data(generated_report_id),
                    # which bound the report id to the `path` parameter and
                    # left file_id=None (file written as '<id>None.csv').
                    vuln_ready = self.process_data(file_id=generated_report_id)

                    vuln_ready.to_csv(report_name, index=False, header=True)  # add when timestamp occured
                    print('[SUCCESS] - Report written to %s'
                          % report_name)
                    if cleanup:
                        print('[ACTION] - Removing report %s from disk'
                              % generated_report_id)
                        cleaning_up = \
                            self.qw.delete_report(generated_report_id)
                        self.remove_file(str(generated_report_id) + '.csv')
                        print('[ACTION] - Deleted report from Qualys Database: %s'
                              % generated_report_id)
                else:
                    print('Could not process report ID: %s' % status)
        except Exception as e:
            print('[ERROR] - Could not process %s - %s' % (report_id, e))
        return vuln_ready
|
||||||
|
|
||||||
|
|
||||||
|
# Raise the csv module's field-size limit as high as the platform allows,
# so very large report cells do not raise "field larger than field limit".
maxInt = int(4000000)
maxSize = sys.maxsize

# Prefer the platform's largest integer when it is a plain int.
if maxSize > maxInt and type(maxSize) == int:
    maxInt = maxSize

# csv.field_size_limit rejects values that do not fit in a C long with
# OverflowError; back off by a factor of ten until one is accepted.
decrement = True
while decrement:
    try:
        csv.field_size_limit(maxInt)
        decrement = False
    except OverflowError:
        maxInt = int(maxInt / 10)
|
114
vulnwhisp/frameworks/qualys_vuln.py
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
__author__ = 'Nathan Young'
|
||||||
|
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
import pandas as pd
|
||||||
|
import qualysapi
|
||||||
|
import requests
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import dateutil.parser as dp
|
||||||
|
|
||||||
|
|
||||||
|
class qualysWhisperAPI(object):
    """Minimal wrapper for the Qualys VM (qualysguard) scan list/fetch API."""

    SCANS = 'api/2.0/fo/scan'

    def __init__(self, config=None):
        """Connect and validate credentials using the .ini file at *config*."""
        self.config = config
        try:
            self.qgc = qualysapi.connect(config)
            # Fail early if we can't make a request or auth is incorrect
            self.qgc.request('about.php')
            print('[SUCCESS] - Connected to Qualys at %s' % self.qgc.server)
        except Exception as e:
            print('[ERROR] Could not connect to Qualys - %s' % e)
            # Fixed: use sys.exit instead of the exit() helper, which is
            # injected by the site module and not guaranteed to exist.
            sys.exit(1)

    def scan_xml_parser(self, xml):
        """Parse a scan-list XML response into a DataFrame.

        One row per <SCAN> element under SCAN_LIST, with columns
        name/id/date/type/duration/status.
        """
        all_records = []
        root = ET.XML(xml)
        for child in root.find('.//SCAN_LIST'):
            all_records.append({
                'name': child.find('TITLE').text,
                'id': child.find('REF').text,
                'date': child.find('LAUNCH_DATETIME').text,
                'type': child.find('TYPE').text,
                'duration': child.find('DURATION').text,
                'status': child.find('.//STATE').text,
            })
        return pd.DataFrame(all_records)

    def get_all_scans(self):
        """List every scan on the account as a DataFrame."""
        parameters = {
            'action': 'list',
            'echo_request': 0,
            'show_op': 0,
            'launched_after_datetime': '0001-01-01'
        }
        scans_xml = self.qgc.request(self.SCANS, parameters)
        return self.scan_xml_parser(scans_xml)

    def get_scan_details(self, scan_id=None):
        """Fetch one scan's findings as a DataFrame (json_extended format)."""
        parameters = {
            'action': 'fetch',
            'echo_request': 0,
            'output_format': 'json_extended',
            'mode': 'extended',
            'scan_ref': scan_id
        }
        scan_json = self.qgc.request(self.SCANS, parameters)

        # First two columns are metadata we already have
        # Last column corresponds to "target_distribution_across_scanner_appliances" element
        # which doesn't follow the schema and breaks the pandas data manipulation
        return pd.read_json(scan_json).iloc[2:-1]
|
||||||
|
|
||||||
|
class qualysUtils:
    """Small helpers shared by the Qualys framework classes."""

    def __init__(self):
        pass

    def iso_to_epoch(self, dt):
        """Convert an ISO-8601 timestamp string to Unix epoch seconds.

        Returns the epoch as a string (matching the previous
        strftime('%s') output). '%s' is a non-standard glibc extension:
        it is unavailable on Windows and silently ignores timezone
        offsets, so the conversion is done explicitly instead.
        """
        import calendar
        import time

        parsed = dp.parse(dt)
        if parsed.tzinfo is not None:
            # Aware timestamps: honour the UTC offset.
            epoch = calendar.timegm(parsed.utctimetuple())
        else:
            # Naive timestamps: interpret in local time, matching the
            # old glibc strftime('%s') behaviour.
            epoch = int(time.mktime(parsed.timetuple()))
        return str(epoch)
|
||||||
|
class qualysVulnScan:
    """One Qualys VM scan, fed either from the live API (when *config*
    is supplied) or from a local/streamed report file."""

    def __init__(
        self,
        config=None,
        file_in=None,
        file_stream=False,
        delimiter=',',
        quotechar='"',
    ):
        # Per-instance state; report/downloaded_file start out empty.
        self.file_in = file_in
        self.file_stream = file_stream
        self.report = None
        self.downloaded_file = None
        self.utils = qualysUtils()

        if config:
            try:
                self.qw = qualysWhisperAPI(config=config)
            except Exception as e:
                # Best-effort: a bad config is reported but not fatal here.
                print('Could not load config! Please check settings for %s' \
                      % e)

        # A streamed report arrives as one big string; a plain path is
        # opened for binary reading instead.
        if file_stream:
            self.open_file = file_in.splitlines()
        elif file_in:
            self.open_file = open(file_in, 'rb')

    def process_data(self, scan_id=None):
        """Downloads a file from Qualys and normalizes it"""
        print('[ACTION] - Downloading scan ID: %s' % scan_id)
        report = self.qw.get_scan_details(scan_id=scan_id)

        wanted = ['category', 'cve_id', 'cvss3_base', 'cvss3_temporal',
                  'cvss_base', 'cvss_temporal', 'dns', 'exploitability',
                  'fqdn', 'impact', 'ip', 'ip_status', 'netbios', 'os',
                  'pci_vuln', 'port', 'protocol', 'qid', 'results',
                  'severity', 'solution', 'ssl', 'threat', 'title',
                  'type', 'vendor_reference']
        report = report.filter(wanted)

        # Normalise both columns to string-typed integers.
        for column in ('severity', 'qid'):
            report[column] = report[column].astype(int).astype(str)

        return report
|
@ -12,5 +12,6 @@ class bcolors:
|
|||||||
UNDERLINE = '\033[4m'
|
UNDERLINE = '\033[4m'
|
||||||
|
|
||||||
INFO = '{info}[INFO]{endc}'.format(info=OKBLUE, endc=ENDC)
|
INFO = '{info}[INFO]{endc}'.format(info=OKBLUE, endc=ENDC)
|
||||||
|
ACTION = '{info}[ACTION]{endc}'.format(info=OKBLUE, endc=ENDC)
|
||||||
SUCCESS = '{green}[SUCCESS]{endc}'.format(green=OKGREEN, endc=ENDC)
|
SUCCESS = '{green}[SUCCESS]{endc}'.format(green=OKGREEN, endc=ENDC)
|
||||||
FAIL = '{red}[FAIL]{endc}'.format(red=FAIL, endc=ENDC)
|
FAIL = '{red}[FAIL]{endc}'.format(red=FAIL, endc=ENDC)
|
||||||
|