add unique document id

pemontto
2019-05-01 17:51:46 +01:00
parent ea864d09ac
commit 5b6a51f02c
7 changed files with 81 additions and 20 deletions

View File

@@ -43,14 +43,29 @@ filter {
       convert => { "risk_number" => "integer"}
       convert => { "total_times_detected" => "integer"}
     }
+    if [_unique] {
+      # Set document ID from _unique
+      mutate {
+        rename => { "_unique" => "[@metadata][id]" }
+      }
+    }
   }
 }
 output {
   if "nessus" in [tags] or "tenable" in [tags]{
-    elasticsearch {
-      hosts => [ "elasticsearch:9200" ]
-      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+    if [@metadata][id] {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+        document_id => "%{[@metadata][id]}"
+      }
+    } else {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+      }
     }
   }
 }
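The conditional output above is what makes re-ingestion idempotent: when an event carries [@metadata][id], Elasticsearch receives a fixed document_id and treats a replayed record as an overwrite rather than a new document. A minimal sketch of that behaviour (not part of the commit), modelling the index as a dict with made-up event values:

    # Sketch only: model an Elasticsearch index as a dict to show why a
    # fixed document_id deduplicates replays of the same scan file.
    index = {}
    auto_id = 0

    def index_event(event, doc_id=None):
        global auto_id
        if doc_id is None:
            # no explicit id: Elasticsearch mints a fresh one per request,
            # so replaying the same scan file duplicates every record
            doc_id = auto_id = auto_id + 1
        index[doc_id] = event

    event = {'scan_id': '12345', 'asset': '10.0.100.1'}  # made-up values
    index_event(event, doc_id='12345_0')
    index_event(event, doc_id='12345_0')  # same id: overwrite, not duplicate
    assert len(index) == 1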

View File

@@ -79,13 +79,27 @@ filter {
 #     add_tag => [ "critical_asset" ]
 #   }
 # }
+    if [_unique] {
+      # Set document ID from _unique
+      mutate {
+        rename => { "_unique" => "[@metadata][id]" }
+      }
+    }
   }
 }
 output {
   if "qualys_vuln" in [tags] or "qualys_web" in [tags] {
-    elasticsearch {
-      hosts => [ "elasticsearch:9200" ]
-      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+    if [@metadata][id] {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+        document_id => "%{[@metadata][id]}"
+      }
+    } else {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+      }
     }
   }
 }

View File

@@ -100,18 +100,32 @@ filter {
     }
     # Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag any, but the asset panel will break.
-    if [asset] =~ "^10\.0\.100\." {
+    # if [asset] =~ "^10\.0\.100\." {
+    #   mutate {
+    #     add_tag => [ "critical_asset" ]
+    #   }
+    # }
+    if [_unique] {
+      # Set document ID from _unique
       mutate {
-        add_tag => [ "critical_asset" ]
+        rename => { "_unique" => "[@metadata][id]" }
       }
     }
   }
 }
 output {
   if "openvas" in [tags] {
-    elasticsearch {
-      hosts => [ "elasticsearch:9200" ]
-      index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+    if [@metadata][id] {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+        document_id => "%{[@metadata][id]}"
+      }
+    } else {
+      elasticsearch {
+        hosts => [ "elasticsearch:9200" ]
+        index => "logstash-vulnwhisperer-%{+YYYY.MM}"
+      }
     }
   }
 }

View File

@@ -110,6 +110,7 @@ class OpenVAS_API(object):
         ]
         token = requests.post(self.base + self.OMP, data=data, verify=False)
         return token
+
     def get_report_formats(self):
         params = (
             ('cmd', 'get_report_formats'),
View File

@@ -169,6 +169,8 @@ class qualysVulnScan:
         df['cvss_temporal_vector'] = df['cvss_temporal'].str.extract('\((.*)\)', expand=False)
         df['cvss_temporal'] = df['cvss_temporal'].str.extract('^(\d+(?:\.\d+)?)', expand=False)
+        # Set asset to ip
+        df['asset'] = df['ip']
         # Convert Qualys severity to standardised risk number
         df['risk_number'] = df['severity'].astype(int)-1

View File

@@ -527,5 +527,8 @@ class qualysScanReport:
         df['dns'] = df['url'].str.extract('https?://([^/]+)', expand=False)
         df.loc[df['uri'] != '','dns'] = df.loc[df['uri'] != '','uri'].str.extract('https?://([^/]+)', expand=False)
+        # Set asset to dns
+        df['asset'] = df['dns']
         df.fillna('', inplace=True)
         return df
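The str.extract calls above reduce a URL to its host portion so it can stand in as the asset name. A quick pandas illustration with made-up URLs:

    # Illustration (assumed values) of the host extraction used above.
    import pandas as pd

    df = pd.DataFrame({'url': ['https://app.example.com/login', 'http://10.0.100.5/']})
    df['dns'] = df['url'].str.extract('https?://([^/]+)', expand=False)
    print(df['dns'].tolist())  # ['app.example.com', '10.0.100.5']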

View File

@@ -275,7 +275,7 @@ class vulnWhispererBase(object):
         if cvss_version in df:
             self.logger.info('Normalising {} severity'.format(cvss_version))
             # Map CVSS to severity name
-            df.loc[df[cvss_version] == '', cvss_version] = None
+            df.loc[df[cvss_version].astype(str) == '', cvss_version] = None
             df[cvss_version] = df[cvss_version].astype('float')
             # df.loc[df[cvss_version].isnull(), cvss_version + '_severity'] = 'info'
             df.loc[df[cvss_version] == 0, cvss_version + '_severity'] = 'info'
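The added .astype(str) presumably hardens the empty-string check against the column's dtype: if CVSS has already come through as numeric, casting to str first keeps the comparison a plain elementwise string match rather than depending on how pandas compares a numeric column to ''. A small illustration with assumed values:

    # Illustration (made-up values): casting to str before comparing
    # handles string and numeric CVSS columns uniformly.
    import pandas as pd

    for values in (['7.5', '', '9.8'], [7.5, None, 9.8]):
        s = pd.Series(values)
        print((s.astype(str) == '').tolist())  # [False, True, False] then [False, False, False]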
@@ -284,6 +284,13 @@ class vulnWhispererBase(object):
             df.loc[(df[cvss_version] >= 6) & (df[cvss_version] < 9), cvss_version + '_severity'] = 'high'
             df.loc[(df[cvss_version] > 9) & (df[cvss_version].notnull()), cvss_version + '_severity'] = 'critical'
+        self.logger.info('Creating Unique Document ID')
+        df['_unique'] = df.index.values
+        if 'history_id' in df:
+            df['_unique'] = df[['scan_id', 'history_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
+        else:
+            df['_unique'] = df[['scan_id', '_unique']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
         # Rename cvss to cvss2
         # Make cvss with no suffix == cvss3 else cvss2
         # cvss = cvss3 if cvss3 else cvss2
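For a sense of what the new _unique values look like, here is a toy run of the logic above with made-up scan data: the row index makes each ID unique within a scan, while scan_id (and history_id, when present) keeps the IDs stable across re-runs of the same scan.

    # Toy run of the _unique construction above (illustrative values only).
    import pandas as pd

    df = pd.DataFrame({'scan_id': [12345, 12345], 'history_id': [2, 2]})
    df['_unique'] = df.index.values
    df['_unique'] = df[['scan_id', 'history_id', '_unique']].apply(
        lambda x: '_'.join(x.astype(str)), axis=1)
    print(df['_unique'].tolist())  # ['12345_2_0', '12345_2_1']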
@@ -510,7 +517,6 @@ class vulnWhispererNessus(vulnWhispererBase):
             # Map and transform fields
             vuln_ready = self.nessus.normalise(vuln_ready)
-            vuln_ready = self.common_normalise(vuln_ready)
             # Set common fields
             vuln_ready['history_id'] = history_id
@@ -519,6 +525,8 @@ class vulnWhispererNessus(vulnWhispererBase):
             vuln_ready['scan_source'] = self.CONFIG_SECTION
             vuln_ready['scan_time'] = norm_time
+            vuln_ready = self.common_normalise(vuln_ready)
+
             vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True)
             os.rename(relative_path_name + '.tmp', relative_path_name)
             self.logger.info('{records} records written to {path} '.format(records=vuln_ready.shape[0],
@@ -621,7 +629,6 @@ class vulnWhispererQualys(vulnWhispererBase):
             vuln_ready = self.qualys_scan.process_data(path=self.write_path, file_id=str(generated_report_id))
             # Map and transform fields
             vuln_ready = self.qualys_scan.normalise(vuln_ready)
-            vuln_ready = self.common_normalise(vuln_ready)
             # Set common fields
             vuln_ready['app_id'] = report_id
@@ -630,6 +637,8 @@ class vulnWhispererQualys(vulnWhispererBase):
             vuln_ready['scan_source'] = self.CONFIG_SECTION
             vuln_ready['scan_time'] = launched_date
+            vuln_ready = self.common_normalise(vuln_ready)
+
             if output_format == 'json':
                 vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True)
             elif output_format == 'csv':
@@ -699,6 +708,7 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
         'Hostname': 'hostname',
         'Port': 'port',
         'Port Protocol': 'protocol',
+        'CVEs': 'cve',
         'CVSS': 'cvss',
         'Severity': 'severity',
         'Solution Type': 'category',
@@ -782,11 +792,10 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
             vuln_ready = self.openvas_api.process_report(report_id=report_id)
             # Map and transform fields
             vuln_ready = self.openvas_api.normalise(vuln_ready)
-            vuln_ready = self.common_normalise(vuln_ready)
             # TODO move the following to the openvas_api.transform_values
             vuln_ready.rename(columns=self.COLUMN_MAPPING, inplace=True)
-            vuln_ready.port = vuln_ready.port.fillna(0).astype(int)
             vuln_ready.fillna('', inplace=True)
+            vuln_ready.port = vuln_ready.port.replace('', 0).astype(int)
             # Set common fields
             vuln_ready['scan_name'] = scan_name.encode('utf8')
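The port conversion now runs after the global fillna('', ...), so missing ports arrive as empty strings rather than NaN; that is why fillna(0) is swapped for replace('', 0) ahead of the integer cast. A toy demonstration with made-up rows:

    # Toy demonstration (made-up rows) of the port handling above: once
    # fillna('') has run, missing ports are '' and must be replaced
    # before casting to int.
    import pandas as pd

    df = pd.DataFrame({'port': [443, None]})
    df.fillna('', inplace=True)
    df.port = df.port.replace('', 0).astype(int)
    print(df.port.tolist())  # [443, 0]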
@@ -794,6 +803,8 @@ class vulnWhispererOpenVAS(vulnWhispererBase):
             vuln_ready['scan_time'] = launched_date
             vuln_ready['scan_source'] = self.CONFIG_SECTION
+            vuln_ready = self.common_normalise(vuln_ready)
+
             vuln_ready.to_json(relative_path_name + '.tmp', orient='records', lines=True)
             os.rename(relative_path_name + '.tmp', relative_path_name)
             self.logger.info('{records} records written to {path} '.format(records=vuln_ready.shape[0],
@@ -904,7 +915,6 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
                 vuln_ready = self.qualys_scan.process_data(scan_id=report_id)
                 # Map and transform fields
                 vuln_ready = self.qualys_scan.normalise(vuln_ready)
-                vuln_ready = self.common_normalise(vuln_ready)
                 # Set common fields
                 vuln_ready['scan_name'] = scan_name.encode('utf8')
@@ -912,6 +922,8 @@ class vulnWhispererQualysVuln(vulnWhispererBase):
                 vuln_ready['scan_time'] = launched_date
                 vuln_ready['scan_source'] = self.CONFIG_SECTION
+                vuln_ready = self.common_normalise(vuln_ready)
+
             except Exception as e:
                 self.logger.error('Could not process {}: {}'.format(report_id, str(e)))
                 self.exit_code += 1
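The recurring two-line change across the hunks above (dropping common_normalise right after normalise and re-adding it below the field assignments) follows from the new ID logic: _unique is built from scan_id and history_id, so common_normalise can only run once those columns exist. A sketch of the ordering constraint, with a simplified stand-in for the real method:

    # Simplified stand-in for common_normalise's new ID step (sketch,
    # not the project's code), showing why the call order matters.
    import pandas as pd

    def build_unique(df):
        df['_unique'] = df.index.values
        cols = ['scan_id', 'history_id', '_unique'] if 'history_id' in df else ['scan_id', '_unique']
        df['_unique'] = df[cols].apply(lambda x: '_'.join(x.astype(str)), axis=1)
        return df

    vuln_ready = pd.DataFrame({'asset': ['10.0.100.1']})  # made-up record
    vuln_ready['scan_id'] = 12345
    vuln_ready['history_id'] = 2
    vuln_ready = build_unique(vuln_ready)  # ok once fields are set: '12345_2_0'
    # calling build_unique before scan_id exists would raise a KeyError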