forked from DefectDojo/django-DefectDojo
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathparse_acunetix360_json.py
107 lines (100 loc) · 4.21 KB
/
parse_acunetix360_json.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import json
import html2text
from cvss import parser as cvss_parser
from dateutil import parser
from dojo.models import Endpoint, Finding
class AcunetixJSONParser:
    """Parser for Acunetix 360 JSON vulnerability reports.

    Converts each entry of the report's ``Vulnerabilities`` array into a
    ``dojo.models.Finding``, de-duplicating by title: repeated titles are
    merged into one finding whose request/response pairs and endpoints
    accumulate.
    """

    def get_findings(self, filename, test):
        """Parse an Acunetix 360 JSON report into a list of Findings.

        :param filename: file-like object containing the JSON report.
        :param test: the ``dojo.models.Test`` the findings belong to.
        :return: list of ``Finding`` objects, one per unique title.
        """
        data = json.load(filename)
        dupes = {}
        # Report-level generation date; overridden per-finding below when
        # the item carries its own FirstSeenDate.
        scan_date = parser.parse(data["Generated"], dayfirst=True)
        text_maker = html2text.HTML2Text()
        text_maker.body_width = 0  # disable hard line wrapping
        for item in data["Vulnerabilities"]:
            title = item["Name"]
            findingdetail = text_maker.handle(item.get("Description", ""))

            # CWE comes as a possibly comma-separated string under
            # Classification; keep only the first entry.
            classification = item.get("Classification") or {}
            cwe = None
            if classification.get("Cwe"):
                try:
                    cwe = int(classification["Cwe"].split(",")[0])
                except (ValueError, AttributeError):
                    # Non-numeric or unexpected value — leave CWE unset.
                    cwe = None

            sev = item["Severity"]
            if sev not in {"Info", "Low", "Medium", "High", "Critical"}:
                sev = "Info"

            remedial = item.get("RemedialProcedure")
            mitigation = text_maker.handle(remedial) if remedial is not None else None

            remedy_refs = item.get("RemedyReferences")
            references = text_maker.handle(remedy_refs) if remedy_refs is not None else None
            if "LookupId" in item:
                # Prepend a deep link back to the issue in Acunetix 360.
                lookup_url = (
                    f"https://online.acunetix360.com/issues/detail/{item['LookupId']}\n"
                )
                references = lookup_url if references is None else lookup_url + references

            url = item["Url"]
            impact_html = item.get("Impact")
            impact = text_maker.handle(impact_html) if impact_html is not None else None

            dupe_key = title

            request = item["HttpRequest"]["Content"]
            if not request:
                request = "Request Not Found"
            response = item["HttpResponse"]["Content"]
            if not response:
                response = "Response Not Found"

            finding = Finding(
                title=title,
                test=test,
                description=findingdetail,
                severity=sev.title(),
                mitigation=mitigation,
                impact=impact,
                date=scan_date,
                references=references,
                cwe=cwe,
                static_finding=True,
            )

            # CVSS vector may be absent at any level of nesting; .get()
            # chains avoid KeyError on partially populated classifications.
            vector = (classification.get("Cvss") or {}).get("Vector")
            if vector:
                cvss_objects = cvss_parser.parse_cvss_from_text(vector)
                if cvss_objects:
                    finding.cvssv3 = cvss_objects[0].clean_vector()

            # State is a comma-separated list of flags.
            if item.get("State"):
                state = [x.strip() for x in item["State"].split(",")]
                if "AcceptedRisk" in state:
                    finding.risk_accepted = True
                    finding.active = False
                elif "FalsePositive" in state:
                    finding.false_p = True
                    finding.active = False

            finding.unsaved_req_resp = [{"req": request, "resp": response}]
            finding.unsaved_endpoints = [Endpoint.from_uri(url)]

            if item.get("FirstSeenDate"):
                finding.date = parser.parse(item["FirstSeenDate"], dayfirst=True)

            if dupe_key in dupes:
                # Merge duplicates: keep first finding, accumulate evidence.
                existing = dupes[dupe_key]
                existing.unsaved_req_resp.extend(finding.unsaved_req_resp)
                existing.unsaved_endpoints.extend(finding.unsaved_endpoints)
            else:
                dupes[dupe_key] = finding
        return list(dupes.values())