Skip to content

Commit

Permalink
Merge pull request #11848 from DefectDojo/release/2.43.3
Browse files Browse the repository at this point in the history
Release: Merge release into master from: release/2.43.3
  • Loading branch information
rossops authored Feb 18, 2025
2 parents 31f0be8 + ba42d83 commit 73af855
Show file tree
Hide file tree
Showing 15 changed files with 752 additions and 72 deletions.
2 changes: 1 addition & 1 deletion components/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
"version": "2.43.2",
"version": "2.43.3",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
Expand Down
2 changes: 1 addition & 1 deletion dojo/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401

__version__ = "2.43.2"
__version__ = "2.43.3"
__url__ = "https://github.com/DefectDojo/django-DefectDojo"
__docs__ = "https://documentation.defectdojo.com"
53 changes: 26 additions & 27 deletions dojo/api_v2/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ class ImportStatisticsSerializer(serializers.Serializer):
)
delta = DeltaStatisticsSerializer(
required=False,
help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY hass not disabled.",
help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY has not been disabled.",
)
after = SeverityStatusStatisticsSerializer(
help_text="Finding statistics as stored in Defect Dojo after the import",
Expand Down Expand Up @@ -1920,43 +1920,42 @@ class Meta:

# Overriding this to push add Push to JIRA functionality
def create(self, validated_data):
# remove tags from validated data and store them seperately
# Pop off of some fields that should not be sent to the model at this time
to_be_tagged, validated_data = self._pop_tags(validated_data)

# pop push_to_jira so it won't get send to the model as a field
push_to_jira = validated_data.pop("push_to_jira")

# Save vulnerability ids and pop them
if "vulnerability_id_set" in validated_data:
vulnerability_id_set = validated_data.pop("vulnerability_id_set")
else:
vulnerability_id_set = None

# first save, so we have an instance to get push_all_to_jira from
new_finding = super(TaggitSerializer, self).create(validated_data)

if vulnerability_id_set:
vulnerability_ids = []
for vulnerability_id in vulnerability_id_set:
vulnerability_ids.append(vulnerability_id["vulnerability_id"])
validated_data["cve"] = vulnerability_ids[0]
save_vulnerability_ids(new_finding, vulnerability_ids)
new_finding.save()

push_to_jira = validated_data.pop("push_to_jira", False)
notes = validated_data.pop("notes", None)
found_by = validated_data.pop("found_by", None)
reviewers = validated_data.pop("reviewers", None)
# Process the vulnerability IDs specially
parsed_vulnerability_ids = []
if (vulnerability_ids := validated_data.pop("vulnerability_id_set", None)):
for vulnerability_id in vulnerability_ids:
parsed_vulnerability_ids.append(vulnerability_id["vulnerability_id"])
validated_data["cve"] = parsed_vulnerability_ids[0]
# Create a findings in memory so that we have access to unsaved_vulnerability_ids
new_finding = Finding(**validated_data)
new_finding.unsaved_vulnerability_ids = parsed_vulnerability_ids
new_finding.save()
# Deal with all of the many to many things
if notes:
new_finding.notes.set(notes)
if found_by:
new_finding.found_by.set(found_by)
if reviewers:
new_finding.reviewers.set(reviewers)
if parsed_vulnerability_ids:
save_vulnerability_ids(new_finding, parsed_vulnerability_ids)
# TODO: JIRA can we remove this is_push_all_issues, already checked in
# apiv2 viewset?
push_to_jira = push_to_jira or jira_helper.is_push_all_issues(
new_finding,
)

# If we need to push to JIRA, an extra save call is needed.
# TODO: try to combine create and save, but for now I'm just fixing a
# bug and don't want to change to much
if push_to_jira or new_finding:
new_finding.save(push_to_jira=push_to_jira)

# not sure why we are returning a tag_object, but don't want to change
# too much now as we're just fixing a bug
# This final call will save the finding again and return it
return self._save_tags(new_finding, to_be_tagged)

def validate(self, data):
Expand Down
63 changes: 49 additions & 14 deletions dojo/importers/base_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import TemporaryUploadedFile
from django.db import IntegrityError
from django.urls import reverse
from django.utils.timezone import make_aware

Expand Down Expand Up @@ -262,6 +263,13 @@ def determine_process_method(
**kwargs,
)

def determine_deduplication_algorithm(self) -> str:
    """
    Look up which deduplication algorithm is configured for the Test
    currently being processed.

    :return: The name of the dedupe algorithm to apply, as stored on the test.
    """
    algorithm = self.test.deduplication_algorithm
    return algorithm

def update_test_meta(self):
"""
Update the test with some values stored in the kwargs dict. The common
Expand Down Expand Up @@ -357,53 +365,80 @@ def update_import_history(
commit_hash=self.commit_hash,
type=self.import_type,
)
# Define all of the respective import finding actions for the test import object
test_import_finding_action_list = []

# Create a history record for each finding
for finding in closed_findings:
logger.debug(f"preparing Test_Import_Finding_Action for closed finding: {finding.id}")
test_import_finding_action_list.append(Test_Import_Finding_Action(
self.create_import_history_record_safe(Test_Import_Finding_Action(
test_import=test_import,
finding=finding,
action=IMPORT_CLOSED_FINDING,
))
for finding in new_findings:
logger.debug(f"preparing Test_Import_Finding_Action for created finding: {finding.id}")
test_import_finding_action_list.append(Test_Import_Finding_Action(
self.create_import_history_record_safe(Test_Import_Finding_Action(
test_import=test_import,
finding=finding,
action=IMPORT_CREATED_FINDING,
))
for finding in reactivated_findings:
logger.debug(f"preparing Test_Import_Finding_Action for reactivated finding: {finding.id}")
test_import_finding_action_list.append(Test_Import_Finding_Action(
self.create_import_history_record_safe(Test_Import_Finding_Action(
test_import=test_import,
finding=finding,
action=IMPORT_REACTIVATED_FINDING,
))
for finding in untouched_findings:
logger.debug(f"preparing Test_Import_Finding_Action for untouched finding: {finding.id}")
test_import_finding_action_list.append(Test_Import_Finding_Action(
self.create_import_history_record_safe(Test_Import_Finding_Action(
test_import=test_import,
finding=finding,
action=IMPORT_UNTOUCHED_FINDING,
))
# Bulk create all the defined objects
Test_Import_Finding_Action.objects.bulk_create(test_import_finding_action_list)

# Add any tags to the findings imported if necessary
if self.apply_tags_to_findings and self.tags:
for finding in test_import.findings_affected.all():
for tag in self.tags:
finding.tags.add(tag)
self.add_tags_safe(finding, tag)
# Add any tags to any endpoints of the findings imported if necessary
if self.apply_tags_to_endpoints and self.tags:
for finding in test_import.findings_affected.all():
for endpoint in finding.endpoints.all():
for tag in self.tags:
endpoint.tags.add(tag)
self.add_tags_safe(endpoint, tag)

return test_import

def create_import_history_record_safe(
    self,
    test_import_finding_action,
):
    """
    Save a Test_Import_Finding_Action record, tolerating IntegrityErrors.

    A background deduplication job may delete a finding between import and
    history recording; saving the action row then raises an IntegrityError
    on the dangling finding foreign key. We log and continue rather than
    fail the whole import.

    :param test_import_finding_action: An unsaved Test_Import_Finding_Action
        instance with its test_import, finding, and action already set.
    """
    logger.debug(f"creating Test_Import_Finding_Action for finding: {test_import_finding_action.finding.id} action: {test_import_finding_action.action}")
    try:
        test_import_finding_action.save()
    except IntegrityError as e:
        # This try/except makes it look like we don't know what we're doing, but in
        # https://github.com/DefectDojo/django-DefectDojo/issues/6217 we decided that for now this is the best solution
        logger.warning("Error creating Test_Import_Finding_Action: %s", e)
        logger.debug("Error creating Test_Import_Finding_Action, finding marked as duplicate and deleted ?")

def add_tags_safe(
    self,
    finding_or_endpoint,
    tag,
):
    """
    Add a tag to a Finding or Endpoint, tolerating IntegrityErrors.

    A background deduplication job may delete a finding between import and
    tagging; adding the tag then raises an IntegrityError. Per the decision
    in https://github.com/DefectDojo/django-DefectDojo/issues/6217 we log
    and continue rather than fail the whole import.

    :param finding_or_endpoint: The Finding or Endpoint instance to tag.
    :param tag: The tag to add.
    :raises TypeError: If the object is neither a Finding nor an Endpoint.
    """
    # Single isinstance call with a tuple instead of two separate checks
    if not isinstance(finding_or_endpoint, (Finding, Endpoint)):
        msg = "finding_or_endpoint must be a Finding or Endpoint object"
        raise TypeError(msg)
    # The guard above guarantees exactly one of the two types here, so the
    # former "unknown" fallback branch was unreachable and has been removed
    kind = "finding" if isinstance(finding_or_endpoint, Finding) else "endpoint"
    logger.debug(f"adding tag: {tag} to {kind} {finding_or_endpoint.id}")

    try:
        finding_or_endpoint.tags.add(tag)
    except IntegrityError as e:
        # This try/except makes it look like we don't know what we're doing, but in
        # https://github.com/DefectDojo/django-DefectDojo/issues/6217 we decided that for now this is the best solution
        logger.warning("Error adding tag: %s", e)
        logger.debug("Error adding tag, finding marked as duplicate and deleted ?")

def construct_imported_message(
self,
finding_count: int = 0,
Expand Down
51 changes: 33 additions & 18 deletions dojo/importers/default_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,29 +254,44 @@ def close_old_findings(
# First check if close old findings is desired
if not self.close_old_findings_toggle:
return []
logger.debug("REIMPORT_SCAN: Closing findings no longer present in scan report")
# Close old active findings that are not reported by this scan.
# Refactoring this to only call test.finding_set.values() once.
findings = findings.values()
mitigated_hash_codes = []

logger.debug("IMPORT_SCAN: Closing findings no longer present in scan report")
# Remove all the findings that are coming from the report already mitigated
new_hash_codes = []
for finding in findings:
new_hash_codes.append(finding["hash_code"])
if finding.get("is_mitigated", None):
mitigated_hash_codes.append(finding["hash_code"])
for hash_code in new_hash_codes:
if hash_code == finding["hash_code"]:
new_hash_codes.remove(hash_code)
new_unique_ids_from_tool = []
for finding in findings.values():
# Do not process closed findings in the report
if finding.get("is_mitigated", False):
continue
# Grab the hash code
if (hash_code := finding.get("hash_code")) is not None:
new_hash_codes.append(hash_code)
if (unique_id_from_tool := finding.get("unique_id_from_tool")) is not None:
new_unique_ids_from_tool.append(unique_id_from_tool)
# Get the initial filtered list of old findings to be closed without
# considering the scope of the product or engagement
old_findings = Finding.objects.exclude(
test=self.test,
).exclude(
hash_code__in=new_hash_codes,
).filter(
old_findings = Finding.objects.filter(
test__test_type=self.test.test_type,
active=True,
)
).exclude(test=self.test)
# Filter further based on the deduplication algorithm set on the test
self.deduplication_algorithm = self.determine_deduplication_algorithm()
if self.deduplication_algorithm in ["hash_code", "legacy"]:
old_findings = old_findings.exclude(
hash_code__in=new_hash_codes,
)
if self.deduplication_algorithm == "unique_id_from_tool":
old_findings = old_findings.exclude(
unique_id_from_tool__in=new_unique_ids_from_tool,
)
if self.deduplication_algorithm == "unique_id_from_tool_or_hash_code":
old_findings = old_findings.exclude(
(Q(hash_code__isnull=False) & Q(hash_code__in=new_hash_codes))
| (
Q(unique_id_from_tool__isnull=False)
& Q(unique_id_from_tool__in=new_unique_ids_from_tool)
),
)
# Accommodate for product scope or engagement scope
if self.close_old_findings_product_scope:
old_findings = old_findings.filter(test__engagement__product=self.test.engagement.product)
Expand Down
7 changes: 0 additions & 7 deletions dojo/importers/default_reimporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,13 +147,6 @@ def process_scan(
test_import_history,
)

def determine_deduplication_algorithm(self) -> str:
"""
Determines what dedupe algorithm to use for the Test being processed.
:return: A string representing the dedupe algorithm to use.
"""
return self.test.deduplication_algorithm

def process_findings(
self,
parsed_findings: list[Finding],
Expand Down
1 change: 1 addition & 0 deletions dojo/product/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -1381,6 +1381,7 @@ def process_finding_form(self, request: HttpRequest, test: Test, context: dict):
finding.reporter = request.user
finding.numerical_severity = Finding.get_numerical_severity(finding.severity)
finding.tags = context["form"].cleaned_data["tags"]
finding.unsaved_vulnerability_ids = context["form"].cleaned_data["vulnerability_ids"].split()
finding.save()
# Save and add new endpoints
finding_helper.add_endpoints(finding, context["form"])
Expand Down
2 changes: 1 addition & 1 deletion dojo/templates/base.html
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@
<div class="custom-search-form">
<form id="custom_search_form" role="search" method="get" action="{% url 'simple_search' %}">
<div class="input-group">
<input id="simple_search" label="simple_search" aria_label="simple_search" type="text" name="query" class="form-control"
<input id="simple_search" label="simple_search" aria-label="simple_search" type="text" name="query" class="form-control"
placeholder="{% trans "Search" %}..." value="{{clean_query}}">
<span class="input-group-btn">
<button id="simple_search_submit" class="btn btn-primary" type="submit" aria-label="Search">
Expand Down
1 change: 1 addition & 0 deletions dojo/test/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -538,6 +538,7 @@ def process_finding_form(self, request: HttpRequest, test: Test, context: dict):
finding.reporter = request.user
finding.numerical_severity = Finding.get_numerical_severity(finding.severity)
finding.tags = context["form"].cleaned_data["tags"]
finding.unsaved_vulnerability_ids = context["form"].cleaned_data["vulnerability_ids"].split()
finding.save()
# Save and add new endpoints
finding_helper.add_endpoints(finding, context["form"])
Expand Down
4 changes: 2 additions & 2 deletions helm/defectdojo/Chart.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
apiVersion: v2
appVersion: "2.43.2"
appVersion: "2.43.3"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
version: 1.6.173
version: 1.6.174
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ Markdown==3.7
openpyxl==3.1.5
Pillow==11.1.0 # required by django-imagekit
psycopg[c]==3.2.4
cryptography==44.0.0
cryptography==44.0.1
python-dateutil==2.9.0.post0
pytz==2025.1
redis==5.2.1
Expand Down
Loading

0 comments on commit 73af855

Please sign in to comment.