Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
ccf963f
wip
dogboat Apr 13, 2026
695877a
comment
dogboat Apr 14, 2026
1b8b026
simplify
dogboat Apr 14, 2026
23c1839
tests
dogboat Apr 14, 2026
479d0de
linter fixes
dogboat Apr 14, 2026
70c8449
comments
dogboat Apr 14, 2026
b193222
updates
dogboat Apr 14, 2026
cc5cf13
remove celery stuff
dogboat Apr 14, 2026
10748a9
refactor
dogboat Apr 14, 2026
8acea58
test updates
dogboat Apr 15, 2026
3dce1c0
linter
dogboat Apr 15, 2026
2bc4192
perf test updates
dogboat Apr 15, 2026
4e8f53f
wip
dogboat Apr 15, 2026
e7d912c
testing
dogboat Apr 15, 2026
f8d4bc1
wip
dogboat Apr 16, 2026
22bba91
perf test updates
dogboat Apr 16, 2026
933c259
wip rename
dogboat Apr 16, 2026
3920066
wip
dogboat Apr 16, 2026
e5d3213
cleanup
dogboat Apr 16, 2026
ce8f9eb
comments
dogboat Apr 16, 2026
8647d36
rename
dogboat Apr 16, 2026
37bbfb4
lint
dogboat Apr 16, 2026
164d395
clean on endpoints
dogboat Apr 16, 2026
8ec5559
remove cleaning until later
dogboat Apr 16, 2026
e5cb359
test updates
dogboat Apr 16, 2026
9f0bb2e
cache cleaned locations for tests
dogboat Apr 16, 2026
e208934
remove unnecessary guard
dogboat Apr 17, 2026
77261be
move clean call mmm
dogboat Apr 17, 2026
5d35fa4
consolidate
dogboat Apr 17, 2026
a3e950d
restore clean mmm
dogboat Apr 17, 2026
8fb9520
fix
dogboat Apr 17, 2026
5f9b22c
add test
dogboat Apr 17, 2026
6ea09f3
linter
dogboat Apr 17, 2026
49074e8
refactor
dogboat Apr 17, 2026
ea2210a
fixup
dogboat Apr 17, 2026
34ffb29
refactor
dogboat Apr 17, 2026
7a9b72a
cleanup
dogboat Apr 17, 2026
dcb291d
coments/cleanup
dogboat Apr 17, 2026
94a83b5
product ref statuses
dogboat Apr 17, 2026
77ed8a9
persist in txn
dogboat Apr 17, 2026
221a525
refactor
dogboat Apr 17, 2026
e24b4b6
perf test updates
dogboat Apr 18, 2026
efc5625
comments
dogboat Apr 21, 2026
9dc2728
linter
dogboat Apr 21, 2026
97e60aa
comments
dogboat Apr 21, 2026
4e4009c
comment
dogboat Apr 21, 2026
5af3da9
fixup
dogboat Apr 21, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Generated by Django 5.2.12 on 2026-04-10 14:24

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):
    """Adjust the ``identity_hash`` fields on the URL location models.

    NOTE(review): auto-generated by Django (see header comment); the field
    definitions must mirror the current model state exactly — do not hand-edit
    the operations.
    """

    dependencies = [
        # Must apply after the language-type uniqueness migration.
        ('dojo', '0263_language_type_unique_language'),
    ]

    operations = [
        # url.identity_hash: 64-char hash of the location (presumably a
        # SHA-256 hex digest — TODO confirm), unique and indexed, with the
        # exact length enforced by MinLengthValidator(64) + max_length=64.
        migrations.AlterField(
            model_name='url',
            name='identity_hash',
            field=models.CharField(db_index=True, editable=False, help_text='The hash of the location for uniqueness', max_length=64, unique=True, validators=[django.core.validators.MinLengthValidator(64)]),
        ),
        # urlevent.identity_hash: same 64-char shape, but neither unique nor
        # indexed, unlike the url field above.
        migrations.AlterField(
            model_name='urlevent',
            name='identity_hash',
            field=models.CharField(editable=False, help_text='The hash of the location for uniqueness', max_length=64, validators=[django.core.validators.MinLengthValidator(64)]),
        ),
    ]
101 changes: 13 additions & 88 deletions dojo/importers/base_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@

import dojo.finding.helper as finding_helper
import dojo.risk_acceptance.helper as ra_helper
from dojo.celery_dispatch import dojo_dispatch_task
from dojo.importers.location_manager import LocationManager, UnsavedLocation
from dojo.importers.options import ImporterOptions
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.location.models import Location
Expand Down Expand Up @@ -80,8 +78,6 @@ def __init__(
and will raise a `NotImplementedError` exception
"""
ImporterOptions.__init__(self, *args, **kwargs)
if settings.V3_FEATURE_LOCATIONS:
self.location_manager = LocationManager()

def check_child_implementation_exception(self):
"""
Expand Down Expand Up @@ -391,36 +387,20 @@ def apply_import_tags(
for tag in self.tags:
self.add_tags_safe(finding, tag)

if settings.V3_FEATURE_LOCATIONS:
# Add any tags to any locations of the findings imported if necessary
if self.apply_tags_to_endpoints and self.tags:
# Collect all endpoints linked to the affected findings
locations_qs = Location.objects.filter(findings__finding__in=findings_to_tag).distinct()
try:
bulk_add_tags_to_instances(
tag_or_tags=self.tags,
instances=locations_qs,
tag_field_name="tags",
)
except IntegrityError:
for finding in findings_to_tag:
for location in finding.locations.all():
for tag in self.tags:
self.add_tags_safe(location.location, tag)
# Add any tags to any endpoints of the findings imported if necessary
elif self.apply_tags_to_endpoints and self.tags:
endpoints_qs = Endpoint.objects.filter(finding__in=findings_to_tag).distinct()
# Add any tags to any locations/endpoints of the findings imported if necessary
if self.apply_tags_to_endpoints and self.tags:
locations_qs = self.location_handler.get_locations_for_tagging(findings_to_tag)
try:
bulk_add_tags_to_instances(
tag_or_tags=self.tags,
instances=endpoints_qs,
instances=locations_qs,
tag_field_name="tags",
)
except IntegrityError:
for finding in findings_to_tag:
for endpoint in finding.endpoints.all():
for location in self.location_handler.get_location_tag_fallback(finding):
for tag in self.tags:
self.add_tags_safe(endpoint, tag)
self.add_tags_safe(location, tag)

def update_import_history(
self,
Expand Down Expand Up @@ -467,14 +447,8 @@ def update_import_history(
import_settings["apply_tags_to_endpoints"] = self.apply_tags_to_endpoints
import_settings["group_by"] = self.group_by
import_settings["create_finding_groups_for_all_findings"] = self.create_finding_groups_for_all_findings
if settings.V3_FEATURE_LOCATIONS:
# Add the list of locations that were added exclusively at import time
if len(self.endpoints_to_add) > 0:
import_settings["locations"] = [str(location) for location in self.endpoints_to_add]
# TODO: Delete this after the move to Locations
# Add the list of endpoints that were added exclusively at import time
elif len(self.endpoints_to_add) > 0:
import_settings["endpoints"] = [str(endpoint) for endpoint in self.endpoints_to_add]
if len(self.endpoints_to_add) > 0:
import_settings.update(self.location_handler.serialize_extra_locations(self.endpoints_to_add))
# Create the test import object
test_import = Test_Import.objects.create(
test=self.test,
Expand Down Expand Up @@ -796,50 +770,13 @@ def process_request_response_pairs(
def process_locations(
self,
finding: Finding,
locations_to_add: list[UnsavedLocation],
extra_locations_to_add: list | None = None,
) -> None:
"""
Process any locations to add to the finding. Locations could come from two places
- Directly from the report
- Supplied by the user from the import form
These locations will be processed in to Location objects and associated with the
finding and product
"""
# Save the unsaved locations
self.location_manager.chunk_locations_and_disperse(finding, finding.unsaved_locations)
# Check for any that were added in the form
if len(locations_to_add) > 0:
logger.debug("locations_to_add: %s", locations_to_add)
self.location_manager.chunk_locations_and_disperse(finding, locations_to_add)

# TODO: Delete this after the move to Locations
def process_endpoints(
self,
finding: Finding,
endpoints_to_add: list[Endpoint],
) -> None:
Record locations/endpoints from the finding + any form-added extras.
Flushed to DB by location_handler.persist().
"""
Process any endpoints to add to the finding. Endpoints could come from two places
- Directly from the report
- Supplied by the user from the import form
These endpoints will be processed in to endpoints objects and associated with the
finding and product
"""
if settings.V3_FEATURE_LOCATIONS:
msg = "BaseImporter#process_endpoints() method is deprecated when V3_FEATURE_LOCATIONS is enabled"
raise NotImplementedError(msg)

# Clean and record unsaved endpoints from the report
self.endpoint_manager.clean_unsaved_endpoints(finding.unsaved_endpoints)
for endpoint in finding.unsaved_endpoints:
key = self.endpoint_manager.record_endpoint(endpoint)
self.endpoint_manager.record_status_for_create(finding, key)
# Record any endpoints added from the form
if len(endpoints_to_add) > 0:
logger.debug("endpoints_to_add: %s", endpoints_to_add)
for endpoint in endpoints_to_add:
key = self.endpoint_manager.record_endpoint(endpoint)
self.endpoint_manager.record_status_for_create(finding, key)
self.location_handler.record_for_finding(finding, extra_locations_to_add)

def sanitize_vulnerability_ids(self, finding) -> None:
"""Remove undisired vulnerability id values"""
Expand Down Expand Up @@ -932,19 +869,7 @@ def mitigate_finding(
# Remove risk acceptance if present (vulnerability is now fixed)
# risk_unaccept will check if finding.risk_accepted is True before proceeding
ra_helper.risk_unaccept(self.user, finding, perform_save=False, post_comments=False)
if settings.V3_FEATURE_LOCATIONS:
# Mitigate the location statuses
dojo_dispatch_task(
LocationManager.mitigate_location_status,
finding.locations.all(),
self.user,
kwuser=self.user,
sync=True,
)
else:
# TODO: Delete this after the move to Locations
# Accumulate endpoint statuses for bulk mitigate in persist()
self.endpoint_manager.record_statuses_to_mitigate(finding.status_finding.all())
self.location_handler.record_mitigations_for_finding(finding, self.user)
# to avoid pushing a finding group multiple times, we push those outside of the loop
if finding_groups_enabled and finding.finding_group:
# don't try to dedupe findings that we are closing
Expand Down
130 changes: 130 additions & 0 deletions dojo/importers/base_location_manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
"""
Base class and handler for location/endpoint managers in the import pipeline.

BaseLocationManager defines the contract that both LocationManager (V3) and
EndpointManager (legacy) must implement. LocationHandler is the facade that
importers interact with — it picks the appropriate manager based on
V3_FEATURE_LOCATIONS and delegates all calls through the shared interface.

This structure prevents drift between the two managers: adding an abstract
method to BaseLocationManager forces both to implement it, and callers can
only access methods exposed by LocationHandler.
"""

from __future__ import annotations

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from dojo.models import Dojo_User, Finding, Product


class BaseLocationManager(ABC):

    """
    Shared contract for the import pipeline's network-identifier managers.

    V3 deployments back this with LocationManager; legacy deployments use
    EndpointManager. Importers never touch a concrete subclass directly —
    every call is routed through LocationHandler — so the abstract methods
    below are the complete surface either implementation must provide.
    """

    def __init__(self, product: Product) -> None:
        # Every recorded operation is scoped to this single product.
        self._product = product

    @abstractmethod
    def clean_unsaved(self, finding: Finding) -> None:
        """Normalize/validate the unsaved locations or endpoints attached to *finding*."""

    @abstractmethod
    def record_for_finding(self, finding: Finding, extra_locations: list | None = None) -> None:
        """Queue the finding's own items plus any form-added *extra_locations* for batch creation."""

    @abstractmethod
    def update_status(self, existing_finding: Finding, new_finding: Finding, user: Dojo_User) -> None:
        """Queue mitigate/reactivate status changes by comparing the old and new finding."""

    @abstractmethod
    def record_reactivations_for_finding(self, finding: Finding) -> None:
        """Queue every item on *finding* for reactivation."""

    @abstractmethod
    def record_mitigations_for_finding(self, finding: Finding, user: Dojo_User) -> None:
        """Queue every item on *finding* for mitigation, attributed to *user*."""

    @abstractmethod
    def get_locations_for_tagging(self, findings: list[Finding]):
        """Return a queryset of the taggable objects linked to *findings*."""

    @abstractmethod
    def get_location_tag_fallback(self, finding: Finding):
        """Return taggable objects for one finding, used as a per-instance fallback."""

    @abstractmethod
    def serialize_extra_locations(self, locations: list) -> dict:
        """Render form-supplied extra items into import-history settings entries."""

    @abstractmethod
    def persist(self) -> None:
        """Write every queued operation out to the database."""


class LocationHandler:

    """
    Single entry point importers use for location/endpoint work.

    At construction time it selects the concrete BaseLocationManager —
    LocationManager when V3_FEATURE_LOCATIONS is enabled, EndpointManager
    otherwise — and forwards every call to it. Because importers hold only
    this facade, implementation-specific methods stay unreachable, keeping
    V3-only and legacy-only code out of the shared import path.
    """

    def __init__(
        self,
        product: Product,
        *,
        v3_manager_class: type[BaseLocationManager] | None = None,
        v2_manager_class: type[BaseLocationManager] | None = None,
    ) -> None:
        # Deferred imports keep this base module free of import-time
        # dependencies on the concrete manager modules.
        from django.conf import settings  # noqa: PLC0415

        from dojo.importers.endpoint_manager import EndpointManager  # noqa: PLC0415
        from dojo.importers.location_manager import LocationManager  # noqa: PLC0415

        self._product = product
        use_v3 = settings.V3_FEATURE_LOCATIONS
        override = v3_manager_class if use_v3 else v2_manager_class
        manager_cls = override or (LocationManager if use_v3 else EndpointManager)
        self._manager: BaseLocationManager = manager_cls(product)

    # --- Delegates (one per BaseLocationManager method) ---

    def clean_unsaved(self, finding: Finding) -> None:
        """Delegate: clean the unsaved items on *finding*."""
        return self._manager.clean_unsaved(finding)

    def record_for_finding(self, finding: Finding, extra_locations: list | None = None) -> None:
        """Delegate: queue the finding's items plus any extras for batch creation."""
        return self._manager.record_for_finding(finding, extra_locations)

    def update_status(self, existing_finding: Finding, new_finding: Finding, user: Dojo_User) -> None:
        """Delegate: queue status changes based on old vs new finding."""
        return self._manager.update_status(existing_finding, new_finding, user)

    def record_reactivations_for_finding(self, finding: Finding) -> None:
        """Delegate: queue the finding's items for reactivation."""
        return self._manager.record_reactivations_for_finding(finding)

    def record_mitigations_for_finding(self, finding: Finding, user: Dojo_User) -> None:
        """Delegate: queue the finding's items for mitigation."""
        return self._manager.record_mitigations_for_finding(finding, user)

    def get_locations_for_tagging(self, findings: list[Finding]):
        """Delegate: queryset of taggable objects for the given findings."""
        return self._manager.get_locations_for_tagging(findings)

    def get_location_tag_fallback(self, finding: Finding):
        """Delegate: per-finding taggable objects for the tag fallback path."""
        return self._manager.get_location_tag_fallback(finding)

    def serialize_extra_locations(self, locations: list) -> dict:
        """Delegate: serialize extra items for import-history settings."""
        return self._manager.serialize_extra_locations(locations)

    def persist(self) -> None:
        """Delegate: flush all accumulated operations to the database."""
        return self._manager.persist()
25 changes: 7 additions & 18 deletions dojo/importers/default_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from dojo.celery_dispatch import dojo_dispatch_task
from dojo.finding import helper as finding_helper
from dojo.importers.base_importer import BaseImporter, Parser
from dojo.importers.endpoint_manager import EndpointManager
from dojo.importers.base_location_manager import LocationHandler
from dojo.importers.options import ImporterOptions
from dojo.jira_link.helper import is_keep_in_sync_with_jira
from dojo.models import (
Expand Down Expand Up @@ -58,8 +58,7 @@ def __init__(self, *args, **kwargs):
import_type=Test_Import.IMPORT_TYPE,
**kwargs,
)
if not settings.V3_FEATURE_LOCATIONS:
self.endpoint_manager = EndpointManager(self.engagement.product)
self.location_handler = LocationHandler(self.engagement.product)

def create_test(
self,
Expand Down Expand Up @@ -240,13 +239,7 @@ def process_findings(
)
# Process any request/response pairs
self.process_request_response_pairs(finding)
if settings.V3_FEATURE_LOCATIONS:
# Process any locations on the finding, or added on the form
self.process_locations(finding, self.endpoints_to_add)
else:
# TODO: Delete this after the move to Locations
# Process any endpoints on the finding, or added on the form
self.process_endpoints(finding, self.endpoints_to_add)
self.process_locations(finding, self.endpoints_to_add)
# Parsers must use unsaved_tags to store tags, so we can clean them.
# Accumulate for bulk application after the loop (O(unique_tags) instead of O(N·T)).
cleaned_tags = clean_tags(finding.unsaved_tags)
Expand All @@ -267,16 +260,13 @@ def process_findings(
logger.debug("process_findings: computed push_to_jira=%s", push_to_jira)
batch_finding_ids.append(finding.id)

# If batch is full or we're at the end, persist endpoints and dispatch
# If batch is full or we're at the end, persist locations/endpoints and dispatch
if len(batch_finding_ids) >= batch_max_size or is_final_finding:
if not settings.V3_FEATURE_LOCATIONS:
self.endpoint_manager.persist(user=self.user)

self.location_handler.persist()
# Apply parser-supplied tags for this batch before post-processing starts,
# so rules/deduplication tasks see the tags already on the findings.
bulk_apply_parser_tags(findings_with_parser_tags)
findings_with_parser_tags.clear()

finding_ids_batch = list(batch_finding_ids)
batch_finding_ids.clear()
logger.debug("process_findings: dispatching batch with push_to_jira=%s (batch_size=%d, is_final=%s)",
Expand Down Expand Up @@ -404,9 +394,8 @@ def close_old_findings(
finding_groups_enabled=self.findings_groups_enabled,
product_grading_option=False,
)
# Persist any accumulated endpoint status mitigations
if not settings.V3_FEATURE_LOCATIONS:
self.endpoint_manager.persist(user=self.user)
# Persist any accumulated location/endpoint status changes
self.location_handler.persist()
# push finding groups to jira since we only want to push whole groups
# We dont check if the finding jira sync is applicable quite yet until we can get in the loop
# but this is a way to at least make it that far
Expand Down
Loading
Loading