diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py index 1ea85bcd66dbc5..90b8a7c9d1a7e8 100644 --- a/src/sentry/workflow_engine/models/action.py +++ b/src/sentry/workflow_engine/models/action.py @@ -124,9 +124,9 @@ def get_handler(self) -> builtins.type[ActionHandler]: return action_handler_registry.get(action_type) def trigger(self, event_data: WorkflowEventData, notification_uuid: str) -> None: - from sentry.workflow_engine.processors.detector import get_detector_from_event_data + from sentry.workflow_engine.processors.detector import get_preferred_detector - detector = get_detector_from_event_data(event_data) + detector = get_preferred_detector(event_data) with metrics.timer( "workflow_engine.action.trigger.execution_time", diff --git a/src/sentry/workflow_engine/processors/detector.py b/src/sentry/workflow_engine/processors/detector.py index 9eb296d74743b0..ee35fc3d805796 100644 --- a/src/sentry/workflow_engine/processors/detector.py +++ b/src/sentry/workflow_engine/processors/detector.py @@ -226,6 +226,10 @@ def ensure_default_detectors(project: Project) -> tuple[Detector, Detector]: ) +class SpecificDetectorNotFound(Exception): + pass + + @dataclass(frozen=True) class EventDetectors: issue_stream_detector: Detector | None = None @@ -258,8 +262,9 @@ def detectors(self) -> set[Detector]: return {d for d in [self.issue_stream_detector, self.event_detector] if d is not None} -def get_detectors_for_event( - event_data: WorkflowEventData, detector: Detector | None = None +def get_detectors_for_event_data( + event_data: WorkflowEventData, + detector: Detector | None = None, ) -> EventDetectors | None: """ Returns a list of detectors for the event to process workflows for. @@ -267,8 +272,7 @@ def get_detectors_for_event( We always return at least the issue stream detector, unless excluded via option. If the event has an associated detector, we return it too. 
- If the detector is passed in, use that instead of searching for a detector. - This is used for Activity updates. + We expect a detector to be passed in for Activity updates. """ issue_stream_detector: Detector | None = None exclude_issue_stream = options.get("workflow_engine.exclude_issue_stream_detector") @@ -288,10 +292,10 @@ }, ) - if detector is None: + if detector is None and isinstance(event_data.event, GroupEvent): try: - detector = get_detector_by_event(event_data) - except Detector.DoesNotExist: + detector = _get_detector_for_event(event_data.event) + except (Detector.DoesNotExist, SpecificDetectorNotFound): pass try: @@ -300,24 +304,21 @@ return None -def get_detector_by_event(event_data: WorkflowEventData) -> Detector: - evt = event_data.event - - if not isinstance(evt, GroupEvent): - raise TypeError( - "Can only use `get_detector_by_event` for a new event, Activity updates are not supported" - ) +def _get_detector_for_event(event: GroupEvent) -> Detector: + """ + Returns the detector associated with the given GroupEvent. 
+ """ - issue_occurrence = evt.occurrence + issue_occurrence = event.occurrence try: - if issue_occurrence is None or evt.group.issue_type.detector_settings is None: - # if there are no detector settings, default to the error detector - detector = Detector.get_error_detector_for_project(evt.project_id) + if issue_occurrence is not None: + detector_id = issue_occurrence.evidence_data.get("detector_id") + if detector_id is None: + raise SpecificDetectorNotFound + detector = Detector.objects.get(id=detector_id) else: - detector = Detector.objects.get( - id=issue_occurrence.evidence_data.get("detector_id", None) - ) + detector = Detector.get_error_detector_for_project(event.group.project_id) except Detector.DoesNotExist: metrics.incr("workflow_engine.detectors.error") detector_id = ( @@ -327,8 +328,8 @@ def get_detector_by_event(event_data: WorkflowEventData) -> Detector: logger.exception( "Detector not found for event", extra={ - "event_id": evt.event_id, - "group_id": evt.group_id, + "event_id": event.event_id, + "group_id": event.group_id, "detector_id": detector_id, }, ) @@ -337,7 +338,7 @@ def get_detector_by_event(event_data: WorkflowEventData) -> Detector: return detector -def get_detector_by_group(group: Group) -> Detector: +def _get_detector_for_group(group: Group) -> Detector: """ Returns Detector associated with this group, either based on DetectorGroup, (project, type), or if those fail, returns the Issue Stream detector. 
@@ -360,12 +361,18 @@ def get_detector_by_group(group: Group) -> Detector: return Detector.objects.get(project_id=group.project_id, type=IssueStreamGroupType.slug) -def get_detector_from_event_data(event_data: WorkflowEventData) -> Detector: +def get_preferred_detector(event_data: WorkflowEventData) -> Detector: + """ + Attempts to fetch the specific detector based on the GroupEvent or Activity in event_data + """ try: if isinstance(event_data.event, GroupEvent): - return get_detector_by_event(event_data) + event_detectors = get_detectors_for_event_data(event_data) + if event_detectors is None: + raise Detector.DoesNotExist("No detectors found for event") + return event_detectors.preferred_detector elif isinstance(event_data.event, Activity): - return get_detector_by_group(event_data.group) + return _get_detector_for_group(event_data.group) else: raise TypeError(f"Cannot determine the detector from {type(event_data.event)}.") except Detector.DoesNotExist: diff --git a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py index f0695206c46421..5d3eaf42c10944 100644 --- a/src/sentry/workflow_engine/processors/workflow.py +++ b/src/sentry/workflow_engine/processors/workflow.py @@ -1,4 +1,4 @@ -from collections.abc import Collection, Sequence +from collections.abc import Sequence from dataclasses import asdict, replace from datetime import datetime from enum import StrEnum @@ -30,7 +30,7 @@ get_data_conditions_for_group, process_data_condition_group, ) -from sentry.workflow_engine.processors.detector import get_detectors_for_event +from sentry.workflow_engine.processors.detector import get_detectors_for_event_data from sentry.workflow_engine.processors.workflow_fire_history import create_workflow_fire_histories from sentry.workflow_engine.types import ( WorkflowEvaluation, @@ -380,7 +380,7 @@ def get_environment_by_event(event_data: WorkflowEventData) -> Environment | Non @scopedstats.timer() def 
_get_associated_workflows( - detectors: Collection[Detector], environment: Environment | None, event_data: WorkflowEventData + detector: Detector, environment: Environment | None, event_data: WorkflowEventData ) -> set[Workflow]: """ This is a wrapper method to get the workflows associated with a detector and environment. @@ -394,7 +394,7 @@ def _get_associated_workflows( workflows = set( Workflow.objects.filter( environment_filter, - detectorworkflow__detector_id__in=[detector.id for detector in detectors], + detectorworkflow__detector_id=detector.id, enabled=True, ) .select_related("environment") @@ -421,7 +421,7 @@ def _get_associated_workflows( "event_data": asdict(event_data), "event_environment_id": environment.id if environment else None, "workflows": [workflow.id for workflow in workflows], - "detector_types": [detector.type for detector in detectors], + "detector_type": detector.type, }, ) @@ -454,7 +454,7 @@ def process_workflows( ) try: - event_detectors = get_detectors_for_event(event_data, detector) + event_detectors = get_detectors_for_event_data(event_data, detector) if not event_detectors: raise Detector.DoesNotExist("No Detectors associated with the issue were found") @@ -500,7 +500,9 @@ def process_workflows( if features.has("organizations:workflow-engine-process-workflows-logs", organization): log_context.set_verbose(True) - workflows = _get_associated_workflows(event_detectors.detectors, environment, event_data) + workflows = _get_associated_workflows( + event_detectors.preferred_detector, environment, event_data + ) workflow_evaluation_data.workflows = workflows if not workflows: diff --git a/src/sentry/workflow_engine/tasks/actions.py b/src/sentry/workflow_engine/tasks/actions.py index b00386e4bf13b6..de7c3c180db6a5 100644 --- a/src/sentry/workflow_engine/tasks/actions.py +++ b/src/sentry/workflow_engine/tasks/actions.py @@ -91,7 +91,7 @@ def trigger_action( import uuid from sentry.notifications.notification_action.utils import 
should_fire_workflow_actions - from sentry.workflow_engine.processors.detector import get_detector_from_event_data + from sentry.workflow_engine.processors.detector import get_preferred_detector # Generate UUID if not provided (handles version skew at task boundary) if notification_uuid is None: @@ -129,7 +129,7 @@ def trigger_action( ) raise ValueError("Exactly one of event_id or activity_id must be provided") - detector = get_detector_from_event_data(event_data) + detector = get_preferred_detector(event_data) metrics.incr( "workflow_engine.tasks.trigger_action_task_started", diff --git a/tests/sentry/workflow_engine/models/test_action.py b/tests/sentry/workflow_engine/models/test_action.py index bbc05731f8e7e0..0310d88c23f3dc 100644 --- a/tests/sentry/workflow_engine/models/test_action.py +++ b/tests/sentry/workflow_engine/models/test_action.py @@ -71,7 +71,7 @@ def test_get_handler_unregistered_type(self) -> None: # Verify the registry was queried with the correct action type mock_get.assert_called_once_with(Action.Type.SLACK) - @patch("sentry.workflow_engine.processors.detector.get_detector_from_event_data") + @patch("sentry.workflow_engine.processors.detector.get_preferred_detector") def test_trigger_calls_handler_execute(self, mock_get_detector: MagicMock) -> None: mock_handler = Mock(spec=ActionHandler) mock_detector = Mock(spec=Detector, type="error") @@ -88,7 +88,7 @@ def test_trigger_calls_handler_execute(self, mock_get_detector: MagicMock) -> No assert invocation.action == self.action assert invocation.detector == mock_detector - @patch("sentry.workflow_engine.processors.detector.get_detector_from_event_data") + @patch("sentry.workflow_engine.processors.detector.get_preferred_detector") def test_trigger_with_failing_handler(self, mock_get_detector: MagicMock) -> None: mock_handler = Mock(spec=ActionHandler) mock_handler.execute.side_effect = Exception("Handler failed") @@ -99,7 +99,7 @@ def test_trigger_with_failing_handler(self, mock_get_detector: 
MagicMock) -> Non self.action.trigger(self.mock_event, notification_uuid=str(uuid.uuid4())) @patch("sentry.utils.metrics.incr") - @patch("sentry.workflow_engine.processors.detector.get_detector_from_event_data") + @patch("sentry.workflow_engine.processors.detector.get_preferred_detector") def test_trigger_metrics(self, mock_get_detector: MagicMock, mock_incr: MagicMock) -> None: mock_handler = Mock(spec=ActionHandler) mock_get_detector.return_value = Mock(spec=Detector, type="error") diff --git a/tests/sentry/workflow_engine/processors/test_detector.py b/tests/sentry/workflow_engine/processors/test_detector.py index 72345176eefa91..c9c7cd6bb2a702 100644 --- a/tests/sentry/workflow_engine/processors/test_detector.py +++ b/tests/sentry/workflow_engine/processors/test_detector.py @@ -2,7 +2,7 @@ import uuid from typing import Any from unittest import mock -from unittest.mock import MagicMock, call +from unittest.mock import MagicMock, call, patch import pytest from django.utils import timezone @@ -30,8 +30,8 @@ from sentry.workflow_engine.processors.detector import ( associate_new_group_with_detector, ensure_association_with_detector, - get_detector_by_event, - get_detectors_for_event, + get_detectors_for_event_data, + get_preferred_detector, process_detectors, ) from sentry.workflow_engine.types import ( @@ -838,7 +838,7 @@ class TestGetDetectorsForEvent(TestCase): def setUp(self) -> None: super().setUp() self.project = self.create_project() - self.group = self.create_group(project=self.project) + self.group = self.create_group(project=self.project, type=MetricIssue.type_id) self.detector = self.create_detector(project=self.project, type=MetricIssue.slug) self.error_detector = self.create_detector(project=self.project, type=ErrorGroupType.slug) self.issue_stream_detector = self.create_detector( @@ -871,7 +871,7 @@ def test_activity_update(self) -> None: user_id=self.user.id, ) event_data = WorkflowEventData(event=activity, group=self.group) - result = 
get_detectors_for_event(event_data, detector=self.detector) + result = get_detectors_for_event_data(event_data, detector=self.detector) assert result is not None assert result.preferred_detector == self.detector assert result.detectors == {self.issue_stream_detector, self.detector} @@ -879,7 +879,7 @@ def test_activity_update(self) -> None: @override_options({"workflow_engine.exclude_issue_stream_detector": False}) def test_error_event(self) -> None: event_data = WorkflowEventData(event=self.group_event, group=self.group) - result = get_detectors_for_event(event_data) + result = get_detectors_for_event_data(event_data) assert result is not None assert result.preferred_detector == self.error_detector assert result.detectors == {self.issue_stream_detector, self.error_detector} @@ -889,13 +889,14 @@ def test_metric_issue(self) -> None: self.group_event.occurrence = self.occurrence event_data = WorkflowEventData(event=self.group_event, group=self.group) - result = get_detectors_for_event(event_data) + result = get_detectors_for_event_data(event_data) assert result is not None assert result.preferred_detector == self.detector assert result.detectors == {self.issue_stream_detector, self.detector} @override_options({"workflow_engine.exclude_issue_stream_detector": False}) - def test_event_without_detector(self) -> None: + @patch("sentry.workflow_engine.processors.detector.logger") + def test_event_without_detector(self, mock_logger: MagicMock) -> None: occurrence = IssueOccurrence( id=uuid.uuid4().hex, project_id=1, @@ -914,23 +915,55 @@ def test_event_without_detector(self) -> None: self.group_event.occurrence = occurrence event_data = WorkflowEventData(event=self.group_event, group=self.group) - result = get_detectors_for_event(event_data) + result = get_detectors_for_event_data(event_data) assert result is not None assert result.preferred_detector == self.issue_stream_detector assert result.detectors == {self.issue_stream_detector} + # assert no exception is logged + 
mock_logger.exception.assert_not_called() + + @override_options({"workflow_engine.exclude_issue_stream_detector": False}) + @patch("sentry.workflow_engine.processors.detector.logger") + def test_event_missing_detector(self, mock_logger: MagicMock) -> None: + occurrence = IssueOccurrence( + id=uuid.uuid4().hex, + project_id=1, + event_id="asdf", + fingerprint=["asdf"], + issue_title="title", + subtitle="subtitle", + resource_id=None, + evidence_data={"detector_id": 12345},  # missing detector + evidence_display=[], + type=PerformanceNPlusOneAPICallsGroupType, + detection_time=timezone.now(), + level="error", + culprit="", + ) + self.group_event.occurrence = occurrence + + event_data = WorkflowEventData(event=self.group_event, group=self.group) + result = get_detectors_for_event_data(event_data) + assert result is not None + assert result.preferred_detector == self.issue_stream_detector + assert result.detectors == {self.issue_stream_detector} + + # assert an exception is logged for the missing detector + mock_logger.exception.assert_called_once() + def test_no_detectors(self) -> None: self.issue_stream_detector.delete() self.error_detector.delete() event_data = WorkflowEventData(event=self.group_event, group=self.group) - result = get_detectors_for_event(event_data) + result = get_detectors_for_event_data(event_data) assert result is None def test_exclude_issue_stream_detector(self) -> None: event_data = WorkflowEventData(event=self.group_event, group=self.group) # Default behavior: issue stream detector is included - result = get_detectors_for_event(event_data) + result = get_detectors_for_event_data(event_data) assert result is not None assert result.issue_stream_detector == self.issue_stream_detector @@ -939,7 +972,7 @@ def test_exclude_issue_stream_detector(self) -> None: # With flag enabled: issue stream detector is excluded with override_options({"workflow_engine.exclude_issue_stream_detector": True}): - result_excluded = 
get_detectors_for_event_data(event_data) assert result_excluded is not None assert result_excluded.issue_stream_detector is None @@ -947,10 +980,10 @@ def test_exclude_issue_stream_detector(self) -> None: assert result_excluded.preferred_detector == self.error_detector -class TestGetDetectorByEvent(TestCase): +class TestGetPreferredDetector(TestCase): def setUp(self) -> None: super().setUp() - self.group = self.create_group(project=self.project) + self.group = self.create_group(project=self.project, type=MetricIssue.type_id) self.detector = self.create_detector(project=self.project, type=MetricIssue.slug) self.error_detector = self.create_detector(project=self.project, type=ErrorGroupType.slug) self.event = self.store_event(project_id=self.project.id, data={}) @@ -976,32 +1009,35 @@ def test_with_occurrence(self) -> None: event_data = WorkflowEventData(event=group_event, group=self.group) - result = get_detector_by_event(event_data) + result = get_preferred_detector(event_data) assert result == self.detector def test_without_occurrence(self) -> None: + self.group.type = ErrorGroupType.type_id group_event = GroupEvent.from_event(self.event, self.group) group_event.occurrence = None event_data = WorkflowEventData(event=group_event, group=self.group) - result = get_detector_by_event(event_data) + result = get_preferred_detector(event_data) assert result == self.error_detector - def test_activity_not_supported(self) -> None: + def test_activity(self) -> None: activity = Activity.objects.create( project=self.project, group=self.group, type=ActivityType.SET_RESOLVED.value, user_id=self.user.id, ) + DetectorGroup.objects.create(detector=self.detector, group=self.group) event_data = WorkflowEventData(event=activity, group=self.group) - with pytest.raises(TypeError): - get_detector_by_event(event_data) + result = get_preferred_detector(event_data) + + assert result == self.detector def test_no_detector_id(self) -> None: occurrence = IssueOccurrence( @@ -1026,9 +1062,9 @@ 
def test_no_detector_id(self) -> None: event_data = WorkflowEventData(event=group_event, group=self.group) with pytest.raises(Detector.DoesNotExist): - get_detector_by_event(event_data) + get_preferred_detector(event_data) - def test_defaults_to_error_detector(self) -> None: + def test_errors_on_no_detector(self) -> None: occurrence = IssueOccurrence( id=uuid.uuid4().hex, project_id=self.project.id, @@ -1051,9 +1087,8 @@ def test_defaults_to_error_detector(self) -> None: event_data = WorkflowEventData(event=group_event, group=self.group) - result = get_detector_by_event(event_data) - - assert result == self.error_detector + with pytest.raises(Detector.DoesNotExist): + get_preferred_detector(event_data) class TestAssociateNewGroupWithDetector(TestCase): diff --git a/tests/sentry/workflow_engine/processors/test_workflow.py b/tests/sentry/workflow_engine/processors/test_workflow.py index ed5779bf8b4cb8..551600ec838fde 100644 --- a/tests/sentry/workflow_engine/processors/test_workflow.py +++ b/tests/sentry/workflow_engine/processors/test_workflow.py @@ -6,6 +6,7 @@ from sentry.eventstream.base import GroupState from sentry.grouping.grouptype import ErrorGroupType +from sentry.incidents.grouptype import MetricIssue from sentry.models.activity import Activity from sentry.models.environment import Environment from sentry.services.eventstore.models import GroupEvent @@ -63,7 +64,9 @@ def setUp(self) -> None: ) ) - self.group, self.event, self.group_event = self.create_group_event() + self.group, self.event, self.group_event = self.create_group_event( + group_type_id=MetricIssue.type_id + ) self.event_data = WorkflowEventData( event=self.group_event, group=self.group, @@ -304,6 +307,7 @@ def test_same_environment_only(self) -> None: def test_issue_occurrence_event(self) -> None: issue_occurrence = self.build_occurrence(evidence_data={"detector_id": self.detector.id}) self.group_event.occurrence = issue_occurrence + self.group_event.group.type = issue_occurrence.type.type_id 
result = process_workflows(self.batch_client, self.event_data, FROZEN_TIME) assert result.data.triggered_workflows == {self.workflow} @@ -337,11 +341,10 @@ def test_no_detector(self, mock_logger: MagicMock, mock_incr: MagicMock) -> None mock_incr.assert_called_with("workflow_engine.detectors.error") # called twice mock_logger.exception.assert_called_with( - "Detector not found for event", + "Issue stream detector not found for event", extra={ - "event_id": self.event.event_id, + "project_id": self.group.project_id, "group_id": self.group_event.group_id, - "detector_id": None, }, ) # exception is called twice for both missing detectors @@ -464,8 +467,8 @@ def test_uses_issue_stream_workflows(self) -> None: assert len(result.data.triggered_actions) == 0 @override_options({"workflow_engine.exclude_issue_stream_detector": False}) - def test_multiple_detectors(self) -> None: - issue_stream_workflow, issue_stream_detector, _, _ = self.create_detector_and_workflow( + def test_multiple_detectors__preferred(self) -> None: + _, issue_stream_detector, _, _ = self.create_detector_and_workflow( name_prefix="issue_stream", workflow_triggers=self.create_data_condition_group(), detector_type=IssueStreamGroupType.slug, @@ -476,7 +479,7 @@ def test_multiple_detectors(self) -> None: ) result = process_workflows(self.batch_client, self.event_data, FROZEN_TIME) - assert result.data.triggered_workflows == {self.error_workflow, issue_stream_workflow} + assert result.data.triggered_workflows == {self.error_workflow} assert result.data.associated_detector == self.error_detector @@ -670,6 +673,7 @@ class TestWorkflowEnqueuing(BaseWorkflowTest): buffer_timestamp = (FROZEN_TIME + timedelta(seconds=1)).timestamp() def setUp(self) -> None: + self.project = self.create_project(create_default_detectors=True) ( self.workflow, self.detector, @@ -679,7 +683,7 @@ def setUp(self) -> None: occurrence = self.build_occurrence(evidence_data={"detector_id": self.detector.id}) self.group, self.event, 
self.group_event = self.create_group_event( - occurrence=occurrence, + occurrence=occurrence, group_type_id=MetricIssue.type_id ) self.event_data = WorkflowEventData(event=self.group_event, group=self.group) self.action_group, _ = self.create_workflow_action(self.workflow) @@ -877,17 +881,23 @@ def setUp(self) -> None: ) = self.create_detector_and_workflow() self.action_group, self.action = self.create_workflow_action(workflow=self.workflow) + self.issue_stream_detector = self.create_detector( + project=self.project, + type=IssueStreamGroupType.slug, + ) self.group, self.event, self.group_event = self.create_group_event( - occurrence=self.build_occurrence(evidence_data={"detector_id": self.detector.id}) + occurrence=self.build_occurrence(evidence_data={"detector_id": self.detector.id}), + group_type_id=MetricIssue.type_id, ) self.event_data = WorkflowEventData(event=self.group_event, group=self.group) self.batch_client = DelayedWorkflowClient() @patch("sentry.utils.metrics.incr") @patch("sentry.workflow_engine.tasks.utils.IssueOccurrence.fetch") + @patch("sentry.workflow_engine.processors.action.Action.trigger") def test_metrics_issue_dual_processing_metrics( - self, mock_fetch: MagicMock, mock_incr: MagicMock + self, mock_trigger: MagicMock, mock_fetch: MagicMock, mock_incr: MagicMock ) -> None: mock_fetch.return_value = self.group_event.occurrence @@ -901,6 +911,7 @@ def test_metrics_issue_dual_processing_metrics( }, sample_rate=1.0, ) + mock_trigger.assert_called_once() def test_basic__no_filter(self) -> None: triggered_action_filters, _ = evaluate_workflows_action_filters( diff --git a/tests/sentry/workflow_engine/test_integration.py b/tests/sentry/workflow_engine/test_integration.py index 39ed0d2ea51107..667aa1411104a3 100644 --- a/tests/sentry/workflow_engine/test_integration.py +++ b/tests/sentry/workflow_engine/test_integration.py @@ -547,10 +547,9 @@ def test_service_hooks_integration( ) -class 
TestWorkflowEngineIntegrationFromFeedbackPostProcess(BaseWorkflowIntegrationTest): - @override_options({"workflow_engine.issue_alert.group.type_id.rollout": [6001]}) - @with_feature("organizations:workflow-engine-single-process-workflows") - def test_workflow_engine(self) -> None: +class TestWorkflowEngineIntegrationPostProcessRollout(BaseWorkflowIntegrationTest): + def setUp(self) -> None: + self.project = self.create_project(create_default_detectors=True) occurrence_data = self.build_occurrence_data( type=FeedbackGroup.type_id, event_id=self.event.event_id, @@ -567,11 +566,65 @@ def test_workflow_engine(self) -> None: self.occurrence, group_info = save_issue_occurrence(occurrence_data, self.event) assert group_info is not None - self.group = Group.objects.get(grouphash__hash=self.occurrence.fingerprint[0]) - assert self.group.type == FeedbackGroup.type_id + self.feedback_group = Group.objects.get(id=group_info.group.id) + assert self.feedback_group.type == FeedbackGroup.type_id + @override_options( + { + "workflow_engine.issue_alert.group.type_id.ga": [1], + "workflow_engine.exclude_issue_stream_detector": True, + } + ) + def test_errors_only_rollout(self) -> None: with mock.patch( "sentry.workflow_engine.tasks.workflows.process_workflows_event.apply_async" ) as mock_process_workflow: - self.call_post_process_group(self.group.id) - mock_process_workflow.assert_called_once() + self.call_post_process_group(self.feedback_group.id) + assert not mock_process_workflow.called + + @override_options( + { + "workflow_engine.issue_alert.group.type_id.ga": [1], + "workflow_engine.exclude_issue_stream_detector": False, + } + ) + def test_issue_stream_errors_only(self) -> None: + with mock.patch( + "sentry.workflow_engine.tasks.workflows.process_workflows_event.apply_async" + ) as mock_process_workflow: + self.call_post_process_group(self.feedback_group.id) + assert not mock_process_workflow.called # still not called because not in rollout FF + + @override_options( + { + 
"workflow_engine.issue_alert.group.type_id.ga": [1], + "workflow_engine.exclude_issue_stream_detector": False, + "workflow_engine.issue_alert.group.type_id.rollout": [6001], + } + ) + @with_feature( + { + "organizations:workflow-engine-single-process-workflows": True, + "organizations:workflow-engine-log-evaluations": True, + } + ) + @patch("sentry.workflow_engine.tasks.workflows.logger") + def test_rollout_new_issue_type(self, mock_logger: MagicMock) -> None: + with self.tasks(): + self.call_post_process_group(self.feedback_group.id) + + # log and exit because no workflows, but we fetched issue stream detector successfully + mock_logger.info.assert_any_call( + "workflow_engine.process_workflows.evaluation.workflows.not_triggered", + extra={ + "event_id": self.event.event_id, + "group_id": self.feedback_group.id, + "detection_type": "issue_stream", + "workflow_ids": None, + "triggered_workflow_ids": [], + "delayed_conditions": None, + "action_filter_group_ids": [], + "triggered_action_ids": [], + "debug_msg": "No workflows are associated with the detector in the event", + }, + )