Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.registry import condition_handler_registry
from sentry.workflow_engine.types import DataConditionHandler, WorkflowEventData
from sentry.workflow_engine.utils import log_context

logger = log_context.get_logger(__name__)


@condition_handler_registry.register(Condition.TAGGED_EVENT)
Expand Down Expand Up @@ -90,4 +93,17 @@ def evaluate_value(event_data: WorkflowEventData, comparison: Any) -> bool:
if k.lower() == key or tagstore.backend.get_standardized_key(k) == key
)

return match_values(group_values=tag_values, match_value=value, match_type=match)
result = match_values(group_values=tag_values, match_value=value, match_type=match)

logger.debug(
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is just in case the top-level logs don't show me everything; I want to be able to handle the user case(s) around this specific handler.

"workflow_engine.handlers.tagged_event_handler",
extra={
"evaluation_result": result,
"event": event,
"event_tags": event.tags,
"processed_values": tag_values,
"comparison_type": match,
},
)

return result
8 changes: 7 additions & 1 deletion src/sentry/workflow_engine/models/action.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import builtins
import logging
from enum import StrEnum
from typing import TYPE_CHECKING, ClassVar
from typing import TYPE_CHECKING, Any, ClassVar

from django.db import models
from django.db.models import Q
Expand Down Expand Up @@ -112,6 +112,12 @@ class Meta:
),
]

def get_snapshot(self) -> dict[str, Any]:
    """Return a minimal, log-friendly summary of this action (id and type)."""
    snapshot: dict[str, Any] = {}
    snapshot["id"] = self.id
    snapshot["type"] = self.type
    return snapshot

def get_handler(self) -> builtins.type[ActionHandler]:
action_type = Action.Type(self.type)
return action_handler_registry.get(action_type)
Expand Down
13 changes: 12 additions & 1 deletion src/sentry/workflow_engine/models/data_condition_group.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from enum import StrEnum
from typing import ClassVar, Self
from typing import Any, ClassVar, Self

from django.db import models

Expand Down Expand Up @@ -36,3 +36,14 @@ class Type(StrEnum):
max_length=200, choices=[(t.value, t.value) for t in Type], default=Type.ANY
)
organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE)

def get_snapshot(self) -> dict[str, Any]:
    """Summarize this condition group: id, logic type, and nested condition snapshots."""
    # NOTE: `hasattr` (not a truthiness check) is deliberate — a group with no
    # related manager at all yields an empty list rather than raising.
    condition_snapshots: list[dict[str, Any]] = []
    if hasattr(self, "conditions"):
        condition_snapshots = [related.get_snapshot() for related in self.conditions.all()]

    snapshot: dict[str, Any] = {
        "id": self.id,
        "logic_type": self.logic_type,
        "conditions": condition_snapshots,
    }
    return snapshot
12 changes: 12 additions & 0 deletions src/sentry/workflow_engine/models/detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,18 @@ def settings(self) -> DetectorSettings:

return settings

def get_snapshot(self) -> dict[str, Any]:
    """Return a log-friendly summary of this detector and its trigger conditions."""
    condition_group = self.workflow_condition_group
    return {
        "id": self.id,
        "enabled": self.enabled,
        "status": self.status,
        # A detector without a condition group logs None, not an empty dict.
        "trigger_conditions": condition_group.get_snapshot() if condition_group else None,
    }

def get_audit_log_data(self) -> dict[str, Any]:
    """Return the attributes recorded in audit-log entries for this model."""
    return dict(name=self.name)

Expand Down
19 changes: 18 additions & 1 deletion src/sentry/workflow_engine/models/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase):
"additionalProperties": False,
}

__repr__ = sane_repr("name", "organization_id")
__repr__ = sane_repr("organization_id")

class Meta:
app_label = "workflow_engine"
Expand All @@ -92,6 +92,23 @@ class Meta:
def get_audit_log_data(self) -> dict[str, Any]:
    """Return the attributes recorded in audit-log entries for this model."""
    return dict(name=self.name)

def get_snapshot(self) -> dict[str, Any]:
    """Return a log-friendly summary of this workflow and its trigger conditions."""
    trigger_group = self.when_condition_group
    triggers = trigger_group.get_snapshot() if trigger_group else None

    environment = self.environment
    environment_id = environment.id if environment else None

    return {
        "id": self.id,
        "enabled": self.enabled,
        "environment_id": environment_id,
        "status": self.status,
        "triggers": triggers,
    }

def evaluate_trigger_conditions(
self, event_data: WorkflowEventData, when_data_conditions: list[DataCondition] | None = None
) -> tuple[TriggerResult, list[DataCondition]]:
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/workflow_engine/processors/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -482,7 +482,7 @@ def process_workflows(
fire_actions,
)

workflow_evaluation_data = WorkflowEvaluationData(group_event=event_data.event)
workflow_evaluation_data = WorkflowEvaluationData(event=event_data.event)

try:
if detector is None and isinstance(event_data.event, GroupEvent):
Expand Down
43 changes: 40 additions & 3 deletions src/sentry/workflow_engine/types.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from dataclasses import asdict, dataclass, field
from dataclasses import dataclass, field
from enum import IntEnum, StrEnum
from logging import Logger
from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypedDict, TypeVar
Expand Down Expand Up @@ -89,13 +89,50 @@ class WorkflowEventData:

@dataclass
class WorkflowEvaluationData:
group_event: GroupEvent | Activity
event: GroupEvent | Activity
action_groups: set[DataConditionGroup] | None = None
workflows: set[Workflow] | None = None
triggered_actions: set[Action] | None = None
triggered_workflows: set[Workflow] | None = None
associated_detector: Detector | None = None

def get_snapshot(self) -> dict[str, Any]:
"""
This method will take the complex data structures, like models / list of models,
and turn them into the critical attributes of a model or lists of IDs.
"""

associated_detector = None
if self.associated_detector:
associated_detector = self.associated_detector.get_snapshot()

workflow_ids = None
if self.workflows:
workflow_ids = [workflow.id for workflow in self.workflows]

triggered_workflows = None
if self.triggered_workflows:
triggered_workflows = [workflow.get_snapshot() for workflow in self.triggered_workflows]

action_filter_conditions = None
if self.action_groups:
action_filter_conditions = [group.get_snapshot() for group in self.action_groups]

triggered_actions = None
if self.triggered_actions:
triggered_actions = [action.get_snapshot() for action in self.triggered_actions]

return {
"workflow_ids": workflow_ids,
"associated_detector": associated_detector,
"event": self.event,
"group": self.event.group,
"event_data": self.event.data,
"action_filter_conditions": action_filter_conditions,
"triggered_actions": triggered_actions,
"triggered_workflows": triggered_workflows,
}


@dataclass(frozen=True)
class WorkflowEvaluation:
Expand Down Expand Up @@ -134,7 +171,7 @@ def to_log(self, logger: Logger) -> None:
else:
log_str = f"{log_str}.actions.triggered"

logger.info(log_str, extra={**asdict(self.data), "debug_msg": self.msg})
logger.info(log_str, extra={**self.data.get_snapshot(), "debug_msg": self.msg})


class ConfigTransformer(ABC):
Expand Down
73 changes: 56 additions & 17 deletions tests/sentry/workflow_engine/test_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,13 +154,15 @@ def test_process_workflow_activity__no_workflows(self, mock_logger) -> None:
mock_logger.info.assert_called_once_with(
"workflow_engine.process_workflows.evaluation.workflows.not_triggered",
extra={
"debug_msg": "No workflows are associated with the detector in the event",
"group_event": self.activity,
"action_groups": None,
"workflow_ids": None,
"associated_detector": self.detector.get_snapshot(),
"event": self.activity,
"group": self.activity.group,
"event_data": self.activity.data,
"action_filter_conditions": None,
"triggered_actions": None,
"workflows": set(),
"triggered_workflows": None,
"associated_detector": self.detector,
"debug_msg": "No workflows are associated with the detector in the event",
},
)

Expand Down Expand Up @@ -199,13 +201,15 @@ def test_process_workflow_activity__workflows__no_actions(
mock_logger.info.assert_called_once_with(
"workflow_engine.process_workflows.evaluation.workflows.triggered",
extra={
"debug_msg": "No items were triggered or queued for slow evaluation",
"group_event": self.activity,
"action_groups": None,
"workflow_ids": [self.workflow.id],
"associated_detector": self.detector.get_snapshot(),
"event": self.activity,
"group": self.activity.group,
"event_data": self.activity.data,
"action_filter_conditions": None,
"triggered_actions": None,
"workflows": {self.workflow},
"triggered_workflows": set(), # from the mock
"associated_detector": self.detector,
"triggered_workflows": None,
"debug_msg": "No items were triggered or queued for slow evaluation",
},
)

Expand Down Expand Up @@ -241,16 +245,51 @@ def test_process_workflow_activity(
)

mock_filter_actions.assert_called_once_with({self.action_group}, expected_event_data)

@mock.patch("sentry.workflow_engine.processors.workflow.evaluate_workflow_triggers")
@mock.patch("sentry.workflow_engine.tasks.workflows.logger")
def test_process_workflow_activity__success_logs(
self, mock_logger, mock_evaluate_workflow_triggers
) -> None:
self.workflow = self.create_workflow(organization=self.organization)

# Add additional data to ensure logs work as expected
self.workflow.when_condition_group = self.create_data_condition_group()
self.create_data_condition(condition_group=self.workflow.when_condition_group)
self.workflow.save()

self.action_group = self.create_data_condition_group(logic_type="any-short")
self.action = self.create_action()
self.create_data_condition_group_action(
condition_group=self.action_group,
action=self.action,
)
self.create_workflow_data_condition_group(self.workflow, self.action_group)

self.create_detector_workflow(
detector=self.detector,
workflow=self.workflow,
)

mock_evaluate_workflow_triggers.return_value = ({self.workflow}, {})
process_workflow_activity(
activity_id=self.activity.id,
group_id=self.group.id,
detector_id=self.detector.id,
)

mock_logger.info.assert_called_once_with(
"workflow_engine.process_workflows.evaluation.actions.triggered",
extra={
"workflow_ids": [self.workflow.id],
"associated_detector": self.detector.get_snapshot(),
"event": self.activity,
"group": self.activity.group,
"event_data": self.activity.data,
"action_filter_conditions": [self.action_group.get_snapshot()],
"triggered_actions": [self.action.get_snapshot()],
"triggered_workflows": [self.workflow.get_snapshot()],
"debug_msg": None,
"group_event": self.activity,
"action_groups": {self.action_group},
"triggered_actions": set(),
"workflows": {self.workflow},
"triggered_workflows": {self.workflow},
"associated_detector": self.detector,
},
)

Expand Down
Loading