Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/sentry/explore/translation/alerts_translation.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,13 @@ def rollback_detector_query_and_update_subscription_in_snuba(snuba_query: SnubaQ
if not snapshot:
return

# Skip rollback if user has already updated this alert/monitor
if snapshot.get("user_updated"):
logger.info(
"Skipping rollback for user-updated query", extra={"snuba_query_id": snuba_query.id}
)
return

detectors = data_source.detectors.all()

old_query_type, old_dataset, old_query, old_aggregate = _get_old_query_info(snuba_query)
Expand Down
15 changes: 15 additions & 0 deletions src/sentry/incidents/metric_issue_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,6 +324,9 @@ def update_data_source(
extrapolation_mode=extrapolation_mode,
)

# Mark that this detector's query was updated by a user
self._mark_query_as_user_updated(snuba_query)

def update_anomaly_detection(self, instance: Detector, validated_data: dict[str, Any]) -> bool:
"""
When data changes on a detector we may need to tell Seer to update or remove their data for the detector
Expand Down Expand Up @@ -414,3 +417,15 @@ def delete(self):
delete_data_in_seer_for_detector(detector)

super().delete()

def _mark_query_as_user_updated(self, snuba_query: SnubaQuery):
"""
Mark the snuba query as user-updated in the query_snapshot field.
This is used to skip automatic migrations for queries that users have already modified.
Only marks queries that already have a snapshot (i.e., were previously migrated).
"""
snuba_query.refresh_from_db()
if snuba_query.query_snapshot is None:
return
snuba_query.query_snapshot["user_updated"] = True
snuba_query.save()
15 changes: 15 additions & 0 deletions src/sentry/incidents/serializers/alert_rule.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,10 @@ def update(self, instance, validated_data):
except Exception:
sentry_sdk.capture_exception()
raise BadRequest(message="Error when updating alert rule")

# Mark that this alert was updated by a user
self._mark_query_as_user_updated(alert_rule.snuba_query)

return alert_rule

def _handle_triggers(self, alert_rule, triggers):
Expand Down Expand Up @@ -405,3 +409,14 @@ def _handle_triggers(self, alert_rule, triggers):
raise serializers.ValidationError(trigger_serializer.errors)
if channel_lookup_timeout_error:
raise channel_lookup_timeout_error

def _mark_query_as_user_updated(self, snuba_query):
"""
Mark the snuba query as user-updated in the query_snapshot field.
This is used to skip automatic migrations for queries that users have already modified.
Only marks queries that already have a snapshot (i.e., were previously migrated).
"""
if snuba_query.query_snapshot is None:
return
snuba_query.query_snapshot["user_updated"] = True
snuba_query.save()
234 changes: 22 additions & 212 deletions tests/sentry/explore/translation/test_alerts_translation.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ def test_translate_alert_rule_p95(self, mock_create_rpc) -> None:
assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
assert snuba_query.aggregate == "p95(span.duration)"
assert snuba_query.query == "(transaction.method:GET) AND is_transaction:1"
assert snuba_query.extrapolation_mode == ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED.value
assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value

event_types = list(
SnubaQueryEventType.objects.filter(snuba_query=snuba_query).values_list(
Expand Down Expand Up @@ -746,241 +746,51 @@ def test_rollback_anomaly_detection_alert(
assert event_types_arg == [SnubaQueryEventType.EventType.TRANSACTION]

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_sum_performance_metrics(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.PerformanceMetrics,
query="",
aggregate="sum(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.SERVER_WEIGHTED.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_sum_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

def test_rollback_skips_user_updated_query(self) -> None:
snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="sum(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_count_if_performance_metrics(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.PerformanceMetrics,
query="",
aggregate="count_if(transaction.duration,greater,100)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.SERVER_WEIGHTED.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_count_if_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="count_if(transaction.duration,greater,100)",
query="transaction.duration:>100",
aggregate="count()",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)
detector_data_condition_group = self.create_data_condition_group()

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
config={
"detection_type": AlertRuleDetectionType.STATIC.value,
},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_p50_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="p50(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)
data_source.detectors.set([detector])

data_source.detectors.add(detector)
original_dataset = snuba_query.dataset
snuba_query.dataset = Dataset.EventsAnalyticsPlatform.value
snuba_query.query_snapshot = {
"type": snuba_query.type,
"dataset": original_dataset,
"query": snuba_query.query,
"aggregate": snuba_query.aggregate,
"time_window": snuba_query.time_window,
"user_updated": True,
}
snuba_query.save()

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
rollback_detector_query_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED.value
assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
44 changes: 44 additions & 0 deletions tests/sentry/incidents/endpoints/test_serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -931,6 +931,50 @@ def test_count_aggregate_not_converted_for_non_upsampled_projects(
# Verify the mock was called with correct project IDs
mock_are_any_projects_error_upsampled.assert_called_once_with([self.project.id])

def test_update_marks_query_as_user_updated_when_snapshot_exists(self):
    """An update to an alert rule flags its previously-migrated query as user-updated."""
    rule = self.create_alert_rule()
    query = rule.snuba_query

    # Simulate a query that was already migrated: give it a snapshot.
    query.query_snapshot = {
        "type": query.type,
        "dataset": query.dataset,
        "query": query.query,
        "aggregate": query.aggregate,
    }
    query.save()

    update_params = dict(self.valid_params, name="Updated Alert Name")

    serializer = AlertRuleSerializer(
        context=self.context, instance=rule, data=update_params, partial=True
    )
    assert serializer.is_valid(), serializer.errors

    saved_rule = serializer.save()
    saved_rule.snuba_query.refresh_from_db()

    snapshot = saved_rule.snuba_query.query_snapshot
    assert snapshot is not None
    assert snapshot.get("user_updated") is True

def test_update_does_not_mark_user_updated_when_no_snapshot(self):
    """A query that was never migrated (no snapshot) stays snapshot-free on update."""
    rule = self.create_alert_rule()

    # Ensure the query has no migration snapshot at all.
    rule.snuba_query.query_snapshot = None
    rule.snuba_query.save()

    update_params = dict(self.valid_params, name="Updated Alert Name")

    serializer = AlertRuleSerializer(
        context=self.context, instance=rule, data=update_params, partial=True
    )
    assert serializer.is_valid(), serializer.errors

    saved_rule = serializer.save()
    saved_rule.snuba_query.refresh_from_db()

    assert saved_rule.snuba_query.query_snapshot is None


class TestAlertRuleTriggerSerializer(TestAlertRuleSerializerBase):
@cached_property
Expand Down
Loading
Loading