diff --git a/src/sentry/api/endpoints/organization_events_anomalies.py b/src/sentry/api/endpoints/organization_events_anomalies.py index 33599c87d4a0f..ea1cc23c5c510 100644 --- a/src/sentry/api/endpoints/organization_events_anomalies.py +++ b/src/sentry/api/endpoints/organization_events_anomalies.py @@ -79,12 +79,16 @@ def post(self, request: Request, organization: Organization) -> Response: if project_id is None or not config or not historical_data or not current_data: return Response( - "Unable to get historical anomaly data: missing required argument(s) project, start, and/or end", + "Unable to get historical anomaly data: missing required argument(s) project_id, config, historical_data, and/or current_data", status=400, ) anomalies = get_historical_anomaly_data_from_seer_preview( - current_data, historical_data, project_id, config + current_data=current_data, + historical_data=historical_data, + project_id=project_id, + organization_id=organization.id, + config=config, ) # NOTE: returns None if there's a problem with the Seer response if anomalies is None: diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index a59b48007910f..ce77e69f07bb8 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -752,7 +752,7 @@ def get_anomaly_data_from_seer( "ad_config": anomaly_detection_config, "context": context, "response_data": response.data, - "reponse_code": response.status, + "response_code": response.status, }, ) return None @@ -766,7 +766,7 @@ def get_anomaly_data_from_seer( "ad_config": anomaly_detection_config, "context": context, "response_data": decoded_data, - "reponse_code": response.status, + "response_code": response.status, }, ) return None diff --git a/src/sentry/seer/anomaly_detection/get_historical_anomalies.py b/src/sentry/seer/anomaly_detection/get_historical_anomalies.py index 2d3fe4882f99b..3691c7758c9cc 100644 --- 
def handle_seer_error_responses(response, config, context, log_params):
    """
    Inspect a raw Seer anomaly-detection HTTP response and log any problem.

    Returns True when the response is unusable (HTTP error, undecodable body,
    invalid JSON, `success: false`, or an empty `timeseries`), False when the
    caller may safely parse and use the payload.

    `config` and `context` are currently unused here (the caller already folds
    them into `log_params`); they are kept for interface stability.
    """

    def log_statement(log_level, text, extra_data=None):
        # Merge per-branch extras into the shared log params before emitting.
        log_data = {**log_params}
        if extra_data:
            log_data.update(**extra_data)
        if log_level == "error":
            logger.error(text, extra=log_data)
        elif log_level == "warning":
            logger.warning(text, extra=log_data)
        elif log_level == "exception":
            # BUG FIX: the JSONDecodeError branch below passes "exception",
            # which previously matched no branch, so a malformed Seer JSON
            # response was silently dropped without any log line.
            logger.exception(text, extra=log_data)

    extra_response_data = {"response_data": response.data, "response_code": response.status}
    # NOTE: strictly greater-than — a 400 itself falls through to body parsing
    # (the endpoint tests rely on a 400-with-empty-body hitting the
    # AttributeError path below).
    if response.status > 400:
        log_statement(
            "error", "Error when hitting Seer detect anomalies endpoint", extra_response_data
        )
        return True

    try:
        decoded_data = response.data.decode("utf-8")
    except AttributeError:
        # response.data was None (or not bytes) — nothing to decode.
        extra_data = {**log_params, **extra_response_data}
        logger.exception("Failed to parse Seer anomaly detection response", extra=extra_data)
        return True

    try:
        results: DetectAnomaliesResponse = json.loads(decoded_data)
    except JSONDecodeError:
        extra_response_data["response_data"] = decoded_data
        log_statement(
            "exception", "Failed to parse Seer anomaly detection response", extra_response_data
        )
        return True

    if not results.get("success"):
        extra_data = {"message": results.get("message", "")}
        log_statement("error", "Error when hitting Seer detect anomalies endpoint", extra_data)
        return True

    if not results.get("timeseries"):
        extra_data = {
            "response_data": results.get("message"),
        }
        log_statement(
            "warning", "Seer anomaly detection response returned no potential anomalies", extra_data
        )
        return True
    return False


def get_historical_anomaly_data_from_seer_preview(
    current_data: list[TimeSeriesPoint],
    historical_data: list[TimeSeriesPoint],
    organization_id: int,
    project_id: int,
    config: AnomalyDetectionConfig,
) -> list | None:
    """
    Send current and historical timeseries data to Seer and return anomaly detection response on the current timeseries.
    Used for rendering the preview charts of anomaly detection alert rules.

    Returns:
        - [] when there is less than seven days of historical data (not enough to train on)
        - None when the Seer request times out or returns an unusable response
        - otherwise the "timeseries" list from Seer's response
    """
    # Check if historical data has at least seven days of data. Return early if not.
    MIN_DAYS = 7
    data_start_index, data_end_index = _get_start_and_end_indices(historical_data)
    if data_start_index == -1:
        # No non-empty data points at all.
        return []

    data_start_time = datetime.fromtimestamp(historical_data[data_start_index]["timestamp"])
    data_end_time = datetime.fromtimestamp(historical_data[data_end_index]["timestamp"])
    if data_end_time - data_start_time < timedelta(days=MIN_DAYS):
        return []

    # Send data to Seer
    context = DetectHistoricalAnomaliesContext(
        history=historical_data,
        current=current_data,
    )
    body = DetectHistoricalAnomaliesRequest(
        organization_id=organization_id,
        project_id=project_id,
        config=config,
        context=context,
    )
    extra_data = {
        "organization_id": organization_id,
        "project_id": project_id,
        "config": config,
        "context": context,
    }
    try:
        response = make_signed_seer_api_request(
            connection_pool=seer_anomaly_detection_connection_pool,
            path=SEER_ANOMALY_DETECTION_ENDPOINT_URL,
            body=json.dumps(body).encode("utf-8"),
        )
    except (TimeoutError, MaxRetryError):
        logger.warning("Timeout error when hitting anomaly detection endpoint", extra=extra_data)
        return None

    error = handle_seer_error_responses(response, config, context, extra_data)
    if error:
        return None

    # NOTE(review): the body is decoded/parsed a second time here (the error
    # handler already parsed it once) — acceptable for preview traffic, but a
    # candidate for returning the parsed payload from the helper.
    results: DetectAnomaliesResponse = json.loads(response.data.decode("utf-8"))
    return results.get("timeseries")
class DetectHistoricalAnomaliesContext(TypedDict):
    """Timeseries payload for a preview anomaly-detection request to Seer."""

    # `history` is the historical timeseries Seer trains on; `current` is the
    # timeseries that gets scored for anomalies.
    history: list[TimeSeriesPoint]
    current: list[TimeSeriesPoint]


class DetectHistoricalAnomaliesRequest(TypedDict):
    """Request body sent to Seer when previewing an anomaly detection alert rule."""

    organization_id: int
    project_id: int
    # Detection settings (sensitivity, direction, seasonality, time period).
    config: AnomalyDetectionConfig
    context: DetectHistoricalAnomaliesContext
@freeze_time()
class OrganizationEventsAnomaliesEndpointTest(APITestCase):
    """
    Tests for the organization events anomalies endpoint, which proxies
    preview timeseries data to Seer and returns its anomaly response.
    Seer itself is always mocked via the connection pool's `urlopen`.
    """

    endpoint = "sentry-api-0-organization-events-anomalies"

    method = "post"

    # Fixed (frozen) reference times: >= 7 days of history is required by the
    # endpoint, so the default fixture spans four weeks.
    four_weeks_ago = before_now(days=28).replace(hour=10, minute=0, second=0, microsecond=0)
    one_week_ago = before_now(days=7)

    config = AnomalyDetectionConfig(
        time_period=60,
        sensitivity=AlertRuleSensitivity.LOW.value,
        direction=translate_direction(AlertRuleThresholdType.ABOVE.value),
        expected_seasonality=AlertRuleSeasonality.AUTO.value,
    )
    historical_timestamp_1 = timestamp_format(four_weeks_ago)
    historical_timestamp_2 = timestamp_format(four_weeks_ago + timedelta(days=10))
    current_timestamp_1 = timestamp_format(one_week_ago)
    current_timestamp_2 = timestamp_format(one_week_ago + timedelta(minutes=10))
    # Raw request payload posted to the endpoint.
    data = {
        "project_id": 1,
        "config": config,
        "historical_data": [
            [historical_timestamp_1, {"count": 5}],
            [historical_timestamp_2, {"count": 7}],
        ],
        "current_data": [
            [current_timestamp_1, {"count": 2}],
            [current_timestamp_2, {"count": 3}],
        ],
    }

    # for logging — the context the endpoint is expected to include in its
    # `extra` when logging Seer failures (matches `data` above).
    context = DetectHistoricalAnomaliesContext(
        history=[
            TimeSeriesPoint(
                timestamp=historical_timestamp_1,
                value=5,
            ),
            TimeSeriesPoint(
                timestamp=historical_timestamp_2,
                value=7,
            ),
        ],
        current=[
            TimeSeriesPoint(
                timestamp=current_timestamp_1,
                value=2,
            ),
            TimeSeriesPoint(
                timestamp=current_timestamp_2,
                value=3,
            ),
        ],
    )

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    def test_simple(self, mock_seer_request):
        # Happy path: a successful Seer response is passed straight through.
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        seer_return_value = DetectAnomaliesResponse(
            success=True,
            message="",
            timeseries=[
                TimeSeriesPoint(
                    timestamp=self.current_timestamp_1,
                    value=2,
                    anomaly=Anomaly(anomaly_score=-0.1, anomaly_type="none"),
                ),
                TimeSeriesPoint(
                    timestamp=self.current_timestamp_2,
                    value=3,
                    anomaly=Anomaly(anomaly_score=-0.2, anomaly_type="none"),
                ),
            ],
        )
        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)

        with outbox_runner():
            resp = self.get_success_response(
                self.organization.slug, status_code=200, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        assert resp.data == seer_return_value["timeseries"]

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    def test_not_enough_historical_data(self, mock_seer_request):
        # Less than seven days of history: Seer must not be called at all.
        data = {
            "project_id": 1,
            "config": self.config,
            "historical_data": [
                [self.historical_timestamp_1, {"count": 5}],
            ],
            "current_data": [
                [self.current_timestamp_1, {"count": 2}],
                [self.current_timestamp_2, {"count": 3}],
            ],
        }
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_success_response(
                self.organization.slug, status_code=200, raw_data=orjson.dumps(data)
            )

        # we return the empty list early
        assert mock_seer_request.call_count == 0
        assert resp.data == []

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    @patch("sentry.seer.anomaly_detection.get_historical_anomalies.logger")
    def test_timeout_error(self, mock_logger, mock_seer_request):
        # Seer request timing out: warning log + 400 to the client.
        mock_seer_request.side_effect = TimeoutError
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_error_response(
                self.organization.slug, status_code=400, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        mock_logger.warning.assert_called_with(
            "Timeout error when hitting anomaly detection endpoint",
            extra={
                "organization_id": self.organization.id,
                "project_id": 1,
                "config": self.config,
                "context": self.context,
            },
        )
        assert resp.data == "Unable to get historical anomaly data"

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    @patch("sentry.seer.anomaly_detection.get_historical_anomalies.logger")
    def test_attribute_error(self, mock_logger, mock_seer_request):
        # A 400 with an empty body is undecodable -> logger.exception path.
        mock_seer_request.return_value = HTTPResponse(None, status=400)  # type:ignore[arg-type]
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_error_response(
                self.organization.slug, status_code=400, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        mock_logger.exception.assert_called_with(
            "Failed to parse Seer anomaly detection response",
            extra={
                "organization_id": self.organization.id,
                "project_id": 1,
                "config": self.config,
                "context": self.context,
                "response_data": None,
                "response_code": 400,
            },
        )
        assert resp.data == "Unable to get historical anomaly data"

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    @patch("sentry.seer.anomaly_detection.get_historical_anomalies.logger")
    def test_seer_error(self, mock_logger, mock_seer_request):
        # A 5xx from Seer: error log + 400 to the client.
        mock_seer_request.return_value = HTTPResponse("Bad stuff", status=500)
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_error_response(
                self.organization.slug, status_code=400, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        mock_logger.error.assert_called_with(
            "Error when hitting Seer detect anomalies endpoint",
            extra={
                "response_data": "Bad stuff",
                "response_code": 500,
                "organization_id": self.organization.id,
                "project_id": 1,
                "config": self.config,
                "context": self.context,
            },
        )
        assert resp.data == "Unable to get historical anomaly data"

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    @patch("sentry.seer.anomaly_detection.get_historical_anomalies.logger")
    def test_seer_fail_response(self, mock_logger, mock_seer_request):
        # HTTP 200 but success=False in the payload: error log + 400.
        mock_seer_request.return_value = HTTPResponse(
            orjson.dumps(
                {"success": False, "message": "I have revolted against my human overlords"}
            ),
            status=200,
        )
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_error_response(
                self.organization.slug, status_code=400, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        mock_logger.error.assert_called_with(
            "Error when hitting Seer detect anomalies endpoint",
            extra={
                "message": "I have revolted against my human overlords",
                "organization_id": self.organization.id,
                "project_id": 1,
                "config": self.config,
                "context": self.context,
            },
        )
        assert resp.data == "Unable to get historical anomaly data"

    @with_feature("organizations:anomaly-detection-alerts")
    @with_feature("organizations:incidents")
    @patch(
        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
    )
    @patch("sentry.seer.anomaly_detection.get_historical_anomalies.logger")
    def test_seer_no_anomalies(self, mock_logger, mock_seer_request):
        # success=True but an empty timeseries: warning log + 400.
        mock_seer_request.return_value = HTTPResponse(
            orjson.dumps({"success": True, "message": "moo deng is cute", "timeseries": []}),
            status=200,
        )
        self.create_team(organization=self.organization, members=[self.user])
        self.login_as(self.user)

        with outbox_runner():
            resp = self.get_error_response(
                self.organization.slug, status_code=400, raw_data=orjson.dumps(self.data)
            )

        assert mock_seer_request.call_count == 1
        mock_logger.warning.assert_called_with(
            "Seer anomaly detection response returned no potential anomalies",
            extra={
                "organization_id": self.organization.id,
                "project_id": 1,
                "response_data": "moo deng is cute",
                "config": self.config,
                "context": self.context,
            },
        )
        assert resp.data == "Unable to get historical anomaly data"
AlertRuleThresholdType, -) -from sentry.seer.anomaly_detection.types import AnomalyDetectionConfig -from sentry.seer.anomaly_detection.utils import translate_direction -from sentry.testutils.cases import APITestCase -from sentry.testutils.helpers.features import with_feature -from sentry.testutils.outbox import outbox_runner - - -class OrganizationEventsAnomaliesEndpointTest(APITestCase): - endpoint = "sentry-api-0-organization-events-anomalies" - - method = "post" - - @with_feature("organizations:anomaly-detection-alerts") - @with_feature("organizations:incidents") - def test_simple(self): - self.create_team(organization=self.organization, members=[self.user]) - self.login_as(self.user) - config = AnomalyDetectionConfig( - time_period=60, - sensitivity=AlertRuleSensitivity.LOW, - direction=translate_direction(AlertRuleThresholdType.ABOVE.value), - expected_seasonality=AlertRuleSeasonality.AUTO, - ) - data = { - "project_id": 1, - "config": config, - "current_data": [[1, {"count": 0.077881957}], [2, {"count": 0.075652768}]], - "historical_data": [[169, {"count": 0.048480431}], [170, {"count": 0.047910238}]], - } - - with outbox_runner(): - resp = self.get_success_response( - self.organization.slug, status_code=200, raw_data=orjson.dumps(data) - ) - - assert resp.data == [ - { - "anomaly": {"anomaly_score": -0.38810767243044786, "anomaly_type": "none"}, - "timestamp": 169, - "value": 0.048480431, - }, - { - "anomaly": {"anomaly_score": -0.3890542800124323, "anomaly_type": "none"}, - "timestamp": 170, - "value": 0.047910238, - }, - ]