Skip to content

Commit

Permalink
Fix mypy errors and fix timeseries handling
Browse files Browse the repository at this point in the history
  • Loading branch information
aayush-se committed Nov 4, 2024
1 parent 5339865 commit 7ae6de5
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 7 deletions.
9 changes: 5 additions & 4 deletions src/seer/anomaly_detection/anomaly_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,17 +147,16 @@ def _online_detect(
history_mp=anomalies.matrix_profile,
window_size=anomalies.window_size,
history_flags=anomalies.flags,
anomaly_algo_data=anomalies.get_anomaly_algo_data(len(historic.timeseries.timestamps))[
-5: # TODO: This slice should be based on the time period
],
anomaly_algo_data=anomalies.get_anomaly_algo_data(len(historic.timeseries.timestamps)),
)
streamed_anomalies = stream_detector.detect(
convert_external_ts_to_internal(ts_external), config
)

# Get current point's anomaly data and track original flag
curr_algo_data = streamed_anomalies.get_anomaly_algo_data(len(ts_external))[0]
curr_algo_data["original_flag"] = streamed_anomalies.original_flags[-1]
if streamed_anomalies.original_flags and curr_algo_data is not None:
curr_algo_data["original_flag"] = streamed_anomalies.original_flags[-1]

# Save new data point
alert_data_accessor.save_timepoint(
Expand Down Expand Up @@ -240,6 +239,8 @@ def _combo_detect(
history_values=historic.values,
history_mp=anomalies.matrix_profile,
window_size=anomalies.window_size,
history_flags=anomalies.flags,
anomaly_algo_data=anomalies.get_anomaly_algo_data(len(historic.timestamps)),
)
streamed_anomalies = stream_detector.detect(
convert_external_ts_to_internal(ts_external), config
Expand Down
11 changes: 10 additions & 1 deletion src/seer/anomaly_detection/detectors/anomaly_detectors.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,11 @@ class MPStreamAnomalyDetector(AnomalyDetector):
arbitrary_types_allowed=True,
)

stream_history_size: dict[int, int] = Field(
default={5: 19, 15: 11, 30: 7, 60: 5},
description="History size for stream smoothing based on the function smooth_size = floor(43 / sqrt(time_period))",
)

@inject
@sentry_sdk.trace
def detect(
Expand Down Expand Up @@ -215,9 +220,13 @@ def detect(
if flags_and_scores is None:
raise ServerError("Failed to score the matrix profile distance")

anomaly_algo_data_to_use = self.anomaly_algo_data[
self.stream_history_size[config.time_period]
]

# The original flags are the flags of the previous points
past_original_flags = [
algo_data["original_flag"] for algo_data in self.anomaly_algo_data
algo_data["original_flag"] for algo_data in anomaly_algo_data_to_use
]

self.anomaly_algo_data.append({"original_flag": flags_and_scores.flags[-1]})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -149,5 +149,4 @@ def optimal_window_size(self, time_series: npt.NDArray[np.float64]) -> int:
sentry_sdk.set_tag(AnomalyDetectionTags.WINDOW_SEARCH_FAILED, 1)
return 3
sentry_sdk.set_tag(AnomalyDetectionTags.WINDOW_SEARCH_FAILED, 0)
# return window_size
return 10
return window_size

0 comments on commit 7ae6de5

Please sign in to comment.