more integration tests
Dav1dde committed Jun 4, 2024
1 parent a1db23a · commit ad01ebe
Showing 4 changed files with 38 additions and 41 deletions.
relay-server/src/metrics_extraction/transactions/mod.rs (8 changes: 1 addition & 7 deletions)
@@ -394,18 +394,12 @@ impl TransactionExtractor<'_> {
         // Duration
         let duration = relay_common::time::chrono_to_positive_millis(end - start);
         if let Some(duration) = FiniteF64::new(duration) {
-            let has_profile = if self.config.version >= 3 {
-                false
-            } else {
-                self.has_profile
-            };
-
             metrics.project_metrics.push(
                 TransactionMetric::Duration {
                     unit: DurationUnit::MilliSecond,
                     value: duration,
                     tags: TransactionDurationTags {
-                        has_profile,
+                        has_profile: false,
                         universal_tags: tags.clone(),
                     },
                 }
tests/integration/test_metrics.py (6 changes: 5 additions & 1 deletion)
@@ -803,7 +803,7 @@ def test_transaction_metrics(
     )

     if extract_metrics == "corrupted":
-        config["transactionMetrics"] = (TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION + 1,)
+        config["transactionMetrics"] = TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION + 1

     elif extract_metrics:
         config["transactionMetrics"] = {
@@ -1171,6 +1171,10 @@ def test_transaction_metrics_not_extracted_on_unsupported_version(
     assert tx["transaction"] == "/organizations/:orgId/performance/:eventSlug/"
     tx_consumer.assert_empty()

+    if unsupported_version < TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION:
+        error = str(mini_sentry.test_failures.pop(0))
+        assert "Processing Relay outdated" in error
+
     metrics_consumer.assert_empty()


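The new assertion distinguishes two unsupported configurations: only a transactionMetrics version below TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION surfaces a "Processing Relay outdated" error in mini_sentry's recorded test failures, while a version above TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION produces no metrics and no error. A minimal sketch of that gate, with placeholder constant values (the real constants live in tests/integration/consts.py and their values are not shown in this diff):

    # Placeholder values, for illustration only; the real constants are imported
    # from tests/integration/consts.py.
    TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION = 1
    TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION = 6


    def classify_version(version: int) -> str:
        """Sketch of the behaviour the test asserts for a configured version."""
        if version < TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION:
            # Too old: no metrics are extracted and an "outdated" error is reported.
            return "outdated"
        if version > TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION:
            # Too new: no metrics are extracted, but no error is raised.
            return "too-new"
        return "supported"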
tests/integration/test_outcome.py (21 changes: 10 additions & 11 deletions)
@@ -7,7 +7,10 @@
 from datetime import UTC, datetime, timedelta, timezone
 from pathlib import Path
 from queue import Empty
-from .consts import TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION
+from .consts import (
+    TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION,
+    TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION,
+)

 import pytest
 import requests
@@ -1158,7 +1161,7 @@ def test_profile_outcomes(

     project_config.setdefault("features", []).append("organizations:profiling")
     project_config["transactionMetrics"] = {
-        "version": TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION,
+        "version": TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION,
     }
     project_config["sampling"] = {
         "version": 2,
@@ -1279,10 +1282,10 @@ def make_envelope(transaction_name):
     metrics = [
         m
         for m, _ in metrics_consumer.get_metrics()
-        if m["name"] == "d:transactions/duration@millisecond"
+        if m["name"] == "c:transactions/usage@none"
     ]
     assert len(metrics) == 2
-    assert all(metric["tags"]["has_profile"] == "true" for metric in metrics)
+    assert sum(metric["value"] for metric in metrics) == 2

     assert outcomes == expected_outcomes, outcomes

@@ -1392,7 +1395,7 @@ def test_profile_outcomes_too_many(

     project_config.setdefault("features", []).append("organizations:profiling")
     project_config["transactionMetrics"] = {
-        "version": TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION,
+        "version": TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION,
     }

     config = {
@@ -1457,9 +1460,7 @@ def make_envelope():

     # Make sure one profile will not be counted as accepted
     metrics = metrics_by_name(metrics_consumer, 4)
-    assert (
-        metrics["d:transactions/duration@millisecond"]["tags"]["has_profile"] == "true"
-    )
+    assert "has_profile" not in metrics["d:transactions/duration@millisecond"]["tags"]
     assert metrics["c:transactions/usage@none"]["tags"]["has_profile"] == "true"


@@ -1542,9 +1543,7 @@ def make_envelope():

     # Because invalid data is detected _after_ metrics extraction, there is still a metric:
     metrics = metrics_by_name(metrics_consumer, 4)
-    assert (
-        metrics["d:transactions/duration@millisecond"]["tags"]["has_profile"] == "true"
-    )
+    assert "has_profile" not in metrics["d:transactions/duration@millisecond"]["tags"]
     assert metrics["c:transactions/usage@none"]["tags"]["has_profile"] == "true"


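The updated assertions in this file rely on the shape difference between the two transaction metrics: c:transactions/usage@none is a counter whose bucket value is a single number, while d:transactions/duration@millisecond is a distribution whose bucket value is a list of observations, and after this commit only the usage counter carries the has_profile tag. Illustrative bucket shapes, trimmed to the fields these assertions touch (the duration observation value is a placeholder):

    # Roughly what one accepted transaction yields after this change.
    usage_bucket = {
        "name": "c:transactions/usage@none",
        "type": "c",                      # counter: one numeric value per bucket
        "value": 1,
        "tags": {"has_profile": "true"},  # profile association is tagged here only
    }
    duration_bucket = {
        "name": "d:transactions/duration@millisecond",
        "type": "d",                      # distribution: a list of observations
        "value": [142.0],                 # placeholder observation
        "tags": {},                       # no has_profile tag on duration anymore
    }

    # With two accepted transactions, the updated check in test_profile_outcomes holds:
    assert sum(bucket["value"] for bucket in [usage_bucket, usage_bucket]) == 2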
tests/integration/test_store.py (44 changes: 22 additions & 22 deletions)
@@ -608,7 +608,7 @@ def send_buckets(buckets):
     assert len(produced_buckets) == metric_bucket_limit


-@pytest.mark.parametrize("violating_bucket", [[4.0, 5.0], [4.0, 5.0, 6.0]])
+@pytest.mark.parametrize("violating_bucket", [2, 3])
 def test_rate_limit_metrics_buckets(
     mini_sentry,
     relay_with_processing,
@@ -695,7 +695,7 @@ def send_buckets(buckets):
     send_buckets(
         [
             # Duration metric, subtract 3 from quota
-            make_bucket("d:transactions/duration@millisecond", "d", [1, 2, 3]),
+            make_bucket("c:transactions/usage@none", "c", 3),
         ],
     )
     send_buckets(
@@ -706,9 +706,9 @@
     )
     send_buckets(
         [
-            # Duration metric, subtract from quota. This bucket is still accepted, but the rest
+            # Usage metric, subtract from quota. This bucket is still accepted, but the rest
             # will be exceeded.
-            make_bucket("d:transactions/duration@millisecond", "d", violating_bucket),
+            make_bucket("c:transactions/usage@none", "c", violating_bucket),
         ],
     )
     send_buckets(
@@ -719,8 +719,8 @@
     )
     send_buckets(
         [
-            # Another three for duration, won't make it into kafka.
-            make_bucket("d:transactions/duration@millisecond", "d", [7, 8, 9]),
+            # Another three for usage, won't make it into kafka.
+            make_bucket("c:transactions/usage@none", "c", 3),
             # Session metrics are still accepted.
             make_bucket("d:sessions/session@user", "s", [1254]),
         ],
@@ -736,49 +736,49 @@ def send_buckets(buckets):

     assert produced_buckets == [
         {
-            "name": "d:sessions/duration@second",
+            "name": "c:transactions/usage@none",
             "org_id": 1,
-            "project_id": 42,
             "retention_days": 90,
+            "project_id": 42,
             "tags": {},
-            "type": "d",
-            "value": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
+            "type": "c",
+            "value": violating_bucket,
         },
         {
-            "name": "d:sessions/session@none",
+            "name": "c:transactions/usage@none",
             "org_id": 1,
             "retention_days": 90,
             "project_id": 42,
             "tags": {},
             "type": "c",
-            "value": 1.0,
+            "value": 3,
         },
         {
-            "name": "d:sessions/session@user",
+            "name": "d:sessions/duration@second",
             "org_id": 1,
-            "retention_days": 90,
             "project_id": 42,
+            "retention_days": 90,
             "tags": {},
-            "type": "s",
-            "value": [1254],
+            "type": "d",
+            "value": [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
         },
         {
-            "name": "d:transactions/duration@millisecond",
+            "name": "d:sessions/session@none",
             "org_id": 1,
             "retention_days": 90,
             "project_id": 42,
             "tags": {},
-            "type": "d",
-            "value": [1.0, 2.0, 3.0],
+            "type": "c",
+            "value": 1.0,
         },
         {
-            "name": "d:transactions/duration@millisecond",
+            "name": "d:sessions/session@user",
             "org_id": 1,
             "retention_days": 90,
             "project_id": 42,
             "tags": {},
-            "type": "d",
-            "value": violating_bucket,
+            "type": "s",
+            "value": [1254],
         },
         {
             "name": "d:transactions/measurements.lcp@millisecond",
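The rate-limit test builds its payloads with a make_bucket helper defined outside the changed hunks; the sketch below is an assumed reconstruction based only on how it is called here (metric name, bucket type, value), so every other field is illustrative. It also shows why the parametrized violating_bucket can shrink from a list to a plain number: counter buckets ("c") carry a numeric value, while distribution ("d") and set ("s") buckets carry a list.

    import time


    def make_bucket(name, type_, values):
        # Assumed sketch of the helper used above; only name, type and value are
        # taken from the diff, the remaining fields are illustrative.
        return {
            "timestamp": int(time.time()),
            "width": 1,
            "name": name,     # e.g. "c:transactions/usage@none"
            "type": type_,    # "c" = counter, "d" = distribution, "s" = set
            "value": values,  # a number for counters, a list for distributions and sets
            "tags": {},
        }


    # Counter bucket counting 3 transactions against the quota:
    make_bucket("c:transactions/usage@none", "c", 3)
    # Distribution bucket with three observations, as the old test sent:
    make_bucket("d:transactions/duration@millisecond", "d", [1, 2, 3])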
