Skip to content

Commit

Permalink
ref(metrics): Drop support for tx metric extraction versions < 3
Browse files Browse the repository at this point in the history
  • Loading branch information
Dav1dde committed May 31, 2024
1 parent 703a2e1 commit d520124
Show file tree
Hide file tree
Showing 9 changed files with 59 additions and 216 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
- Limit metric name to 150 characters. ([#3628](https://github.com/getsentry/relay/pull/3628))
- Add validation of Kafka topics on startup. ([#3543](https://github.com/getsentry/relay/pull/3543))
- Send `attachment` data inline when possible. ([#3654](https://github.com/getsentry/relay/pull/3654))
- Drop support for transaction metrics extraction versions < 3. ([#3672](https://github.com/getsentry/relay/pull/3672))

## 24.5.0

Expand Down
11 changes: 5 additions & 6 deletions relay-dynamic-config/src/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,9 @@ pub struct CustomMeasurementConfig {
/// - 6: Bugfix to make transaction metrics extraction apply globally defined tag mappings.
const TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION: u16 = 6;

/// Minimum supported version of metrics extraction from transactions.
const TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION: u16 = 3;

/// Deprecated. Defines whether URL transactions should be considered low cardinality.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
Expand Down Expand Up @@ -167,12 +170,8 @@ impl TransactionMetricsConfig {

/// Returns `true` if metrics extraction is enabled and compatible with this Relay.
pub fn is_enabled(&self) -> bool {
self.version > 0 && self.version <= TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION
}

/// Returns `true` if usage should be tracked through a dedicated metric.
pub fn usage_metric(&self) -> bool {
self.version >= 3
self.version >= TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION
&& self.version <= TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION
}
}

Expand Down
97 changes: 8 additions & 89 deletions relay-server/src/metrics/minimal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ use relay_metrics::{
BucketMetadata, CounterType, MetricName, MetricNamespace, MetricResourceIdentifier, MetricType,
};
use serde::de::IgnoredAny;
use serde::{de, Deserialize, Deserializer};
use serde::Deserialize;

use crate::metrics::{BucketSummary, ExtractionMode, TrackableBucket};
use crate::metrics::{BucketSummary, TrackableBucket};

/// Bucket which parses only the minimally required information to implement [`TrackableBucket`].
///
Expand All @@ -30,18 +30,16 @@ impl TrackableBucket for MinimalTrackableBucket {
self.value.ty()
}

fn summary(&self, mode: ExtractionMode) -> BucketSummary {
fn summary(&self) -> BucketSummary {
let mri = match MetricResourceIdentifier::parse(self.name()) {
Ok(mri) => mri,
Err(_) => return BucketSummary::default(),
};

match mri.namespace {
MetricNamespace::Transactions => {
let usage = matches!(mode, ExtractionMode::Usage);
let count = match self.value {
MinimalValue::Counter(c) if usage && mri.name == "usage" => c.to_f64() as usize,
MinimalValue::Distribution(d) if !usage && mri.name == "duration" => d.0,
MinimalValue::Counter(c) if mri.name == "usage" => c.to_f64() as usize,
_ => 0,
};
let has_profile = matches!(mri.name.as_ref(), "usage" | "duration")
Expand All @@ -67,7 +65,7 @@ enum MinimalValue {
#[serde(rename = "c")]
Counter(CounterType),
#[serde(rename = "d")]
Distribution(SeqCount),
Distribution(IgnoredAny),
#[serde(rename = "s")]
Set(IgnoredAny),
#[serde(rename = "g")]
Expand All @@ -91,60 +89,13 @@ struct Tags {
has_profile: Option<IgnoredAny>,
}

/// Deserializes only the number of elements in a sequence, discarding every
/// individual item.
///
/// Useful when only the length of a (potentially large) array is of interest,
/// since no element is ever materialized.
#[derive(Clone, Copy, Debug, Default)]
struct SeqCount(usize);

impl<'de> Deserialize<'de> for SeqCount {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct CountVisitor;

        impl<'de> de::Visitor<'de> for CountVisitor {
            type Value = SeqCount;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a sequence")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: de::SeqAccess<'de>,
            {
                // Drain the sequence element by element, ignoring the payloads
                // and keeping only a running tally.
                let mut total = 0;
                while seq.next_element::<IgnoredAny>()?.is_some() {
                    total += 1;
                }

                Ok(SeqCount(total))
            }
        }

        deserializer.deserialize_seq(CountVisitor)
    }
}

#[cfg(test)]
mod tests {
use insta::assert_debug_snapshot;
use relay_metrics::Bucket;

use super::*;

#[test]
fn test_seq_count() {
    // Only the element count matters; item contents (even mixed types)
    // are ignored by `SeqCount`.
    let cases = [
        ("[1, 2, 3, 4, 5]", 5),
        ("[1, 2, \"mixed\", 4, 5]", 5),
        ("[]", 0),
    ];

    for (json, expected) in cases {
        let SeqCount(count) = serde_json::from_str(json).unwrap();
        assert_eq!(count, expected, "unexpected count for {}", json);
    }
}

#[test]
fn test_buckets() {
let json = r#"[
Expand Down Expand Up @@ -214,44 +165,12 @@ mod tests {

for (b, mb) in buckets.iter().zip(min_buckets.iter()) {
assert_eq!(b.name(), mb.name());
assert_eq!(
b.summary(ExtractionMode::Usage),
mb.summary(ExtractionMode::Usage)
);
assert_eq!(
b.summary(ExtractionMode::Duration),
mb.summary(ExtractionMode::Duration)
);
assert_eq!(b.summary(), mb.summary());
assert_eq!(b.metadata, mb.metadata);
}

let duration = min_buckets
.iter()
.map(|b| b.summary(ExtractionMode::Duration))
.collect::<Vec<_>>();
let usage = min_buckets
.iter()
.map(|b| b.summary(ExtractionMode::Usage))
.collect::<Vec<_>>();

assert_debug_snapshot!(duration, @r###"
[
Transactions {
count: 4,
has_profile: true,
},
Transactions {
count: 0,
has_profile: false,
},
Spans(
3,
),
None,
None,
]
"###);
assert_debug_snapshot!(usage, @r###"
let summary = min_buckets.iter().map(|b| b.summary()).collect::<Vec<_>>();
assert_debug_snapshot!(summary, @r###"
[
Transactions {
count: 0,
Expand Down
57 changes: 12 additions & 45 deletions relay-server/src/metrics/outcomes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,33 +7,13 @@ use relay_quotas::{DataCategory, Scoping};
use relay_system::Addr;

use crate::envelope::SourceQuantities;
use crate::metrics::{ExtractionMode, MetricStats};
use crate::metrics::MetricStats;
use crate::services::outcome::{Outcome, TrackOutcome};
#[cfg(feature = "processing")]
use relay_cardinality::{CardinalityLimit, CardinalityReport};

pub const PROFILE_TAG: &str = "has_profile";

/// Indicates where quantities should be taken from.
pub enum Quantities {
/// Calculates the quantities from the passed buckets using [`ExtractionMode`].
FromBuckets(ExtractionMode),
/// Uses the provided [`SourceQuantities`].
Value(SourceQuantities),
}

// Lets callers pass a bare `ExtractionMode` wherever `impl Into<Quantities>`
// is accepted; quantities are then computed from the buckets.
impl From<ExtractionMode> for Quantities {
fn from(value: ExtractionMode) -> Self {
Self::FromBuckets(value)
}
}

// Lets callers pass pre-computed `SourceQuantities` wherever
// `impl Into<Quantities>` is accepted, skipping bucket summarization.
impl From<SourceQuantities> for Quantities {
fn from(value: SourceQuantities) -> Self {
Self::Value(value)
}
}

/// [`MetricOutcomes`] takes care of creating the right outcomes for metrics at the end of their
/// lifecycle.
///
Expand All @@ -55,13 +35,7 @@ impl MetricOutcomes {
}

/// Tracks an outcome for a list of buckets and generates the necessary outcomes.
pub fn track(
&self,
scoping: Scoping,
buckets: &[impl TrackableBucket],
quantities: impl Into<Quantities>,
outcome: Outcome,
) {
pub fn track(&self, scoping: Scoping, buckets: &[impl TrackableBucket], outcome: Outcome) {
let timestamp = Utc::now();

// Never emit accepted outcomes for surrogate metrics.
Expand All @@ -72,10 +46,7 @@ impl MetricOutcomes {
spans,
profiles,
buckets,
} = match quantities.into() {
Quantities::FromBuckets(mode) => extract_quantities(buckets, mode),
Quantities::Value(source) => source,
};
} = extract_quantities(buckets);

let categories = [
(DataCategory::Transaction, transactions as u32),
Expand Down Expand Up @@ -151,7 +122,7 @@ pub trait TrackableBucket {
/// of datapoints contained in the bucket.
///
/// Additionally tracks whether the transactions also contained profiling information.
fn summary(&self, mode: ExtractionMode) -> BucketSummary;
fn summary(&self) -> BucketSummary;

/// Metric bucket metadata.
fn metadata(&self) -> BucketMetadata;
Expand All @@ -166,8 +137,8 @@ impl<T: TrackableBucket> TrackableBucket for &T {
(**self).ty()
}

fn summary(&self, mode: ExtractionMode) -> BucketSummary {
(**self).summary(mode)
fn summary(&self) -> BucketSummary {
(**self).summary()
}

fn metadata(&self) -> BucketMetadata {
Expand All @@ -184,8 +155,8 @@ impl TrackableBucket for Bucket {
self.value.ty()
}

fn summary(&self, mode: ExtractionMode) -> BucketSummary {
BucketView::new(self).summary(mode)
fn summary(&self) -> BucketSummary {
BucketView::new(self).summary()
}

fn metadata(&self) -> BucketMetadata {
Expand All @@ -202,20 +173,16 @@ impl TrackableBucket for BucketView<'_> {
self.ty()
}

fn summary(&self, mode: ExtractionMode) -> BucketSummary {
fn summary(&self) -> BucketSummary {
let mri = match MetricResourceIdentifier::parse(self.name()) {
Ok(mri) => mri,
Err(_) => return BucketSummary::default(),
};

match mri.namespace {
MetricNamespace::Transactions => {
let usage = matches!(mode, ExtractionMode::Usage);
let count = match self.value() {
BucketViewValue::Counter(c) if usage && mri.name == "usage" => {
c.to_f64() as usize
}
BucketViewValue::Distribution(d) if !usage && mri.name == "duration" => d.len(),
BucketViewValue::Counter(c) if mri.name == "usage" => c.to_f64() as usize,
_ => 0,
};
let has_profile = matches!(mri.name.as_ref(), "usage" | "duration")
Expand All @@ -239,7 +206,7 @@ impl TrackableBucket for BucketView<'_> {
}

/// Extracts quota information from a list of metric buckets.
pub fn extract_quantities<I, T>(buckets: I, mode: ExtractionMode) -> SourceQuantities
pub fn extract_quantities<I, T>(buckets: I) -> SourceQuantities
where
I: IntoIterator<Item = T>,
T: TrackableBucket,
Expand All @@ -248,7 +215,7 @@ where

for bucket in buckets {
quantities.buckets += 1;
let summary = bucket.summary(mode);
let summary = bucket.summary();
match summary {
BucketSummary::Transactions { count, has_profile } => {
quantities.transactions += count;
Expand Down
19 changes: 2 additions & 17 deletions relay-server/src/metrics/rate_limits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,6 @@ pub struct MetricsLimiter<Q: AsRef<Vec<Quota>> = Vec<Quota>> {
/// A list of aggregated metric buckets with some counters.
buckets: Vec<SummarizedBucket>,

/// Mode used to extract transaction and span counts.
mode: ExtractionMode,

/// The quotas set on the current project.
quotas: Q,

Expand All @@ -30,15 +27,6 @@ pub struct MetricsLimiter<Q: AsRef<Vec<Quota>> = Vec<Quota>> {
counts: EntityCounts,
}

/// Whether to extract transaction and profile count based on the usage or duration metric.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ExtractionMode {
/// Use the usage count metric (the `usage` counter).
Usage,
/// Use the duration distribution metric (the `duration` distribution).
Duration,
}

fn to_counts(summary: &BucketSummary) -> EntityCounts {
match *summary {
BucketSummary::Transactions { count, has_profile } => EntityCounts {
Expand Down Expand Up @@ -120,12 +108,11 @@ impl<Q: AsRef<Vec<Quota>>> MetricsLimiter<Q> {
buckets: impl IntoIterator<Item = Bucket>,
quotas: Q,
scoping: Scoping,
mode: ExtractionMode,
) -> Result<Self, Vec<Bucket>> {
let buckets: Vec<_> = buckets
.into_iter()
.map(|bucket| {
let summary = bucket.summary(mode);
let summary = bucket.summary();
SummarizedBucket { bucket, summary }
})
.collect();
Expand All @@ -138,7 +125,6 @@ impl<Q: AsRef<Vec<Quota>>> MetricsLimiter<Q> {
if let Some(counts) = total_counts {
Ok(Self {
buckets,
mode,
quotas,
scoping,
counts,
Expand Down Expand Up @@ -194,7 +180,7 @@ impl<Q: AsRef<Vec<Quota>>> MetricsLimiter<Q> {
});
self.buckets = buckets;

metric_outcomes.track(self.scoping, &dropped, self.mode, outcome);
metric_outcomes.track(self.scoping, &dropped, outcome);
}

fn drop_profiles(
Expand Down Expand Up @@ -321,7 +307,6 @@ mod tests {
project_key: ProjectKey::parse("a94ae32be2584e0bbd7a4cbb95971fee").unwrap(),
key_id: None,
},
ExtractionMode::Usage,
)
.unwrap();

Expand Down
Loading

0 comments on commit d520124

Please sign in to comment.