Skip to content

Commit

Permalink
Merge branch 'master' into aliu/feedback-attachment-counter
Browse files Browse the repository at this point in the history
  • Loading branch information
aliu39 authored Jun 4, 2024
2 parents 2bb39e7 + 5c5c55e commit 78f83a6
Show file tree
Hide file tree
Showing 32 changed files with 588 additions and 503 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
- Apply globally defined metric tags to legacy transaction metrics. ([#3615](https://github.com/getsentry/relay/pull/3615))
- Limit the maximum size of spans in a transaction to 800 KiB. ([#3645](https://github.com/getsentry/relay/pull/3645))
- Scrub identifiers in spans with `op:db` and `db_system:redis`. ([#3642](https://github.com/getsentry/relay/pull/3642))
- Stop trimming important span fields by marking them `trim = "false"`. ([#3670](https://github.com/getsentry/relay/pull/3670))

**Features**:

Expand All @@ -23,6 +24,8 @@
- Limit metric name to 150 characters. ([#3628](https://github.com/getsentry/relay/pull/3628))
- Add validation of Kafka topics on startup. ([#3543](https://github.com/getsentry/relay/pull/3543))
- Send `attachment` data inline when possible. ([#3654](https://github.com/getsentry/relay/pull/3654))
- Drop support for transaction metrics extraction versions < 3. ([#3672](https://github.com/getsentry/relay/pull/3672))
- Move partitioning into the `Aggregator` and add a new `Partition` bucket shift mode. ([#3661](https://github.com/getsentry/relay/pull/3661))

## 24.5.0

Expand Down
10 changes: 3 additions & 7 deletions relay-config/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use relay_kafka::{
ConfigError as KafkaConfigError, KafkaConfigParam, KafkaParams, KafkaTopic, TopicAssignment,
TopicAssignments,
};
use relay_metrics::aggregator::{AggregatorConfig, ShiftKey};
use relay_metrics::aggregator::{AggregatorConfig, FlushBatching};
use relay_metrics::{AggregatorServiceConfig, MetricNamespace, ScopedAggregatorConfig};
use relay_redis::RedisConfig;
use serde::de::{DeserializeOwned, Unexpected, Visitor};
Expand Down Expand Up @@ -2276,11 +2276,6 @@ impl Config {
self.values.processing.attachment_chunk_size.as_bytes()
}

/// Amount of metric partitions.
///
/// Reads the `flush_partitions` setting from the aggregator configuration;
/// `None` means no partition count has been configured.
pub fn metrics_partitions(&self) -> Option<u64> {
self.values.aggregator.flush_partitions
}

/// Maximum metrics batch size in bytes.
pub fn metrics_max_batch_size_bytes(&self) -> usize {
self.values.aggregator.max_flush_bytes
Expand Down Expand Up @@ -2398,7 +2393,8 @@ impl Config {
max_project_key_bucket_bytes,
initial_delay: 30,
debounce_delay: 10,
shift_key: ShiftKey::Project,
flush_partitions: None,
flush_batching: FlushBatching::Project,
}
}

Expand Down
13 changes: 6 additions & 7 deletions relay-dynamic-config/src/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,9 @@ pub struct CustomMeasurementConfig {
/// - 6: Bugfix to make transaction metrics extraction apply globally defined tag mappings.
const TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION: u16 = 6;

/// Minimum supported version of metrics extraction from transactions.
const TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION: u16 = 3;

/// Deprecated. Defines whether URL transactions should be considered low cardinality.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
Expand Down Expand Up @@ -160,19 +163,15 @@ impl TransactionMetricsConfig {
/// Creates an enabled configuration with empty defaults.
pub fn new() -> Self {
Self {
version: 1,
version: TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION,
..Self::default()
}
}

/// Returns `true` if metrics extraction is enabled and compatible with this Relay.
pub fn is_enabled(&self) -> bool {
self.version > 0 && self.version <= TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION
}

/// Returns `true` if usage should be tracked through a dedicated metric.
pub fn usage_metric(&self) -> bool {
self.version >= 3
self.version >= TRANSACTION_EXTRACT_MIN_SUPPORTED_VERSION
&& self.version <= TRANSACTION_EXTRACT_MAX_SUPPORTED_VERSION
}
}

Expand Down
21 changes: 21 additions & 0 deletions relay-event-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -323,6 +323,7 @@ struct FieldAttrs {
max_chars_allowance: Option<TokenStream>,
max_depth: Option<TokenStream>,
max_bytes: Option<TokenStream>,
trim: Option<bool>,
}

impl FieldAttrs {
Expand Down Expand Up @@ -366,6 +367,14 @@ impl FieldAttrs {
quote!(crate::processor::Pii::False)
};

let trim = if let Some(trim) = self.trim {
quote!(#trim)
} else if let Some(ref parent_attrs) = inherit_from_field_attrs {
quote!(#parent_attrs.trim)
} else {
quote!(true)
};

let retain = self.retain;

let max_chars = if let Some(ref max_chars) = self.max_chars {
Expand Down Expand Up @@ -421,6 +430,7 @@ impl FieldAttrs {
max_bytes: #max_bytes,
pii: #pii,
retain: #retain,
trim: #trim,
}
})
}
Expand Down Expand Up @@ -595,6 +605,17 @@ fn parse_field_attributes(
panic!("Got non string literal for retain");
}
}
} else if ident == "trim" {
match name_value.lit {
Lit::Str(litstr) => match litstr.value().as_str() {
"true" => rv.trim = None,
"false" => rv.trim = Some(false),
other => panic!("Unknown value {other}"),
},
_ => {
panic!("Got non string literal for trim");
}
}
} else if ident == "legacy_alias" || ident == "skip_serialization" {
// Skip
} else {
Expand Down
6 changes: 3 additions & 3 deletions relay-event-normalization/src/normalize/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1706,14 +1706,14 @@ mod tests {
timestamp: ~,
start_timestamp: ~,
exclusive_time: ~,
description: ~,
op: "app_start_cold",
span_id: ~,
parent_span_id: ~,
trace_id: ~,
segment_id: ~,
is_segment: ~,
status: ~,
description: ~,
tags: ~,
origin: ~,
profile_id: ~,
Expand Down Expand Up @@ -1751,14 +1751,14 @@ mod tests {
timestamp: ~,
start_timestamp: ~,
exclusive_time: ~,
description: ~,
op: "app.start.cold",
span_id: ~,
parent_span_id: ~,
trace_id: ~,
segment_id: ~,
is_segment: ~,
status: ~,
description: ~,
tags: ~,
origin: ~,
profile_id: ~,
Expand Down Expand Up @@ -1796,14 +1796,14 @@ mod tests {
timestamp: ~,
start_timestamp: ~,
exclusive_time: ~,
description: ~,
op: "app.start.warm",
span_id: ~,
parent_span_id: ~,
trace_id: ~,
segment_id: ~,
is_segment: ~,
status: ~,
description: ~,
tags: ~,
origin: ~,
profile_id: ~,
Expand Down
151 changes: 139 additions & 12 deletions relay-event-normalization/src/trimming.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,15 +74,16 @@ impl Processor for TrimmingProcessor {
});
}

if self.remaining_size() == Some(0) {
// TODO: Create remarks (ensure they do not bloat event)
return Err(ProcessingAction::DeleteValueHard);
}
if self.remaining_depth(state) == Some(0) {
// TODO: Create remarks (ensure they do not bloat event)
return Err(ProcessingAction::DeleteValueHard);
if state.attrs().trim {
if self.remaining_size() == Some(0) {
// TODO: Create remarks (ensure they do not bloat event)
return Err(ProcessingAction::DeleteValueHard);
}
if self.remaining_depth(state) == Some(0) {
// TODO: Create remarks (ensure they do not bloat event)
return Err(ProcessingAction::DeleteValueHard);
}
}

Ok(())
}

Expand Down Expand Up @@ -131,6 +132,10 @@ impl Processor for TrimmingProcessor {
trim_string(value, meta, max_chars, state.attrs().max_chars_allowance);
}

if !state.attrs().trim {
return Ok(());
}

if let Some(size_state) = self.size_state.last() {
if let Some(size_remaining) = size_state.size_remaining {
trim_string(value, meta, size_remaining, 0);
Expand All @@ -149,6 +154,10 @@ impl Processor for TrimmingProcessor {
where
T: ProcessValue,
{
if !state.attrs().trim {
return Ok(());
}

// If we need to check the bag size, then we go down a different path
if !self.size_state.is_empty() {
let original_length = value.len();
Expand All @@ -159,7 +168,7 @@ impl Processor for TrimmingProcessor {

let mut split_index = None;
for (index, item) in value.iter_mut().enumerate() {
if self.remaining_size().unwrap() == 0 {
if self.remaining_size() == Some(0) {
split_index = Some(index);
break;
}
Expand Down Expand Up @@ -191,6 +200,10 @@ impl Processor for TrimmingProcessor {
where
T: ProcessValue,
{
if !state.attrs().trim {
return Ok(());
}

// If we need to check the bag size, then we go down a different path
if !self.size_state.is_empty() {
let original_length = value.len();
Expand All @@ -201,7 +214,7 @@ impl Processor for TrimmingProcessor {

let mut split_key = None;
for (key, item) in value.iter_mut() {
if self.remaining_size().unwrap() == 0 {
if self.remaining_size() == Some(0) {
split_key = Some(key.to_owned());
break;
}
Expand Down Expand Up @@ -230,6 +243,10 @@ impl Processor for TrimmingProcessor {
_meta: &mut Meta,
state: &ProcessingState<'_>,
) -> ProcessingResult {
if !state.attrs().trim {
return Ok(());
}

match value {
Value::Array(_) | Value::Object(_) => {
if self.remaining_depth(state) == Some(1) {
Expand All @@ -252,6 +269,10 @@ impl Processor for TrimmingProcessor {
_meta: &mut Meta,
state: &ProcessingState<'_>,
) -> ProcessingResult {
if !state.attrs().trim {
return Ok(());
}

processor::apply(&mut stacktrace.frames, |frames, meta| {
enforce_frame_hard_limit(frames, meta, 250);
Ok(())
Expand Down Expand Up @@ -393,9 +414,10 @@ mod tests {
use std::iter::repeat;

use relay_event_schema::protocol::{
Breadcrumb, Context, Contexts, Event, Exception, ExtraValue, Span, TagEntry, Tags, Values,
Breadcrumb, Context, Contexts, Event, Exception, ExtraValue, Span, SpanId, TagEntry, Tags,
TraceId, Values,
};
use relay_protocol::{Map, Remark, SerializableAnnotated};
use relay_protocol::{get_value, Map, Remark, SerializableAnnotated};
use similar_asserts::assert_eq;

use crate::MaxChars;
Expand Down Expand Up @@ -930,4 +952,109 @@ mod tests {

assert_eq!(event.0.unwrap().spans.0.unwrap().len(), 8);
}

#[test]
fn test_too_many_spans_trimmed_trace_id() {
    // A description big enough to exhaust the trimming processor's size budget
    // before the second span is reached.
    let big_description = "a".repeat(819163);
    let long_trace_id = TraceId("b".repeat(48));

    let oversized_span = Span {
        description: big_description.clone().into(),
        ..Default::default()
    };
    let id_span = Span {
        trace_id: long_trace_id.clone().into(),
        ..Default::default()
    };
    let mut event = Annotated::new(Event {
        spans: Annotated::new(vec![oversized_span.into(), id_span.into()]),
        ..Default::default()
    });

    let mut processor = TrimmingProcessor::new();
    processor::process_value(&mut event, &mut processor, ProcessingState::root()).unwrap();

    // The first span's description survives untouched.
    assert_eq!(
        get_value!(event.spans[0].description!),
        &big_description
    );
    // Trace ID would be trimmed without `trim = "false"`
    assert_eq!(get_value!(event.spans[1].trace_id!), &long_trace_id);
}

#[test]
fn test_too_many_spans_trimmed_trace_id_drop() {
// A description large enough to exhaust the size budget before the
// second span is processed, so the second span is over budget entirely.
let original_description = "a".repeat(819163);
let original_span_id = SpanId("b".repeat(48));
let original_trace_id = TraceId("c".repeat(48));
let original_segment_id = SpanId("d".repeat(48));
// One char over the 128-char limit asserted at the bottom of this test.
let original_op = "e".repeat(129);

let mut event = Annotated::new(Event {
spans: Annotated::new(vec![
Span {
description: original_description.clone().into(),
..Default::default()
}
.into(),
// Second span: every field here is marked `trim = "false"` in the
// schema except `op`, which only has a max_chars limit.
Span {
span_id: original_span_id.clone().into(),
trace_id: original_trace_id.clone().into(),
segment_id: original_segment_id.clone().into(),
is_segment: false.into(),
op: original_op.clone().into(),
..Default::default()
}
.into(),
]),
..Default::default()
});

let mut processor = TrimmingProcessor::new();
processor::process_value(&mut event, &mut processor, ProcessingState::root()).unwrap();

assert_eq!(
get_value!(event.spans[0].description!),
&original_description
);
// These fields would be dropped without `trim = "false"`
assert_eq!(get_value!(event.spans[1].span_id!), &original_span_id);
assert_eq!(get_value!(event.spans[1].trace_id!), &original_trace_id);
assert_eq!(get_value!(event.spans[1].segment_id!), &original_segment_id);
assert_eq!(get_value!(event.spans[1].is_segment!), &false);

// span.op is trimmed to its max_chars, but not dropped:
assert_eq!(get_value!(event.spans[1].op!).len(), 128);
}

#[test]
fn test_trim_false_contributes_to_budget() {
    // A `trim = "false"` field is never trimmed itself, but it must still be
    // charged against the overall size budget. Varying the span id length and
    // checking how much of the description survives proves that.
    for span_id in ["short", "looooooooooooooooooooooooooong"] {
        let id = SpanId(span_id.to_owned());
        let description = "a".repeat(900000);

        let span = Span {
            span_id: id.clone().into(),
            description: description.clone().into(),
            ..Default::default()
        };
        let mut event = Annotated::new(Event {
            spans: Annotated::new(vec![span.into()]),
            ..Default::default()
        });

        let mut processor = TrimmingProcessor::new();
        processor::process_value(&mut event, &mut processor, ProcessingState::root()).unwrap();

        // The span id itself is kept intact regardless of length.
        assert_eq!(get_value!(event.spans[0].span_id!).as_ref(), span_id);

        // The amount of trimming on the description depends on the length of the span id.
        assert_eq!(
            get_value!(event.spans[0].description!).len(),
            1024 * 800 - 12 - span_id.len(),
        );
    }
}
}
Loading

0 comments on commit 78f83a6

Please sign in to comment.