
Commit

Merge branch 'master' into dav1d/tune-evicition
Dav1dde authored Nov 4, 2024
2 parents a63a9aa + 265f79c commit de4ff8e
Showing 18 changed files with 526 additions and 243 deletions.
5 changes: 3 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -103,6 +103,7 @@ hostname = "0.4.0"
human-size = "0.4.1"
http = "1.1.0"
hyper-util = { version = "0.1.7", features = ["tokio"] }
hyper = "1.5.0"
indexmap = "2.2.5"
insta = { version = "1.31.0", features = ["json", "redactions", "ron"] }
ipnetwork = "0.20.0"
63 changes: 28 additions & 35 deletions devservices/config.yml
@@ -4,44 +4,37 @@ x-sentry-service-config:
  service_name: relay
  dependencies:
    redis:
-      description: "redis"
+      description: Shared instance of redis used by sentry services
+      remote:
+        repo_name: sentry-shared-redis
+        branch: main
+        repo_link: git@github.com:getsentry/sentry-shared-redis.git
    kafka:
-      description: "kafka"
+      description: Shared instance of kafka used by sentry services
+      remote:
+        repo_name: sentry-shared-kafka
+        branch: main
+        repo_link: git@github.com:getsentry/sentry-shared-kafka.git
+    relay:
+      description: Service that pushes some functionality from the Sentry SDKs as well as the Sentry server into a proxy process.
  modes:
    default: [redis, kafka]
+    containerized: [redis, kafka, relay]

services:
-  redis:
-    image: "redis:6.2.14-alpine"
-    healthcheck:
-      test: redis-cli ping
-  kafka:
-    image: "confluentinc/cp-kafka:7.6.1"
-    environment:
-      # https://docs.confluent.io/platform/current/installation/docker/config-reference.html#cp-kakfa-example
-      KAFKA_PROCESS_ROLES: "broker,controller"
-      KAFKA_CONTROLLER_QUORUM_VOTERS: "1001@127.0.0.1:29093"
-      KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
-      KAFKA_NODE_ID: "1001"
-      CLUSTER_ID: "MkU3OEVBNTcwNTJENDM2Qk"
-      KAFKA_LISTENERS: "PLAINTEXT://0.0.0.0:29092,INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9092,CONTROLLER://0.0.0.0:29093"
-      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://127.0.0.1:29092,INTERNAL://kafka:9093,EXTERNAL://kafka:9092"
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,CONTROLLER:PLAINTEXT"
-      KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT"
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
-      KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS: "1"
-      KAFKA_LOG_RETENTION_HOURS: "24"
-      KAFKA_MESSAGE_MAX_BYTES: "50000000" #50MB or bust
-      KAFKA_MAX_REQUEST_SIZE: "50000000" #50MB on requests apparently too
-      CONFLUENT_SUPPORT_METRICS_ENABLE: "false"
-      KAFKA_LOG4J_LOGGERS: "kafka.cluster=WARN,kafka.controller=WARN,kafka.coordinator=WARN,kafka.log=WARN,kafka.server=WARN,state.change.logger=WARN"
-      KAFKA_LOG4J_ROOT_LOGLEVEL: "WARN"
-      KAFKA_TOOLS_LOG4J_LOGLEVEL: "WARN"
-    ulimits:
-      nofile:
-        soft: 4096
-        hard: 4096
+  relay:
+    image: us-central1-docker.pkg.dev/sentryio/relay/relay:nightly
+    ports:
+      - 7899:7899
+    command: [run, --config, /etc/relay]
    volumes:
-      - "kafka:/var/lib/kafka/data"
-volumes:
-  kafka:
+      - ./config/relay.yml:/etc/relay/config.yml
+      - ./config/devservices-credentials.json:/etc/relay/credentials.json
+    extra_hosts:
+      - host.docker.internal:host-gateway
+    networks:
+      - devservices

+networks:
+  devservices:
+    name: devservices
5 changes: 5 additions & 0 deletions devservices/config/devservices-credentials.json
@@ -0,0 +1,5 @@
{
  "secret_key": "OxE6Du8quMxWj19f7YDCpIxm6XyU9nWGQJkMWFlkchA",
  "public_key": "SMSesqan65THCV6M4qs4kBzPai60LzuDn-xNsvYpuP8",
  "id": "88888888-4444-4444-8444-cccccccccccc"
}
18 changes: 18 additions & 0 deletions devservices/config/relay.yml
@@ -0,0 +1,18 @@
---
relay:
  upstream: "http://host.docker.internal:8001/"
  host: 0.0.0.0
  port: 7899
logging:
  level: INFO
  enable_backtraces: false
limits:
  shutdown_timeout: 0
processing:
  enabled: true
  kafka_config:
    - { name: "bootstrap.servers", value: "kafka:9093" }
    # The maximum attachment chunk size is 1MB. Together with some meta data,
    # messages will never get larger than 2MB in total.
    - { name: "message.max.bytes", value: 2097176 }
  redis: redis://redis:6379
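For reference on the message.max.bytes value above: 2 × 1024 × 1024 = 2,097,152 bytes, so the configured 2,097,176 is an even 2 MiB plus 24 bytes of slack, which lines up with the in-file comment that a 1 MB attachment chunk plus its metadata never exceeds 2 MB in total.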
4 changes: 4 additions & 0 deletions gocd/templates/pipelines/pops.libsonnet
@@ -13,6 +13,8 @@ local canary_region_pops = {
'us-pop-regional-4',
'us-pop-1',
'us-pop-2',
+'us-pop-3',
+'us-pop-4',
],
};

@@ -28,6 +30,8 @@ local region_pops = {
'us-pop-regional-4',
'us-pop-1',
'us-pop-2',
+'us-pop-3',
+'us-pop-4',
],
s4s: [],
};
1 change: 1 addition & 0 deletions relay-server/Cargo.toml
@@ -128,6 +128,7 @@ url = { workspace = true, features = ["serde"] }
uuid = { workspace = true, features = ["v5"] }
zstd = { workspace = true }
semver = { workspace = true }
+hyper = { workspace = true }

[dev-dependencies]
criterion = { workspace = true }
19 changes: 17 additions & 2 deletions relay-server/src/endpoints/common.rs
@@ -334,7 +334,11 @@ pub async fn handle_envelope(
    state: &ServiceState,
    envelope: Box<Envelope>,
) -> Result<Option<EventId>, BadStoreRequest> {
-    let client_name = envelope.meta().client_name().unwrap_or("proprietary");
+    let client_name = envelope
+        .meta()
+        .client_name()
+        .filter(|name| name.starts_with("sentry") || name.starts_with("raven"))
+        .unwrap_or("proprietary");
    for item in envelope.items() {
        metric!(
            histogram(RelayHistograms::EnvelopeItemSize) = item.payload().len() as u64,
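As context for the client-name change above, here is a minimal, self-contained sketch of the same normalization. The helper normalize_client_name, its main demo, and the sample client names are hypothetical; the real code inlines the same chain on envelope.meta().client_name(), likely so unknown SDK names do not produce unbounded values for the client tag on the metric.

// Hypothetical helper mirroring the filter in the diff: only well-known
// SDK prefixes are kept, everything else is reported as "proprietary".
fn normalize_client_name(client_name: Option<&str>) -> &str {
    client_name
        .filter(|name| name.starts_with("sentry") || name.starts_with("raven"))
        .unwrap_or("proprietary")
}

fn main() {
    assert_eq!(normalize_client_name(Some("sentry.python")), "sentry.python");
    assert_eq!(normalize_client_name(Some("raven-node")), "raven-node");
    // Custom or unknown clients are bucketed together.
    assert_eq!(normalize_client_name(Some("my-custom-client")), "proprietary");
    assert_eq!(normalize_client_name(None), "proprietary");
}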
@@ -367,9 +371,20 @@
        return Ok(event_id);
    }

+    let project_key = managed_envelope.envelope().meta().public_key();

+    // Prefetch sampling project key, current spooling implementations rely on this behavior.
+    //
+    // To be changed once spool v1 has been removed.
+    if let Some(sampling_project_key) = managed_envelope.envelope().sampling_key() {
+        if sampling_project_key != project_key {
+            state.project_cache_handle().fetch(sampling_project_key);
+        }
+    }

    let checked = state
        .project_cache_handle()
-        .get(managed_envelope.scoping().project_key)
+        .get(project_key)
        .check_envelope(managed_envelope)
        .map_err(BadStoreRequest::EventRejected)?;
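The prefetch logic added above can be read as the rough sketch below. ProjectKey, ProjectCacheHandle, prefetch_sampling_project, and the sample key strings are hypothetical stand-ins for the real relay-server types; the point is simply that the sampling project's config is fetched eagerly only when it belongs to a different project than the envelope itself.

// Stand-in types for illustration only; relay-server's real ProjectKey and
// ProjectCacheHandle are different.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ProjectKey(&'static str);

struct ProjectCacheHandle;

impl ProjectCacheHandle {
    fn fetch(&self, key: ProjectKey) {
        // In the real service this schedules an asynchronous project-config fetch.
        println!("prefetching project config for {key:?}");
    }
}

// Prefetch the sampling project's config only when it differs from the
// envelope's own project, mirroring the check in the diff.
fn prefetch_sampling_project(
    cache: &ProjectCacheHandle,
    own_key: ProjectKey,
    sampling_key: Option<ProjectKey>,
) {
    if let Some(sampling_key) = sampling_key {
        if sampling_key != own_key {
            cache.fetch(sampling_key);
        }
    }
}

fn main() {
    let cache = ProjectCacheHandle;
    let own = ProjectKey("project-a");
    let root = ProjectKey("project-b");
    prefetch_sampling_project(&cache, own, Some(root)); // schedules a fetch
    prefetch_sampling_project(&cache, own, Some(own)); // same project, nothing to do
    prefetch_sampling_project(&cache, own, None); // no sampling context at all
}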

(Diff truncated: the remaining 10 of the 18 changed files are not rendered here.)
