From e7a084c7fc08e1d0d2c82d5297d982c2232efcb0 Mon Sep 17 00:00:00 2001
From: Zach Collins
Date: Tue, 17 Sep 2024 11:16:10 -0700
Subject: [PATCH] Adding ability to push self-hoisting staging docker compose service (#1157)

Creates a self-hoisting docker-compose image and adds a command to push it
to a target sandbox for staging deployment.
---
 .env.example               |   1 +
 .pre-commit-config.yaml    |   1 -
 Compose.Dockerfile         |  16 ++
 Dockerfile                 |  20 ++-
 Makefile                   |  18 ++
 api_old.json               | 343 --------------------------------------
 docker-compose.staging.yml |  10 ++
 src/seer/bootup.py         |   2 +-
 src/seer/configuration.py  |   7 +-
 9 files changed, 59 insertions(+), 359 deletions(-)
 create mode 100644 Compose.Dockerfile
 delete mode 100644 api_old.json
 create mode 100644 docker-compose.staging.yml

diff --git a/.env.example b/.env.example
index 6bf12e097..c4eee39b4 100644
--- a/.env.example
+++ b/.env.example
@@ -14,3 +14,4 @@
 NO_SENTRY_INTEGRATION=... # Set this to 1 in develop mode to ignore Local Sentry
 NO_REAL_MODELS=... # Set this to 1 in development to ignore real models and use stubs
 RPC_SHARED_SECRET="seers-also-very-long-value-haha" # Match with SEER_RPC_SHARED_SECRET=[""] in sentry.conf.py
+SBX_PROJECT=eng-dev-sbx--XXX # If using push-image and https://github.com/getsentry/terraform-sandboxes.private
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ba3597bc9..5900cc785 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -55,7 +55,6 @@ repos:
       - id: check-executables-have-shebangs
       - id: check-merge-conflict
       - id: check-symlinks
-      - id: check-yaml
       - id: end-of-file-fixer
       - id: trailing-whitespace
       - id: debug-statements
diff --git a/Compose.Dockerfile b/Compose.Dockerfile
new file mode 100644
index 000000000..da2d89623
--- /dev/null
+++ b/Compose.Dockerfile
@@ -0,0 +1,16 @@
+FROM docker:cli
+
+ARG SBX_PROJECT
+ENV SBX_PROJECT=${SBX_PROJECT}
+
+RUN mkdir /app
+WORKDIR /app
+
+COPY .env /app/
+COPY docker-compose.yml /app/
+COPY docker-compose.staging.yml /app/
+# Can't reset these values with overlay unfortunately
+RUN grep -v 'context: .' /app/docker-compose.yml | grep -v 'build:' > /app/docker-compose.yml.2
+RUN mv /app/docker-compose.yml.2 /app/docker-compose.yml
+
+ENTRYPOINT ["/usr/local/bin/docker", "compose", "-f", "/app/docker-compose.yml", "-f", "docker-compose.staging.yml"]
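
Note: the seer.compose image only bundles the compose files plus the docker
CLI, so at runtime it still needs a Docker daemon to talk to. A rough sketch
of how it could be driven on the sandbox host (assuming Docker is installed
there and the host can pull from the Artifact Registry; the exact invocation
is not part of this patch):

    docker pull us-west1-docker.pkg.dev/$SBX_PROJECT/staging/seer.compose
    # Mount the host's Docker socket so the compose entrypoint inside the
    # image starts the seer services on the sandbox itself.
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock \
        us-west1-docker.pkg.dev/$SBX_PROJECT/staging/seer.compose up -d

Arguments after the image name are appended to the ENTRYPOINT, so "up -d",
"ps", or "logs -f" all behave like ordinary docker compose subcommands.
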
diff --git a/Dockerfile b/Dockerfile
index c8565c87b..0c37cd727 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -31,18 +31,17 @@ RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 &
 RUN ln -s /usr/bin/python /usr/local/bin/python && \
     ln -s /usr/bin/python3 /usr/local/bin/python3
 
-# Copy model files (assuming they are in the 'models' directory)
-COPY models/ models/
-
-# Copy setup files, requirements, and scripts
-COPY setup.py requirements.txt celeryworker.sh celerybeat.sh gunicorn.sh ./
-
-RUN chmod +x ./celeryworker.sh ./celerybeat.sh ./gunicorn.sh
-
 # Install dependencies
+COPY setup.py requirements.txt ./
 RUN pip install --upgrade pip==24.0
 RUN pip install -r requirements.txt --no-cache-dir
 
+# Copy model files (assuming they are in the 'models' directory)
+COPY models/ models/
+# Copy scripts
+COPY celeryworker.sh celerybeat.sh gunicorn.sh ./
+RUN chmod +x ./celeryworker.sh ./celerybeat.sh ./gunicorn.sh
+
 # Copy source code
 COPY src/ src/
 COPY pyproject.toml .
@@ -55,8 +54,11 @@ COPY supervisord.conf /etc/supervisord.conf
 RUN pip install --default-timeout=120 -e . --no-cache-dir --no-deps
 
 ENV FLASK_APP=src.seer.app:start_app()
-# Set in cloudbuild.yaml for production images
+
+# Supports sentry releases
 ARG SEER_VERSION_SHA
 ENV SEER_VERSION_SHA ${SEER_VERSION_SHA}
+ARG SENTRY_ENVIRONMENT=production
+ENV SENTRY_ENVIRONMENT ${SENTRY_ENVIRONMENT}
 
 CMD ["/usr/bin/supervisord", "-c", "/etc/supervisord.conf"]
diff --git a/Makefile b/Makefile
index 49214a5d3..abad390f0 100644
--- a/Makefile
+++ b/Makefile
@@ -89,3 +89,21 @@ gocd: ## Build GoCD pipelines
	# Convert JSON to yaml
	cd ./gocd/generated-pipelines && find . -type f \( -name '*.yaml' \) -print0 | xargs -n 1 -0 yq -p json -o yaml -i
 .PHONY: gocd
+
+HEAD_SHA:=$(shell git rev-parse --short HEAD)
+TIME:=$(shell date +%F.%T)
+SEER_STAGING_VERSION_SHA:=$(HEAD_SHA).$(TIME)
+export SENTRY_ORG:=sentry
+export SENTRY_PROJECT:=seer
+push-staging:
+	# Ensure the google authentication helper is working. If this fails, https://cloud.google.com/artifact-registry/docs/docker/authentication#gcloud-helper
+	gcloud auth configure-docker us-west1-docker.pkg.dev > /dev/null
+	# Setup your SBX_PROJECT in .env from the sandbox project name
+	docker build . --platform linux/amd64 --build-arg SEER_VERSION_SHA=$(SEER_STAGING_VERSION_SHA) --build-arg SENTRY_ENVIRONMENT=staging -t us-west1-docker.pkg.dev/$(SBX_PROJECT)/staging/seer
+	docker build . --platform linux/amd64 -f Compose.Dockerfile --build-arg SBX_PROJECT=$(SBX_PROJECT) -t us-west1-docker.pkg.dev/$(SBX_PROJECT)/staging/seer.compose
+	docker push us-west1-docker.pkg.dev/$(SBX_PROJECT)/staging/seer
+	docker push us-west1-docker.pkg.dev/$(SBX_PROJECT)/staging/seer.compose
+	sentry-cli releases new "${SEER_STAGING_VERSION_SHA}"
+	sentry-cli releases deploys "${SEER_STAGING_VERSION_SHA}" new -e staging
+	sentry-cli releases finalize "${SEER_STAGING_VERSION_SHA}"
+	sentry-cli releases set-commits "${SEER_STAGING_VERSION_SHA}" --auto || true
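
For reference, a typical invocation of the new target could look like this
(assuming gcloud and sentry-cli are installed and authenticated, and that
SBX_PROJECT is exported in the shell or passed on the command line; whether
the Makefile also reads it from .env is not shown here):

    # sentry-cli needs credentials for the release and deploy steps
    export SENTRY_AUTH_TOKEN=<token>
    make push-staging SBX_PROJECT=eng-dev-sbx--XXX

The release name sent to Sentry is the short HEAD SHA plus a timestamp, so
repeated pushes of the same commit produce distinct staging releases.
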
"number", - "title": "Unweighted T Value" - }, - "unweighted_p_value": { - "type": "number", - "title": "Unweighted P Value" - }, - "trend_percentage": { - "type": "number", - "title": "Trend Percentage" - }, - "absolute_percentage_change": { - "type": "number", - "title": "Absolute Percentage Change" - }, - "trend_difference": { - "type": "number", - "title": "Trend Difference" - }, - "breakpoint": { - "type": "integer", - "title": "Breakpoint" - }, - "request_start": { - "type": "integer", - "title": "Request Start" - }, - "request_end": { - "type": "integer", - "title": "Request End" - }, - "data_start": { - "type": "integer", - "title": "Data Start" - }, - "data_end": { - "type": "integer", - "title": "Data End" - }, - "change": { - "anyOf": [ - { - "const": "improvement" - }, - { - "const": "regression" - } - ], - "title": "Change" - } - }, - "type": "object", - "required": [ - "project", - "transaction", - "aggregate_range_1", - "aggregate_range_2", - "unweighted_t_value", - "unweighted_p_value", - "trend_percentage", - "absolute_percentage_change", - "trend_difference", - "breakpoint", - "request_start", - "request_end", - "data_start", - "data_end", - "change" - ], - "title": "BreakpointEntry" - }, - "BreakpointRequest": { - "properties": { - "data": { - "additionalProperties": { - "$ref": "#/components/schemas/BreakpointTransaction" - }, - "type": "object", - "title": "Data" - }, - "sort": { - "type": "string", - "title": "Sort", - "default": "" - }, - "allow_midpoint": { - "type": "string", - "title": "Allow Midpoint", - "default": "1" - }, - "validate_tail_hours": { - "type": "integer", - "title": "Validate Tail Hours", - "default": 0 - }, - "trend_percentage()": { - "type": "number", - "title": "Trend Percentage()", - "default": 0.1 - }, - "min_change()": { - "type": "number", - "title": "Min Change()", - "default": 0.0 - } - }, - "type": "object", - "required": [ - "data" - ], - "title": "BreakpointRequest" - }, - "BreakpointResponse": { - "properties": { - "data": { - "items": { - "$ref": "#/components/schemas/BreakpointEntry" - }, - "type": "array", - "title": "Data" - } - }, - "type": "object", - "required": [ - "data" - ], - "title": "BreakpointResponse" - }, - "BreakpointTransaction": { - "properties": { - "data": { - "items": { - "prefixItems": [ - { - "type": "integer" - }, - { - "prefixItems": [ - { - "$ref": "#/components/schemas/SnubaMetadata" - } - ], - "type": "array", - "maxItems": 1, - "minItems": 1 - } - ], - "type": "array", - "maxItems": 2, - "minItems": 2 - }, - "type": "array", - "title": "Data" - }, - "request_start": { - "type": "integer", - "title": "Request Start" - }, - "request_end": { - "type": "integer", - "title": "Request End" - }, - "data_start": { - "type": "integer", - "title": "Data Start" - }, - "data_end": { - "type": "integer", - "title": "Data End" - } - }, - "type": "object", - "required": [ - "data", - "request_start", - "request_end", - "data_start", - "data_end" - ], - "title": "BreakpointTransaction" - }, - "SeverityRequest": { - "properties": { - "message": { - "type": "string", - "title": "Message", - "default": "" - }, - "has_stacktrace": { - "type": "integer", - "title": "Has Stacktrace", - "default": 0 - }, - "handled": { - "type": "boolean", - "title": "Handled", - "default": false - }, - "trigger_timeout": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "title": "Trigger Timeout" - }, - "trigger_error": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "title": "Trigger 
Error" - } - }, - "type": "object", - "title": "SeverityRequest" - }, - "SeverityResponse": { - "properties": { - "severity": { - "type": "number", - "title": "Severity", - "default": 0.0 - } - }, - "type": "object", - "title": "SeverityResponse" - }, - "SnubaMetadata": { - "properties": { - "count": { - "type": "integer", - "title": "Count" - } - }, - "type": "object", - "required": [ - "count" - ], - "title": "SnubaMetadata" - } - } - } -} diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml new file mode 100644 index 000000000..788080d9a --- /dev/null +++ b/docker-compose.staging.yml @@ -0,0 +1,10 @@ +services: + app: + image: us-west1-docker.pkg.dev/${SBX_PROJECT}/staging/seer + volumes: !reset [] + command: + - bash + - -c + - | + flask db upgrade + exec /usr/bin/supervisord -c /etc/supervisord.conf diff --git a/src/seer/bootup.py b/src/seer/bootup.py index f3d11f717..e7254eafb 100644 --- a/src/seer/bootup.py +++ b/src/seer/bootup.py @@ -62,7 +62,7 @@ def before_send(event: Event, hint: dict) -> Event | None: traces_sample_rate=1.0, send_default_pii=True, release=config.SEER_VERSION_SHA, - environment="production", + environment=config.SENTRY_ENVIRONMENT, before_send=before_send, ) diff --git a/src/seer/configuration.py b/src/seer/configuration.py index 4ca5fe8f4..2de47b58f 100644 --- a/src/seer/configuration.py +++ b/src/seer/configuration.py @@ -40,7 +40,9 @@ def as_absolute_path(path: str) -> str: class AppConfig(BaseModel): SEER_VERSION_SHA: str = "" + SENTRY_DSN: str = "" + SENTRY_ENVIRONMENT: str = "production" DATABASE_URL: str CELERY_BROKER_URL: str @@ -80,11 +82,6 @@ def has_sentry_integration(self) -> bool: return not self.NO_SENTRY_INTEGRATION def do_validation(self): - if not self.IGNORE_API_AUTH: - assert ( - self.JSON_API_SHARED_SECRETS or self.API_PUBLIC_KEY_SECRET_ID - ), "JSON_API_SHARED_SECRETS or API_PUBLIC_KEY_SECRET_ID required if IGNORE_API_AUTH is false!" - if self.is_production: # TODO: Set and uncomment this # assert (