-
Notifications
You must be signed in to change notification settings - Fork 7
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add setup for local development within repo (#26)
The commit adds local development capability by adding files for building a local Docker image, managed by docker-compose, and adds some make targets to run the docker-compose commands with ease.
- Loading branch information
1 parent
46e3050
commit 8cdf25d
Showing
4 changed files
with
265 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
# test reports
test-report/
coverage.xml

# dev — artifacts created by the local docker-compose environment (see dev/docker-compose.yaml)
dev/logs
dev/dags
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
# Make targets for the local (docker-compose based) development environment.
# NOTE(review): several names declared .PHONY below (build-emr_eks_container_example_dag-image,
# build-aws, build-google-cloud, docs, run-static-checks, run-mypy,
# run-local-lineage-server, test-rc-deps) have no rule in this file — TODO
# confirm they are intentionally reserved / defined elsewhere.
.PHONY: dev logs stop clean build build-emr_eks_container_example_dag-image build-aws build-google-cloud build-run docs
.PHONY: restart restart-all run-tests run-static-checks run-mypy run-local-lineage-server test-rc-deps shell help

# Provider version knob, overridable from the command line.
# Fix: the original used ?= "dev" — in Make the quote characters become part
# of the value (Make does not strip quotes); the bare word is the intended value.
ASTRO_PROVIDER_VERSION ?= dev

# If the first command-line goal is "run-mypy"...
ifeq (run-mypy,$(firstword $(MAKECMDGOALS)))
  # ...use the remaining goals as arguments for "run-mypy"
  RUN_ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
  ifndef RUN_ARGS
    RUN_ARGS := .
  endif
  # ...and turn them into do-nothing targets so Make doesn't try to build them
  $(eval $(RUN_ARGS):;@:)
endif

# Base Astro Runtime image for the dev build. ":=" expands once at parse time;
# quotes dropped for the same reason as above.
ASTRO_RUNTIME_IMAGE_NAME := quay.io/astronomer/astro-runtime:8.2.0-base

dev: ## Create a development Environment using `docker compose` file.
	IMAGE_NAME=$(ASTRO_RUNTIME_IMAGE_NAME) docker compose -f dev/docker-compose.yaml up -d

logs: ## View logs of the all the containers
	docker compose -f dev/docker-compose.yaml logs --follow

stop: ## Stop all the containers
	docker compose -f dev/docker-compose.yaml down

clean: ## Remove all the containers along with volumes
	docker compose -f dev/docker-compose.yaml down --volumes --remove-orphans
	rm -rf dev/logs

build: ## Build the Docker image (ignoring cache)
	docker build --build-arg IMAGE_NAME=$(ASTRO_RUNTIME_IMAGE_NAME) -f dev/Dockerfile . -t airflow-provider-fivetran-async-dev:latest --no-cache

build-run: ## Build the Docker Image & then run the containers
	IMAGE_NAME=$(ASTRO_RUNTIME_IMAGE_NAME) docker compose -f dev/docker-compose.yaml up --build -d

restart: ## Restart Triggerer, Scheduler and Worker containers
	docker compose -f dev/docker-compose.yaml restart airflow-triggerer airflow-scheduler airflow-worker

restart-all: ## Restart all the containers
	docker compose -f dev/docker-compose.yaml restart

# NOTE(review): `docker run -it` requires a TTY — this target is for local use;
# a CI runner without a TTY would need the `-it` flags dropped.
run-tests: ## Run CI tests
	docker build --build-arg IMAGE_NAME=$(ASTRO_RUNTIME_IMAGE_NAME) -f dev/Dockerfile . -t airflow-provider-fivetran-async-dev
	docker run -v `pwd`:/usr/local/airflow/airflow_provider_fivetran_async -v `pwd`/dev/.cache:/home/astro/.cache \
	-w /usr/local/airflow/airflow_provider_fivetran_async \
	--rm -it airflow-provider-fivetran-async-dev -- pytest --cov astronomer --cov-report=term-missing tests

shell: ## Runs a shell within a container (Allows interactive session)
	docker compose -f dev/docker-compose.yaml run --rm airflow-scheduler bash

help: ## Prints this message
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-41s\033[0m %s\n", $$1, $$2}'
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
# Dev image for airflow-provider-fivetran-async, built on top of an Astro
# Runtime base image supplied at build time via --build-arg IMAGE_NAME
# (see the Makefile `build` target and dev/docker-compose.yaml).
ARG IMAGE_NAME
FROM ${IMAGE_NAME}

USER root
# git is needed for pip installs from source checkouts; build-essential and the
# sasl libraries are native build requirements of the provider's extras.
# Fix: the original split this into two RUN layers, with the second
# `apt-get install` relying on the apt index fetched in the first layer —
# fragile under layer caching. One layer also lets us drop the apt lists
# afterwards to keep the image slim; the installed packages are unchanged.
RUN apt-get update -y \
    && apt-get install -y --no-install-recommends \
        git \
        build-essential \
        libsasl2-2 \
        libsasl2-dev \
        libsasl2-modules \
    && rm -rf /var/lib/apt/lists/*

# Copy packaging metadata first so dependency resolution is cached separately
# from source changes. NOTE(review): AIRFLOW_HOME is presumably set by the
# Astro Runtime base image — TODO confirm.
COPY setup.cfg ${AIRFLOW_HOME}/airflow_provider_fivetran_async/setup.cfg
COPY pyproject.toml ${AIRFLOW_HOME}/airflow_provider_fivetran_async/pyproject.toml

# Minimal package skeleton so the editable install below can resolve metadata;
# the full source tree is bind-mounted at runtime by docker-compose.
COPY fivetran_provider_async/__init__.py ${AIRFLOW_HOME}/airflow_provider_fivetran_async/fivetran_provider_async/__init__.py

# Editable install with all extras + test dependencies.
RUN pip install -e "${AIRFLOW_HOME}/airflow_provider_fivetran_async[all,tests]"
USER astro
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,189 @@ | ||
---
# Local Airflow development stack (CeleryExecutor) for
# airflow-provider-fivetran-async: Postgres metadata DB, Redis broker,
# webserver, scheduler, worker, triggerer, one-shot init container, and Flower.
# Shared service settings live in the x-airflow-common extension field and are
# pulled into each service via YAML anchors/merge keys.
version: '3'
x-airflow-common:
  &airflow-common
  # Image is built from dev/Dockerfile with the repo root as build context;
  # IMAGE_NAME is passed through from the environment (see Makefile targets).
  image: airflow-provider-fivetran-async-dev
  build:
    context: ..
    dockerfile: dev/Dockerfile
    args:
      - IMAGE_NAME
  environment:
    &airflow-common-env
    DB_BACKEND: postgres
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres:5432/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__LOAD_EXAMPLES: "False"
    AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "True"
    # Short DAG-dir scan interval so edits to mounted dags/ show up quickly.
    AIRFLOW__SCHEDULER__DAG_DIR_LIST_INTERVAL: "5"
    # AIRFLOW__SECRETS__BACKEND: "airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend"
    # AIRFLOW__SECRETS__BACKEND_KWARGS: '{"connections_prefix": "airflow/connections", "variables_prefix": null, "config_prefix": null, "profile_name": "default" }'
    ASTRONOMER_ENVIRONMENT: local

  # Bind-mount dags/logs/plugins plus the provider source checkout so code
  # changes are picked up without rebuilding the image.
  volumes:
    - ./dags:/usr/local/airflow/dags
    - ./logs:/usr/local/airflow/logs
    - ./plugins:/usr/local/airflow/plugins
    - ../../airflow-provider-fivetran-async:/usr/local/airflow/airflow_provider_fivetran_async
    # - ~/.aws/config:/home/astro/.aws/config
  depends_on:
    &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  # Airflow metadata database.
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    command: postgres -c 'idle_in_transaction_session_timeout=60000' # 1 minute timeout
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  # Celery broker.
  redis:
    image: redis:latest
    expose:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: airflow webserver
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-scheduler:
    <<: *airflow-common
    command: airflow scheduler
    healthcheck:
      # $$ escapes the dollar from compose's own interpolation.
      test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-worker:
    <<: *airflow-common
    command: airflow celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    environment:
      <<: *airflow-common-env
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-triggerer:
    <<: *airflow-common
    command: airflow triggerer
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  # One-shot container: sanity-checks host resources, migrates the DB, creates
  # the admin user, and imports dev connections. Other Airflow services wait
  # for it via service_completed_successfully.
  airflow-init:
    <<: *airflow-common
    entrypoint: /bin/bash
    # yamllint disable rule:line-length
    command:
      - -c
      - |
        one_meg=1048576
        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
        disk_available=$$(df / | tail -1 | awk '{print $$4}')
        warning_resources="false"
        if (( mem_available < 4000 )) ; then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
          echo
          warning_resources="true"
        fi
        if (( cpus_available < 2 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
          echo "At least 2 CPUs recommended. You have $${cpus_available}"
          echo
        fi
        if (( disk_available < one_meg * 10 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
          echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
          echo
        fi
        exec /entrypoint bash -c "
        airflow db upgrade && \
        airflow users create -r Admin -u admin -e admin -f admin -l admin -p admin && \
        airflow connections import /usr/local/airflow/airflow_provider_fivetran_async/dev/connections.yaml || true && \
        airflow version"
    # yamllint enable rule:line-length
    environment:
      <<: *airflow-common-env

  # Celery monitoring UI.
  flower:
    <<: *airflow-common
    command: airflow celery flower
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    environment:
      <<: *airflow-common-env
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

volumes:
  postgres-db-volume: