-
Notifications
You must be signed in to change notification settings - Fork 0
/
config.py
154 lines (136 loc) · 5.5 KB
/
config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
"""
Global Config
"""
from os import getenv, path
import logging
import sqlalchemy as sa
from celery import Celery
def get_db_uri(
    dbname: str,
    username_env="AUTOPKG_POSTGRES_USER",
    password_env="AUTOPKG_POSTGRES_PASSWORD",
    host_env="AUTOPKG_POSTGRES_HOST",
    port_env="AUTOPKG_POSTGRES_PORT",
) -> sa.engine.URL:
    """Standard user DBURI"""
    # Gather connection details from the environment; any unset var is None.
    credentials = {
        "username": getenv(username_env),
        "password": getenv(password_env),
        "host": getenv(host_env),
        "port": getenv(port_env),
    }
    # Async driver variant (asyncpg) for application use.
    return sa.engine.URL.create(
        drivername="postgresql+asyncpg",
        database=dbname,
        **credentials,
    )
def get_db_uri_ogr(
dbname: str,
username_env="AUTOPKG_POSTGRES_USER",
password_env="AUTOPKG_POSTGRES_PASSWORD",
host_env="AUTOPKG_POSTGRES_HOST",
port_env="AUTOPKG_POSTGRES_PORT",
) -> sa.engine.URL:
"""Standard user DBURI for use with OGR (no psycopg2)"""
for var in [username_env, password_env, host_env, port_env]:
if not getenv(var):
raise Exception(f"Environment failed to parse - check var: {var}")
return sa.engine.URL.create(
drivername="postgresql",
username=getenv(username_env),
password=getenv(password_env),
host=getenv(host_env),
port=getenv(port_env),
database=dbname,
)
def get_db_uri_sync(
    dbname: str,
    username_env="AUTOPKG_POSTGRES_USER",
    password_env="AUTOPKG_POSTGRES_PASSWORD",
    host_env="AUTOPKG_POSTGRES_HOST",
    port_env="AUTOPKG_POSTGRES_PORT",
) -> sa.engine.URL:
    """Standard user DBURI - non-async"""
    # Resolve each connection component from the environment up front.
    conn_kwargs = dict(
        username=getenv(username_env),
        password=getenv(password_env),
        host=getenv(host_env),
        port=getenv(port_env),
    )
    # Blocking driver variant (psycopg2) for synchronous callers.
    return sa.engine.URL.create(
        drivername="postgresql+psycopg2",
        database=dbname,
        **conn_kwargs,
    )
# DATAPROC VARS
REDIS_HOST = getenv("AUTOPKG_REDIS_HOST", "localhost")
TASK_LOCK_TIMEOUT = int(
    getenv("AUTOPKG_TASK_LOCK_TIMEOUT", "600")
)  # seconds before locked tasks timeout
# Deployment Env
DEPLOYMENT_ENV = getenv("AUTOPKG_DEPLOYMENT_ENV", "prod")
LOG_LEVEL = logging.getLevelName(getenv("AUTOPKG_LOG_LEVEL", "DEBUG"))
INTEGRATION_TEST_ENDPOINT = getenv(
    "AUTOPKG_INTEGRATION_TEST_ENDPOINT", "http://localhost:8000"
)
# Celery Env
CELERY_BROKER = getenv("AUTOPKG_CELERY_BROKER", "redis://localhost")
CELERY_BACKEND = getenv("AUTOPKG_CELERY_BACKEND", "redis://localhost")
CELERY_CONCURRENCY = int(getenv("AUTOPKG_CELERY_CONCURRENCY", "2"))
# NOTE(fix): removed duplicate REDIS_HOST / TASK_LOCK_TIMEOUT assignments that
# previously re-read the env here; the repeated TASK_LOCK_TIMEOUT lacked the
# int() cast and silently clobbered the integer value defined above with a str.
# Api Env
API_POSTGRES_USER = getenv("AUTOPKG_POSTGRES_USER")
API_POSTGRES_PASSWORD = getenv("AUTOPKG_POSTGRES_PASSWORD")
API_POSTGRES_HOST = getenv("AUTOPKG_POSTGRES_HOST")
API_POSTGRES_PORT = getenv("AUTOPKG_POSTGRES_PORT")
API_POSTGRES_DB = getenv("AUTOPKG_POSTGRES_DB", "ccgautopkg")
# Packages URL under-which all packages are served
PACKAGES_HOST_URL = getenv("AUTOPKG_PACKAGES_HOST_URL", "http://localhost/packages")
# Storage backend to use
STORAGE_BACKEND = getenv("AUTOPKG_STORAGE_BACKEND", "localfs")
# Dev / Prod switch for testing
if DEPLOYMENT_ENV == "test":
    # TEST
    # The root-level folder when using localfs storage backend
    LOCALFS_STORAGE_BACKEND_ROOT = getenv(
        "AUTOPKG_LOCALFS_STORAGE_BACKEND_ROOT_TEST",
        path.join(path.dirname(path.abspath(__file__)), "tests", "data", "packages"),
    )
    # The root-level folder when using localfs processing backend
    LOCALFS_PROCESSING_BACKEND_ROOT = getenv(
        "AUTOPKG_LOCALFS_PROCESSING_BACKEND_ROOT_TEST",
        path.join(path.dirname(path.abspath(__file__)), "tests", "data", "processing"),
    )
    # Integration tests which require access to the GRIOSM Postgres instance
    # will be run if this is set to the literal string "True".
    # (Direct comparison replaces the redundant `True if ... else False`.)
    TEST_GRI_OSM = getenv("AUTOPKG_TEST_GRI_OSM", "True") == "True"
    # AWSS3 Storage Backend - test credentials have no fallback (None if unset)
    S3_ACCESS_KEY = getenv("AUTOPKG_S3_TEST_ACCESS_KEY")
    S3_SECRET_KEY = getenv("AUTOPKG_S3_TEST_SECRET_KEY")
    # Top level S3 bucket, under-which packages are stored if using AWSS3 backend
    S3_BUCKET = getenv("AUTOPKG_S3_TEST_BUCKET", "irv-autopkg-dev")
    S3_REGION = getenv("AUTOPKG_S3_REGION", "eu-west-2")
else:
    # PROD
    # The root-level folder when using localfs storage backend
    LOCALFS_STORAGE_BACKEND_ROOT = getenv("AUTOPKG_LOCALFS_STORAGE_BACKEND_ROOT", "")
    # The root-level folder when using localfs processing backend
    LOCALFS_PROCESSING_BACKEND_ROOT = getenv(
        "AUTOPKG_LOCALFS_PROCESSING_BACKEND_ROOT", ""
    )
    # AWSS3 Storage Backend - prod credentials default to empty string
    S3_ACCESS_KEY = getenv("AUTOPKG_S3_ACCESS_KEY", "")
    S3_SECRET_KEY = getenv("AUTOPKG_S3_SECRET_KEY", "")
    # Top level S3 bucket, under-which packages are stored if using AWSS3 backend
    S3_BUCKET = getenv("AUTOPKG_S3_BUCKET", "irv-autopkg")
    S3_REGION = getenv("AUTOPKG_S3_REGION", "eu-west-2")
    # NOTE(review): TEST_GRI_OSM is only defined on the test branch - any
    # prod-path import of it will raise NameError; presumably test-only usage.
# Initialised Startup Data
# Async (asyncpg) DB URI used by the API layer.
DBURI_API = get_db_uri(API_POSTGRES_DB)
CELERY_APP = Celery(
    "AutoPackage",
    worker_prefetch_multiplier=1,  # Do not change - long running tasks require this. See: https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-worker_prefetch_multiplier
    worker_concurrency=CELERY_CONCURRENCY,
    broker_url=CELERY_BROKER,
    result_backend=CELERY_BACKEND,
    result_extended=True,
)
# Seconds before submitted tasks expire
TASK_EXPIRY_SECS = int(getenv("AUTOPKG_TASK_EXPIRY_SECS", "3600"))
# Remove Test Processors from the available processors list unless the env var
# is the literal string "True" (default). Direct comparison replaces the
# redundant `True if ... else False` conditional.
INCLUDE_TEST_PROCESSORS = getenv("AUTOPKG_INCLUDE_TEST_PROCESSORS", "True") == "True"