-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path run.sh
executable file
·336 lines (277 loc) · 9.34 KB
/
run.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
#!/bin/bash
# Task runner for the files-api project.
# Usage: ./run.sh <task> [args...]   (run `./run.sh help` to list tasks)
set -e

#####################
# --- Constants --- #
#####################

# Absolute path to the directory containing this script.
THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
readonly THIS_DIR

# `run-tests` fails if coverage drops below this percentage (0 disables the gate).
readonly MINIMUM_TEST_COVERAGE_PERCENT=0

# Name of the pre-existing AWS Lambda function updated by `deploy-lambda`.
readonly AWS_LAMBDA_FUNCTION_NAME="files-api-handler"

# Build artifacts (lambda handler/layer zips) are written under this dir.
readonly BUILD_DIR_REL_PATH="./build"
readonly BUILD_DIR="${THIS_DIR}/${BUILD_DIR_REL_PATH}"
##########################
# --- Task Functions --- #
##########################
# Install the project's core + dev Python dependencies into the currently
# activated virtualenv (editable install with the [dev] extras).
function install {
    local editable_target="${THIS_DIR}/[dev]"
    python -m pip install --upgrade pip
    python -m pip install --editable "$editable_target"
}
# Package and deploy the src/ code by updating an existing AWS Lambda function.
# The function package consists of:
#   - deployment package: contents of the src/ folder
#   - layer package: installed dependencies
#
# Assumptions:
#   - a lambda function named $AWS_LAMBDA_FUNCTION_NAME already exists
#   - docker 🐳 is available (required by deploy-lambda:cd)
function deploy-lambda {
    # pin the AWS CLI to the course profile/region, then delegate the
    # packaging + upload work to deploy-lambda:cd
    export AWS_PROFILE="cloud-course"
    export AWS_REGION="ap-south-1"
    deploy-lambda:cd
}
# Build the lambda deployment package and dependency layer inside the official
# AWS Lambda docker image, then upload both and point the function at them.
# Requires docker; assumes $AWS_LAMBDA_FUNCTION_NAME already exists.
function deploy-lambda:cd {
    # Run docker as the invoking user so the generated lambda-env folder is
    # user-owned (not root-owned); this lets dependency installation and later
    # cleanup of lambda-env work without sudo.
    local USER_ID GROUP_ID
    USER_ID=$(id -u)
    GROUP_ID=$(id -g)

    local LAMBDA_LAYER_DIR_NAME="lambda-env"
    local LAMBDA_LAYER_DIR="${BUILD_DIR}/${LAMBDA_LAYER_DIR_NAME}"
    local LAMBDA_LAYER_ZIP_FPATH="${BUILD_DIR}/lambda-layer.zip"
    local LAMBDA_HANDLER_ZIP_FPATH="${BUILD_DIR}/lambda.zip"
    local SRC_DIR="${THIS_DIR}/src"

    # clean up artifacts from any previous build
    rm -rf "$LAMBDA_LAYER_DIR" || true
    rm -f "$LAMBDA_LAYER_ZIP_FPATH" || true

    # log out so the *public* ECR can be pulled anonymously
    docker logout || true
    docker pull public.ecr.aws/lambda/python:3.12-arm64

    # Install dependencies inside the lambda image so native wheels match the
    # AWS Lambda runtime (python 3.12, arm64).
    #
    # Note: we remove boto3 and botocore because AWS Lambda automatically
    # provides these. This saves ~24MB in the final, uncompressed layer size.
    # (Fixed: the original ran `pip install --root --upgrade pip`, but
    # `--root` requires a directory argument, so that command was broken.)
    docker run --rm \
        --user "${USER_ID}:${GROUP_ID}" \
        --volume "${THIS_DIR}":/out \
        --entrypoint /bin/bash \
        public.ecr.aws/lambda/python:3.12-arm64 \
        -c " \
            pip install --upgrade pip \
            && pip install \
                --editable /out/[aws-lambda] \
                --target /out/${BUILD_DIR_REL_PATH}/${LAMBDA_LAYER_DIR_NAME}/python \
            && rm -rf /out/${BUILD_DIR_REL_PATH}/${LAMBDA_LAYER_DIR_NAME}/python/boto3 \
            && rm -rf /out/${BUILD_DIR_REL_PATH}/${LAMBDA_LAYER_DIR_NAME}/python/botocore \
        "

    # Bundle dependencies and handler into zip files; zip from inside each
    # directory so archive paths are relative to it.
    cd "$LAMBDA_LAYER_DIR"
    zip -r "$LAMBDA_LAYER_ZIP_FPATH" ./
    cd "$SRC_DIR"
    zip -r "$LAMBDA_HANDLER_ZIP_FPATH" ./
    cd "$THIS_DIR"

    # publish the lambda "deployment package" (the handler)
    aws lambda update-function-code \
        --function-name "$AWS_LAMBDA_FUNCTION_NAME" \
        --zip-file "fileb://${LAMBDA_HANDLER_ZIP_FPATH}" \
        --output json | cat

    # publish the dependency layer as a new version and capture its ARN
    local LAYER_VERSION_ARN
    LAYER_VERSION_ARN=$(aws lambda publish-layer-version \
        --layer-name cloud-course-project-python-deps \
        --compatible-runtimes python3.12 \
        --zip-file "fileb://${LAMBDA_LAYER_ZIP_FPATH}" \
        --compatible-architectures arm64 \
        --query 'LayerVersionArn' \
        --output text | cat)

    # point the function at the new layer version and the handler entrypoint
    aws lambda update-function-configuration \
        --function-name "$AWS_LAMBDA_FUNCTION_NAME" \
        --layers "$LAYER_VERSION_ARN" \
        --handler "files_api.aws_lambda_handler.handler" \
        --output json | cat
}
# Install the generated files-api SDK into the active virtualenv as a
# strict-mode editable package.
function install-generated-sdk {
    local sdk_dir="${THIS_DIR}/files-api-sdk/"
    python -m pip install --upgrade pip
    python -m pip install \
        --editable "$sdk_dir" \
        --config-settings editable_mode=strict
}
# Generate a python-pydantic-v1 client SDK from ./openapi.json using the
# openapi-generator-cli docker image; output goes to ./files-api-sdk.
function generate-client-library {
    # Run docker as the invoking user so the generated SDK folder is
    # user-owned rather than root-owned.
    local USER_ID GROUP_ID
    USER_ID=$(id -u)
    GROUP_ID=$(id -g)
    # quote $PWD and the uid:gid pair — unquoted they break on paths with spaces
    docker run --rm \
        --user "${USER_ID}:${GROUP_ID}" \
        -v "${PWD}:/local" openapitools/openapi-generator-cli generate \
        --generator-name python-pydantic-v1 \
        --input-spec /local/openapi.json \
        --output /local/files-api-sdk \
        --package-name files_api_sdk
}
# Start the FastAPI app against real AWS using the cloud-course profile.
function run {
    # NOTE: the space before each trailing backslash matters — without it,
    # line-continuation could concatenate the assignments into one word.
    AWS_PROFILE=cloud-course \
    S3_BUCKET_NAME=python-aws-cloud-course-bucket \
        uvicorn 'files_api.main:create_app' --reload
}
# Start the FastAPI app with environment variables loaded from ./.env (if it
# exists); the variables are unset again after uvicorn exits.
function run-local {
    # Initialize locally so a stale VARS inherited from the environment can't
    # cause unrelated variables to be unset when no .env file is present.
    local VARS=""
    if [ -f .env ]; then
        # export every non-comment KEY=VALUE line from .env
        export $(grep -v '^#' .env | xargs)
        # capture the variable names so we can unset them afterwards
        VARS=$(grep -v '^#' .env | cut -d= -f1)
    fi
    uvicorn 'files_api.main:create_app' --reload
    # undo the exports so they don't leak past this task
    for var in $VARS; do
        unset "$var"
    done
}
# Start the FastAPI app pointed at a mocked AWS endpoint (moto on :5000) and a
# mocked OpenAI API (docker compose on :1080); both are torn down on exit.
function run-mock {
# disable exit-on-error so the trap-based cleanup still runs if a step fails
set +e
# start moto.server in the background on localhost:5000 and remember its PID
python -m moto.server -p 5000 &
MOTO_PID=$!
# point the AWS CLI and boto3 to the mocked AWS server using mocked credentials
export AWS_ENDPOINT_URL="http://localhost:5000"
export AWS_SECRET_ACCESS_KEY="mock"
export AWS_ACCESS_KEY_ID="mock"
export S3_BUCKET_NAME="some-bucket"
# point the OpenAI client at the mocked OpenAI server using mocked credentials
export OPENAI_BASE_URL="http://localhost:1080"
export OPENAI_API_KEY="mocked_key"
# create the bucket on the mocked aws server so the app can use it
aws s3 mb "s3://$S3_BUCKET_NAME"
# start the docker compose stack that mocks the OpenAI API
docker compose --file ./mock-openai-docker-compose.yaml up --detach
# on EXIT (e.g. Ctrl-C on uvicorn): kill moto and tear down the mock stack
trap 'kill $MOTO_PID; docker compose --file ./mock-openai-docker-compose.yaml down' EXIT
# run the FastAPI app with uvicorn in the foreground (blocks until stopped)
uvicorn src.files_api.main:create_app --reload
# unset the mock-OpenAI variables once uvicorn exits
unset OPENAI_BASE_URL
unset OPENAI_API_KEY
# wait for moto.server to finish (optional — the EXIT trap also kills it)
wait $MOTO_PID
}
# Run every configured pre-commit hook (linting, formatting, and other static
# code-quality tools) across the whole repository.
function lint {
    pre-commit run --all-files
}
# CI variant of `lint`.
# The no-commit-to-branch hook is skipped: it blocks commits to `main`, but
# every merged PR *is* a commit to `main`, so it must be disabled in CI.
function lint:ci {
    SKIP=no-commit-to-branch pre-commit run --all-files
}
# Run only the tests NOT marked `slow`; any arguments are forwarded to
# run-tests (default: the whole tests/ directory).
function test:quick {
    # quote the expansion — unquoted, a default path containing spaces would
    # be word-split before reaching pytest
    run-tests -m "not slow" "${@:-$THIS_DIR/tests/}"
}
# Run the test suite against the *installed* package; assumes the wheel has
# already been installed. In CI, coverage must be measured for the installed
# package directory rather than the src/ folder.
function test:ci {
    local installed_pkg_dir
    installed_pkg_dir="$(python -c 'import files_api; print(files_api.__path__[0])')"
    COVERAGE_DIR="$installed_pkg_dir" run-tests
}
# Run pytest with coverage and write all reports into test-reports/.
# (example) ./run.sh run-tests tests/test_states_info.py::test__slow_add
function run-tests {
    local PYTEST_EXIT_STATUS=0

    # Recreate the test-reports dir. (The original `rm -rf dir || mkdir dir`
    # never ran mkdir, because `rm -rf` succeeds even for a missing dir.)
    rm -rf "$THIS_DIR/test-reports"
    mkdir -p "$THIS_DIR/test-reports"

    # Execute the tests, computing coverage for $COVERAGE_DIR (default: src/).
    # "$@" is quoted so caller arguments like -m "not slow" are not re-split.
    python -m pytest "${@:-$THIS_DIR/tests/}" \
        --cov "${COVERAGE_DIR:-$THIS_DIR/src}" \
        --cov-report html \
        --cov-report term \
        --cov-report xml \
        --junit-xml "$THIS_DIR/test-reports/report.xml" \
        --cov-fail-under "$MINIMUM_TEST_COVERAGE_PERCENT" || ((PYTEST_EXIT_STATUS+=$?))

    # collect the generated reports (best-effort; some may not exist)
    mv coverage.xml "$THIS_DIR/test-reports/" || true
    mv htmlcov "$THIS_DIR/test-reports/" || true
    mv .coverage "$THIS_DIR/test-reports/" || true
    return $PYTEST_EXIT_STATUS
}
# Build the wheel and smoke-test it inside a throwaway venv (test-env), then
# tear everything down. Steps are best-effort (`|| true`) so cleanup still runs.
function test:wheel-locally {
# NOTE(review): `deactivate` is a shell function defined by venv activation,
# not a file, so `source deactivate` likely always fails and is masked by
# `|| true` — TODO confirm the intent here.
source deactivate || true
# start from a fresh throwaway venv
rm -rf test-env || true
python -m venv test-env
source test-env/bin/activate
clean || true
# build the sdist/wheel with the `build` task (requires the build package)
pip install build
build
# install the freshly built wheel with its [test] extras, then run CI tests
WHEEL_FPATH=$(ls ./dist/*.whl)
pip install "${WHEEL_FPATH}[test]" || true
test:ci || true
# tear the throwaway venv and build artifacts back down
source deactivate || true
rm -rf test-env || true
clean || true
}
# Serve the HTML test-coverage report at http://localhost:8000
function serve-coverage-report {
    local report_dir="$THIS_DIR/test-reports/htmlcov/"
    python -m http.server --directory "$report_dir" 8000
}
# Build a wheel and an sdist from the Python source code into ./dist.
function build {
    python -m build --sdist --wheel "${THIS_DIR}/"
}
# Full release pipeline targeting TestPyPI: lint, clean, build, then publish.
function release:test {
    lint
    clean
    build
    publish:test
}
# Production release: run the full TestPyPI pipeline first as a dry run,
# then publish the same artifacts to the real PyPI.
function release:prod {
    release:test
    publish:prod
}
# Upload the built distributions to TestPyPI.
# Credentials: TEST_PYPI_TOKEN, loaded from .env when available.
function publish:test {
    try-load-dotenv || true
    twine upload dist/* \
        --username=__token__ \
        --password="$TEST_PYPI_TOKEN" \
        --repository testpypi
}
# Upload the built distributions to the real PyPI.
# Credentials: PROD_PYPI_TOKEN, loaded from .env when available.
function publish:prod {
    try-load-dotenv || true
    twine upload dist/* \
        --username=__token__ \
        --password="$PROD_PYPI_TOKEN" \
        --repository pypi
}
# Remove all files generated by tests, builds, or operating this codebase.
function clean {
# top-level build/test artifacts
rm -rf dist build coverage.xml test-reports
# delete cache/metadata directories (pycache, *.dist-info, *.egg-info,
# htmlcov, ...) anywhere in the tree, leaving virtualenv contents alone
find . \
-type d \
\( \
-name "*cache*" \
-o -name "*.dist-info" \
-o -name "*.egg-info" \
-o -name "*htmlcov" \
\) \
-not -path "*env*/*" \
-exec rm -r {} + || true
# delete compiled bytecode files, again skipping virtualenvs;
# NOTE(review): the exclude pattern here is "*env/*" while the one above is
# "*env*/*" — presumably both are meant to match venv dirs; confirm which.
find . \
-type f \
-name "*.pyc" \
-not -path "*env/*" \
-exec rm {} +
}
# Export every non-blank, non-comment line of $THIS_DIR/.env as an
# environment variable. Returns 1 (with a message) when no .env exists.
function try-load-dotenv {
    local dotenv_fpath="$THIS_DIR/.env"
    if [ ! -f "$dotenv_fpath" ]; then
        echo "no .env file found"
        return 1
    fi
    # read line-by-line so KEY=VALUE pairs with spaces in the value survive
    while read -r line; do
        export "$line"
    done < <(grep -v '^#' "$dotenv_fpath" | grep -v '^$')
}
# Print usage plus a numbered list of every task function defined in this file.
function help {
    printf '%s <task> <args>\n' "$0"
    printf 'Tasks:\n'
    compgen -A function | cat -n
}
# Dispatch: run the task named by the CLI args (default: help) and report how
# long it took. "${@:-help}" is quoted so arguments with spaces stay intact.
TIMEFORMAT="Task completed in %3lR"
time "${@:-help}"