diff --git a/.github/workflows/deployment.yml b/.github/workflows/deployment.yml
index 9dc919dcf..16350d45f 100644
--- a/.github/workflows/deployment.yml
+++ b/.github/workflows/deployment.yml
@@ -99,26 +99,6 @@ jobs:
           oc login "${{ secrets.OPENSHIFT_CLUSTER }}" --token="${{ secrets.OC4_DEV_TOKEN }}"
           bash openshift/scripts/oc_provision_nats_server_config.sh ${SUFFIX} apply
 
-  deploy-sfms-dev:
-    name: Deploy SFMS API to dev
-    if: github.triggering_actor != 'renovate'
-    needs: [build-api-image, deploy-dev-queue, configure-nats-server-name]
-    runs-on: ubuntu-22.04
-    steps:
-      - name: Set Variables
-        shell: bash
-        run: |
-          echo "SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
-
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Configure
-        shell: bash
-        run: |
-          oc login "${{ secrets.OPENSHIFT_CLUSTER }}" --token="${{ secrets.OC4_DEV_TOKEN }}"
-          MODULE_NAME=api SECOND_LEVEL_DOMAIN="apps.silver.devops.gov.bc.ca" VANITY_DOMAIN="${SUFFIX}-dev-psu.apps.silver.devops.gov.bc.ca" ENVIRONMENT="development" bash openshift/scripts/oc_deploy_sfms.sh ${SUFFIX} apply
-
   deploy-dev:
     name: Deploy to Dev
     if: github.triggering_actor != 'renovate'
diff --git a/.sonarcloud.properties b/.sonarcloud.properties
index 9148781c2..fe2770b85 100644
--- a/.sonarcloud.properties
+++ b/.sonarcloud.properties
@@ -16,7 +16,7 @@ sonar.test.exclusions=*.feature
 sonar.tests.inclusions=**/*.test.tsx
 
-# Exclude duplication in fba tests due to many similar calculation numbers, ignore sample code as it's temporary, ignore sfms entrypoint, ignore util tests, ignore temporary fwi folder
-sonar.cpd.exclusions=api/app/tests/fba_calc/*.py, api/app/weather_models/wind_direction_sample.py, api/app/sfms.py, web/src/features/moreCast2/util.test.ts, web/src/utils/fwi
+# Exclude duplication in fba tests due to many similar calculation numbers, ignore sample code as it's temporary, ignore util tests, ignore temporary fwi folder
+sonar.cpd.exclusions=api/app/tests/fba_calc/*.py, api/app/weather_models/wind_direction_sample.py, web/src/features/moreCast2/util.test.ts, web/src/utils/fwi
 
 # Encoding of the source code. Default is default system encoding
 sonar.sourceEncoding=UTF-8
diff --git a/.vscode/settings.json b/.vscode/settings.json
index dc8ad8381..8f5c6b4c0 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -85,6 +85,7 @@
     "maxy",
     "miny",
     "morecast",
+    "nats",
     "ndarray",
     "numba",
     "ORJSON",
@@ -101,8 +102,10 @@
     "rocketchat",
     "sfms",
     "sqlalchemy",
+    "starlette",
     "tobytes",
     "upsampled",
+    "uvicorn",
     "vectorize",
     "VSIL",
     "vsimem",
diff --git a/Dockerfile b/Dockerfile
index 7eb651a19..30ead8b4b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -75,7 +75,6 @@ COPY ./api/alembic.ini /app
 # Copy pre-start.sh (it will be run on startup):
 COPY ./api/prestart.sh /app
 COPY ./api/start.sh /app
-COPY ./api/start_sfms.sh /app
 
 # Copy installed Python packages
 COPY --from=builder /home/worker/.cache/pypoetry/virtualenvs /home/worker/.cache/pypoetry/virtualenvs
diff --git a/api/app/routers/sfms.py b/api/app/routers/sfms.py
index f1ee49bc3..32cb404d2 100644
--- a/api/app/routers/sfms.py
+++ b/api/app/routers/sfms.py
@@ -1,4 +1,5 @@
-""" Router for SFMS """
+"""Router for SFMS"""
+
 import io
 import logging
 from datetime import datetime, date
@@ -21,10 +22,11 @@
     prefix="/sfms",
 )
 
-SFMS_HOURLIES_PERMISSIONS = 'public-read'
+SFMS_HOURLIES_PERMISSIONS = "public-read"
+
 
 class FileLikeObject(io.IOBase):
-    """ Very basic wrapper of the SpooledTemporaryFile to expose the file-like object interface.
+    """Very basic wrapper of the SpooledTemporaryFile to expose the file-like object interface.
     The aiobotocore library expects a file-like object, but we can't pass the SpooledTemporaryFile
     object directly to aiobotocore. aiobotocore looks for a "tell" method, which isn't present
@@ -48,34 +50,16 @@ def seek(self, offset: int, whence: int = io.SEEK_SET):
 
 
 def get_meta_data(request: Request) -> dict:
-    """ Create the meta-data for the s3 object.
+    """Create the meta-data for the s3 object.
     # NOTE: No idea what timezone this is going to be. Is it UTC? Is it PST? Is it PDT?
     """
-    last_modified = datetime.fromisoformat(request.headers.get(
-        'Last-modified'))
-    create_time = datetime.fromisoformat(request.headers.get(
-        'Create-time'))
-    return {
-        'last_modified': last_modified.isoformat(),
-        'create_time': create_time.isoformat()}
-
-
-@router.get('/ready')
-async def get_ready():
-    """ A simple endpoint for OpenShift readiness """
-    return Response()
-
-
-@router.get('/health')
-async def get_health():
-    """ A simple endpoint for Openshift Healthchecks. """
-    return Response()
-
-
-@router.post('/upload')
-async def upload(file: UploadFile,
-                 request: Request,
-                 background_tasks: BackgroundTasks,
-                 _=Depends(sfms_authenticate)):
+    last_modified = datetime.fromisoformat(request.headers.get("Last-modified"))
+    create_time = datetime.fromisoformat(request.headers.get("Create-time"))
+    return {"last_modified": last_modified.isoformat(), "create_time": create_time.isoformat()}
+
+
+@router.post("/upload")
+async def upload(file: UploadFile, request: Request, background_tasks: BackgroundTasks, _=Depends(sfms_authenticate)):
     """
     Trigger the SFMS process to run on the provided file.
     The header MUST include the SFMS secret key.
@@ -89,7 +73,7 @@ async def upload(file: UploadFile,
         -F 'file=@hfi20220812.tif;type=image/tiff'
     ```
     """
-    logger.info('sfms/upload/')
+    logger.info("sfms/upload/")
     # Get an async S3 client.
     async with get_client() as (client, bucket):
         # We save the Last-modified and Create-time as metadata in the object store - just
@@ -97,12 +81,9 @@ async def upload(file: UploadFile,
         key = get_target_filename(file.filename)
         logger.info('Uploading file "%s" to "%s"', file.filename, key)
         meta_data = get_meta_data(request)
-        await client.put_object(Bucket=bucket,
-                                Key=key,
-                                Body=FileLikeObject(file.file),
-                                Metadata=meta_data)
+        await client.put_object(Bucket=bucket, Key=key, Body=FileLikeObject(file.file), Metadata=meta_data)
         await file.close()
-        logger.info('Done uploading file')
+        logger.info("Done uploading file")
         try:
             # We don't want to hold back the response to the client, so we'll publish the message
             # as a background task.
@@ -122,10 +103,9 @@ async def upload(file: UploadFile,
     # and can't be given that level of responsibility.
     return Response(status_code=200)
 
-@router.post('/upload/hourlies')
-async def upload_hourlies(file: UploadFile,
-                          request: Request,
-                          _=Depends(sfms_authenticate)):
+
+@router.post("/upload/hourlies")
+async def upload_hourlies(file: UploadFile, request: Request, _=Depends(sfms_authenticate)):
     """
     Trigger the SFMS process to run on the provided file for hourlies.
     The header MUST include the SFMS secret key.
@@ -139,7 +119,7 @@ async def upload_hourlies(file: UploadFile,
         -F 'file=@hfi20220812.tif;type=image/tiff'
     ```
     """
-    logger.info('sfms/upload/hourlies')
+    logger.info("sfms/upload/hourlies")
     if is_ffmc_file(file.filename):
         # Get an async S3 client.
@@ -149,40 +129,34 @@ async def upload_hourlies(file: UploadFile,
             key = get_hourly_filename(file.filename)
             logger.info('Uploading file "%s" to "%s"', file.filename, key)
             meta_data = get_meta_data(request)
-            await client.put_object(Bucket=bucket,
-                                    Key=key,
-                                    ACL=SFMS_HOURLIES_PERMISSIONS,
-                                    Body=FileLikeObject(file.file),
-                                    Metadata=meta_data)
+            await client.put_object(Bucket=bucket, Key=key, ACL=SFMS_HOURLIES_PERMISSIONS, Body=FileLikeObject(file.file), Metadata=meta_data)
             await file.close()
-            logger.info('Done uploading file')
+            logger.info("Done uploading file")
     return Response(status_code=200)
 
 
-@router.get('/hourlies', response_model=HourlyTIFs)
+@router.get("/hourlies", response_model=HourlyTIFs)
 async def get_hourlies(for_date: date):
     """
-    Retrieve hourly FFMC TIF files for the given date. 
+    Retrieve hourly FFMC TIF files for the given date.
     Files are named in the format: "fine_fuel_moisture_codeYYYYMMDDHH.tif",
     where HH is the two digit day hour in PST.
     """
-    logger.info('sfms/hourlies')
+    logger.info("sfms/hourlies")
     async with get_client() as (client, bucket):
         logger.info('Retrieving hourlies for "%s"', for_date)
-        bucket = config.get('OBJECT_STORE_BUCKET')
-        response = await client.list_objects_v2(Bucket=bucket, Prefix=f'sfms/uploads/hourlies/{str(for_date)}')
-        if 'Contents' in response:
-            hourlies = [HourlyTIF(url=f'https://nrs.objectstore.gov.bc.ca/{bucket}/{hourly["Key"]}') for hourly in response['Contents']]
-            logger.info(f'Retrieved {len(hourlies)} hourlies')
+        bucket = config.get("OBJECT_STORE_BUCKET")
+        response = await client.list_objects_v2(Bucket=bucket, Prefix=f"sfms/uploads/hourlies/{str(for_date)}")
+        if "Contents" in response:
+            hourlies = [HourlyTIF(url=f'https://nrs.objectstore.gov.bc.ca/{bucket}/{hourly["Key"]}') for hourly in response["Contents"]]
+            logger.info(f"Retrieved {len(hourlies)} hourlies")
             return HourlyTIFs(hourlies=hourlies)
-    logger.info(f'No hourlies found for {for_date}')
+    logger.info(f"No hourlies found for {for_date}")
     return HourlyTIFs(hourlies=[])
 
-
-@router.post('/manual')
-async def upload_manual(file: UploadFile,
-                        request: Request,
-                        background_tasks: BackgroundTasks):
+
+@router.post("/manual")
+async def upload_manual(file: UploadFile, request: Request, background_tasks: BackgroundTasks):
     """
     Trigger the SFMS process to run on the provided file.
     The header MUST include the SFMS secret key.
@@ -198,31 +172,27 @@ async def upload_manual(file: UploadFile,
         -F 'file=@hfi20220812.tif;type=image/tiff'
     ```
     """
-    logger.info('sfms/manual')
-    forecast_or_actual = request.headers.get('ForecastOrActual')
-    issue_date = datetime.fromisoformat(str(request.headers.get('IssueDate')))
-    secret = request.headers.get('Secret')
-    if not secret or secret != config.get('SFMS_SECRET'):
+    logger.info("sfms/manual")
+    forecast_or_actual = request.headers.get("ForecastOrActual")
+    issue_date = datetime.fromisoformat(str(request.headers.get("IssueDate")))
+    secret = request.headers.get("Secret")
+    if not secret or secret != config.get("SFMS_SECRET"):
         return Response(status_code=401)
     # Get an async S3 client.
     async with get_client() as (client, bucket):
         # We save the Last-modified and Create-time as metadata in the object store - just
         # in case we need to know about it in the future.
-        key = os.path.join('sfms', 'uploads', forecast_or_actual, issue_date.isoformat()[:10], file.filename)
+        key = os.path.join("sfms", "uploads", forecast_or_actual, issue_date.isoformat()[:10], file.filename)
         # create the filename
         logger.info('Uploading file "%s" to "%s"', file.filename, key)
         meta_data = get_meta_data(request)
-        await client.put_object(Bucket=bucket,
-                                Key=key,
-                                Body=FileLikeObject(file.file),
-                                Metadata=meta_data)
+        await client.put_object(Bucket=bucket, Key=key, Body=FileLikeObject(file.file), Metadata=meta_data)
         await file.close()
-        logger.info('Done uploading file')
+        logger.info("Done uploading file")
     return add_msg_to_queue(file, key, forecast_or_actual, meta_data, issue_date, background_tasks)
 
 
-def add_msg_to_queue(file: UploadFile, key: str, forecast_or_actual: str, meta_data: dict,
-                     issue_date: datetime, background_tasks: BackgroundTasks):
+def add_msg_to_queue(file: UploadFile, key: str, forecast_or_actual: str, meta_data: dict, issue_date: datetime, background_tasks: BackgroundTasks):
     try:
         # We don't want to hold back the response to the client, so we'll publish the message
         # as a background task.
@@ -231,14 +201,14 @@ def add_msg_to_queue(file: UploadFile, key: str, forecast_or_actual: str, meta_d
         if is_hfi_file(filename=file.filename):
             logger.info("HFI file: %s, putting processing message on queue", file.filename)
             for_date = get_date_part(file.filename)
-            message = SFMSFile(key=key,
-                               run_type=forecast_or_actual,
-                               last_modified=datetime.fromisoformat(meta_data.get('last_modified')),
-                               create_time=datetime.fromisoformat(meta_data.get('create_time')),
-                               run_date=issue_date,
-                               for_date=date(year=int(for_date[0:4]),
-                                             month=int(for_date[4:6]),
-                                             day=int(for_date[6:8])))
+            message = SFMSFile(
+                key=key,
+                run_type=forecast_or_actual,
+                last_modified=datetime.fromisoformat(meta_data.get("last_modified")),
+                create_time=datetime.fromisoformat(meta_data.get("create_time")),
+                run_date=issue_date,
+                for_date=date(year=int(for_date[0:4]), month=int(for_date[4:6]), day=int(for_date[6:8])),
+            )
             background_tasks.add_task(publish, stream_name, sfms_file_subject, message, subjects)
     except Exception as exception:
         logger.error(exception, exc_info=True)
@@ -251,30 +221,22 @@ def add_msg_to_queue(file: UploadFile, key: str, forecast_or_actual: str, meta_d
     return Response(status_code=200)
 
 
-@router.post('/manual/msgOnly')
-async def upload_manual_msg(message: ManualSFMS,
-                            background_tasks: BackgroundTasks,
-                            secret: str | None = Header(default=None)):
+@router.post("/manual/msgOnly")
+async def upload_manual_msg(message: ManualSFMS, background_tasks: BackgroundTasks, secret: str | None = Header(default=None)):
     """
     Trigger the SFMS process to run on a tif file that already exists in s3.
     Client provides, key, for_date, runtype, run_date and an SFMS message
     is queued up on the message queue.
     """
-    logger.info('sfms/manual/msgOnly')
+    logger.info("sfms/manual/msgOnly")
     logger.info("Received request to process tif: %s", message.key)
-    if not secret or secret != config.get('SFMS_SECRET'):
+    if not secret or secret != config.get("SFMS_SECRET"):
         return Response(status_code=401)
     async with get_client() as (client, bucket):
-        tif_object = await client.get_object(Bucket=bucket,
-                                             Key=message.key)
-        logger.info('Found requested object: %s', tif_object)
+        tif_object = await client.get_object(Bucket=bucket, Key=message.key)
+        logger.info("Found requested object: %s", tif_object)
         last_modified = datetime.fromisoformat(tif_object["Metadata"]["last_modified"])
         create_time = datetime.fromisoformat(tif_object["Metadata"]["create_time"])
-        message = SFMSFile(key=message.key,
-                           run_type=message.runtype,
-                           last_modified=last_modified,
-                           create_time=create_time,
-                           run_date=message.run_date,
-                           for_date=message.for_date)
+        message = SFMSFile(key=message.key, run_type=message.runtype, last_modified=last_modified, create_time=create_time, run_date=message.run_date, for_date=message.for_date)
         background_tasks.add_task(publish, stream_name, sfms_file_subject, message, subjects)
diff --git a/api/app/sfms.py b/api/app/sfms.py
deleted file mode 100644
index aa550e6fe..000000000
--- a/api/app/sfms.py
+++ /dev/null
@@ -1,114 +0,0 @@
-""" This module contains the router for the SFMS API.
-"""
-import logging
-from urllib.request import Request
-from fastapi import FastAPI
-from fastapi.middleware.cors import CORSMiddleware
-import sentry_sdk
-from starlette.applications import Starlette
-from app import configure_logging
-from app import config
-from app.rocketchat_notifications import send_rocketchat_notification
-from app.routers import (sfms)
-
-
-configure_logging()
-
-logger = logging.getLogger(__name__)
-
-API_INFO = '''
-    Description: SFMS API for the PSU Services
-
-    Warranty Disclaimer:
-
-    This PSU API and related documentation is provided as a public service by the
-    Government of British Columbia, Box 9411, Victoria, British
-    Columbia, Canada V8W 9V1.
-
-    This PSU API and related documentation are provided \"as is\" without
-    warranty of any kind, whether express or implied. Users of this
-    software and documentation do so at their own risk. All implied
-    warranties, including, without limitation, implied warranties of
-    merchantability, fitness for a particular purpose, and
-    non - infringement, are hereby expressly disclaimed. Links and
-    references to any other websites or software are provided for
-    information only and listing shall not be taken as endorsement of
-    any kind.
-
-    The Government of British Columbia is not responsible for the
-    content or reliability of any linked software and websites and does
-    not endorse the content, products, services or views expressed
-    within them. It is the responsibility of all persons who use the PSU API
-    and documentation to independently confirm the accuracy of the
-    data, information, or results obtained through their use.
-
-    Limitation of Liabilities Under no circumstances will the Government
-    of British Columbia be liable to any person or business entity for
-    any direct, indirect, special, incidental, consequential, or other
-    damages based on any use of this software and documentation or any
-    other software to which this site is linked, including, without
-    limitation, any lost profits, business interruption, or loss of
-    programs or information, even if the Government of British Columbia
-    has been specifically advised of the possibility of such damages.'''
-
-if config.get('ENVIRONMENT') == 'production':
-    sentry_sdk.init(
-        dsn=config.get("SENTRY_DSN"),
-        environment=config.get('ENVIRONMENT'),
-        # Set traces_sample_rate to 1.0 to capture 100%
-        # of transactions for performance monitoring.
-        traces_sample_rate=0.5,
-        # Set profiles_sample_rate to 1.0 to profile 100%
-        # of sampled transactions.
-        # We recommend adjusting this value in production.
-        profiles_sample_rate=0.5,
-    )
-
-# This is the api app.
-api = FastAPI(
-    title="Predictive Services SFMS API",
-    description=API_INFO,
-    version="0.0.0"
-)
-
-# This is our base starlette app - it doesn't do much except glue together
-# the api and the front end.
-app = Starlette()
-
-
-# Mount the /api
-# In production, / routes to the frontend. (api and front end run in seperate containers, with
-# seperate routing)
-app.mount('/api', app=api)
-
-ORIGINS = config.get('ORIGINS')
-
-
-async def catch_exception_middleware(request: Request, call_next):
-    """ Basic middleware to catch all unhandled exceptions and log them to the terminal """
-    try:
-        return await call_next(request)
-    except Exception as exc:
-        logger.error('%s %s %s', request.method, request.url.path, exc, exc_info=True)
-        rc_message = f"Exception occurred {request.method} {request.url.path}"
-        send_rocketchat_notification(rc_message, exc)
-        raise
-
-app.middleware('http')(catch_exception_middleware)
-
-api.add_middleware(
-    CORSMiddleware,
-    allow_origins=ORIGINS,
-    allow_credentials=True,
-    allow_methods=["GET", "POST"],
-    allow_headers=["*"],
-)
-
-api.include_router(sfms.router, tags=["SFMS", "Auto Spatial Advisory"])
-
-if __name__ == "__main__":
-    # This section of code is for the convenience of developers only. Having this section of code, allows
-    # for developers to easily debug the application by running main.py and attaching to it with a debugger.
-    # uvicorn is imported in this scope only, as it's not required when the application is run in production.
-    import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8080)
diff --git a/api/start_sfms.sh b/api/start_sfms.sh
deleted file mode 100755
index f2a7864da..000000000
--- a/api/start_sfms.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-set -e
-
-# set some defaults
-GUNICORN_WORKERS="${GUNICORN_WORKERS:-4}"
-# start the server
-poetry run gunicorn app.sfms:app --timeout 200 --workers $GUNICORN_WORKERS --worker-class uvicorn.workers.UvicornWorker --bind=0.0.0.0:8080
diff --git a/openshift/scripts/common/envars b/openshift/scripts/common/envars
index 394d97dbb..48877f69a 100644
--- a/openshift/scripts/common/envars
+++ b/openshift/scripts/common/envars
@@ -7,7 +7,6 @@ PROJ_PROD="${PROJ_PROD:-e1e498-prod}"
 TAG_PROD="${TAG_PROD:-prod}"
 PATH_BC="${PATH_BC:-$(dirname ${0})/../templates/build.bc.yaml}"
 PATH_DC="${PATH_DC:-$(dirname ${0})/../templates/deploy.dc.yaml}"
-PATH_SFMS="${PATH_SFMS:-$(dirname ${0})/../templates/sfms.yaml}"
 PATH_NATS="${PATH_NATS:-$(dirname ${0})/../templates/nats.yaml}"
 PATH_NATS_SERVER_CONFIG="${PATH_NATS_SERVER_CONFIG:-$(dirname ${0})/../templates/nats_server.yaml}"
 TEMPLATE_PATH="${TEMPLATE_PATH:-$(dirname ${0})/../templates}"
diff --git a/openshift/scripts/oc_cleanup.sh b/openshift/scripts/oc_cleanup.sh
index 022f79a1d..870d48431 100755
--- a/openshift/scripts/oc_cleanup.sh
+++ b/openshift/scripts/oc_cleanup.sh
@@ -29,7 +29,7 @@ if [ "${APPLY}" ]; then
 else
   DELETE_OR_GET="get"
 fi
-OC_CLEAN_DEPLOY="oc -n ${PROJ_TARGET} ${DELETE_OR_GET} all,cm,pvc -o name -l 'app in (${APP_LABEL},sfms-${SUFFIX})'"
+OC_CLEAN_DEPLOY="oc -n ${PROJ_TARGET} ${DELETE_OR_GET} all,cm,pvc -o name -l app=${APP_LABEL}"
 
 # Execute commands
 #
diff --git a/openshift/scripts/oc_deploy_sfms.sh b/openshift/scripts/oc_deploy_sfms.sh
deleted file mode 100755
index 1ed0369d9..000000000
--- a/openshift/scripts/oc_deploy_sfms.sh
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/sh -l
-#
-source "$(dirname ${0})/common/common"
-
-#%
-#% OpenShift Deploy Helper
-#%
-#%   Intended for use with a pull request-based pipeline.
-#%   Suffixes incl.: pr-###, test and prod.
-#%
-#% Usage:
-#%
-#%   [CPU_REQUEST=<>] [CPU_LIMIT=<>] [MEMORY_REQUEST=<>] [MEMORY_LIMIT=<>] [REPLICAS=<>] \
-#%     ${THIS_FILE} [SUFFIX] [apply]
-#%
-#% Examples:
-#%
-#%   Provide a PR number. Defaults to a dry-run=client.
-#%   ${THIS_FILE} pr-0
-#%
-#%   Apply when satisfied.
-#%   ${THIS_FILE} pr-0 apply
-#%
-#%   Override default CPU_REQUEST to 2000 millicores
-#%   CPU_REQUEST=2000m ${THIS_FILE} pr-0
-
-# Target project override for Dev or Prod deployments
-#
-PROJ_TARGET="${PROJ_TARGET:-${PROJ_DEV}}"
-OBJ_NAME="${APP_NAME}-${SUFFIX}"
-
-# Process a template (mostly variable substition)
-#
-OC_PROCESS="oc -n ${PROJ_TARGET} process -f ${PATH_SFMS} \
- -p SUFFIX=${SUFFIX} \
- -p PROJECT_NAMESPACE=${PROJ_TARGET} \
- -p VANITY_DOMAIN=${VANITY_DOMAIN} \
- ${SECOND_LEVEL_DOMAIN:+ "-p SECOND_LEVEL_DOMAIN=${SECOND_LEVEL_DOMAIN}"} \
- ${GUNICORN_WORKERS:+ "-p GUNICORN_WORKERS=${GUNICORN_WORKERS}"} \
- ${CPU_REQUEST:+ "-p CPU_REQUEST=${CPU_REQUEST}"} \
- ${CPU_LIMIT:+ "-p CPU_LIMIT=${CPU_LIMIT}"} \
- ${MEMORY_REQUEST:+ "-p MEMORY_REQUEST=${MEMORY_REQUEST}"} \
- ${MEMORY_LIMIT:+ "-p MEMORY_LIMIT=${MEMORY_LIMIT}"} \
- ${PROJ_TOOLS:+ "-p PROJ_TOOLS=${PROJ_TOOLS}"} \
- ${IMAGE_REGISTRY:+ "-p IMAGE_REGISTRY=${IMAGE_REGISTRY}"} \
- ${ENVIRONMENT:+ "-p ENVIRONMENT=${ENVIRONMENT}"} \
- ${REPLICAS:+ "-p REPLICAS=${REPLICAS}"}"
-
-# Apply a template (apply or use --dry-run=client)
-#
-OC_APPLY="oc -n ${PROJ_TARGET} apply -f -"
-[ "${APPLY}" ] || OC_APPLY="${OC_APPLY} --dry-run=client"
-
-# Execute commands
-#
-eval "${OC_PROCESS}"
-eval "${OC_PROCESS} | ${OC_APPLY}"
-
-display_helper "${OC_PROCESS} | ${OC_APPLY}"
diff --git a/openshift/scripts/oc_deploy_to_production.sh b/openshift/scripts/oc_deploy_to_production.sh
index aab2427b1..9a4e0f4a8 100755
--- a/openshift/scripts/oc_deploy_to_production.sh
+++ b/openshift/scripts/oc_deploy_to_production.sh
@@ -44,8 +44,6 @@ echo Provision NATS
 PROJ_TARGET=${PROJ_TARGET} bash $(dirname ${0})/oc_provision_nats.sh prod ${RUN_TYPE}
 echo Deploy API
 MODULE_NAME=api GUNICORN_WORKERS=8 CPU_REQUEST=100m CPU_LIMIT=500m MEMORY_REQUEST=6Gi MEMORY_LIMIT=8Gi REPLICAS=3 PROJ_TARGET=${PROJ_TARGET} VANITY_DOMAIN=psu.nrs.gov.bc.ca SECOND_LEVEL_DOMAIN=apps.silver.devops.gov.bc.ca USE_WFWX="True" ENVIRONMENT="production" bash $(dirname ${0})/oc_deploy.sh prod ${RUN_TYPE}
-echo Deploy SFMS
-MODULE_NAME=api GUNICORN_WORKERS=8 CPU_REQUEST=100m CPU_LIMIT=500m MEMORY_REQUEST=2Gi MEMORY_LIMIT=6Gi REPLICAS=2 PROJ_TARGET=${PROJ_TARGET} VANITY_DOMAIN=psu.nrs.gov.bc.ca SECOND_LEVEL_DOMAIN=apps.silver.devops.gov.bc.ca ENVIRONMENT="production" bash $(dirname ${0})/oc_deploy_sfms.sh prod ${RUN_TYPE}
 echo Env Canada Subscriber
 PROJ_TARGET=${PROJ_TARGET} bash $(dirname ${0})/oc_provision_ec_gdps_cronjob.sh prod ${RUN_TYPE}
 PROJ_TARGET=${PROJ_TARGET} bash $(dirname ${0})/oc_provision_ec_hrdps_cronjob.sh prod ${RUN_TYPE}
diff --git a/openshift/templates/sfms.yaml b/openshift/templates/sfms.yaml
deleted file mode 100644
index db1ed943a..000000000
--- a/openshift/templates/sfms.yaml
+++ /dev/null
@@ -1,330 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
-  name: ${APP_NAME}
-  annotations:
-    openshift.io/display-name: "wps"
-    description: "Wildfire Predictive Services - SFMS API"
-    openshift.io/long-description: "Wildfire Predictive Services - SFMS API"
-    tags: "wps"
-    iconClass: icon-js
-    openshift.io/provider-display-name: "Government of British Columbia"
-    openshift.io/documentation-url: "https://github.com/bcgov/wps"
-    openshift.io/support-url: "https://github.com/bcgov/wps"
-labels:
-  app.kubernetes.io/part-of: "${APP_NAME}"
-  app: ${APP_NAME}-${SUFFIX}
-parameters:
-  - name: APP_NAME
-    description: Application name (wps - wildfire predictive services)
-    value: sfms
-  - name: GLOBAL_NAME
-    description: Name of global Module
-    value: wps-global
-  - name: SUFFIX
-    description: Deployment suffix, e.g. pr-###
-    required: true
-  - name: PROJ_TOOLS
-    value: e1e498-tools
-  - name: CPU_REQUEST
-    description: Requested CPU
-    value: 100m
-  - name: CPU_LIMIT
-    description: CPU upper limit
-    value: 500m
-  - name: MEMORY_REQUEST
-    description: Requested memory
-    value: 500Mi
-  - name: MEMORY_LIMIT
-    description: Memory upper limit
-    value: 1Gi
-  - name: REPLICAS
-    description: Number of replicas (pods)
-    value: "2"
-  - name: ALLOWED_ORIGINS
-    value: wps-*.apps.silver.devops.gov.bc.ca
-  - name: PROJECT_NAMESPACE
-    description: Openshift project namespace. Used for /health check
-    required: true
-  - name: IMAGE_REGISTRY
-    description: Location where images are to be pulled
-    value: image-registry.openshift-image-registry.svc:5000
-    required: true
-  - name: SECOND_LEVEL_DOMAIN
-    description: The domain of which this application hangs.
-    required: True
-    value: apps.silver.devops.gov.bc.ca
-  - name: ENVIRONMENT
-    description: Used for specifying which environment sentry is running in
-    value: ""
-  - name: VANITY_DOMAIN
-    requests: True
-  - name: GUNICORN_WORKERS
-    description: "Number of gunicorn workers"
-    value: "4"
-objects:
-  - apiVersion: apps/v1
-    kind: Deployment
-    metadata:
-      labels:
-        app: ${APP_NAME}-${SUFFIX}
-      name: ${APP_NAME}-${SUFFIX}
-      annotations:
-        image.openshift.io/triggers: '[{"from":{"kind":"ImageStreamTag","name":"${PROJ_TOOLS}/wps-api-${SUFFIX}:${SUFFIX}"},"fieldPath":"spec.template.spec.containers[?(@.name==\"container\")].sfms"}]'
-    spec:
-      replicas: ${{REPLICAS}}
-      selector:
-        matchLabels:
-          app: ${APP_NAME}-${SUFFIX}
-      strategy:
-        type: RollingUpdate
-      template:
-        metadata:
-          labels:
-            app: ${APP_NAME}-${SUFFIX}
-        spec:
-          automountServiceAccountToken: false
-          volumes:
-            - name: config-env
-              configMap:
-                name: ${GLOBAL_NAME}
-          containers:
-            #############################################################################
-            # SFMS API container
-            #############################################################################
-            - image: ${IMAGE_REGISTRY}/${PROJ_TOOLS}/wps-api-${SUFFIX}:${SUFFIX}
-              imagePullPolicy: Always
-              command: ["bash", "start_sfms.sh"]
-              name: ${APP_NAME}
-              env:
-                - name: ORIGINS
-                  value: ${ALLOWED_ORIGINS}
-                - name: BASE_URI
-                  value: https://${APP_NAME}-${SUFFIX}-${PROJECT_NAMESPACE}.${SECOND_LEVEL_DOMAIN}
-                - name: PORT # The port that the API will run on (used by fastapi docker image)
-                  value: "8080"
-                - name: ENVIRONMENT
-                  value: ${ENVIRONMENT}
-                - name: GUNICORN_WORKERS # The number of workers to spawn
-                  value: ${GUNICORN_WORKERS}
-                - name: "TIMEOUT" # https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker#timeout
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.gunicorn.timeout
-                - name: KEYCLOAK_PUBLIC_KEY
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.gold.keycloak-public-key
-                - name: KEYCLOAK_CLIENT
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.gold.keycloak-client
-                - name: STATUS_CHECKER_SECRET
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: status-checker-sa-secret
-                - name: OPENSHIFT_BASE_URI
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.openshift-base-uri
-                - name: OPENSHIFT_NAMESPACE_API
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.openshift-namespace-api
-                - name: PROJECT_NAMESPACE
-                  value: ${PROJECT_NAMESPACE}
-                - name: ROCKET_URL_POST_MESSAGE
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: rocket.chat-url-post-message
-                - name: ROCKET_CHANNEL
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: rocket.chat-channel
-                - name: ROCKET_USER_ID
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: rocket.chat-user-id-secret
-                - name: ROCKET_AUTH_TOKEN
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: rocket.chat-auth-token-secret
-                - name: OBJECT_STORE_SERVER
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: object-store-server
-                - name: OBJECT_STORE_USER_ID
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: object-store-user-id
-                - name: OBJECT_STORE_SECRET
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: object-store-secret
-                - name: OBJECT_STORE_BUCKET
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: object-store-bucket
-                - name: REDIS_HOST
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.redis-host
-                - name: REDIS_PORT
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.redis-port
-                - name: REDIS_USE
-                  valueFrom:
-                    configMapKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: env.redis-use
-                - name: REDIS_PASSWORD
-                  valueFrom:
-                    secretKeyRef:
-                      name: wps-redis
-                      key: database-password
-                - name: SFMS_SECRET
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: sfms-secret
-                - name: SENTRY_DSN
-                  valueFrom:
-                    secretKeyRef:
-                      name: ${GLOBAL_NAME}
-                      key: sentry-dsn
-                - name: NATS_STREAM_PREFIX
-                  value: wps-${SUFFIX}
-                - name: NATS_SERVER
-                  valueFrom:
-                    configMapKeyRef:
-                      name: wps-${SUFFIX}-nats-server
-                      key: nats.server
-              ports:
-                - containerPort: 8080
-                  protocol: TCP
-              resources:
-                limits:
-                  cpu: ${CPU_LIMIT}
-                  memory: ${MEMORY_LIMIT}
-                requests:
-                  cpu: ${CPU_REQUEST}
-                  memory: ${MEMORY_REQUEST}
-              readinessProbe:
-                httpGet:
-                  path: /api/sfms/ready
-                  port: 8080
-                  scheme: HTTP
-                # first probe will fire some time between:
-                # initialDelaySeconds and initialDelaySeconds + periodSeconds
-                initialDelaySeconds: 30
-                periodSeconds: 120
-                timeoutSeconds: 1
-              livenessProbe:
-                successThreshold: 1
-                failureThreshold: 3
-                httpGet:
-                  path: /api/sfms/health
-                  port: 8080
-                  scheme: HTTP
-                # first probe will fire some time between:
-                # initialDelaySeconds and initialDelaySeconds + periodSeconds
-                initialDelaySeconds: 30
-                periodSeconds: 120
-                timeoutSeconds: 20
-  - apiVersion: autoscaling.k8s.io/v1
-    kind: VerticalPodAutoscaler
-    metadata:
-      name: ${APP_NAME}-vpa-recommender-${SUFFIX}
-    spec:
-      targetRef:
-        apiVersion: "apps.openshift.io/v1"
-        kind: DeploymentConfig
-        name: ${APP_NAME}-${SUFFIX}
-      updatePolicy:
-        updateMode: "Off"
-  - apiVersion: v1
-    kind: Service
-    metadata:
-      labels:
-        app: ${APP_NAME}-${SUFFIX}
-      name: ${APP_NAME}-api-${SUFFIX}
-    spec:
-      ports:
-        - name: 8080-tcp
-          protocol: TCP
-          port: 80
-          targetPort: 8080
-      selector:
-        app: ${APP_NAME}-${SUFFIX}
-  - apiVersion: route.openshift.io/v1
-    ################################################################################
-    # SFMS API ROUTE
-    # Route for wps-api-prod.apps.silver.devops.gov.bc.ca/api/sfms
-    kind: Route
-    metadata:
-      labels:
-        app: ${APP_NAME}-${SUFFIX}
-      name: ${APP_NAME}-api-${SUFFIX}
-      annotations:
-        # Enable HTTP Strict Transport Security:
-        haproxy.router.openshift.io/hsts_header: max-age=31536000;includeSubDomains;preload
-        # Use roundrobin load balancing strategy
-        haproxy.router.openshift.io/balance: roundrobin
-        haproxy.router.openshift.io/disable_cookies: "true"
-    spec:
-      host: wps-${SUFFIX}-${PROJECT_NAMESPACE}.${SECOND_LEVEL_DOMAIN}
-      path: "/api/sfms"
-      port:
-        targetPort: 8080-tcp
-      to:
-        kind: Service
-        name: ${APP_NAME}-api-${SUFFIX}
-      tls:
-        termination: edge
-  - apiVersion: route.openshift.io/v1
-    ################################################################################
-    # SFMS API ROUTE
-    # Route for psu.nrs.gov.bc.ca/api/sfms
-    # Points to api
-    kind: Route
-    metadata:
-      labels:
-        app: ${APP_NAME}-${SUFFIX}
-        certbot-managed: "true"
-      name: ${APP_NAME}-api-${SUFFIX}-vanity
-      annotations:
-        # Enable HTTP Strict Transport Security:
-        haproxy.router.openshift.io/hsts_header: max-age=31536000;includeSubDomains;preload
-        # Use roundrobin load balancing strategy
-        haproxy.router.openshift.io/balance: roundrobin
-        haproxy.router.openshift.io/disable_cookies: "true"
-        # TODO: we don't want to keep it at 60s - this is just a temporary workaround while
-        # we generate classified HFI on the fly.
-        haproxy.router.openshift.io/timeout: 60s
-    spec:
-      host: ${VANITY_DOMAIN}
-      path: "/api/sfms"
-      port:
-        targetPort: 8080-tcp
-      to:
-        kind: Service
-        name: ${APP_NAME}-api-${SUFFIX}
-      tls:
-        termination: edge
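
Reviewer note: with the standalone `app.sfms` entrypoint, `start_sfms.sh`, the `sfms.yaml` template, and the deploy jobs removed, the `/api/sfms` routes above are served only by the main API. Below is a minimal smoke-test sketch of the surviving endpoints, mirroring the curl example in the `upload` docstring. The base URL is a placeholder, and the `Secret` header name mirrors what the `/manual` handler checks; `/upload` itself authenticates via `sfms_authenticate`, whose exact header contract isn't shown in this diff.

```python
"""Smoke-test sketch for the consolidated SFMS routes (reviewer aid, not part of the PR)."""
import requests

BASE_URL = "https://example.apps.silver.devops.gov.bc.ca/api/sfms"  # placeholder host
SECRET = "..."  # must match the server's SFMS_SECRET config


def upload_tif(path: str) -> int:
    """POST a TIF the same way the docstring's curl example does."""
    with open(path, "rb") as file:
        response = requests.post(
            f"{BASE_URL}/upload",
            headers={"Secret": SECRET},  # header name assumed from the /manual handler
            files={"file": (path, file, "image/tiff")},
        )
    return response.status_code


def list_hourlies(for_date: str) -> list[str]:
    """GET /hourlies?for_date=YYYY-MM-DD and pull out the public object-store URLs."""
    response = requests.get(f"{BASE_URL}/hourlies", params={"for_date": for_date})
    response.raise_for_status()
    return [hourly["url"] for hourly in response.json()["hourlies"]]
```

Note that the `/ready` and `/health` endpoints disappear with this change (both the router handlers and the template probes that pointed at them), so readiness and liveness checks presumably fall to the main API's own probe routes.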
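
One detail worth tracing in `add_msg_to_queue`: the queue message's `for_date` comes from slicing an eight-digit `YYYYMMDD` run out of the upload filename. A worked instance of that slice, assuming `get_date_part` (a helper not shown in this diff) returns the digit run from a name like `hfi20220812.tif`:

```python
from datetime import date

# Assumed result of get_date_part("hfi20220812.tif"); the helper itself isn't shown in this diff.
for_date = "20220812"

# The same slicing add_msg_to_queue uses when building the SFMSFile queue message:
parsed = date(year=int(for_date[0:4]), month=int(for_date[4:6]), day=int(for_date[6:8]))
assert parsed == date(2022, 8, 12)
```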