From e21ea54e5603061d9d594b6a45518da65e56985a Mon Sep 17 00:00:00 2001 From: dgboss Date: Tue, 5 Mar 2024 08:46:29 -0800 Subject: [PATCH] Create and display pmtiles for snow coverage (#3444) Co-authored-by: Conor Brady --- .../auto_spatial_advisory/fuel_type_layer.py | 4 +- api/app/auto_spatial_advisory/hfi_pmtiles.py | 58 -------------- api/app/auto_spatial_advisory/process_hfi.py | 9 ++- api/app/db/crud/snow.py | 20 ++++- api/app/jobs/viirs_snow.py | 78 ++++++++++++++++++- api/app/main.py | 3 +- api/app/routers/snow.py | 30 +++++++ api/app/schemas/snow.py | 16 ++++ api/app/utils/pmtiles.py | 60 ++++++++++++++ .../polygonize.py | 6 +- web/cypress/e2e/fba-map-page.cy.ts | 1 + web/cypress/fixtures/fba/processedSnow.json | 7 ++ web/src/api/snow.ts | 42 ++++++++++ .../features/fba/components/map/FBAMap.tsx | 44 ++++++++++- .../features/fba/components/map/Legend.tsx | 31 +++++++- .../fba/components/map/fbaMap.test.tsx | 1 + .../fba/components/map/featureStylers.ts | 13 ++++ .../fba/components/map/legend.test.tsx | 8 ++ .../fba/pages/FireBehaviourAdvisoryPage.tsx | 15 ++++ web/src/features/fba/pmtilesBuilder.ts | 12 +++ 20 files changed, 381 insertions(+), 77 deletions(-) create mode 100644 api/app/routers/snow.py create mode 100644 api/app/schemas/snow.py create mode 100644 api/app/utils/pmtiles.py rename api/app/{auto_spatial_advisory => utils}/polygonize.py (95%) create mode 100644 web/cypress/fixtures/fba/processedSnow.json create mode 100644 web/src/api/snow.ts diff --git a/api/app/auto_spatial_advisory/fuel_type_layer.py b/api/app/auto_spatial_advisory/fuel_type_layer.py index b16dc7e79..f31aa9854 100644 --- a/api/app/auto_spatial_advisory/fuel_type_layer.py +++ b/api/app/auto_spatial_advisory/fuel_type_layer.py @@ -6,11 +6,11 @@ from osgeo import ogr, osr, gdal from shapely import wkt, wkb from app import config -from app.auto_spatial_advisory.polygonize import polygonize_in_memory from app.db.models.auto_spatial_advisory import FuelType from app.db.database import get_async_write_session_scope from app.db.crud.auto_spatial_advisory import save_fuel_type from app.geospatial import NAD83_BC_ALBERS +from app.utils.polygonize import polygonize_in_memory logger = logging.getLogger(__name__) @@ -33,7 +33,7 @@ def fuel_type_iterator() -> Generator[Tuple[int, str], None, None]: # that gdal is able to read. 
filename = f'/vsis3/{bucket}/sfms/static/fbp2021.tif' logger.info('Polygonizing %s...', filename) - with polygonize_in_memory(filename) as layer: + with polygonize_in_memory(filename, 'fuel', 'fuel') as layer: spatial_reference: osr.SpatialReference = layer.GetSpatialRef() target_srs = osr.SpatialReference() diff --git a/api/app/auto_spatial_advisory/hfi_pmtiles.py b/api/app/auto_spatial_advisory/hfi_pmtiles.py index 5ab7095e2..e80377dea 100644 --- a/api/app/auto_spatial_advisory/hfi_pmtiles.py +++ b/api/app/auto_spatial_advisory/hfi_pmtiles.py @@ -1,66 +1,8 @@ -from osgeo import gdal, ogr import os -import subprocess from app.auto_spatial_advisory.run_type import RunType from datetime import date -def tippecanoe_wrapper(geojson_filepath: str, output_pmtiles_filepath: str, min_zoom: int = 4, max_zoom: int = 11): - """ - Wrapper for the tippecanoe cli tool - - :param geojson_filepath: Path to input geojson (must be in EPSG:4326) - :type geojson_filepath: str - :param output_pmtile: Path to output pmtiles file - :type output_pmtiles_filepath: str - :param min_zoom: pmtiles zoom out level - :type min_zoom: int - :param max_zoom: pmtiles zoom in level - :type max_zoom: int - """ - subprocess.run([ - 'tippecanoe', - f'--minimum-zoom={min_zoom}', - f'--maximum-zoom={max_zoom}', - '--projection=EPSG:4326', - f'--output={output_pmtiles_filepath}', - f'{geojson_filepath}', - '--force', - '--quiet' - ], check=True - ) - - -def write_hfi_geojson(hfi_polygons: ogr.Layer, output_dir: str) -> str: - """ - Write geojson file, projected in EPSG:4326, from ogr.Layer object - - :param hfi_polygons: HFI polygon layer - :type hfi_polygons: ogr.Layer - :param output_dir: Output directory - :type output_dir: str - :return: Path to hfi geojson file - :rtype: str - """ - # We can't use an in-memory layer for translating, so we'll create a temp layer - # Using a geopackage since it supports all projections and doesn't limit field name lengths. 
- # This matters because the hfi data is distributed in an odd projection that doesn't have an EPSG code - temp_gpkg = os.path.join(output_dir, 'temp_hfi_polys.gpkg') - driver = ogr.GetDriverByName('GPKG') - temp_data_source = driver.CreateDataSource(temp_gpkg) - temp_data_source.CopyLayer(hfi_polygons, 'hfi_layer') - - # We need a geojson file to pass to tippecanoe - temp_geojson = os.path.join(output_dir, 'temp_hfi_polys.geojson') - - # tippecanoe recommends the input geojson be in EPSG:4326 [https://github.com/felt/tippecanoe#projection-of-input] - gdal.VectorTranslate(destNameOrDestDS=temp_geojson, srcDS=temp_gpkg, - format='GeoJSON', dstSRS='EPSG:4326', reproject=True) - - del temp_gpkg - - return temp_geojson - def get_pmtiles_filepath(run_date: date, run_type: RunType, filename: str) -> str: """ diff --git a/api/app/auto_spatial_advisory/process_hfi.py b/api/app/auto_spatial_advisory/process_hfi.py index 6fc0be9c6..5b8cfcbb2 100644 --- a/api/app/auto_spatial_advisory/process_hfi.py +++ b/api/app/auto_spatial_advisory/process_hfi.py @@ -15,10 +15,11 @@ save_hfi, get_hfi_classification_threshold, HfiClassificationThresholdEnum, save_run_parameters, get_run_parameters_id) from app.auto_spatial_advisory.classify_hfi import classify_hfi -from app.auto_spatial_advisory.polygonize import polygonize_in_memory from app.auto_spatial_advisory.run_type import RunType from app.geospatial import NAD83_BC_ALBERS -from app.auto_spatial_advisory.hfi_pmtiles import write_hfi_geojson, tippecanoe_wrapper, get_pmtiles_filepath +from app.auto_spatial_advisory.hfi_pmtiles import get_pmtiles_filepath +from app.utils.polygonize import polygonize_in_memory +from app.utils.pmtiles import tippecanoe_wrapper, write_geojson from app.utils.s3 import get_client @@ -105,10 +106,10 @@ async def process_hfi(run_type: RunType, run_date: date, run_datetime: datetime, with tempfile.TemporaryDirectory() as temp_dir: temp_filename = os.path.join(temp_dir, 'classified.tif') classify_hfi(key, temp_filename) - with polygonize_in_memory(temp_filename) as layer: + with polygonize_in_memory(temp_filename, 'hfi', 'hfi') as layer: # We need a geojson file to pass to tippecanoe - temp_geojson = write_hfi_geojson(layer, temp_dir) + temp_geojson = write_geojson(layer, temp_dir) pmtiles_filename = f'hfi{for_date.strftime("%Y%m%d")}.pmtiles' temp_pmtiles_filepath = os.path.join(temp_dir, pmtiles_filename) diff --git a/api/app/db/crud/snow.py b/api/app/db/crud/snow.py index fed5075ca..630e9f552 100644 --- a/api/app/db/crud/snow.py +++ b/api/app/db/crud/snow.py @@ -1,5 +1,6 @@ """ CRUD operations relating to processing snow coverage """ +from datetime import datetime from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from app.db.models.snow import ProcessedSnow, SnowSourceEnum @@ -14,7 +15,7 @@ async def save_processed_snow(session: AsyncSession, processed_snow: ProcessedSn session.add(processed_snow) -async def get_last_processed_snow_by_source(session: AsyncSession, snow_source: SnowSourceEnum) -> ProcessedSnow: +async def get_last_processed_snow_by_source(session: AsyncSession, snow_source: SnowSourceEnum = SnowSourceEnum.viirs) -> ProcessedSnow: """ Retrieve the record with the most recent for_date of the specified snow source. :param snow_source: The source of snow data of interest. 
@@ -26,4 +27,21 @@ async def get_last_processed_snow_by_source(session: AsyncSession, snow_source:
         .where(ProcessedSnow.snow_source == snow_source)\
         .order_by(ProcessedSnow.for_date.desc())
     result = await session.execute(stmt)
+    return result.first()
+
+async def get_most_recent_processed_snow_by_date(session: AsyncSession, target_date: datetime, snow_source: SnowSourceEnum = SnowSourceEnum.viirs) -> ProcessedSnow:
+    """ Retrieve the most recent record prior to or equal to the provided date.
+
+    :param target_date: The date of interest
+    :type target_date: datetime
+    :param snow_source: The source of snow data of interest.
+    :type snow_source: SnowSourceEnum
+    :return: A record containing the last date for which snow data from the specified source was successfully processed.
+    :rtype: ProcessedSnow
+    """
+    stmt = select(ProcessedSnow)\
+        .where(ProcessedSnow.snow_source == snow_source)\
+        .where(ProcessedSnow.for_date <= target_date)\
+        .order_by(ProcessedSnow.for_date.desc())
+    result = await session.execute(stmt)
     return result.first()
\ No newline at end of file
diff --git a/api/app/jobs/viirs_snow.py b/api/app/jobs/viirs_snow.py
index 84b336152..c7c2fbd68 100644
--- a/api/app/jobs/viirs_snow.py
+++ b/api/app/jobs/viirs_snow.py
@@ -1,8 +1,9 @@
-from datetime import date, timedelta
+from datetime import date, datetime, timedelta
 from osgeo import gdal
 import asyncio
 import glob
 import logging
+import numpy as np
 import os
 import requests
 import shutil
@@ -13,7 +14,10 @@
 from app.db.database import get_async_read_session_scope, get_async_write_session_scope
 from app.db.models.snow import ProcessedSnow, SnowSourceEnum
 from app.rocketchat_notifications import send_rocketchat_notification
+from app.utils.polygonize import polygonize_in_memory
+from app.utils.pmtiles import tippecanoe_wrapper, write_geojson
 from app.utils.s3 import get_client
+from app.utils.time import vancouver_tz
 
 logger = logging.getLogger(__name__)
 
@@ -24,8 +28,28 @@
 LAYER_VARIABLE = "/VIIRS_Grid_IMG_2D/CGF_NDSI_Snow_Cover"
 RAW_SNOW_COVERAGE_NAME = 'raw_snow_coverage.tif'
 RAW_SNOW_COVERAGE_CLIPPED_NAME = 'raw_snow_coverage_clipped.tif'
+BINARY_SNOW_COVERAGE_CLASSIFICATION_NAME = 'binary_snow_coverage.tif'
+SNOW_COVERAGE_PMTILES_MIN_ZOOM = 4
+SNOW_COVERAGE_PMTILES_MAX_ZOOM = 11
+SNOW_COVERAGE_PMTILES_PERMISSIONS = 'public-read'
 
 
+def get_pmtiles_filepath(for_date: date, filename: str) -> str:
+    """
+    Returns the S3 storage key for storing the snow coverage pmtiles for the given date and file name.
+
+
+    :param for_date: The date of snow coverage imagery.
+    :type for_date: date
+    :param filename: snowCoverage[for_date].pmtiles -> snowCoverage20230821.pmtiles
+    :type filename: str
+    :return: s3 bucket key for pmtiles file
+    :rtype: str
+    """
+    pmtiles_filepath = os.path.join('psu', 'pmtiles', 'snow', for_date.strftime('%Y-%m-%d'), filename)
+
+    return pmtiles_filepath
+
 class ViirsSnowJob():
     """ Job that downloads and processed VIIRS snow coverage data from the NSIDC (https://nsidc.org). """
 
@@ -135,6 +159,48 @@ async def _save_clipped_snow_coverage_mosaic_to_s3(self, for_date: date, path: s
                 await client.put_object(Bucket=bucket,
                                         Key=key,
                                         Body=file)
 
+
+    def _classify_snow_coverage(self, path: str):
+        source_path = os.path.join(path, RAW_SNOW_COVERAGE_CLIPPED_NAME)
+        source = gdal.Open(source_path, gdal.GA_ReadOnly)
+        source_band = source.GetRasterBand(1)
+        source_data = source_band.ReadAsArray()
+        # Classify the data. Snow coverage in the source data is indicated by values in the range of 0-100.
I'm using a range of + # 10 - 100 to increase confidence. In the classified data 1 is assigned to snow covered pixels and all other pixels are 0. + classified = np.where((source_data > 10) & (source_data <= 100), 1, 0) + output_driver = gdal.GetDriverByName("GTiff") + classified_snow_path = os.path.join(path, BINARY_SNOW_COVERAGE_CLASSIFICATION_NAME) + classified_snow = output_driver.Create(classified_snow_path, xsize=source_band.XSize, ysize=source_band.YSize, bands=1, eType=gdal.GDT_Byte) + classified_snow.SetGeoTransform(source.GetGeoTransform()) + classified_snow.SetProjection(source.GetProjection()) + classified_snow_band = classified_snow.GetRasterBand(1) + classified_snow_band.WriteArray(classified) + source_data = None + source_band = None + source = None + classified_snow_band = None + classified_snow = None + + async def _create_pmtiles_layer(self, path: str, for_date: date): + filename = os.path.join(path, BINARY_SNOW_COVERAGE_CLASSIFICATION_NAME) + with polygonize_in_memory(filename, 'snow', 'snow') as layer: + # We need a geojson file to pass to tippecanoe + temp_geojson = write_geojson(layer, path) + pmtiles_filename = f'snowCoverage{for_date.strftime("%Y%m%d")}.pmtiles' + temp_pmtiles_filepath = os.path.join(path, pmtiles_filename) + logger.info(f'Writing snow coverage pmtiles -- {pmtiles_filename}') + tippecanoe_wrapper(temp_geojson, temp_pmtiles_filepath, + min_zoom=SNOW_COVERAGE_PMTILES_MIN_ZOOM, max_zoom=SNOW_COVERAGE_PMTILES_MAX_ZOOM) + + async with get_client() as (client, bucket): + key = get_pmtiles_filepath(for_date, pmtiles_filename) + logger.info(f'Uploading snow coverage file {pmtiles_filename} to {key}') + + await client.put_object(Bucket=bucket, + Key=key, + ACL=SNOW_COVERAGE_PMTILES_PERMISSIONS, # We need these to be accessible to everyone + Body=open(temp_pmtiles_filepath, 'rb')) + logger.info('Done uploading snow coverage file') async def _process_viirs_snow(self, for_date: date, path: str): @@ -152,6 +218,11 @@ async def _process_viirs_snow(self, for_date: date, path: str): self._create_snow_coverage_mosaic(sub_dir) await self._clip_snow_coverage_mosaic(sub_dir, path) await self._save_clipped_snow_coverage_mosaic_to_s3(for_date, sub_dir) + # Reclassify the clipped snow coverage mosaic to 1 for snow and 0 for all other cells + self._classify_snow_coverage(sub_dir) + # Create pmtiles file and save to S3 + await self._create_pmtiles_layer(sub_dir, for_date) + async def _run_viirs_snow(self): @@ -162,7 +233,7 @@ async def _run_viirs_snow(self): today = date.today() if last_processed_date is None: # Case to cover the initial run of VIIRS snow processing (ie. start processing one week ago) - next_date = today - timedelta(days=7) + next_date = today - timedelta(days=10) else: # Start processing the day after the last record of a successful job. 
next_date = last_processed_date + timedelta(days=1) @@ -176,10 +247,11 @@ async def _run_viirs_snow(self): while next_date < today: date_string = next_date.strftime('%Y-%m-%d') logger.info(f"Processing snow coverage data for date: {date_string}") + tz_aware_datetime = vancouver_tz.localize(datetime.combine(next_date, datetime.min.time())) try: await self._process_viirs_snow(next_date, temp_dir) async with get_async_write_session_scope() as session: - processed_snow = ProcessedSnow(for_date=next_date, processed_date=today, snow_source=SnowSourceEnum.viirs) + processed_snow = ProcessedSnow(for_date=tz_aware_datetime, processed_date=today, snow_source=SnowSourceEnum.viirs) await save_processed_snow(session, processed_snow) logger.info(f"Successfully processed VIIRS snow coverage data for date: {date_string}") except requests.exceptions.HTTPError as http_error: diff --git a/api/app/main.py b/api/app/main.py index eb00e5ac8..b744de6be 100644 --- a/api/app/main.py +++ b/api/app/main.py @@ -16,7 +16,7 @@ from app import hourlies from app.rocketchat_notifications import send_rocketchat_notification from app.routers import (fba, forecasts, weather_models, c_haines, stations, hfi_calc, - fba_calc, sfms, morecast_v2) + fba_calc, sfms, morecast_v2, snow) from app.fire_behaviour.cffdrs import CFFDRS @@ -110,6 +110,7 @@ async def catch_exception_middleware(request: Request, call_next): api.include_router(fba.router, tags=["Auto Spatial Advisory"]) api.include_router(sfms.router, tags=["SFMS", "Auto Spatial Advisory"]) api.include_router(morecast_v2.router, tags=["Morecast v2"]) +api.include_router(snow.router, tags=['Snow']) @api.get('/ready') diff --git a/api/app/routers/snow.py b/api/app/routers/snow.py new file mode 100644 index 000000000..48e9e4537 --- /dev/null +++ b/api/app/routers/snow.py @@ -0,0 +1,30 @@ +""" Routers for Snow related data +""" + +import logging +from datetime import date, datetime +from fastapi import APIRouter, Depends +from app.auth import authentication_required, audit +from app.db.crud.snow import get_most_recent_processed_snow_by_date +from app.db.database import get_async_read_session_scope +from app.schemas.snow import ProcessedSnowModel, ProcessedSnowResponse +from app.utils.time import vancouver_tz + +logger = logging.getLogger(__name__) + +router = APIRouter( + prefix="/snow", + dependencies=[Depends(authentication_required), Depends(audit)], +) + + +@router.get('/most-recent-by-date/{for_date}', response_model=ProcessedSnowResponse | None) +async def get_most_recent_by_date(for_date: date, _=Depends(authentication_required)): + """ Returns the most recent processed snow record before or equal to the provided date. """ + logger.info('/snow/most-recent-by-date/') + tz_aware_datetime = vancouver_tz.localize(datetime.combine(for_date, datetime.min.time())) + async with get_async_read_session_scope() as session: + result = await get_most_recent_processed_snow_by_date(session, tz_aware_datetime) + if result is not None: + processed_snow = result[0] + return ProcessedSnowResponse(processed_snow=ProcessedSnowModel(for_date=processed_snow.for_date, processed_date=processed_snow.processed_date, snow_source=processed_snow.snow_source)) diff --git a/api/app/schemas/snow.py b/api/app/schemas/snow.py new file mode 100644 index 000000000..f5a64de0e --- /dev/null +++ b/api/app/schemas/snow.py @@ -0,0 +1,16 @@ +""" This module contains pydantic models related to snow data. 
""" + +from datetime import datetime +from pydantic import BaseModel +from app.db.models.snow import SnowSourceEnum + +class ProcessedSnowModel(BaseModel): + """ The content of a processed snow object""" + for_date: datetime + processed_date: datetime + snow_source: SnowSourceEnum + +class ProcessedSnowResponse(BaseModel): + """ A processed snow response """ + processed_snow: ProcessedSnowModel + diff --git a/api/app/utils/pmtiles.py b/api/app/utils/pmtiles.py new file mode 100644 index 000000000..697926c6f --- /dev/null +++ b/api/app/utils/pmtiles.py @@ -0,0 +1,60 @@ +from osgeo import gdal, ogr +import os +import subprocess + + +def tippecanoe_wrapper(geojson_filepath: str, output_pmtiles_filepath: str, min_zoom: int = 4, max_zoom: int = 11): + """ + Wrapper for the tippecanoe cli tool + + :param geojson_filepath: Path to input geojson (must be in EPSG:4326) + :type geojson_filepath: str + :param output_pmtile: Path to output pmtiles file + :type output_pmtiles_filepath: str + :param min_zoom: pmtiles zoom out level + :type min_zoom: int + :param max_zoom: pmtiles zoom in level + :type max_zoom: int + """ + subprocess.run([ + 'tippecanoe', + f'--minimum-zoom={min_zoom}', + f'--maximum-zoom={max_zoom}', + '--projection=EPSG:4326', + f'--output={output_pmtiles_filepath}', + f'{geojson_filepath}', + '--force', + '--quiet' + ], check=True + ) + + +def write_geojson(polygons: ogr.Layer, output_dir: str) -> str: + """ + Write geojson file, projected in EPSG:4326, from ogr.Layer object + + :param polygons: Polygon layer + :type polygons: ogr.Layer + :param output_dir: Output directory + :type output_dir: str + :return: Path to geojson file + :rtype: str + """ + # We can't use an in-memory layer for translating, so we'll create a temp layer + # Using a geopackage since it supports all projections and doesn't limit field name lengths. + temp_gpkg = os.path.join(output_dir, 'temp_polys.gpkg') + driver = ogr.GetDriverByName('GPKG') + temp_data_source = driver.CreateDataSource(temp_gpkg) + temp_data_source.CopyLayer(polygons, 'poly_layer') + + # We need a geojson file to pass to tippecanoe + temp_geojson = os.path.join(output_dir, 'temp_polys.geojson') + + # tippecanoe recommends the input geojson be in EPSG:4326 [https://github.com/felt/tippecanoe#projection-of-input] + gdal.VectorTranslate(destNameOrDestDS=temp_geojson, srcDS=temp_gpkg, + format='GeoJSON', dstSRS='EPSG:4326', reproject=True) + + del temp_gpkg + + return temp_geojson + diff --git a/api/app/auto_spatial_advisory/polygonize.py b/api/app/utils/polygonize.py similarity index 95% rename from api/app/auto_spatial_advisory/polygonize.py rename to api/app/utils/polygonize.py index 7bf472f02..3515bb45e 100644 --- a/api/app/auto_spatial_advisory/polygonize.py +++ b/api/app/utils/polygonize.py @@ -25,7 +25,7 @@ def _create_in_memory_band(data: np.ndarray, cols, rows, projection, geotransfor @contextmanager -def polygonize_in_memory(geotiff_filename) -> ogr.Layer: +def polygonize_in_memory(geotiff_filename, layer, field) -> ogr.Layer: """ Given some tiff file, return a polygonized version of it, in memory, as an ogr layer. 
""" source: gdal.Dataset = gdal.Open(geotiff_filename, gdal.GA_ReadOnly) @@ -46,8 +46,8 @@ def polygonize_in_memory(geotiff_filename) -> ogr.Layer: # https://gdal.org/api/python/osgeo.ogr.html#osgeo.ogr.DataSource dst_ds: ogr.DataSource = mem_drv.CreateDataSource("out") - dst_layer: ogr.Layer = dst_ds.CreateLayer("hfi", spatial_reference, ogr.wkbPolygon) - field_name = ogr.FieldDefn("hfi", ogr.OFTInteger) + dst_layer: ogr.Layer = dst_ds.CreateLayer(layer, spatial_reference, ogr.wkbPolygon) + field_name = ogr.FieldDefn(field, ogr.OFTInteger) field_name.SetWidth(24) dst_layer.CreateField(field_name) diff --git a/web/cypress/e2e/fba-map-page.cy.ts b/web/cypress/e2e/fba-map-page.cy.ts index 1379ef409..7a8b990ca 100644 --- a/web/cypress/e2e/fba-map-page.cy.ts +++ b/web/cypress/e2e/fba-map-page.cy.ts @@ -11,6 +11,7 @@ describe('Fire Behaviour Advisory Page', () => { }, { fixture: 'fba/vectors.json' } ).as('getVectors') + cy.intercept('GET', 'api/snow/most-recent-by-date/*', { fixture: 'fba/processedSnow.json' }).as('processedSnow') cy.visit(FIRE_BEHAVIOUR_ADVISORY_ROUTE) }) diff --git a/web/cypress/fixtures/fba/processedSnow.json b/web/cypress/fixtures/fba/processedSnow.json new file mode 100644 index 000000000..1825e4f1c --- /dev/null +++ b/web/cypress/fixtures/fba/processedSnow.json @@ -0,0 +1,7 @@ +{ + "processed_snow": { + "for_date": "2024-02-20T08:00:00Z", + "processed_date": "2024-02-27T08:00:00Z", + "snow_source": "viirs" + } +} \ No newline at end of file diff --git a/web/src/api/snow.ts b/web/src/api/snow.ts new file mode 100644 index 000000000..be93024a6 --- /dev/null +++ b/web/src/api/snow.ts @@ -0,0 +1,42 @@ +import axios from 'api/axios' +import { DateTime } from 'luxon' + +enum SnowSource { + VIIRS = 'viirs' +} + +// The shape of processed snow data. +interface ProcessedSnowPayload { + for_date: string + processed_date: string + snow_source: SnowSource +} + +// Response object from our API. +interface ProcessedSnowResponse { + processed_snow: ProcessedSnowPayload +} + +// Client side representation of processed snow data. 
+export interface ProcessedSnow {
+  forDate: DateTime
+  processedDate: DateTime
+  snowSource: SnowSource
+}
+
+export async function getMostRecentProcessedSnowByDate(forDate: DateTime): Promise<ProcessedSnow | null> {
+  if (!forDate) {
+    return null
+  }
+  const url = `snow/most-recent-by-date/${forDate.toISODate()}`
+  const { data } = await axios.get(url, {})
+  if (data) {
+    const processedSnow = data.processed_snow
+    return {
+      forDate: DateTime.fromISO(processedSnow.for_date),
+      processedDate: DateTime.fromISO(processedSnow.processed_date),
+      snowSource: processedSnow.snow_source
+    }
+  }
+  return data
+}
diff --git a/web/src/features/fba/components/map/FBAMap.tsx b/web/src/features/fba/components/map/FBAMap.tsx
index efd6f1dff..25b586826 100644
--- a/web/src/features/fba/components/map/FBAMap.tsx
+++ b/web/src/features/fba/components/map/FBAMap.tsx
@@ -22,13 +22,14 @@ import {
   fireShapeStyler,
   fireShapeLabelStyler,
   stationStyler,
-  hfiStyler
+  hfiStyler,
+  snowStyler
 } from 'features/fba/components/map/featureStylers'
 import { CENTER_OF_BC } from 'utils/constants'
 import { DateTime } from 'luxon'
 import { PMTILES_BUCKET } from 'utils/env'
 import { RunType } from 'features/fba/pages/FireBehaviourAdvisoryPage'
-import { buildPMTilesURL } from 'features/fba/pmtilesBuilder'
+import { buildPMTilesURL, buildSnowPMTilesURL } from 'features/fba/pmtilesBuilder'
 import { isUndefined, cloneDeep, isNull } from 'lodash'
 import { Box } from '@mui/material'
 import Legend from 'features/fba/components/map/Legend'
@@ -47,6 +48,7 @@ export interface FBAMapProps {
   runType: RunType
   advisoryThreshold: number
   showSummaryPanel: boolean
+  snowDate: DateTime | null
   setShowSummaryPanel: React.Dispatch<React.SetStateAction<boolean>>
 }
 
@@ -63,7 +65,8 @@ const FBAMap = (props: FBAMapProps) => {
   const { stations } = useSelector(selectFireWeatherStations)
   const [showShapeStatus, setShowShapeStatus] = useState(true)
-  const [showHFI, setShowHFI] = React.useState(false)
+  const [showHFI, setShowHFI] = useState(false)
+  const [showSnow, setShowSnow] = useState(false)
   const [map, setMap] = useState(null)
   const mapRef = useRef(null)
   const { mostRecentRunDate } = useSelector(selectRunDates)
@@ -231,6 +234,27 @@ const FBAMap = (props: FBAMapProps) => {
     }
   }, [showHFI, mostRecentRunDate]) // eslint-disable-line react-hooks/exhaustive-deps
 
+  useEffect(() => {
+    if (!map) return
+    const layerName = 'snowVector'
+    removeLayerByName(map, layerName)
+    if (!isNull(props.snowDate)) {
+      const snowPMTilesSource = new olpmtiles.PMTilesVectorSource({
+        url: buildSnowPMTilesURL(props.snowDate)
+      })
+
+      const latestSnowPMTilesLayer = new VectorTileLayer({
+        source: snowPMTilesSource,
+        style: snowStyler,
+        zIndex: 100,
+        minZoom: 4,
+        properties: { name: layerName },
+        visible: showSnow
+      })
+      map.addLayer(latestSnowPMTilesLayer)
+    }
+  }, [props.snowDate]) // eslint-disable-line react-hooks/exhaustive-deps
+
   useEffect(() => {
     // The React ref is used to attach to the div rendered in our
     // return statement of which this map's target is set to.
@@ -289,6 +313,17 @@ const FBAMap = (props: FBAMapProps) => {
     map?.addLayer(stationsLayer)
   }, [stations]) // eslint-disable-line react-hooks/exhaustive-deps
 
+  // Generate a message to display about the snow layer in the legend.
+ const getSnowDateMessage = () => { + if (!showSnow) { + return null + } + if (isNull(props.snowDate)) { + return 'No data available' + } + return `as of ${props.snowDate?.toISODate()}` + } + return ( @@ -308,6 +343,9 @@ const FBAMap = (props: FBAMapProps) => { setShowShapeStatus={setShowShapeStatus} showHFI={showHFI} setShowHFI={setShowHFI} + showSnow={showSnow} + setShowSnow={setShowSnow} + snowDescription={getSnowDateMessage()} /> diff --git a/web/src/features/fba/components/map/Legend.tsx b/web/src/features/fba/components/map/Legend.tsx index 939a0b947..23ffe9193 100644 --- a/web/src/features/fba/components/map/Legend.tsx +++ b/web/src/features/fba/components/map/Legend.tsx @@ -38,9 +38,10 @@ interface LegendItemProps { checked: boolean onChange: (event: React.ChangeEvent, checked: boolean) => void subItems?: SubItem[] + description?: string | null } -const LegendItem: React.FC = ({ label, checked, onChange, subItems }) => ( +const LegendItem: React.FC = ({ label, checked, onChange, subItems, description }) => (
@@ -57,6 +58,14 @@ const LegendItem: React.FC = ({ label, checked, onChange, subIt + + + + {description} + + + + {subItems && ( {subItems.map(subItem => ( @@ -79,9 +88,21 @@ interface LegendProps { setShowShapeStatus: React.Dispatch> showHFI: boolean setShowHFI: React.Dispatch> + showSnow: boolean + setShowSnow: React.Dispatch> + snowDescription: string | null } -const Legend = ({ onToggleLayer, showShapeStatus, setShowShapeStatus, showHFI, setShowHFI }: LegendProps) => { +const Legend = ({ + onToggleLayer, + showShapeStatus, + setShowShapeStatus, + showHFI, + setShowHFI, + showSnow, + setShowSnow, + snowDescription +}: LegendProps) => { const handleLayerChange = ( layerName: string, isVisible: boolean, @@ -117,6 +138,12 @@ const Legend = ({ onToggleLayer, showShapeStatus, setShowShapeStatus, showHFI, s onChange={() => handleLayerChange('hfiVector', showHFI, setShowHFI)} subItems={hfiSubItems} /> + handleLayerChange('snowVector', showSnow, setShowSnow)} + description={snowDescription} + > ) } diff --git a/web/src/features/fba/components/map/fbaMap.test.tsx b/web/src/features/fba/components/map/fbaMap.test.tsx index f86937b62..01645d491 100644 --- a/web/src/features/fba/components/map/fbaMap.test.tsx +++ b/web/src/features/fba/components/map/fbaMap.test.tsx @@ -36,6 +36,7 @@ describe('FBAMap', () => { setShowSummaryPanel={function (): void { throw new Error('Function not implemented.') }} + snowDate={DateTime.now()} /> ) diff --git a/web/src/features/fba/components/map/featureStylers.ts b/web/src/features/fba/components/map/featureStylers.ts index 5d0de1e9e..67c9579af 100644 --- a/web/src/features/fba/components/map/featureStylers.ts +++ b/web/src/features/fba/components/map/featureStylers.ts @@ -8,6 +8,7 @@ import { range, startCase, lowerCase, isUndefined } from 'lodash' import { FireShape, FireShapeArea } from 'api/fbaAPI' const EMPTY_FILL = 'rgba(0, 0, 0, 0.0)' +const SNOW_FILL = 'rgba(255, 255, 255, 0.85)' export const ADVISORY_ORANGE_FILL = 'rgba(255, 147, 38, 0.4)' export const ADVISORY_RED_FILL = 'rgba(128, 0, 0, 0.4)' @@ -169,3 +170,15 @@ export const hfiStyler = (feature: RenderFeature | ol.Feature): Style } return hfiStyle } + +// A styling function for the snow coverage pmtiles layer. 
+export const snowStyler = (feature: RenderFeature | ol.Feature): Style => { + const snow = feature.get('snow') + const snowStyle = new Style({}) + if (snow === 1) { + snowStyle.setFill(new Fill({ color: SNOW_FILL })) + } else { + snowStyle.setFill(new Fill({ color: EMPTY_FILL })) + } + return snowStyle +} diff --git a/web/src/features/fba/components/map/legend.test.tsx b/web/src/features/fba/components/map/legend.test.tsx index 54dcb75ca..6e51cd918 100644 --- a/web/src/features/fba/components/map/legend.test.tsx +++ b/web/src/features/fba/components/map/legend.test.tsx @@ -8,6 +8,7 @@ describe('Legend', () => { const onToggleLayer = jest.fn() const setShowZoneStatus = jest.fn() const setShowHFI = jest.fn() + const setShowSnow = jest.fn() const { getByTestId } = render( { setShowHFI={setShowHFI} showHFI={false} showShapeStatus={true} + showSnow={false} + setShowSnow={setShowSnow} + snowDescription="foo" /> ) const legendComponent = getByTestId('asa-map-legend') @@ -35,6 +39,7 @@ describe('Legend', () => { const onToggleLayer = jest.fn() const setShowZoneStatus = jest.fn() const setShowHFI = jest.fn() + const setShowSnow = jest.fn() const { getByTestId } = render( { setShowHFI={setShowHFI} showHFI={false} showShapeStatus={true} + showSnow={false} + setShowSnow={setShowSnow} + snowDescription="foo" /> ) diff --git a/web/src/features/fba/pages/FireBehaviourAdvisoryPage.tsx b/web/src/features/fba/pages/FireBehaviourAdvisoryPage.tsx index 60a23796b..407f017fb 100644 --- a/web/src/features/fba/pages/FireBehaviourAdvisoryPage.tsx +++ b/web/src/features/fba/pages/FireBehaviourAdvisoryPage.tsx @@ -29,6 +29,7 @@ import { fetchFireShapeAreas } from 'features/fba/slices/fireZoneAreasSlice' import { fetchfireZoneElevationInfo } from 'features/fba/slices/fireZoneElevationInfoSlice' import ZoneSummaryPanel from 'features/fba/components/ZoneSummaryPanel' import { StyledFormControl } from 'components/StyledFormControl' +import { getMostRecentProcessedSnowByDate } from 'api/snow' export enum RunType { FORECAST = 'FORECAST', @@ -61,9 +62,21 @@ const FireBehaviourAdvisoryPage: React.FunctionComponent = () => { ) const [runType, setRunType] = useState(RunType.FORECAST) const [showSummaryPanel, setShowSummaryPanel] = useState(true) + const [snowDate, setSnowDate] = useState(null) const { mostRecentRunDate } = useSelector(selectRunDates) const { fireShapeAreas } = useSelector(selectFireShapeAreas) + // Query our API for the most recently processed snow coverage date <= the currently selected date. 
+ const fetchLastProcessedSnow = async (selectedDate: DateTime) => { + const data = await getMostRecentProcessedSnowByDate(selectedDate) + if (isNull(data)) { + setSnowDate(null) + } else { + const newSnowDate = data.forDate + setSnowDate(newSnowDate) + } + } + useEffect(() => { const findCenter = (id: string | null): FireCenter | undefined => { return fireCenters.find(center => center.id.toString() == id) @@ -104,6 +117,7 @@ const FireBehaviourAdvisoryPage: React.FunctionComponent = () => { if (!isNull(doiISODate)) { dispatch(fetchSFMSRunDates(runType, doiISODate)) } + fetchLastProcessedSnow(dateOfInterest) }, [dateOfInterest]) // eslint-disable-line react-hooks/exhaustive-deps useEffect(() => { @@ -240,6 +254,7 @@ const FireBehaviourAdvisoryPage: React.FunctionComponent = () => { setSelectedFireShape={setSelectedFireShape} fireShapeAreas={fireShapeAreas} showSummaryPanel={showSummaryPanel} + snowDate={snowDate} setShowSummaryPanel={setShowSummaryPanel} /> diff --git a/web/src/features/fba/pmtilesBuilder.ts b/web/src/features/fba/pmtilesBuilder.ts index 17968728d..b39c61b6f 100644 --- a/web/src/features/fba/pmtilesBuilder.ts +++ b/web/src/features/fba/pmtilesBuilder.ts @@ -16,3 +16,15 @@ export const buildPMTilesURL = (for_date: DateTime, run_type: RunType, run_date: return PMTilesURL } + +/** + * Builds the URL for snow coverage pmtiles layers. + * @param snowDate The target date for snow coverage. + * @returns A URL to the snow coverage PMTiles stored in S3 + */ +export const buildSnowPMTilesURL = (snowDate: DateTime) => { + const snowPMTilesUrl = `${PMTILES_BUCKET}snow/${snowDate.toISODate()}/snowCoverage${snowDate.toISODate({ + format: 'basic' + })}.pmtiles` + return snowPMTilesUrl +}
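
A minimal sketch (illustration only, not part of the patch) of the two conventions the snow coverage job relies on: the binary classification applied to the clipped VIIRS mosaic in _classify_snow_coverage, and the S3 key layout produced by get_pmtiles_filepath, which the frontend's buildSnowPMTilesURL is expected to mirror under PMTILES_BUCKET. The sample array and date below are made up; the expressions are copied from the job code above.

    from datetime import date
    import os
    import numpy as np

    # Toy stand-in for a clipped CGF_NDSI_Snow_Cover band: 0-100 are snow cover
    # values, anything above 100 is a flag (cloud, water, missing data, etc.).
    source_data = np.array([[0, 5, 10, 11],
                            [50, 100, 201, 255]])

    # Same expression as _classify_snow_coverage: values greater than 10 and at
    # most 100 become 1 (snow), everything else becomes 0.
    classified = np.where((source_data > 10) & (source_data <= 100), 1, 0)
    print(classified)  # [[0 0 0 1]
                       #  [1 1 0 0]]

    # Same key layout as get_pmtiles_filepath in viirs_snow.py; the web side builds
    # the matching snow/<iso-date>/snowCoverage<yyyymmdd>.pmtiles path under
    # PMTILES_BUCKET, so the two must stay in sync.
    for_date = date(2023, 8, 21)
    filename = f'snowCoverage{for_date.strftime("%Y%m%d")}.pmtiles'
    print(os.path.join('psu', 'pmtiles', 'snow', for_date.strftime('%Y-%m-%d'), filename))
    # psu/pmtiles/snow/2023-08-21/snowCoverage20230821.pmtiles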