diff --git a/README.md b/README.md index 10b4ea0..cc4eff3 100644 --- a/README.md +++ b/README.md @@ -39,11 +39,11 @@ It is therefore possible, to cover large geographic extent with a __seamless ima It is worth noting that the overall accuracy of your final ARD product strongly depends on the accuracy of sen2like auxiliary data. Two categories of auxiliary data are important: the raster reference for geometric corrections and the meteorological data for atmospheric corrections. Regarding atmospheric corrections, one possibility is to use data from the Copernicus Atmosphere Monitoring Service [9]. The Sen2Like team prepared a dedicated CAMS monthly dataset for the Year 2020, available from [here](http://185.178.85.51/CAMS/). Please refer to this short [description](http://185.178.85.51/CAMS/Readme_CAMS2020.txt) for additional information. For further details on the format specification of the harmonized products or the functionalities of the Sen2Like software, please -refer to the [Product Format Specification](sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf), and the [User Manual v1.8](sen2like/docs/source/S2-SEN2LIKE-UM-V1.8.pdf). +refer to the [Product Format Specification](sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.2.pdf), and the [User Manual v1.8](sen2like/docs/source/S2-SEN2LIKE-UM-V1.9.pdf). ## Publications and Contacts **Yearning to know more ? 
Check out** -* poster [Sen2Like, a tool to generate Sentinel-2 Harmonised Surface Reflectance Products, First Results With Landsat-8, 3rd S2 Validation Team Meeting](https://www.researchgate.net/publication/332428332_Sen2like_a_Tool_to_Generate_Sentinel-2_Harmonised_Surface_Reflectance_Products_-_First_Results_With_Landsat-8) +* poster [Sen2like: A solution for Harmonization and Fusion of Sentinel-2 and Landsat 8/9 data, JACIE 2023](https://www.researchgate.net/publication/372831217_Sen2like_A_solution_for_Harmonization_and_Fusion_of_Sentinel-2_and_Landsat_89_data) * A [Sen2Like Relaxing Video](https://youtu.be/KBSYYBShyos) prepared for [ESA EO PHI-WEEK 2020](https://www.youtube.com/playlist?list=PLvT7fd9OiI9XELZXcljYTftUtJ_NFWRrY) * A [Sen2Like Time Lapse including NDVI graphic](https://youtu.be/yEObvI1KQBg) prepared for QWG#12 @@ -60,7 +60,7 @@ And the following research papers : + [10] Saunier, S.; Pflug, B.; Lobos, I.M.; Franch, B.; Louis, J.; De Los Reyes, R.; Debaecker, V.; Cadau, E.G.; Boccia, V.; Gascon, F.; Kocaman, S. Sen2Like: Paving the Way towards Harmonization and Fusion of Optical Data. Remote Sens. 2022, 14, 3855. (https://doi.org/10.3390/rs14163855) -**Learn how to use Sen2Like**, have a look at the [User Manual](sen2like/docs/source/S2-SEN2LIKE-UM-V1.8.pdf). +**Learn how to use Sen2Like**, have a look at the [User Manual](sen2like/docs/source/S2-SEN2LIKE-UM-V1.9.pdf). **Get help**, contact us at sen2like@telespazio.com. diff --git a/sen2like/.dockerignore b/sen2like/.dockerignore index 92d502b..6413307 100644 --- a/sen2like/.dockerignore +++ b/sen2like/.dockerignore @@ -2,4 +2,6 @@ * # but not what we want to send in the build context !requirements.txt +!requirements_dev.txt !sen2like +!aux_data diff --git a/sen2like/Dockerfile b/sen2like/Dockerfile index 2c246cc..8f02172 100644 --- a/sen2like/Dockerfile +++ b/sen2like/Dockerfile @@ -14,20 +14,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. - +ARG SEN2LIKE_BUILD_IMAGE_TAG # docker image baser on miniconda image (debian:latest) -FROM continuumio/miniconda3:22.11.1 AS build +FROM ${SEN2LIKE_BUILD_IMAGE_TAG} AS build LABEL stage=sen2like_build # set the working dir to /usr/local/sen2like WORKDIR /usr/local/sen2like -# Create the environment: -# copy requirements.txt from sources to docker image -COPY ./requirements.txt . -# create sen2like env from requirement -RUN conda create -n sen2like --file requirements.txt -c conda-forge - # Install conda-pack: RUN conda install -c conda-forge conda-pack @@ -48,11 +42,18 @@ FROM debian:bullseye-slim AS runtime COPY --from=build /venv /venv # install in curent docker image mesa-glx -RUN apt-get update && apt-get install -y libgl1-mesa-glx +RUN apt-get update \ + && apt-get install -y libgl1-mesa-glx \ + && rm -rf /var/lib/apt/lists/* # set PATH with venv ENV VIRTUAL_ENV=/venv ENV PATH="$VIRTUAL_ENV/bin:$PATH" +ENV PROJ_LIB=/venv/share/proj + +# set the working dir to /usr/local/aux_data +WORKDIR /usr/local/aux_data +COPY ./aux_data . # set the working dir to /usr/local/sen2like WORKDIR /usr/local/sen2like diff --git a/sen2like/Dockerfile-base b/sen2like/Dockerfile-base new file mode 100644 index 0000000..79a3bee --- /dev/null +++ b/sen2like/Dockerfile-base @@ -0,0 +1,13 @@ +# docker image based on miniconda image (debian:latest) +# ARG MINICONDA_DOCKER_VERSION +# FROM continuumio/miniconda3:${MINICONDA_DOCKER_VERSION} +FROM continuumio/miniconda3:23.3.1-0 + +# set the working dir to /usr/local/sen2like +WORKDIR /usr/local/sen2like + +# Create the environment: +# copy requirements.txt from sources to docker image +COPY ./requirements.txt . 
+# create sen2like env from requirement +RUN conda create -n sen2like --file requirements.txt -c conda-forge diff --git a/sen2like/Dockerfile-tests b/sen2like/Dockerfile-tests new file mode 100644 index 0000000..2fbeea3 --- /dev/null +++ b/sen2like/Dockerfile-tests @@ -0,0 +1,18 @@ +# sen2like-base docker image is based on miniconda image (debian:latest) +ARG SEN2LIKE_BUILD_IMAGE_TAG +FROM ${SEN2LIKE_BUILD_IMAGE_TAG} +LABEL stage=sen2like_tests + +# Update the environment: +# copy dev/tests requirements from sources to docker image +COPY ./requirements_dev.txt . + +# update sen2like env +RUN conda install -n sen2like --file requirements_dev.txt -c conda-forge + +# install system dependencies in curent docker image mesa-glx +RUN apt-get update \ + && apt-get install -y libgl1-mesa-glx \ + && rm -rf /var/lib/apt/lists/* + +# image is ready for tests, usage must activate sen2like conda env diff --git a/sen2like/README.md b/sen2like/README.md index 6d661c0..4cfdcca 100644 --- a/sen2like/README.md +++ b/sen2like/README.md @@ -26,8 +26,11 @@ TOC Generated with markdown all in one: https://github.com/yzhang-gh/vscode-mark - [Geometry](#geometry) - [Atmcor](#atmcor) - [Nbar](#nbar) + - [Sbaf](#sbaf) - [Fusion](#fusion) - [Stitching](#stitching) + - [TopographicCorrection (Experimental)](#topographiccorrection-experimental) + - [DEMRepository](#demrepository) - [OutputFormat](#outputformat) - [COGoptions](#cogoptions) - [JPEG2000options](#jpeg2000options) @@ -39,6 +42,8 @@ TOC Generated with markdown all in one: https://github.com/yzhang-gh/vscode-mark - [Single tile mode](#single-tile-mode) - [Multi tile mode](#multi-tile-mode) - [ROI based mode](#roi-based-mode) +- [Auxiliary data tools](#auxiliary-data-tools) + - [DEM downloader](#dem-downloader) - [Release notes](#release-notes) - [License](#license) @@ -100,7 +105,7 @@ conda activate sen2like ```bash python sen2like.py -[INFO ] 2023-01-11 14:50:54 - sen2like - Run Sen2like 4.3.0 +[INFO ] 2023-01-11 14:50:54 - sen2like 
- Run Sen2like 4.4.0 usage: sen2like.py [-h] [--version] [--refImage PATH] [--wd PATH] [--conf PATH] [--confParams STRLIST] [--bands STRLIST] [--allow-other-srs] [--no-run] [--intermediate-products] @@ -124,8 +129,16 @@ Please refer to the docker documentation to install docker on your environnement From the sen2like root directory (the one containing `Dockerfile`) +First build the base image : + ```bash -docker build -t sen2like . && docker image prune --filter label=stage=sen2like_build -f +docker build -t sen2like_base -f Dockerfile-base . +``` + +Then the final image : + +```bash +docker build -t sen2like --build-arg SEN2LIKE_BUILD_IMAGE_TAG=sen2like_base . && docker image prune --filter label=stage=sen2like_build -f ``` The result is a docker image with tag `sen2like:latest` @@ -149,7 +162,7 @@ Create a tag TARGET_IMAGE that refers to SOURCE_IMAGE Example ```bash -docker image tag sen2like my-internal-docker-registry-url/sen2like:4.3 +docker image tag sen2like my-internal-docker-registry-url/sen2like:4.4 ``` Push the image on a registry with the command `docker push NAME[:TAG]` @@ -157,7 +170,7 @@ Push the image on a registry with the command `docker push NAME[:TAG]` Example ```bash -docker push my-internal-docker-registry-url/sen2like:4.3 +docker push my-internal-docker-registry-url/sen2like:4.4 ``` ## Running the tool @@ -181,15 +194,15 @@ Build sen2like docker image or pull it from a registry with the command `docker Example : ```bash -docker pull https://my-internal-docker-registry-url/sen2like:4.3 +docker pull https://my-internal-docker-registry-url/sen2like:4.4 ``` You can run it directly without entering into the container: ```bash -docker run --rm my-internal-docker-registry-url/sen2like/sen2like:4.3 +docker run --rm my-internal-docker-registry-url/sen2like/sen2like:4.4 -[INFO ] 2023-01-11 14:50:54 - sen2like - Run Sen2like 4.3.0 +[INFO ] 2023-01-11 14:50:54 - sen2like - Run Sen2like 4.4.0 usage: sen2like.py [-h] [--version] [--refImage PATH] [--wd
PATH] [--conf PATH] [--confParams STRLIST] [--bands STRLIST] [--allow-other-srs] [--no-run] [--intermediate-products] @@ -210,7 +223,7 @@ In the following examples **local** folder `/data` is supposed to exist and cont ```bash docker run --rm \ --mount type=bind,source="/data",target=/data \ - my-internal-docker-registry-url/sen2like/sen2like:4.3 \ + my-internal-docker-registry-url/sen2like/sen2like:4.4 \ single-tile-mode 31TFJ \ --conf "/data/config.ini" \ --start-date 2017-10-30 --end-date 2017-10-31 \ @@ -224,7 +237,7 @@ Python script `sen2like.py` could be accessed from a docker container. Launch the docker binding **local** `/data` folder to the container `/data` folder, example: ```bash -docker run --rm -it --mount type=bind,source="/data",target=/data --entrypoint=/bin/bash my-internal-docker-registry-url/sen2like/sen2like:4.3 +docker run --rm -it --mount type=bind,source="/data",target=/data --entrypoint=/bin/bash my-internal-docker-registry-url/sen2like/sen2like:4.4 root@15a2f44ddd70:/usr/local/sen2like @@ -271,9 +284,10 @@ Enable or disable a processing block based on value `(True, False)`: * `doGeometryCheck`: Run the geometric assessment using KLT to compute geometry QI * `doToa`: Run the TOA correction * `doInterCalibration`: Run the Inter Calibration correction (S2B) -* `doAtmcor`: Run the Atmospheric correction (SMAC or Sen2Cor) +* `doAtmcor`: Activate the Atmospheric correction (SMAC or with sen2cor) * `doNbar`: Run Nbar correction processing * `doSbaf`: Run the Sbaf correction processing +* `doTopographicCorrection`: Activate the Topographic correction (sen2like or with sen2cor) * `doFusion`: Run the Fusion processing * `doPackagerL2H`: Run the packaging processing for harmonized products * `doPackagerL2F`: Run the packaging processing for fused products @@ -287,7 +301,6 @@ Indicates path for special directories: * `cams_daily_dir`: Where the CAMS daily files are located * `cams_hourly_dir`: Where the CAMS hourly files are located * 
`cams_climatology_dir`: Where the CAMS climatology files are located -* `dem_dir`: Where the DEM files are located * `scl_dir`: Where the auxiliary scl maps files are located #### Downloader @@ -349,7 +362,7 @@ url_parameters_pattern_Sentinel2 = /data/PRODUCTS/Sentinel2/31TFJ Define parameters for geometric correction. -* `reference_band`= The reference band to be used for geometric correction +* `reference_band`: The reference band to be used for geometric correction * `doMatchingCorrection`: Apply the matching correction (`True`, `False`) * `doAssessGeometry`: Assess geometry (Band list separated by comma.) * `references_map`: Path to the reference json file containing the reference image for each tile @@ -359,7 +372,7 @@ Define parameters for geometric correction. Atmospheric correction method to use. -* `use_sen2cor`: Activate sen2cor for Atmospheric correction (SMAC otherwise) +* `use_sen2cor`: Activate sen2cor for Atmospheric correction (SMAC otherwise), `doAtmcor` MUST be `True` * `sen2cor_path`: Path to sen2cor tool command (L2A_Process.py) #### Nbar @@ -369,6 +382,23 @@ Define parameters for Nbar processing. * `nbar_methode`: Method to get BRDF coefficients. Currently, available methods are : ROY, VJB * `vjb_coeff_matrice_dir`: If VJB method is selected, directory path of the BRDF coefficients netcdf file +#### Sbaf + +Define parameters for SBAF processing. + +* `adaptative`: **Experimental** Use adaptative SBAF or not. Adaptative SBAF is based on NDVI to which is apply a factor and offset depending the processed band. +* `adaptative_band_candidates`: S2 equivalent band list separated by comma for which to apply adaptative SBAF. For now B04, B11 and B12 are known to be the better candidates. + Band mapping table: + |Landsat|S2| + |-|-| + |B01|B01| + |B02|B02| + |B03|B03| + |B04|B04| + |B05|B8A| + |B06|B11| + |B07|B12| + #### Fusion Define parameters for Fusion processing. @@ -385,6 +415,33 @@ Define parameters for stitching processing. 
* `same_utm_only`: Enable or disable stitching with product on different UTM as current processed product. For Landsat it allows to use previous or next product that can be on another UTM for stitching and have a better tile coverage. +#### TopographicCorrection (Experimental) + +Define parameters for topographic correction process. Topographic correction can be done with the sen2like processing block or with sen2cor. + +* `sen2cor_topographic_correction`: with topographic correction activated (`doTopographicCorrection=True`), and atmcor with sen2cor enabled (`doAtmcor=True` and `use_sen2cor=True`), the topographic correction is delegated to sen2cor instead of the sen2like TopographicCorrection processing block. +* `topographic_correction_limiter`: limit to this value the maximum topographic factor to apply (only for sen2like topographic correction, not sen2cor). +* `apply_valid_pixel_mask`: Use valid pixel mask to select pixel for which the correction is done. Useful to not apply correction on cloudy pixels (only for sen2like topographic correction, not sen2cor). + + +#### DEMRepository + +Define parameters for DEMRepository component. + +The DEMRepository component is responsible to access DEM files mainly for [`TopographicCorrection`](#topographiccorrection) processing block + +* `dem_folder`: Base folder path that host DEM dataset +* `dem_dataset`: DEM dataset to use +* `src_dem_resolution`: resolution to use for DEM in DEM dataset + +DEM storage tree MUST follow the following structure : + +`{dem_folder}/{dem_dataset}/Copernicus_DSM_{src_dem_resolution}m_{tile_code}.TIF` + +Where `tile_code` is the current processed MGRS tile. + +See [DEM Downloader chapter](#dem-downloader) to know how to generate MGRS DEM tile. + #### OutputFormat Define output format, gain and offset for image files. @@ -393,7 +450,7 @@ Define output format, gain and offset for image files. 
* `offset`: DN Offset to substract from the output image (DN to reflectance conversion) * `output_format`: Format of the output image. Supported formats: COG, GTIFF (for geotiff), JPEG2000. -*Note: DN to reflectance conversion: reflectance = (DN - offset) / gain * +> *Note: DN to reflectance conversion: reflectance = (DN - offset) / gain* #### COGoptions @@ -714,6 +771,81 @@ Debug arguments: --no-log-date Do no store date in log (default: False) ``` +## Auxiliary data tools + +Auxiliary data tools are located in the folder `aux_data` + +For now there is only the DEM downloader tools. + +### DEM downloader + +This tool allows to create DEM aux data for TopographicCorrection of Sen2like. + +It creates a DEM as GeoTiff having the same extend and UTM projection as a MGRS tile. + +To do so, from a given MGRS tile code, it retrieves matching Copernicus DEM tiles, build a mosaic, then crop and reproject it. + +In order to use this tool, `sen2like` module/folder **MUST** be in `PYTHONPATH`. + +Usage example to display the help: + +```bash +PYTHONPATH=sen2like/ aux_data/dem/dem_downloader.py -h +``` + +It should display: + +``` +usage: dem_downloader.py [-h] --server_url SERVER_URL [--debug] MGRS_TILE_CODE DEM_DATASET_NAME DEM_LOCAL_URL + + Create DEM for a MGRS Tile. + The generated DEM is TIF file in the MGRS extend and its projected in the MGRS tile EPSG (UTM). 
+ + +positional arguments: + MGRS_TILE_CODE MGRS Tile code for witch to generate DEM, example 31TFJ + DEM_DATASET_NAME DEM dataset name, example COP-DEM_GLO-90-DGED__2022_1 + DEM_LOCAL_URL Base output folder for generated DEM, example /data/AUX_DATA/ + Generated files are stored as follow : + /data/AUX_DATA/{DEM_DATASET_NAME}/Copernicus_DSM_{resolution}m_{MGRS_TILE_CODE}.TIF + + +options: + -h, --help show this help message and exit + --server_url SERVER_URL + DEM server base URL (default: https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/) + --debug, -d Enable Debug mode (default: False) +``` + +The `--server_url` option MUST allow producing URLs formatted as follows to download tiles : `//` + +With: + +- `` is the `DEM_DATASET_NAME` program argument +- `` is a file name that matches `Copernicus_DSM_VV_WWW_XX_YYY_ZZ.tar`, example: `Copernicus_DSM_30_N42_00_E012_00.tar` +See [Product Structure / Naming Convention in this document](https://spacedata.copernicus.eu/documents/20123/122407/GEO1988-CopernicusDEM-SPE-002_ProductHandbook_I5.0+%281%29.pdf) + + +The tool is also available in the Sen2like docker image in `/usr/local/aux_data` folder. +`sen2like` path to use for `PYTHONPATH` is `/usr/local/sen2like`. + +Example : + +```bash +docker run --rm \ + -v /data/DEM:/data/DEM \ + -e PYTHONPATH=/usr/local/sen2like \ + --entrypoint /usr/local/aux_data/dem/dem_downloader.py \ + sen2like \ + 33TTG \ + COP-DEM_GLO-90-DGED__2022_1 \ + /data/DEM +``` + +It results in a file `/data/DEM/COP-DEM_GLO-90-DGED__2022_1/Copernicus_DSM_90m_33TTG.TIF` on the docker host. + +The tool keeps extracted Copernicus geocell DEM TIF files in folder `//geocells` to avoid downloading them every time it needs them to create a MGRS DEM file.
+ ## [Release notes](./release-notes.md) ## License diff --git a/sen2like/aux_data/dem/dem_downloader.py b/sen2like/aux_data/dem/dem_downloader.py new file mode 100755 index 0000000..1ac9621 --- /dev/null +++ b/sen2like/aux_data/dem/dem_downloader.py @@ -0,0 +1,486 @@ +#! /usr/bin/env python +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""An interface for accessing the Copernicus 90m DEM.""" + +import itertools +import logging +import os +import re +import tarfile +import urllib.request +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, RawTextHelpFormatter +from tempfile import TemporaryDirectory +from urllib.error import HTTPError + +from core.product_archive import tile_db +from osgeo import gdal, ogr + +LOGGER = logging.getLogger("Sen2Like") + +# Apply proposed patch https://github.com/senbox-org/sen2like/pull/2 +# for CVE-2007-4559 Patch + + +def is_within_directory(directory, target): + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + + return prefix == abs_directory + + +def safe_extract(tar, path=".", members=None, *, numeric_owner=False): + for member in tar.getmembers(): + member_path = os.path.join(path, member.name) + if not is_within_directory(path, member_path): + raise Exception("Attempted Path Traversal in 
Tar File") + + tar.extractall(path, members, numeric_owner=numeric_owner) + + +# EO Patch + + +def res_to_arcsec(resolution: int): + return resolution // 3 + + +def dem_file_from_tar(tar_file: tarfile.TarFile): + for tarinfo in tar_file: + # only extract file *_DEM.tif + # example Copernicus_DSM_30_N42_00_E011_00_DEM.tif + if re.match(".*_DEM.tif", tarinfo.name): + tarinfo.name = os.path.basename(tarinfo.name) + yield tarinfo + + +def is_local(url: str) -> bool: + """Determine if url is a local or distant location. + + Args: + url (str): The url to test + + Returns: + bool: True if url is local, False otherwise + """ + for prefix in ["http", "https", "ftp"]: + if url.startswith(f"{prefix}://"): + return False + return True + + +def progress(downloaded, block_size, data_size): + """Display download progression. + + Args: + downloaded: Amount of data already downloaded. + block_size: Size of a data block. + data_size: Total size of data to download. + """ + percentage = downloaded * block_size / data_size + end = "" + if percentage > 100: + percentage = 100.0 + end = "\n" + print(f"\r{percentage:.2%}...", end=end) + + +# DEM dataset name expression to extract resolution, type of DEM, year and rev +DATASET_EXPR = re.compile(r"COP-DEM_GLO-(\d{2})-(DGED|DTED)__(\d{4})_(\d{1})") + + +class HelpFormatter(RawTextHelpFormatter, ArgumentDefaultsHelpFormatter): + """Custom argparser formatter""" + + pass + + +class DemDownloader: + """Manage DEM. + + Deduce the tile(s) that is(are) needed for covering the input extent (e.g. MGRS tile extent), + Resolve the filenames of the tiles using latitude and longitude information (e.g. “w145/n69.dt1”), + Download the tiles from a public server if not present on local archive, + Mosaic the tiles, resample (if requested), and crop the tile to fit the input extent. 
+ """ + + def __init__(self, args, in_resolution: int = 90, resolution: int = 90): + self.config = args + self.mgrs_tile_code: str = args.mgrs_tile_code + self._mgrs_def: dict = {} + self.resolution: int = resolution + self.in_resolution: int = in_resolution + self.temp_directory: TemporaryDirectory = None + self._dem_output: str = "" + self.cross_dateline: bool = False + + @property + def dem_output(self) -> str: + """Get DEM file for mgrs extent.""" + if not self._dem_output: + self._dem_output = os.path.join( + self.config.dem_local_url, + self.config.dem_dataset_name, + f"Copernicus_DSM_{self.resolution}m_{self.mgrs_tile_code}.TIF", + ) + LOGGER.info("DEM out: %s", self._dem_output) + + return self._dem_output + + @property + def mgrs_def(self) -> dict: + """retrieve MGRS def from tile_db + + Raises: + ValueError: if MGRS tile does not exists + + Returns: + dict: MGRS def having keys: + 'index', 'TILE_ID', 'EPSG', 'UTM_WKT', 'MGRS_REF', 'LL_WKT', 'geometry' + """ + if not self._mgrs_def: + self._mgrs_def = tile_db.get_mgrs_def(self.mgrs_tile_code) + if self._mgrs_def is None: + LOGGER.error("Cannot get MGRS definition for tile %s", self.mgrs_tile_code) + raise ValueError(f"Does tile {self.mgrs_tile_code} exists?") + + return self._mgrs_def + + def get(self) -> str: + """Retrieve DEM file if exists, otherwise create it. + + Returns: + str: path to the DEM file, empty if DEM cannot be produced + """ + if os.path.isfile(self.dem_output): + LOGGER.info("DEM file for tile %s: %s", self.mgrs_tile_code, self.dem_output) + return self.dem_output + + LOGGER.info("No local DEM available for tile %s.", self.mgrs_tile_code) + + self.temp_directory = TemporaryDirectory() + + locations = self.compute_tile_extent() + dem_files = self.resolve_dem_file_urls(locations) + + self.get_src_dem_tiles(dem_files) + if len(dem_files) == 0: + LOGGER.error("Error while processing tile %s. 
DEM is invalid.", self.mgrs_tile_code) + return "" + + self.create_mgrs_dem(list(dem_files.values())) + + LOGGER.info("DEM file for tile %s: %s", self.mgrs_tile_code, self.dem_output) + + return self.dem_output + + def get_tile_extent(self, utm: bool) -> tuple: + """Retrieve MGRS tile extend in LatLon or UTM coordinates + + Args: + utm (bool): have UTM coordinates or not + + Returns: + tuple: extent as minX, maxX, minY, maxY + """ + tile_geometry = ogr.CreateGeometryFromWkt(self.mgrs_def["UTM_WKT" if utm else "LL_WKT"]) + return tile_geometry.GetEnvelope() + + def compute_tile_extent(self) -> list[tuple[str, str]]: + """Deduce the tile(s) that is(are) needed for covering the input extent (e.g. MGRS tile extent). + + Returns: + list[tuple[str,str]]: List of latitudes, longitudes prefixed by direction (N/S E/W) + corresponding to dem tiles. + """ + locations = None + extent = self.get_tile_extent(False) + LOGGER.debug("Extent: %s", extent) + if extent: + lon_min, lon_max, lat_min, lat_max = extent + + lat_min = int(lat_min if lat_min > 0 else lat_min - 1) + lat_max = int(lat_max + 1 if lat_max > 0 else lat_max) + latitudes = range(lat_min, lat_max) + LOGGER.debug(latitudes) + + # For now assume we never have this case. 
+ # to fix it, retrieve code history to see how it was made, + # but it was not working well + self.cross_dateline = False + lon_min = int(lon_min) + lon_max = int(lon_max + 1) + if self.cross_dateline: + longitudes = list(range(-180, lon_min)) + list(range(lon_max - 1, 180)) + else: + longitudes = range(lon_min, lon_max) + LOGGER.debug(longitudes) + + latitudes = [ + f"N{latitude}" if (0 < latitude <= 90) else f"S{abs(latitude-1)}" + for latitude in latitudes + ] + longitudes = [ + f"E{longitude:03}" if (0 < longitude <= 180) else f"W{abs(longitude-1):03}" + for longitude in longitudes + ] + LOGGER.debug(latitudes) + LOGGER.debug(longitudes) + + locations = list(itertools.product(latitudes, longitudes)) + LOGGER.debug(locations) + else: + LOGGER.error("Error while computing tile extent.") + return locations + + def resolve_dem_file_urls(self, locations: list[tuple[str, str]], local=True) -> dict: + """Resolve the file url of the tiles using latitude and longitude information. + Urls can be local file path or remote url. + + Args: + locations (list[tuple[str,str]]): List of latitudes, longitudes prefixed by direction (N/S E/W) + local (bool, optional): resolve local or remote source dem tile url. Defaults to True. 
+ + Returns: + dict: urls indexed by lat lon + """ + + arcsec = res_to_arcsec(self.in_resolution) + dem_product_name = "Copernicus_DSM_{arcsec:02}_{latitude}_00_{longitude}_00" + + if local: + url = os.path.join( + self.config.dem_local_url, + self.config.dem_dataset_name, + "geocells", + "{dem_product_name}_DEM.tif", + ) + else: + url = self.config.server_url + "{dem_dataset_name}/{dem_product_name}.tar" + + output_files = {} + + if locations is not None: + for latitude, longitude in locations: + dem_url = url.format( + dem_dataset_name=self.config.dem_dataset_name, dem_product_name=dem_product_name + ) + # format `{dem_product_name}` put in dem_url by previous instruction + dem_url = dem_url.format(arcsec=arcsec, latitude=latitude, longitude=longitude) + + LOGGER.debug(dem_url) + output_files[latitude, longitude] = dem_url + + return output_files + + def get_src_dem_tiles(self, tile_urls: dict) -> bool: + """ + Update `tile_urls` with founded tiles from. + If a tile is not found locally, try to download it. 
+ If not downloaded (and so not locally founded), the corresponding url is remove from `tile_urls` + + Args: + tile_urls (dict): tiles urls indexed by lat lon + """ + LOGGER.info("Trying to retrieve or download DEM for tile %s", self.mgrs_tile_code) + + exclude = [] + for location, tile_file in tile_urls.items(): + if not os.path.isfile(tile_file): + output_dir = os.path.dirname(tile_file) + self.download_tile(location, output_dir) + # After download file must exist + if not os.path.isfile(tile_file): + LOGGER.warning("Unable to download tile for %s, exclude it.", location) + exclude.append(location) + for location in exclude: + tile_urls.pop(location) + + def download_tile(self, location: tuple, output_dir: str): + """ + Download the tiles from a public server in a temp dir, + then extract dem tif file in the local geocell archive + + Args: + location (tuple): tile lat lon location + output_dir (str): destination directory + """ + urls = self.resolve_dem_file_urls([location], local=False) + for dem_url in urls.values(): + tmp_file = os.path.join(self.temp_directory.name, os.path.basename(dem_url)) + try: + LOGGER.info("Downloading file to %s", tmp_file) + local_dem, _ = urllib.request.urlretrieve(dem_url, tmp_file, reporthook=progress) + LOGGER.info("File correctly downloaded") + except HTTPError as err: + LOGGER.error("Cannot get file %s : %s", dem_url, err) + else: + LOGGER.info("Extract file %s", local_dem) + with tarfile.open(local_dem) as tar_file: + safe_extract(tar_file, path=output_dir, members=dem_file_from_tar(tar_file)) + + def _create_dem_mosaic(self, dem_mosaic_file: str, dem_files: list[str]): + """Create DEM mosaic from source DEM tile + + Args: + dem_mosaic_file (str): mosaic destination file path + dem_files (list[str]): list of sources dem tile file path + """ + LOGGER.info("Creating DEM mosaic...") + no_data = -20000 + try: + # Mosaic + if self.cross_dateline: + gdal.SetConfigOption("CENTER_LONG", "180") + + options = gdal.WarpOptions( + 
dstNodata=no_data, outputType=gdal.GDT_Int16, dstSRS="EPSG:4326" + ) + + dataset = gdal.Warp(dem_mosaic_file, dem_files, options=options) + + # Replace no-data by 0 + dem_band = dataset.GetRasterBand(1) + dem_arr = dem_band.ReadAsArray() + dem_arr[dem_arr == no_data] = 0 + dem_band.WriteArray(dem_arr) + dem_band.FlushCache() + dataset = None + + gdal.SetConfigOption("CENTER_LONG", "0") + LOGGER.debug("DEM mosaic: %s", dem_mosaic_file) + except Exception as exception: + LOGGER.fatal(exception, exc_info=True) + LOGGER.fatal("error using gdalwarp") + raise + + def _reframe_dem(self, dem_mosaic_file: str): + """Reframe the dem mosaic in the target MGRS tile projected in the MGRS tile UTM + + Args: + dem_mosaic_file (str): mosaic file path + """ + LOGGER.info("Cropping and projecting DEM...") + + extent = self.get_tile_extent(True) + os.makedirs(os.path.dirname(self.dem_output), exist_ok=True) + options = gdal.WarpOptions( + dstSRS=f"EPSG:{self.mgrs_def['EPSG']}", + xRes=self.resolution, + yRes=self.resolution, + resampleAlg="cubicspline", + outputType=gdal.GDT_Int16, + outputBounds=(extent[0], extent[2], extent[1], extent[3]), + ) + + try: + gdal.Warp(self.dem_output, dem_mosaic_file, options=options) + except Exception as exception: + LOGGER.fatal(exception, exc_info=True) + LOGGER.fatal("error using gdalwarp") + raise + + def create_mgrs_dem(self, dem_files: list[str]): + """Create the DEM. + + Args: + dem_files (list[str]): list of source dem files to use to create the DEM + """ + + dem_mosaic = os.path.join( + self.temp_directory.name, + f"Copernicus_{self.mgrs_tile_code}_{self.resolution}_mosaic.tif", + ) + + self._create_dem_mosaic(dem_mosaic, dem_files) + # Crop to MGRS and reproject + self._reframe_dem(dem_mosaic) + + +if __name__ == "__main__": + DESCRIPTION = """ + Create DEM for a MGRS Tile. + The generated DEM is TIF file in the MGRS extend and its projected in the MGRS tile EPSG (UTM). 
+ """ + + _arg_parser = ArgumentParser(formatter_class=HelpFormatter, description=DESCRIPTION) + + _arg_parser.add_argument( + dest="mgrs_tile_code", + help="MGRS Tile code for witch to generate DEM, example 31TFJ", + metavar="MGRS_TILE_CODE", + type=str, + ) + + _arg_parser.add_argument( + dest="dem_dataset_name", + help="DEM dataset name, example COP-DEM_GLO-90-DGED__2022_1", + metavar="DEM_DATASET_NAME", + type=str, + ) + + _arg_parser.add_argument( + dest="dem_local_url", + help="""Base output folder for generated DEM, example /data/AUX_DATA/ +Generated files are stored as follow : +/data/AUX_DATA/{DEM_DATASET_NAME}/Copernicus_DSM_{resolution}m_{MGRS_TILE_CODE}.TIF + """, + metavar="DEM_LOCAL_URL", + type=str, + ) + + _arg_parser.add_argument( + "--server_url", + dest="server_url", + help="DEM server base URL", + required=False, + type=str, + default="https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/", + metavar="SERVER_URL", + ) + + _arg_parser.add_argument( + "--debug", + "-d", + dest="debug", + action="store_true", + default=False, + help="Enable Debug mode", + ) + + _args = _arg_parser.parse_args() + + logging.basicConfig( + level=logging.DEBUG if _args.debug else logging.INFO, + ) + + # verify dataset + match = DATASET_EXPR.match(_args.dem_dataset_name) + if match is None: + raise ValueError( + f""" + Invalid dataset name: {_args.dem_dataset_name}. 
+ For more details, see https://sentinels.copernicus.eu/web/sentinel/-/copernicus-dem-new-direct-data-download-access + """ + ) + + res = int(match.group(1)) + dem_downloader = DemDownloader(_args, in_resolution=res, resolution=res) + dem = dem_downloader.get() diff --git a/sen2like/conf/Sen2Like_GIPP.xsd b/sen2like/conf/Sen2Like_GIPP.xsd index 8d3e45e..df75fe4 100644 --- a/sen2like/conf/Sen2Like_GIPP.xsd +++ b/sen2like/conf/Sen2Like_GIPP.xsd @@ -13,6 +13,7 @@ + @@ -25,7 +26,6 @@ - @@ -110,12 +110,32 @@ + + + + + + + + + + + + + + + + + + + + @@ -166,7 +186,6 @@ - @@ -177,8 +196,11 @@ + + + diff --git a/sen2like/conf/config.ini b/sen2like/conf/config.ini index 054670c..249994d 100644 --- a/sen2like/conf/config.ini +++ b/sen2like/conf/config.ini @@ -8,6 +8,7 @@ doInterCalibration = True doAtmcor = True doNbar = True doSbaf = True +doTopographicCorrection = False doFusion = True doPackagerL2H = True doPackagerL2F = True @@ -20,7 +21,6 @@ cams_daily_dir = /data/CAMS/daily cams_hourly_dir = /data/CAMS/hourly cams_climatology_dir = /data/CAMS/climatology/v1 -dem_dir = /data/AUX_DATA/DEM scl_dir = /data/AUX_DATA/SCL_maps_2.10 [InputProductArchive] @@ -89,9 +89,13 @@ use_sen2cor = False sen2cor_path = ../sen2cor/process.py [Nbar] -nbar_methode = VJB +nbar_methode = ROY vjb_coeff_matrice_dir = /data/Belen +[Sbaf] +adaptative = False +adaptative_band_candidates = B04,B11,B12 + [fusion] # predict_method: predict or composite (most recent valid pixels) predict_method = predict @@ -102,6 +106,17 @@ fusion_auto_check_threshold = 0.1 [Stitching] same_utm_only = True +[TopographicCorrection] +topographic_correction_limiter = 4.0 +apply_valid_pixel_mask = False +sen2cor_topographic_correction = False + +[DEMRepository] +# Expect to get DEM from {dem_folder}/{dem_dataset}/Copernicus_DSM_{src_dem_resolution}m_{tile_code}.TIF +dem_folder = /data/AUX_DATA/ +dem_dataset = COP-DEM_GLO-90-DGED__2022_1 +src_dem_resolution = 90 + [OutputFormat] gain = 10000 offset = 1000 @@ -135,4 +150,4 
@@ json_metadata = True [RunTime] dx = 0 -dy = 0 +dy = 0 \ No newline at end of file diff --git a/sen2like/conf/config.xml b/sen2like/conf/config.xml index fd77863..3616b69 100644 --- a/sen2like/conf/config.xml +++ b/sen2like/conf/config.xml @@ -10,6 +10,7 @@ True True True + False True False True @@ -20,7 +21,6 @@ /data/CAMS/daily /data/CAMS/hourly /data/CAMS/climatology/v1 - /data/AUX_DATA/DEM /data/AUX_DATA/SCL_maps_2.10 @@ -78,9 +78,13 @@ ../sen2cor/process.py - VJB + ROY /data/Belen + + False + B04,B11,B12 + predict 2 @@ -90,6 +94,16 @@ True + + 4.0 + False + True + + + /data/AUX_DATA/ + COP-DEM_GLO-90-DGED__2022_1 + 90 + 10000 1000 diff --git a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf deleted file mode 100644 index bdc671f..0000000 Binary files a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf and /dev/null differ diff --git a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.2.pdf b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.2.pdf new file mode 100644 index 0000000..eb9cc5b Binary files /dev/null and b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.2.pdf differ diff --git a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.8.pdf b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.8.pdf deleted file mode 100644 index 56f8c97..0000000 Binary files a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.8.pdf and /dev/null differ diff --git a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.9.pdf b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.9.pdf new file mode 100644 index 0000000..330627f Binary files /dev/null and b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.9.pdf differ diff --git a/sen2like/release-notes.md b/sen2like/release-notes.md index 202dd6d..6cc8229 100644 --- a/sen2like/release-notes.md +++ b/sen2like/release-notes.md @@ -1,5 +1,28 @@ # Sen2Like Release Notes +## v4.4.0 + +### **Breaking changes** + +* New mandatory parameters in GIPP XML and INI configuration file: + * new section with new params : `DEMRepository` + * new section with new 
params : `TopographicCorrection` + * new section with new params : `Sbaf` +* Docker image build is now done in two step. [`Dockerfile`](Dockerfile) is based on a docker image that comes from [`Dockerfile-base`](Dockerfile-base) for reuse purpose. + +### New features + +* Add topographic correction (Experimental) +* Add DEM Downloader aux data utility to generate DEM in MGRS extent for topographic correction +* New adaptative SBAF capability (Experimental) + +### Fix + +### Improvements + +* Design : Change the way to initialise processing block and to allow dependency injection in processing block classes. + + ## v4.3.0 ### **Breaking changes** diff --git a/sen2like/run_tests.sh b/sen2like/run_tests.sh index 0e68c0e..70fd59b 100755 --- a/sen2like/run_tests.sh +++ b/sen2like/run_tests.sh @@ -8,8 +8,8 @@ export PYTHONPATH=.:${PYTHONPATH} if hash coverage 2> /dev/null; then echo **Running tests - PYTHONPATH=sen2like coverage run --branch -m pytest -source=sen2like --log-level=DEBUG --junitxml reports/junit.xml "$@" > reports/tests.report - out=$? + PYTHONPATH=sen2like:aux_data coverage run --branch -m pytest -source=sen2like --log-level=DEBUG --junitxml reports/junit.xml "$@" |& tee reports/tests.report + out=${PIPESTATUS[0]} if [ $out -ne 0 ]; then echo "Error in tests" @@ -53,9 +53,9 @@ else fi if hash pylint 2> /dev/null; then - pushd sen2like + # pushd sen2like echo **Running pylint code analysis - PYTHONPATH=. pylint --extension-pkg-whitelist=numpy,cv2 --max-line-length=120 -j 8 -r y ./**/*.py > ../reports/pylint.report + PYTHONPATH=sen2like:aux_data pylint --extension-pkg-whitelist=numpy,cv2 --max-line-length=120 -j 8 -r y sen2like/**/*.py aux_data/**/*.py > ../reports/pylint.report out=$? if [ $out -ne 0 ]; then @@ -80,7 +80,7 @@ fi if hash bandit 2> /dev/null; then echo **Running code security check - bandit -r sen2like > reports/bandit.report + bandit -r --severity-level high sen2like > reports/bandit.report out=$? 
if [ $out -ne 0 ]; then diff --git a/sen2like/sen2like/atmcor/__init__.py b/sen2like/sen2like/atmcor/__init__.py index e69de29..c104327 100644 --- a/sen2like/sen2like/atmcor/__init__.py +++ b/sen2like/sen2like/atmcor/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/sen2like/sen2like/atmcor/atmospheric_parameters.py b/sen2like/sen2like/atmcor/atmospheric_parameters.py index 7fa3ea7..687e4b1 100644 --- a/sen2like/sen2like/atmcor/atmospheric_parameters.py +++ b/sen2like/sen2like/atmcor/atmospheric_parameters.py @@ -1,3 +1,20 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import logging import numpy as np diff --git a/sen2like/sen2like/atmcor/cams_data_reader.py b/sen2like/sen2like/atmcor/cams_data_reader.py index 971d1fc..4e75b01 100644 --- a/sen2like/sen2like/atmcor/cams_data_reader.py +++ b/sen2like/sen2like/atmcor/cams_data_reader.py @@ -1,13 +1,30 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import datetime import glob import logging import os from datetime import timedelta -from math import floor, ceil +from math import ceil, floor -from osgeo import gdal import numpy as np from netCDF4 import Dataset +from osgeo import gdal from core.S2L_config import S2L_Config diff --git a/sen2like/sen2like/atmcor/get_s2_angles.py b/sen2like/sen2like/atmcor/get_s2_angles.py index 500b9d3..d36c29c 100644 --- a/sen2like/sen2like/atmcor/get_s2_angles.py +++ b/sen2like/sen2like/atmcor/get_s2_angles.py @@ -1,222 +1,267 @@ -# -*- coding: utf-8 -*- -import logging -import re -import sys -import xml.parsers as pars -from typing import Union -from xml.dom import minidom - -import numpy as np -from osgeo import gdal, osr - -log = logging.getLogger("Sen2Like") - -re_band = re.compile(r'B0?(\d{1,2})$') - - -def get_angles_band_index(band: str) -> Union[int, None]: - """ - Convert the band index into the S2 angles indexing convention - B1->B8 : indices from 0 to 7 - B8A : index 8 - B9 -> B12 : indices from 9 to 12 - """ - 
if band == "B8A": - return 8 - band_index = re_band.match(band) - if band_index: - band_index = int(band_index.group(1)) - if 0 < band_index < 9: - return band_index - 1 - return band_index - return None - - -def from_values_list_to_array(selected_node): - col_step = selected_node.getElementsByTagName('COL_STEP')[0].childNodes[0].data - row_step = selected_node.getElementsByTagName('ROW_STEP')[0].childNodes[0].data - - values_list = selected_node.getElementsByTagName('Values_List')[0].getElementsByTagName('VALUES') - - # x_size, y_size , size of the matrix - x_size = len(values_list[0].childNodes[0].data.split()) - y_size = len(values_list) - - # Create np array of size (x_size,y_size) for sun zenith values : - arr = np.empty([x_size, y_size], float) - for j in range(0, y_size, 1): - a = np.asarray(values_list[j].childNodes[0].data.split(), float) - arr[j] = a - - return x_size, y_size, col_step, row_step, arr - - -def reduce_angle_matrix(x_size, y_size, a_dict): - # As S2 viewing zenith / azimuth matrix given for different detector - # As overlapping detector, build matrix including averaged values where - # several values from different detectors exist - # Input : - # - a : dictionary (detector, band_ud and array values) - # - x_size / y_size size of the matrix - # Output : - # ~ - the reduce matrix - M = np.zeros([x_size, y_size], float) - # print('input M :' + str(M[2][6])) - CPT = np.zeros([x_size, y_size], int) - for k, u in list(a_dict.items()): - for i in range(0, x_size, 1): - for j in range(0, x_size, 1): - A = u["Values"] - if A[i][j] == A[i][j]: # test if value is not nan - M[i][j] = A[i][j] + M[i][j] - CPT[i][j] += 1 - # if i == 2 and j == 6 : - # print str(M[i][j])+' '+str(A[i][j]) - - N = np.divide(M, CPT) - - # keep it commented for history - # before, the division had a where clause CPT!=0 - # but it was not working well so we remove it - # and then the N matrix have the good final result - #N[N == 0] = np.nan - - return N - - -def 
extract_sun_angle(src_file, dst_file, angle_type): - # Open the 'MTD_TL.xml' file, and read information in - # Depending on angle_type value, {'Zenith' , 'Azimuth' } - # select in the corresponding xml section - # save image file in dst_file - do not apply resampling - - xml_tl_file = src_file - try: - dom = minidom.parse(xml_tl_file) - except pars.expat.ExpatError: - sys.exit(' Invalid XML TL File') - - # gdal parameter : - NoData_value = -9999 - - # Load xmlf file and retrieve projection parameter : - node_name = 'Tile_Geocoding' # Level-1C / Level-2A ? - geocoding_node = dom.getElementsByTagName(node_name)[0] - epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data - geo_position = geocoding_node.getElementsByTagName('Geoposition')[0] - ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data - uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data - - # Call gdalsrs info to generate wkt for the projection - # Replaced by gdal python api: - srs = osr.SpatialReference() - srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', ''))) - wkt = srs.ExportToWkt() - - # Load xml file and extract parameter for sun zenith : - node_name = 'Sun_Angles_Grid' # Level-1C / Level-2A ? - sun_angle_node = dom.getElementsByTagName(node_name)[0] - - selected_node = sun_angle_node.getElementsByTagName(angle_type)[0] - - x_size, y_size, col_step, row_step, arr = from_values_list_to_array(selected_node) - - # scale between -180 and 180 deg. 
- if arr.max() > 180.0: - arr[arr > 180] = arr[arr > 180] - 360 - - # Create gdal dataset - x_res = int(x_size) - y_res = int(y_size) - - x_pixel_size = int(col_step) - y_pixel_size = int(row_step) - - log.debug(' Save in {}'.format(dst_file)) - target_ds = gdal.GetDriverByName('GTiff').Create(dst_file, x_res, y_res, 1, gdal.GDT_Int16) - target_ds.SetGeoTransform((int(ulx), x_pixel_size, 0, int(uly), 0, -y_pixel_size)) - band = target_ds.GetRasterBand(1) - band.SetNoDataValue(NoData_value) - band.SetDescription('Solar_' + angle_type) - band.WriteArray((arr * 100).astype(np.int16), 0, 0) # int16 with scale factor 100 - target_ds.SetProjection(wkt) - band = None - target_ds = None - - -def extract_viewing_angle(src_file, dst_file, angle_type): - # Access to MTL and extract vieing angles depending on the angletype - # Return the list of files that have been generated, out_list - out_list = [] # Store the path of all outputs - log.debug('extact viewing angle') - xml_tl_file = src_file - try: - dom = minidom.parse(xml_tl_file) - except pars.expat.ExpatError: - sys.exit(' Invalid XML TL File') - - # gdal parameter : - NoData_value = -9999 - - # Load xmlf file and retrieve projection parameter : - node_name = 'Tile_Geocoding' # Level-1C / Level-2A? - geocoding_node = dom.getElementsByTagName(node_name)[0] - epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data - geo_position = geocoding_node.getElementsByTagName('Geoposition')[0] - ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data - uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data - # Call gdalsrs info to generate wkt for the projection : - # Replaced by gdal python api: - srs = osr.SpatialReference() - srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', ''))) - wkt = srs.ExportToWkt() - - # Load xml file and extract parameter for sun zenith : - node_name = 'Viewing_Incidence_Angles_Grids' # Level-1C / Level-2A ? 
- viewing_angle_node = dom.getElementsByTagName(node_name) - v_dico = {} - for cpt in range(0, len(viewing_angle_node), 1): - band_id = viewing_angle_node[cpt].attributes["bandId"].value - detector = viewing_angle_node[cpt].attributes["detectorId"].value - selected_node = viewing_angle_node[cpt].getElementsByTagName(angle_type)[0] - [x_size, y_size, col_step, row_step, arr] = from_values_list_to_array(selected_node) - v_dico['_'.join([band_id, detector])] = {"Band_id": str(band_id), - "Detector": str(detector), - "Values": arr} - - for rec in range(0, 13, 1): - dic = v_dico.copy() - a = {k: v for k, v in dic.items() if v["Band_id"] == str(rec)} - arr = reduce_angle_matrix(x_size, y_size, a) - - # scale between -180 and 180 deg. - if arr.max() > 180.0: - arr[arr > 180] = arr[arr > 180] - 360 - - # Create gdal dataset - x_res = int(x_size) - y_res = int(y_size) - - x_pixel_size = int(col_step) - y_pixel_size = int(row_step) - - # Decoding of band number : - # CF : https: // earth.esa.int / web / sentinel / user - guides / sentinel - 2 - msi / resolutions / radiometric - # Band 8A <=> Band 9 in the mtl - - dst_file_bd = dst_file.replace('.tif', '_band_' + str(rec + 1) + '.tif') - out_list.append(dst_file_bd) - log.debug(' Save in {}'.format(dst_file_bd)) - target_ds = gdal.GetDriverByName('GTiff').Create(dst_file_bd, x_res, y_res, 1, gdal.GDT_Int16) - target_ds.SetGeoTransform((int(ulx), x_pixel_size, 0, int(uly), 0, -y_pixel_size)) - band = target_ds.GetRasterBand(1) - band.SetNoDataValue(NoData_value) - band.SetDescription('Viewing_' + angle_type + '_band_' + str(rec + 1)) # This sets the band name! - target_ds.GetRasterBand(1).WriteArray((arr * 100).astype(np.int16), 0, 0) # int16 with scale factor 100 - target_ds.SetProjection(wkt) - band = None - target_ds = None - arr = None - a = None - - return out_list +# -*- coding: utf-8 -*- +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import re +import sys +import xml.parsers as pars +from typing import NamedTuple +from xml.dom import minidom + +import numpy as np +from osgeo import gdal, osr + +log = logging.getLogger("Sen2Like") + +re_band = re.compile(r'B0?(\d{1,2})$') + + +def get_angles_band_index(band: str) -> int|None: + """ + Convert the band index into the S2 angles indexing convention + B1->B8 : indices from 0 to 7 + B8A : index 8 + B9 -> B12 : indices from 9 to 12 + """ + if band == "B8A": + return 8 + band_index = re_band.match(band) + if band_index: + band_index = int(band_index.group(1)) + if 0 < band_index < 9: + return band_index - 1 + return band_index + return None + + +def from_values_list_to_array(selected_node): + col_step = selected_node.getElementsByTagName('COL_STEP')[0].childNodes[0].data + row_step = selected_node.getElementsByTagName('ROW_STEP')[0].childNodes[0].data + + values_list = selected_node.getElementsByTagName('Values_List')[0].getElementsByTagName('VALUES') + + # x_size, y_size , size of the matrix + x_size = len(values_list[0].childNodes[0].data.split()) + y_size = len(values_list) + + # Create np array of size (x_size,y_size) for sun zenith values : + arr = np.empty([x_size, y_size], float) + for j in range(0, y_size, 1): + a = np.asarray(values_list[j].childNodes[0].data.split(), float) + arr[j] = a + + return x_size, y_size, col_step, row_step, arr + + +def reduce_angle_matrix(x_size, y_size, 
a_dict): + # As S2 viewing zenith / azimuth matrix given for different detector + # As overlapping detector, build matrix including averaged values where + # several values from different detectors exist + # Input : + # - a : dictionary (detector, band_ud and array values) + # - x_size / y_size size of the matrix + # Output : + # ~ - the reduce matrix + M = np.zeros([x_size, y_size], float) + # print('input M :' + str(M[2][6])) + CPT = np.zeros([x_size, y_size], int) + for k, u in list(a_dict.items()): + for i in range(0, x_size, 1): + for j in range(0, x_size, 1): + A = u["Values"] + if not np.isnan(A[i][j]): + M[i][j] = A[i][j] + M[i][j] + CPT[i][j] += 1 + # if i == 2 and j == 6 : + # print str(M[i][j])+' '+str(A[i][j]) + + N = np.divide(M, CPT) + + # keep it commented for history + # before, the division had a where clause CPT!=0 + # but it was not working well so we remove it + # and then the N matrix have the good final result + #N[N == 0] = np.nan + + return N + + +def _get_geo_info(xml_tl_file: str) -> tuple: + """extract ULX, ULY, SRS as WKT and DOM of MTD_TL XML file + + Args: + xml_tl_file (str): MTD_TL file path + + Returns: + tuple: dom, ulx, uly, wkt + """ + + try: + dom = minidom.parse(xml_tl_file) + except pars.expat.ExpatError: + sys.exit(' Invalid XML TL File') + + # Load xmlf file and retrieve projection parameter : + node_name = 'Tile_Geocoding' # Level-1C / Level-2A ? 
+ geocoding_node = dom.getElementsByTagName(node_name)[0] + epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data + geo_position = geocoding_node.getElementsByTagName('Geoposition')[0] + ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data + uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data + + # Call gdalsrs info to generate wkt for the projection + # Replaced by gdal python api: + srs = osr.SpatialReference() + srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', ''))) + wkt = srs.ExportToWkt() + + return dom, ulx, uly, wkt + +class _GeoInfo(NamedTuple): + x_res: int + y_res: int + x_pixel_size: int + y_pixel_size: int + ul_x: int + ul_y: int + wkt: str + + +def _save_angle_as_img(dst_file, arr, geo_info: _GeoInfo, description: str): + + # gdal parameter : + nodata_value = -9999 + + # scale between -180 and 180 deg. + if arr.max() > 180.0: + arr[arr > 180] = arr[arr > 180] - 360 + + target_ds = gdal.GetDriverByName('GTiff').Create( + dst_file, + geo_info.x_res, + geo_info.y_res, + 1, + gdal.GDT_Int16 + ) + target_ds.SetGeoTransform( + (geo_info.ul_x, geo_info.x_pixel_size, 0, geo_info.ul_y, 0, -geo_info.y_pixel_size) + ) + band = target_ds.GetRasterBand(1) + band.SetNoDataValue(nodata_value) + band.SetDescription(description) + band.WriteArray((arr * 100).astype(np.int16), 0, 0) # int16 with scale factor 100 + target_ds.SetProjection(geo_info.wkt) + band = None + target_ds = None + + +def extract_sun_angle(src_file: str, dst_file: str, angle_type: str): + """Read the 'MTD_TL.xml' file, and read information in . 
+ Depending on angle_type value, {'Zenith' , 'Azimuth'}, + it selects in the corresponding xml section + save image file in dst_file - do not apply resampling + + Args: + src_file (str): MTD_TL.xml file path + dst_file (str): destination angle image file path + angle_type (str): angle type to extract + """ + dom, ulx, uly, wkt = _get_geo_info(src_file) + + # Load xml file and extract parameter for sun zenith : + node_name = 'Sun_Angles_Grid' # Level-1C / Level-2A ? + sun_angle_node = dom.getElementsByTagName(node_name)[0] + + selected_node = sun_angle_node.getElementsByTagName(angle_type)[0] + + x_size, y_size, col_step, row_step, arr = from_values_list_to_array(selected_node) + + log.debug(' Save in %s', dst_file) + + geo_info = _GeoInfo( + int(x_size), + int(y_size), + int(col_step), + int(row_step), + int(ulx), + int(uly), + wkt + ) + _save_angle_as_img(dst_file, arr, geo_info, f'Solar_{angle_type}') + + +def extract_viewing_angle(src_file: str, dst_file: str, angle_type: str) -> list[str]: + """Access to MTL and extract viewing angles depending on the angle type for each band + + Args: + src_file (str): MTD_TL.xml file path + dst_file (str): destination angle image file path, will be updated for each band + angle_type (str): angle type to extract + + Returns: + list[str]: list of file path that have been generated + """ + out_list = [] # Store the path of all outputs + log.debug('extact viewing angle') + + dom, ulx, uly, wkt = _get_geo_info(src_file) + + # Load xml file and extract parameter for sun zenith : + node_name = 'Viewing_Incidence_Angles_Grids' # Level-1C / Level-2A ? 
+ viewing_angle_node = dom.getElementsByTagName(node_name) + + v_dico = {} + + for cpt in range(0, len(viewing_angle_node), 1): + band_id = viewing_angle_node[cpt].attributes["bandId"].value + detector = viewing_angle_node[cpt].attributes["detectorId"].value + selected_node = viewing_angle_node[cpt].getElementsByTagName(angle_type)[0] + [x_size, y_size, col_step, row_step, arr] = from_values_list_to_array(selected_node) + v_dico['_'.join([band_id, detector])] = {"Band_id": str(band_id), + "Detector": str(detector), + "Values": arr} + + for rec in range(0, 13, 1): + dic = v_dico.copy() + a = {k: v for k, v in dic.items() if v["Band_id"] == str(rec)} + arr = reduce_angle_matrix(x_size, y_size, a) + + # Decoding of band number : + # CF : https: // earth.esa.int / web / sentinel / user - guides / sentinel - 2 - msi / resolutions / radiometric + # Band 8A <=> Band 9 in the mtl + + dst_file_bd = dst_file.replace('.tif', '_band_' + str(rec + 1) + '.tif') + out_list.append(dst_file_bd) + log.debug(' Save in %s',dst_file_bd) + + geo_info = _GeoInfo( + int(x_size), + int(y_size), + int(col_step), + int(row_step), + int(ulx), + int(uly), + wkt + ) + _save_angle_as_img(dst_file_bd, arr, geo_info, f'Viewing_{angle_type}_band_{str(rec + 1)}') + + # clean + arr = None + a = None + + return out_list diff --git a/sen2like/sen2like/core/QI_MTD/QIreport.py b/sen2like/sen2like/core/QI_MTD/QIreport.py index 9c121b7..8dba487 100644 --- a/sen2like/sen2like/core/QI_MTD/QIreport.py +++ b/sen2like/sen2like/core/QI_MTD/QIreport.py @@ -30,6 +30,7 @@ create_child, remove_namespace, ) +from core.QI_MTD.mtd import Metadata log = logging.getLogger('Sen2Like') @@ -82,15 +83,7 @@ def manual_replaces(self, product): chg_elm_with_tag(self.root_out, tag='value', new_value=metadata.qi.get('MEAN', 'None'), attrs={"name": "COREGISTRATION_AFTER_CORRECTION"}) - # # Removing unchanged SBAF values and inserting new ones - ns = 
self.remove_children('./Data_Block/report/checkList[parentID="L2H_SBAF"]/check/extraValues', tag='value', - attrs={'name': 'SBAF_'}) - - for key, item in sorted(metadata.qi.items()): - if 'SBAF_' in key: - create_child(self.root_out, './Data_Block/report/checkList[parentID="L2H_SBAF"]/check/extraValues', - tag=ns + 'value', text=str(item), - attribs={"name": key}) + self._sbaf_replace(metadata) # Change all 'item' urls and names product_name_field = f"product_{self.H_F}_name" @@ -105,6 +98,24 @@ def manual_replaces(self, product): change_elm(self.root_out, rpath='./Data_Block/report/checkList/item', attr_to_change='name', new_value=metadata.mtd.get(granule_name_field)) + def _sbaf_replace(self, metadata: Metadata): + extra_values_elem = './Data_Block/report/checkList[parentID="L2H_SBAF"]/check/extraValues' + # # Removing unchanged SBAF values and inserting new ones + ns = self.remove_children( + extra_values_elem, + tag='value', + attrs={'name': 'SBAF_COEFFICIENT_'} + ) + self.remove_children(extra_values_elem, tag='value', attrs={'name': 'SBAF_OFFSET_'}) + + for key, item in sorted(metadata.qi.items()): + if 'SBAF_COEFFICIENT_' in key or 'SBAF_OFFSET_' in key: + create_child( + self.root_out, extra_values_elem, + tag=ns + 'value', text=str(item), + attribs={"name": key} + ) + def _feed_values_dict(self, metadata): """ Function only used by the QI report writer diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml index 5f7455d..9ff09e6 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml @@ -230,6 +230,8 @@ "SBAF correction" Claverie and al. 
2018 + False + NONE NONE NONE NONE @@ -247,6 +249,26 @@ + + L2H_TOPOGRAPHIC_CORRECTION + L2H_TOPOGRAPHIC_CORRECTION + 1.0 + + + "TOPOGRAPHIC correction" + + NONE + NONE + NONE + NONE + NONE + + + L2F_FUSION L2F_Fusion diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml index 41532e9..c26f192 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml @@ -230,6 +230,8 @@ "SBAF correction" Claverie and al. 2018 + False + NONE NONE NONE NONE @@ -247,6 +249,26 @@ + + L2H_TOPOGRAPHIC_CORRECTION + L2H_TOPOGRAPHIC_CORRECTION + 1.0 + + + "TOPOGRAPHIC correction" + + NONE + NONE + NONE + NONE + NONE + + + diff --git a/sen2like/sen2like/core/S2L_config.py b/sen2like/sen2like/core/S2L_config.py index 7e46436..1ed092e 100644 --- a/sen2like/sen2like/core/S2L_config.py +++ b/sen2like/sen2like/core/S2L_config.py @@ -44,6 +44,7 @@ PROC_BLOCKS['S2L_Atmcor'] = {'extension': '_SURF.TIF', 'applicability': 'L8_L9_S2_Prisma'} PROC_BLOCKS['S2L_Nbar'] = {'extension': '_BRDF.TIF', 'applicability': 'L8_L9_S2_Prisma'} PROC_BLOCKS['S2L_Sbaf'] = {'extension': '_SBAF.TIF', 'applicability': 'L8_L9_S2_Prisma'} +PROC_BLOCKS['S2L_TopographicCorrection'] = {'extension': '_TOPOCORR.TIF', 'applicability': 'L8_L9_S2_Prisma'} PROC_BLOCKS['S2L_PackagerL2H'] = {'extension': None, 'applicability': 'L8_L9_S2_Prisma'} PROC_BLOCKS['S2L_Fusion'] = {'extension': '_FUSION.TIF', 'applicability': 'L8_L9_Prisma'} PROC_BLOCKS['S2L_PackagerL2F'] = {'extension': None, 'applicability': 'L8_L9_S2_Prisma'} diff --git a/sen2like/sen2like/core/dem.py b/sen2like/sen2like/core/dem.py new file mode 100644 index 0000000..8e2776e --- /dev/null +++ b/sen2like/sen2like/core/dem.py @@ -0,0 +1,65 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Module to manage MGRS DEM file +""" + +import logging +import os + +logger = logging.getLogger("Sen2Like") + + +class DEMRepository: # pylint: disable=too-few-public-methods + """ + Access class to MGRS DEM files + DEM files are store as {dem_folder}/{dem_dataset}/Copernicus_DSM_{resolution}m_{tilecode}.TIF + """ + + def __init__(self, dem_folder: str, dem_dataset: str, resolution: int): + """Constructor + + Args: + dem_folder (str): base DEM folder + dem_dataset (str): dataset to use + resolution (int): DEM resolution to use in DEM dataset + """ + self.dataset_name = dem_dataset + self._dem_path = os.path.join(dem_folder, dem_dataset) + self._file_expr = f"Copernicus_DSM_{resolution}m_{{tilecode}}.TIF" + + def get_by_mgrs(self, mgrs_tile_code: str) -> str: + """Get DEM file path for a MGRS tile + + Args: + mgrs_tile_code (str): MGRS tile code. 
+ + Raises: + FileNotFoundError: if the DEM file does not exists in the DEM storage + + Returns: + str: DEM file path + """ + expected_file_name = self._file_expr.format(tilecode=mgrs_tile_code) + expected_file_path = os.path.join(self._dem_path, expected_file_name) + + if not os.path.isfile(expected_file_path): + logger.error("Cannot find %s", expected_file_path) + raise FileNotFoundError(f"Cannot retrieve {expected_file_name} in {self._dem_path}") + + return expected_file_path diff --git a/sen2like/sen2like/core/product_archive/tile_db.py b/sen2like/sen2like/core/product_archive/tile_db.py index 8d9f139..96fc794 100644 --- a/sen2like/sen2like/core/product_archive/tile_db.py +++ b/sen2like/sen2like/core/product_archive/tile_db.py @@ -229,7 +229,7 @@ def _select_tiles_by_spatial_relationships(relation, roi): Returns: list of tile ids """ - with sqlite3.connect(_database_path("s2tiles.db")) as connection: + with sqlite3.connect(_database_path(S2_TILE_DB)) as connection: logging.debug("ROI: %s", roi) connection.enable_load_extension(True) connection.load_extension("mod_spatialite") @@ -271,6 +271,32 @@ def tiles_contains_roi(roi): return _select_tiles_by_spatial_relationships("contains", roi) +def get_mgrs_def(tile: str) -> dict|None: + """Get MGRS tile definition + + Args: + tile (str): MGRS tile code + + Returns: + dict|None: MGRS def having keys: + 'index', 'TILE_ID', 'EPSG', 'UTM_WKT', 'MGRS_REF', 'LL_WKT', 'geometry' + None if mgrs tile is not found + """ + + with sqlite3.connect(_database_path(S2_TILE_DB)) as connection: + logging.debug("TILE: %s", tile) + sql = f"select * from s2tiles where TILE_ID='{tile}'" + logging.debug("SQL request: %s", sql) + connection.row_factory = sqlite3.Row + cur = connection.execute(sql) + res = cur.fetchall() + if len(res) > 0: + return res[0] + + logging.error("tile %s not found in database", tile) + return None + + def mgrs_to_wkt(tile, utm=False): """Get the MGRS tile geom as WKT in LL or UTM. 
@@ -281,7 +307,7 @@ def mgrs_to_wkt(tile, utm=False): Returns: tile geom as WKT or None if no tile match """ - with sqlite3.connect(_database_path("s2tiles.db")) as connection: + with sqlite3.connect(_database_path(S2_TILE_DB)) as connection: logging.debug("TILE: %s", tile) sql = f"select {'UTM_WKT' if utm else 'LL_WKT'} from s2tiles where TILE_ID='{tile}'" logging.debug("SQL request: %s", sql) diff --git a/sen2like/sen2like/core/product_preparation.py b/sen2like/sen2like/core/product_preparation.py index 99a3061..b169b21 100644 --- a/sen2like/sen2like/core/product_preparation.py +++ b/sen2like/sen2like/core/product_preparation.py @@ -36,10 +36,10 @@ logger = logging.getLogger("Sen2Like") -_LS_SENSOR_MISSION_MAPPING = {"L8": "Landsat8","L9": "Landsat9"} +_LS_SENSOR_MISSION_MAPPING = {"L8": "Landsat8", "L9": "Landsat9"} -class ProductPreparator: +class ProductPreparator: # pylint: disable=too-few-public-methods """Product preparator class""" def __init__(self, config: S2L_Config, args: Namespace, ref_image: str): @@ -50,9 +50,7 @@ def __init__(self, config: S2L_Config, args: Namespace, ref_image: str): args (Namespace): program arguments """ self._config = config - self._roi_file = ( - args.roi if args.operational_mode == Mode.ROI_BASED else None - ) + self._roi_file = args.roi if args.operational_mode == Mode.ROI_BASED else None self._ref_image = ref_image def prepare(self, product: S2L_Product): @@ -72,12 +70,15 @@ def prepare(self, product: S2L_Product): # search and attach related product to product # only if S2L_Stitching activated if product.context.doStitching: - logger.debug("Stitching Enable, look for related product for %s", product.name) + logger.debug( + "Stitching Enable, look for related product for %s", product.name + ) self._set_related_product(product) else: logger.debug("Stitching Disable, skip looking for related product") # prepare the product + logger.info("Extract product files for product %s", product.name) self._extract_product_files(product) 
def _search_interrelated_product( @@ -109,33 +110,37 @@ def _search_interrelated_product( row = product.mtl.row logger.debug("Search product on path [%s, %s]", product.mtl.path, row) - search_urls.extend([ - ( - archive.construct_url( - _LS_SENSOR_MISSION_MAPPING.get(product.sensor), - path=product.mtl.path, - row=row, - start_date=start_date, - end_date=end_date, - cloud_cover=100, - ), - None, - ) - ]) + search_urls.extend( + [ + ( + archive.construct_url( + _LS_SENSOR_MISSION_MAPPING.get(product.sensor), + path=product.mtl.path, + row=row, + start_date=start_date, + end_date=end_date, + cloud_cover=100, + ), + None, + ) + ] + ) else: logger.debug("Search product on same tile [%s]", _tile) - search_urls.extend([ - ( - archive.construct_url( - "Sentinel2", - tile=_tile, - start_date=start_date, - end_date=end_date, - cloud_cover=100, - ), - None, - ) - ]) + search_urls.extend( + [ + ( + archive.construct_url( + "Sentinel2", + tile=_tile, + start_date=start_date, + end_date=end_date, + cloud_cover=100, + ), + None, + ) + ] + ) # get products as InputProduct logger.debug("Urls: %s", search_urls) @@ -149,7 +154,6 @@ def _search_interrelated_product( ) def _get_s2_interrelated_product(self, product: S2L_Product) -> InputProduct: - related_product = None logger.debug("Product is located on %s", product.mgrs) @@ -157,7 +161,7 @@ def _get_s2_interrelated_product(self, product: S2L_Product) -> InputProduct: logger.debug("Products on same date:") logger.debug(products_found) if len(products_found): - # verify instrument as S2A/B cannot be stitched with S2P + # verify instrument as S2A/B cannot be stitched with S2P # S2P product archive is the same as S2A/B, so we must check for rel_product in products_found: if product.sensor == rel_product.instrument: @@ -166,7 +170,6 @@ def _get_s2_interrelated_product(self, product: S2L_Product) -> InputProduct: return related_product def _get_l8_interrelated_product(self, product: S2L_Product) -> InputProduct: - related_product = None 
_same_utm_only = self._config.getboolean("same_utm_only") @@ -233,14 +236,12 @@ def _set_related_product(self, product: S2L_Product): logger.info("No product found for stitching") else: product.related_product = related_input_product.s2l_product_class( - related_input_product.path, - product.context + related_input_product.path, product.context ) # set related product working dir product.related_product.working_dir = os.path.join( - self._config.get("wd"), - product.related_product.name + self._config.get("wd"), product.related_product.name ) # set ref image to the related product @@ -248,6 +249,7 @@ def _set_related_product(self, product: S2L_Product): def _extract_aux_product_files(self, product: S2L_Product): """Extract aux product files of the product and its related product if any. + Theses files must be extracted before image processing as they could need to be geo corrected and stitched. - angle images (see 'S2L_Product.get_angle_images' impl) - valid and no data pixel masks (see 'S2L_Product.get_valid_pixel_mask' impl) - NDVI image @@ -266,8 +268,18 @@ def _extract_aux_product_files(self, product: S2L_Product): os.path.join(working_dir, "valid_pixel_mask.tif"), self._roi_file ) - # extract NDVI - if self._config.get("nbar_methode") == "VJB": + # extract NDVI processing blocks that need it (NBAR with VJB or Adaptative SBAF) + if ( + (self._config.getboolean("doNbar") and self._config.get("nbar_methode") == "VJB") + or + ( + self._config.getboolean("doSbaf") + and + product.apply_sbaf_param + and + self._config.getboolean("adaptative") + ) + ): product.get_ndvi_image(os.path.join(working_dir, "ndvi.tif")) def _extract_product_files(self, product: S2L_Product): @@ -315,6 +327,7 @@ def _extract_product_files(self, product: S2L_Product): # extract aux files of related product if any if product.related_product is not None: + logger.info("Extract product files for related product %s", product.related_product.name) 
self._extract_product_files(product.related_product) def _get_scl_map(self, scl_dir: str, product: S2L_Product): @@ -328,7 +341,9 @@ def _get_scl_map(self, scl_dir: str, product: S2L_Product): acq_date = datetime.datetime.strftime(product.acqdate, "%Y%m%dT%H%M%S") result = glob.glob( - os.path.join(scl_dir, product.mgrs, f"T{product.mgrs}_{acq_date}_SCL_60m.tif") + os.path.join( + scl_dir, product.mgrs, f"T{product.mgrs}_{acq_date}_SCL_60m.tif" + ) ) if result: scl_map = result[0] diff --git a/sen2like/sen2like/core/product_process.py b/sen2like/sen2like/core/product_process.py index 8451941..f6ac685 100644 --- a/sen2like/sen2like/core/product_process.py +++ b/sen2like/sen2like/core/product_process.py @@ -19,69 +19,40 @@ Module for 'S2L_Product' process with multiple 'S2L_Process'. IMPORTANT : Note that multithreading can used, see 'ProductProcess' """ -import importlib import logging -import os -import sys from concurrent.futures import ThreadPoolExecutor -from core import S2L_config from core.product_preparation import ProductPreparator from core.products.product import S2L_Product -from core.S2L_config import config -from s2l_processes.S2L_Process import S2L_Process +from core.S2L_config import config, PROC_BLOCKS +from s2l_processes import S2L_Process, create_process_block -logger = logging.getLogger('Sen2Like') - - -_PROCESS_INSTANCES = {} # for parallelization -_PROCESS_BLOCK_CLASSES = {} # OPTIM +logger = logging.getLogger("Sen2Like") # list of the blocks that are available -_list_of_blocks = tuple(S2L_config.PROC_BLOCKS.keys()) -# Add building blocks package to Python path -sys.path.append(os.path.join(os.path.dirname(__file__), "..", "s2l_processes")) - - -def _get_processing_block(block_name: str) -> S2L_Process: - """Get process class associated to block_name. - If class does not have yet been imported, import it, save it and return it, - otherwise, return class. - - Args: - block_name (str): The name of the process block. 
- - Returns: - S2L_Process: S2L_Process class - """ - # import module and class - _processing_block_class = _PROCESS_BLOCK_CLASSES.get(block_name) - if _processing_block_class is None: - module = importlib.import_module(block_name) - _processing_block_class = getattr(module, block_name) - _PROCESS_BLOCK_CLASSES[block_name] = _processing_block_class - - return _processing_block_class +_list_of_blocks = tuple(PROC_BLOCKS.keys()) # pylint: disable=too-few-public-methods class ProductProcess: """ - Class to process a S2L product. + Class to process a S2L S2L_Product. The main function 'run' execute eligible processing blocks for the product - based on the configuration and product context (see 'core.products.productProcessingContext'). + based on the configuration and product context (see 'core.products.ProcessingContext'). MultiThreading is used for parallelization if enabled. If parallelization is enabled, 'S2L_Product' object to process and 'S2L_Process' instances are SHARED in memory. So 'S2L_Process' concrete classes MUST be coded in consequences. """ - def __init__(self, + def __init__( + self, product: S2L_Product, product_preparator: ProductPreparator, parallelize_bands: bool, - bands: list[str]): + bands: list[str], + ): """Constructor. It initializes list of processing block with new instances of eligible concrete S2L_Process @@ -107,7 +78,7 @@ def run(self): """ # displays - logger.info('=' * 50) + logger.info("=" * 50) logger.info("Process : %s %s", self._product.sensor, self._product.path) # Search and attach related product to product @@ -142,7 +113,7 @@ def run(self): for proc_block in self._processing_block_list: proc_block.postprocess(self._product) - def _init_block_list(self): + def _init_block_list(self) -> list[S2L_Process]: """Instantiate eligible S2L_Process concrete class (processing bloc). A processing bloc is eligible if: - Its config param is set to True (doS2Lxxxx=True) in config file. 
@@ -154,7 +125,7 @@ def _init_block_list(self): """ processing_block = [] for block_name in _list_of_blocks: - _param = 'do' + block_name.split('_')[-1] + _param = "do" + block_name.split("_")[-1] # disable in conf and not override in context if not config.getboolean(_param) and not hasattr(self._product.context, _param): @@ -162,16 +133,18 @@ def _init_block_list(self): continue # override in context and False - if hasattr(self._product.context, _param) and not getattr(self._product.context, _param): + if hasattr(self._product.context, _param) and not getattr( + self._product.context, _param + ): logger.debug("%s disable by configuration", block_name) continue # check if block is applicable to the sensor (L8, L9 or S2) - if self._product.sensor not in S2L_config.PROC_BLOCKS[block_name]['applicability']: + if self._product.sensor not in PROC_BLOCKS[block_name]["applicability"]: logger.debug("%s not applicable to %s", block_name, self._product.sensor) continue - proc_block_instance = _get_processing_block(block_name)() + proc_block_instance = create_process_block(block_name) processing_block.append(proc_block_instance) return processing_block @@ -179,7 +152,7 @@ def _init_block_list(self): def _run_parallel(self, bands): # Concurrent call of _process_band for each band using Thread - number_of_process = int(config.get('number_of_process', 1)) + number_of_process = int(config.get("number_of_process", 1)) bands_filenames = [] with ThreadPoolExecutor(number_of_process) as executor: @@ -189,7 +162,6 @@ def _run_parallel(self, bands): return bands_filenames def _get_bands(self): - bands = self._bands # For each band or a selection of bands: if bands is None: @@ -201,7 +173,7 @@ def _get_bands(self): return bands - def _process_band(self, band: str) -> str|None: + def _process_band(self, band: str) -> str | None: """Run all the blocks over one band of a product. 
Args: @@ -211,7 +183,7 @@ def _process_band(self, band: str) -> str|None: str: Last file path of the image generated by the processing chain. None if no image for band """ - logger.info('--- Process band %s ---', band) + logger.info("--- Process band %s ---", band) # get band file path image = self._product.get_band_file(band) @@ -220,8 +192,8 @@ def _process_band(self, band: str) -> str|None: # iterate on blocks for proc_block in self._processing_block_list: - logger.info('----- Start %s for band %s -----', proc_block.__class__.__name__, band) + logger.info("----- Start %s for band %s -----", proc_block.__class__.__name__, band) image = proc_block.process(self._product, image, band) - logger.info('----- Finish %s for band %s -----', proc_block.__class__.__name__, band) + logger.info("----- Finish %s for band %s -----", proc_block.__class__.__name__, band) return image.filename diff --git a/sen2like/sen2like/core/products/product.py b/sen2like/sen2like/core/products/product.py index bb8e33d..4fc0a8a 100644 --- a/sen2like/sen2like/core/products/product.py +++ b/sen2like/sen2like/core/products/product.py @@ -40,29 +40,41 @@ class ProcessingContext: """ Processing context class to store conf parameters - that could need to be modified to properly process a product + that could need to be modified to properly process a product. + It allows to override processing block activation flag (doStitching for example) + that are used by `ProductProcess` to run or not a processing block. + See `ProductProcess._init_block_list` implementation for more details. 
""" def __init__(self, config: S2L_Config, tile: str): self.tile = tile + + self.doStitching = config.getboolean('doStitching') # pylint: disable=invalid-name + self.doInterCalibration = config.getboolean('doInterCalibration') # pylint: disable=invalid-name + + self.doAtmcor = config.getboolean('doAtmcor') # pylint: disable=invalid-name self.use_sen2cor = config.getboolean('use_sen2cor') - # pylint: disable=invalid-name - self.doStitching = config.getboolean('doStitching') - self.doInterCalibration = config.getboolean('doInterCalibration') + + self.doTopographicCorrection = config.getboolean('doTopographicCorrection') # pylint: disable=invalid-name + self.sen2cor_topographic_correction = config.getboolean('sen2cor_topographic_correction') + + if self.doAtmcor and config.get('s2_processing_level') == 'LEVEL2A': + logger.warning("Disable atmospheric correction (SMAC and sen2cor) because process L2A product") + self.doAtmcor = False + self.use_sen2cor = False + + if not self.doTopographicCorrection: + logger.warning("Disable sen2cor topographic correction because main topographic correction is disabled") + self.sen2cor_topographic_correction = False + elif config.get('s2_processing_level') == 'LEVEL2A': # only for log + logger.warning("Will apply topographic correction on L2A product, please verify that the input L2A does not yet have topographic correction") if not config.getboolean('doFusion') and config.getboolean('doPackagerL2F'): - logger.warning("Disable L2F packager because Fusion is disable") - self.doPackagerL2F = False + logger.warning("Disable L2F packager because Fusion is disabled") + self.doPackagerL2F = False # pylint: disable=invalid-name if config.getboolean('doFusion') and not config.getboolean('doPackagerL2F'): - logger.warning("Disable Fusion because L2F Packaging is disable") - self.doFusion = False - - _use_smac = config.getboolean('use_smac') - # use_smac could not be in conf, meaning we want to use it (default behavior) - if _use_smac is None: 
- # force True as default for usage in S2L_Atmcor - _use_smac = True - self.use_smac = _use_smac + logger.warning("Disable Fusion because L2F Packaging is disabled") + self.doFusion = False # pylint: disable=invalid-name class ClassPropertyDescriptor(object): @@ -278,6 +290,7 @@ def get_band_from_s2(cls, s2_band: str) -> str: return cls.reverse_bands_mapping.get(s2_band) def get_ndvi_image(self, ndvi_filepath): + logger.info("Extract NDVI to %s", ndvi_filepath) B04_image = self.get_band_file(self.reverse_bands_mapping['B04']) B8A_image = self.get_band_file(self.reverse_bands_mapping['B8A']) B04 = B04_image.array @@ -363,3 +376,11 @@ def absolute_orbit(self) -> str: str: product absolute orbit """ return self.mtl.absolute_orbit + + @property + def sun_zenith_angle(self) -> float: + return float(self.mtl.sun_zenith_angle) + + @property + def sun_azimuth_angle(self) -> float: + return float(self.mtl.sun_azimuth_angle) diff --git a/sen2like/sen2like/core/sen2cor_client/L2A_GIPP_ROI_Landsat_template.xml b/sen2like/sen2like/core/sen2cor_client/L2A_GIPP_ROI_Landsat_template.xml index 36d3d04..c777fbd 100644 --- a/sen2like/sen2like/core/sen2cor_client/L2A_GIPP_ROI_Landsat_template.xml +++ b/sen2like/sen2like/core/sen2cor_client/L2A_GIPP_ROI_Landsat_template.xml @@ -31,16 +31,16 @@ NONE - dem/srtm + dem/CopernicusDEM90 - http://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/TIFF/ - + https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/COP-DEM_GLO-90-DGED__2022_1/ + FALSE FALSE @@ -168,4 +168,4 @@ - \ No newline at end of file + diff --git a/sen2like/sen2like/core/sen2cor_client/sen2cor_client.py b/sen2like/sen2like/core/sen2cor_client/sen2cor_client.py index eea36d1..df6509b 100644 --- a/sen2like/sen2like/core/sen2cor_client/sen2cor_client.py +++ b/sen2like/sen2like/core/sen2cor_client/sen2cor_client.py @@ -19,10 +19,8 @@ import logging import os -import shutil import subprocess -import xml.etree.ElementTree as ET - +import lxml.etree as ET from 
core import S2L_config from grids.mgrs_framing import pixel_center @@ -45,14 +43,15 @@ class Sen2corClient: "Prisma" : ["--Hyper_MS", "--resolution", "30"] } - def __init__(self, sen2cor_command, out_mgrs): + def __init__(self, sen2cor_command, out_mgrs, enable_topo_corr=False): """ :params sen2cor_command: main sen2cor python script :params out_mgrs: out mgrs tile code, sen2cor will only compute value on this tile - :params wd: work directory + :params enable_topo_corr: activate or not topographic correction """ self.sen2cor_command = sen2cor_command self.out_mgrs = out_mgrs + self.enable_topo_corr = enable_topo_corr def run(self, product): """ @@ -107,30 +106,33 @@ def _write_gipp(self, product): logger.debug('GIPP template : %s', self.gipp_template_file) + with open(self.gipp_template_file, mode='r', encoding='utf-8') as template: + tree = ET.parse(template)#, parser = _CommentedTreeBuilder()) + + # configure topo correction + root = tree.getroot() + dem_correction_node = root.find('Atmospheric_Correction/Flags/DEM_Terrain_Correction') + dem_correction_node.text = "TRUE" if self.enable_topo_corr else "FALSE" + # ref_band = None is considered as S2 product format (S2A, S2B, S2P prisma) ref_band = self.roi_ref_band.get(product.mtl.mission, None) if ref_band is None: - shutil.copyfile(self.gipp_template_file, gipp_path) logger.debug("For sentinel, sen2cor don't use ROI") + ET.ElementTree(root).write(gipp_path, encoding='utf-8', xml_declaration=True) return gipp_path - y, x = pixel_center(ref_band, self.out_mgrs) - logger.debug('Pixel center : (%s, %s)', y, x) + ref_band_file = product.get_band_file(ref_band) - with open(self.gipp_template_file, mode='r', encoding='utf-8') as template: - tree = ET.parse(template) + y, x = pixel_center(ref_band_file, self.out_mgrs) + logger.debug('Pixel center : (%s, %s)', y, x) - root = tree.getroot() row0 = root.find('Common_Section/Region_Of_Interest/row0') row0.text = str(y) col0 = 
root.find('Common_Section/Region_Of_Interest/col0') col0.text = str(x) - out_string = ET.tostring(root) - - with open(gipp_path, mode='wb') as gipp: - gipp.write(out_string) + ET.ElementTree(root).write(gipp_path, encoding='utf-8', xml_declaration=True) logger.info('GIPP L2A : %s', gipp_path) return gipp_path diff --git a/sen2like/sen2like/grids/mgrs_framing.py b/sen2like/sen2like/grids/mgrs_framing.py index 7e707bc..8e57f5c 100644 --- a/sen2like/sen2like/grids/mgrs_framing.py +++ b/sen2like/sen2like/grids/mgrs_framing.py @@ -22,10 +22,12 @@ import os from dataclasses import dataclass from math import ceil +from typing import NamedTuple import affine import numpy as np import pandas as pd +from numpy.typing import NDArray from osgeo import gdal, osr from shapely.geometry.base import BaseGeometry from shapely.wkt import loads @@ -68,6 +70,20 @@ class MGRSGeoInfo: """tile geometry as UTM coords""" +class _CorrectionConfig(NamedTuple): + """Array correction config. + method allowed : translation | polynomial + For a translation method, x_off, y_off, x_size and y_size MUST be setted + For a polynomial method, klt_dir must be set + """ + method: str + x_off: float + y_off: float + x_size: int + y_size: int + klt_dir: str + + # skimage / gdal resampling method mapping order_to_gdal_resampling = { 0: 'near', @@ -193,6 +209,7 @@ def _reproject(filepath: str, dir_out: str, ds_src: gdal.Dataset, x_res: int, y_ raise error + def get_mgrs_geo_info(tile_code: str) -> MGRSGeoInfo: """Get MGRS geo information of the given tile @@ -210,6 +227,51 @@ def get_mgrs_geo_info(tile_code: str) -> MGRSGeoInfo: return MGRSGeoInfo(roi['EPSG'][0], loads(roi['UTM_WKT'][0])) +def _do_correction(array: NDArray, order: int, config: _CorrectionConfig): + """Apply correction to array + + Args: + array (NDArray): array to transform + order (int): type of resampling + config (CorrectionConfig): correction config + + Raises: + ValueError: if correction method is unknown + + Returns: + NDArray: 
corrected array + """ + if config.method == "translation": + log.info("Translation correction") + # translate and reframe (order= 0:nearest, 1: linear, 3:cubic) + transform = SimilarityTransform(translation=(config.x_off, config.y_off)) + return skit_warp( + array, + inverse_map=transform, + output_shape=(config.y_size, config.x_size), + order=order, + preserve_range=True + ) + + if config.method == "polynomial": + log.info("Polynomial correction") + # klt_dir should be working dir + data_frame = pd.read_csv(os.path.join(config.klt_dir, "KLT.csv"), sep=";") + + dst = np.array( + [ + (data_frame.x0 + data_frame.dx).to_numpy(), + (data_frame.y0 + data_frame.dy).to_numpy() + ] + ).T + + src = np.array([data_frame.x0.to_numpy(), data_frame.y0.to_numpy()]).T + transform = estimate_transform(config.method, src, dst) + return skit_warp(array, inverse_map=transform, preserve_range=True, cval=1000) + + raise ValueError(f"Unknown correction method: {config.method}") + + def reframe( image: S2L_ImageFile, tile_code: str, @@ -218,8 +280,6 @@ def reframe( dy=0., order=3, dtype=None, - margin=0, - compute_offsets=False, method="translation" ) -> S2L_ImageFile: """Reframe SINGLE band image in MGRS tile @@ -227,15 +287,14 @@ def reframe( Args: image (S2L_ImageFile): image to reframe tile_code (str): MGRS tile code - filepath_out (str): reframed image destination file path + filepath_out (str): reframed image destination file path, should be working dir dx (float, optional): x correction to apply during reframing. Defaults to 0.. dy (float, optional): y correction to apply during reframing. Defaults to 0.. order (int, optional): type of resampling (see skimage warp). Defaults to 3. dtype (numpy dtype, optional): output image dtype name. Defaults to None (use input image dtype). - margin (int, optional): margin to apply to output. Defaults to 0. - compute_offsets (bool, optional): TODO : see with Vince. Defaults to False. 
method (str, optional): geometry correction strategy to apply. Expect 'polynomial' or 'translation". Defaults to 'translation'. + If polynomial, KLT.csv file should be located in dirname of filepath_out Returns: S2L_ImageFile: reframed image @@ -244,8 +303,8 @@ def reframe( mgrs_geo_info = get_mgrs_geo_info(tile_code) tile_geom = mgrs_geo_info.geometry - box = Box(x_min=tile_geom.bounds[0] - margin * image.xRes, y_min=tile_geom.bounds[1] + margin * image.yRes, - x_max=tile_geom.bounds[2] + margin * image.xRes, y_max=tile_geom.bounds[3] - margin * image.yRes) + box = Box(x_min=tile_geom.bounds[0], y_min=tile_geom.bounds[1], + x_max=tile_geom.bounds[2], y_max=tile_geom.bounds[3]) # UTM South vs. UTM North ? utm_offset = 0 @@ -268,20 +327,13 @@ def reframe( y_res = geo[5] result = _reproject( - image.filepath, - os.path.dirname(filepath_out), - ds_src, + image.filepath, + os.path.dirname(filepath_out), + ds_src, x_res, y_res, target_srs, order ) image = S2L_ImageFile(result.out_file_path) - # compute offsets (grid origin + dx/dy - if compute_offsets: - r_dx = image.xRes * (((box.x_min - image.xMin) / image.xRes) % 1) - r_dy = image.yRes * (((box.y_min - image.yMin) / image.yRes) % 1) - else: - r_dx = r_dy = 0 - # compute offsets (grid origin + dx/dy) xOff = (box.x_min - image.xMin + dx) / image.xRes yOff = (box.y_max - utm_offset - image.yMax - dy) / image.yRes @@ -305,32 +357,26 @@ def reframe( array[array == 0.0] = np.nan if xOff == 0 and yOff == 0 and image.xSize == xSize and image.ySize == ySize: - new = array + new_array = array else: - if method == "translation": - log.info("Translation correction") - # translate and reframe (order= 0:nearest, 1: linear, 3:cubic) - transform = SimilarityTransform(translation=(xOff, yOff)) - new = skit_warp(array, inverse_map=transform, output_shape=(ySize, xSize), order=order, preserve_range=True) - elif method == "polynomial": - log.info("Polynomial correction") - # filepath_out should be in working dir - df = 
pd.read_csv(os.path.join(os.path.dirname(filepath_out), "KLT.csv"), sep=";") - dst = np.array([(df.x0 + df.dx).to_numpy(), (df.y0 + df.dy).to_numpy()]).T - src = np.array([df.x0.to_numpy(), df.y0.to_numpy()]).T - tform = estimate_transform(method, src, dst) - new = skit_warp(array, inverse_map=tform, preserve_range=True, cval=1000) - else: - raise ValueError(f"Unknown correction method: {method}") + correction_config = _CorrectionConfig( + method, + xOff, + yOff, + xSize, + ySize, + os.path.dirname(filepath_out) + ) + new_array = _do_correction(array, order, correction_config) # As we played with 0 and NaN, restore zeros for floating array # we have to restore zero for output NaN if np.issubdtype(_dtype, np.floating): - new[np.isnan(new)] = 0.0 + new_array[np.isnan(new_array)] = 0.0 # set into new S2L_ImageFile - _origin=(box.x_min + r_dx, box.y_max + r_dy) - return image.duplicate(filepath_out, array=new.astype(_dtype), origin=_origin, output_EPSG=target_epsg) + _origin=(box.x_min, box.y_max) + return image.duplicate(filepath_out, array=new_array.astype(_dtype), origin=_origin, output_EPSG=target_epsg) def reframe_multiband(filepath_in: str, tile_code: str, filepath_out: str, dx=0., dy=0., order=3): @@ -377,9 +423,9 @@ def reframe_multiband(filepath_in: str, tile_code: str, filepath_out: str, dx=0. image_epsg = image_srs.GetAuthorityCode(None) log.info("Image epsg and target epsg differ: %s vs %s.", image_epsg, target_epsg) result = _reproject( - filepath_in, - os.path.dirname(filepath_in), - ds_src, + filepath_in, + os.path.dirname(filepath_in), + ds_src, xRes, yRes, target_srs, order ) ds_src = result.dataset diff --git a/sen2like/sen2like/klt/__init__.py b/sen2like/sen2like/klt/__init__.py new file mode 100644 index 0000000..4a19e31 --- /dev/null +++ b/sen2like/sen2like/klt/__init__.py @@ -0,0 +1,53 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +KLT module. +Expose KLTMatcher, KTLResult, and klt_matcher_factory. +klt_matcher_factory keep a single (same) instance of KLTMatcher +""" + +from .klt import KLTMatcher, KTLResult + + +class KLTMatcherFactory: + # pylint: disable=too-few-public-methods + """Factory that keep a single instance of KLTMatcher. + It is recommended to use the module `klt_matcher_factory` instance of this class + """ + + klt_matcher: KLTMatcher | None = None + + def get_klt_matcher(self) -> KLTMatcher: + """Provide a KLTMatcher + + Returns: + KLTMatcher: matcher + """ + if self.klt_matcher is None: + self.klt_matcher = KLTMatcher() + return self.klt_matcher + + +klt_matcher_factory = KLTMatcherFactory() + +__all__ = [ + "KLTMatcher", + "KTLResult", + "klt_matcher_factory", +] diff --git a/sen2like/sen2like/s2l_processes/S2L_Atmcor.py b/sen2like/sen2like/s2l_processes/S2L_Atmcor.py index b0a5adc..9cff4ae 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Atmcor.py +++ b/sen2like/sen2like/s2l_processes/S2L_Atmcor.py @@ -60,16 +60,15 @@ def get_smac_coefficients(product, band): class S2L_Atmcor(S2L_Process): """ - Atmo Correction processing block class. + Atmospheric Correction processing block class. Only able to run SMAC atmo corr as sen2cor cannot be run by band. - If use_sen2cor=True in the config, then this class set S2A_AC AC_PROCESSOR quality parameter. 
- If use_smac=True in the config, then run SMAC atmo corr and set S2A_AC quality parameters. - Notice that use_sen2cor and use_smac can be overridden depending on the type of product to process. + If use_sen2cor=True in the config, then this processing block should be disabled + Notice that use_sen2cor and doAtmcor can be overridden depending on the type of product to process. See sen2like module about """ - def __init__(self): - super().__init__() + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products) self._mgrs = GridsConverter() # default params self.uH2O = 2.0 # Water Vapor content - unit: g.cm-2 @@ -147,6 +146,7 @@ def smac_correction(self, product, array_in, band): def preprocess(self, product: S2L_Product): log.debug("SMAC Correction") + mtl = product.mtl extent = self._mgrs.get_corners(product.mgrs, out_epsg=4326) @@ -215,18 +215,13 @@ def preprocess(self, product: S2L_Product): def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') - out_image = image - - if product.context.use_smac: - # SMAC correction - array_in = image.array - array_out = self.smac_correction(product, array_in, band) - out_image = image.duplicate(self.output_file(product, band), array_out) - if S2L_config.config.getboolean('generate_intermediate_products'): - image.write(creation_options=['COMPRESS=LZW']) - else: - log.info("Atmo corr already done with sen2cor") - + # SMAC correction + array_in = image.array + array_out = self.smac_correction(product, array_in, band) + out_image = image.duplicate(self.output_file(product, band), array_out) + if self.generate_intermediate_products: + image.write(creation_options=['COMPRESS=LZW']) + log.info('End') return out_image @@ -237,13 +232,10 @@ def postprocess(self, product: S2L_Product): Args: product (S2L_Product): product to post process """ - if product.context.use_sen2cor: - product.metadata.qi["AC_PROCESSOR"] = "SEN2COR" - - elif 
product.context.use_smac: - product.metadata.qi["AC_PROCESSOR"] = "SMAC" - # TODO: put config param in self ? - product.metadata.qi["GRANULE_MEAN_WV"] = self.uH2O - product.metadata.qi["OZONE_VALUE"] = self.uO3 - product.metadata.qi["PRESSURE"] = self.pressure - product.metadata.qi["GRANULE_MEAN_AOT"] = self.taup550 + + product.metadata.qi["AC_PROCESSOR"] = "SMAC" + # TODO: put config param in self ? + product.metadata.qi["GRANULE_MEAN_WV"] = self.uH2O + product.metadata.qi["OZONE_VALUE"] = self.uO3 + product.metadata.qi["PRESSURE"] = self.pressure + product.metadata.qi["GRANULE_MEAN_AOT"] = self.taup550 diff --git a/sen2like/sen2like/s2l_processes/S2L_Fusion.py b/sen2like/sen2like/s2l_processes/S2L_Fusion.py index 2fb1b67..29e7788 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Fusion.py +++ b/sen2like/sen2like/s2l_processes/S2L_Fusion.py @@ -90,7 +90,7 @@ def get_fractional_year(ad): class S2L_Fusion(S2L_Process): - def __init__(self): + def __init__(self, generate_intermediate_products: bool): super().__init__() self.reference_products : list(S2L_HLS_Product) = [] self._predict_method = None @@ -173,7 +173,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ array_L2H_predict, array_L2F_predict = self._predict(product, band_s2, qa_mask, output_shape) # save - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: self._save_as_image_file(image_file_L2F, qa_mask, product, band, '_FUSION_QA.TIF') self._save_as_image_file(image_file_L2F, array_L2H_predict, product, band, '_FUSION_L2H_PREDICT.TIF') self._save_as_image_file(image_file_L2F, array_L2F_predict, product, band, '_FUSION_L2F_PREDICT.TIF') @@ -184,7 +184,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ array_L2H_predict, array_L2F_predict = self._composite(band_s2, output_shape) # save - if S2L_config.config.getboolean('generate_intermediate_products'): + if 
self.generate_intermediate_products: self._save_as_image_file(image_file_L2F, array_L2H_predict, product, band, '_FUSION_L2H_COMPO.TIF') self._save_as_image_file(image_file_L2F, array_L2F_predict, product, band, '_FUSION_L2F_COMPO.TIF') @@ -205,7 +205,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ image, image_out, S2L_ImageFile(mask_filename), nodata_value=nodata_value) log.debug('Fusion auto check proportional difference of L2F from L2H') out_stat(proportion_diff * proportion_diff_mask, log, 'proportional diff') - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: proportion_diff_img = image.duplicate( filepath=os.path.join( product.working_dir, @@ -253,7 +253,7 @@ def _save_as_image_file(self, image_template, array, product, band, extension): product.get_band_file(band).rootname + extension ) image_file = image_template.duplicate(path, array=array) - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: image_file.write(creation_options=['COMPRESS=LZW']) return image_file diff --git a/sen2like/sen2like/s2l_processes/S2L_Geometry.py b/sen2like/sen2like/s2l_processes/S2L_Geometry.py index c749aeb..f719572 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Geometry.py +++ b/sen2like/sen2like/s2l_processes/S2L_Geometry.py @@ -21,19 +21,19 @@ import os import numpy as np - -from core import S2L_config from core.image_file import S2L_ImageFile from core.products.product import S2L_Product from core.reference_image import get_resampled_ref_image from grids import mgrs_framing -from klt.klt import KLTMatcher, KTLResult +from klt import KLTMatcher, KTLResult from s2l_processes.S2L_Process import S2L_Process log = logging.getLogger("Sen2Like") -def reframe_mask(product: S2L_Product, product_mask_filename_attr: str, output_filename: str, **kwargs) -> S2L_ImageFile: +def reframe_mask( + product: S2L_Product, 
product_mask_filename_attr: str, output_filename: str, **kwargs +) -> S2L_ImageFile: """Reframe a mask of a product and set product reader mask attr to the reframed mask Args: @@ -49,9 +49,11 @@ def reframe_mask(product: S2L_Product, product_mask_filename_attr: str, output_f mask_file_path = getattr(product, product_mask_filename_attr, None) image = S2L_ImageFile(mask_file_path) - out_image = mgrs_framing.reframe(image, product.mgrs, filepath_out, product.dx, product.dy, order=0) + out_image = mgrs_framing.reframe( + image, product.mgrs, filepath_out, product.dx, product.dy, order=0 + ) - out_image.write(creation_options=['COMPRESS=LZW'], **kwargs) + out_image.write(creation_options=["COMPRESS=LZW"], **kwargs) setattr(product, product_mask_filename_attr, filepath_out) @@ -59,14 +61,33 @@ def reframe_mask(product: S2L_Product, product_mask_filename_attr: str, output_f class S2L_Geometry(S2L_Process): + def __init__( + self, + klt_matcher: KLTMatcher, + force_geometric_correction: bool, + do_matching_correction: bool, + reference_band: str, + generate_intermediate_products: bool, + ): + """Constructor - def __init__(self): - super().__init__() + Args: + klt_matcher (KLTMatcher): KLT matcher component + force_geometric_correction (bool): force geometric correction for refined products + do_matching_correction (bool): compute or not dx/dy. + If False, the consequence is that only reframing will be done, no correction at all + reference_band (str): the reference band for matching + generate_intermediate_products (bool): flag to generate or not intermediate image products. 
+ """ + super().__init__(generate_intermediate_products) + self._force_geometric_correction = force_geometric_correction + self._do_matching_correction = do_matching_correction + self._reference_band = reference_band self._output_file = None # avoid process related product when process is called from preprocess for "main" product self._process_related_product = False self._klt_results = [] - self._klt_matcher = KLTMatcher() + self._klt_matcher = klt_matcher def preprocess(self, product: S2L_Product): """ @@ -84,7 +105,7 @@ def preprocess(self, product: S2L_Product): # No geometric correction for refined products if product.mtl.is_refined: - if S2L_config.config.getboolean('force_geometric_correction'): + if self._force_geometric_correction: log.info("Product is refined but geometric correction is forced.") else: log.info("Product is refined: no additional geometric correction.") @@ -94,7 +115,7 @@ def preprocess(self, product: S2L_Product): self._preprocess_related(product) return - log.info('Start S2L_Geometry preprocess for %s' , product.name) + log.info("Start S2L_Geometry preprocess for %s", product.name) # reframe angle file and validity mask, reframed validity mask is needed for matching self._pre_reframe_aux_files(product) @@ -118,18 +139,16 @@ def _pre_reframe_aux_files(self, product: S2L_Product): """ # Reframe angles, only once without correction because very low resolution if product.reframe_angle_file: - filepath_out = os.path.join( - product.working_dir, - 'tie_points_REFRAMED.TIF' + filepath_out = os.path.join(product.working_dir, "tie_points_REFRAMED.TIF") + mgrs_framing.reframe_multiband( + product.angles_file, product.mgrs, filepath_out, product.dx, product.dy, order=0 ) - mgrs_framing.reframe_multiband(product.angles_file, product.mgrs, filepath_out, - product.dx, product.dy, order=0) # update product angles_images product.angles_file = filepath_out # Reframe validity mask only because needed for KLT if product.mask_filename: - 
reframe_mask(product, "mask_filename", 'valid_pixel_mask_REFRAMED.TIF') + reframe_mask(product, "mask_filename", "valid_pixel_mask_REFRAMED.TIF") def _post_reframe_aux_files(self, product: S2L_Product): """Reframe validity mask, no data mask and ndvi file if they exist by applying product dx/dy @@ -138,13 +157,15 @@ def _post_reframe_aux_files(self, product: S2L_Product): product (S2L_Product): product having aux data file to reframe """ if product.mask_filename: - reframe_mask(product, "mask_filename", 'valid_pixel_mask_REFRAMED_GEOM_CORRECTED.TIF') + reframe_mask(product, "mask_filename", "valid_pixel_mask_REFRAMED_GEOM_CORRECTED.TIF") if product.nodata_mask_filename: - reframe_mask(product, "nodata_mask_filename", 'nodata__mask_REFRAMED_GEOM_CORRECTED.TIF') + reframe_mask( + product, "nodata_mask_filename", "nodata__mask_REFRAMED_GEOM_CORRECTED.TIF" + ) if product.ndvi_filename is not None: - reframe_mask(product, "ndvi_filename", 'ndvi_REFRAMED_GEOM_CORRECTED.TIF', DCmode=True) + reframe_mask(product, "ndvi_filename", "ndvi_REFRAMED_GEOM_CORRECTED.TIF", DCmode=True) def _set_qi_information(self, product): """Set COREGISTRATION_BEFORE_CORRECTION, INPUT_RMSE_X, INPUT_RMSE_Y in QI report. 
@@ -153,18 +174,20 @@ def _set_qi_information(self, product): # self._klt_results have been filled by co registration for each product (main and related) # so we can compute COREGISTRATION_BEFORE_CORRECTION QI field if self._klt_results: - stats = { - "dist_means": [], - "rmse_x": [], - "rmse_y": [] - } + stats = {"dist_means": [], "rmse_x": [], "rmse_y": []} for klt_result in self._klt_results: - dist = np.sqrt(np.power(klt_result.dx_array, 2) + np.power(klt_result.dy_array, 2)).flatten() + dist = np.sqrt( + np.power(klt_result.dx_array, 2) + np.power(klt_result.dy_array, 2) + ).flatten() stats["dist_means"].append(np.round(np.mean(dist), 1)) - stats["rmse_x"].append(np.round(np.sqrt(np.mean(np.power(klt_result.dx_array, 2))), 1)) - stats["rmse_y"].append(np.round(np.sqrt(np.mean(np.power(klt_result.dy_array, 2))), 1)) + stats["rmse_x"].append( + np.round(np.sqrt(np.mean(np.power(klt_result.dx_array, 2))), 1) + ) + stats["rmse_y"].append( + np.round(np.sqrt(np.mean(np.power(klt_result.dy_array, 2))), 1) + ) self._set_formatted_qi(product, "COREGISTRATION_BEFORE_CORRECTION", stats["dist_means"]) self._set_formatted_qi(product, "INPUT_RMSE_X", stats["rmse_x"]) @@ -203,7 +226,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ """ # No geometric correction for refined products if product.mtl.is_refined: - if S2L_config.config.getboolean('force_geometric_correction'): + if self._force_geometric_correction: log.info("Product is refined but geometric correction is forced.") else: log.info("process: Product is refined: no additional geometric correction.") @@ -214,13 +237,13 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ self._output_file = self.output_file(product, band) - log.info('Start S2L_Geometry process for %s band %s' , product.name, band) + log.info("Start S2L_Geometry process for %s band %s", product.name, band) # MGRS reframing - log.debug('Product dx / dy : %s / %s', product.dx, 
product.dy) + log.debug("Product dx / dy : %s / %s", product.dx, product.dy) image = self._reframe(product, image, product.dx, product.dy) - log.info('End') + log.info("End") # attempt to process related self._process_related(product, band) @@ -239,7 +262,9 @@ def _process_related(self, product: S2L_Product, band: str): # when process is called first from preprocess, does not process the related product # let it to the call of preprocess for the related product if self._process_related_product and product.related_product is not None: - related_image = self.process(product.related_product, product.related_product.get_band_file(band), band) + related_image = self.process( + product.related_product, product.related_product.get_band_file(band), band + ) # set related_image IN THE RELATED_PRODUCT of product product.related_product.related_image = related_image @@ -251,7 +276,7 @@ def _do_matching(self, product: S2L_Product): product (S2L_Product): product to compute dx/dy for """ - if not S2L_config.config.getboolean('doMatchingCorrection'): + if not self._do_matching_correction: log.warning("Matching correction is disabled") return @@ -260,14 +285,16 @@ def _do_matching(self, product: S2L_Product): return # reframe image reference band image - band = S2L_config.config.get('reference_band', 'B04') - image = product.get_band_file(band) - reframed_image = self.process(product, image, band) + image = product.get_band_file(self._reference_band) + reframed_image = self.process(product, image, self._reference_band) # get ref image at the same resolution ref_image = get_resampled_ref_image(reframed_image, product.ref_image) if not ref_image: - log.warning("Cannot do matching correction, no reference image found for image %s", reframed_image.filepath) + log.warning( + "Cannot do matching correction, no reference image found for image %s", + reframed_image.filepath, + ) return # open validity mask @@ -276,9 +303,7 @@ def _do_matching(self, product: S2L_Product): # matching to 
compute product dx/dy # Fine resolution of correlation grid (for accurate dx dy computation) klt_result = self._klt_matcher.do_matching( - product.working_dir, - ref_image, - reframed_image, mask.array + product.working_dir, ref_image, reframed_image, mask.array ) # save values for future correction process on bands @@ -289,7 +314,7 @@ def _do_matching(self, product: S2L_Product): self._klt_results.append(klt_result) log.info("Geometrical Offsets (DX/DY): %sm %sm", product.dx, product.dy) - def _reframe(self, product: S2L_Product, image_in: S2L_ImageFile, dx=0., dy=0.): + def _reframe(self, product: S2L_Product, image_in: S2L_ImageFile, dx=0.0, dy=0.0): """ Reframe image of the given product in the product mgrs tile @@ -302,7 +327,7 @@ def _reframe(self, product: S2L_Product, image_in: S2L_ImageFile, dx=0., dy=0.): Returns: S2L_ImageFile: reframed image """ - log.info('MGRS Framing: Start...') + log.info("MGRS Framing: Start...") # reframe on MGRS image_out = mgrs_framing.reframe( @@ -312,12 +337,12 @@ def _reframe(self, product: S2L_Product, image_in: S2L_ImageFile, dx=0., dy=0.): dx, dy, dtype=np.float32, - method=product.geometry_correction_strategy + method=product.geometry_correction_strategy, ) # display - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: image_out.write(DCmode=True) # digital count - log.info('MGRS Framing: End') + log.info("MGRS Framing: End") return image_out diff --git a/sen2like/sen2like/s2l_processes/S2L_GeometryCheck.py b/sen2like/sen2like/s2l_processes/S2L_GeometryCheck.py index ffc7273..30bef80 100644 --- a/sen2like/sen2like/s2l_processes/S2L_GeometryCheck.py +++ b/sen2like/sen2like/s2l_processes/S2L_GeometryCheck.py @@ -20,36 +20,48 @@ import os import numpy as np -from scipy.stats import kurtosis, skew - -from core import S2L_config from core.image_file import S2L_ImageFile from core.products.product import S2L_Product from core.reference_image import 
get_resampled_ref_image -from klt import klt +from klt import KLTMatcher, KTLResult from s2l_processes.S2L_Process import S2L_Process +from scipy.stats import kurtosis, skew log = logging.getLogger("Sen2Like") class S2L_GeometryCheck(S2L_Process): - """Class to verify product geometry - - Args: - S2L_Process (_type_): _description_ - """ - - def __init__(self): - super().__init__() + """Class to verify product geometry""" + + def __init__( + self, + klt_matcher: KLTMatcher, + assess_bands: list[str], + reference_band: str, + generate_intermediate_products: bool, + ): + """Constructor + + Args: + klt_matcher (KLTMatcher): KLT matcher component + assess_bands (list[str]): list of S2 band name for which to do the assessment. + For other missions, band name resolution will be performed + reference_band (str): reference band name that is used for correction + generate_intermediate_products (bool): generate or not intermediate image products. + """ + super().__init__(generate_intermediate_products) self._tmp_stats = {} - self._klt_matcher = klt.KLTMatcher() + self._assess_bands = assess_bands + self._reference_band = reference_band + self._klt_matcher = klt_matcher def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: - log.info('Start') + log.info("Start") # do Geometry Assessment only if required - assess_geometry_bands = S2L_config.config.get('doAssessGeometry', default='').split(',') - assess_geometry_bands = [product.reverse_bands_mapping.get(band) for band in assess_geometry_bands] + assess_geometry_bands = [ + product.reverse_bands_mapping.get(band) for band in self._assess_bands + ] if assess_geometry_bands and band in assess_geometry_bands: # open validity mask @@ -59,23 +71,27 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ ref_image = get_resampled_ref_image(image, product.ref_image) if ref_image is None: - log.warning("Abort geometry assessment, no reference image found for %s", 
image.filepath) + log.warning( + "Abort geometry assessment, no reference image found for %s", image.filepath + ) # abort, cannot do matching without ref image return image # Coarse resolution of correlation grid (only for stats) - klt_result = self._matching(ref_image, image, mask, product.working_dir, product.ref_image) + klt_result = self._matching( + ref_image, image, mask, product.working_dir, product.ref_image + ) log.info( "Geometrical Offsets after correction if any (DX/DY): %sm %sm", klt_result.dx_array.mean(), - klt_result.dy_array.mean() + klt_result.dy_array.mean(), ) # Append bands name to keys - if S2L_config.config.get('reference_band') != band: + if self._reference_band != band: for key in self._tmp_stats: - self._tmp_stats[f'{key}_{band}'] = self._tmp_stats.pop(key) + self._tmp_stats[f"{key}_{band}"] = self._tmp_stats.pop(key) # set qi info to reference band stats product.metadata.qi.update(self._tmp_stats) @@ -83,31 +99,43 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ # clear for next band process self._tmp_stats = {} - log.info('End') + log.info("End") return image - def _matching(self, image_ref: S2L_ImageFile, image: S2L_ImageFile, mask: S2L_ImageFile, working_dir: str, ref_image_path: str) -> klt.KTLResult: - - log.info('Start matching') + def _matching( + self, + image_ref: S2L_ImageFile, + image: S2L_ImageFile, + mask: S2L_ImageFile, + working_dir: str, + ref_image_path: str, + ) -> KTLResult: + log.info("Start matching") # do matching with KLT - result = self._klt_matcher.do_matching(working_dir, image_ref, image, mask.array, assessment=True) + result = self._klt_matcher.do_matching( + working_dir, image_ref, image, mask.array, assessment=True + ) dx = result.dx_array dy = result.dy_array dist = np.sqrt(np.power(dx, 2) + np.power(dy, 2)).flatten() - self._tmp_stats.update({'SKEW': np.round(skew(dist, axis=None), 1), - 'KURTOSIS': np.round(kurtosis(dist, axis=None), 1), - 'REF_IMAGE': 
os.path.basename(ref_image_path), - 'MEAN': np.round(np.mean(dist), 1), - 'MEAN_X': dx.mean(), - 'MEAN_Y': dy.mean(), - 'STD': np.round(np.std(dist), 1), - 'STD_X': np.round(np.std(dx), 1), - 'STD_Y': np.round(np.std(dy), 1), - 'RMSE': np.round(np.sqrt(np.mean(np.power(dist, 2))), 1), - 'NB_OF_POINTS': result.nb_matching_point}) - - return result \ No newline at end of file + self._tmp_stats.update( + { + "SKEW": np.round(skew(dist, axis=None), 1), + "KURTOSIS": np.round(kurtosis(dist, axis=None), 1), + "REF_IMAGE": os.path.basename(ref_image_path), + "MEAN": np.round(np.mean(dist), 1), + "MEAN_X": dx.mean(), + "MEAN_Y": dy.mean(), + "STD": np.round(np.std(dist), 1), + "STD_X": np.round(np.std(dx), 1), + "STD_Y": np.round(np.std(dy), 1), + "RMSE": np.round(np.sqrt(np.mean(np.power(dist, 2))), 1), + "NB_OF_POINTS": result.nb_matching_point, + } + ) + + return result diff --git a/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py b/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py index e927069..e18d06b 100644 --- a/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py +++ b/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py @@ -22,7 +22,6 @@ import numpy as np -from core import S2L_config from core.image_file import S2L_ImageFile from core.products.product import S2L_Product from s2l_processes.S2L_Process import S2L_Process @@ -61,6 +60,9 @@ class S2L_InterCalibration(S2L_Process): (It is the SPACECRAFT_NAME (for sentinel) or SPACECRAFT_ID (for landsats)) """ + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products) + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') @@ -90,7 +92,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ new[image.array == 0] = 0 # Format Output : duplicate, link to product as parameter image = image.duplicate(self.output_file(product, band), array=new.astype(np.float32)) - if 
S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: image.write(creation_options=['COMPRESS=LZW']) log.info('End') diff --git a/sen2like/sen2like/s2l_processes/S2L_Nbar.py b/sen2like/sen2like/s2l_processes/S2L_Nbar.py index b1da31e..9ae0872 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Nbar.py +++ b/sen2like/sen2like/s2l_processes/S2L_Nbar.py @@ -195,11 +195,12 @@ class VJBMatriceBRDFCoefficient(BRDFCoefficient): "S2(.)_(.{4})_(.{3})_(.{6})_(.{4})_(\\d{8}T\\d{6})_V(\\d{8}T\\d{6})_(\\d{8}T\\d{6})_T(.{5})_(.{5})_(.{2})\\.nc" ) - def __init__(self, product, image, band, vr_matrix_dir): + def __init__(self, product, image, band, vr_matrix_dir, generate_intermediate_products): super().__init__(product, image, band) self.vr_matrix = None + self._generate_intermediate_products = generate_intermediate_products self.vr_matrix_file = self._select_vr_file(vr_matrix_dir) if self.vr_matrix_file: @@ -298,7 +299,7 @@ def get(self): _working_dir = self.product.working_dir - if S2L_config.config.getboolean('generate_intermediate_products'): + if self._generate_intermediate_products: ndvi_clip_img_path = os.path.join(_working_dir, 'ndvi_clipped.tif') if not os.path.isfile(ndvi_clip_img_path): ndvi_clip_img = ndvi_img.duplicate( @@ -315,7 +316,7 @@ def get(self): log.debug("c_vol have %s NaN", np.isnan(c_vol).sum()) np.nan_to_num(c_geo, copy=False) np.nan_to_num(c_vol, copy=False) - if S2L_config.config.getboolean('generate_intermediate_products'): + if self._generate_intermediate_products: c_geo_image = self.image.duplicate( filepath=os.path.join(_working_dir, f'c_geo_{self.band}.tif'), array=c_geo, @@ -400,8 +401,8 @@ class BandParam: class S2L_Nbar(S2L_Process): - def __init__(self): - super().__init__() + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products) self._theta_s = None self._mean_delta_azimuth = [] self._band_param = {} @@ -425,7 +426,7 @@ def process(self, 
product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ # brdf coefficiant class if S2L_config.config.get('nbar_methode') == 'VJB' and product.ndvi_filename is not None: _band_param.brdf_coeff = VJBMatriceBRDFCoefficient( - product, image, band, S2L_config.config.get('vjb_coeff_matrice_dir')) + product, image, band, S2L_config.config.get('vjb_coeff_matrice_dir'), self.generate_intermediate_products) if not _band_param.brdf_coeff.check(): _band_param.brdf_coeff = ROYBRDFCoefficient(product, image, band) log.info( @@ -453,7 +454,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ # Format Output : duplicate, link to product as parameter image_out = image.duplicate(self.output_file(product, band), array=OUT.astype(np.float32)) - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: image_out.write(creation_options=['COMPRESS=LZW']) log.info('End') diff --git a/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py b/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py index c917526..6413033 100644 --- a/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py +++ b/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py @@ -48,8 +48,8 @@ class S2L_PackagerL2F(S2L_Product_Packager): S2F product packager """ - def __init__(self): - super().__init__(packager_config) + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products, packager_config) def postprocess_quicklooks(self, qi_data_dir, product: S2L_Product): """ diff --git a/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py b/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py index e24817e..6c01045 100644 --- a/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py +++ b/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py @@ -45,5 +45,5 @@ class S2L_PackagerL2H(S2L_Product_Packager): S2H product packager """ - def __init__(self): - super().__init__(packager_config) + def __init__(self, 
generate_intermediate_products: bool): + super().__init__(generate_intermediate_products, packager_config) diff --git a/sen2like/sen2like/s2l_processes/S2L_Process.py b/sen2like/sen2like/s2l_processes/S2L_Process.py index 04b6798..ba14508 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Process.py +++ b/sen2like/sen2like/s2l_processes/S2L_Process.py @@ -30,8 +30,18 @@ class S2L_Process(ABC): Implementation MUST implements 'process' and SHOULD override 'preprocess' and 'postprocess' """ - def __init__(self): - self.ext = S2L_config.PROC_BLOCKS.get(self.__class__.__name__, {}).get('extension') + def __init__(self, generate_intermediate_products: bool = False): + """Default constructor. + + Args: + generate_intermediate_products (bool, optional): flag to generate or not intermediate image products. + Concrete implementation is responsible to use it and generate the intermediate image. + Defaults to False. + """ + self.generate_intermediate_products = generate_intermediate_products + self.ext = S2L_config.PROC_BLOCKS.get(self.__class__.__name__, {}).get( + "extension" + ) def preprocess(self, product: S2L_Product): """Do some preprocess on / for the product @@ -42,7 +52,9 @@ def preprocess(self, product: S2L_Product): # deliberately empty @abstractmethod - def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: + def process( + self, product: S2L_Product, image: S2L_ImageFile, band: str + ) -> S2L_ImageFile: """Process the product/image/band Args: @@ -69,5 +81,5 @@ def output_file(self, product, band, extension=None): extension = self.ext return os.path.join( product.working_dir, - product.get_band_file(band).rootname + extension + product.get_band_file(band).rootname + extension, ) diff --git a/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py b/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py index 9587d88..ae072e4 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py +++ 
b/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py @@ -70,8 +70,8 @@ class PackagerConfig: class S2L_Product_Packager(S2L_Process): """Base class for S2L product packaging""" - def __init__(self, config: PackagerConfig): - super().__init__() + def __init__(self, generate_intermediate_products: bool, config: PackagerConfig): + super().__init__(generate_intermediate_products) self.images = {} self.product_type_name = config.product_type_name self.mtd_mask_field = config.mtd_mask_field diff --git a/sen2like/sen2like/s2l_processes/S2L_Sbaf.py b/sen2like/sen2like/s2l_processes/S2L_Sbaf.py index 58ac1c2..50ccfcf 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Sbaf.py +++ b/sen2like/sen2like/s2l_processes/S2L_Sbaf.py @@ -18,62 +18,85 @@ # limitations under the License. - import logging -from dataclasses import dataclass +from typing import NamedTuple import numpy as np - -from core import S2L_config from core.image_file import S2L_ImageFile from core.products.landsat_8.landsat8 import Landsat8Product from core.products.product import S2L_Product +from numpy.typing import NDArray from s2l_processes.S2L_Process import S2L_Process +from skimage.transform import resize as skit_resize log = logging.getLogger("Sen2Like") -@dataclass -class SbafParams: - """Simple sbaff param storage - """ - coefficient: float +class SbafParams(NamedTuple): + """Simple sbaff param storage""" + + slope: float offset: float +# params +adaptive_adj_coef = {} +adaptive_adj_coef["B01"] = {"bandLabel": "CA", "coef": SbafParams(-0.13363398, 0.92552824)} +adaptive_adj_coef["B02"] = {"bandLabel": "BLUE", "coef": SbafParams(0.1422238, 1.05114394)} +adaptive_adj_coef["B03"] = {"bandLabel": "GREEN", "coef": SbafParams(0.00898318, 0.97937428)} +adaptive_adj_coef["B04"] = {"bandLabel": "RED", "coef": SbafParams(-0.09417763, 1.0296573)} +adaptive_adj_coef["B8A"] = {"bandLabel": "NIR 20", "coef": SbafParams(-0.00292645, 0.99517658)} +adaptive_adj_coef["B11"] = {"bandLabel": "SWIR 1", "coef": 
SbafParams(0.01377031, 0.99593527)} +adaptive_adj_coef["B12"] = {"bandLabel": "SWIR 2", "coef": SbafParams(0.01272434, 0.97395877)} + + class S2L_Sbaf(S2L_Process): + def __init__( + self, + adaptative: bool, + adaptative_band_candidates: list[str], + generate_intermediate_products: bool, + ): + """Constructor - def __init__(self): - super().__init__() - self._sbaf_params = {} + Args: + adaptative (bool): use adaptative SBAF. If False, standard slope and offset are used. + generate_intermediate_products (bool): generate or not intermediate image + adaptative_band_candidates (list[str]): list of band (S2 band name) that to treat with adaptative method if enabled + """ + super().__init__(generate_intermediate_products) + self._adaptative: bool = adaptative + self._adaptative_band_candidates: list[str] = adaptative_band_candidates + # store used params for QI report + self._sbaf_params: dict[str, SbafParams] = {} def get_sen2like_coef(self, mission): """ Derived from value in HLS Guide v 1.4 - Get Adjustement coefficient for SEN2LIKE processing, + Get Adjustment coefficient for SEN2LIKE processing, Coefficient applied to Landsat8/OLI towards Sentinel2A/MSI data Coef array definition [slope, intercept]""" adj_coef = {} - if mission in ('LANDSAT_8', 'LANDSAT_9'): - adj_coef['B01'] = {'bandLabel': 'CA'} - adj_coef['B02'] = {'bandLabel': 'BLUE'} - adj_coef['B03'] = {'bandLabel': 'GREEN'} - adj_coef['B04'] = {'bandLabel': 'RED'} - adj_coef['B05'] = {'bandLabel': 'NIR 20'} - adj_coef['B06'] = {'bandLabel': 'SWIR 1'} - adj_coef['B07'] = {'bandLabel': 'SWIR 2'} + if mission in ("LANDSAT_8", "LANDSAT_9"): + adj_coef["B01"] = {"bandLabel": "CA"} + adj_coef["B02"] = {"bandLabel": "BLUE"} + adj_coef["B03"] = {"bandLabel": "GREEN"} + adj_coef["B04"] = {"bandLabel": "RED"} + adj_coef["B05"] = {"bandLabel": "NIR 20"} + adj_coef["B06"] = {"bandLabel": "SWIR 1"} + adj_coef["B07"] = {"bandLabel": "SWIR 2"} # compute coeff from Nasa SBAF values adj_coef_l8_s2a = 
self.get_oli_like_coef("Sentinel-2A") - for oli_band in adj_coef.keys(): + for oli_band, band_coef in adj_coef.items(): s2_band = Landsat8Product.get_s2like_band(oli_band) if not s2_band: continue - coef = adj_coef_l8_s2a[s2_band]['coef'] - a = 1 / coef[0] - b = - coef[1] / coef[0] - adj_coef[oli_band]['coef'] = [a, b] + coef = adj_coef_l8_s2a[s2_band]["coef"] + slope = 1 / coef.slope + offset = -coef.offset / coef.slope + band_coef["coef"] = SbafParams(slope, offset) return adj_coef @@ -86,70 +109,69 @@ def get_oli_like_coef(self, mission): Coef array definition [slope, intercept]""" adj_coef = {} - if mission == 'Sentinel-2A': - adj_coef['B01'] = {'bandLabel': 'CA', 'coef': [0.9959, -0.0002]} - adj_coef['B02'] = {'bandLabel': 'BLUE', 'coef': [0.9778, -0.004]} - adj_coef['B03'] = {'bandLabel': 'GREEN', 'coef': [1.0053, -0.0009]} - adj_coef['B04'] = {'bandLabel': 'RED', 'coef': [0.9765, 0.0009]} - adj_coef['B8A'] = {'bandLabel': 'NIR 20', 'coef': [0.9983, -0.0001]} - adj_coef['B11'] = {'bandLabel': 'SWIR 1', 'coef': [0.9987, -0.0011]} - adj_coef['B12'] = {'bandLabel': 'SWIR 2', 'coef': [1.003, -0.0012]} - - elif mission == 'Sentinel-2B': - adj_coef['B01'] = {'bandLabel': 'CA', 'coef': [0.9959, -0.0002]} - adj_coef['B02'] = {'bandLabel': 'BLUE', 'coef': [0.9778, -0.004]} - adj_coef['B03'] = {'bandLabel': 'GREEN', 'coef': [1.0075, -0.0008]} - adj_coef['B04'] = {'bandLabel': 'RED', 'coef': [0.9761, 0.001]} - adj_coef['B8A'] = {'bandLabel': 'NIR 20', 'coef': [0.9966, 0.000]} - adj_coef['B11'] = {'bandLabel': 'SWIR 1', 'coef': [1.000, -0.0003]} - adj_coef['B12'] = {'bandLabel': 'SWIR 2', 'coef': [0.9867, 0.0004]} + if mission == "Sentinel-2A": + adj_coef["B01"] = {"bandLabel": "CA", "coef": SbafParams(0.9959, -0.0002)} + adj_coef["B02"] = {"bandLabel": "BLUE", "coef": SbafParams(0.9778, -0.004)} + adj_coef["B03"] = {"bandLabel": "GREEN", "coef": SbafParams(1.0053, -0.0009)} + adj_coef["B04"] = {"bandLabel": "RED", "coef": SbafParams(0.9765, 0.0009)} + adj_coef["B8A"] = 
{"bandLabel": "NIR 20", "coef": SbafParams(0.9983, -0.0001)} + adj_coef["B11"] = {"bandLabel": "SWIR 1", "coef": SbafParams(0.9987, -0.0011)} + adj_coef["B12"] = {"bandLabel": "SWIR 2", "coef": SbafParams(1.003, -0.0012)} + + elif mission == "Sentinel-2B": + adj_coef["B01"] = {"bandLabel": "CA", "coef": SbafParams(0.9959, -0.0002)} + adj_coef["B02"] = {"bandLabel": "BLUE", "coef": SbafParams(0.9778, -0.004)} + adj_coef["B03"] = {"bandLabel": "GREEN", "coef": SbafParams(1.0075, -0.0008)} + adj_coef["B04"] = {"bandLabel": "RED", "coef": SbafParams(0.9761, 0.001)} + adj_coef["B8A"] = {"bandLabel": "NIR 20", "coef": SbafParams(0.9966, 0.000)} + adj_coef["B11"] = {"bandLabel": "SWIR 1", "coef": SbafParams(1.000, -0.0003)} + adj_coef["B12"] = {"bandLabel": "SWIR 2", "coef": SbafParams(0.9867, 0.0004)} return adj_coef def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: - log.info('Start') + log.info("Start") # init to None - offset = None - slope = None + sbaf_params = None if not product.apply_sbaf_param: + # Feed params for QI report self._sbaf_params[band] = SbafParams(1, 0) - log.info('Skip for %s', product.mtl.mission) + log.info("Skip for %s", product.mtl.mission) log.info("End") return image - # TODO : what about MAJA product ? + # TODO : what about MAJA product ? # (where mission is LANDSAT8,LANDSAT9, SENTINEL2) it will be None, is that right ? 
- if product.mtl.mission in ('LANDSAT_8', 'LANDSAT_9'): + if product.mtl.mission in ("LANDSAT_8", "LANDSAT_9"): # L8 => S2A band_sbaf = band - adj_coef = self.get_sen2like_coef("LANDSAT_8") + adj_coef = self.get_sen2like_coef(product.mtl.mission) if band_sbaf in adj_coef: - log.info('Sbaf coefficient find to %s', band) - slope, offset = adj_coef[band_sbaf]['coef'] - log.info('slop = %s, offset = %s', slope, offset) + log.info("Sbaf coefficient find to %s", band) + sbaf_params = adj_coef[band_sbaf]["coef"] + log.info(str(sbaf_params)) else: self._sbaf_params[band] = SbafParams(1, 0) log.info("No Sbaf coefficient defined for %s", band) return image - self._sbaf_params[band] = SbafParams(slope, offset) + self._sbaf_params[band] = sbaf_params # Apply SBAF - if offset is not None and slope is not None: - log.debug("Applying SBAF") - new = image.array - new = new * slope + offset - # transfer no data - new[image.array == 0] = 0 + if sbaf_params is not None: + new_image_array = self._do_sbaf(product, image, band, self._sbaf_params[band]) # Format Output : duplicate, link to product as parameter - image = image.duplicate(self.output_file(product, band), array=new.astype(np.float32)) - if S2L_config.config.getboolean('generate_intermediate_products'): - image.write(creation_options=['COMPRESS=LZW']) + image = image.duplicate( + self.output_file(product, band), array=new_image_array.astype(np.float32) + ) - log.info('End') + if self.generate_intermediate_products: + image.write(creation_options=["COMPRESS=LZW"]) + + log.info("End") return image @@ -159,11 +181,116 @@ def postprocess(self, product: S2L_Product): Args: product (S2L_Product): product to post process """ + product.metadata.qi["SBAF_ADAPTATIVE"] = self._adaptative + + if self._adaptative: + # select only processed bands + product.metadata.qi["SBAF_ADAPTED_BANDS"] = " ".join( + product.__class__.get_s2like_band(band) + for band in self._sbaf_params + if band in self._adaptative_band_candidates + ) + for band, 
params in self._sbaf_params.items(): # set sbaf qi param with S2 band naming s2_band = product.__class__.get_s2like_band(band) if not s2_band: # avoid Non Sentinel native band continue - product.metadata.qi[f'SBAF_COEFFICIENT_{s2_band}'] = params.coefficient - product.metadata.qi[f'SBAF_OFFSET_{s2_band}'] = params.offset + product.metadata.qi[f"SBAF_COEFFICIENT_{s2_band}"] = params.slope + product.metadata.qi[f"SBAF_OFFSET_{s2_band}"] = params.offset + + def _do_sbaf( + self, product: S2L_Product, image: S2L_ImageFile, band: str, static_params: SbafParams + ) -> NDArray: + """Apply sbaf to an image band. + Depending the processing block configuration, static or adaptative method is applied. + Even if adaptative method should be applied, some image band values will be treat + with static because adaptative have no sense for them. + + Args: + product (S2L_Product): current processed product + image (S2L_ImageFile): band image to which apply sbaf + band (str): image band name + static_params (SbafParams): static params, + + Returns: + NDArray: adapted image band values + """ + log.debug("Applying SBAF") + + new_image_array = None + if ( + self._adaptative + and product.__class__.get_s2like_band(band) in self._adaptative_band_candidates + ): + log.info("Use adaptative method for band %s", band) + + ndvi_image_array = self._get_ndvi_image_array(product, image) + # given band is the product mission band name, use its corresponding S2 band to get sbaf param + adaptative_param = adaptive_adj_coef[product.__class__.get_s2like_band(band)]["coef"] + log.info(str(adaptative_param)) + factor = adaptative_param.slope * ndvi_image_array + adaptative_param.offset + new_image_array = image.array * factor + # apply static method on water, barren areas of rock, sand, or snow + # adaptative have no sense for them + where = ndvi_image_array <= 0.1 + new_image_array[where] = self._apply_static(image.array[where], static_params) + + # override QI report info + self._sbaf_params[band] = 
adaptative_param + else: + log.info("Use static method") + new_image_array = self._apply_static(image.array, static_params) + + # restore no data + new_image_array[image.array == 0] = 0 + + return new_image_array + + def _apply_static(self, in_array: NDArray, sbaf_params: SbafParams) -> NDArray: + """Apply simple sbaf method to an image band + + Args: + in_array (NDArray): band image values + sbaf_params (SbafParams): sbaf param to apply + + Returns: + NDArray: Adapted values + """ + return in_array * sbaf_params.slope + sbaf_params.offset + + def _get_ndvi_image_array(self, product: S2L_Product, image: S2L_ImageFile) -> NDArray: + """Get product NDVI as array having same shape as image + + Args: + product (S2L_Product): product for which get the NDVI + image (S2L_ImageFile): image to get shape and resolution necessary to extract NDVI + + Returns: + NDArray: NDVI image array, resampled to image shape if needed + """ + ndvi_image = S2L_ImageFile(product.ndvi_filename) + ndvi_image_array = ndvi_image.array + if ndvi_image.xRes != image.xRes: + log.info("Resample NDVI image band resolution %s", image.xRes) + ndvi_image_array = skit_resize( + ndvi_image.array, image.array.shape, order=3, preserve_range=True + ) + log.info("Resample done") + + return ndvi_image_array + + +""" +factor = a * NDVI + b +out = image * factor +out[ndvi < 0.1] = calcul normal + +Ajouter la méthode dans les qi report (nom des méthodes à déterminer) +a et b configurable par bande ou en dur ? + +resize NDVI pour les images qui ne sont pas à la résolution du NDVI + +quelles valeurs pour les slope et offset dans le cas de l'adaptative ? a & b ? 
+""" diff --git a/sen2like/sen2like/s2l_processes/S2L_Stitching.py b/sen2like/sen2like/s2l_processes/S2L_Stitching.py index af13860..0896367 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Stitching.py +++ b/sen2like/sen2like/s2l_processes/S2L_Stitching.py @@ -35,6 +35,9 @@ class S2L_Stitching(S2L_Process): + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products) + def _output_file(self, product, band=None, image=None, extension=None): if band is None and image is not None: return os.path.join( diff --git a/sen2like/sen2like/s2l_processes/S2L_Toa.py b/sen2like/sen2like/s2l_processes/S2L_Toa.py index ff83b1a..224113c 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Toa.py +++ b/sen2like/sen2like/s2l_processes/S2L_Toa.py @@ -20,7 +20,6 @@ import logging -from core import S2L_config from core.image_file import S2L_ImageFile from core.products.product import S2L_Product from core.toa_reflectance import convert_to_reflectance_from_reflectance_cal_product @@ -31,6 +30,9 @@ class S2L_Toa(S2L_Process): + def __init__(self, generate_intermediate_products: bool): + super().__init__(generate_intermediate_products) + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') @@ -38,7 +40,7 @@ def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ array_in = image.array array_out = convert_to_reflectance_from_reflectance_cal_product(product.mtl, array_in, band) image = image.duplicate(self.output_file(product, band), array=array_out) - if S2L_config.config.getboolean('generate_intermediate_products'): + if self.generate_intermediate_products: image.write(creation_options=['COMPRESS=LZW']) log.info('End') diff --git a/sen2like/sen2like/s2l_processes/S2L_TopographicCorrection.py b/sen2like/sen2like/s2l_processes/S2L_TopographicCorrection.py new file mode 100644 index 0000000..a9237c7 --- /dev/null +++ 
b/sen2like/sen2like/s2l_processes/S2L_TopographicCorrection.py @@ -0,0 +1,215 @@ +# Copyright (c) 2023 ESA. +# +# This file is part of sen2like. +# See https://github.com/senbox-org/sen2like for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Topographic (Slope) correction processing block module""" + +import logging +import math +import os +from typing import NamedTuple + +import numpy as np +from core.dem import DEMRepository +from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product +from numpy.typing import NDArray +from osgeo import gdal +from s2l_processes import S2L_Process +from skimage.transform import resize as skit_resize + +logger = logging.getLogger("Sen2Like") + + +class Stat(NamedTuple): + res: int + val: dict[str, float] + + +def _get_resample_array( + image: S2L_ImageFile, file_path: str, resample_alg: int, description: str +) -> NDArray: + """Resample one band image file given by file_path to image shape + + Args: + image (S2L_ImageFile): image to resample + file_path (str): path to the file to get resample array + resample_alg (int): skimage resize `order` + - 0: Nearest-neighbor + - 1: Bi-linear + - 2: Bi-quadratic + - 3: Bi-cubic + - 4: Bi-quartic + - 5: Bi-quintic + description (str): input description (for log purpose) + + Returns: + NDArray: resampled array + """ + dataset = gdal.Open(file_path) + array = np.array(dataset.GetRasterBand(1).ReadAsArray()) + + # resize array to image size if needed 
+ geo = dataset.GetGeoTransform() + dem_x_res = geo[1] + if image.xRes != dem_x_res: + logger.info("Resample %s to image resolution %s", description, image.xRes) + array = skit_resize(array, image.array.shape, order=resample_alg, preserve_range=True) + logger.info("Resample done") + + return array + + +class S2L_TopographicCorrection(S2L_Process): + """Topographic correction processing block. + It uses MGRS dem file matching the image to process, + Generate a shaded relief map elevation raster from the DEM (gdal hillshade) thanks to sun angles + Compute a topographic correction, and apply it. + """ + + def __init__( + self, + dem_repository: DEMRepository, + topographic_correction_limiter: float, + apply_valid_pixel_mask: bool, + generate_intermediate_products: bool, + ): + """Constructor + + Args: + dem_repository (DEMRepository): service to access MGRS DEM + topographic_correction_limiter (float): max factor value. + (down factor > topographic_correction_limiter to topographic_correction_limiter) + apply_valid_pixel_mask (bool): Use valid pixel mask to select pixel for which the correction is done + generate_intermediate_products (bool): generate or not intermediate image band file + """ + super().__init__(generate_intermediate_products) + self._dem_repository = dem_repository + self._shaded_dem_file = None + self._apply_valid_pixel_mask = apply_valid_pixel_mask + self._topographic_correction_limiter = topographic_correction_limiter + self._stats: Stat | None = None + + def preprocess(self, product: S2L_Product): + """Create hillshade DEM if possible. + If not, process will be skipped. + + Args: + product (S2L_Product): product to have sun angle info for hillshade DEM creation. 
+ """ + + logger.info("Compute hillshade") + mgrs_dem_file = None + try: + mgrs_dem_file = self._dem_repository.get_by_mgrs(product.context.tile) + except FileNotFoundError: + logger.warning( + "Cannot find DEM for tile %s, TopographicCorrection will not be performed", + product.context.tile, + ) + else: + logger.info("Product sun zenith angle %s", product.sun_zenith_angle) + logger.info("Product sun azimuth angle %s", product.sun_azimuth_angle) + + shaded_dem_file = os.path.join(product.working_dir, "shaded_dem.tiff") + altitude = 90.0 - np.float32(product.sun_zenith_angle) + azimuth = np.float32(product.sun_azimuth_angle) + + logger.info("Altitude %s", altitude) + + options = "-compute_edges -az " + str(azimuth) + " -alt " + str(altitude) + gdal.DEMProcessing(shaded_dem_file, mgrs_dem_file, "hillshade", options=options) + + # set it at the end as it used as condition in the beginning of process function + self._shaded_dem_file = shaded_dem_file + logger.info("Hillshade computation finished") + + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: + """ + preform topographic correction. + resize hillshade dem to image size if resolution differs. + skip if hillshade DEM have not been produced by `preprocess`. 
+ + Args: + product (S2L_Product): product to process + image (S2L_ImageFile): product image to process + band (str): band to process + + Returns: + S2L_ImageFile: image with topographic correction or input image + """ + logger.info("Start") + if not self._shaded_dem_file: + logger.warning("Skip topographic correction because DEM is missing") + return image + + # load hillshade DEM, resize it to image size if needed + hillshade_array = _get_resample_array(image, self._shaded_dem_file, 3, "hillshade DEM") + + # compute correction factor array + # topographic_correction_factor is limited using the + # "topographic_correction_limiter" parameter initially set to 4.0 + logger.info("Compute correction factor for sun zenith angle %s", product.sun_zenith_angle) + topographic_correction_factor = ( + math.cos(math.radians(product.sun_zenith_angle)) * 255.0 / hillshade_array + ).clip(min=0.0, max=self._topographic_correction_limiter) + + # apply correction to image + logger.info("Apply correction factor to band image %s", band) + array_out = topographic_correction_factor * image.array + + # mask filtering + # restore initial values on invalid pixels + if self._apply_valid_pixel_mask: + logger.info("Apply valid pixel mask") + mask_array = _get_resample_array(image, product.mask_filename, 0, "valid pixel mask") + invalid = mask_array == 0 + array_out[invalid] = image.array[invalid] + # update to compute stats + topographic_correction_factor = topographic_correction_factor[~invalid] + + # compute stat for higher resolution for QI report + if not self._stats or self._stats.res > image.xRes: + self._set_stats(topographic_correction_factor, image.xRes) + + out_image = image.duplicate(self.output_file(product, band), array_out) + + if self.generate_intermediate_products: + logger.info("Generate intermediate product") + out_image.write(creation_options=["COMPRESS=LZW"]) + + logger.info("End") + return out_image + + def postprocess(self, product: S2L_Product): + 
product.metadata.qi["DEM_DATASET"] = self._dem_repository.dataset_name + product.metadata.qi.update(self._stats.val) + + def _set_stats(self, factor: NDArray, res: float): + """Set stats at the given resolution + + Args: + factor (NDArray): factor to compute stats on + res (float): resolution for which stats are computed + """ + logger.info("Set stats for resolution %s", res) + stats = { + "MIN_TOPOGRAPHIC_CORRECTION_FACTOR": factor.min(), + "MAX_TOPOGRAPHIC_CORRECTION_FACTOR": factor.max(), + "AVERAGE_TOPOGRAPHIC_CORRECTION_FACTOR": factor.mean(), + "STD_TOPOGRAPHIC_CORRECTION_FACTOR": factor.std(), + } + self._stats = Stat(res, stats) diff --git a/sen2like/sen2like/s2l_processes/__init__.py b/sen2like/sen2like/s2l_processes/__init__.py index c104327..8c3d8d8 100644 --- a/sen2like/sen2like/s2l_processes/__init__.py +++ b/sen2like/sen2like/s2l_processes/__init__.py @@ -15,3 +15,128 @@ # See the License for the specific language governing permissions and # limitations under the License. + +""" +PROVIDE `create_process_block` function which is dedicated to `S2L_Process` concrete class instance +It Exposes `S2L_Process` and `create_process_block` +""" + + +from core.dem import DEMRepository +from core.S2L_config import S2L_Config, config +from klt import klt_matcher_factory + +from .S2L_Atmcor import S2L_Atmcor +from .S2L_Fusion import S2L_Fusion +from .S2L_Geometry import S2L_Geometry +from .S2L_GeometryCheck import S2L_GeometryCheck +from .S2L_InterCalibration import S2L_InterCalibration +from .S2L_Nbar import S2L_Nbar +from .S2L_PackagerL2F import S2L_PackagerL2F +from .S2L_PackagerL2H import S2L_PackagerL2H +from .S2L_Process import S2L_Process +from .S2L_Sbaf import S2L_Sbaf +from .S2L_Stitching import S2L_Stitching +from .S2L_Toa import S2L_Toa +from .S2L_TopographicCorrection import S2L_TopographicCorrection + +_class_lookup = { + S2L_Atmcor.__name__: S2L_Atmcor, + S2L_Fusion.__name__: S2L_Fusion, + S2L_InterCalibration.__name__: S2L_InterCalibration, + 
S2L_Nbar.__name__: S2L_Nbar, + S2L_PackagerL2F.__name__: S2L_PackagerL2F, + S2L_PackagerL2H.__name__: S2L_PackagerL2H, + S2L_Stitching.__name__: S2L_Stitching, + S2L_Toa.__name__: S2L_Toa, +} + + +# DEFINE CUSTOM BUILDER FUNCTIONS HERE + + +def _create_topographic_correction(configuration: S2L_Config) -> S2L_TopographicCorrection: + dem_repository = DEMRepository( + configuration.get("dem_folder"), + configuration.get("dem_dataset"), + int(configuration.get("src_dem_resolution")), + ) + + topo_corr_limit = configuration.getfloat("topographic_correction_limiter") + if not topo_corr_limit: + raise RuntimeError(f"Configuration parameter {topo_corr_limit} not set in config file.") + + generate_intermediate_products = configuration.getboolean("generate_intermediate_products") + apply_valid_pixel_mask = configuration.getboolean("apply_valid_pixel_mask") + return S2L_TopographicCorrection( + dem_repository, topo_corr_limit, apply_valid_pixel_mask, generate_intermediate_products + ) + + +def _create_geometry(configuration: S2L_Config) -> S2L_Geometry: + return S2L_Geometry( + klt_matcher_factory.get_klt_matcher(), + configuration.getboolean("force_geometric_correction"), + configuration.getboolean("doMatchingCorrection"), + configuration.get("reference_band", "B04"), + configuration.getboolean("generate_intermediate_products"), + ) + + +def _create_geometry_check(configuration: S2L_Config) -> S2L_GeometryCheck: + return S2L_GeometryCheck( + klt_matcher_factory.get_klt_matcher(), + configuration.get("doAssessGeometry", default="").split(","), + configuration.get("reference_band"), + configuration.getboolean("generate_intermediate_products"), + ) + + +def _create_sbaf(configuration: S2L_Config) -> S2L_Sbaf: + return S2L_Sbaf( + configuration.getboolean("adaptative"), + configuration.get("adaptative_band_candidates", default="").split(","), + configuration.getboolean("generate_intermediate_products"), + ) + + +# CUSTOM BUILDER FUNCTIONS LOOKUP TABLE + +_create_function_lookup 
= {
+    S2L_Geometry.__name__: _create_geometry,
+    S2L_GeometryCheck.__name__: _create_geometry_check,
+    S2L_Sbaf.__name__: _create_sbaf,
+    S2L_TopographicCorrection.__name__: _create_topographic_correction,
+}
+
+
+def create_process_block(process_name: str) -> S2L_Process:
+    """Create a processing block instance.
+
+    Args:
+        process_name (str): name of the processing block to create
+
+    Returns:
+        S2L_Process: concrete `S2L_Process`
+    """
+
+    # get custom S2L_Process create function, if any is registered
+    create_function = _create_function_lookup.get(process_name, None)
+
+    if create_function:
+        # call custom create function with config
+        return create_function(config)
+
+    # No custom builder: fall back to the default constructor.
+    constructor = _class_lookup.get(process_name, None)
+    if not constructor:
+        # this is a code engineering / chair keyboard interface error
+        raise RuntimeError(
+            f"Create function for processing block '{process_name}' not found in '_class_lookup' or '_create_function_lookup'."
+        )
+
+    # default constructors only take the generate_intermediate_products flag
+    return constructor(config.getboolean("generate_intermediate_products"))
+
+
+__all__ = ["create_process_block", "S2L_Process"]
diff --git a/sen2like/sen2like/sen2like.py b/sen2like/sen2like/sen2like.py
index 552e80c..d2e495d 100644
--- a/sen2like/sen2like/sen2like.py
+++ b/sen2like/sen2like/sen2like.py
@@ -54,41 +54,63 @@ def filter_product(product: S2L_Product):
     return True
 
 
-def pre_process_atmcor(s2l_product: S2L_Product, tile, do_atmcor: bool) -> S2L_Product|None:
+def pre_process_atmcor(s2l_product: S2L_Product, tile) -> S2L_Product|None:
     """
     Adapt processing parameters for atmo corr processing to use.
-    THIS FUNCTION MODIFY SOME `s2l_product.context` PARAMETERS (use_sen2cor, use_smac, doStitching, doInterCalibration)
-    Run sen2cor if configured for (do_atmcor activated and use_sen2cor=True) and if product is compatible.
- Otherwise, configures exec parameters to use smac if product is compatible in case do_atmcor activated + THIS FUNCTION MODIFY SOME `s2l_product.context` PARAMETERS (use_sen2cor, doAtmcor, doStitching, doInterCalibration) + Run sen2cor if configured for (doAtmcor=True activated and use_sen2cor=True) and if product is compatible. + Otherwise, configures exec parameters to use smac if product is compatible in case doAtmcor is activated Args: s2l_product (S2L_Product): s2l_product to check atmo corr compatibility and run sen2cor on tile (str): tile name for sen2cor - do_atmcor (bool): if atmospheric correction must be done Returns: s2l_product after sen2cor if executed or provided s2l_product, or None if fail or too many cloud cover """ - use_sen2cor = do_atmcor and s2l_product.context.use_sen2cor - # only landsat collection 1 + use_sen2cor = s2l_product.context.doAtmcor and s2l_product.context.use_sen2cor + + # Avoid sen2cor for very old LS products (not collection product) if 'L8' in s2l_product.sensor and not s2l_product.mtl.collection_number.isdigit(): # can only use SMAC for these product_urls, so force SMAC in case doAtmcor=True use_sen2cor = False s2l_product.context.use_sen2cor = use_sen2cor - s2l_product.context.use_smac = True logger.info("For Landsat 8-9, apply sen2cor only on collection 1 & 2 product_urls") if use_sen2cor: logger.info("Use sen2cor instead of Atmcor SMAC") + + do_sen2cor_topo_corr = ( + s2l_product.context.doTopographicCorrection and + s2l_product.context.sen2cor_topographic_correction + ) + + sen2cor = Sen2corClient( + os.path.abspath(config.get('sen2cor_path')), + tile, + do_sen2cor_topo_corr + ) + # Disable SMAC Atmospheric correction - s2l_product.context.use_smac = False + s2l_product.context.doAtmcor = False + + # For now, do not enable stitching when sen2cor is used s2l_product.context.doStitching = False + # Should be done before atmospheric correction + # and only for S2B with baseline before 4, so disable it. 
s2l_product.context.doInterCalibration = False - sen2cor = Sen2corClient(os.path.abspath(config.get('sen2cor_path')), tile) + # Disable sen2like topographic correction processing block if enabled in sen2cor + if (s2l_product.context.doTopographicCorrection and + s2l_product.context.sen2cor_topographic_correction): + logger.info( + "Disable sen2like topographic correction processing block because done with sen2cor" + ) + s2l_product.context.doTopographicCorrection = False try: orig_processing_sw = s2l_product.mtl.processing_sw + # run sen2cor on product and instantiate a new one from result s2l_product = s2l_product.__class__( sen2cor.run(s2l_product), s2l_product.context @@ -99,6 +121,9 @@ def pre_process_atmcor(s2l_product: S2L_Product, tile, do_atmcor: bool) -> S2L_P # example: for intercalibration to know if S2B intercalibration was already applied, not to apply it twice s2l_product.mtl.processing_sw = orig_processing_sw + # set AC QI param + s2l_product.metadata.qi["AC_PROCESSOR"] = "SEN2COR" + except Sen2corError: logger.warning("sen2cor raises an error", exc_info=True) return None @@ -113,11 +138,14 @@ def pre_process_atmcor(s2l_product: S2L_Product, tile, do_atmcor: bool) -> S2L_P return None # Disable Atmospheric correction for Level-2A product_urls + # override s2_processing_level because it could be use later for related product search + # (see product_archive) if s2l_product.mtl.data_type in ('Level-2A', 'L2TP', 'L2A'): config.overload('s2_processing_level=LEVEL2A') logger.info("Processing Level-2A product: Atmospheric correction is disabled.") - # do not run SMAC even if doAtmo=True - s2l_product.context.use_smac = False + # do not run SMAC doAtmcor processing block, + s2l_product.context.doAtmcor = False + # intercalibration only for L1C, so disable it s2l_product.context.doInterCalibration = False else: config.overload('s2_processing_level=LEVEL1C') @@ -188,7 +216,7 @@ def process_tile(tile: str, search_urls: list[tuple], args: Namespace, start_dat ) 
# run sen2cor if any and update s2l_product.context - s2l_product = pre_process_atmcor(s2l_product, tile, config.get('doAtmcor')) + s2l_product = pre_process_atmcor(s2l_product, tile) if not s2l_product: continue diff --git a/sen2like/sen2like/version.py b/sen2like/sen2like/version.py index 9b0873c..98dbbce 100644 --- a/sen2like/sen2like/version.py +++ b/sen2like/sen2like/version.py @@ -17,7 +17,7 @@ """Version of the Application.""" -__version__ = '4.3.0' +__version__ = '4.4.0' _splitted_version = __version__.split('.') diff --git a/sen2like/tests/aux_data/test_dem_downloader.py b/sen2like/tests/aux_data/test_dem_downloader.py new file mode 100644 index 0000000..e8fe61f --- /dev/null +++ b/sen2like/tests/aux_data/test_dem_downloader.py @@ -0,0 +1,76 @@ +import os +import shutil +from dataclasses import dataclass +from unittest import TestCase + +from dem.dem_downloader import DemDownloader + +LOCAL_TMP = "/tmp/DEM" + + +@dataclass +class Arguments: + """argument parser result stub""" + + mgrs_tile_code: str = "33TTG" + dem_dataset_name: str = "COP-DEM_GLO-90-DGED__2022_1" + dem_local_url: str = LOCAL_TMP + server_url: str = "https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/" + + +class TestDemDownloader(TestCase): + """DemDownloader test class""" + + def tearDown(self): + """clean temp folder""" + if os.path.exists(LOCAL_TMP): + shutil.rmtree(LOCAL_TMP) + + def test_north_est(self): + arg = Arguments() + dem_downloader = DemDownloader(arg) + res = dem_downloader.get() + assert os.path.isfile(res) + assert res == "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/Copernicus_DSM_90m_33TTG.TIF" + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N41_00_E011_00_DEM.tif" + ) + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N41_00_E012_00_DEM.tif" + ) + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N42_00_E011_00_DEM.tif" + ) + assert 
os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N42_00_E012_00_DEM.tif" + ) + + # no additional download but create temp mosaic + res = dem_downloader.get() + assert res == "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/Copernicus_DSM_90m_33TTG.TIF" + + def test_north_west(self): + arg = Arguments() + arg.mgrs_tile_code = "12SYH" + dem_downloader = DemDownloader(arg) + res = dem_downloader.get() + assert os.path.isfile(res) + assert res == "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/Copernicus_DSM_90m_12SYH.TIF" + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N37_00_W109_00_DEM.tif" + ) + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N37_00_W108_00_DEM.tif" + ) + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N38_00_W109_00_DEM.tif" + ) + assert os.path.isfile( + "/tmp/DEM/COP-DEM_GLO-90-DGED__2022_1/geocells/Copernicus_DSM_30_N38_00_W108_00_DEM.tif" + ) + + def test_unknow(self): + arg = Arguments() + arg.mgrs_tile_code = "12SY" + dem_downloader = DemDownloader(arg) + self.assertRaises(ValueError, dem_downloader.get) diff --git a/sen2like/tests/core/config.ini b/sen2like/tests/core/config.ini new file mode 100644 index 0000000..24978e7 --- /dev/null +++ b/sen2like/tests/core/config.ini @@ -0,0 +1,73 @@ +[Processing] +doGeometry = True +doStitching = True +doGeometryCheck = True +doToa = True +doInterCalibration = True +doAtmcor = True +doNbar = True +doSbaf = True +doTopographicCorrection = True +doFusion = True +doPackagerL2H = True +doPackagerL2F = True + +[Directories] +archive_dir = /data/HLS +cams_dir = /data/CAMS + +[Downloader] +cloud_cover = 10 +coverage = 0.1 +# Local +base_url = /data/PRODUCTS +;url_parameters_pattern_Sentinel2 = {base_url}/{mission}/{tile} +;url_parameters_pattern_Landsat8 = {base_url}/{mission}/{path}/{row} +;url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row} + +# 
Creodias +# base_url = https://finder.creodias.eu/resto/api/collections +# cloud_cover = 100 +# location_Landsat8 = path={path}&row={row} +# location_Sentinel2 = processingLevel=LEVEL1C&productIdentifier=%25{tile}%25 +# url_parameters_pattern = {base_url}/{mission}/search.json?maxRecords=100&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET +# thumbnail_property = properties/productIdentifier +# cloud_cover_property = properties/cloudCover + + +[Geometry] +reference_band = B04 +doMatchingCorrection = True + +[Atmcor] +use_sen2cor = True +sen2cor_path = ../sen2cor/process.py + +[fusion] +# predict_method: predict or composite (most recent valid pixels) +predict_method = predict +predict_nb_products = 2 + +[Stitching] +same_utm_only = True + +[TopographicCorrection] +topographic_correction_limiter = 4.0 +sen2cor_topographic_correction = True + +[DEMRepository] +# Expect to get DEM from {dem_folder}/{dem_dataset}/Copernicus_DSM_{src_dem_resolution}m_{tile_code}.TIF +dem_folder = /data/AUX_DATA/ +dem_dataset = COP-DEM_GLO-90-DGED__2022_1 +src_dem_resolution = 90 + +[OutputFormat] +gain = 10000 +offset = 0 + +[Multiprocessing] +number_of_process = 5 + +[RunTime] +dx = 0 +dy = 0 diff --git a/sen2like/tests/core/products/config.ini b/sen2like/tests/core/products/config.ini new file mode 100644 index 0000000..24978e7 --- /dev/null +++ b/sen2like/tests/core/products/config.ini @@ -0,0 +1,73 @@ +[Processing] +doGeometry = True +doStitching = True +doGeometryCheck = True +doToa = True +doInterCalibration = True +doAtmcor = True +doNbar = True +doSbaf = True +doTopographicCorrection = True +doFusion = True +doPackagerL2H = True +doPackagerL2F = True + +[Directories] +archive_dir = /data/HLS +cams_dir = /data/CAMS + +[Downloader] +cloud_cover = 10 +coverage = 0.1 +# Local +base_url = /data/PRODUCTS +;url_parameters_pattern_Sentinel2 = 
{base_url}/{mission}/{tile} +;url_parameters_pattern_Landsat8 = {base_url}/{mission}/{path}/{row} +;url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row} + +# Creodias +# base_url = https://finder.creodias.eu/resto/api/collections +# cloud_cover = 100 +# location_Landsat8 = path={path}&row={row} +# location_Sentinel2 = processingLevel=LEVEL1C&productIdentifier=%25{tile}%25 +# url_parameters_pattern = {base_url}/{mission}/search.json?maxRecords=100&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET +# thumbnail_property = properties/productIdentifier +# cloud_cover_property = properties/cloudCover + + +[Geometry] +reference_band = B04 +doMatchingCorrection = True + +[Atmcor] +use_sen2cor = True +sen2cor_path = ../sen2cor/process.py + +[fusion] +# predict_method: predict or composite (most recent valid pixels) +predict_method = predict +predict_nb_products = 2 + +[Stitching] +same_utm_only = True + +[TopographicCorrection] +topographic_correction_limiter = 4.0 +sen2cor_topographic_correction = True + +[DEMRepository] +# Expect to get DEM from {dem_folder}/{dem_dataset}/Copernicus_DSM_{src_dem_resolution}m_{tile_code}.TIF +dem_folder = /data/AUX_DATA/ +dem_dataset = COP-DEM_GLO-90-DGED__2022_1 +src_dem_resolution = 90 + +[OutputFormat] +gain = 10000 +offset = 0 + +[Multiprocessing] +number_of_process = 5 + +[RunTime] +dx = 0 +dy = 0 diff --git a/sen2like/tests/core/products/test_processing_context.py b/sen2like/tests/core/products/test_processing_context.py new file mode 100644 index 0000000..b4fa17f --- /dev/null +++ b/sen2like/tests/core/products/test_processing_context.py @@ -0,0 +1,277 @@ +""" +Test ProcessingContext configuration depending some use cases +after the call of sen2like `pre_process_atmcor` function. 
+`pre_process_atmcor` reconfigure the product or sen2cor output product ProcessingContext +""" + +import os +from tempfile import TemporaryDirectory +from unittest import TestCase + +from core.products.product import ProcessingContext, S2L_Product +from core.readers.reader import BaseReader +from core.S2L_config import config + +from sen2like import sen2like + +test_folder_path = os.path.dirname(__file__) +configuration_file = os.path.join(test_folder_path, "config.ini") + +tmp_dir_val = {} # store temp dir name + + +class DummyReader(BaseReader): + """Simple Reader impl to configure some properties used during processing""" + + def __init__(self, path): + super().__init__(path) + self.cloud_cover = "0.0" + self.processing_sw = None + if "L1C" in path: + self.data_type = "LEVEL1C" + else: + self.data_type = "L2A" + + @staticmethod + def can_read(product_name): + return True + + +class Sen2corClientStub: # pylint: disable=too-few-public-methods + """Sen2corClient stub""" + + def __init__(self, sen2cor_command, out_mgrs, enable_topo_corr): + self.sen2cor_command = sen2cor_command + self.out_mgrs = out_mgrs + + def run(self, product): + # pylint: disable=unused-argument,missing-function-docstring + product_dir = os.path.join(tmp_dir_val["name"], "L2A_out") + os.mkdir(product_dir) + return product_dir + + +class DummyProduct(S2L_Product): + """Simple S2L_Product having DummyReader""" + + sensor = "S2A" + + def __init__(self, path, context): + super().__init__(path, context) + self.mtl = DummyReader(path) + + +# replace sen2cor client with its stub +sen2like.Sen2corClient = Sen2corClientStub + + +class TestProcessingContext(TestCase): + """ProcessingContext module test class""" + + def __init__(self, methodName): + super().__init__(methodName) + if not config.initialize(configuration_file): + raise Exception + + def process(self, is_l2a: bool = False) -> S2L_Product: + """run `pre_process_atmcor` until the call of `update_config`""" + + with TemporaryDirectory() as 
tem_dir: + tmp_dir_val["name"] = tem_dir + + # simulate a product + product_dir = os.path.join(tem_dir, "L2A_in" if is_l2a else "L1C_in") + os.mkdir(product_dir) + processing_context = ProcessingContext(config, "31TFJ") + product = DummyProduct(product_dir, processing_context) + + # sen2like product process reconfiguration + product = sen2like.pre_process_atmcor(product, "31TFJ") + + return product + + def test_a(self): + """process S2 L1C""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "False", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "True", + } + ) + product: S2L_Product = self.process() + + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + self.assertFalse( + product.context.sen2cor_topographic_correction, + "sen2cor topo correction must be disabled", + ) + + def test_b(self): + """process S2 L1C""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "True", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "True", + } + ) + product: S2L_Product = self.process() + + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + self.assertTrue( + product.context.sen2cor_topographic_correction, + "sen2cor topo correction must be enabled", + ) + + def test_c(self): + """process S2 L1C""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "True", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "False", + } + ) + product: S2L_Product = self.process() + + self.assertTrue( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be enabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + 
self.assertFalse( + product.context.sen2cor_topographic_correction, + "sen2cor topo correction must be disabled", + ) + + def test_d(self): + """process S2 L1C""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "False", + "use_sen2cor": "False", + "sen2cor_topographic_correction": "False", + } + ) + product: S2L_Product = self.process() + + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertTrue(product.context.doAtmcor, "AtmCor proc block must be enabled") + self.assertFalse(product.context.use_sen2cor, "sen2cor topo correction must be disabled") + self.assertEqual(product.mtl.data_type, "LEVEL1C") + + def test_e(self): + """process S2 L1C""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "True", + "use_sen2cor": "False", + "sen2cor_topographic_correction": "False", + } + ) + product: S2L_Product = self.process() + + self.assertTrue( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be enabled", + ) + self.assertTrue(product.context.doAtmcor, "AtmCor proc block must be enabled") + self.assertFalse(product.context.use_sen2cor, "sen2cor topo correction must be disabled") + self.assertEqual(product.mtl.data_type, "LEVEL1C") + + def test_f(self): + """process S2 L2A""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "False", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "True", + } + ) + config.set("s2_processing_level", "LEVEL2A") # -l2a arg + + product: S2L_Product = self.process(True) + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + + def test_g(self): + """process S2 L2A""" + config.overload( + { + "doAtmcor": "False", + "doTopographicCorrection": "False", + "use_sen2cor": "True", + 
"sen2cor_topographic_correction": "True", + } + ) + config.set("s2_processing_level", "LEVEL2A") # -l2a arg + + product: S2L_Product = self.process(True) + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + + def test_h(self): + """process S2 L2A""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "True", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "True", + "s2_processing_level": "LEVEL2A", # -l2a arg + } + ) + config.set("s2_processing_level", "LEVEL2A") # -l2a arg + + product: S2L_Product = self.process(True) + + self.assertTrue( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be enabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") + + def test_i(self): + """process S2 L2A""" + config.overload( + { + "doAtmcor": "True", + "doTopographicCorrection": "False", + "use_sen2cor": "True", + "sen2cor_topographic_correction": "True", + "s2_processing_level": "LEVEL2A", # -l2a arg + } + ) + config.set("s2_processing_level", "LEVEL2A") # -l2a arg + + product: S2L_Product = self.process(True) + + self.assertFalse( + product.context.doTopographicCorrection, + "TopographicCorrection proc block must be disabled", + ) + self.assertFalse(product.context.doAtmcor, "AtmCor proc block must be disabled") diff --git a/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_LS.xml b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_LS.xml new file mode 100644 index 0000000..ba9d497 --- /dev/null +++ b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_LS.xml @@ -0,0 +1,170 @@ + + + + INFO + + + 4 + 5 + 4000 + 4000 + + + + + + + + + https://doi.org/10.5270/S2_-znk9xsj + + AUTO + + NONE + + dem/CopernicusDEM90 + + https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/COP-DEM_GLO-90-DGED__2022_1/ + + FALSE + + FALSE + + TRUE + + 
FALSE + + TRUE + + TRUE + + DEFAULT + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + L2A_GIPP + L2A_CAL_SC_GIPP + L2A_CAL_AC_GIPP + L2A_PB_GIPP + L2A_QUALITY + + + + 0 + + + + + RURAL + + SUMMER + + 0 + + + + 1 + + 1 + + 1 + + FALSE + + TRUE + + 0 + + 0.22 + + + + 1.000 + + 40.0 + + 0.100 + + 100.0 + + 0.25 + + 0 + + + + \ No newline at end of file diff --git a/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_OFF.xml b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_OFF.xml new file mode 100644 index 0000000..8b8168b --- /dev/null +++ b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_OFF.xml @@ -0,0 +1,170 @@ + + + + INFO + + + OFF + OFF + 4000 + 4000 + + + + + + + + + https://doi.org/10.5270/S2_-znk9xsj + + AUTO + + NONE + + dem/CopernicusDEM90 + + 
https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/COP-DEM_GLO-90-DGED__2022_1/ + + FALSE + + FALSE + + TRUE + + FALSE + + TRUE + + TRUE + + DEFAULT + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + L2A_GIPP + L2A_CAL_SC_GIPP + L2A_CAL_AC_GIPP + L2A_PB_GIPP + L2A_QUALITY + + + + 0 + + + + + RURAL + + SUMMER + + 0 + + + + 1 + + 1 + + 1 + + FALSE + + FALSE + + 0 + + 0.22 + + + + 1.000 + + 40.0 + + 0.100 + + 100.0 + + 0.25 + + 0 + + + + \ No newline at end of file diff --git a/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_ON.xml b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_ON.xml new file mode 100644 index 0000000..b9c0e90 --- /dev/null +++ b/sen2like/tests/core/sen2cor_L2A_GIPP_DEM_ON.xml @@ -0,0 +1,170 @@ + + + + INFO + + + OFF + OFF + 
4000 + 4000 + + + + + + + + + https://doi.org/10.5270/S2_-znk9xsj + + AUTO + + NONE + + dem/CopernicusDEM90 + + https://prism-dem-open.copernicus.eu/pd-desk-open-access/prismDownload/COP-DEM_GLO-90-DGED__2022_1/ + + FALSE + + FALSE + + TRUE + + FALSE + + TRUE + + TRUE + + DEFAULT + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + + S2_User_Product_Level-1C_Metadata + S2_User_Product_Level-2A_Metadata + S2_PDI_Level-1C_Tile_Metadata + S2_PDI_Level-2A_Tile_Metadata + S2_PDI_Level-1C_Datastrip_Metadata + S2_PDI_Level-2A_Datastrip_Metadata + + L2A_GIPP + L2A_CAL_SC_GIPP + L2A_CAL_AC_GIPP + L2A_PB_GIPP + L2A_QUALITY + + + + 0 + + + + + RURAL + + SUMMER + + 0 + + + + 1 + + 1 + + 1 + + FALSE + + TRUE + + 0 + + 0.22 + + + + 1.000 + + 40.0 + + 0.100 + + 100.0 + + 0.25 + + 0 + + + + \ No newline at end of file diff --git a/sen2like/tests/core/test_sen2cor_client.py b/sen2like/tests/core/test_sen2cor_client.py new file mode 100644 index 0000000..8c882c5 --- /dev/null +++ 
b/sen2like/tests/core/test_sen2cor_client.py @@ -0,0 +1,126 @@ +import difflib +import filecmp +import os +from tempfile import TemporaryDirectory +from unittest import TestCase + +from core.products.product import ProcessingContext, S2L_Product +from core.readers.reader import BaseReader +from core.S2L_config import config +from core.sen2cor_client import sen2cor_client +from core.sen2cor_client.sen2cor_client import Sen2corClient + +test_folder_path = os.path.dirname(__file__) +configuration_file = os.path.join(test_folder_path, "config.ini") + + +def pixel_center(ref_band_file, mgrs): + return 4, 5 + +# stub pixel center +sen2cor_client.pixel_center = pixel_center + + +def show_diff(tested, reference): + with open(tested) as file_1: + file_1_text = file_1.readlines() + + with open(reference) as file_2: + file_2_text = file_2.readlines() + + # Find and print the diff: + for line in difflib.unified_diff( + file_1_text, file_2_text, fromfile="tested", tofile="reference", lineterm="" + ): + print(line) + + +class DummyProduct(S2L_Product): + """Simple S2L_Product having DummyReader""" + + sensor = "S2A" + + def __init__(self, path, context): + super().__init__(path, context) + self.mtl = DummyReader(path) + + +class DummyReader(BaseReader): + """Simple Reader impl to configure some properties used during processing""" + + def __init__(self, path): + super().__init__(path) + self.cloud_cover = "0.0" + self.processing_sw = None + if "L1C" in path: + self.data_type = "LEVEL1C" + else: + self.data_type = "L2A" + + if "LS" in path: + self.mission = "LANDSAT_8" + + @staticmethod + def can_read(product_name): + return True + + +class TestSen2corClient(TestCase): + """ProcessingContext module test class""" + + def __init__(self, methodName): + super().__init__(methodName) + if not config.initialize(configuration_file): + raise Exception + + def test_dem_config(self): + # simulate a product + with TemporaryDirectory() as tem_dir: + config.set("wd", tem_dir) # set it for 
sen2cor + os.makedirs(os.path.join(tem_dir, "sen2cor", "L1C_in")) + + product_dir = os.path.join(tem_dir, "L1C_in") + os.mkdir(product_dir) + processing_context = ProcessingContext(config, "31TFJ") + product = DummyProduct(product_dir, processing_context) + + s2c = Sen2corClient("dummy_command", "31TFJ", True) + # pylint: disable=protected-access This what we want to test + gipp_path = s2c._write_gipp(product) + reference = os.path.join(test_folder_path, "sen2cor_L2A_GIPP_DEM_ON.xml") + try: + self.assertTrue(filecmp.cmp(reference, gipp_path), "Files does not match") + except AssertionError: + show_diff(gipp_path, reference) + raise + + s2c = Sen2corClient("dummy_command", "31TFJ", False) + # pylint: disable=protected-access This what we want to test + gipp_path = s2c._write_gipp(product) + reference = os.path.join(test_folder_path, "sen2cor_L2A_GIPP_DEM_OFF.xml") + try: + self.assertTrue(filecmp.cmp(reference, gipp_path), "Files does not match") + except AssertionError: + show_diff(gipp_path, reference) + raise + + def test_landsat_config(self): + # simulate a product + with TemporaryDirectory() as tem_dir: + config.set("wd", tem_dir) # set it for sen2cor + os.makedirs(os.path.join(tem_dir, "sen2cor", "L1C_LS_in")) + + product_dir = os.path.join(tem_dir, "L1C_LS_in") + os.mkdir(product_dir) + processing_context = ProcessingContext(config, "31TFJ") + product = DummyProduct(product_dir, processing_context) + + s2c = Sen2corClient("dummy_command", "31TFJ", True) + # pylint: disable=protected-access This what we want to test + gipp_path = s2c._write_gipp(product) + reference = os.path.join(test_folder_path, "sen2cor_L2A_GIPP_DEM_LS.xml") + try: + self.assertTrue(filecmp.cmp(reference, gipp_path), "Files does not match") + except AssertionError: + show_diff(gipp_path, reference) + raise diff --git a/sen2like/tests/s2l_processes/config.ini b/sen2like/tests/s2l_processes/config.ini index db22380..e181a36 100644 --- a/sen2like/tests/s2l_processes/config.ini +++ 
b/sen2like/tests/s2l_processes/config.ini @@ -7,6 +7,7 @@ doInterCalibration = True doAtmcor = True doNbar = True doSbaf = True +doTopographicCorrection = True doFusion = True doPackagerL2H = True doPackagerL2F = True @@ -41,12 +42,25 @@ doMatchingCorrection = True use_sen2cor = True sen2cor_path = ../sen2cor/process.py +[Sbaf] +adaptative = True + [fusion] # predict_method: predict or composite (most recent valid pixels) predict_method = predict predict_nb_products = 2 [Stitching] +same_utm_only = True + +[TopographicCorrection] +topographic_correction_limiter = 4.0 + +[DEMRepository] +# Expect to get DEM from {dem_folder}/{dem_dataset}/Copernicus_DSM_{src_dem_resolution}m_{tile_code}.TIF +dem_folder = /data/AUX_DATA/ +dem_dataset = COP-DEM_GLO-90-DGED__2022_1 +src_dem_resolution = 90 [OutputFormat] gain = 10000 diff --git a/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py b/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py index c74b80e..89cfad3 100644 --- a/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py +++ b/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py @@ -35,10 +35,10 @@ def test_S2B_band01(self): _product_path, "GRANULE/L1C_T31TFJ_A003609_20171114T104257/IMG_DATA/T31TFJ_20171114T104259_B01.jp2")) # MUST RUN TOA before inter calibration - block = S2L_Toa() + block = S2L_Toa(False) image = block.process(product, image, "B01") - block = S2L_InterCalibration() + block = S2L_InterCalibration(False) result_image = block.process(product, image, "B01") self.assertNotEqual(image.filepath, result_image.filepath, @@ -55,10 +55,10 @@ def test_S2B_band09(self): _product_path, "GRANULE/L1C_T31TFJ_A003609_20171114T104257/IMG_DATA/T31TFJ_20171114T104259_B09.jp2")) # MUST RUN TOA before inter calibration - block = S2L_Toa() + block = S2L_Toa(False) image = block.process(product, image, "B09") - block = S2L_InterCalibration() + block = S2L_InterCalibration(False) result_image = block.process(product, image, "B09") 
self.assertEqual(image.filepath, result_image.filepath, "Result image should be the same") @@ -74,10 +74,10 @@ def test_landsat(self): _product_path, "LC81960292017318MTI00_B3.TIF")) # MUST RUN TOA before inter calibration - block = S2L_Toa() + block = S2L_Toa(False) image = block.process(product, image, "B3") - block = S2L_InterCalibration() + block = S2L_InterCalibration(False) result_image = block.process(product, image, "B3") self.assertEqual(image.filepath, result_image.filepath, "Result image should be the same") diff --git a/sen2like/tests/s2l_processes/test_S2L_Nbar.py b/sen2like/tests/s2l_processes/test_S2L_Nbar.py index aa70e9f..825860f 100644 --- a/sen2like/tests/s2l_processes/test_S2L_Nbar.py +++ b/sen2like/tests/s2l_processes/test_S2L_Nbar.py @@ -35,7 +35,7 @@ def test_select_vr_file(self): product = Sentinel2Product(_product_path, context) # deliberately set false aux data folder (test_folder_path) # to not have error during object init - vjb = VJBMatriceBRDFCoefficient(product, None, "B04", test_folder_path) + vjb = VJBMatriceBRDFCoefficient(product, None, "B04", test_folder_path, False) # pylint: disable=protected-access assert vjb._select_vr_file(aux_data_dir) == os.path.join( diff --git a/sen2like/tests/s2l_processes/test_s2l_processes.py b/sen2like/tests/s2l_processes/test_s2l_processes.py new file mode 100644 index 0000000..5c40375 --- /dev/null +++ b/sen2like/tests/s2l_processes/test_s2l_processes.py @@ -0,0 +1,29 @@ +"""S2L processes module tests""" +import os +from unittest import TestCase + +from core.S2L_config import config +from s2l_processes import create_process_block +from s2l_processes.S2L_Sbaf import S2L_Sbaf +from s2l_processes.S2L_Toa import S2L_Toa +from s2l_processes.S2L_TopographicCorrection import S2L_TopographicCorrection + +test_folder_path = os.path.dirname(__file__) +configuration_file = os.path.join(test_folder_path, "config.ini") + + +class TestS2LProcesses(TestCase): + def __init__(self, methodName): + 
super().__init__(methodName) + if not config.initialize(configuration_file): + raise Exception + config.set("generate_intermediate_products", False) + + def test_create(self): + assert isinstance( + create_process_block("S2L_TopographicCorrection"), S2L_TopographicCorrection + ) + assert isinstance(create_process_block("S2L_Toa"), S2L_Toa) + assert isinstance(create_process_block("S2L_Sbaf"), S2L_Sbaf) + + self.assertRaises(RuntimeError, create_process_block, "bad module name")