diff --git a/README.md b/README.md
index eecb7ce..73d1dd9 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ The main goal of __Sen2Like__ is to generate Sentinel-2 like harmonised/fused su
It is a contribution to ongoing worldwide initiatives (*NASA-HLS, Force, CESBIO [2],[3]) undertaken to facilitate higher level processing starting from harmonized data.
-The __Sen2Like__ framework is a scientific and open source software. In its current implementation version (*November 2020*), it combines Landsat-8 and Sentinel-2 data products.
+The __Sen2Like__ framework is scientific, open-source software. In its current implementation version (*December 2022*), it combines Landsat-8 and Sentinel-2 data products.
Level 1 and Level 2 input Landsat 8 (LS8) products are processed to be harmonized with Sentinel-2 data (S2). The two following ARD product types are generated:
* Harmonized Surface Reflectance Products (Level 2H) - at 30m resolution,
@@ -36,7 +36,7 @@ It is therefore possible, to cover large geographic extent with a __seamless ima
It is worth noting that the overall accuracy of your final ARD product strongly depends on the accuracy of the sen2like auxiliary data. Two categories of auxiliary data are important: the raster reference for geometric corrections and the meteorological data for atmospheric corrections. Regarding atmospheric corrections, one possibility is to use data from the Copernicus Atmosphere Monitoring Service [9]. The Sen2Like team prepared a dedicated CAMS monthly dataset for the Year 2020, available from [here](http://185.178.85.51/CAMS/). Please refer to this short [description](http://185.178.85.51/CAMS/Readme_CAMS2020.txt) for additional information.

For further details on the format specification of the harmonized products or the functionalities of the Sen2Like software, please
-refer to the [Product Format Specification](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.0.pdf), and the [User Manual v1.5](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-SEN2LIKE-UM-V1.5.pdf).
+refer to the [Product Format Specification](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf), and the [User Manual v1.6](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-SEN2LIKE-UM-V1.6.pdf).

## Publications and Contacts

**Yearning to know more? Check out**

And the following research papers:

-**Learn how to use Sen2Like**, have a look at the [User Manual](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-SEN2LIKE-UM-V1.5.pdf).
+**Learn how to use Sen2Like**, have a look at the [User Manual](https://github.com/senbox-org/sen2like/blob/master/sen2like/docs/source/S2-SEN2LIKE-UM-V1.6.pdf).

**Get help**, contact us at sen2like@telespazio.com.
diff --git a/sen2like/.dockerignore b/sen2like/.dockerignore
new file mode 100644
index 0000000..92d502b
--- /dev/null
+++ b/sen2like/.dockerignore
@@ -0,0 +1,5 @@
+# exclude all
+*
+# but not what we want to send in the build context
+!requirements.txt
+!sen2like
diff --git a/sen2like/.gitignore b/sen2like/.gitignore
index b5181d2..dd0a289 100644
--- a/sen2like/.gitignore
+++ b/sen2like/.gitignore
@@ -8,4 +8,5 @@ docs/source/modules
/docs/coverage/
/doc-generated.txt
dist/
-sen2cor/
\ No newline at end of file
+sen2cor/
+**/delivery/
diff --git a/sen2like/Dockerfile b/sen2like/Dockerfile
index def113c..f647896 100644
--- a/sen2like/Dockerfile
+++ b/sen2like/Dockerfile
@@ -1,8 +1,6 @@
# docker image based on miniconda image (debian:latest)
-FROM continuumio/miniconda3
-
-# install in curent docker image mesa-glx
-RUN apt-get update && apt-get install -y libgl1-mesa-glx
+FROM continuumio/miniconda3 AS build
+LABEL stage=sen2like_build

# set the working dir to /usr/local/sen2like
WORKDIR /usr/local/sen2like
@@ -13,14 +11,39 @@ COPY ./requirements.txt .
# create sen2like env from requirement
RUN conda create -n sen2like --file requirements.txt -c conda-forge

+# Install conda-pack:
+RUN conda install -c conda-forge conda-pack
+
+# Use conda-pack to create a standalone environment
+# in /venv:
+RUN conda-pack -n sen2like -o /tmp/env.tar && \
+    mkdir /venv && cd /venv && tar xf /tmp/env.tar && \
+    rm /tmp/env.tar
+
+# We've put venv in the same path it'll be in the final image,
+# so now fix up paths:
+RUN /venv/bin/conda-unpack
+
+# FINAL IMAGE
+FROM debian:bullseye-slim AS runtime
+
+# Copy /venv from the previous stage:
+COPY --from=build /venv /venv
+
+# install mesa-glx in the current docker image
+RUN apt-get update && apt-get install -y libgl1-mesa-glx
+
+# set PATH with venv
+ENV VIRTUAL_ENV=/venv
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
+# set the working dir to /usr/local/sen2like
+WORKDIR /usr/local/sen2like
+
# copy script code to run when container is started:
COPY ./sen2like .
-# set sne2like.py executable
+# set sen2like.py executable
RUN chmod +x /usr/local/sen2like/sen2like.py

-# initialise conda for all shells
-RUN conda init bash
-
-# force activation of sen2like env on bash
-RUN echo "conda activate sen2like" >> ~/.bashrc
+ENTRYPOINT ["./sen2like.py"]
diff --git a/sen2like/README.md b/sen2like/README.md
index 3f967f3..63650fd 100644
--- a/sen2like/README.md
+++ b/sen2like/README.md
@@ -1,178 +1,248 @@
-# Sen2Like
-
-## Local install
-
-### check installation of tools for install
-
-`sudo apt-get install curl git`
-
-#### Retrieve sources of Sen2Like code
-
-* Using git (restricted to telespazio):
-
-`git clone git@gitlab.telespazio.fr:SEN2LIKE/poleeo.git`
-
-* Or from a downloaded archive:
-
-`unzip sen2like.zip`
-
-`cd sen2like`
-
-### Installation of Anaconda or Miniconda
-
-* Installing Anaconda:
-
-`curl https://repo.anaconda.com/archive/Anaconda3-2020.02-Linux-x86_64.sh --output Anaconda3-2020.02-Linux-x86_64.sh`
-
-`chmod +x Anaconda3-2020.02-Linux-x86_64.sh`
-
-`./Anaconda3-2020.02-Linux-x86_64.sh`
-
-* or Miniconda:
-
-`curl https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh --output Miniconda3-py37_4.8.2-Linux-x86_64.sh`
-
-`chmod +x Miniconda3-py37_4.8.2-Linux-x86_64.sh`
-
-`./Miniconda3-py37_4.8.2-Linux-x86_64.sh`
-
-### Create a conda virtual environment with required packages
-
-`conda create -n sen2like --file requirements.txt -c conda-forge`
+# Sen2Like
+
+## Table of contents
+
+TOC generated with Markdown All in One: https://github.com/yzhang-gh/vscode-markdown
+
+- [Retrieve sources of sen2like](#retrieve-sources-of-sen2like)
+- [Local install](#local-install)
+  - [Prerequisite](#prerequisite)
+    - [libGL](#libgl)
+    - [Conda](#conda)
+- [Docker](#docker)
+  - [Prerequisite](#prerequisite-1)
+  - [Build sen2like docker image](#build-sen2like-docker-image)
+  - [Docker store in repository](#docker-store-in-repository)
+- [Running the tool](#running-the-tool)
+  - [Local install](#local-install-1)
+  - [Docker](#docker-1)
+  - [sen2like usage](#sen2like-usage)
+  - [Configuration file](#configuration-file)
+    - [Processing](#processing)
+    - [Directories](#directories)
+    - [Downloader](#downloader)
+      - [Local](#local)
+      - [Creodias API](#creodias-api)
+    - [DemDownloader](#demdownloader)
+    - [Geometry](#geometry)
+    - [Atmcor](#atmcor)
+    - [Nbar](#nbar)
+    - [Fusion](#fusion)
+    - [Stitching](#stitching)
+    - [OutputFormat](#outputformat)
+    - [COGoptions](#cogoptions)
+    - [JPEG2000options](#jpeg2000options)
+    - [Multiprocessing](#multiprocessing)
+    - [Packager](#packager)
+    - [Runtime](#runtime)
+  - [Command line arguments](#command-line-arguments)
+    - [Product mode](#product-mode)
+    - [Single tile mode](#single-tile-mode)
+    - [Multi tile mode](#multi-tile-mode)
+    - [ROI based mode](#roi-based-mode)
+- [Release notes](#release-notes)
+- [License](#license)
+
+## Retrieve sources of sen2like
+
+* Using git:
+
+```bash
+git clone https://github.com/senbox-org/sen2like.git
+```

-### Activate conda virtual environment
+* Or from a [downloaded archive](https://github.com/senbox-org/sen2like/archive/refs/heads/master.zip):

-`conda activate sen2like`
+```bash
+unzip sen2like.zip
+```

-### Installation of dependencies
+* Enter the sen2like root source folder:

-`sudo apt-get install mesa-libGL`
+```bash
+cd sen2like/sen2like
+```

-## Docker creation
+## Local install

-### Docker environement install
+### Prerequisite

-#### On Ubuntu
+#### libGL

-* Docker install method availlable for ubuntu when writing this file "2020-09" check for current date
and os

+You will need `libGL` on Linux; depending on your distribution, the package can be `libgl1-mesa-glx`, `mesa-libGL` or another. Install it if you don't have it yet.
-`sudo apt-get remove docker docker-engine docker.io containerd runc`
+#### Conda
-`sudo apt-get install apt-transport-https ca-certificates curl software-properties-common`
+sen2like needs a [conda](https://docs.conda.io/en/latest/) env to work.
-`curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -`
+We recommend using miniconda.
-`sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable`
+To install miniconda, please refer to the miniconda documentation: https://docs.conda.io/en/latest/miniconda.html#
-`sudo apt-get update`
+**Please note that you MUST use an installer compatible with your current python version**
-`sudo apt-get install docker-ce`
+For example: if you have python 3.8 on Linux x86_64, choose the Python 3.8 Miniconda3 Linux 64-bit installer as illustrated below:
-`sudo usermod -aG docker ${USER}`
+![miniconda](docs/resources/miniconda_version.png)
-log out and log in to get changes applied
+* Create a conda virtual environment with required packages
-#### On Centos 7
+Once you have retrieved the code, go into the `sen2like` root source folder and run the following command to create a conda env named `sen2like`:
-`sudo yum install -y yum-utils`
+```bash
+conda create -n sen2like --file requirements.txt -c conda-forge
+```
-`sudo yum-config-manager \
---add-repo \
-https://download.docker.com/linux/centos/docker-ce.repo`
+* Activate conda virtual environment
-`sudo yum install docker-ce docker-ce-cli containerd.io`
+```bash
+conda activate sen2like
+```
-`sudo usermod -aG docker ${USER}`
+* Verify your sen2like installation with `python sen2like.py`, which should display sen2like CLI usage:
-log out and log in to get changes applied
+```bash
+python sen2like.py
+[INFO ] 2022-10-23 06:53:18 - sen2like - Run Sen2like 4.1.0
+usage: sen2like.py [-h] [--version] [--refImage PATH] [--wd PATH]
+                   [--conf PATH] [--confParams STRLIST] [--bands STRLIST]
+                   [--no-run] [--intermediate-products] [--parallelize-bands]
+                   [--debug] [--no-log-date]
+                   {product-mode,single-tile-mode,multi-tile-mode,roi-based-mode}
+                   ...
+....
+```
-### Retrieve sources of Sen2Like code
+*Note: output is deliberately truncated*
-* Using git (restricted to telespazio):
+## Docker
-`git clone https://gitlab.telespazio.fr/SEN2LIKE/poleeo.git`
+### Prerequisite
-* Or from a downloaded archive:
+You need docker engine in order to build and execute sen2like with docker.
-`unzip sen2like.zip`
+Please refer to the docker documentation to install docker on your environment: https://docs.docker.com/engine/install/
-`cd sen2like`
+### Build sen2like docker image
-### Docker build
+From the sen2like root directory (the one containing `Dockerfile`):
-Build docker image from Dockerfile:
+```bash
+docker build -t sen2like . && docker image prune --filter label=stage=sen2like_build -f
+```
-`cd ./poleeo/HLS-project`
+The result is a docker image with tag `sen2like:latest`
-`docker build -t sen2like .`
+*Note: In the previous command, the `docker image prune` instruction removes the intermediate docker image created for the build.*

### Docker store in repository

-Tag the image so that is points to registry
+You might want to store the built docker image on your internal docker registry.
-`docker image tag `
+For this purpose, tag the image so that it points to your registry:
-sample
+```bash
+docker tag --help
-`docker image tag sen2like https://tpzf-ssa-docker-registry.telespazio.fr`
+Usage:  docker tag SOURCE_IMAGE[:TAG] TARGET_IMAGE[:TAG]
-Push the image on registry
+Create a tag TARGET_IMAGE that refers to SOURCE_IMAGE
+```
-`docker push `
+Example:
-sample
+```bash
+docker image tag sen2like my-internal-docker-registry-url/sen2like:4.1
+```
-`docker push https://tpzf-ssa-docker-registry.telespazio.fr/sen2like`
+Push the image on a registry with the command `docker push NAME[:TAG]`
-reminder to allow access to docker registry https://tpzf-ssa-docker-registry.telespazio.fr you should
+Example:
-* have an account on the registry
-* update or add /etc/docker/daemon.json with { "insecure-registries" : ["https://tpz-ssa-docker-registry.telespazio.fr"]
-  }
-* restart docker daemon `systemctl restart docker`
+```bash
+docker push my-internal-docker-registry-url/sen2like:4.1
+```

## Running the tool

-### running on local install
+### Local install

After install, the Python script sen2like.py can be found in the cloned git repository or the unzipped folder.

-For exemple if git cloned in home directory:
+For example if git cloned in home directory:
+
+```bash
+/opt/anaconda3/bin/python "$HOME/sen2like/sen2like/sen2like.py" single-tile-mode 31TFJ --conf "./config.ini" --start-date 2017-10-30 --end-date 2017-10-31 --wd "/data/production" --refImage "/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2" --bands B04
+```
+
+### Docker
+
+Build sen2like docker image or pull it from a registry with the command `docker pull NAME[:TAG]`
-`/opt/anaconda3/bin/python "$HOME/poleeo/HLS-project/sen2like/sen2like.py" single-tile-mode 31TFJ --conf "./config.ini" --start-date 2017-10-30 --end-date 2017-10-31 --wd "/data/production" --refImage "/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2" --bands B04`
+Example:
-### running in docker
+```bash
+docker pull my-internal-docker-registry-url/sen2like:4.1
+```
-After pulling the docket from registry
+You can run it directly without entering the container:
-`docker pull `
+```bash
+docker run --rm my-internal-docker-registry-url/sen2like:4.1
-sample
+
+[INFO ] 2022-10-23 04:37:47 - sen2like - Run Sen2like 4.1.0
+usage: sen2like.py [-h] [--version] [--refImage PATH] [--wd PATH]
+                   [--conf PATH] [--confParams STRLIST] [--bands STRLIST]
+                   [--no-run] [--intermediate-products] [--parallelize-bands]
+                   [--debug] [--no-log-date]
+                   {product-mode,single-tile-mode,multi-tile-mode,roi-based-mode}
+                   ...
-`docker pull https://tpz-ssa-docker-registry.telespazio.fr/sen2like`
+positional arguments:
+....
+```
-reminder to allow access to docker registry https://tpz-ssa-docker-registry.telespazio.fr you should
+In the following examples, the **local** folder `/data` is supposed to exist and to contain:
+- the sen2like config file `/data/config.ini`
+- a folder for working `/data/production`
+- the reference image `/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2`
+
+
+```bash
+docker run --rm \
+    --mount type=bind,source="/data",target=/data \
+    my-internal-docker-registry-url/sen2like:4.1 \
+    single-tile-mode 31TFJ \
+    --conf "/data/config.ini" \
+    --start-date 2017-10-30 --end-date 2017-10-31 \
+    --wd "/data/production" \
+    --refImage "/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2" \
+    --bands B04
+```

-* have an account on the registry
-* update or add /etc/docker/daemon.json with { "insecure-registries" : ["https://tpz-ssa-docker-registry.telespazio.fr"]
-  }
-* restart docker daemon `systemctl restart docker`
+Python script `sen2like.py` can also be accessed from inside a docker container.

-Python script sen2like.py could be accessed from docker.

-* remark in this sample **local** folder `/data` is supposed to exist and contain sen2like config
-  file `/data/config.ini` a folder for working `/data/production` and the reference
-  image `/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2`
+Launch the docker container, binding the **local** `/data` folder to the container `/data` folder, for example:
+
+```bash
+docker run --rm -it --mount type=bind,source="/data",target=/data --entrypoint=/bin/bash my-internal-docker-registry-url/sen2like:4.1
+
+root@15a2f44ddd70:/usr/local/sen2like
+
+```

-Launch the docker binding local /data folder to docker internal /data folder
+In the prompt, execute ./sen2like.py as follows:

-`docker run -it --mount type=bind,source="/data",target=/data tpzf-ssa-docker-registry.telespazio.fr/sen2like/sen2like:3.0`
-
-In prompt activate sen2like env and execute ./sen2like.py
-`python ./sen2like.py single-tile-mode 31TFJ --conf "/data/config.ini" --start-date 2017-10-30 --end-date 2017-10-31 --wd "/data/production" --refImage "/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2" --bands B04`
+```bash
+./sen2like.py single-tile-mode 31TFJ \
+    --conf "/data/config.ini" \
+    --start-date 2017-10-30 --end-date 2017-10-31 \
+    --wd "/data/production" \
+    --refImage "/data/References/GRI/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_N01.01/IMG_DATA/S2A_OPER_MSI_L1C_TL_MPS__20161018T120000_A000008_T31TFJ_B04.jp2" \
+    --bands B04
+```

### sen2like usage

@@ -180,9 +250,8 @@ Sen2like can be run in four different modes:

* `product-mode`: Run the tool on a single product
* `single-tile-mode`: Run the tool on a MGRS tile. Corresponding products will be loaded.
-* `multi-tile-mode`: Run the tool on a ROI defined in a geojson. Corresponding MGRS tile will be inferred and products
-  will be loaded. It is equivalent to run a single-tile mode for each matching tile. In multi-tile mode, multiprocessing
-  can be used to speed-up computation time.
+* `multi-tile-mode`: Run the tool on a ROI defined in a geojson. Corresponding MGRS tile will be inferred and products will be loaded.
It is equivalent to running single-tile mode for each matching tile. In multi-tile mode, multiprocessing can be used to speed up computation time.
+* `roi-based-mode`: Run the tool on a ROI defined in a geojson.

The configuration of the tool is done by command-line arguments and by a configuration file.

A default configuration file is provided in `conf/config.ini`.

@@ -203,26 +272,26 @@ Enable or disable a processing block based on value `(True, False)`:

* `doStitching`: Run the stitching processing
* `doGeometryKLT`: Run the geometric correction processing using KLT
* `doToa`: Run the TOA correction
-* `doInterCalibration`: Run the Inter Calibration correction
-* `doAtmcor`: Run the Atmospheric correction
+* `doInterCalibration`: Run the Inter Calibration correction (S2B)
+* `doAtmcor`: Run the Atmospheric correction (SMAC or Sen2Cor)
* `doNbar`: Run Nbar correction processing
* `doSbaf`: Run the Sbaf correction processing
* `doFusion`: Run the Fusion processing
* `doPackager`: Run the packaging processing (legacy)
* `doPackagerL2H`: Run the packaging processing for harmonized products
-* `doPackagerL2F`: Run the packaging processing
+* `doPackagerL2F`: Run the packaging processing for fused products

#### Directories

Indicates paths for special directories:

-* `archive_dir`: Where to store resulting products
-* `cams_dir`: Where are located CAMS files
-* `cams_daily_dir`: Where are located CAMS daily files
-* `cams_hourly_dir`: Where are located CAMS hourly files
-* `cams_climatology_dir`: Where are located CAMS climatology files
-* `dem_dir`: Where are located DEM files
-* `scl_dir`: Where are located scl maps files
+* `archive_dir`: Where the L2H and L2F output products are generated
+* `cams_dir`: Where the CAMS files are located
+* `cams_daily_dir`: Where the CAMS daily files are located
+* `cams_hourly_dir`: Where the CAMS hourly files are located
+* `cams_climatology_dir`: Where the CAMS climatology files are located
+* `dem_dir`: Where the DEM files are located
+* `scl_dir`: Where the auxiliary scl maps files are located

#### Downloader

@@ -256,14 +325,16 @@ In addition these parameters are defined in the tool and can be used in brackets

For a Sentinel 2 product on tile 31TFJ:

-```
+```ini
base_url = /data/PRODUCTS
url_parameters_pattern_Sentinel2 = {base_url}/{mission}/{tile}
```

will be replaced by:

-```url_parameters_pattern_Sentinel2 = /data/PRODUCTS/Sentinel2/31TFJ```
+```ini
+url_parameters_pattern_Sentinel2 = /data/PRODUCTS/Sentinel2/31TFJ
+```

##### Creodias API

@@ -278,7 +349,7 @@ will be replaced by:

#### DemDownloader

-This section presents configuration for DEM downloader. It is not yet integrated into the sen2like workflow but can be
+This section presents the configuration for the DEM downloader. It is not yet integrated into the sen2like workflow but can be
used by itself.

* `download_if_unavailable`: Download DEM if needed when unavailable (`True` or `False`)
@@ -289,39 +360,40 @@ used by itself.
* `dem_product_name`: Name of the DEM product on server
* `dem_server_url`: Server URL where DEMs are retrieved

+*Note: No compatible dem_server_url is publicly available yet.*
+
#### Geometry

Define parameters for geometric correction.

-* `reference_band`= The reference band to use for geometric correction
+* `reference_band`: The reference band to be used for geometric correction
* `doMatchingCorrection`: Apply the matching correction (`True`, `False`)
* `doAssessGeometry`: Assess geometry (Band list separated by comma.)
-* `references_map`: Path to the reference json file containing reference images for tiles
-* `force_geometry_correction`: Do geometry correction even if product is refined
+* `references_map`: Path to the reference json file containing the reference image for each tile
+* `force_geometry_correction`: Do geometry correction even if product is refined (S2 mission)

#### Atmcor

-Atmospheric method to use.
+Atmospheric correction method to use.

* `use_sen2cor`: Activate sen2cor for Atmospheric correction (SMAC otherwise)
-* `sen2cor_path`: Path to sen2cor tool
+* `sen2cor_path`: Path to sen2cor tool command (L2A_Process.py)

#### Nbar

Define parameters for Nbar processing.

-* `nbar_methode`: Methode to get BRDF coefficients. Nowadays, available methode are : ROY, VJB
-* `vjb_coeff_matrice_dir`: If choose VJB methode, coefficient netcdf file directory path
+* `nbar_methode`: Method to get BRDF coefficients. Currently, available methods are: ROY, VJB
+* `vjb_coeff_matrice_dir`: If the VJB method is selected, directory path of the BRDF coefficients netcdf file

#### Fusion

-Define parameters for fusion processing.
+Define parameters for Fusion processing.

-* `predict_method` : Predic method to use (predict or composite using most recent valid pixels)
+* `predict_method`: Predict method to be used (predict or composite using most recent valid pixels)
* `predict_nb_products`: Number of products needed by predict method
-* `fusion_auto_check_band` : Band on witch apply fusion auto check
-* `fusion_auto_check_threshold` : (in [0,1]) Threshold of fusion auto check proportion diff.
-  Use to compute threshold mask.
+* `fusion_auto_check_band`: Band on which the fusion auto check is performed
+* `fusion_auto_check_threshold`: (in [0,1]) Threshold on fusion auto check difference. Used to generate FCM mask.

#### Stitching

@@ -331,23 +403,25 @@ Define parameters for stitching processing.

#### OutputFormat

-Define modifier for written image file.
+Define output format, gain and offset for image files.
+
+* `gain`: DN Quantification value of output image (DN to reflectance conversion)
+* `offset`: DN Offset to subtract from the output image (DN to reflectance conversion)
+* `output_format`: Format of the output image. Supported formats: COG, GTIFF (for geotiff), JPEG2000.

-* `gain`: Gain multplier for output image
-* `offset`: Offset to add to the output image
-* `output_format`: The format of the output image. Supported formats: COG, GTIFF (for geotiff), JPEG2000.
+*Note: DN to reflectance conversion: reflectance = (DN - offset) / gain*

#### COGoptions

* `interleave`: Interleave mode
* `internal_tiling`: Internal tiling resolution
* `internal_overviews`: Internal overviews sampling
-* `downsampling_levels_$RES$`: COG pyramides sampling level for $RES$ (ie: 10, 15...).
+* `downsampling_levels_$RES$`: COG pyramids sampling level for $RES$ (i.e. 10, 15, ...).
  This keyword can be present several times for multiple resolutions.
* `downsampling_levels`: Downsampling levels mode
-* `resampling_algo_MASK`: Resampling algorithm mask
-* `resampling_algo`: Resampling algorithm
+* `resampling_algo_MASK`: Resampling algorithm for masks
+* `resampling_algo`: Resampling algorithm for images
* `compression`: Compression mode
* `predictor`: Predictor value

@@ -365,51 +439,55 @@ Define parameters for multiprocessing in multi-tile-mode.

Define packaging parameters.
-* `quicklook_jpeg_quality`: Quality for outputs quicklooks
+* `quicklook_jpeg_quality`: JPEG quality for quicklooks
* `json_metadata`: Indicates if metadata are also written as json (`True` or `False`)

#### Runtime

-This section is overriden during runtime and contains backup of computed values. Modifying this section will have no
+This section is overridden during runtime and contains a backup of computed values. Modifying this section will have no
effect.

### Command line arguments

The help of the tool can be displayed with the command:

-`python sen2like\sen2like.py --help`
+```bash
+python sen2like.py --help
+```

```
-usage: sen2like.py [-h] [-v] [--refImage PATH] [--wd PATH] [--conf PATH]
-                   [--confParams STRLIST] [--bands STRLIST]
+usage: sen2like.py [-h] [--version] [--refImage PATH] [--wd PATH]
+                   [--conf PATH] [--confParams STRLIST] [--bands STRLIST]
                    [--no-run] [--intermediate-products] [--parallelize-bands]
                    [--debug] [--no-log-date]
-                   {product-mode,single-tile-mode,multi-tile-mode} ...
+                   {product-mode,single-tile-mode,multi-tile-mode,roi-based-mode}
+                   ...

positional arguments:
-  {product-mode,single-tile-mode,multi-tile-mode}
+  {product-mode,single-tile-mode,multi-tile-mode,roi-based-mode}
                        Operational mode
    product-mode        Process a single product
    single-tile-mode    Process all products on a MGRS tile
    multi-tile-mode     Process all products on a ROI
+    roi-based-mode      Process all products that fully contains an ROI. The
+                        ROI footprint must be FULLY INSIDE a MGRS tile.

optional arguments:
  -h, --help            show this help message and exit
-  -v, --version         show program's version number and exit
+  --version, -v         show program's version number and exit
  --refImage PATH       Reference image (use as geometric reference)
  --wd PATH             Working directory (default : /data/production/wd)
  --conf PATH           S2L_configuration file (Default:
                        SEN2LIKE_DIR/conf/S2L_config.ini)
  --confParams STRLIST  Overload parameter values (Default: None). Given as a
-                        "key=value" comma-separated list.Example: --confParams
-                        "doNbar=False,doSbaf=False"
-  --bands STRLIST       Bands to process as coma separated list (Default: ALL
-                        bands)
+                        "key=value" comma-separated list. Example:
+                        --confParams "doNbar=False,doSbaf=False"
+  --bands STRLIST       S2 bands to process as coma separated list (Default:
+                        ALL bands)
  --no-run              Do not start process and only list products (default:
                        False)
  --intermediate-products
                        Generate intermediate products (default: False)
-  --parallelize-bands
-                        Process bands in parallel (default: False)
+  --parallelize-bands   Process bands in parallel (default: False)

Debug arguments:
  --debug, -d           Enable Debug mode (default: False)
@@ -419,11 +497,13 @@

#### Product mode

-In product mode, a product is specified an processed by the tool.
+In product mode, a product is specified and processed by the tool.
The help of the product-mode can be displayed with the command: -`python sen2like\sen2like.py product-mode --help` +```bash +python sen2like.py product-mode --help +``` ``` usage: sen2like.py product-mode [-h] [-v] [--refImage PATH] [--wd PATH] @@ -462,17 +542,19 @@ Debug arguments: Example of command line: -`python sen2like.py product-mode /eodata/Sentinel-2/MSI/L1C/2017/01/03/S2A_MSIL1C_20170103T104432_N0204_R008_T31TFJ_20170103T104428.SAFE --wd -~/wd --tile 31TFJ --bands B04 -` +```bash +python sen2like.py product-mode /eodata/Sentinel-2/MSI/L1C/2017/01/03/S2A_MSIL1C_20170103T104432_N0204_R008_T31TFJ_20170103T104428.SAFE --wd ~/wd --tile 31TFJ --bands B04 +``` #### Single tile mode -In single-tile mode, a MGRS tile is specified an processed by the tool. +In single-tile mode, a MGRS tile is specified and processed by the tool. The help of the single-tile-mode can be displayed with the command: -`python sen2like\sen2like.py single-tile-mode --help` +```bash +python sen2like.py single-tile-mode --help +``` ``` usage: sen2like.py single-tile-mode [-h] [--start-date START_DATE] @@ -516,18 +598,20 @@ Debug arguments: Example of command line: -`python sen2like.py single-tile-mode 31TFJ --wd -~/wd --refImage /data/HLS/31TFJ/L2F_31TFJ_20170103_S2A_R008/L2F_31TFJ_20170103_S2A_R008_B04_10m.TIF -` +```bash +python sen2like.py single-tile-mode 31TFJ --wd ~/wd --refImage /data/HLS/31TFJ/L2F_31TFJ_20170103_S2A_R008/L2F_31TFJ_20170103_S2A_R008_B04_10m.TIF +``` #### Multi tile mode -In multi-tile mode, a geojson file is specified an processed by the tool. An example of geojson file containing tile +In multi-tile mode, a geojson file is specified and processed by the tool. An example of geojson file containing tile 31TFJ is located in `conf/tile_mgrs_31TFJ.json`. The help of the multi-tile-mode can be displayed with the command: -`python sen2like\sen2like.py multi-tile-mode --help` +```bash +python sen2like.py multi-tile-mode --help +``` ``` usage: sen2like.py multi-tile-mode [-h] [--start-date START_DATE] @@ -572,12 +656,65 @@ Debug arguments: Example of command line: -`python sen2like.py multi-tile-mode ROI_FILE --wd -~/wd --refImage /data/HLS/31TFJ/L2F_31TFJ_20170103_S2A_R008/L2F_31TFJ_20170103_S2A_R008_B04_10m.TIF -` +```bash +python sen2like.py multi-tile-mode ROI_FILE --wd ~/wd --refImage /data/HLS/31TFJ/L2F_31TFJ_20170103_S2A_R008/L2F_31TFJ_20170103_S2A_R008_B04_10m.TIF +``` + +#### ROI based mode + +In roi-based-mode, a geojson file is specified and processed by the tool. 
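+
+As an illustration, a minimal ROI file can be generated with a short Python sketch. The polygon coordinates and the file name below are hypothetical; see `conf/tile_mgrs_31TFJ.json` shipped with the tool for a known-good geojson example:
+
+```python
+import json
+
+# Hypothetical rectangular ROI (WGS84 lon/lat values); for roi-based-mode the
+# footprint must be fully inside a single MGRS tile.
+roi = {
+    "type": "FeatureCollection",
+    "features": [{
+        "type": "Feature",
+        "properties": {},
+        "geometry": {
+            "type": "Polygon",
+            "coordinates": [[
+                [4.90, 43.60], [5.10, 43.60], [5.10, 43.70],
+                [4.90, 43.70], [4.90, 43.60]
+            ]]
+        }
+    }]
+}
+
+# Write the ROI to a file that can be passed as the 'roi' positional argument
+with open("my_roi.json", "w") as f:
+    json.dump(roi, f, indent=2)
+```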
+ +The help of the roi-based-mode can be displayed with the command: + +```bash +python sen2like.py roi-based-mode --help +``` + +``` +usage: sen2like.py roi-based-mode [-h] [--tile TILE] [--start-date START_DATE] + [--end-date END_DATE] [--l2a] [--version] + [--refImage PATH] [--wd PATH] [--conf PATH] + [--confParams STRLIST] [--bands STRLIST] + [--no-run] [--intermediate-products] + [--parallelize-bands] [--debug] + [--no-log-date] + roi + +positional arguments: + roi Json file containing the ROI to process + +optional arguments: + -h, --help show this help message and exit + --tile TILE MGRS Tile Code : Force Processing of a specific tile + in case several MGRS tiles contain the ROI footprint + --start-date START_DATE + Beginning of period (format YYYY-MM-DD) + --end-date END_DATE End of period (format YYYY-MM-DD) + --l2a Processing level Level-2A for S2 products if set + (default: L1C) + --version, -v show program's version number and exit + --refImage PATH Reference image (use as geometric reference) + --wd PATH Working directory (default : /data/production/wd) + --conf PATH S2L_configuration file (Default: + SEN2LIKE_DIR/conf/S2L_config.ini) + --confParams STRLIST Overload parameter values (Default: None). Given as a + "key=value" comma-separated list. Example: + --confParams "doNbar=False,doSbaf=False" + --bands STRLIST S2 bands to process as coma separated list (Default: + ALL bands) + --no-run Do not start process and only list products (default: + False) + --intermediate-products + Generate intermediate products (default: False) + --parallelize-bands Process bands in parallel (default: False) + +Debug arguments: + --debug, -d Enable Debug mode (default: False) + --no-log-date Do no store date in log (default: False) +``` -## [Release notes](release-notes.md) +## [Release notes](./release-notes.md) ## License -[Apache License 2.0](LICENSE.txt) \ No newline at end of file +[Apache License 2.0](./LICENSE.txt) diff --git a/sen2like/conf/Sen2Like_GIPP.xsd b/sen2like/conf/Sen2Like_GIPP.xsd index 4f8030c..c95539e 100644 --- a/sen2like/conf/Sen2Like_GIPP.xsd +++ b/sen2like/conf/Sen2Like_GIPP.xsd @@ -33,21 +33,59 @@ - + + - - - + + + + + + + + + + + + + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + @@ -129,6 +167,7 @@ + diff --git a/sen2like/conf/config.ini b/sen2like/conf/config.ini index 63d0271..bff94bd 100644 --- a/sen2like/conf/config.ini +++ b/sen2like/conf/config.ini @@ -2,6 +2,7 @@ doStitching = True doGeometryKLT = True doToa = True +# doInterCalibration must be set to True to generate consistent S2A-S2B timeseries doInterCalibration = True doAtmcor = True doNbar = True @@ -23,17 +24,20 @@ dem_dir = /data/AUX_DATA/DEM scl_dir = /data/AUX_DATA/SCL_maps_2.10 [InputProductArchive] +# global InputProductArchive params coverage = 0.5 -# Local -base_url = /data/PRODUCTS cloud_cover = 11 + +################################################################## +# Local only +base_url = /data/PRODUCTS url_parameters_pattern_Sentinel2 = {base_url}/{mission}/{tile} url_parameters_pattern_Landsat8 = {base_url}/{mission}/{path}/{row} url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row} -# Creodias +################################################################## +# Creodias only ;base_url = https://finder.creodias.eu/resto/api/collections -;cloud_cover = 11 ;location_Landsat8 = path={path}&row={row} ;location_Landsat9 = path={path}&row={row} ;location_Sentinel2 = processingLevel={s2_processing_level}&productIdentifier=%25{tile}%25 @@ -42,6 +46,36 @@ 
url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row} ;cloud_cover_property = properties/cloudCover ;gml_geometry_property = properties/gmlgeometry +################################################################## +# Mixed archive sample: local landsat and remote S2 +# local landsat part +;base_url_landsat = /data/PRODUCTS +;url_parameters_pattern_Landsat8 = {base_url_landsat}/{mission}/{path}/{row} +;url_parameters_pattern_Landsat9 = {base_url_landsat}/{mission}/{path}/{row} + +# remote S2 part +;base_url_s2 = https://finder.creodias.eu/resto/api/collections +;location_Sentinel2 = processingLevel={s2_processing_level}&productIdentifier=%25{tile}%25 +;url_parameters_pattern = {base_url_s2}/{mission}/search.json?maxRecords=1000&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET +;thumbnail_property = properties/productIdentifier +;cloud_cover_property = properties/cloudCover +;gml_geometry_property = properties/gmlgeometry + +################################################################## +# Mixed archive sample 2: local S2 and remote landsat +# local S2 part +;base_url_s2 = /data/PRODUCTS +;url_parameters_pattern_Sentinel2 = {base_url_s2}/{mission}/{tile} + +# remote landsat part +;base_url_landsat = https://finder.creodias.eu/resto/api/collections +;location_Landsat8 = path={path}&row={row} +;location_Landsat9 = path={path}&row={row} +;url_parameters_pattern = {base_url_landsat}/{mission}/search.json?maxRecords=1000&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET +;thumbnail_property = properties/productIdentifier +;cloud_cover_property = properties/cloudCover +;gml_geometry_property = properties/gmlgeometry + [DemDownloader] download_if_unavailable = True @@ -86,7 +120,7 @@ reframe_margin = 50 [OutputFormat] gain = 10000 -offset = 0 +offset = 1000 output_format = COG [COGoptions] diff --git a/sen2like/conf/config.xml b/sen2like/conf/config.xml index 0cf39f9..09a6f76 100644 --- a/sen2like/conf/config.xml +++ b/sen2like/conf/config.xml @@ -25,22 +25,46 @@ 0.5 + 11 /data/PRODUCTS - 11 {base_url}/{mission}/{tile} {base_url}/{mission}/{path}/{row} {base_url}/{mission}/{path}/{row} - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + B04 @@ -68,7 +92,7 @@ 10000 - 0 + 1000 COG diff --git a/sen2like/docs/resources/miniconda_version.png b/sen2like/docs/resources/miniconda_version.png new file mode 100644 index 0000000..963e378 Binary files /dev/null and b/sen2like/docs/resources/miniconda_version.png differ diff --git a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.0.pdf b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.0.pdf deleted file mode 100644 index 6f5b789..0000000 Binary files a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.0.pdf and /dev/null differ diff --git a/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf new file mode 100644 index 0000000..bdc671f Binary files /dev/null and b/sen2like/docs/source/S2-PDGS-MPC-L2HF-PFS-v1.1.pdf differ diff --git a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.5.pdf b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.5.pdf deleted file mode 100644 index 7008feb..0000000 Binary files a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.5.pdf and /dev/null differ diff --git a/sen2like/docs/source/S2-SEN2LIKE-UM-V1.6.pdf 
b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.6.pdf
new file mode 100644
index 0000000..f88f48a
Binary files /dev/null and b/sen2like/docs/source/S2-SEN2LIKE-UM-V1.6.pdf differ
diff --git a/sen2like/docs/source/changelog.rst b/sen2like/docs/source/changelog.rst
index 8f95a23..c702e78 100644
--- a/sen2like/docs/source/changelog.rst
+++ b/sen2like/docs/source/changelog.rst
@@ -1,4 +1 @@
-Changelog
-==========
-
-Here will be the changelog of the version
\ No newline at end of file
+.. mdinclude:: ../../release-notes.md
diff --git a/sen2like/docs/source/conf.py b/sen2like/docs/source/conf.py
index 8e5d967..2299d2e 100644
--- a/sen2like/docs/source/conf.py
+++ b/sen2like/docs/source/conf.py
@@ -12,16 +12,22 @@
#
import os
import sys
+from datetime import date

sys.path.insert(0, os.path.abspath(os.path.join('..', '..', 'sen2like')))
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))

+# import version from sen2like
+# pylint: disable=wrong-import-position
+import version
+
# -- Project information -----------------------------------------------------
+# pylint: disable=invalid-name
project = 'Sen2Like'
-copyright = '2020, Telespazio'
author = 'Telespazio'
-version = __version__
+copyright = f'{date.today().year}, {author}'  # pylint: disable=redefined-builtin
+version = version.__version__
release = version

# -- General configuration ---------------------------------------------------
@@ -35,7 +41,7 @@
    'sphinx.ext.autosummary',
    'sphinx.ext.coverage',
    'sphinx_rtd_theme',
-    'm2r'
+    'm2r2'
]

# Add any paths that contain templates here, relative to this directory.
diff --git a/sen2like/release-notes.md b/sen2like/release-notes.md
index 15ec715..21bf6fa 100644
--- a/sen2like/release-notes.md
+++ b/sen2like/release-notes.md
@@ -1,5 +1,35 @@
# Sen2Like Release Notes

+## v4.1.1
+
+### Fix
+
+* Fix QI report (L2H/F_QUALITY.xml files) not being valid
+* Fix MEAN_DELTA_AZIMUTH calculation for QI report
+* Fix angles files extraction that led to random values in the tie_points file and non-reproducible results
+
+## v4.1.0
+
+### New features
+
+* Add ROI based mode
+* Sentinel-2 Collection-1 support
+* Generated product compliant with Collection-1 format
+* Possible mixed local/remote archive configuration
+* Use sen2like version as baseline and real production date time in product MTD and product name
+* Add some QI parameters
+* Support sen2like 3.0.2
+
+### Improvements
+
+* Refactor Packagers to remove some code duplication
+* Refactor Product MTD writer to remove some code duplication
+* Factorize diverse duplicated code
+* Move mask and angle file generation in a dedicated module (separation of concern)
+* Move some functions related to the configuration into the S2L_Config module
+* Code quality (WIP)
+* Reduce docker image size by approximately 46%
+
## v4.0.2

### Fix
diff --git a/sen2like/sen2like/atmcor/get_s2_angles.py b/sen2like/sen2like/atmcor/get_s2_angles.py
index c634cf1..500b9d3 100644
--- a/sen2like/sen2like/atmcor/get_s2_angles.py
+++ b/sen2like/sen2like/atmcor/get_s2_angles.py
@@ -1,214 +1,222 @@
-# -*- coding: utf-8 -*-
-import logging
-import re
-import sys
-import xml.parsers as pars
-from typing import Union
-from xml.dom import minidom
-
-import numpy as np
-from osgeo import gdal, osr
-
-log = logging.getLogger("Sen2Like")
-
-re_band = re.compile(r'B0?(\d{1,2})$')
-
-
-def get_angles_band_index(band: str) -> Union[int, None]:
-    """
-    Convert the band index into the S2 angles indexing convention
-    B1->B8 : indices from 0 to 7
-    B8A : index 8
-    B9 -> B12 : indices from 9 to 12
-    """
-    if
band == "B8A": - return 8 - band_index = re_band.match(band) - if band_index: - band_index = int(band_index.group(1)) - if 0 < band_index < 9: - return band_index - 1 - return band_index - return None - - -def from_values_list_to_array(selected_node): - col_step = selected_node.getElementsByTagName('COL_STEP')[0].childNodes[0].data - row_step = selected_node.getElementsByTagName('ROW_STEP')[0].childNodes[0].data - - values_list = selected_node.getElementsByTagName('Values_List')[0].getElementsByTagName('VALUES') - - # x_size, y_size , size of the matrix - x_size = len(values_list[0].childNodes[0].data.split()) - y_size = len(values_list) - - # Create np array of size (x_size,y_size) for sun zenith values : - arr = np.empty([x_size, y_size], np.float) - for j in range(0, y_size, 1): - a = np.asarray(values_list[j].childNodes[0].data.split(), np.float) - arr[j] = a - - return x_size, y_size, col_step, row_step, arr - - -def reduce_angle_matrix(x_size, y_size, a_dict): - # As S2 viewing zenith / azimuth matrix given for different detector - # As overlapping detector, build matrix including averaged values where - # several values from different detectors exist - # Input : - # - a : dictionary (detector, band_ud and array values) - # - x_size / y_size size of the matrix - # Output : - # ~ - the reduce matrix - M = np.zeros([x_size, y_size], np.float) - # print('input M :' + str(M[2][6])) - CPT = np.zeros([x_size, y_size], np.float) - for k, u in list(a_dict.items()): - for i in range(0, x_size, 1): - for j in range(0, x_size, 1): - A = u["Values"] - if A[i][j] == A[i][j]: # test if value is not nan - M[i][j] = A[i][j] + M[i][j] - CPT[i][j] += 1 - # if i == 2 and j == 6 : - # print str(M[i][j])+' '+str(A[i][j]) - - N = np.divide(M, CPT, where=(CPT != 0)) - N[N == 0] = np.nan - return N - - -def extract_sun_angle(src_file, dst_file, angle_type): - # Open the 'MTD_TL.xml' file, and read information in - # Depending on angle_type value, {'Zenith' , 'Azimuth' } - # select in the corresponding xml section - # save image file in dst_file - do not apply resampling - - xml_tl_file = src_file - try: - dom = minidom.parse(xml_tl_file) - except pars.expat.ExpatError: - sys.exit(' Invalid XML TL File') - - # gdal parameter : - NoData_value = -9999 - - # Load xmlf file and retrieve projection parameter : - node_name = 'Tile_Geocoding' # Level-1C / Level-2A ? - geocoding_node = dom.getElementsByTagName(node_name)[0] - epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data - geo_position = geocoding_node.getElementsByTagName('Geoposition')[0] - ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data - uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data - - # Call gdalsrs info to generate wkt for the projection - # Replaced by gdal python api: - srs = osr.SpatialReference() - srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', ''))) - wkt = srs.ExportToWkt() - - # Load xml file and extract parameter for sun zenith : - node_name = 'Sun_Angles_Grid' # Level-1C / Level-2A ? - sun_angle_node = dom.getElementsByTagName(node_name)[0] - - selected_node = sun_angle_node.getElementsByTagName(angle_type)[0] - - x_size, y_size, col_step, row_step, arr = from_values_list_to_array(selected_node) - - # scale between -180 and 180 deg. 
- if arr.max() > 180.0: - arr[arr > 180] = arr[arr > 180] - 360 - - # Create gdal dataset - x_res = np.int(x_size) - y_res = np.int(y_size) - - x_pixel_size = np.int(col_step) - y_pixel_size = np.int(row_step) - - log.debug(' Save in {}'.format(dst_file)) - target_ds = gdal.GetDriverByName('GTiff').Create(dst_file, x_res, y_res, 1, gdal.GDT_Int16) - target_ds.SetGeoTransform((np.int(ulx), x_pixel_size, 0, np.int(uly), 0, -y_pixel_size)) - band = target_ds.GetRasterBand(1) - band.SetNoDataValue(NoData_value) - band.SetDescription('Solar_' + angle_type) - band.WriteArray((arr * 100).astype(np.int16), 0, 0) # int16 with scale factor 100 - target_ds.SetProjection(wkt) - - -def extract_viewing_angle(src_file, dst_file, angle_type): - # Access to MTL and extract vieing angles depending on the angletype - # Return the list of files that have been generated, out_list - out_list = [] # Store the path of all outputs - log.debug('extact viewing angle') - xml_tl_file = src_file - try: - dom = minidom.parse(xml_tl_file) - except pars.expat.ExpatError: - sys.exit(' Invalid XML TL File') - - # gdal parameter : - NoData_value = -9999 - - # Load xmlf file and retrieve projection parameter : - node_name = 'Tile_Geocoding' # Level-1C / Level-2A? - geocoding_node = dom.getElementsByTagName(node_name)[0] - epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data - geo_position = geocoding_node.getElementsByTagName('Geoposition')[0] - ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data - uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data - # Call gdalsrs info to generate wkt for the projection : - # Replaced by gdal python api: - srs = osr.SpatialReference() - srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', ''))) - wkt = srs.ExportToWkt() - - # Load xml file and extract parameter for sun zenith : - node_name = 'Viewing_Incidence_Angles_Grids' # Level-1C / Level-2A ? - viewing_angle_node = dom.getElementsByTagName(node_name) - v_dico = {} - for cpt in range(0, len(viewing_angle_node), 1): - band_id = viewing_angle_node[cpt].attributes["bandId"].value - detector = viewing_angle_node[cpt].attributes["detectorId"].value - selected_node = viewing_angle_node[cpt].getElementsByTagName(angle_type)[0] - [x_size, y_size, col_step, row_step, arr] = from_values_list_to_array(selected_node) - v_dico.update({str(cpt): {"Band_id": str(band_id), - "Detector": str(detector), - "Values": arr}}) - - for rec in range(0, 13, 1): - dic = v_dico.copy() - a = {k: v for k, v in list(dic.items()) if v["Band_id"] == str(rec)} - arr = reduce_angle_matrix(x_size, y_size, a) - - # scale between -180 and 180 deg. 
- if arr.max() > 180.0: - arr[arr > 180] = arr[arr > 180] - 360 - - # Create gdal dataset - x_res = np.int(x_size) - y_res = np.int(y_size) - - x_pixel_size = np.int(col_step) - y_pixel_size = np.int(row_step) - - # Decoding of band number : - # CF : https: // earth.esa.int / web / sentinel / user - guides / sentinel - 2 - msi / resolutions / radiometric - # Band 8A <=> Band 9 in the mtl - - dst_file_bd = dst_file.replace('.tif', '_band_' + str(rec + 1) + '.tif') - out_list.append(dst_file_bd) - log.debug(' Save in {}'.format(dst_file_bd)) - target_ds = gdal.GetDriverByName('GTiff').Create(dst_file_bd, x_res, y_res, 1, gdal.GDT_Int16) - target_ds.SetGeoTransform((np.int(ulx), x_pixel_size, 0, np.int(uly), 0, -y_pixel_size)) - band = target_ds.GetRasterBand(1) - band.SetNoDataValue(NoData_value) - band.SetDescription('Viewing_' + angle_type + '_band_' + str(rec + 1)) # This sets the band name! - target_ds.GetRasterBand(1).WriteArray((arr * 100).astype(np.int16), 0, 0) # int16 with scale factor 100 - target_ds.SetProjection(wkt) - band = None - target_ds = None - arr = None - a = None - - return out_list +# -*- coding: utf-8 -*- +import logging +import re +import sys +import xml.parsers as pars +from typing import Union +from xml.dom import minidom + +import numpy as np +from osgeo import gdal, osr + +log = logging.getLogger("Sen2Like") + +re_band = re.compile(r'B0?(\d{1,2})$') + + +def get_angles_band_index(band: str) -> Union[int, None]: + """ + Convert the band index into the S2 angles indexing convention + B1->B8 : indices from 0 to 7 + B8A : index 8 + B9 -> B12 : indices from 9 to 12 + """ + if band == "B8A": + return 8 + band_index = re_band.match(band) + if band_index: + band_index = int(band_index.group(1)) + if 0 < band_index < 9: + return band_index - 1 + return band_index + return None + + +def from_values_list_to_array(selected_node): + col_step = selected_node.getElementsByTagName('COL_STEP')[0].childNodes[0].data + row_step = selected_node.getElementsByTagName('ROW_STEP')[0].childNodes[0].data + + values_list = selected_node.getElementsByTagName('Values_List')[0].getElementsByTagName('VALUES') + + # x_size, y_size , size of the matrix + x_size = len(values_list[0].childNodes[0].data.split()) + y_size = len(values_list) + + # Create np array of size (x_size,y_size) for sun zenith values : + arr = np.empty([x_size, y_size], float) + for j in range(0, y_size, 1): + a = np.asarray(values_list[j].childNodes[0].data.split(), float) + arr[j] = a + + return x_size, y_size, col_step, row_step, arr + + +def reduce_angle_matrix(x_size, y_size, a_dict): + # As S2 viewing zenith / azimuth matrix given for different detector + # As overlapping detector, build matrix including averaged values where + # several values from different detectors exist + # Input : + # - a : dictionary (detector, band_ud and array values) + # - x_size / y_size size of the matrix + # Output : + # ~ - the reduce matrix + M = np.zeros([x_size, y_size], float) + # print('input M :' + str(M[2][6])) + CPT = np.zeros([x_size, y_size], int) + for k, u in list(a_dict.items()): + for i in range(0, x_size, 1): + for j in range(0, x_size, 1): + A = u["Values"] + if A[i][j] == A[i][j]: # test if value is not nan + M[i][j] = A[i][j] + M[i][j] + CPT[i][j] += 1 + # if i == 2 and j == 6 : + # print str(M[i][j])+' '+str(A[i][j]) + + N = np.divide(M, CPT) + + # keep it commented for history + # before, the division had a where clause CPT!=0 + # but it was not working well so we remove it + # and then the N matrix have the good final 
result
+    #N[N == 0] = np.nan
+
+    return N
+
+
+def extract_sun_angle(src_file, dst_file, angle_type):
+    # Open the 'MTD_TL.xml' file, and read information in
+    # Depending on angle_type value, {'Zenith' , 'Azimuth' }
+    # select in the corresponding xml section
+    # save image file in dst_file - do not apply resampling
+
+    xml_tl_file = src_file
+    try:
+        dom = minidom.parse(xml_tl_file)
+    except pars.expat.ExpatError:
+        sys.exit(' Invalid XML TL File')
+
+    # gdal parameter :
+    NoData_value = -9999
+
+    # Load xml file and retrieve projection parameter :
+    node_name = 'Tile_Geocoding'  # Level-1C / Level-2A ?
+    geocoding_node = dom.getElementsByTagName(node_name)[0]
+    epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data
+    geo_position = geocoding_node.getElementsByTagName('Geoposition')[0]
+    ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data
+    uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data
+
+    # Call gdalsrs info to generate wkt for the projection
+    # Replaced by gdal python api:
+    srs = osr.SpatialReference()
+    srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', '')))
+    wkt = srs.ExportToWkt()
+
+    # Load xml file and extract parameter for sun zenith :
+    node_name = 'Sun_Angles_Grid'  # Level-1C / Level-2A ?
+    sun_angle_node = dom.getElementsByTagName(node_name)[0]
+
+    selected_node = sun_angle_node.getElementsByTagName(angle_type)[0]
+
+    x_size, y_size, col_step, row_step, arr = from_values_list_to_array(selected_node)
+
+    # scale between -180 and 180 deg.
+    if arr.max() > 180.0:
+        arr[arr > 180] = arr[arr > 180] - 360
+
+    # Create gdal dataset
+    x_res = int(x_size)
+    y_res = int(y_size)
+
+    x_pixel_size = int(col_step)
+    y_pixel_size = int(row_step)
+
+    log.debug(' Save in {}'.format(dst_file))
+    target_ds = gdal.GetDriverByName('GTiff').Create(dst_file, x_res, y_res, 1, gdal.GDT_Int16)
+    target_ds.SetGeoTransform((int(ulx), x_pixel_size, 0, int(uly), 0, -y_pixel_size))
+    band = target_ds.GetRasterBand(1)
+    band.SetNoDataValue(NoData_value)
+    band.SetDescription('Solar_' + angle_type)
+    band.WriteArray((arr * 100).astype(np.int16), 0, 0)  # int16 with scale factor 100
+    target_ds.SetProjection(wkt)
+    band = None
+    target_ds = None
+
+
+def extract_viewing_angle(src_file, dst_file, angle_type):
+    # Access to MTL and extract viewing angles depending on the angle type
+    # Return the list of files that have been generated, out_list
+    out_list = []  # Store the path of all outputs
+    log.debug('extract viewing angle')
+    xml_tl_file = src_file
+    try:
+        dom = minidom.parse(xml_tl_file)
+    except pars.expat.ExpatError:
+        sys.exit(' Invalid XML TL File')
+
+    # gdal parameter :
+    NoData_value = -9999
+
+    # Load xml file and retrieve projection parameter :
+    node_name = 'Tile_Geocoding'  # Level-1C / Level-2A?
+    geocoding_node = dom.getElementsByTagName(node_name)[0]
+    epsg_code = geocoding_node.getElementsByTagName('HORIZONTAL_CS_CODE')[0].childNodes[0].data
+    geo_position = geocoding_node.getElementsByTagName('Geoposition')[0]
+    ulx = geo_position.getElementsByTagName('ULX')[0].childNodes[0].data
+    uly = geo_position.getElementsByTagName('ULY')[0].childNodes[0].data
+    # Call gdalsrs info to generate wkt for the projection :
+    # Replaced by gdal python api:
+    srs = osr.SpatialReference()
+    srs.ImportFromEPSG(int(epsg_code.replace('EPSG:', '')))
+    wkt = srs.ExportToWkt()
+
+    # Load xml file and extract parameters for the viewing angles:
+    node_name = 'Viewing_Incidence_Angles_Grids'  # Level-1C / Level-2A ?
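+    # Each 'Viewing_Incidence_Angles_Grids' node holds one angle grid for a single
+    # (bandId, detectorId) pair; detector footprints overlap across the swath, so the
+    # per-band grids collected below are averaged where they overlap by reduce_angle_matrix().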
+    viewing_angle_node = dom.getElementsByTagName(node_name)
+    v_dico = {}
+    for cpt in range(0, len(viewing_angle_node), 1):
+        band_id = viewing_angle_node[cpt].attributes["bandId"].value
+        detector = viewing_angle_node[cpt].attributes["detectorId"].value
+        selected_node = viewing_angle_node[cpt].getElementsByTagName(angle_type)[0]
+        [x_size, y_size, col_step, row_step, arr] = from_values_list_to_array(selected_node)
+        v_dico['_'.join([band_id, detector])] = {"Band_id": str(band_id),
+                                                 "Detector": str(detector),
+                                                 "Values": arr}
+
+    for rec in range(0, 13, 1):
+        dic = v_dico.copy()
+        a = {k: v for k, v in dic.items() if v["Band_id"] == str(rec)}
+        arr = reduce_angle_matrix(x_size, y_size, a)
+
+        # scale between -180 and 180 deg.
+        if arr.max() > 180.0:
+            arr[arr > 180] = arr[arr > 180] - 360
+
+        # Create gdal dataset
+        x_res = int(x_size)
+        y_res = int(y_size)
+
+        x_pixel_size = int(col_step)
+        y_pixel_size = int(row_step)
+
+        # Decoding of band number :
+        # CF : https://earth.esa.int/web/sentinel/user-guides/sentinel-2-msi/resolutions/radiometric
+        # Band 8A <=> Band 9 in the mtl
+
+        dst_file_bd = dst_file.replace('.tif', '_band_' + str(rec + 1) + '.tif')
+        out_list.append(dst_file_bd)
+        log.debug(' Save in {}'.format(dst_file_bd))
+        target_ds = gdal.GetDriverByName('GTiff').Create(dst_file_bd, x_res, y_res, 1, gdal.GDT_Int16)
+        target_ds.SetGeoTransform((int(ulx), x_pixel_size, 0, int(uly), 0, -y_pixel_size))
+        band = target_ds.GetRasterBand(1)
+        band.SetNoDataValue(NoData_value)
+        band.SetDescription('Viewing_' + angle_type + '_band_' + str(rec + 1))  # This sets the band name!
+        target_ds.GetRasterBand(1).WriteArray((arr * 100).astype(np.int16), 0, 0)  # int16 with scale factor 100
+        target_ds.SetProjection(wkt)
+        band = None
+        target_ds = None
+        arr = None
+        a = None
+
+    return out_list
diff --git a/sen2like/sen2like/core/QI_MTD/QIreport.py b/sen2like/sen2like/core/QI_MTD/QIreport.py
index 8763964..726b1ef 100644
--- a/sen2like/sen2like/core/QI_MTD/QIreport.py
+++ b/sen2like/sen2like/core/QI_MTD/QIreport.py
@@ -6,30 +6,42 @@
import logging
import os

-from core.QI_MTD.generic_writer import remove_namespace, chg_elm_with_tag, change_elm, create_child, MtdWriter
+from core.QI_MTD.generic_writer import remove_namespace, chg_elm_with_tag, change_elm, create_child, XmlWriter
from core.QI_MTD.mtd import metadata
-from version import __version__
+import version

log = logging.getLogger('Sen2Like')


-class QiWriter(MtdWriter):
+class QiWriter(XmlWriter):

-    def __init__(self, backbone_path: str, init_QI_path: str = None, H_F='H', outfile: str = None):
-        super().__init__(backbone_path, init_QI_path, H_F)
+    def __init__(self, backbone_path: str, init_qi_path: str = None, H_F='H', outfile: str = None):
+        """
+        Init L2H/F_QUALITY.xml writer.
+        When `init_qi_path` is given, the L2H/F_QUALITY.xml result inherits values from the init_qi_path content
+        if not recomputed by S2L.
+        Args:
+            backbone_path (str): L2H/F_QUALITY.xml file template
+            init_qi_path (str): L2A_QUALITY.xml file path, can be `None`
+            H_F (str): type of the product (H/F)
+            outfile (str): L2H/F_QUALITY.xml output file path
+        """
+        super().__init__(backbone_path, init_qi_path, H_F)
        self.outfile = outfile

    def manual_replaces(self, product):

-        # Saving all values which are present in the input QI report
-        self.feed_values_dict()
+        # Saving all values which are present in the input QI report (L2A_QUALITY.xml) if any.
diff --git a/sen2like/sen2like/core/QI_MTD/QIreport.py b/sen2like/sen2like/core/QI_MTD/QIreport.py
index 8763964..726b1ef 100644
--- a/sen2like/sen2like/core/QI_MTD/QIreport.py
+++ b/sen2like/sen2like/core/QI_MTD/QIreport.py
@@ -6,30 +6,42 @@
 import logging
 import os
 
-from core.QI_MTD.generic_writer import remove_namespace, chg_elm_with_tag, change_elm, create_child, MtdWriter
+from core.QI_MTD.generic_writer import remove_namespace, chg_elm_with_tag, change_elm, create_child, XmlWriter
 from core.QI_MTD.mtd import metadata
-from version import __version__
+import version
 
 log = logging.getLogger('Sen2Like')
 
 
-class QiWriter(MtdWriter):
+class QiWriter(XmlWriter):
 
-    def __init__(self, backbone_path: str, init_QI_path: str = None, H_F='H', outfile: str = None):
-        super().__init__(backbone_path, init_QI_path, H_F)
+    def __init__(self, backbone_path: str, init_qi_path: str = None, H_F='H', outfile: str = None):
+        """
+        Init the L2H/F_QUALITY.xml writer.
+        When `init_qi_path` is given, the L2H/F_QUALITY.xml result inherits the values of the
+        init_qi_path content that are not recomputed by S2L.
+        Args:
+            backbone_path (str): L2H/F_QUALITY.xml file template
+            init_qi_path (str): L2A_QUALITY.xml file path, can be `None`
+            H_F (str): type of the product (H/F)
+            outfile (str): L2H/F_QUALITY.xml output file path
+        """
+        super().__init__(backbone_path, init_qi_path, H_F)
         self.outfile = outfile
 
     def manual_replaces(self, product):
 
-        # Saving all values which are present in the input QI report
-        self.feed_values_dict()
+        # Saving all values which are present in the input QI report (L2A_QUALITY.xml) if any.
+        # This is done to retrieve all values from init_qi_path (input_xml_path in XmlWriter)
+        # that are not in metadata.qi (computed by S2L) and put them in metadata.qi
+        self._feed_values_dict()
 
         # Replace all 'value' nodes from mtd dict
         self._replace_values(metadata.qi)
 
-        file_version = str(int(__version__.replace('.', '')))  # Int because it is specified in the .xsd
-        chg_elm_with_tag(self.root_out, tag='version', new_value=file_version)
-        chg_elm_with_tag(self.root_out, tag='File_Version', new_value=file_version)
+        chg_elm_with_tag(self.root_out, tag='version', new_value=version.baseline_dotted)
+        chg_elm_with_tag(self.root_out, tag='File_Version', new_value=version.baseline)
+        chg_elm_with_tag(self.root_out, tag='Creator_Version', new_value=version.__version__)
 
         # L2A_Quality_Header
         # ------------------
@@ -42,7 +54,7 @@ def manual_replaces(self, product):
 
         # Data_Block
         # ----------
-        report_creation_date = dt.datetime.strftime(dt.datetime.now(), '%Y-%m-%dT%H:%M:%SZ')
+        report_creation_date = dt.datetime.strftime(dt.datetime.utcnow(), '%Y-%m-%dT%H:%M:%SZ')
         change_elm(self.root_out, rpath='./Data_Block/report', attr_to_change='date', new_value=report_creation_date)
 
         chg_elm_with_tag(self.root_out, tag='value', new_value=metadata.qi.get('MEAN', 'None'),
@@ -69,11 +81,11 @@ def manual_replaces(self, product):
         change_elm(self.root_out, rpath='./Data_Block/report/checkList/item', attr_to_change='name',
                    new_value=metadata.mtd.get('granule_{}_name'.format(self.H_F)))
 
-    def feed_values_dict(self):
+    def _feed_values_dict(self):
         """
         Function only used by the QI report writer
-        Reads the input QI report if it exists, and saves all 'value' nodes text into the metadata.qi dictionary
-        :return:
+        Reads the input QI report if it exists, and puts all 'value' node texts into the
+        metadata.qi dictionary if they are not already there.
         """
         if self.root_in is not None:
             for elem in self.root_in.iter('*'):
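# A wiring sketch (editorial example, not part of the patch) for QiWriter:
# the backbone path matches the "bb_QIH_path" entry declared in mtd.py below;
# the input and output paths are hypothetical, and the final write/validate
# step is assumed to live in the calling packager code, which is not shown.
qi_writer = QiWriter('xml_backbones/L2H_QUALITY_backbone.xml',
                     init_qi_path='L2A_QUALITY.xml',  # values inherited if not recomputed
                     H_F='H',
                     outfile='L2H_QUALITY.xml')
qi_writer.manual_replaces(product)  # fill the template from metadata.qi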
diff --git a/sen2like/sen2like/core/QI_MTD/generic_writer.py b/sen2like/sen2like/core/QI_MTD/generic_writer.py
index ffc8d48..327a728 100644
--- a/sen2like/sen2like/core/QI_MTD/generic_writer.py
+++ b/sen2like/sen2like/core/QI_MTD/generic_writer.py
@@ -1,7 +1,7 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 # G. Cavaro (TPZ-F) 2020
-
+import abc
 import copy
 import json
 import logging
@@ -20,44 +20,49 @@
 from grids import grids
 
+from core.products.product import S2L_Product
+
 log = logging.getLogger('Sen2Like')
 
 
-class MtdWriter:
+class XmlWriter(abc.ABC):
     """
     Generic xml writer.
     """
 
-    def __init__(self, backbone_path: str, init_MTD_path: Union[str, None], H_F: str):
+    def __init__(self, backbone_path: str, input_xml_path: Union[str, None], H_F: str):
         """
+        Init 'self.root_out' from the given 'backbone_path' and fill it with the 'input_xml_path'
+        content when it is given and is xml.
+        'self.root_in' is initialized with the 'input_xml_path' content if it is given and is xml.
+
         :param backbone_path: path of the .xml backbone
-        :param init_MTD_path: path of the .xml file of the input product, if exists. Can be None
+        :param input_xml_path: path of the .xml file of the input product, if it exists.
+            Can be None
         :param H_F: Product level (H or F)
         """
         self.root_in = None
         backbone_path = os.path.join(os.path.dirname(__file__), backbone_path)
         if not os.path.exists(backbone_path):
-            log.error('MTD backbone {} does not exist'.format(backbone_path))
+            log.error('MTD backbone %s does not exist', backbone_path)
             return
-        if init_MTD_path and not os.path.exists(init_MTD_path):
-            log.error('Input product MTD {} does not exist'.format(init_MTD_path))
+        if input_xml_path and not os.path.exists(input_xml_path):
+            log.error('Input product MTD %s does not exist', input_xml_path)
             return
 
         self.backbone_path = backbone_path
-        self.init_MTD_path = init_MTD_path
+        self.input_xml_path = input_xml_path
 
         try:
             tree_bb = ElementTree.parse(backbone_path)  # Tree backbone for the output file. Will not be changed
             self.root_bb = tree_bb.getroot()
-            if init_MTD_path and not init_MTD_path.endswith('.txt'):
-                tree_in = ElementTree.parse(init_MTD_path)  # Tree of the input mtd (S2 MTD.xml, L2A_QI_report.xml)
+            if input_xml_path and not input_xml_path.endswith('.txt'):
+                tree_in = ElementTree.parse(input_xml_path)  # Tree of the input mtd (S2 MTD.xml, L2A_QI_report.xml)
                 self.root_in = tree_in.getroot()
             else:
                 self.root_in = None
         except pars.expat.ExpatError as err:
-            logging.error("Error during parsing of MTD product file: %s" % backbone_path)
+            logging.error("Error during parsing of MTD product file: %s", backbone_path)
             logging.error(err)
             sys.exit(-1)
 
@@ -66,8 +71,14 @@ def __init__(self, backbone_path: str, init_MTD_path: Union[str, None], H_F: str):
 
         self.H_F = H_F  # Product level (H or F)
 
-    def manual_replaces(self, product):
-        pass
+    @abc.abstractmethod
+    def manual_replaces(self, product: S2L_Product):
+        """Implementations make here all the replacements in the template ('root_bb') needed to fill the final document
+
+        Args:
+            product (S2L_Product): Product used to fill the template
+        """
 
     def remove_children(self, root, tag: str = '', attrs: dict = None, exceptions: list = None):
         """
@@ -383,8 +394,8 @@ def create_child(root: Element, rpath: str, tag: str, text: str = None, attribs:
     parent_elm = find_element_by_path(root, rpath)
 
     if len(parent_elm) > 1 or len(parent_elm) == 0:
-        log.warning('(fn create_child) Multiple or 0 elements found with this path {}'.format(rpath) +
-                    'Will not create element under')
+        log.warning('(fn create_child) Multiple or 0 elements found with this path %s '
+                    'Will not create element under', rpath)
 
     child = xml.etree.ElementTree.SubElement(parent_elm[0], tag, attrib=attribs)
     child.text = text
@@ -406,12 +417,13 @@ def copy_elements(elements_to_copy: list, root_in, root_out, root_bb=None):
         out_elems = find_element_by_path(root_out, elem_path)
         ini_elems = find_element_by_path(root_in, elem_path)
         if len(out_elems) == 0 or len(ini_elems) == 0:
-            log.warning('(fn copy_elements) No matching elements found for {}'.format(elem_path))
+            log.warning('(fn copy_elements) No matching elements found for %s', elem_path)
             continue
         if len(out_elems) == len(ini_elems):
             if len(out_elems) > 1:
-                log.warning('(fn copy_elements) multiple matching elements found for {}'.format(elem_path) +
-                            'They will be copied in the encountered order')
+                log.warning(
+                    '(fn copy_elements) multiple matching elements found for %s They will be copied in the encountered order',
+                    elem_path)
             for out_elem, ini_elem in zip(out_elems, ini_elems):
                 parent = getParentObjectNode(root_out, out_elem)
                 idx = get_idx(parent, out_elem)
@@ -485,4 +497,4 @@ def write_json(xml_file: str):
         xml_content = xmltodict.parse(p_xml.read())
     with open(json_file, 'w') as fp:
         json.dump(xml_content, fp, indent=4)
-    log.info('Json file writed: {}'.format(json_file))
+    log.info('Json file written: %s', json_file)
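# A minimal subclass sketch (editorial example, not part of the patch)
# showing the shape the new abc-based XmlWriter expects; `DummyWriter` and
# its backbone path are hypothetical, and the real implementations follow
# in mtd_writers.py below.
class DummyWriter(XmlWriter):

    def __init__(self, input_xml_path, H_F='H'):
        super().__init__('xml_backbones/some_backbone.xml', input_xml_path, H_F)

    def manual_replaces(self, product):
        # concrete writers fill self.root_out from the product, e.g.:
        chg_elm_with_tag(self.root_out, tag='PROCESSING_LEVEL',
                         new_value=f'Level-2{self.H_F}')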
diff --git a/sen2like/sen2like/core/QI_MTD/mtd.py b/sen2like/sen2like/core/QI_MTD/mtd.py
index 6a32dd9..2f65b20 100644
--- a/sen2like/sen2like/core/QI_MTD/mtd.py
+++ b/sen2like/sen2like/core/QI_MTD/mtd.py
@@ -36,8 +36,13 @@ def clear(self):
         self.qi = {"COREGISTRATION_BEFORE_CORRECTION": "NONE",
                    "SKEW": "NONE",
                    "KURTOSIS": "NONE",
+                   "REF_IMAGE": "NONE",
                    "MEAN": "NONE",
+                   'MEAN_X': "NONE",
+                   'MEAN_Y': "NONE",
                    "STD": "NONE",
+                   'STD_X': "NONE",
+                   'STD_Y': "NONE",
                    "RMSE": "NONE",
                    "NB_OF_POINTS": "NONE",
                    "MEAN_DELTA_AZIMUTH": "NONE",
@@ -46,22 +51,14 @@ def clear(self):
                    }
 
         self.hardcoded_values = {"s2_struct_xml": "xml_backbones/S2_folder_backbone.xml",
-                                 "bb_S2F_product": "xml_backbones/MTD_MSIL2F_S2.xml",
-                                 "bb_S2H_product": "xml_backbones/MTD_MSIL2H_S2.xml",
-                                 "bb_L8F_product": "xml_backbones/MTD_OLIL2F_L8.xml",
-                                 "bb_L8H_product": "xml_backbones/MTD_OLIL2H_L8.xml",
-                                 "bb_S2F_tile": "xml_backbones/MTD_TL_L2F_S2.xml",
-                                 "bb_S2H_tile": "xml_backbones/MTD_TL_L2H_S2.xml",
-                                 "bb_L8F_tile": "xml_backbones/MTD_TL_L2F_L8.xml",
-                                 "bb_L8H_tile": "xml_backbones/MTD_TL_L2H_L8.xml",
-                                 "bb_QIH_path": "xml_backbones/L2H_QI_Report_backbone.xml",
-                                 "bb_QIF_path": "xml_backbones/L2F_QI_Report_backbone.xml",
+                                 "bb_QIH_path": "xml_backbones/L2H_QUALITY_backbone.xml",
+                                 "bb_QIF_path": "xml_backbones/L2F_QUALITY_backbone.xml",
                                  "product_mtd_xsd": "xsd_files/S2-PDGS-TAS-DI-PSD-V14.5_Schema/S2_User_Product_Level-2A_Metadata.xsd",
                                  "product_tl_xsd": "xsd_files/S2-PDGS-TAS-DI-PSD-V14.5_Schema/S2_PDI_Level-2A_Tile_Metadata.xsd",
-                                 "product_QIH_xsd": "xsd_files/L2H_QI_Report.xsd",
-                                 "product_QIF_xsd": "xsd_files/L2F_QI_Report.xsd",
+                                 "product_QIH_xsd": "xsd_files/L2H_QUALITY.xsd",
+                                 "product_QIF_xsd": "xsd_files/L2F_QUALITY.xsd",
                                  "L8_absolute_orbit": "000000",
                                  "PDGS": "9999",
                                  "L8_archiving_center": "ZZZ_",
diff --git a/sen2like/sen2like/core/QI_MTD/mtd_writers.py b/sen2like/sen2like/core/QI_MTD/mtd_writers.py
index bbbc586..684f328 100644
--- a/sen2like/sen2like/core/QI_MTD/mtd_writers.py
+++ b/sen2like/sen2like/core/QI_MTD/mtd_writers.py
@@ -1,26 +1,64 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 # G.
Cavaro (TPZ-F) 2020 - -import datetime as dt +import abc import logging import math import os import re -from osgeo import gdal +from datetime import datetime + import numpy as np +from osgeo import gdal from shapely.wkt import loads -from core.QI_MTD.generic_writer import MtdWriter, chg_elm_with_tag, change_elm, copy_children, create_child, \ +import version +from core.QI_MTD.generic_writer import XmlWriter, chg_elm_with_tag, change_elm, copy_children, create_child, \ copy_elements, search_db, rm_elm_with_tag, find_element_by_path from core.QI_MTD.mtd import metadata from core.S2L_config import config +from core.products.product import S2L_Product log = logging.getLogger('Sen2Like') +# XPATH constant +L2A_TILE_ID_PATH = './General_Info/L2A_TILE_ID' +L1_TILE_ID_PATH = './General_Info/L1_TILE_ID' +TILE_ID_PATH = './General_Info/TILE_ID' +GRANULE_PATH = './General_Info/Product_Info/Product_Organisation/Granule_List/Granule' +GIPP_LIST_PATH = './Auxiliary_Data_Info/GIPP_List' +QUALITY_INDICATOR_PATH = './Quality_Indicators_Info' +IMAGE_CONTENT_QI_PATH = './Quality_Indicators_Info/Image_Content_QI' +PIXEL_LEVEL_QI_PATH = './Quality_Indicators_Info/Pixel_Level_QI' + + +_template_dict = { + "H": { + "S2": { + "product": "xml_backbones/MTD_MSIL2H_S2.xml", + "tile": "xml_backbones/MTD_TL_L2H_S2.xml" + }, + "Landsat": { + "product": "xml_backbones/MTD_OLIL2H_L8.xml", + "tile": "xml_backbones/MTD_TL_L2H_L8.xml" + } + }, + "F": { + "S2": { + "product": "xml_backbones/MTD_MSIL2F_S2.xml", + "tile": "xml_backbones/MTD_TL_L2F_S2.xml" + }, + "Landsat": { + "product": "xml_backbones/MTD_OLIL2F_L8.xml", + "tile": "xml_backbones/MTD_TL_L2F_L8.xml" + } + } +} -class MTD_writer_S2(MtdWriter): +class S2LProductMtdWriter(XmlWriter, abc.ABC): + """Abstract Product level MTD writer + """ IMAGE_FORMAT = { 'COG': 'GEOTIFF', @@ -28,11 +66,97 @@ class MTD_writer_S2(MtdWriter): 'JPEG2000': 'JPEG2000', } - def __init__(self, backbone_path: str, init_MTD_path: str, H_F='H', outfile: str = None): - super().__init__(backbone_path, init_MTD_path, H_F) + def __init__(self, sensor: str, input_xml_path: str, H_F='H', outfile: str = None): + super().__init__(_template_dict[H_F][sensor]["product"], input_xml_path, H_F=H_F) self.outfile = outfile - def manual_replaces(self, product): + def manual_replaces(self, product: S2L_Product): + """Do commons replacements in template ('self.root_out'), + then call 'self._specific_replaces' to finish to fill the final document ('self.root_out') + Set : + - ./General_Info/Product_Info/PRODUCT_URI + - ./General_Info/Product_Info/PROCESSING_LEVEL + - ./General_Info/Product_Info/PROCESSING_BASELINE + - ./General_Info/Product_Info/GENERATION_TIME + - ./General_Info/Product_Info/Product_Organisation/Granule_List/Granule + - ./General_Info/Product_Image_Characteristics/BOA_ADD_OFFSET_VALUES_LIST + - .Geometric_Info/Product_Footprint/Product_Footprint/Global_Footprint/EXT_POS_LIST> + + Args: + product (S2L_Product): concerned product + """ + + change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_URI', + new_value=metadata.mtd.get(f'product_{self.H_F}_name')) + change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_LEVEL', + new_value=f'Level-2{self.H_F}') + change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_BASELINE', + new_value=version.baseline_dotted) + + generation_time = datetime.strftime(metadata.mtd.get('product_creation_date'), '%Y-%m-%dT%H:%M:%S.%f')[ + :-3] + 'Z' # -3 to keep only 3 decimals + change_elm(self.root_out, 
rpath='./General_Info/Product_Info/GENERATION_TIME', new_value=generation_time) + + self.remove_children(GRANULE_PATH) + for band_path in sorted(set(metadata.mtd.get(f'bands_path_{self.H_F}'))): + adjusted_path = os.path.splitext(re.sub(r'^.*?GRANULE', 'GRANULE', band_path))[0] + create_child(self.root_out, rpath=GRANULE_PATH, tag='IMAGE_FILE', text=adjusted_path) + + tile_id = self._generate_tile_id(product) + change_elm(self.root_out, rpath=GRANULE_PATH, new_value=tile_id, attr_to_change='granuleIdentifier') + change_elm(self.root_out, rpath=GRANULE_PATH, + new_value=self.IMAGE_FORMAT[config.get('output_format')], attr_to_change='imageFormat') + + # Add BOA_ADD_OFFSET for each L2 bands + offset = int(config.get('offset')) + boa_offset_list_elem = './General_Info/Product_Image_Characteristics/BOA_ADD_OFFSET_VALUES_LIST' + for band_id in range(0, 13): + create_child(self.root_out, rpath=boa_offset_list_elem, tag='BOA_ADD_OFFSET', + text=str(-offset), attribs={"band_id": str(band_id)}) + + # Geometric_info + # --------------- + tile_code = product.mtl.mgrs + if tile_code.startswith('T'): + tile_code = tile_code[1:] + + footprint = search_db(tile_code, search='MGRS_REF') + + # adding back first element, to get a complete polygon + fp = footprint.split(' ') + footprint = ' '.join(fp + [fp[0], fp[1]]) + chg_elm_with_tag(self.root_out, tag='EXT_POS_LIST', new_value=footprint) + + self._specific_replaces(product) + + @abc.abstractmethod + def _generate_tile_id(self, product: S2L_Product): + """Get product tile id, mission dependant + + Args: + product (S2L_Product): Product for which tile is created + """ + # deliberately empty + + @abc.abstractmethod + def _specific_replaces(self, product: S2L_Product): + """Mission specific MTD changes to apply to the product level MTD template to have final MTD file + Call at the end of 'manual_replaces' + + Args: + product (S2L_Product): Product for which product level MTD is processed + """ + # deliberately empty + + +class Sentinel2ToS2LProductMtdWriter(S2LProductMtdWriter): + """Writer of S2H/F Product MTD file for product created from S2 product + """ + + def _generate_tile_id(self, product: S2L_Product): + return _generate_sentinel2_tile_id(product, self.H_F, metadata.mtd['S2_AC']) + + def _specific_replaces(self, product: S2L_Product): # GENERAL_INFO # ------------ @@ -43,38 +167,12 @@ def manual_replaces(self, product): ] copy_elements(elements_to_copy, self.root_in, self.root_out, self.root_bb) - change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_URI', - new_value=metadata.mtd.get('product_{}_name'.format(self.H_F))) - change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_LEVEL', - new_value='Level-2{}'.format(self.H_F)) change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_TYPE', - new_value='S2MSI2{}'.format(self.H_F)) - - pdgs = config.get('PDGS', '9999') - PDGS = '.'.join([pdgs[:len(pdgs) // 2], pdgs[len(pdgs) // 2:]]) - AC = self.root_in.findall('.//ARCHIVING_CENTRE') - if AC: - metadata.mtd['S2_AC'] = AC[0].text - change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_BASELINE', new_value=PDGS) - generation_time = dt.datetime.strftime(metadata.mtd.get('product_creation_date'), '%Y-%m-%dT%H:%M:%S.%f')[ - :-3] + 'Z' # -3 to keep only 3 decimals - change_elm(self.root_out, rpath='./General_Info/Product_Info/GENERATION_TIME', new_value=generation_time) + new_value=f'S2MSI2{self.H_F}') - self.remove_children('./General_Info/Product_Info/Product_Organisation/Granule_List/Granule') - 
for band_path in sorted(set(metadata.mtd.get('bands_path_{}'.format(self.H_F)))): - adjusted_path = os.path.splitext(re.sub(r'^.*?GRANULE', 'GRANULE', band_path))[0] - create_child(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - tag='IMAGE_FILE', text=adjusted_path) - grnl_id = \ - find_element_by_path(self.root_in, './General_Info/Product_Info/Product_Organisation/Granule_List/Granule') - if grnl_id: - change_elm(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - new_value=generate_S2_tile_id(product, self.H_F, metadata.mtd['S2_AC']), - attr_to_change='granuleIdentifier') - change_elm(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - new_value=self.IMAGE_FORMAT[config.get('output_format')], attr_to_change='imageFormat') - else: - pass # Fixme + archive_center = self.root_in.findall('.//ARCHIVING_CENTRE') + if archive_center: + metadata.mtd['S2_AC'] = archive_center[0].text # If Sbaf is done, we keep the values inside the backbone (S2A values) if not config.getboolean('doSbaf'): @@ -91,24 +189,16 @@ def manual_replaces(self, product): # Geometric_info # --------------- - tilecode = product.mtl.mgrs - if tilecode.startswith('T'): - tilecode = tilecode[1:] - footprint = search_db(tilecode, search='MGRS_REF') - # adding back first element, to get a complete polygon - fp = footprint.split(' ') - footprint = ' '.join(fp + [fp[0], fp[1]]) - chg_elm_with_tag(self.root_out, tag='EXT_POS_LIST', new_value=footprint) copy_elements(['./Geometric_Info/Coordinate_Reference_System'], self.root_in, self.root_out, self.root_bb) # Auxiliary_Data_Info # ------------------- - self.remove_children('./Auxiliary_Data_Info/GIPP_List') - copy_children(self.root_in, './Auxiliary_Data_Info/GIPP_List', - self.root_out, './Auxiliary_Data_Info/GIPP_List') + self.remove_children(GIPP_LIST_PATH) + copy_children(self.root_in, GIPP_LIST_PATH, + self.root_out, GIPP_LIST_PATH) config_fn = os.path.splitext(os.path.basename(config.parser.config_file))[0] - create_child(self.root_out, './Auxiliary_Data_Info/GIPP_List', tag="GIPP_FILENAME", text=config_fn, - attribs={"version": pdgs, "type": "GIP_S2LIKE"}) + create_child(self.root_out, GIPP_LIST_PATH, tag="GIPP_FILENAME", text=config_fn, + attribs={"version": version.baseline, "type": "GIP_S2LIKE"}) for tag in ['PRODUCTION_DEM_TYPE', 'IERS_BULLETIN_FILENAME', @@ -118,106 +208,73 @@ def manual_replaces(self, product): 'ESACCI_LandCover_Map', 'ESACCI_SnowCondition_Map_Dir']: elem = find_element_by_path(self.root_in, './Auxiliary_Data_Info/' + tag) + if len(elem) != 0: new_value = elem[0].text else: new_value = "NONE" + change_elm(self.root_out, rpath='./Auxiliary_Data_Info/' + tag, new_value=new_value) # Fill GRI_List - gri_elems = self.root_in.findall('.//GRI_FILENAME') - for gri_elm in gri_elems: + for gri_elm in self.root_in.findall('.//GRI_FILENAME'): create_child(self.root_out, './Auxiliary_Data_Info/GRI_List', tag="GRI_FILENAME", text=gri_elm.text) # Quality_Indicators_Info # ----------------------- - copy_elements(['./Quality_Indicators_Info'], self.root_in, self.root_out, self.root_bb) + copy_elements([QUALITY_INDICATOR_PATH], self.root_in, self.root_out, self.root_bb) -class MTD_writer_LS8(MtdWriter): +class LandsatToS2LProductMtdWriter(S2LProductMtdWriter): + """Writer of S2H/F Product MTD file for product created from LS product + """ - IMAGE_FORMAT = { - 'COG': 'GEOTIFF', - 'GTIFF': 'GEOTIFF', - 'JPEG2000': 'JPEG2000', - } + # 
Redefine constructor to DELIBERATELY Force + # - "Landsat" for sensor + # - "None" for input_xml_path + # And have a similar constructor contract + def __init__(self, sensor: str, input_xml_path: str, H_F='H', outfile: str = None): + super().__init__("Landsat", None, H_F, outfile) - def __init__(self, backbone_path: str, H_F='H', outfile: str = None): - super().__init__(backbone_path, init_MTD_path=None, H_F=H_F) - self.outfile = outfile + def _generate_tile_id(self, product: S2L_Product): + return _generate_landsat8_tile_id(product, self.H_F) - def manual_replaces(self, product): + def _specific_replaces(self, product: S2L_Product): # GENERAL_INFO # ------------ - acqdate = dt.datetime.strftime(product.acqdate, '%Y-%m-%dT%H:%M:%S.%fZ') - change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_START_TIME', new_value=acqdate) - change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_STOP_TIME', new_value=acqdate) - - change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_URI', - new_value=metadata.mtd.get('product_{}_name'.format(self.H_F))) - change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_LEVEL', - new_value='Level-2{}'.format(self.H_F)) + acq_date = datetime.strftime(product.acqdate, '%Y-%m-%dT%H:%M:%S.%fZ') + change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_START_TIME', new_value=acq_date) + change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_STOP_TIME', new_value=acq_date) change_elm(self.root_out, rpath='./General_Info/Product_Info/PRODUCT_TYPE', new_value=f'{product.sensor}OLI2{self.H_F}') - - pdgs = config.get('PDGS', '9999') - PDGS = '.'.join([pdgs[:len(pdgs) // 2], pdgs[len(pdgs) // 2:]]) - change_elm(self.root_out, rpath='./General_Info/Product_Info/PROCESSING_BASELINE', new_value=PDGS) - generation_time = dt.datetime.strftime(metadata.mtd.get('product_creation_date'), '%Y-%m-%dT%H:%M:%S.%f')[ - :-3] + 'Z' # -3 to keep only 3 decimals - change_elm(self.root_out, rpath='./General_Info/Product_Info/GENERATION_TIME', new_value=generation_time) - change_elm(self.root_out, rpath='./General_Info/Product_Info/Datatake/SPACECRAFT_NAME', new_value=product.mtl.mission) - change_elm(self.root_out, rpath='./General_Info/Product_Info/Datatake/DATATAKE_SENSING_START', - new_value=acqdate) + new_value=acq_date) change_elm(self.root_out, rpath='./General_Info/Product_Info/Datatake/SENSING_ORBIT_NUMBER', new_value=config.get('relative_orbit')) - self.remove_children('./General_Info/Product_Info/Product_Organisation/Granule_List/Granule') - for band_path in sorted(set(metadata.mtd.get('bands_path_{}'.format(self.H_F)))): - adjusted_path = os.path.splitext(re.sub(r'^.*?GRANULE', 'GRANULE', band_path))[0] - create_child(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - tag='IMAGE_FILE', text=adjusted_path) - - tile_id = generate_LS8_tile_id(product, self.H_F) - change_elm(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - new_value=tile_id, attr_to_change='granuleIdentifier') - change_elm(self.root_out, rpath='./General_Info/Product_Info/Product_Organisation/Granule_List/Granule', - new_value=self.IMAGE_FORMAT[config.get('output_format')], attr_to_change='imageFormat') - if not config.getboolean('doSbaf'): # FIXME : get product image characteristics from origin sensor (LS8 here), # copying from another template fro example pass - U = distance_variation_corr(product.acqdate) + + U = 
_distance_variation_corr(product.acqdate) change_elm(self.root_out, rpath='./General_Info/Product_Image_Characteristics/Reflectance_Conversion/U', new_value=str(U)) - # Geometric_info - # --------------- - tilecode = product.mtl.mgrs - if tilecode.startswith('T'): - tilecode = tilecode[1:] - footprint = search_db(tilecode, search='MGRS_REF') - # adding back first element, to get a complete polygon - fp = footprint.split(' ') - footprint = ' '.join(fp + [fp[0], fp[1]]) - chg_elm_with_tag(self.root_out, tag='EXT_POS_LIST', new_value=footprint) - # Auxiliary_Data_Info # ------------------- - self.remove_children('./Auxiliary_Data_Info/GIPP_List', exceptions=['Input_Product_Info']) + self.remove_children(GIPP_LIST_PATH, exceptions=['Input_Product_Info']) config_fn = os.path.splitext(os.path.basename(config.parser.config_file))[0] - create_child(self.root_out, './Auxiliary_Data_Info/GIPP_List', tag="GIPP_FILENAME", text=config_fn, - attribs={"version": pdgs, "type": "GIP_S2LIKE"}) + create_child(self.root_out, GIPP_LIST_PATH, tag="GIPP_FILENAME", text=config_fn, + attribs={"version": version.baseline, "type": "GIP_S2LIKE"}) # Quality_Indicators_Info # ----------------------- - self.remove_children('./Quality_Indicators_Info', exceptions=['Input_Product_Info', 'Cloud_Coverage_Assessment']) + self.remove_children(QUALITY_INDICATOR_PATH, exceptions=['Input_Product_Info', 'Cloud_Coverage_Assessment']) change_elm(self.root_out, './Quality_Indicators_Info/Input_Product_Info', attr_to_change='type', new_value=product.mtl.mission) change_elm(self.root_out, './Quality_Indicators_Info/Input_Product_Info', @@ -226,16 +283,90 @@ def manual_replaces(self, product): new_value=product.mtl.cloud_cover) -class MTD_tile_writer_S2(MtdWriter): - def __init__(self, backbone_path: str, init_MTD_path: str, H_F='H', outfile: str = None): - super().__init__(backbone_path, init_MTD_path, H_F) +_product_mtl_writer_class_dict = { + "S2": Sentinel2ToS2LProductMtdWriter, + "L8": LandsatToS2LProductMtdWriter, + "L9": LandsatToS2LProductMtdWriter, +} + + +def get_product_mtl_writer_class(sensor: str) -> S2LProductMtdWriter: + """Return concrete S2LProductMtdWriter from a sensor + + Args: + sensor (str): sensor from which retrieve concrete S2LProductMtdWriter + + Returns: + S2LProductMtdWriter: product mtd writer corresponding to the sensor + """ + return _product_mtl_writer_class_dict[sensor] + + +class S2LTileMtdWriter(XmlWriter, abc.ABC): + """Abstract Tile level MTD writer + """ + + def __init__(self, sensor: str, input_xml_path: str, H_F='H', outfile: str = None): + super().__init__(_template_dict[H_F][sensor]["tile"], input_xml_path, H_F=H_F) self.outfile = outfile - def manual_replaces(self, product): + def manual_replaces(self, product: S2L_Product): + + tile = loads(search_db(product.mtl.mgrs, search='UTM_WKT')) + x_min = int(tile.bounds[0]) + y_min = int(tile.bounds[1]) + change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULX', new_value=str(x_min)) + change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULY', new_value=str(y_min)) + + angles_path = os.path.join('GRANULE', metadata.mtd.get(f'granule_{self.H_F}_name'), 'QI_DATA', + metadata.mtd.get('ang_filename')) + change_elm(self.root_out, './Geometric_Info/Tile_Angles/Acquisition_Angles_Filename', new_value=angles_path) + + rm_elm_with_tag(self.root_out, tag='Sun_Angles_Grid') + rm_elm_with_tag(self.root_out, tag='Viewing_Incidence_Angle_Grid') + + # Replace masks with all existing + self.remove_children(PIXEL_LEVEL_QI_PATH, 
tag='MASK_FILENAME') + + for mask in metadata.mtd.get(f'masks_{self.H_F}'): + create_child(self.root_out, PIXEL_LEVEL_QI_PATH, tag=mask.get('tag'), + text=mask.get('text'), + attribs=mask.get('attribs')) + + rm_elm_with_tag(self.root_out, tag='PVI_FILENAME') + rm_elm_with_tag(self.root_out, tag='QL_B12118A_FILENAME') + rm_elm_with_tag(self.root_out, tag='QL_B432_FILENAME') + + # Get all created quicklooks (including PVI) + for ql in metadata.mtd.get(f'quicklooks_{self.H_F}'): + ql_path = re.search(r'GRANULE(.*)', ql).group() + band_root_name = metadata.mtd.get(f'band_rootName_{self.H_F}') + ql_name = re.search(r'{}_(.*)'.format(band_root_name), ql_path).group(1) + create_child(self.root_out, QUALITY_INDICATOR_PATH, + tag=f"{os.path.splitext(ql_name)[0]}_FILENAME", text=ql_path) + + self._specific_replaces(product) + + @abc.abstractmethod + def _specific_replaces(self, product: S2L_Product): + """Mission specific MTD changes to apply to the Tile level MTD template to have final MTD file + Call at the end of 'manual_replaces' + + Args: + product (S2L_Product): Product for which tile level MTD is processed + """ + # deliberately empty + + +class Sentinel2ToS2LTileMtdWriter(S2LTileMtdWriter): + """Writer of S2H/F Tile MTD file for product created from S2 product + """ + + def _specific_replaces(self, product: S2L_Product): # GENERAL_INFO # ------------ - copy_elements(['./General_Info/TILE_ID', + copy_elements([TILE_ID_PATH, './General_Info/DATASTRIP_ID', './General_Info/DOWNLINK_PRIORITY', './General_Info/SENSING_TIME', @@ -243,7 +374,7 @@ def manual_replaces(self, product): self.root_in, self.root_out, self.root_bb) if product.mtl.data_type == 'Level-1C' or 'L1' in product.mtl.data_type: - l1c_tile_id = find_element_by_path(self.root_in, './General_Info/TILE_ID')[0].text + l1c_tile_id = find_element_by_path(self.root_in, TILE_ID_PATH)[0].text l2a_tile_id = "NONE" else: try: @@ -251,26 +382,27 @@ def manual_replaces(self, product): except IndexError: l1c_tile_id = None try: - l2a_tile_id = find_element_by_path(self.root_in, './General_Info/TILE_ID')[0].text + l2a_tile_id = find_element_by_path(self.root_in, TILE_ID_PATH)[0].text except IndexError: l2a_tile_id = None - tilecode = product.mtl.mgrs - AC = self.root_in.findall('.//ARCHIVING_CENTRE') - if AC: - metadata.mtd['S2_AC'] = AC[0].text + archive_center = self.root_in.findall('.//ARCHIVING_CENTRE') + if archive_center: + metadata.mtd['S2_AC'] = archive_center[0].text - tile_id = generate_S2_tile_id(product, self.H_F, metadata.mtd['S2_AC']) + tile_id = _generate_sentinel2_tile_id(product, self.H_F, metadata.mtd['S2_AC']) if l1c_tile_id is None: - self.remove_children('./General_Info/L1_TILE_ID') + self.remove_children(L1_TILE_ID_PATH) else: - change_elm(self.root_out, './General_Info/L1_TILE_ID', new_value=l1c_tile_id) + change_elm(self.root_out, L1_TILE_ID_PATH, new_value=l1c_tile_id) + if l2a_tile_id is None: - self.remove_children('./General_Info/L2A_TILE_ID') + self.remove_children(L2A_TILE_ID_PATH) else: - change_elm(self.root_out, './General_Info/L2A_TILE_ID', new_value=l2a_tile_id) - change_elm(self.root_out, './General_Info/TILE_ID', new_value=tile_id) + change_elm(self.root_out, L2A_TILE_ID_PATH, new_value=l2a_tile_id) + + change_elm(self.root_out, TILE_ID_PATH, new_value=tile_id) # Geometric_info # --------------- @@ -278,37 +410,15 @@ def manual_replaces(self, product): './Geometric_Info/Tile_Geocoding/HORIZONTAL_CS_CODE'], self.root_in, self.root_out, self.root_bb) - g = loads(search_db(tilecode, search='UTM_WKT')) - xMin = 
int(g.bounds[0]) - yMin = int(g.bounds[1]) - change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULX', new_value=str(xMin)) - change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULY', new_value=str(yMin)) - - self.remove_children('./Geometric_Info/Tile_Angles', tag='Viewing_Incidence_Angles_Grids') - angles_path = os.path.join('GRANULE', metadata.mtd.get('granule_{}_name'.format(self.H_F)), 'QI_DATA', - metadata.mtd.get('ang_filename')) - change_elm(self.root_out, './Geometric_Info/Tile_Angles/Acquisition_Angles_Filename', new_value=angles_path) - - rm_elm_with_tag(self.root_out, tag='Sun_Angles_Grid') - rm_elm_with_tag(self.root_out, tag='Viewing_Incidence_Angle_Grid') - copy_elements(['./Geometric_Info/Tile_Angles/Mean_Sun_Angle'], self.root_in, self.root_out, self.root_bb) copy_elements(['./Geometric_Info/Tile_Angles/Mean_Viewing_Incidence_Angle_List'], self.root_in, self.root_out, self.root_bb) # Quality indicators info # ----------------------- - self.remove_children('./Quality_Indicators_Info/Image_Content_QI') - copy_children(self.root_in, './Quality_Indicators_Info/Image_Content_QI', - self.root_out, './Quality_Indicators_Info/Image_Content_QI') - - # Replace masks with all existing - self.remove_children('./Quality_Indicators_Info/Pixel_Level_QI', tag='MASK_FILENAME') - - for mask in metadata.mtd.get('masks_{}'.format(self.H_F)): - create_child(self.root_out, './Quality_Indicators_Info/Pixel_Level_QI', tag=mask.get('tag'), - text=mask.get('text'), - attribs=mask.get('attribs')) + self.remove_children(IMAGE_CONTENT_QI_PATH) + copy_children(self.root_in, IMAGE_CONTENT_QI_PATH, + self.root_out, IMAGE_CONTENT_QI_PATH) try: msk_text = find_element_by_path(self.root_in, './Quality_Indicators_Info/Pixel_Level_QI/MASK_FILENAME')[ @@ -319,26 +429,21 @@ def manual_replaces(self, product): if ini_grn_name is not None: elems = find_element_by_path(self.root_out, './Quality_Indicators_Info/Pixel_Level_QI/MASK_FILENAME') for elem in elems: - elem.text = elem.text.replace(ini_grn_name, metadata.mtd.get('granule_{}_name'.format(self.H_F))) + elem.text = elem.text.replace(ini_grn_name, metadata.mtd.get(f'granule_{self.H_F}_name')) - rm_elm_with_tag(self.root_out, tag='PVI_FILENAME') - rm_elm_with_tag(self.root_out, tag='QL_B12118A_FILENAME') - rm_elm_with_tag(self.root_out, tag='QL_B432_FILENAME') - # Get all created quicklooks (including PVI) - for ql in metadata.mtd.get('quicklooks_{}'.format(self.H_F)): - ql_path = re.search(r'GRANULE(.*)', ql).group() - band_rootName = metadata.mtd.get(f'band_rootName_{self.H_F}') - ql_name = re.search(r'{}_(.*)'.format(band_rootName), ql_path).group(1) - create_child(self.root_out, './Quality_Indicators_Info', - tag="{}_FILENAME".format(os.path.splitext(ql_name)[0]), text=ql_path) +class LandsatToS2LTileMtdWriter(S2LTileMtdWriter): + """Writer of S2H/F Tile MTD file for product created from LS product + """ -class MTD_tile_writer_LS8(MtdWriter): - def __init__(self, backbone_path: str, H_F='H', outfile: str = None): - super().__init__(backbone_path, init_MTD_path=None, H_F=H_F) - self.outfile = outfile + # Redefine constructor to DELIBERATELY Force + # - "Landsat" for sensor + # - "None" for input_xml_path + # And have a similar constructor contract + def __init__(self, sensor: str, input_xml_path: str, H_F='H', outfile: str = None): + super().__init__("Landsat", None, H_F, outfile) - def manual_replaces(self, product): + def _specific_replaces(self, product: S2L_Product): # GENERAL_INFO # ------------ @@ -349,82 +454,71 
@@ def manual_replaces(self, product): l1__tile_id = "NONE" l2a_tile_id = product.mtl.landsat_scene_id - tile_id = generate_LS8_tile_id(product, self.H_F) - change_elm(self.root_out, './General_Info/L1_TILE_ID', new_value=l1__tile_id) - change_elm(self.root_out, './General_Info/L2A_TILE_ID', new_value=l2a_tile_id) - change_elm(self.root_out, './General_Info/TILE_ID', new_value=tile_id) + tile_id = _generate_landsat8_tile_id(product, self.H_F) + change_elm(self.root_out, L1_TILE_ID_PATH, new_value=l1__tile_id) + change_elm(self.root_out, L2A_TILE_ID_PATH, new_value=l2a_tile_id) + change_elm(self.root_out, TILE_ID_PATH, new_value=tile_id) - acqdate = dt.datetime.strftime(product.acqdate, '%Y-%m-%dT%H:%M:%S.%fZ') - change_elm(self.root_out, './General_Info/SENSING_TIME', new_value=acqdate) + acq_date = datetime.strftime(product.acqdate, '%Y-%m-%dT%H:%M:%S.%fZ') + change_elm(self.root_out, './General_Info/SENSING_TIME', new_value=acq_date) - AC = metadata.hardcoded_values.get('L8_archiving_center') - change_elm(self.root_out, './General_Info/Archiving_Info/ARCHIVING_CENTRE', new_value=AC) + archive_center = metadata.hardcoded_values.get('L8_archiving_center') + change_elm(self.root_out, './General_Info/Archiving_Info/ARCHIVING_CENTRE', new_value=archive_center) change_elm(self.root_out, './General_Info/Archiving_Info/ARCHIVING_TIME', new_value=metadata.hardcoded_values.get('L8_archiving_time')) # Geometric_info # --------------- - tilecode = product.mtl.mgrs - cs_name = '{} / {} {}N'.format(product.mtl.datum, product.mtl.map_projection, product.mtl.utm_zone) - cs_code = 'EPSG:{}'.format(search_db(tilecode, search='EPSG')) + tile_code = product.mtl.mgrs + cs_name = f'{product.mtl.datum} / {product.mtl.map_projection} {product.mtl.utm_zone}N' + cs_code = f'EPSG:{search_db(tile_code, search="EPSG")}' change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/HORIZONTAL_CS_NAME', new_value=cs_name) change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/HORIZONTAL_CS_CODE', new_value=cs_code) - g = loads(search_db(tilecode, search='UTM_WKT')) - xMin = int(g.bounds[0]) - yMin = int(g.bounds[1]) - change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULX', new_value=str(xMin)) - change_elm(self.root_out, './Geometric_Info/Tile_Geocoding/Geoposition/ULY', new_value=str(yMin)) - - angles_path = os.path.join('GRANULE', metadata.mtd.get('granule_{}_name'.format(self.H_F)), 'QI_DATA', - metadata.mtd.get('ang_filename')) - change_elm(self.root_out, './Geometric_Info/Tile_Angles/Acquisition_Angles_Filename', new_value=angles_path) - - rm_elm_with_tag(self.root_out, tag='Sun_Angles_Grid') - rm_elm_with_tag(self.root_out, tag='Viewing_Incidence_Angle_Grid') - - src_ds = gdal.Open(product.mtl.angles_file) - VAA = np.mean(src_ds.GetRasterBand(1).ReadAsArray().astype(np.float32) / 100.0) - VZA = np.mean(src_ds.GetRasterBand(2).ReadAsArray().astype(np.float32) / 100.0) - SAA = np.mean(src_ds.GetRasterBand(3).ReadAsArray().astype(np.float32) / 100.0) - SZA = np.mean(src_ds.GetRasterBand(4).ReadAsArray().astype(np.float32) / 100.0) - - change_elm(self.root_out, './Geometric_Info/Tile_Angles/Mean_Sun_Angle/ZENITH_ANGLE', new_value=str(SZA)) - change_elm(self.root_out, './Geometric_Info/Tile_Angles/Mean_Sun_Angle/AZIMUTH_ANGLE', new_value=str(SAA)) - change_elm(self.root_out, - './Geometric_Info/Tile_Angles/Mean_Viewing_Incidence_Angle_List/Mean_Viewing_Incidence_Angle/ZENITH_ANGLE', - new_value=str(VZA)) - change_elm(self.root_out, - 
'./Geometric_Info/Tile_Angles/Mean_Viewing_Incidence_Angle_List/Mean_Viewing_Incidence_Angle/AZIMUTH_ANGLE', - new_value=str(VAA)) + src_ds = gdal.Open(product.angles_file) + viewing_azimuth_angle = np.mean(src_ds.GetRasterBand(1).ReadAsArray().astype(np.float32) / 100.0) + viewing_zenith_angle = np.mean(src_ds.GetRasterBand(2).ReadAsArray().astype(np.float32) / 100.0) + azimuth_angle = np.mean(src_ds.GetRasterBand(3).ReadAsArray().astype(np.float32) / 100.0) + zenith_angle = np.mean(src_ds.GetRasterBand(4).ReadAsArray().astype(np.float32) / 100.0) + + change_elm(self.root_out, './Geometric_Info/Tile_Angles/Mean_Sun_Angle/ZENITH_ANGLE', new_value=str(zenith_angle)) + change_elm(self.root_out, './Geometric_Info/Tile_Angles/Mean_Sun_Angle/AZIMUTH_ANGLE', new_value=str(azimuth_angle)) + change_elm( + self.root_out, + './Geometric_Info/Tile_Angles/Mean_Viewing_Incidence_Angle_List/Mean_Viewing_Incidence_Angle/ZENITH_ANGLE', + new_value=str(viewing_zenith_angle)) + change_elm( + self.root_out, + './Geometric_Info/Tile_Angles/Mean_Viewing_Incidence_Angle_List/Mean_Viewing_Incidence_Angle/AZIMUTH_ANGLE', + new_value=str(viewing_azimuth_angle)) # Quality indicators info # ----------------------- - self.remove_children('./Quality_Indicators_Info/Image_Content_QI') - create_child(self.root_out, './Quality_Indicators_Info/Image_Content_QI', + self.remove_children(IMAGE_CONTENT_QI_PATH) + create_child(self.root_out, IMAGE_CONTENT_QI_PATH, tag="CLOUDY_PIXEL_PERCENTAGE", text=product.mtl.cloud_cover) - # Replace masks with all existing - self.remove_children('./Quality_Indicators_Info/Pixel_Level_QI', tag='MASK_FILENAME') - for mask in metadata.mtd.get('masks_{}'.format(self.H_F)): - create_child(self.root_out, './Quality_Indicators_Info/Pixel_Level_QI', tag=mask.get('tag'), - text=mask.get('text'), - attribs=mask.get('attribs')) +_tile_mtl_writer_class_dict = { + "S2": Sentinel2ToS2LTileMtdWriter, + "L8": LandsatToS2LTileMtdWriter, + "L9": LandsatToS2LTileMtdWriter, +} - rm_elm_with_tag(self.root_out, tag='PVI_FILENAME') - rm_elm_with_tag(self.root_out, tag='QL_B12118A_FILENAME') - rm_elm_with_tag(self.root_out, tag='QL_B432_FILENAME') - # Get all created quicklooks (including PVI) - for ql in metadata.mtd.get('quicklooks_{}'.format(self.H_F)): - ql_path = re.search(r'GRANULE(.*)', ql).group() - band_rootName = metadata.mtd.get(f'band_rootName_{self.H_F}') - ql_name = re.search(r'{}_(.*)'.format(band_rootName), ql_path).group(1) - create_child(self.root_out, './Quality_Indicators_Info', - tag="{}_FILENAME".format(os.path.splitext(ql_name)[0]), text=ql_path) +def get_tile_mtl_writer_class(sensor: str) -> S2LTileMtdWriter: + """Return concrete S2LTileMtdWriter from a sensor + + Args: + sensor (str): sensor from which retrieve concrete S2LTileMtdWriter -def to_JulianDay(date): + Returns: + S2LTileMtdWriter: tile mtd writer corresponding to the sensor + """ + return _tile_mtl_writer_class_dict[sensor] + + +def _to_julian_day(date): """ Computes Julian day from datetime.datetime date :param date: @@ -437,47 +531,47 @@ def to_JulianDay(date): ss = date.second ms = date.microsecond fraction = hh / 24 + mm / (24 * 60) + ss / (24 * 60 * 60) + ms / (24 * 60 * 60 * 10 ** 6) - t = date.toordinal() + year1 + fraction - return t + return date.toordinal() + year1 + fraction -def distance_variation_corr(date): +def _distance_variation_corr(date): """ From https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-2-msi/level-1c/algorithm :param date: :return: """ t0 = 2433283 - t = to_JulianDay(date) - dt = 1 / 
math.pow((1 - 0.01672 * math.cos(0.0172 * (t - t0 - 2))), 2) - return dt - - -def generate_LS8_tile_id(pd, H_F): - tilecode = pd.mtl.mgrs - if not tilecode.startswith('T'): - tilecode = f"T{tilecode}" - pdgs = metadata.hardcoded_values.get('PDGS', '9999') - PDGS = '.'.join([pdgs[:len(pdgs) // 2], pdgs[len(pdgs) // 2:]]) - AC = metadata.hardcoded_values.get('L8_archiving_center') - AO = metadata.hardcoded_values.get('L8_absolute_orbit') - acqdate = dt.datetime.strftime(pd.acqdate, '%Y%m%dT%H%M%S') + julian_day = _to_julian_day(date) + return 1 / math.pow((1 - 0.01672 * math.cos(0.0172 * (julian_day - t0 - 2))), 2) + + +def _generate_landsat8_tile_id(product, H_F): + tile_code = product.mtl.mgrs + if not tile_code.startswith('T'): + tile_code = f"T{tile_code}" + + archive_center = metadata.hardcoded_values.get('L8_archiving_center') + absolute_orbit = metadata.hardcoded_values.get('L8_absolute_orbit') + acq_date = datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') + tile_id = '_'.join( - [pd.sensor_name, 'OPER', 'OLI', 'L2{}'.format(H_F), AC, acqdate, 'A{}'.format(AO), - tilecode, 'N{}'.format(PDGS)]) + [product.sensor_name, 'OPER', 'OLI', f'L2{H_F}', archive_center, acq_date, f'A{absolute_orbit}', tile_code, + f'N{version.baseline_dotted}']) return tile_id -def generate_S2_tile_id(product, H_F, AC): - tilecode = product.mtl.mgrs - if not tilecode.startswith('T'): - tilecode = f"T{tilecode}" - pdgs = metadata.hardcoded_values.get('PDGS', '9999') - PDGS = '.'.join([pdgs[:len(pdgs) // 2], pdgs[len(pdgs) // 2:]]) - acqdate = dt.datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') - if AC.endswith('_'): - AC = AC[:-1] - tile_id = '_'.join([product.sensor_name, 'OPER', 'MSI', 'L2{}'.format(H_F), AC, acqdate, - 'A{}'.format(config.get('absolute_orbit')), tilecode, 'N{}'.format(PDGS)]) +def _generate_sentinel2_tile_id(product, H_F, archive_center): + tile_code = product.mtl.mgrs + if not tile_code.startswith('T'): + tile_code = f"T{tile_code}" + + acq_date = datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') + if archive_center.endswith('_'): + archive_center = archive_center[:-1] + + tile_id = '_'.join( + [product.sensor_name, 'OPER', 'MSI', f'L2{H_F}', archive_center, acq_date, f'A{config.get("absolute_orbit")}', + tile_code, f'N{version.baseline_dotted}']) + return tile_id diff --git a/sen2like/sen2like/core/QI_MTD/stac_interface.py b/sen2like/sen2like/core/QI_MTD/stac_interface.py index 55b2988..42b49fd 100644 --- a/sen2like/sen2like/core/QI_MTD/stac_interface.py +++ b/sen2like/sen2like/core/QI_MTD/stac_interface.py @@ -116,7 +116,8 @@ def write_product(self, product, output_dir, bands, ql_name, granule_compact_nam output_name = f"{os.path.join(output_dir, product_id)}.json" item = self._create_item(product, product_id, output_name, bands[0]) - for image in set(bands): + # sort mainly to avoid error during compare with ref file in tests + for image in sorted(set(bands)): band = image.split('_')[-2] if not os.path.exists(image) and image.endswith('.jp2'): log.warning("Overwrite .jp2 extension from metadata -> image file is a TIF !!!!!") @@ -136,16 +137,16 @@ def write_product(self, product, output_dir, bands, ql_name, granule_compact_nam media_type=pystac.MediaType.JPEG) item.add_asset("thumbnail", ql_asset) else: - log.warning("%s not found: No thumbnail for band %s" % (ql_path, band)) + log.warning("%s not found: No thumbnail for band %s", ql_path, band) item.save_object() - logging.debug("STAC file generated: %s" % output_name) + log.debug("STAC file generated: %s", output_name) if 
self.catalog_path is not None: try: self.catalog.add_item(item, title=product_id) except urllib.error.URLError as error: - log.error("Cannot write to catalog: %s" % error) + log.error("Cannot write to catalog: %s", error) def write_catalog(self): if self.catalog is None: @@ -155,7 +156,7 @@ def write_catalog(self): list(self.catalog.get_all_items())): self.catalog.update_extent_from_items() self.catalog.save(catalog_type=pystac.CatalogType.ABSOLUTE_PUBLISHED) - logging.debug("STAC catalog generated: %s" % self.catalog_path) + log.debug("STAC catalog generated: %s", self.catalog_path) class STACReader: diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QI_Report_backbone.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml similarity index 87% rename from sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QI_Report_backbone.xml rename to sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml index 0af4f51..94d87f6 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QI_Report_backbone.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2F_QUALITY_backbone.xml @@ -1,8 +1,8 @@ - + - L2F_QI_Report + L2F_QUALITY Quality information obtained from Sen2like NONE @@ -71,6 +71,7 @@ NONE NONE NONE + NONE NONE NONE NONE @@ -83,6 +84,8 @@ Atmospheric correction quality control + + NONE NONE @@ -99,6 +102,7 @@ NONE NONE NONE + NONE NONE NONE NONE @@ -143,6 +147,23 @@ + + L2H_VALIDITY_MASK + VALIDITY_MASK + 1.0 + + + Percentage of valid pixels + + NONE + NONE + + + L2H_GEO L2H_Geometry @@ -159,12 +180,18 @@ NONE NONE NONE + NONE NONE + NONE + NONE NONE + NONE + NONE NONE NONE - NONE + NONE Publication regarding the SIFT methods (ISPRS) + NONE @@ -184,6 +211,7 @@ Strahler and al. 1999 NONE NONE + NONE @@ -218,7 +246,7 @@ - L2F_FU + L2F_FUSION L2F_Fusion 1.0 Sen2like ATBD TBW - TBW + TBW TBW + TBW diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QI_Report_backbone.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml similarity index 86% rename from sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QI_Report_backbone.xml rename to sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml index e787794..1e69de6 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QI_Report_backbone.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/L2H_QUALITY_backbone.xml @@ -1,8 +1,8 @@ - + - L2H_QI_Report + L2H_QUALITY Quality information obtained from Sen2like NONE @@ -71,6 +71,7 @@ NONE NONE NONE + NONE NONE NONE NONE @@ -83,6 +84,8 @@ Atmospheric correction quality control + + NONE NONE @@ -99,6 +102,7 @@ NONE NONE NONE + NONE NONE NONE NONE @@ -143,11 +147,28 @@ + + L2H_VALIDITY_MASK + VALIDITY_MASK + 1.0 + + + Percentage of valid pixels + + NONE + NONE + + + L2H_GEO L2H_Geometry 1.0 - NONE NONE NONE + NONE NONE + NONE + NONE NONE + NONE + NONE NONE NONE - NONE + NONE Publication regarding the SIFT methods (ISPRS) + NONE @@ -172,7 +199,7 @@ L2H_BRDF L2H_BRDF_NBAR 1.0 - Strahler and al. 
1999 NONE NONE + NONE @@ -191,7 +219,7 @@ L2H_SBAF L2H_SBAF 1.0 - 1000.0 1000.0 + 1.02055236267144 diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_MSIL2H_S2.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_MSIL2H_S2.xml index bd83a7c..978d74f 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_MSIL2H_S2.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_MSIL2H_S2.xml @@ -59,6 +59,7 @@ 1000.0 1000.0 + 1.02055236267144 diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2F_L8.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2F_L8.xml index cc15807..6950474 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2F_L8.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2F_L8.xml @@ -59,6 +59,7 @@ 1000.0 1000.0 + 1.02055236267144 diff --git a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2H_L8.xml b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2H_L8.xml index 3b48bad..2c3c751 100644 --- a/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2H_L8.xml +++ b/sen2like/sen2like/core/QI_MTD/xml_backbones/MTD_OLIL2H_L8.xml @@ -59,6 +59,7 @@ 1000.0 1000.0 + 1.02055236267144 diff --git a/sen2like/sen2like/core/QI_MTD/xml_fn.py b/sen2like/sen2like/core/QI_MTD/xml_fn.py deleted file mode 100644 index afe6bd9..0000000 --- a/sen2like/sen2like/core/QI_MTD/xml_fn.py +++ /dev/null @@ -1,328 +0,0 @@ -#! /usr/bin/env python -# -*- coding: utf-8 -*- -# G. Cavaro (TPZ-F) 2018 - -import copy -import os -import re -import xml -from xml.etree.ElementTree import Element - - -def find_element_by_path(root, path_to_match): - """. - Support Xpath attrib and value assignement - :param root: - :param path_to_match: - :return: - """ - - updated_path = get_final_path(root, path_to_match) - children = root.findall(updated_path) - indexes = [get_idx(root, child) for child in children] - - return children, indexes - - -def adjust_node(root: Element, path: str, node_to_match: str): - ns_path = node_to_match - parents = root.findall('/'.join(path.split('/')[:-1])) - for parent in parents: - - children = list(parent) - - for child in children: - if node_to_match.split('[')[0] in child.tag: - _, namespace = remove_namespace(child.tag) - ns_path = append_namespace_to_path(node_to_match, namespace) - return ns_path - return ns_path - - -def get_final_path(root, path_to_match): - """ - Does not support namespace constraints inside the path - :param root: - :param path_to_match: - :return: - """ - - nodes = path_to_match.split('/') - final_nodes = nodes - - for i, node_path in enumerate(nodes): - if node_path == '.': - continue - path = '/'.join([node for node in final_nodes[0:i + 1]]) - adjusted_node = adjust_node(root, path, node_path) - final_nodes[i] = adjusted_node - - return '/'.join(final_nodes) - - -def get_idx(root, elem): - parent = getParentObjectNode(root, elem) - for i, child in enumerate(parent.findall('./')): - if child == elem: - return i - - -def append_namespace_to_path(path, namespace): - """ - Append the namespace to all subelement of the path - :param path: - :param namespace: - :return: - """ - - last_char = '/' if path.endswith('/') else '' - first_char = './' if path.startswith('./') else '' - - ns_path = path.lstrip('./').rstrip('/').split('/') - for i, sub_path in enumerate(ns_path): - if not sub_path.startswith('['): - ns_path[i] = namespace + sub_path - - ns_path = '/'.join([sub_path for sub_path in ns_path if sub_path]) - ns_path = first_char + ns_path + last_char - - # Replace tags but not attribs, which do not have 
namespaces - ns_path = ns_path.replace('[@', '@123456789@') - ns_path = ns_path.replace('[', '[' + namespace) - ns_path = ns_path.replace('@123456789@', '[@') - - return ns_path - - -def getParentObjectNode(root: Element, node: Element): - for elem in root.iter('*'): - if node in list(elem): - return elem - return None - - -def get_elem_path(root: Element, node: Element, rm_ns=False): - tag, _ = remove_namespace(node.tag) - path = node.tag if rm_ns else tag - parent = getParentObjectNode(root, node) - while parent is not None: - tag, _ = remove_namespace(parent.tag) - path = os.path.join(tag, path) if rm_ns else os.path.join(parent.tag, path) - parent = getParentObjectNode(root, parent) - - return path - - -def remove_namespace(tag): - """ - Removes the namespace before an element tag - Example of tag with namespace : {http://gs2.esa.int/DATA_STRUCTURE/l2aqiReport}L2A_Quality_File - :param tag: - :return: - """ - - m = re.match(r'\{.*\}', tag) - namespace = m.group(0) if m else '' - node_name = tag.replace(namespace, '') - - return node_name, namespace - - -def compare_nodes(node1: Element, node2: Element, rpath: str): - """ - Compares recursively in the nodes which elements are in common, based on the tag - :param node1: - :param node2: - :param rpath: - :return: - """ - - common_nodes = [] - paths_to_nodes = [] - not_matched = [] - not_matched_paths = [] - - children1 = node1.findall('./') - children2 = node2.findall('./') - - for child1 in children1: - matched = False - tag1, _ = remove_namespace(child1.tag) - child_rpath = os.path.join(rpath, tag1) - # child_rpath = os.path.join(rpath, child1.tag) - - for child2 in children2: - tag2, _ = remove_namespace(child2.tag) - - if tag1 == tag2: - matched = True - common_nodes.append(child2) - paths_to_nodes.append(child_rpath) - - new_common_nodes, new_paths, new_not_matched, new_not_matched_paths = compare_nodes(child1, child2, - child_rpath) - common_nodes += new_common_nodes - paths_to_nodes += new_paths - not_matched += new_not_matched - not_matched_paths += new_not_matched_paths - - if not matched: - not_matched.append(child1) - not_matched_paths.append(child_rpath) - - return common_nodes, paths_to_nodes, not_matched, not_matched_paths - - -def compare_trees(self): - # Find wich elements are in the backbone, and also in the MTD of the used product - common_nodes, path_to_nodes, not_matched, not_matched_paths = compare_nodes(self.root_bb, self.root_in, rpath='./') - print('Matched :') - [print(m) for m in set(path_to_nodes)] - print('\nNot matched :') - [print(m) for m in set(not_matched_paths)] - print() - - -# @get_modifications -def chg_elm_with_tag(root: Element, tag: str, new_value: str, attrs: dict = None): - """ - Searchs in the tree all elements with a particular tag, and replaces its value - :param root: Element from xml tree - :param tag: Elements with this tag will have their text replaced - :param new_value: New text value - :param attrs : If provided, adds a constraint on the element to find (attributes must match) - :return: - """ - changed = [] - for elem in root.iter('*'): - node_space, _ = remove_namespace(elem.tag) - if node_space == tag: - if not attrs: - elem.text = str(new_value) - changed.append(elem) - elif attrs.items() <= elem.attrib.items(): - elem.text = str(new_value) - changed.append(elem) - return changed - - -# @get_modifications -def change_elm(root: Element, rpath: str, new_value: str, attr_to_change: str = None): - """ - Changes the text or the attribute's value of a particular element - :param root: - 
:param rpath: relative path of the element in the root - :param new_value: - :param attr_to_change: If provided, the changed value will be the attribute's one - :return: - """ - elements, indexes = find_element_by_path(root, rpath) - - if not elements: - print('\nWARNING : (change_elm) no element found with this path : {}\n'.format(rpath)) - if len(elements) > 1: - print('\nWARNING : multiple elements found with this path : {}'.format(rpath)) - print('The value will be changed for all these elements\n') - - for elem in elements: - if attr_to_change: - elem.attrib[attr_to_change] = new_value - else: - elem.text = new_value - - return elements - - -# @get_modifications -def copy_children(root_in: Element, ini_rpath: str, root_out: Element, out_rpath: str): - """ - Copies all children from root_in's element to root_out's one - :param root_in: - :param ini_rpath: - :param root_out: - :param out_rpath: - :return: - """ - - changed = [] - - out_elem, _ = find_element_by_path(root_out, out_rpath) - ini_elem, _ = find_element_by_path(root_in, ini_rpath) - - if len(out_elem) != 1 or len(ini_elem) != 1: - return changed - out_elem = out_elem[0] - ini_elem = ini_elem[0] - - for idx, child in enumerate(ini_elem.getchildren()): - out_elem.insert(idx, child) - changed.append(child) - - replace_namespace_recursively(out_elem, root_out) - - return changed - - -def replace_namespace(elem: Element, root_bb: Element): - """ - Finds in the root_bb the corresponding 'element' to elem, and changes elem's namespace with the 'element's one - :param elem: - :return: - """ - - tag, namespace = remove_namespace(elem.tag) - - for e in root_bb.iter('*'): - e_tag, bb_ns = remove_namespace(e.tag) - if e_tag == tag and e.attrib == elem.attrib and namespace and bb_ns: - elem.tag = bb_ns + tag - - -def replace_namespace_recursively(root: Element, root_bb: Element): - for elem in root.iter('*'): - replace_namespace(elem, root_bb) - - -# @get_modifications -def create_child(root: Element, rpath: str, tag: str, text: str = None, attribs=None): - if attribs is None: - attribs = {} - parent_elm, _ = find_element_by_path(root, rpath) - if len(parent_elm) > 1 or len(parent_elm) == 0: - print('(create_child) Multiple ot 0 elements found with this path {}'.format(rpath), - '\n Will not create element under.') - return [] - - child = xml.etree.ElementTree.SubElement(parent_elm[0], tag, attrib=attribs, text=text) - - return [child] - - -# @get_modifications -def copy_elements(elements_to_copy: list, root_in, root_out, root_bb): - """ - Finds matching elements in elements_to_copy, and replaces them in the root_out. - Supports some xpath queries. 
- :param elements_to_copy: List of paths to the nodes we want to copy from the initial MTD file - :return: - """ - - changed = [] - for elem_path in elements_to_copy: - - out_elems, indexes = find_element_by_path(root_out, elem_path) - ini_elems, _ = find_element_by_path(root_in, elem_path) - - if len(out_elems) == len(ini_elems): - if len(out_elems) > 1: - print('WARNING : (copy_elements) multiple elements found for {}'.format(elem_path)) - for out_elem, ini_elem, idx in zip(out_elems, ini_elems, indexes): - parent = getParentObjectNode(root_out, out_elem) - parent.remove(out_elem) - new_elem = copy.copy(ini_elem) - parent.insert(idx, new_elem) - - replace_namespace_recursively(new_elem, root_bb) - [changed.append(e) for e in new_elem.iter('*')] - - return changed diff --git a/sen2like/sen2like/core/QI_MTD/xsd_files/L2F_QI_Report.xsd b/sen2like/sen2like/core/QI_MTD/xsd_files/L2F_QUALITY.xsd similarity index 100% rename from sen2like/sen2like/core/QI_MTD/xsd_files/L2F_QI_Report.xsd rename to sen2like/sen2like/core/QI_MTD/xsd_files/L2F_QUALITY.xsd diff --git a/sen2like/sen2like/core/QI_MTD/xsd_files/L2H_QI_Report.xsd b/sen2like/sen2like/core/QI_MTD/xsd_files/L2H_QUALITY.xsd similarity index 100% rename from sen2like/sen2like/core/QI_MTD/xsd_files/L2H_QI_Report.xsd rename to sen2like/sen2like/core/QI_MTD/xsd_files/L2H_QUALITY.xsd diff --git a/sen2like/sen2like/core/S2L_config.py b/sen2like/sen2like/core/S2L_config.py index 0dd2168..86f217c 100644 --- a/sen2like/sen2like/core/S2L_config.py +++ b/sen2like/sen2like/core/S2L_config.py @@ -3,12 +3,17 @@ # V. Debaecker (TPZ-F) 2018 import configparser +import datetime +import hashlib +import json import logging import os +import xmlschema + +from argparse import Namespace from collections import OrderedDict from xml.etree import ElementTree -import xmlschema # INTERNAL CONFIGURATION (static) @@ -134,7 +139,7 @@ def overload(self, dic): element.text = str(value) break else: - logger.warning("Can not overload parameter '{}' (not found)".format(option)) + logger.warning("Can not overload parameter '%s' (not found)", option) def savetofile(self, config_file): """Save configuration file into ini format.""" @@ -239,7 +244,7 @@ def overload(self, dic): self.configObject.set(section, option, str(value)) break else: - logger.warning("Can not overload parameter '{}' (not found)".format(option)) + logger.warning("Can not overload parameter '%s' (not found)", option) def savetofile(self, configfile): # check if dir exists @@ -264,9 +269,9 @@ def __init__(self, configuration_file=None): def initialize(self, config_file): if not os.path.exists(config_file): - logger.error("Configuration file does not exists: {}".format(config_file)) + logger.error("Configuration file does not exists: %s", config_file) return False - logger.debug("Reading configuration file: {}".format(os.path.abspath(config_file))) + logger.info("Reading configuration file: %s", os.path.abspath(config_file)) self.parser = self.parsers.get(os.path.splitext(config_file)[-1]) if self.parser is None: logger.error("Unsupported configuration file format.") @@ -277,7 +282,73 @@ def initialize(self, config_file): def __getattr__(self, item): if item in ["display", "get", "getboolean", "getfloat", "set", "overload", "savetofile", "get_section"]: return getattr(self.parser, item) - raise AttributeError("'S2L_Config' object has no attribute '%s'" % item) + raise AttributeError(f"'S2L_Config' object has no attribute '{item}'") + + def _compute_config_hash(self, args): + """Compute hash from arguments and 
configuration.
+
+        :param args: Tool arguments.
+        :return: Hexdigest of the hash.
+        """
+
+        # debug
+        import copy
+        exclude_list = ['parallelize_bands']
+        dc = copy.deepcopy(args.__dict__)
+        for exc in exclude_list:
+            dc.pop(exc)
+        dc = str(dc)
+
+        # Prod
+        # dc = str(args.__dict__)
+
+        # Configuration hash
+        if self.parser.config_file is not None:
+            with open(self.parser.config_file) as file:
+                file_content = file.read()
+            _hash = hashlib.md5(file_content.encode())
+            _hash.update(dc.encode())
+            return _hash.hexdigest()
+
+    def update_with_args(self, args: Namespace, tile=None):
+        """Update config with the given arguments.
+
+        Args:
+            args (Namespace): parsed program args
+            tile (str, optional): tile name. Defaults to None.
+        """
+        # init S2L_config and save to wd
+        if not self.initialize(args.S2L_configfile):
+            return
+
+        if args.confParams is not None:
+            self.overload(args.confParams)
+
+        # set working dir
+        date_now = datetime.datetime.utcnow().strftime('%Y%m%dT_%H%M%S')
+        output_folder = f'{"" if args.no_log_date else f"{date_now}_"}{self._compute_config_hash(args)}'
+        self.set('wd', os.path.join(args.wd, output_folder))
+
+        references_map_file = self.get('references_map')
+        if args.refImage:
+            self.set('refImage', args.refImage)
+        elif references_map_file and tile:
+            if os.path.isfile(references_map_file):
+                # load dataset
+                with open(references_map_file) as j:
+                    references_map = json.load(j)
+                self.set('refImage', references_map.get(tile))
+            else:
+                logger.warning("The reference path %s doesn't exist, considering refImage as None.", references_map_file)
+                self.set('refImage', None)
+        else:
+            self.set('refImage', None)
+        self.set('hlsplus', self.getboolean('doPackager') or self.getboolean('doPackagerL2F'))
+        self.set('debug', args.debug)
+        self.set('generate_intermediate_products', args.generate_intermediate_products)
+        if hasattr(args, 'l2a'):
+            self.set('s2_processing_level', 'LEVEL2A' if args.l2a else "LEVEL1C")
 
 
 config = S2L_Config()
diff --git a/sen2like/sen2like/core/S2L_tools.py b/sen2like/sen2like/core/S2L_tools.py
index 751ffc0..372ae08 100644
--- a/sen2like/sen2like/core/S2L_tools.py
+++ b/sen2like/sen2like/core/S2L_tools.py
@@ -11,14 +11,14 @@
 log = logging.getLogger("Sen2Like")
 
 
-def quicklook(pd, images, bands, qlpath, quality=95, xRes=30, yRes=30, format='JPEG', creationOptions: list = None):
+def quicklook(product, images, bands, qlpath, quality=95, xRes=30, yRes=30, out_format='JPEG', creationOptions: list = None, offset: int = 0):
     """
-    :param pd: S2L_Product object
+    :param product: S2L_Product object
     :param images: list of image filepaths
     :param bands: List of 3 band index for [R, G, B]
     :param qlpath: output file path
-    :return:
+    :return: output file path if any otherwise None
     """
 
     imagefiles = []
@@ -26,7 +26,7 @@ def quicklook(pd, images, bands, qlpath, quality=95, xRes=30, yRes=30, format='J
     # bands for rgb
     for band in bands:
         if band not in images.keys():
-            log.warning('Bands not available for quicklook ({})'.format(bands))
+            log.warning('Bands not available for quicklook (%s)', bands)
             return None
         else:
             imagefiles.append(images[band])
@@ -46,7 +46,7 @@ def quicklook(pd, images, bands, qlpath, quality=95, xRes=30, yRes=30, format='J
     vrtpath = qlpath + '.vrt'
     gdal.BuildVRT(vrtpath, imagefiles, separate=True)
-    log.debug("save in : " + vrtpath)
+    log.debug("save in : %s", vrtpath)
 
     # Remove nodata attribut
     vrt = gdal.Open(vrtpath, gdal.GA_Update)
@@ -58,43 +58,48 @@ def quicklook(pd, images, bands, qlpath, quality=95, xRes=30, yRes=30,
format='J # default src_min = 0 - #src_min = 1 + # src_min = 1 src_max = 2500 dst_min = 0 - #dst_min = 1 + # dst_min = 1 dst_max = 255 if bands == ["B12", "B11", "B8A"]: src_max = 4000 # FIXME: site specific should be in configuration - if pd.mtl.mgrs == '34RGS': + if product.mtl.mgrs == '34RGS': src_max = 4000 if bands == ["B12", "B11", "B8A"]: src_max = 10000 - scale = [[src_min, src_max, dst_min, dst_max]] + scale = [[src_min + offset, src_max + offset, dst_min, dst_max]] # do gdal... - if format == 'GTIFF': - co = creationOptions # Because the driver does not support QUALITY={quality} as co when format='Gtiff' + if out_format == 'GTIFF': + # Because the driver does not support QUALITY={quality} as create_options when format='Gtiff' + create_options = creationOptions else: - co = [f'QUALITY={quality}'] if creationOptions is None else [f'QUALITY={quality}'] + creationOptions - gdal.Translate(qlpath, vrtpath, xRes=xRes, yRes=yRes, resampleAlg='bilinear', bandList=band_list, - outputType=gdal.GDT_Byte, format=format, creationOptions=co, + create_options = [f'QUALITY={quality}'] if creationOptions is None else [f'QUALITY={quality}'] + creationOptions + + dataset = gdal.Translate(qlpath, vrtpath, xRes=xRes, yRes=yRes, resampleAlg='bilinear', bandList=band_list, + outputType=gdal.GDT_Byte, format=out_format, creationOptions=create_options, scaleParams=scale) - log.info("save in : {}".format(qlpath)) + + log.info("save in : %s", qlpath) quantification_value = 10000. scaling = (src_max - src_min) / quantification_value / (dst_max - dst_min) - offset = 0 try: - dataset = gdal.Open(qlpath) + for i in band_list: dataset.GetRasterBand(i).SetScale(scaling) - dataset.GetRasterBand(i).SetOffset(offset) + # force offset to 0 + dataset.GetRasterBand(i).SetOffset(0) dataset.GetRasterBand(i).DeleteNoDataValue() + log.info("scale and offset information added to the metadata of the quicklook image") + dataset = None except Exception as e: log.warning(e, exc_info=True) @@ -103,9 +108,11 @@ def quicklook(pd, images, bands, qlpath, quality=95, xRes=30, yRes=30, format='J # clean os.remove(vrtpath) + return qlpath + def out_stat(input_matrix, logger, label=""): - logger.debug('Maximum {} : {}'.format(label, np.max(input_matrix))) - logger.debug('Mean {} : {}'.format(label, np.mean(input_matrix))) - logger.debug('Std dev {} : {}'.format(label, np.std(input_matrix))) - logger.debug('Minimum {} : {}'.format(label, np.min(input_matrix))) + logger.debug('Maximum %s : %s', label, np.max(input_matrix)) + logger.debug('Mean %s : %s', label, np.mean(input_matrix)) + logger.debug('Std dev %s : %s', label, np.std(input_matrix)) + logger.debug('Minimum %s : %s', label, np.min(input_matrix)) diff --git a/sen2like/sen2like/core/argparser.py b/sen2like/sen2like/core/argparser.py new file mode 100644 index 0000000..d449171 --- /dev/null +++ b/sen2like/sen2like/core/argparser.py @@ -0,0 +1,164 @@ +""" +Module to manage S2L arguments +""" +import os +from argparse import ArgumentParser +from dataclasses import dataclass +from datetime import datetime +from version import __version__ + + +def _get_date(date_str: str) -> datetime: + return datetime.strptime(date_str, "%Y-%m-%d") if date_str else date_str + + +@dataclass +class DateRange: + """A simple date range container + """ + start_date: datetime = None + end_date: datetime = None + + +# pylint: disable=too-few-public-methods +class Mode: + """program mode constants + """ + SINGLE_TILE = 'single-tile-mode' + MULTI_TILE = 'multi-tile-mode' + PRODUCT = 'product-mode' + ROI_BASED 
= 'roi-based-mode'
+
+
+class S2LArgumentParser(ArgumentParser):
+    """ArgumentParser inheritance that configures the S2L argument parser
+    """
+
+    def __init__(self, config_dir: str):
+        """Init and configure S2LArgumentParser
+
+        Args:
+            config_dir (str): default conf dir path
+
+        """
+
+        super().__init__()
+        self._config_dir = config_dir
+        self._configure_arguments()
+        self._args = None
+
+    def parse_args(self, args=None, namespace=None):
+        self._args = super().parse_args(args, namespace)
+        return self._args
+
+    def get_date_range(self) -> DateRange:
+        """Get start/end date from arguments
+
+        Returns:
+            DateRange: initialized 'DateRange' if possible or empty 'DateRange' (with None values)
+        """
+        if self._args.operational_mode in [Mode.SINGLE_TILE, Mode.MULTI_TILE, Mode.ROI_BASED]:
+            start_date = _get_date(self._args.start_date)
+            end_date = _get_date(self._args.end_date)
+            return DateRange(start_date, end_date)
+
+        # empty range
+        return DateRange()
+
+    def _configure_arguments(self):
+        """Configure this parser with common arguments and 4 subparsers
+        (product-mode, single-tile-mode, multi-tile-mode, roi-based-mode).
+        Subparsers have specific and common arguments.
+
+        """
+
+        # use parser_class=ArgumentParser to avoid an error on subparsers.add_parser
+        # see https://stackoverflow.com/questions/47833828/subparsers-add-parser-typeerror-init-got-an-unexpected-keyword-argument
+        subparsers = self.add_subparsers(dest='operational_mode', help="Operational mode", parser_class=ArgumentParser)
+        self._add_common_arguments(self)
+
+        # Product mode arguments
+        sp_product = subparsers.add_parser(Mode.PRODUCT, help="Process a single product")
+        sp_product.add_argument('product', help="Landsat8 L1 product path or Sentinel2 L1C product path")
+        self._add_common_arguments(sp_product)
+        sp_product.add_argument("--tile", help="Id of the MGRS tile to process", required=True)
+
+        # Single tile mode arguments
+        sp_single_tile_mode = subparsers.add_parser(Mode.SINGLE_TILE, help='Process all products on an MGRS tile')
+        sp_single_tile_mode.add_argument("tile", help="Id of the MGRS tile to process")
+        # self._add_tile_arguments(sp_single_tile_mode)
+        self._add_tile_mode_arguments(sp_single_tile_mode)
+        self._add_common_arguments(sp_single_tile_mode)
+
+        # Multi tile mode arguments
+        sp_multi_tile_mode = subparsers.add_parser(Mode.MULTI_TILE, help='Process all products on an ROI')
+        sp_multi_tile_mode.add_argument("roi", help="Json file containing the ROI to process")
+        self._add_tile_mode_arguments(sp_multi_tile_mode)
+        sp_multi_tile_mode.add_argument("--jobs", "-j", dest="jobs", help="Number of tiles to process in parallel",
+                                        default=None)
+        self._add_common_arguments(sp_multi_tile_mode)
+
+        # ROI based mode arguments
+        roi_based_mode = subparsers.add_parser(
+            Mode.ROI_BASED,
+            help='Process all products that fully contain an ROI. The ROI footprint must be FULLY INSIDE an MGRS tile.')
+        roi_based_mode.add_argument("roi", help="Json file containing the ROI to process")
+        roi_based_mode.add_argument(
+            "--tile",
+            help="MGRS Tile Code: force processing of a specific tile in case several MGRS tiles contain the ROI footprint",
+            required=False)
+        self._add_tile_mode_arguments(roi_based_mode)
+        self._add_common_arguments(roi_based_mode)
+
+    def _add_common_arguments(self, parser: ArgumentParser):
+        """Add common arguments to the given parser
+
+        Args:
+            parser (ArgumentParser): parser to add arguments to
+
+        """
+
+        parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__)
+        parser.add_argument("--refImage", dest="refImage", type=str,
+                            help="Reference image (use as geometric reference)", metavar="PATH", default=None)
+        parser.add_argument("--wd", dest="wd", type=str,
+                            help="Working directory (default : /data/production/wd)", metavar="PATH",
+                            default='/data/production/wd')
+        parser.add_argument("--conf", dest="S2L_configfile", type=str,
+                            help="S2L_configuration file (Default: SEN2LIKE_DIR/conf/S2L_config.ini)", metavar="PATH",
+                            default=os.path.join(self._config_dir, '..', 'conf', 'config.ini'))
+        parser.add_argument("--confParams", dest="confParams", type=str,
+                            help='Overload parameter values (Default: None). '
+                                 'Given as a "key=value" comma-separated list. '
+                                 'Example: --confParams "doNbar=False,doSbaf=False"',
+                            metavar="STRLIST", default=None)
+        parser.add_argument("--bands", dest="bands", type=lambda s: [i for i in s.split(',')],
+                            help="S2 bands to process as comma-separated list (Default: ALL bands)", metavar="STRLIST",
+                            default=None)
+        parser.add_argument("--no-run", dest="no_run", action="store_true",
+                            help="Do not start process and only list products (default: False)")
+        parser.add_argument("--intermediate-products", dest="generate_intermediate_products", action="store_true",
+                            help="Generate intermediate products (default: False)")
+        parser.add_argument("--parallelize-bands", action="store_true",
+                            help="Process bands in parallel (default: False)")
+        debug_group = parser.add_argument_group('Debug arguments')
+        debug_group.add_argument("--debug", "-d", dest="debug", action="store_true",
+                                 help="Enable Debug mode (default: False)")
+        debug_group.add_argument("--no-log-date", dest="no_log_date", action="store_true",
+                                 help="Do not store date in log (default: False)")
+
+    @staticmethod
+    def _add_tile_mode_arguments(parser: ArgumentParser):
+        """Add arguments for *-tile-mode parsers, aka start-date, end-date and l2a
+
+        Args:
+            parser (ArgumentParser): parser to add arguments to
+
+        """
+        parser.add_argument("--start-date", dest="start_date",
+                            help="Beginning of period (format YYYY-MM-DD)",
+                            default='')
+        parser.add_argument("--end-date", dest="end_date", help="End of period (format YYYY-MM-DD)",
+                            default='')
+        parser.add_argument("--l2a", help="Processing level Level-2A for S2 products if set (default: L1C)",
+                            action='store_true')
diff --git a/sen2like/sen2like/core/file_extractor/__init__.py b/sen2like/sen2like/core/file_extractor/__init__.py
new file mode 100644
index 0000000..e69de29
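To make the new CLI surface easy to sanity-check, here is a minimal usage sketch of `S2LArgumentParser` (assuming the `sen2like` package directory is on `PYTHONPATH`; the tile id, dates and paths below are illustrative, not taken from the patch):

```python
from core.argparser import Mode, S2LArgumentParser

# config_dir is only used to build the default --conf path.
parser = S2LArgumentParser(config_dir="/opt/sen2like/sen2like")
args = parser.parse_args([
    Mode.SINGLE_TILE, "31TFJ",        # hypothetical MGRS tile id
    "--start-date", "2022-06-01",
    "--end-date", "2022-06-30",
    "--wd", "/data/production/wd",
])

date_range = parser.get_date_range()  # DateRange with parsed datetimes
print(args.operational_mode, args.tile, date_range.start_date)
```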
diff --git a/sen2like/sen2like/core/file_extractor/file_extractor.py b/sen2like/sen2like/core/file_extractor/file_extractor.py
new file mode 100644
index 0000000..14399c3
--- /dev/null
+++ b/sen2like/sen2like/core/file_extractor/file_extractor.py
@@ -0,0 +1,891 @@
+"""Input product file extractor module for the needs of the S2L output product.
+This is where files, such as masks, are created for the output product from the input product.
+"""
+import abc
+import datetime
+import logging
+import os
+
+from dataclasses import dataclass
+from typing import Optional
+from xml.dom import minidom
+
+import numpy as np
+
+from fmask import landsatangles
+from fmask import config as fmask_config
+from osgeo import gdal
+from rios import fileinfo
+from skimage.transform import resize as skit_resize
+
+from atmcor import get_s2_angles as s2_angles
+
+from core.image_file import S2L_ImageFile
+from core.readers.reader import BaseReader
+from core.readers.sentinel2 import Sentinel2MTL
+from core.readers.landsat import LandsatMTL
+from core.readers.sentinel2_maja import Sentinel2MajaMTL
+from core.readers.landsat_maja import LandsatMajaMTL
+from core.file_extractor.landsat_utils import downsample_coarse_image, make_angles_image
+
+log = logging.getLogger("Sen2Like")
+
+NO_DATA_MASK_FILE_NAME = 'nodata_pixel_mask.tif'
+ANGLE_IMAGE_FILE_NAME = 'tie_points.tif'
+
+
+@dataclass
+class MaskImage:
+    """Dataclass to write a mask file having:
+    - 'mask_array' content
+    - 'mask_filename' as full name (full path) to write it
+    - 'resolution' as output resolution
+    - 'orig_image' is the S2L_ImageFile used to write the mask;
+    it should be the original file from which the mask is extracted/generated.
+    'orig_image' can be None; in this case, the 'write' function has no effect
+    """
+    orig_image: S2L_ImageFile
+    mask_array: np.ndarray
+    mask_filename: str
+    resolution: int
+
+    def write(self):
+        """Write the mask in 'mask_filename' using 'orig_image'"""
+        if self.orig_image:
+            mask = self.orig_image.duplicate(self.mask_filename, array=self.mask_array, res=self.resolution)
+            mask.write(creation_options=['COMPRESS=LZW'])
+            log.info('Written: %s', self.mask_filename)
+        else:
+            log.warning('Cannot write: %s, please verify it has been written', self.mask_filename)
+            # this case happens in S2FileExtractor._create_valid_mask_form_l1c_gml,
+            # the mask is already created and written
+            # shall we find a way to not write it there and create it here?
+
+
+@dataclass
+class ImageMasks:
+    """'MaskImage' container for validity and no data mask
+    """
+    no_data_mask: MaskImage
+    validity_mask: MaskImage
+
+    def write(self):
+        """Write image masks using 'MaskImage.write'"""
+        self.no_data_mask.write()
+        self.validity_mask.write()
+
+
+@dataclass
+class MaskInfo:
+    """Mask information used to compute valid and nodata pixel percentages"""
+    mask_size: int
+    nb_valid_pixel: int
+    nb_nodata_pixel: int
+
+    def get_valid_pixel_percentage(self) -> float:
+        """get valid pixel percentage, considering only non-nodata pixels
+
+        Returns:
+            float: valid pixel percentage
+        """
+        return (self.nb_valid_pixel * 100) / (self.mask_size - self.nb_nodata_pixel)
+
+    def get_nodata_pixel_percentage(self) -> float:
+        """get nodata pixel percentage
+
+        Returns:
+            float: nodata pixel percentage
+        """
+        return (self.nb_nodata_pixel * 100) / self.mask_size
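As a quick sanity check of the two percentage formulas (the numbers below are invented; the import assumes the `sen2like` package is on `PYTHONPATH`):

```python
from core.file_extractor.file_extractor import MaskInfo

# A 100-pixel mask with 60 valid pixels and 20 nodata pixels.
info = MaskInfo(mask_size=100, nb_valid_pixel=60, nb_nodata_pixel=20)

assert info.get_nodata_pixel_percentage() == 20.0  # 20 * 100 / 100
# The valid percentage is computed against non-nodata pixels only:
assert info.get_valid_pixel_percentage() == 75.0   # 60 * 100 / (100 - 20)
```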
+
+
+class InputFileExtractor(abc.ABC):
+    """Abstract class for input product file extractors for the needs of the S2L product.
+    For example, it has the responsibility to create the validity mask of the S2L product
+    from the masks or SCL file of the input product
+    """
+
+    def __init__(self, input_product: BaseReader):
+        self._input_product: BaseReader = input_product
+
+    @abc.abstractmethod
+    def _get_valid_pixel_mask(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask.
+        nodata pixel mask name is nodata_pixel_mask.tif in the same folder as the valid pixel mask
+
+        Args:
+            mask_filename (str): valid pixel mask file path
+
+        Returns:
+            ImageMasks: generated mask container
+        """
+
+    @abc.abstractmethod
+    def get_angle_images(self, out_file: str = None) -> str:
+        """Generate the angles image with the following band order:
+        SAT_AZ, SAT_ZENITH, SUN_AZ, SUN_ZENITH.
+        The unit is DEGREES, stored with a scale factor of 100.
+
+        Args:
+            out_file (str, optional): Name of the output tif containing all angles images. Defaults to None.
+
+        Returns:
+            str: output filename; if 'out_file' is None, implementations should default to 'ANGLE_IMAGE_FILE_NAME' in the input product folder
+        """
+
+    def get_valid_pixel_mask(self, mask_filename, roi_file_path: str = None) -> Optional[ImageMasks]:
+        """Create validity mask and nodata pixel mask.
+        nodata pixel mask name is nodata_pixel_mask.tif in the same folder as the valid pixel mask.
+        The masks are written before the optional ROI is applied.
+
+        Args:
+            mask_filename (str): valid pixel mask file path
+            roi_file_path (str): path to the ROI file to apply to the mask for roi-based-mode. Defaults to None
+
+        Returns:
+            ImageMasks: generated mask container
+        """
+
+        image_masks = self._get_valid_pixel_mask(mask_filename)
+        if not image_masks:
+            return None
+
+        image_masks.write()
+
+        # ROI based mode : apply ROI masks
+        if roi_file_path:
+            image_masks = self._apply_roi(image_masks, roi_file_path)
+
+        return image_masks
+
+    def _apply_roi(self, image_masks: ImageMasks, roi_file_path: str) -> ImageMasks:
+        """Apply ROI to the masks for the roi-based-mode capabilities.
+        Updates 'MaskImage.mask_array' of each 'MaskImage' in 'image_masks'
+
+        Returns:
+            ImageMasks: Updated image mask after applying ROI to the masks
+        """
+
+        log.info("Apply ROI file %s ", roi_file_path)
+        for mask in [image_masks.validity_mask, image_masks.no_data_mask]:
+            log.info("Apply ROI to mask %s ", mask.mask_filename)
+            src_mask_dataset = gdal.Open(mask.mask_filename)
+            geo_transform = src_mask_dataset.GetGeoTransform()
+            ul_x = geo_transform[0]
+            x_res = geo_transform[1]
+            ul_y = geo_transform[3]
+            y_res = geo_transform[5]
+            res = mask.resolution
+            proj = src_mask_dataset.GetProjection()
+            if res is None:
+                # native geometry (default)
+                res = x_res
+
+            lr_x = ul_x + (src_mask_dataset.RasterXSize * x_res)
+            lr_y = ul_y + (src_mask_dataset.RasterYSize * y_res)
+
+            cutline_blend = 0  # 4 is normally used.
+            output_bounds = [ul_x, lr_y, lr_x, ul_y]
+            options = gdal.WarpOptions(outputType=gdal.GDT_Byte,
+                                       creationOptions=['COMPRESS=LZW'], outputBounds=output_bounds,
+                                       dstSRS=proj,
+                                       targetAlignedPixels=True, xRes=res, yRes=res, dstNodata=0,
+                                       cutlineDSName=roi_file_path,
+                                       cutlineBlend=cutline_blend,
+                                       warpOptions=['NUM_THREADS=ALL_CPUS'], multithread=True)
+
+            mask_dest_path = mask.mask_filename
+            try:
+                dataset = gdal.Warp(
+                    mask_dest_path,
+                    src_mask_dataset,
+                    options=options)
+
+                # Update current 'MaskImage.mask_array' with new mask
+                mask.mask_array = dataset.GetRasterBand(1).ReadAsArray()
+                dataset = None
+
+            except RuntimeError:
+                log.error("Cannot apply ROI to mask %s", mask_dest_path)
+
+            # close src dataset
+            src_mask_dataset = None
+
+        return image_masks
+
+
+class S2FileExtractor(InputFileExtractor):
+    """'InputFileExtractor' implementation for S2 L1C/L2A products
+    """
+
+    def __init__(self, input_product: Sentinel2MTL):
+        super().__init__(input_product)
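The extractors below repeatedly resample classification rasters with `skimage.transform.resize`, always passing `order=0` (nearest neighbour) and `preserve_range=True` so that class codes are neither interpolated nor rescaled to [0, 1]. A standalone sketch with a toy array:

```python
import numpy as np
from skimage.transform import resize as skit_resize

# Toy 2x2 "SCL" patch upsampled from 60 m to 20 m (factor 3).
scl = np.array([[4, 5],
                [0, 7]], dtype=np.uint8)
out = skit_resize(scl, (6, 6), order=0, preserve_range=True).astype(np.uint8)

# Nearest neighbour keeps the original class codes intact:
assert set(np.unique(out)) == {0, 4, 5, 7}
```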
+
+    def _create_masks_from_scl(self, mask_filename: str, res: int) -> ImageMasks:
+        """Create validity mask and nodata pixel mask from the SCL image.
+        Consider as valid pixels:
+        - VEGETATION and NOT_VEGETATED (values 4 and 5)
+        - UNCLASSIFIED (7)
+
+        Args:
+            mask_filename (str): validity mask file name
+            res (int): output mask resolution
+
+        Returns:
+            ImageMasks: mask container for future writing
+        """
+        log.info('Generating validity and nodata masks from SCL band')
+        log.debug('Read SCL: %s', self._input_product.scene_classif_band)
+        scl = S2L_ImageFile(self._input_product.scene_classif_band)
+        scl_array = scl.array
+        if scl.xRes != res:
+            shape = (int(scl_array.shape[0] * - scl.yRes / res),
+                     int(scl_array.shape[1] * scl.xRes / res))
+            log.debug(shape)
+            scl_array = skit_resize(scl_array, shape, order=0, preserve_range=True).astype(np.uint8)
+
+        valid_px_mask = np.zeros(scl_array.shape, np.uint8)
+        # Consider as valid pixels :
+        # VEGETATION and NOT_VEGETATED (values 4 and 5)
+        # UNCLASSIFIED (7)
+        valid_px_mask[scl_array == 4] = 1
+        valid_px_mask[scl_array == 5] = 1
+        valid_px_mask[scl_array == 7] = 1
+        # valid_px_mask[scl_array == 11] = 1
+
+        validity_mask = MaskImage(scl, valid_px_mask, mask_filename, res)
+
+        # nodata mask
+        nodata = np.ones(scl_array.shape, np.uint8)
+        nodata[scl_array == 0] = 0
+
+        nodata_mask_filename = os.path.join(os.path.dirname(mask_filename), NO_DATA_MASK_FILE_NAME)
+
+        no_data_mask = MaskImage(scl, nodata, nodata_mask_filename, res)
+
+        return ImageMasks(no_data_mask, validity_mask)
+
+    def _create_nodata_mask_from_l1c(self, image: S2L_ImageFile, nodata_mask_filename: str, res: int) -> MaskImage:
+        """Create the nodata 'MaskImage' from an L1C S2L_ImageFile
+
+        Args:
+            image (S2L_ImageFile): L1C S2L_ImageFile from which to extract the nodata mask
+            nodata_mask_filename (str): output path of the nodata pixel mask
+            res (int): output mask resolution
+
+        Returns:
+            MaskImage: nodata mask container
+        """
+        array = image.array
+        nodata = np.ones(array.shape, np.uint8)
+        # shall be 0, but due to compression artefacts, threshold increased to 4:
+        nodata[array <= 4] = 0
+
+        # resize nodata to output res
+        shape = (int(nodata.shape[0] * - image.yRes / res),
+                 int(nodata.shape[1] * image.xRes / res))
+        log.debug(shape)
+        nodata = skit_resize(nodata, shape, order=0, preserve_range=True).astype(np.uint8)
+
+        return MaskImage(image, nodata, nodata_mask_filename, res)
+
+    def _create_valid_mask_form_l1c_gml(self, mask_filename: str, nodata_mask: MaskImage, res: int) -> MaskImage:
+        """Create valid pixel mask FILE from the current cloud mask.
+        Current cloud mask MUST be a gml file.
+ The nodata mask is applied to the generated valid mask + + Args: + mask_filename (str): validity mask output file path + nodata_mask (MaskImage): nodata mask to apply to the validity mask + res (int): output mask resolution + + Returns: + MaskImage: valid mask container + """ + log.info('Generating validity mask from cloud mask') + log.debug('Read cloud mask: %s', self._input_product.cloudmask) + # Check if any cloud feature in gml + dom = minidom.parse(self._input_product.cloudmask) + nb_cloud = len(dom.getElementsByTagName('eop:MaskFeature')) + + # rasterize + # make byte mask 0/1, LZW compression + valid_px_mask = None + if nb_cloud > 0: + output_bounds = [self._input_product.ULX, self._input_product.LRY, + self._input_product.LRX, self._input_product.ULY] + + if not os.path.exists(os.path.dirname(mask_filename)): + os.makedirs(os.path.dirname(mask_filename)) + + gdal.Rasterize(mask_filename, self._input_product.cloudmask, outputType=gdal.GDT_Byte, + creationOptions=['COMPRESS=LZW'], + burnValues=0, initValues=1, outputBounds=output_bounds, outputSRS=self._input_product.epsg, + xRes=res, yRes=res) + + # apply nodata to validity mask + dataset = gdal.Open(mask_filename, gdal.GA_Update) + valid_px_mask = dataset.GetRasterBand(1).ReadAsArray() + valid_px_mask[nodata_mask.mask_array == 0] = 0 + dataset.GetRasterBand(1).WriteArray(valid_px_mask) + dataset = None + log.info('Written: %s', mask_filename) + return MaskImage(None, valid_px_mask, mask_filename, res) + else: + # no cloud mask, copy nodata mask + return MaskImage(nodata_mask.orig_image, nodata_mask.mask_array, mask_filename, res) + + def _create_valid_mask_form_l1c_jp2( + self, mask_filename: str, image: S2L_ImageFile, nodata: np.ndarray, res: int) -> MaskImage: + """Create valid pixel 'MaskImage' from the current cloud mask. + Current cloud mask MUST be a jp2 file. + The nodata mask is applied to the generated valid mask + + Args: + mask_filename (str): output path of the valid pixel mask + image (S2L_ImageFile): S2L_ImageFile + nodata (np.ndarray): nodata mask + res (int): output mask resolution + + Returns: + MaskImage: valid mask container + """ + log.info('Generating validity mask from cloud mask, baseline 4.0') + log.debug('mask filename: %s', mask_filename) + + log.debug('Read cloud mask: %s', self._input_product.cloudmask) + dataset = gdal.Open(self._input_product.cloudmask, gdal.GA_ReadOnly) + clm_1 = dataset.GetRasterBand(1).ReadAsArray() + clm_2 = dataset.GetRasterBand(2).ReadAsArray() + clm_3 = dataset.GetRasterBand(3).ReadAsArray() + tot = clm_1 + clm_2 + clm_3 + valid_px_mask = np.zeros(clm_1.shape, np.uint8) + valid_px_mask[tot == 0] = 1 + # resize valid_px to output res: + shape = (int(valid_px_mask.shape[0] * - image.yRes / res), + int(valid_px_mask.shape[1] * image.xRes / res)) + valid_px_mask = skit_resize(valid_px_mask, shape, order=0, preserve_range=True).astype(np.uint8) + # Applied no data mask: + valid_px_mask[nodata == 0] = 0 + + # This is the way to close dataset + dataset = None + + return MaskImage(image, valid_px_mask, mask_filename, res) + + def _create_masks_from_l1c(self, mask_filename, res) -> ImageMasks: + """Create validity mask and nodata pixel mask from L1C image. 
+        Use gml or jp2 cloud mask to get valid pixels
+        Args:
+            mask_filename (str): file path of the output valid pixel mask
+            res (int): output mask resolution
+
+        Returns:
+            ImageMasks: masks container for future writing
+        """
+        # Nodata Mask
+        nodata_ref_band = 'B01'
+        band_path = self._input_product.bands[nodata_ref_band]
+        log.info('Generating nodata mask from band %s', nodata_ref_band)
+        log.debug('Read band file: %s', band_path)
+        image = S2L_ImageFile(band_path)
+        # we do not use NO_DATA_MASK_FILE_NAME
+        nodata_mask_filename = os.path.join(os.path.dirname(mask_filename),
+                                            f'nodata_pixel_mask_{nodata_ref_band}.tif')
+        nodata_mask = self._create_nodata_mask_from_l1c(image, nodata_mask_filename, res)
+
+        validity_mask = None
+        if self._input_product.cloudmask:
+            # Cloud mask
+            ext = os.path.splitext(self._input_product.cloudmask)[1]
+            if ext == '.gml':
+                validity_mask = self._create_valid_mask_form_l1c_gml(mask_filename, nodata_mask, res)
+
+            elif ext == '.jp2':
+                validity_mask = self._create_valid_mask_form_l1c_jp2(mask_filename, image, nodata_mask.mask_array, res)
+
+        if not validity_mask:
+            # consider all pixels valid (np.ones needs the array shape, not the array itself)
+            validity_mask = MaskImage(None, np.ones(image.array.shape, np.uint8), mask_filename, res)
+
+        return ImageMasks(nodata_mask, validity_mask)
+
+    def _get_valid_pixel_mask(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask.
+        nodata pixel mask name is nodata_pixel_mask.tif in the same folder as the valid pixel mask
+
+        Args:
+            mask_filename (str): valid pixel mask file path
+
+        Returns:
+            ImageMasks: masks container for future writing
+        """
+        res = 20
+        image_masks = None
+        log.debug('get valid pixel mask')
+        if self._input_product.scene_classif_band:
+            image_masks = self._create_masks_from_scl(mask_filename, res)
+        # L1C case for instance -> No SCL, but NODATA and CLD mask
+        else:
+            log.debug('L1C Case')
+            image_masks = self._create_masks_from_l1c(mask_filename, res)
+
+        return image_masks
+
+    def get_angle_images(self, out_file: str = None) -> str:
+        """See 'InputFileExtractor.get_angle_images'
+        """
+        # TODO : maybe refactor to :
+        # - not read mtl_file_name multiple times (multiple usages of extract_viewing_angle and extract_sun_angle)
+        # - change root_dir to the working dir if out_file is None
+        if out_file is not None:
+            root_dir = os.path.dirname(out_file)
+        else:
+            root_dir = os.path.dirname(self._input_product.tile_metadata)
+
+        # Viewing Angles (SAT_AZ / SAT_ZENITH)
+        dst_file = os.path.join(root_dir, 'VAA.tif')
+        out_file_list = s2_angles.extract_viewing_angle(self._input_product.tile_metadata, dst_file, 'Azimuth')
+
+        dst_file = os.path.join(root_dir, 'VZA.tif')
+        out_file_list.extend(s2_angles.extract_viewing_angle(self._input_product.tile_metadata, dst_file, 'Zenith'))
+
+        # Solar Angles (SUN_AZ, SUN_ZENITH)
+        dst_file = os.path.join(root_dir, 'SAA.tif')
+        s2_angles.extract_sun_angle(self._input_product.tile_metadata, dst_file, 'Azimuth')
+        out_file_list.append(dst_file)
+
+        dst_file = os.path.join(root_dir, 'SZA.tif')
+        s2_angles.extract_sun_angle(self._input_product.tile_metadata, dst_file, 'Zenith')
+        out_file_list.append(dst_file)
+
+        out_vrt_file = os.path.join(root_dir, 'tie_points.vrt')
+        gdal.BuildVRT(out_vrt_file, out_file_list, separate=True)
+
+        if out_file is not None:
+            out_tif_file = out_file
+        else:
+            out_tif_file = os.path.join(root_dir, ANGLE_IMAGE_FILE_NAME)
+
+        gdal.Translate(out_tif_file, out_vrt_file, format="GTiff")
+
+        # TODO: strange, see with the team
+        # self.angles_file = out_vrt_file
+        log.info('SAT_AZIMUTH, SAT_ZENITH, SUN_AZIMUTH, SUN_ZENITH')
+        log.info('UNIT = DEGREES (scale: x100)')
+        log.info('Angles file: %s', out_tif_file)
+        return out_tif_file
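The resulting angles file stores degrees scaled by 100, as the log messages state. A hedged sketch of how a consumer could read it back (band order as logged; the file path is illustrative):

```python
from osgeo import gdal

dataset = gdal.Open("tie_points.tif")  # illustrative path, sits next to the tile metadata
for index, name in enumerate(["SAT_AZIMUTH", "SAT_ZENITH", "SUN_AZIMUTH", "SUN_ZENITH"], start=1):
    scaled = dataset.GetRasterBand(index).ReadAsArray()
    degrees = scaled / 100.0  # values are stored as degrees * 100
    print(name, float(degrees.min()), float(degrees.max()))
dataset = None  # close the dataset
```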
+
+
+class LandsatFileExtractor(InputFileExtractor):
+    """'InputFileExtractor' implementation for Landsat L1/L2 products
+    """
+
+    def __init__(self, input_product: LandsatMTL):
+        super().__init__(input_product)
+
+    def _create_masks_from_bqa(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask from the BQA image.
+        Consider as valid pixels :
+        - TODO
+
+        Args:
+            mask_filename (str): validity mask file name
+
+        Returns:
+            ImageMasks: masks container for future writing
+        """
+        log.info('Generating validity and nodata masks from BQA band')
+        log.debug('Read cloud mask: %s', self._input_product.bqa_filename)
+        bqa = S2L_ImageFile(self._input_product.bqa_filename)
+        bqa_array = bqa.array
+
+        # Process Pixel valid 'pre collection
+        # Process Land Water Mask 'collection 1
+        if self._input_product.collection != 'Pre Collection':
+            threshold = 2720  # No land sea mask given with Collection products
+            log.debug(threshold)
+        else:
+            threshold = 20480
+
+        # TODO: Check threshold, 20480 not good for C-2
+        if self._input_product.collection_number == '02':
+            threshold = 21824
+
+        valid_px_mask = np.zeros(bqa_array.shape, np.uint8)
+        valid_px_mask[bqa_array <= threshold] = 1
+        valid_px_mask[bqa_array == 1] = 0  # Remove background
+        valid_px_mask[bqa_array > threshold] = 0
+
+        validity_mask = MaskImage(bqa, valid_px_mask, mask_filename, None)
+
+        # nodata mask (not good when taking it from BQA, getting it from B01):
+        if self._input_product.data_type == 'L2A':
+            image_filename = self._input_product.surf_image_list[0]
+        else:
+            image_filename = self._input_product.dn_image_list[0]
+        image = S2L_ImageFile(image_filename)
+        nodata = image.array.clip(0, 1).astype(np.uint8)
+
+        nodata_mask_filename = os.path.join(
+            os.path.dirname(mask_filename), NO_DATA_MASK_FILE_NAME)
+
+        no_data_mask = MaskImage(image, nodata, nodata_mask_filename, None)
+
+        return ImageMasks(no_data_mask, validity_mask)
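The thresholds above compare the whole 16-bit QA word at once. For context, the Collection 2 QA_PIXEL band is documented by USGS as a bit mask, so a bitwise test makes the intent more explicit. A hedged sketch (assuming the Collection 2 layout: bit 0 fill, bit 1 dilated cloud, bit 2 cirrus, bit 3 cloud, bit 4 cloud shadow; this is not the method used by the code above):

```python
import numpy as np

def c2_valid_mask(bqa: np.ndarray) -> np.ndarray:
    """Flag pixels whose fill/cloud-related bits are all clear."""
    unwanted = (1 << 0) | (1 << 1) | (1 << 2) | (1 << 3) | (1 << 4)
    return ((bqa & unwanted) == 0).astype(np.uint8)

# 21824 is the "clear" value used as threshold above; 1 is fill; 22280 has the cloud bit set.
print(c2_valid_mask(np.array([21824, 1, 22280], dtype=np.uint16)))  # -> [1 0 0]
```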
+
+    def _create_masks_from_scl(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask from the SCL image.
+        Consider as valid pixels:
+        - VEGETATION and NOT_VEGETATED (values 4 and 5)
+        - UNCLASSIFIED (7)
+        - SNOW (11) - EXCLUDED
+
+        Args:
+            mask_filename (str): validity mask file name
+
+        Returns:
+            ImageMasks: masks container for future writing
+        """
+        log.info('Generating validity and nodata masks from SCL band')
+        log.debug('Read SCL: %s', self._input_product.scene_classif_band)
+        scl = S2L_ImageFile(self._input_product.scene_classif_band)
+        scl_array = scl.array
+        res = 30
+        if scl.xRes != res:
+            shape = (int(scl_array.shape[0] * - scl.yRes / res), int(scl_array.shape[1] * scl.xRes / res))
+            log.debug(shape)
+            scl_array = skit_resize(scl_array, shape, order=0, preserve_range=True).astype(np.uint8)
+
+        valid_px_mask = np.zeros(scl_array.shape, np.uint8)
+        # Consider as valid pixels :
+        # VEGETATION and NOT_VEGETATED (values 4 and 5)
+        # UNCLASSIFIED (7)
+        # exclude SNOW (11)
+        valid_px_mask[scl_array == 4] = 1
+        valid_px_mask[scl_array == 5] = 1
+        valid_px_mask[scl_array == 7] = 1
+        valid_px_mask[scl_array == 11] = 0
+
+        validity_mask = MaskImage(scl, valid_px_mask, mask_filename, None)
+
+        # nodata mask
+        nodata = np.ones(scl_array.shape, np.uint8)
+        nodata[scl_array == 0] = 0
+
+        nodata_mask_filename = os.path.join(
+            os.path.dirname(mask_filename), NO_DATA_MASK_FILE_NAME)
+
+        no_data_mask = MaskImage(scl, nodata, nodata_mask_filename, None)
+
+        return ImageMasks(no_data_mask, validity_mask)
+
+    def _get_valid_pixel_mask(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask from the QA or SCL image.
+        nodata pixel mask name is nodata_pixel_mask.tif in the same folder as the valid pixel mask.
+        Depending on collection / processing level, provide the cloud / sea mask.
+        Args:
+            mask_filename (str): valid pixel mask file path
+
+        Returns:
+            ImageMasks: masks container for future writing
+        """
+
+        # Open QA Image
+        image_masks = None
+        if self._input_product.bqa_filename:
+            image_masks = self._create_masks_from_bqa(mask_filename)
+
+        elif self._input_product.scl:
+            image_masks = self._create_masks_from_scl(mask_filename)
+
+        return image_masks
+
+    def get_angle_images(self, out_file: str = None) -> str:
+        """See 'InputFileExtractor.get_angle_images'
+        """
+
+        # downsample factor
+        downsample_factor = 10
+
+        if out_file is None:
+            out_file = os.path.join(self._input_product.product_path, ANGLE_IMAGE_FILE_NAME)
+
+        mtl_info = fmask_config.readMTLFile(self._input_product.mtl_file_name)
+        image = self._input_product.reflective_band_list[0]
+
+        # downsample image for angle computation
+        coarse_res_image = downsample_coarse_image(image, os.path.dirname(out_file), downsample_factor)
+
+        img_info = fileinfo.ImageInfo(coarse_res_image)
+        corners = landsatangles.findImgCorners(coarse_res_image, img_info)
+        nadir_line = landsatangles.findNadirLine(corners)
+        extent_sun_angles = landsatangles.sunAnglesForExtent(img_info, mtl_info)
+        sat_azimuth = landsatangles.satAzLeftRight(nadir_line)
+
+        # do not use the fmask function but an internal custom function
+        make_angles_image(coarse_res_image, out_file, nadir_line, extent_sun_angles, sat_azimuth)
+
+        log.info('SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ')
+        log.info('UNIT = DEGREES (scale: x100) :')
+        log.info(' %s', out_file)
+        return out_file
+
+
+class S2MajaFileExtractor(InputFileExtractor):
+    """'InputFileExtractor' implementation for Sentinel-2 MAJA products
+    """
+
+    def __init__(self, input_product: Sentinel2MajaMTL):
+        super().__init__(input_product)
+
+    def _create_nodata_mask(self, nodata_mask_file_path: str, mask_band_id: str, resolution_id: str) -> MaskImage:
+        """Create no data 'MaskImage'
+
+        Args:
+            nodata_mask_file_path (str): nodata mask file path
+            mask_band_id (str): nodata mask band identifier
+            resolution_id (str): edge mask identifier
+
+        Returns:
+            MaskImage: created nodata mask
+        """
+        log.info('Read validity and nodata masks')
+        log.debug('Read mask: %s', mask_band_id)
+
+        edge = S2L_ImageFile(os.path.join(self._input_product.product_path,
+                                          self._input_product.edge_mask[resolution_id]))
+        edge_arr = edge.array
+
+        defective = S2L_ImageFile(os.path.join(self._input_product.product_path,
+                                               self._input_product.nodata_mask[mask_band_id]))
+        defective_arr = defective.array
+
+        nodata = np.zeros(edge_arr.shape, np.uint8)
+        nodata[edge_arr == 1] = 1
+        nodata[defective_arr == 1] = 1
+
+        del edge_arr
+        del defective_arr
+
+        return MaskImage(edge, nodata, nodata_mask_file_path, None)
+
+    def _create_valid_pixel_mask(
+            self, mask_filename: str, nodata: np.ndarray, mask_band_id: str, resolution_id: str) -> MaskImage:
+        """Create the valid pixel 'MaskImage'
+
+        Args:
+            mask_filename (str): output path of the valid pixel mask
+            nodata (np.ndarray): nodata mask
+            mask_band_id (str): saturation mask band identifier
+            resolution_id (str): cloud mask identifier
+
+        Returns:
+            MaskImage: the valid pixel mask
+        """
+        cloud = S2L_ImageFile(os.path.join(
+            self._input_product.product_path, self._input_product.cloud_mask[resolution_id]))
+        cloud_arr = cloud.array
+        saturation = S2L_ImageFile(os.path.join(
+            self._input_product.product_path, self._input_product.saturation_mask[mask_band_id]))
+        saturation_arr = saturation.array
+
+        # note: CLM is a bit mask; these equality tests only reject pixels
+        # carrying exactly one of the flags 1, 2, 4 or 8
+        valid_px_mask = np.ones(cloud_arr.shape, np.uint8)
+        valid_px_mask[cloud_arr == 1] = 0
+        valid_px_mask[cloud_arr == 2] = 0
+        valid_px_mask[cloud_arr == 4] = 0
+        valid_px_mask[cloud_arr == 8] = 0
+        valid_px_mask[saturation_arr == 1] = 0
+        valid_px_mask[nodata == 1] = 0
+
+        return MaskImage(cloud, valid_px_mask, mask_filename, None)
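Since the values 1, 2, 4 and 8 suggest that the MAJA CLM band is a bit mask, pixels combining several flags (e.g. 3 = 1|2) would slip through the equality tests above. A hedged sketch of an equivalent bitwise formulation (assuming the low four bits are the cloud-related flags targeted here; this is a suggestion, not what the code does):

```python
import numpy as np

def maja_cloud_free(clm: np.ndarray) -> np.ndarray:
    """1 where none of the four low CLM flags is set, else 0."""
    return ((clm & 0b1111) == 0).astype(np.uint8)

print(maja_cloud_free(np.array([0, 1, 3, 8, 16], dtype=np.uint8)))  # -> [1 0 0 0 1]
```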
+
+    def _get_valid_pixel_mask(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask.
+        nodata pixel mask name is nodata_pixel_mask.tif in the same folder as the valid pixel mask
+
+        Args:
+            mask_filename (str): valid pixel mask file path
+
+        Returns:
+            ImageMasks: generated mask container
+        """
+        res = 20
+        resolution_id = self._input_product.resolutions.get(res)
+        mask_band_id = self._input_product.classif_band.get(res)
+
+        nodata_mask_filename = os.path.join(
+            os.path.dirname(mask_filename), NO_DATA_MASK_FILE_NAME)
+
+        no_data_mask = self._create_nodata_mask(
+            nodata_mask_filename, mask_band_id, resolution_id)
+
+        validity_mask = self._create_valid_pixel_mask(
+            mask_filename, no_data_mask.mask_array, mask_band_id, resolution_id)
+
+        return ImageMasks(no_data_mask, validity_mask)
+
+    def get_angle_images(self, out_file: str = None) -> str:
+        """See 'InputFileExtractor.get_angle_images'
+        """
+        # TODO : maybe refactor to :
+        # - have extract_sun_angle and extract_viewing_angle in this class,
+        # - change root_dir to the working dir if out_file is None
+        # then refactor these methods to not read mtl_file_name multiple times
+        if out_file is not None:
+            root_dir = os.path.dirname(out_file)
+        else:
+            root_dir = os.path.dirname(self._input_product.tile_metadata)
+
+        # Viewing Angles (SAT_AZ / SAT_ZENITH)
+        dst_file = os.path.join(root_dir, 'VAA.tif')
+        out_file_list = self._input_product.extract_viewing_angle(dst_file, 'Azimuth')
+
+        dst_file = os.path.join(root_dir, 'VZA.tif')
+        out_file_list.extend(self._input_product.extract_viewing_angle(dst_file, 'Zenith'))
+
+        # Solar Angles (SUN_AZ, SUN_ZENITH)
+        dst_file = os.path.join(root_dir, 'SAA.tif')
+        self._input_product.extract_sun_angle(dst_file, 'Azimuth')
+        out_file_list.append(dst_file)
+
+        dst_file = os.path.join(root_dir, 'SZA.tif')
+        self._input_product.extract_sun_angle(dst_file, 'Zenith')
+        out_file_list.append(dst_file)
+
+        out_vrt_file = os.path.join(root_dir, 'tie_points.vrt')
+        gdal.BuildVRT(out_vrt_file, out_file_list, separate=True)
+
+        if out_file is not None:
+            out_tif_file = out_file
+        else:
+            out_tif_file = os.path.join(root_dir, ANGLE_IMAGE_FILE_NAME)
+        gdal.Translate(out_tif_file, out_vrt_file, format="GTiff")
+
+        # TODO : strange, see with the team
+        # self.angles_file = out_vrt_file
+        log.info('SAT_AZ, SAT_ZENITH, SUN_AZ, SUN_ZENITH')
+        log.info('UNIT = DEGREES (scale: x100) :')
+        log.info(' %s', out_tif_file)
+        return out_tif_file
+
+
+class LandsatMajaFileExtractor(InputFileExtractor):
+    """'InputFileExtractor' implementation for Landsat MAJA products
+    """
+
+    def __init__(self, input_product: LandsatMajaMTL):
+        super().__init__(input_product)
+
+    def _get_valid_pixel_mask(self, mask_filename: str) -> ImageMasks:
+        """Create validity mask and nodata pixel mask.
+ nodata pixel mask name is nodata_pixel_mask.tif in the same folder of the valid pixel mask + + Args: + mask_filename (str): valid pixel mask file path + + Returns: + ImageMasks: generated mask container + """ + log.info('Read validity and nodata masks') + + # No data mask + edge = S2L_ImageFile(os.path.join(self._input_product.product_path, self._input_product.edge_mask)) + edge_arr = edge.array + + nodata = np.zeros(edge_arr.shape, np.uint8) + nodata[edge_arr == 1] = 1 + + del edge_arr + + nodata_mask_filename = os.path.join( + os.path.dirname(mask_filename), NO_DATA_MASK_FILE_NAME) + no_data_mask = MaskImage(edge, nodata, nodata_mask_filename, None) + + # Validity mask + cloud = S2L_ImageFile(os.path.join(self._input_product.product_path, self._input_product.cloud_mask)) + cloud_arr = cloud.array + saturation = S2L_ImageFile(os.path.join(self._input_product.product_path, self._input_product.saturation_mask)) + saturation_arr = saturation.array + + valid_px_mask = np.ones(cloud_arr.shape, np.uint8) + valid_px_mask[cloud_arr == 1] = 0 + valid_px_mask[cloud_arr == 2] = 0 + valid_px_mask[cloud_arr == 4] = 0 + valid_px_mask[cloud_arr == 8] = 0 + valid_px_mask[saturation_arr == 1] = 0 + valid_px_mask[nodata == 1] = 0 + + validity_mask = MaskImage(cloud, valid_px_mask, mask_filename, None) + + return ImageMasks(no_data_mask, validity_mask) + + def get_angle_images(self, out_file: str = None) -> str: + """See 'InputFileExtractor._get_angle_images' + """ + + # downsample factor + downsample_factor = 10 + + if out_file is None: + out_file = os.path.join(self._input_product.product_path, ANGLE_IMAGE_FILE_NAME) + + image = self._input_product.reflective_band_list[0] + + # downsample image for angle computation + coarse_res_image = downsample_coarse_image(image, os.path.dirname(out_file), downsample_factor) + + img_info = fileinfo.ImageInfo(coarse_res_image) + corners = landsatangles.findImgCorners(coarse_res_image, img_info) + nadir_line = landsatangles.findNadirLine(corners) + extent_sun_angles = self._sunAnglesForExtent(img_info) + sat_azimuth = landsatangles.satAzLeftRight(nadir_line) + + # do not use fmask function but internal custom function + make_angles_image(coarse_res_image, out_file, nadir_line, extent_sun_angles, sat_azimuth) + + log.info('SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') + log.info('UNIT = DEGREES (scale: x100) :') + log.info(' %s', out_file) + return out_file + + def _sunAnglesForExtent(self, img_info): + """ + Return array of sun azimuth and zenith for each of the corners of the image + extent. Note that this is the raster extent, not the corners of the swathe. + + The algorithm used here has been copied from the 6S possol() subroutine. The + Fortran code I copied it from was .... up to the usual standard in 6S. So, the + notation is not always clear. 
+
+        """
+        corner_lat_long = img_info.getCorners(outEPSG=4326)
+        (ul_long, ul_lat, ur_long, ur_lat, lr_long, lr_lat, ll_long, ll_lat) = corner_lat_long
+        pts = np.array([
+            [ul_long, ul_lat],
+            [ur_long, ur_lat],
+            [ll_long, ll_lat],
+            [lr_long, lr_lat]
+        ])
+        long_deg = pts[:, 0]
+        lat_deg = pts[:, 1]
+
+        # Date/time in UTC
+        date_str = self._input_product.observation_date
+        time_str = self._input_product.scene_center_time.replace('Z', '')
+        ymd = [int(i) for i in date_str.split('-')]
+        date_obj = datetime.date(ymd[0], ymd[1], ymd[2])
+        julian_day = (date_obj - datetime.date(ymd[0], 1, 1)).days + 1
+        julday_year_end = (datetime.date(ymd[0], 12, 31) - datetime.date(ymd[0], 1, 1)).days + 1
+        # Julian day as a proportion of the year
+        jdp = julian_day / julday_year_end
+        # Hour in UTC
+        hms = [float(x) for x in time_str.split(':')]
+        hour_gmt = hms[0] + hms[1] / 60.0 + hms[2] / 3600.0
+
+        (sun_az, sun_zen) = landsatangles.sunAnglesForPoints(lat_deg, long_deg, hour_gmt, jdp)
+
+        sun_angles = np.vstack((sun_az, sun_zen)).T
+        return sun_angles
+
+
+extractor_class = {
+    Sentinel2MTL.__name__: S2FileExtractor,
+    LandsatMTL.__name__: LandsatFileExtractor,
+    Sentinel2MajaMTL.__name__: S2MajaFileExtractor,
+    LandsatMajaMTL.__name__: LandsatMajaFileExtractor,
+}
diff --git a/sen2like/sen2like/core/file_extractor/landsat_utils.py b/sen2like/sen2like/core/file_extractor/landsat_utils.py
new file mode 100644
index 0000000..e3707ab
--- /dev/null
+++ b/sen2like/sen2like/core/file_extractor/landsat_utils.py
@@ -0,0 +1,58 @@
+"""Common functions for landsat readers"""
+import os
+from rios import fileinfo
+from fmask import landsatangles
+from osgeo import gdal
+import numpy as np
+
+
+def downsample_coarse_image(image_path: str, out_dir: str, ds_factor: int) -> str:
+    """Downsample an image to a file named tie_points_coarseResImage.tif at 30 m * ds_factor resolution
+
+    Args:
+        image_path (str): input image path
+        out_dir (str): image output dir
+        ds_factor (int): downsample factor
+
+    Returns:
+        str: output image path
+    """
+    if not os.path.exists(out_dir):
+        os.makedirs(out_dir)
+    coarse_res_image = os.path.join(out_dir, 'tie_points_coarseResImage.tif')
+    gdal.Translate(coarse_res_image, image_path, xRes=30 * ds_factor, yRes=30 * ds_factor)
+    return coarse_res_image
+
+
+def make_angles_image(template_img, outfile, nadir_line, extent_sun_angles, sat_azimuth):
+    """
+    Make a single output image file of the sun and satellite angles for every
+    pixel in the template image.
+ + """ + img_info = fileinfo.ImageInfo(template_img) + + infiles = landsatangles.applier.FilenameAssociations() + outfiles = landsatangles.applier.FilenameAssociations() + otherargs = landsatangles.applier.OtherInputs() + controls = landsatangles.applier.ApplierControls() + + infiles.img = template_img + outfiles.angles = outfile + + ctr_lat = landsatangles.getCtrLatLong(img_info)[0] + otherargs.R = landsatangles.localRadius(ctr_lat) + otherargs.nadirLine = nadir_line + otherargs.xMin = img_info.xMin + otherargs.xMax = img_info.xMax + otherargs.yMin = img_info.yMin + otherargs.yMax = img_info.yMax + otherargs.extentSunAngles = extent_sun_angles + otherargs.satAltitude = 705000 # Landsat nominal altitude in metres + otherargs.satAzimuth = sat_azimuth + otherargs.radianScale = 100 * 180 / np.pi # Store pixel values in degrees and scale factor of 100 + controls.setStatsIgnore(500) + controls.setCalcStats(False) + controls.setOutputDriverName('GTiff') + + landsatangles.applier.apply(landsatangles.makeAngles, infiles, outfiles, otherargs, controls=controls) diff --git a/sen2like/sen2like/core/image_file.py b/sen2like/sen2like/core/image_file.py index 6790ebc..4786f22 100644 --- a/sen2like/sen2like/core/image_file.py +++ b/sen2like/sen2like/core/image_file.py @@ -1,6 +1,5 @@ import logging import os -import shutil import numpy as np from osgeo import gdal, osr @@ -153,46 +152,47 @@ def crop(self, box): dst = None return data - def duplicate(self, filepath, array=None, res=None, origin=None, output_EPSG=None): + def duplicate(self, filepath, array=None, res=None, origin=None, output_EPSG=None) -> 'S2L_ImageFile': # case array is not provided (default) if array is None: array = self._array # init new instance, copy header, # set array and return new instance - newInstance = S2L_ImageFile(filepath, mode='w') - self.copyHeaderTo(newInstance) + new_image = S2L_ImageFile(filepath, mode='w') + self.copyHeaderTo(new_image) if array is not None: - newInstance.xSize = array.shape[1] - newInstance.ySize = array.shape[0] + new_image.xSize = array.shape[1] + new_image.ySize = array.shape[0] if res is not None: - newInstance.xRes = res - newInstance.yRes = -res + new_image.xRes = res + # pylint: disable=invalid-unary-operand-type + new_image.yRes = -res if origin is not None: # origin is a tuple with xMin and yMax (same def as in gdal) - newInstance.xMin = origin[0] - newInstance.yMax = origin[1] - newInstance.xMax = newInstance.xMin + newInstance.xSize * newInstance.xRes - newInstance.yMin = newInstance.yMax + newInstance.ySize * newInstance.yRes + new_image.xMin = origin[0] + new_image.yMax = origin[1] + new_image.xMax = new_image.xMin + new_image.xSize * new_image.xRes + new_image.yMin = new_image.yMax + new_image.ySize * new_image.yRes if output_EPSG is not None: - new_SRS = osr.SpatialReference() - new_SRS.ImportFromEPSG(int(output_EPSG)) - newInstance.projection = new_SRS.ExportToWkt() + new_srs = osr.SpatialReference() + new_srs.ImportFromEPSG(int(output_EPSG)) + new_image.projection = new_srs.ExportToWkt() # data - newInstance._array = array + new_image._array = array # check dimensions - if array.shape[0] != newInstance.ySize or array.shape[1] != newInstance.xSize: + if array.shape[0] != new_image.ySize or array.shape[1] != new_image.xSize: log.error( 'ERROR: Input array dimensions do not fit xSize and ySize defined in the file header to be duplicated') return None - return newInstance + return new_image def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value=None, 
output_format: str = 'GTIFF', band: str = None, no_data_mask=None): @@ -225,13 +225,13 @@ def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value os.makedirs(self.dirpath) # write with gdal - etype = gdal.GetDataTypeByName(self.array.dtype.name) + e_type = gdal.GetDataTypeByName(self.array.dtype.name) if self.array.dtype.name.endswith('int8'): # work around to GDT_Unknown - etype = 1 + e_type = 1 elif 'float' in self.array.dtype.name and not DCmode: # float to UInt16 - etype = gdal.GDT_UInt16 + e_type = gdal.GDT_UInt16 # Update image attributes self.setFilePath(filepath) @@ -243,30 +243,35 @@ def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value if output_format == 'GTIFF': driver = gdal.GetDriverByName('GTiff') dst_ds = driver.Create(self.filepath, xsize=self.xSize, - ysize=self.ySize, bands=1, eType=etype, options=creation_options) + ysize=self.ySize, bands=1, eType=e_type, options=creation_options) else: driver = gdal.GetDriverByName('MEM') dst_ds = driver.Create('', xsize=self.xSize, - ysize=self.ySize, bands=1, eType=etype) + ysize=self.ySize, bands=1, eType=e_type) dst_ds.SetProjection(self.projection) - geotranform = (self.xMin, self.xRes, 0, self.yMax, 0, self.yRes) - log.debug(geotranform) - dst_ds.SetGeoTransform(geotranform) + geo_transform = (self.xMin, self.xRes, 0, self.yMax, 0, self.yRes) + log.debug(geo_transform) + dst_ds.SetGeoTransform(geo_transform) + if 'float' in self.array.dtype.name and not DCmode: # float to UInt16 with scaling factor of 10000 offset = float(S2L_config.config.get('offset')) gain = float(S2L_config.config.get('gain')) - array_out = ((offset + self.array).clip(min=0) * gain).astype(np.uint16) + + array_out = (self.array.clip(min=0) * gain + offset).astype(np.uint16) + if no_data_mask is not None: - array_out[array_out == nodata_value] += 1 + array_out[array_out == offset] += 1 array_out[no_data_mask == 0] = nodata_value + dst_ds.GetRasterBand(1).WriteArray(array_out) # set GTiff metadata dst_ds.GetRasterBand(1).SetScale(1 / gain) - dst_ds.GetRasterBand(1).SetOffset(offset) + dst_ds.GetRasterBand(1).SetOffset(-offset / gain) else: dst_ds.GetRasterBand(1).WriteArray(self.array) + if nodata_value is not None: dst_ds.GetRasterBand(1).SetNoDataValue(nodata_value) @@ -279,6 +284,7 @@ def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value creation_options["QUALITY"] = 100 creation_options["REVERSIBLE"] = 'YES' creation_options["YCBCR420"] = 'NO' + if self.xRes == 60: creation_options.update({ 'CODEBLOCK_WIDTH': 4, @@ -308,7 +314,9 @@ def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value }) creation_options = list(map(lambda ops: ops[0] + '=' + str(ops[1]), creation_options.items())) + # pylint: disable=unused-variable data_set2 = driver_JPG.CreateCopy(self.filepath, dst_ds, options=creation_options) + # this is the way to close gdal dataset data_set2 = None if output_format == 'COG': @@ -341,13 +349,12 @@ def write(self, creation_options=None, DCmode=False, filepath=None, nodata_value try: data_set2 = driver_Gtiff.CreateCopy(self.filepath, dst_ds, options=creation_options + ['COPY_SRC_OVERVIEWS=YES']) - data_set2 = None + data_set2 = None # noqa: F841 except RuntimeError as err: log.error(err) else: - log.info('Written: {}'.format(self.filepath)) - + log.info('Written: %s', self.filepath) dst_ds.FlushCache() dst_ds = None diff --git a/sen2like/sen2like/core/log.py b/sen2like/sen2like/core/log.py index 48af785..4d2d5f2 100644 --- 
a/sen2like/sen2like/core/log.py
+++ b/sen2like/sen2like/core/log.py
@@ -4,21 +4,43 @@
 import logging
 import os
+import time
 
-
-def configure_loggers(log_path, is_debug, log_filename="sen2like.log", without_date=True):
+def configure_loggers(
+    logger, log_path, is_debug, log_filename="sen2like.log", without_date=True
+):
     """Define the global parameters for the logger output.
 
-    :param log_path: The path where to store log files.
-    :param is_debug: Activate debug logging flag.
-    :param log_filename: The name of the logfile.
-    :param without_date: Do no write date in log file.
+    :param log_path: The path where to store log files.
+    :param is_debug: Activate debug logging flag.
+    :param log_filename: The name of the logfile.
+    :param without_date: Do not write date in log file.
     """
     if not os.path.exists(log_path):
         os.makedirs(log_path)
+
+    logging.Formatter.converter = time.gmtime
+
     file_handler = logging.FileHandler(os.path.join(log_path, log_filename))
-    console_handler = logging.StreamHandler()
+
     date_format = "" if without_date else "%(asctime)s "
-    logging.basicConfig(level=logging.DEBUG if is_debug else logging.INFO,
-                        format='[%(levelname)-8s] {}- %(module)-20s - %(message)s'.format(date_format),
-                        datefmt="%Y-%m-%d %H:%M:%S", handlers=[file_handler, console_handler])
+
+    level = logging.DEBUG if is_debug else logging.INFO
+
+    logger.setLevel(level)
+
+    log_format = f"[%(levelname)-8s] {date_format}- %(module)-20s - %(message)s"
+    formatter = logging.Formatter(
+        fmt=log_format,
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+
+    file_handler.setFormatter(formatter)
+
+    logging.basicConfig(
+        level=level,
+        format=log_format,
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+
+    logger.addHandler(file_handler)
diff --git a/sen2like/sen2like/core/metadata_extraction.py b/sen2like/sen2like/core/metadata_extraction.py
index 959b936..06a1fe0 100644
--- a/sen2like/sen2like/core/metadata_extraction.py
+++ b/sen2like/sen2like/core/metadata_extraction.py
@@ -15,6 +15,8 @@
 __status__ = "Production"  # "Prototype", "Development", or "Production"
 description = __component_name__ + " version:" + __version__ + " (" + __status__ + ")"
 
+NOT_FOUND = 'not found'
+
 
 def compute_earth_solar_distance(doy):
     return 1 - np.multiply(0.016729, np.cos(0.9856 * (doy - 4) * np.divide(np.pi, 180)))
@@ -69,7 +71,7 @@
     if result:
         subs = result[0].split('=')[1].replace('"', '').replace(' ', '')
     else:
-        subs = 'not found'
+        subs = NOT_FOUND
     return subs
diff --git a/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db b/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db
deleted file mode 100644
index 9061003..0000000
Binary files a/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db and /dev/null differ
diff --git a/sen2like/sen2like/core/product_archive/dem_downloader.py b/sen2like/sen2like/core/product_archive/dem_downloader.py
index dbf89be..6576ff5 100644
--- a/sen2like/sen2like/core/product_archive/dem_downloader.py
+++ b/sen2like/sen2like/core/product_archive/dem_downloader.py
@@ -13,7 +13,7 @@
 from osgeo import ogr
 
 from core.S2L_config import S2L_Config
-from product_archive import InputProductArchive
+import core.product_archive.tile_db as tile_db
 
 logging.basicConfig(
     level=logging.DEBUG,
@@ -106,10 +106,10 @@
         self.cross_dateline = False
 
         if os.path.isfile(self.dem_output):
-            LOGGER.info('DEM file for tile {}: {}'.format(self.mgrs_tile, self.dem_output))
+            LOGGER.info('DEM file for tile %s: %s',
diff --git a/sen2like/sen2like/core/metadata_extraction.py b/sen2like/sen2like/core/metadata_extraction.py
index 959b936..06a1fe0 100644
--- a/sen2like/sen2like/core/metadata_extraction.py
+++ b/sen2like/sen2like/core/metadata_extraction.py
@@ -15,6 +15,8 @@ __status__ = "Production"  # "Prototype", "Development", or "Production"
 description = __component_name__ + " version:" + __version__ + " (" + __status__ + ")"

+NOT_FOUND = 'not found'
+

 def compute_earth_solar_distance(doy):
     return 1 - np.multiply(0.016729, np.cos(0.9856 * (doy - 4) * np.divide(np.pi, 180)))
@@ -69,7 +71,7 @@ def reg_exp(mtl_text, stringToSearch):
     if result:
         subs = result[0].split('=')[1].replace('"', '').replace(' ', '')
     else:
-        subs = 'not found'
+        subs = NOT_FOUND
     return subs
diff --git a/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db b/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db
deleted file mode 100644
index 9061003..0000000
Binary files a/sen2like/sen2like/core/product_archive/data/l8_s2_coverage.db and /dev/null differ
diff --git a/sen2like/sen2like/core/product_archive/dem_downloader.py b/sen2like/sen2like/core/product_archive/dem_downloader.py
index dbf89be..6576ff5 100644
--- a/sen2like/sen2like/core/product_archive/dem_downloader.py
+++ b/sen2like/sen2like/core/product_archive/dem_downloader.py
@@ -13,7 +13,7 @@ from osgeo import ogr

 from core.S2L_config import S2L_Config
-from product_archive import InputProductArchive
+import core.product_archive.tile_db as tile_db

 logging.basicConfig(
     level=logging.DEBUG,
@@ -106,10 +106,10 @@ def get(self, mgrs_tile, hcs_code='EPSG:32632', in_resolution=90, resolution=60)
         self.cross_dateline = False

         if os.path.isfile(self.dem_output):
-            LOGGER.info('DEM file for tile {}: {}'.format(self.mgrs_tile, self.dem_output))
+            LOGGER.info('DEM file for tile %s: %s', self.mgrs_tile, self.dem_output)
             return self.dem_output

-        LOGGER.warning('No DEM available for tile {}.'.format(self.mgrs_tile))
+        LOGGER.warning('No DEM available for tile %s.', self.mgrs_tile)

         if not self.configuration.getboolean('download_if_unavailable'):
             return None
@@ -117,27 +117,27 @@ def get(self, mgrs_tile, hcs_code='EPSG:32632', in_resolution=90, resolution=60)
         return self.process(mgrs_tile)

     def process(self, mgrs_tile):
-        LOGGER.info("Trying to download DEM for tile %s" % self.mgrs_tile)
+        LOGGER.info("Trying to download DEM for tile %s", self.mgrs_tile)
         self.temp_directory = tempfile.TemporaryDirectory()
         locations = self.compute_tile_extent()
         dem_files = self.resolve_dem_filenames(locations)

         if not self.check_tiles(dem_files):
-            LOGGER.error("Error while processing tile {}. DEM is invalid.".format(mgrs_tile))
+            LOGGER.error("Error while processing tile %s. DEM is invalid.", mgrs_tile)
         else:
             dem_file = self.create_dem(dem_files)
             if dem_file is None:
-                LOGGER.error("Invalid DEM for tile: {}".format(self.mgrs_tile))
+                LOGGER.error("Invalid DEM for tile: %s", self.mgrs_tile)
             else:
-                LOGGER.info("DEM file for tile {}: {}".format(self.mgrs_tile, dem_file))
+                LOGGER.info("DEM file for tile %s: %s", self.mgrs_tile, dem_file)
                 return dem_file
         return None

     def extent(self, utm):
-        tile_wkt = InputProductArchive.mgrs_to_wkt(self.mgrs_tile, utm=utm)
+        tile_wkt = tile_db.mgrs_to_wkt(self.mgrs_tile, utm=utm)
         if tile_wkt is None:
-            LOGGER.error("Cannot get geometry for tile {}".format(self.mgrs_tile))
+            LOGGER.error("Cannot get geometry for tile %s", self.mgrs_tile)
         self.tile_geometry = ogr.CreateGeometryFromWkt(tile_wkt)
         return self.tile_geometry.GetEnvelope()
@@ -149,7 +149,7 @@ def compute_tile_extent(self):
         """
         locations = None
         extent = self.extent(False)
-        LOGGER.debug("Extent: {}".format(extent))
+        LOGGER.debug("Extent: %s", extent)

         if extent:
             lon_min, lon_max, lat_min, lat_max = extent
@@ -222,24 +222,24 @@ def download_tile(self, location: tuple, output_file: str):
         for dem_url in urls.values():
             tmp_file = os.path.join(self.temp_directory.name, os.path.basename(dem_url))
             try:
-                LOGGER.info('Downloading file to {}'.format(tmp_file))
+                LOGGER.info('Downloading file to %s', tmp_file)
                 local_dem, _ = urllib.request.urlretrieve(dem_url, tmp_file, reporthook=progress)
                 LOGGER.info('File correctly downloaded')
             except HTTPError as err:
-                LOGGER.error('Cannot get file {} : {}'.format(dem_url, err))
+                LOGGER.error('Cannot get file %s : %s', dem_url, err)
             else:
                 output_dir = os.path.dirname(output_file)
                 if local_dem.endswith('.zip'):
                     LOGGER.info('Unzipping file...')
                     with zipfile.ZipFile(local_dem) as zip_file:
                         zip_file.extractall(output_dir)
-                    LOGGER.info('DEM extracted to {}'.format(output_dir))
+                    LOGGER.info('DEM extracted to %s', output_dir)
                 elif local_dem.endswith('.tar'):
                     LOGGER.info('Untarring file...')
                     with tarfile.open(local_dem) as tar_file:
                         tar_file.extractall(path=output_dir, members=dem_file_from_tar(tar_file))
                 else:
-                    LOGGER.error("Unknown archive format: %s".format(output_file))
+                    LOGGER.error("Unknown archive format: %s", output_file)

     def create_dem(self, dem_files):
         dem_files = list(dem_files.values())
@@ -264,7 +264,7 @@ def create_dem(self, dem_files):
             ds = None
             gdal.SetConfigOption('CENTER_LONG', '0')

-            LOGGER.debug('DEM mosaic: {}'.format(dem_src))
+            LOGGER.debug('DEM mosaic: %s', dem_src)
         except Exception as e:
             LOGGER.fatal(e, exc_info=True)
             LOGGER.fatal('error using gdalwarp')
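download_tile() hands urllib.request.urlretrieve a reporthook named progress; its definition sits outside these hunks, but any callback with the standard reporthook signature works. A minimal compatible sketch (not the repository's implementation):

import sys

def progress(block_num: int, block_size: int, total_size: int) -> None:
    # reporthook signature expected by urllib.request.urlretrieve
    if total_size > 0:
        done = min(block_num * block_size, total_size)
        sys.stdout.write(f"\rdownload: {100 * done // total_size}%")
        sys.stdout.flush()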
diff --git a/sen2like/sen2like/core/product_archive/product_archive.py b/sen2like/sen2like/core/product_archive/product_archive.py
index 75cdff1..ca116e9 100644
--- a/sen2like/sen2like/core/product_archive/product_archive.py
+++ b/sen2like/sen2like/core/product_archive/product_archive.py
@@ -2,37 +2,25 @@
 import json
 import logging
 import os
-import re
-import sqlite3
 import time
 import urllib
+from typing import List
 from collections import defaultdict
 from urllib.request import urlopen

-from osgeo import gdal, ogr
+from osgeo import ogr

-from core.products import get_product
+from core.products import get_s2l_product_class
+from core.product_archive import tile_db

 logger = logging.getLogger("Sen2Like")

-select_not_on_180th_meridian = (
-    "SELECT *, "
-    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 1)) as p1, "
-    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 2)) as p2, "
-    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 3)) as p3, "
-    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 4)) as p4, "
-    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 5)) as p5 "
-    "FROM {table} "
-    "WHERE p1 between -100 and 100 "
-    "OR p1 <= 100 and p2 < 0 and p3 < 0 and p4 < 0 and p5 < 0 "
-    "OR p1 >= 100 and p2 > 0 and p3 > 0 and p4 > 0 and p5 > 0 "
-)
-

 class InputProduct:
-    def __init__(self, path=None, tile_coverage=None, date=None, reader=None):
+    def __init__(self, path=None, tile_coverage=None, date=None, s2l_product_class=None):
         self.path = path
-        self.reader = reader
+        # concrete S2L_Product class for S2L product instantiation
+        self.s2l_product_class = s2l_product_class
         self.cloud_cover = None
         self.tile_coverage = tile_coverage
         self.date = date
@@ -43,276 +31,121 @@ def __eq__(self, other):

     @property
     def instrument(self):
-        return self.reader.sensor if self.reader is not None else None
+        return self.s2l_product_class.sensor if self.s2l_product_class is not None else None

     @property
     def short_date(self):
         return datetime.datetime(self.date.year, self.date.month, self.date.day) if self.date else None


-def read_polygon_from_json(json_file):
-    ds = gdal.OpenEx(json_file)
-    layer = ds.GetLayer()
-    feature = layer.GetFeature(0)
-    if feature is None:
-        logging.error("No features in json file: {}".format(json_file))
-        return None
-    export = feature.GetGeometryRef().ExportToWkt()
-    ds = None
-    return export
-
-
-def database_path(database_name):
-    return os.path.join(os.path.dirname(__file__), "data", database_name)
-
-
-def is_spatialite_supported():
-    if os.environ.get("SPATIALITE_DIR") is None:
-        logger.warning("SPATIALITE_DIR environment variable not set.")
-    else:
-        os.environ["PATH"] = ";".join([os.environ["SPATIALITE_DIR"], os.environ["PATH"]])
-    with sqlite3.connect(":memory:") as conn:
-        conn.enable_load_extension(True)
-        try:
-            conn.load_extension("mod_spatialite")
-        except sqlite3.OperationalError:
-            return False
-    return True
-
-
-def select_on_attache_db(databases, request, parameters=[]):
-    """ Attache all database on one memory database and execute request on them
-    :param databases: dict of {'data base name use in request': 'path to database'}
-    :param request: the sql_request
-    :param parameters: sqlite3 request parameters
-    """
-    with sqlite3.connect(":memory:") as conn:
-        conn.enable_load_extension(True)
-        conn.load_extension("mod_spatialite")
-        cur = conn.cursor()
-        for name, filepath in databases.items():
-            attache = f'ATTACH DATABASE "{filepath}" AS "{name}"'
-            cur.execute(attache)
-        conn.commit()
-        cur = conn.execute(request, parameters)
-        res = cur.fetchall()
-    return res
-
-
 class InputProductArchive:
-
+    """Input product archive to retrieve products from where they are stored
+    """
     def __init__(self, configuration, roi=None):
         self.configuration = configuration
         self.roi = roi

     def construct_url(self, mission, tile=None, start_date=None, end_date=None, path=None, row=None, cloud_cover=None):
+        # WARN : load variables to have them in the local scope when using **locals()
+        base_url_landsat = self.configuration.get('base_url_landsat')
+        base_url_s2 = self.configuration.get('base_url_s2')
         base_url = self.configuration.get('base_url')
+        # TODO : specific to s2, see how to avoid it here
+        s2_processing_level = self.configuration.get('s2_processing_level')
+
         # Special formatting for date
         if start_date:
             start_date = start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
+
         if end_date:
             end_date = end_date.strftime("%Y-%m-%dT23:59:59")
+
         parameter = self.configuration.get(f'url_parameters_pattern_{mission}')
         if parameter is None:
             parameter = self.configuration.get('url_parameters_pattern')
-        s2_processing_level = self.configuration.get('s2_processing_level')
-        # Get location parameter depending on mission
+
+        # Get location parameter depending on mission, then format it with available local variables
         location = self.configuration.get(f'location_{mission}', "").format(**locals())
         if cloud_cover is None:
             cloud_cover = self.configuration.get('cloud_cover', 10)
+
+        # fill url with variables in local scope
         url = parameter.format(**locals())
         logging.debug(url)
         return url

     @staticmethod
-    def mgrs_to_wrs(mgrs_tile, coverage=None):
-        if coverage is None:
-            logger.warning(
-                "No minimum coverage defined in configuration, using {:.0%} as default coverage.".format(0.1))
-            coverage = 0.1
-        else:
-            logging.debug("Using {:.0%} coverage.".format(coverage))
-        # Open db
-        select_l8tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='l8tiles.l8tiles')
-        select_s2tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='s2tiles.s2tiles')
-
-        sql_request = (
-            f"SELECT "
-            f"  s2.TILE_ID, "
-            f"  l8.PATH_ROW, "
-            f"  (st_area(st_intersection(l8.geometry, s2.geometry)) / st_area(s2.geometry)) as Coverage "
-            f"FROM ({select_l8tile_not_on_180th_meridian}) as l8,"
-            f"({select_s2tile_not_on_180th_meridian}) as s2 "
-            f"WHERE s2.TILE_ID == ? "
-            f"AND Coverage >= ? "
-            f"AND Coverage is not NULL "
-            f"AND cast(SUBSTR(s2.TILE_ID, 1, 2) as INTEGER ) == l8.UTM "
-        )
-        data = select_on_attache_db(
-            {'l8tiles': database_path('l8tiles.db'), 's2tiles': database_path('s2tiles.db')},
-            sql_request,
-            [mgrs_tile, coverage]
-        )
-        # Sort by coverage
-        data = sorted(data, key=lambda t: t[2], reverse=True)
-        result = [([int(i) for i in entry[1].split('_')], entry[2]) for entry in data]
-        return result
+    def _download_file(url: str) -> dict:
+        """Download resource at given url. Expects a JSON resource.

-    @staticmethod
-    def wrs_to_mgrs(wrs_path, coverage=None):
-        if coverage is None:
-            logger.warning(
-                "No minimum coverage defined in configuration, using {:.0%} as default coverage.".format(0.1))
-            coverage = 0.1
-        else:
-            logging.debug("Using {:.0%} coverage.".format(coverage))
-        # Open db
-        select_l8tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='l8tiles.l8tiles')
-        select_s2tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='s2tiles.s2tiles')
-
-        sql_request = (
-            f"SELECT "
-            f"  s2.TILE_ID, "
-            f"  l8.PATH_ROW, "
-            f"  (st_area(st_intersection(l8.geometry, s2.geometry)) / st_area(s2.geometry)) as Coverage "
-            f"FROM ({select_l8tile_not_on_180th_meridian}) as l8,"
-            f"({select_s2tile_not_on_180th_meridian}) as s2 "
-            f"WHERE l8.PATH_ROW == ? "
-            f"AND Coverage >= ? "
-            f"AND Coverage is not NULL "
-            f"AND cast(SUBSTR(s2.TILE_ID, 1, 2) as INTEGER ) == l8.UTM "
-        )
-        data = select_on_attache_db(
-            {'l8tiles': database_path('l8tiles.db'), 's2tiles': database_path('s2tiles.db')},
-            sql_request,
-            ["{}_{}".format(*wrs_path), coverage]
-        )
-        # Sort by coverage
-        data = sorted(data, key=lambda t: t[2], reverse=True)
-        result = [entry[0] for entry in data]
-        return result
+        Args:
+            url (str): url to fetch

-    @staticmethod
-    def get_coverage(wrs_path, mgrs_tile):
-        # Open db
-        coverage = 0
-        select_l8tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='l8tiles.l8tiles')
-        select_s2tile_not_on_180th_meridian = select_not_on_180th_meridian.format(
-            geo_col='geometry', table='s2tiles.s2tiles')
-
-        sql_request = (
-            f"SELECT "
-            f"  (st_area(st_intersection(l8.geometry, s2.geometry)) / st_area(s2.geometry)) as Coverage "
-            f"FROM ({select_l8tile_not_on_180th_meridian}) as l8,"
-            f"({select_s2tile_not_on_180th_meridian}) as s2 "
-            f"WHERE s2.TILE_ID == ? "
-            f"AND l8.PATH_ROW == ? "
-            f"AND Coverage is not NULL "
-            f"AND cast(SUBSTR(s2.TILE_ID, 1, 2) as INTEGER ) == l8.UTM "
-        )
-        data = select_on_attache_db(
-            {'l8tiles': database_path('l8tiles.db'), 's2tiles': database_path('s2tiles.db')},
-            sql_request,
-            [mgrs_tile, "{}_{}".format(*wrs_path)]
-        )
-        if len(data) > 0:
-            coverage = data[0][0]
-        return coverage
+        Returns:
+            dict: json response as dict

-    @staticmethod
-    def roi_to_tiles(roi):
-        with sqlite3.connect(database_path("s2tiles.db")) as connection:
-            logging.debug("ROI: {}".format(roi))
-            connection.enable_load_extension(True)
-            connection.load_extension("mod_spatialite")
-            sql = f"select TILE_ID from s2tiles where intersects(s2tiles.geometry, GeomFromText('{roi}'))==1"
-            logging.debug("SQL request: {}".format(sql))
-            cur = connection.execute(sql)
-            # TODO: For now, first mgrs tile is excluded. To improve in a future version
-            # TODO: Add coverage
-            tiles = [tile[0] for tile in cur.fetchall() if not tile[0].startswith('01') and not tile[0].startswith('60')]
-            logging.debug("Tiles: {}".format(tiles))
-            return tiles
-
-    @staticmethod
-    def mgrs_to_wkt(tile, utm=False):
-        with sqlite3.connect(database_path("s2tiles.db")) as connection:
-            logging.debug("TILE: {}".format(tile))
-            sql = f"select {'UTM_WKT' if utm else 'LL_WKT'} from s2tiles where TILE_ID='{tile}'"
-            logging.debug("SQL request: {}".format(sql))
-            cur = connection.execute(sql)
-            res = cur.fetchall()
-            if len(res) > 0:
-                wkt = res[0][0]
-                logging.debug("TILE WKT: {}".format(wkt))
-            else:
-                wkt = None
-                logging.error(f"tile {tile} not found in database")
-            return wkt
-
-    @staticmethod
-    def wrs_to_wkt(wrs_id):
-        with sqlite3.connect(database_path("l8tiles.db")) as connection:
-            logging.debug("WRS: {}".format(wrs_id))
-            sql = f"select LL_WKT from l8tiles where PATH_ROW='{wrs_id}'"
-            logging.debug("SQL request: {}".format(sql))
-            cur = connection.execute(sql)
-            wkt = cur.fetchall()[0][0]
-            logging.debug("WRS WKT: {}".format(wkt))
-            return wkt
-
-    def download_file(self, url):
+        Raises:
+            urllib.error.HTTPError: for 404
+        """
         try:
             with urlopen(url, timeout=120) as stream:
-                logger.debug("http request status: %s" % stream.status)
+                logger.debug("http request status: %s", stream.status)
                 return json.loads(stream.read().decode())
         except (urllib.error.URLError, ValueError) as error:
-            logger.error("Cannot read %s" % url)
+            logger.error("Cannot read %s", url)
             logger.error(error)
+            # for now only check 404, but it could be some other code
+            if isinstance(error, urllib.error.HTTPError) and error.code == 404:
+                raise
             return {}

-    def read_products_from_url(self, url, tile_coverage):
-        urls = []
+    def read_products_from_url(self, url, tile_coverage) -> List[InputProduct]:
+        input_product_list: List[InputProduct] = []
         products = {}
-        logger.debug("URL: %s" % url)
+        logger.debug("URL: %s", url)

         for download_try in range(1, 5):
-            logger.debug("Trying to download url: try %s/5 " % (download_try))
-            products = self.download_file(url)
+            logger.debug("Trying to download url: try %s/5 ", download_try)
+
+            try:
+                products = self._download_file(url)
+            except urllib.error.HTTPError:
+                break

             if products:
                 break
+
             time.sleep(5)
-        else:
-            logger.error("Cannot download products from url: %s" % url)
+
+        if not products:
+            logger.error("Cannot download products from url: %s", url)
+            return input_product_list

         for product in products.get("features"):
-            downloaded = InputProduct(tile_coverage=tile_coverage)
+            input_product = InputProduct(tile_coverage=tile_coverage)
             _product = product
             for _property in self.configuration.get("thumbnail_property").split('/'):
                 _product = _product.get(_property, {})
-            downloaded.path = _product
+            input_product.path = _product
             _cloud_cover = product
             for _property in self.configuration.get("cloud_cover_property").split('/'):
                 _cloud_cover = _cloud_cover.get(_property, {})
-            downloaded.cloud_cover = _cloud_cover
+            input_product.cloud_cover = _cloud_cover
             _gml_geometry = product
             for _property in self.configuration.get("gml_geometry_property").split('/'):
                 _gml_geometry = _gml_geometry.get(_property, {})
-            downloaded.gml_geometry = _gml_geometry
-            if downloaded.path:
-                urls.append(downloaded)
-        return urls
+            input_product.gml_geometry = _gml_geometry
+            if input_product.path:
+                input_product_list.append(input_product)
+
+        return input_product_list

     @staticmethod
     def is_local(url):
         for prefix in ["http", "https", "ftp"]:
-            if url.startswith("{}://".format(prefix)):
+
+            if url.startswith(f"{prefix}://"):
                 return False
+
         return True

     def get_products_url_from_tile(self, tile, start_date=None, end_date=None):
@@ -323,40 +156,42 @@ def get_products_url_from_tile(self, tile, start_date=None, end_date=None):
         :param end_date: End of the period
         :return:
         """
-        wrs = self.mgrs_to_wrs(tile, self.configuration.getfloat("coverage"))
-        logger.debug("{} > {}".format(tile, wrs))
+        wrs = tile_db.mgrs_to_wrs(tile, self.configuration.getfloat("coverage"))
+        logger.debug("%s > %s", tile, wrs)
         # Build urls for Sentinel2
         urls = [(self.construct_url("Sentinel2", tile, start_date=start_date, end_date=end_date), 100)]
-
         # Build urls for Landsat8
         for [path, row], tile_coverage in wrs:
             add_url = True
             # Check if wrs path actually intersects the ROI
             if self.roi is not None:
-                wkt = self.wrs_to_wkt(f"{path}_{row}")
+                wkt = tile_db.wrs_to_wkt(f"{path}_{row}")
                 path_polygon = ogr.CreateGeometryFromWkt(wkt)
                 roi_polygon = ogr.CreateGeometryFromWkt(self.roi)
                 intersection = path_polygon.Intersection(roi_polygon)
+
                 if intersection is None or intersection.Area() == 0:
-                    logger.info("WRS %s_%s does not intersect given ROI. Skip wrs tile." % (path, row))
+                    logger.info("WRS %s_%s does not intersect given ROI. Skip wrs tile.", path, row)
                     add_url = False
+
             if add_url:
                 for mission in ['Landsat8', 'Landsat9']:
                     parameter = self.configuration.get(f'url_parameters_pattern_{mission}')
+
                     if parameter is None:
                         parameter = self.configuration.get(f'location_{mission}')
+
                     if parameter is not None:
-                        urls.append((
-                            self.construct_url(mission, tile, start_date=start_date, end_date=end_date, path=path, row=row),
-                            tile_coverage))
+                        urls.append((self.construct_url(mission, tile, start_date=start_date,
+                                    end_date=end_date, path=path, row=row), tile_coverage))

         if not urls:
-            logger.warning(
-                "No product found for tile {} during period {} - {}".format(tile, start_date, end_date))
+            logger.warning("No product found for tile %s during period %s - %s", tile, start_date, end_date)
+
         return urls

     def get_products_from_urls(self, urls, start_date=None, end_date=None, product_mode=False, exclude=None,
-                               processing_level=None):
+                               processing_level=None) -> List[InputProduct]:
         """Get products on tile on the provided time interval.

         :param processing_level: Add processing level for filtering
@@ -367,101 +202,120 @@ def get_products_from_urls(self, urls, start_date=None, end_date=None, product_m
         :param end_date: End of the period
         :return: list of selected InputProduct
         """
-        products_urls = []
+        input_product_list = []
         for index, (url, tile_coverage) in enumerate(urls, 1):
-            logger.debug('Reading product sources: {:.2%} ({}/{})'.format(index / len(urls), index, len(urls)))
+            logger.debug('Reading product sources: %.2f (%s/%s)', index / len(urls), index, len(urls))
             if self.is_local(url):
                 if os.path.exists(url):
                     if product_mode:
-                        products_urls.append(InputProduct(path=url, tile_coverage=tile_coverage))
+                        input_product_list.append(InputProduct(path=url, tile_coverage=tile_coverage))
                     else:
-                        products_urls.extend(
+                        input_product_list.extend(
                             [InputProduct(path=os.path.join(url, _dir), tile_coverage=tile_coverage) for _dir in
                              os.listdir(url)])
                 else:
-                    logger.warning("Missing product path: %s does not exist" % url)
+                    logger.warning("Missing product path: %s does not exist", url)
             else:
-                products_urls.extend(self.read_products_from_url(url, tile_coverage=tile_coverage))
+                input_product_list.extend(self.read_products_from_url(url, tile_coverage=tile_coverage))

         processing_level_filter = self.configuration.get('s2_processing_level')
+
         if processing_level_filter is None:
             processing_level_filter = processing_level

-        products = []
-        for product in products_urls:
-            product.reader = get_product(product.path)
-            if product.reader is None:
+        valid_input_product_list = []
+        for input_product in input_product_list:
+            input_product.s2l_product_class = get_s2l_product_class(input_product.path)
+
+            if input_product.s2l_product_class is None:
+                logger.warning("No S2L Product type found for %s, skip", input_product.path)
                 continue

-            regexp, date_format = product.reader.date_format(os.path.basename(product.path))
-            product.date = product.reader.date(product.path, regexp, date_format)
-            is_product_valid = self.filter_on_date(product, start_date, end_date)
-            if product.instrument == 'S2' and processing_level_filter is not None:
-                is_product_valid &= product.reader.processing_level(
-                    os.path.basename(product.path)) == processing_level_filter
+            regexp, date_format = input_product.s2l_product_class.date_format(os.path.basename(input_product.path))
+            input_product.date = input_product.s2l_product_class.date(input_product.path, regexp, date_format)
+            is_product_valid = self.filter_on_date(input_product, start_date, end_date)
+
+            if input_product.instrument == 'S2' and processing_level_filter is not None:
+                is_product_valid &= input_product.s2l_product_class.processing_level(
+                    os.path.basename(input_product.path)) == processing_level_filter
+
             if is_product_valid:
-                products.append(product)
-                logger.debug(' + {} {}'.format(product.reader.sensor, os.path.basename(product.path)))
+                valid_input_product_list.append(input_product)
+                logger.debug(' + %s %s', input_product.s2l_product_class.sensor, os.path.basename(input_product.path))

         # Filter products with exclude list
         if exclude is not None:
             excluded_path = [os.path.normpath(p.path) for p in exclude]
-            filtered_products = [p for p in products if os.path.normpath(p.path) not in excluded_path]
-            logger.debug("{} products excluded".format(len(products) - len(filtered_products)))
-            products = filtered_products
+            filtered_products = [p for p in valid_input_product_list if os.path.normpath(p.path) not in excluded_path]
+            logger.debug("%s products excluded", len(valid_input_product_list) - len(filtered_products))
+            valid_input_product_list = filtered_products

-        return self.filter_and_sort_products(products)
+        return self.filter_and_sort_products(valid_input_product_list)

     @staticmethod
     def filter_product_composition(products):
         if products:
-            reader = get_product(products[0].path)
+            s2l_product_class = get_s2l_product_class(products[0].path)
             try:
-                filtered = reader.best_product([p.path for p in products])
+                filtered = s2l_product_class.best_product([p.path for p in products])
                 return [p for p in products if p.path in filtered]
             except AttributeError:
-                logger.debug('{} has no best_product method.'.format(reader.__class__.__name__))
+                logger.debug('%s has no best_product method.', s2l_product_class.__class__.__name__)
+
         return products

-    def filter_on_tile_coverage(self, products):
-        products_filtered = []
+    def filter_on_tile_coverage(self, input_products: List[InputProduct]) -> List[InputProduct]:
+        input_products_filtered = []
         # update tile coverage and refilter
-        tile_wkt = self.mgrs_to_wkt(self.configuration.get('tile'))
+        tile_wkt = tile_db.mgrs_to_wkt(self.configuration.get('tile'))
         if tile_wkt is None:
-            return products_filtered
+            return input_products_filtered
+
         tile_polygon = ogr.CreateGeometryFromWkt(tile_wkt)
         coverage = self.configuration.getfloat('coverage')
+
         if coverage is None:
             coverage = 0.1
-        for product in products:
-            if product.instrument == 'S2' and product.gml_geometry:
-                product_polygon = ogr.CreateGeometryFromGML(product.gml_geometry)
+
+        for input_product in input_products:
+
+            if input_product.instrument == 'S2' and input_product.gml_geometry:
+                product_polygon = ogr.CreateGeometryFromGML(input_product.gml_geometry)
+
                 logger.debug(
-                    'PRODUCT/TILE_COVERAGE: {}/{}'.format(os.path.basename(product.path), product.tile_coverage))
-                product.tile_coverage = 100 * product_polygon.Intersection(
+                    'PRODUCT/TILE_COVERAGE: %s/%s', os.path.basename(input_product.path),
+                    input_product.tile_coverage)
+
+                input_product.tile_coverage = 100 * product_polygon.Intersection(
                     tile_polygon).GetArea() / tile_polygon.GetArea()
-                logger.debug('PRODUCT/TILE_COVERAGE (UPDATED): {}/{}'.format(os.path.basename(product.path),
-                                                                             product.tile_coverage))
-                if product.tile_coverage > 100 * coverage:
-                    products_filtered.append(product)
+
+                logger.debug('PRODUCT/TILE_COVERAGE (UPDATED): %s/%s',
+                             os.path.basename(input_product.path), input_product.tile_coverage)
+
+                if input_product.tile_coverage > 100 * coverage:
+                    input_products_filtered.append(input_product)
+
             else:
-                products_filtered.append(product)
-        return products_filtered
+                input_products_filtered.append(input_product)

-    def filter_and_sort_products(self, products):
+        return input_products_filtered
+
+    def filter_and_sort_products(self, input_products: List[InputProduct]) -> List[InputProduct]:
         # update tile coverage and filter
-        products = self.filter_on_tile_coverage(products)
+        filtered_input_products = self.filter_on_tile_coverage(input_products)

         # Group products by dates
         flipped = defaultdict(lambda: defaultdict(list))
-        for product in products:
-            flipped[product.short_date][product.instrument].append(product)
+        for input_product in filtered_input_products:
+            flipped[input_product.short_date][input_product.instrument].append(input_product)

         results = []
         for date, instruments in flipped.items():
             for instrument, _products in instruments.items():
+
                 if instrument == 'L8':
                     _products = self.filter_product_composition(_products)
+
                 results.append(sorted(_products, key=lambda p: p.tile_coverage if p.tile_coverage is not None else 0,
                                       reverse=True)[0])
@@ -471,20 +325,22 @@ def filter_and_sort_products(self, products):
         return results

     @staticmethod
-    def filter_on_date(product, start_date=None, end_date=None):
+    def filter_on_date(product: InputProduct, start_date=None, end_date=None):
         if product.date is None:
             return False
-        logger.debug("Extracted date for {}: {}".format(product.path, product.date.strftime("%Y/%m/%d")))
+
+        logger.debug("Extracted date for %s: %s", product.path, product.date.strftime("%Y/%m/%d"))
         product_is_valid = True
         if start_date:
             product_is_valid &= start_date <= product.date
+
         if end_date:
             end_date = end_date.replace(hour=23, minute=59, second=59)
             product_is_valid &= product.date <= end_date
+
         if not product_is_valid:
-            logger.debug(
-                "Product not contained in {} - {}".format(start_date.strftime("%Y/%m/%d") if start_date else '',
-                                                          end_date.strftime("%Y/%m/%d") if end_date else ''))
+            logger.debug("Product not contained in %s - %s", start_date.strftime("%Y/%m/%d")
+                         if start_date else '', end_date.strftime("%Y/%m/%d") if end_date else '')
         return product_is_valid
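The selection logic in filter_and_sort_products reduces, for each (date, instrument) pair, to "keep the candidate with the best tile coverage". The idiom in isolation, with stand-in tuples rather than real InputProduct objects:

from collections import defaultdict

# (short_date, instrument, tile_coverage) stand-ins
candidates = [
    ("2022-12-01", "S2", 100), ("2022-12-01", "S2", 87),
    ("2022-12-01", "L8", 64), ("2022-12-02", "L8", 71),
]

flipped = defaultdict(lambda: defaultdict(list))
for date, instrument, coverage in candidates:
    flipped[date][instrument].append(coverage)

best = [max(coverages) for instruments in flipped.values() for coverages in instruments.values()]
print(best)  # [100, 64, 71]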
diff --git a/sen2like/sen2like/core/product_archive/product_selector.py b/sen2like/sen2like/core/product_archive/product_selector.py
new file mode 100644
index 0000000..6f6306b
--- /dev/null
+++ b/sen2like/sen2like/core/product_archive/product_selector.py
@@ -0,0 +1,167 @@
+"""Product selection module: retrieve the products to process for the selected operational mode."""
+import logging
+
+from argparse import Namespace
+from osgeo import gdal
+
+from core.argparser import DateRange, Mode
+from core.product_archive import tile_db
+from core.product_archive.product_archive import InputProductArchive
+from core.S2L_config import config
+
+
+logger = logging.getLogger('Sen2Like')
+
+
+def _read_polygon_from_json(json_file):
+    dataset = gdal.OpenEx(json_file)
+    layer = dataset.GetLayer()
+    feature = layer.GetFeature(0)
+    if feature is None:
+        logging.error("No features in json file: %s", json_file)
+        return None
+    export = feature.GetGeometryRef().ExportToWkt()
+    dataset = None
+    return export
+
+
+def _get_product(polygon, date_range, tiles):
+    downloader = InputProductArchive(config, roi=polygon)
+    products = {tile: [url for url in downloader.get_products_url_from_tile(
+        tile, date_range.start_date, date_range.end_date)] for tile in tiles}
+    return products
+
+
+def _geo_get_tiles(spatial_func, geojson_file_path):
+
+    if not tile_db.is_spatialite_supported():
+        raise AssertionError("Spatialite support is not available. Cannot determine MGRS tiles from ROI.")
+
+    polygon = _read_polygon_from_json(geojson_file_path)
+    if polygon is not None:
+        tiles = spatial_func(polygon)
+    else:
+        tiles = []
+
+    return polygon, tiles
+
+
+def _get_single_tile_mode_products(args, date_range):
+    tiles = [args.tile]
+    products = _get_product(None, date_range, tiles)
+    return products, tiles
+
+
+def _get_multi_tile_mode_products(args, date_range):
+    polygon, tiles = _geo_get_tiles(tile_db.tiles_intersect_roi, args.roi)
+    products = _get_product(polygon, date_range, tiles)
+    return products, tiles
+
+
+# pylint: disable=unused-argument
+def _get_product_mode_products(args, date_range):
+    products = {args.tile: [(args.product, 100)]}
+    tiles = [args.tile]
+    return products, tiles
+
+
+def _get_roi_based_mode_products(args, date_range):
+    polygon, tiles = _geo_get_tiles(tile_db.tiles_contains_roi, args.roi)
+    if args.tile:
+        if args.tile in tiles:
+            tiles = [args.tile]
+        else:
+            raise AssertionError(f"{args.tile} is not in the found MGRS tiles : {tiles}")
+    else:
+        if len(tiles) != 1:
+            raise AssertionError(
+                f"Found more than one MGRS tile containing the ROI without specifying --tile param : {tiles}")
+
+    products = _get_product(polygon, date_range, tiles)
+    return products, tiles
+
+
+# dict to select the function for tile and product selection depending on mode
+# function MUST have signature def _func_name(args: Namespace, date_range: DateRange)
+_product_function = {
+    Mode.SINGLE_TILE: _get_single_tile_mode_products,
+    Mode.MULTI_TILE: _get_multi_tile_mode_products,
+    Mode.PRODUCT: _get_product_mode_products,
+    Mode.ROI_BASED: _get_roi_based_mode_products,
+}
+
+
+def get_products(args: Namespace, date_range: DateRange):
+    """Retrieve products to process depending on the selected mode
+
+    Args:
+        args (Namespace): parsed program args, contains selected mode
+        and other useful parameter for product and tile selection
+        date_range (DateRange): date interval to search product for
+
+    Returns:
+        dict: product indexed by tile with value list of tuple that are product URL and tile coverage
+        example : {'31TFJ': [('/data/PRODUCTS/Sentinel2/31TFJ', 100),
+        ('/data/PRODUCTS/Landsat8/196/30', 0.7600012569702809),
+        ('/data/PRODUCTS/Landsat9/196/30', 0.7600012569702809)]}
+        None if no product found
+    """
+    func = _product_function.get(args.operational_mode)
+    products, tiles = func(args, date_range)
+    if not products:
+        logger.error("No product found. Exiting application...")
+        return None
+    # Filter on original tiles:
+    products = {tile: item for (tile, item) in products.items() if tile in tiles}
+    return products
+
+# def get_products_old(args: Namespace, date_range: DateRange):
+#     """Retrieve products to process.
+
+#     Args:
+#         args (Namespace): parsed program args
+#         date_range (DateRange): date interval to search product for
+
+#     Returns:
+#         _type_: tuple of :
+#         - product as dict indexed by tile with value list of tuple that are product URL and tile coverage
+#         example : {'31TFJ': [('/data/PRODUCTS/Sentinel2/31TFJ', 100),
+#         ('/data/PRODUCTS/Landsat8/196/30', 0.7600012569702809),
+#         ('/data/PRODUCTS/Landsat9/196/30', 0.7600012569702809)]}
+#         - processing start date if not in *-tile-mode, or None
+#         - processing end date if not in *-tile-mode, or None
+#     """
+#     # Are we in tile mode ?
+#     if args.operational_mode in ['single-tile-mode', 'multi-tile-mode']:
+
+#         if args.operational_mode == 'multi-tile-mode':
+#             if not tile_db.is_spatialite_supported():
+#                 logger.error("Spatialite support is not available. Cannot determine MGRS tiles from ROI.")
+#                 return
+#             json_file = args.roi
+#             polygon = read_polygon_from_json(json_file)
+#             if polygon is not None:
+#                 tiles = tile_db.tiles_intersect_roi(polygon)
+#             else:
+#                 tiles = []
+#         else:
+#             polygon = None
+#             tiles = [args.tile]
+
+#         downloader = InputProductArchive(config, roi=polygon)
+#         products = {tile: [url for url in downloader.get_products_url_from_tile(
+#             tile, date_range.start_date, date_range.end_date)] for tile in tiles}
+#         if not products:
+#             logger.error("No product found. Exiting application...")
+#             return
+#     else:
+#         products = {args.tile: [(args.product, 100)]}
+#         tiles = [args.tile]
+
+#     # Filter on original tiles:
+#     products = {tile: item for (tile, item) in products.items() if tile in tiles}
+#     return products
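The _product_function mapping above is a plain dispatch table: one handler per operational mode, all sharing the (args, date_range) signature. Reduced to a self-contained sketch (the Mode values are illustrative; the real enum lives in core.argparser):

from enum import Enum

class Mode(Enum):
    SINGLE_TILE = "single-tile-mode"
    MULTI_TILE = "multi-tile-mode"

def _get_single_tile_mode_products(args, date_range):
    return {args.tile: []}, [args.tile]

def _get_multi_tile_mode_products(args, date_range):
    return {}, []

_product_function = {
    Mode.SINGLE_TILE: _get_single_tile_mode_products,
    Mode.MULTI_TILE: _get_multi_tile_mode_products,
}

handler = _product_function.get(Mode.SINGLE_TILE)  # then: handler(args, date_range)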
diff --git a/sen2like/sen2like/core/product_archive/tile_db.py b/sen2like/sen2like/core/product_archive/tile_db.py
new file mode 100644
index 0000000..f021f55
--- /dev/null
+++ b/sen2like/sen2like/core/product_archive/tile_db.py
@@ -0,0 +1,283 @@
+"""
+Facility module to request MGRS and WRS tile db
+"""
+import logging
+import os
+import sqlite3
+from collections import namedtuple
+
+logger = logging.getLogger("Sen2Like")
+
+S2_TILE_DB = 's2tiles.db'
+L8_TILE_DB = 'l8tiles.db'
+
+SELECT_NOT_ON_180TH_MERIDIAN = (
+    "SELECT *, "
+    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 1)) as p1, "
+    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 2)) as p2, "
+    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 3)) as p3, "
+    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 4)) as p4, "
+    "st_x(st_pointn(ST_ExteriorRing({geo_col}), 5)) as p5 "
+    "FROM {table} "
+    "WHERE p1 between -100 and 100 "
+    "OR p1 <= 100 and p2 < 0 and p3 < 0 and p4 < 0 and p5 < 0 "
+    "OR p1 >= 100 and p2 > 0 and p3 > 0 and p4 > 0 and p5 > 0 "
+)
+
+# named tuple for "tile to tile" functions
+T2TRequest = namedtuple('T2TRequest', ['coverage', 'sql_request'])
+
+
+def _database_path(database_name):
+    return os.path.join(os.path.dirname(__file__), "data", database_name)
+
+
+def is_spatialite_supported():
+    """Check if spatialite is supported by the execution environment
+
+    Returns:
+        True if it is, otherwise False
+    """
+    if os.environ.get("SPATIALITE_DIR") is None:
+        logger.warning("SPATIALITE_DIR environment variable not set.")
+    else:
+        os.environ["PATH"] = ";".join([os.environ["SPATIALITE_DIR"], os.environ["PATH"]])
+    with sqlite3.connect(":memory:") as conn:
+        conn.enable_load_extension(True)
+        try:
+            conn.load_extension("mod_spatialite")
+        except sqlite3.OperationalError:
+            return False
+    return True
+
+
+def _select_on_attache_db(databases, request, parameters=[]):
+    """Attach all databases to one in-memory database and execute the request on them
+    :param databases: dict of {'database name used in request': 'path to database'}
+    :param request: the sql_request
+    :param parameters: sqlite3 request parameters
+    """
+    with sqlite3.connect(":memory:") as conn:
+        conn.enable_load_extension(True)
+        conn.load_extension("mod_spatialite")
+        cur = conn.cursor()
+        for name, filepath in databases.items():
+            attache = f'ATTACH DATABASE "{filepath}" AS "{name}"'
+            cur.execute(attache)
+        conn.commit()
+        cur = conn.execute(request, parameters)
+        res = cur.fetchall()
+    return res
+
+
+def _prepare_tile_to_tile_request(coverage: float, tile_column: str) -> T2TRequest:
+    if coverage is None:
+        logger.warning(
+            "No minimum coverage defined in configuration, using {:.0%} as default coverage.".format(0.1))
+        coverage = 0.1
+    else:
+        logging.debug("Using {:.0%} coverage.".format(coverage))
+    # Open db
+    select_l8tile_not_on_180th_meridian = SELECT_NOT_ON_180TH_MERIDIAN.format(
+        geo_col='geometry', table='l8tiles.l8tiles')
+    select_s2tile_not_on_180th_meridian = SELECT_NOT_ON_180TH_MERIDIAN.format(
+        geo_col='geometry', table='s2tiles.s2tiles')
+
+    sql_request = (
+        f"SELECT "
+        f"  s2.TILE_ID, "
+        f"  l8.PATH_ROW, "
+        f"  (st_area(st_intersection(l8.geometry, s2.geometry)) / st_area(s2.geometry)) as Coverage "
+        f"FROM ({select_l8tile_not_on_180th_meridian}) as l8,"
+        f"({select_s2tile_not_on_180th_meridian}) as s2 "
+        f"WHERE {tile_column} == ? "
+        f"AND Coverage >= ? "
+        f"AND Coverage is not NULL "
+        f"AND cast(SUBSTR(s2.TILE_ID, 1, 2) as INTEGER ) == l8.UTM "
+    )
+
+    return T2TRequest(coverage, sql_request)
+
+
+def mgrs_to_wrs(mgrs_tile, coverage=None):
+    """Get WRS tiles that cover a MGRS tile by at least a coverage percentage
+
+    Args:
+        mgrs_tile (str): MGRS tile id that should intersect WRS tiles to retrieve
+        coverage (float): minimum coverage percentage of MGRS tile by WRS tile, default to 0.1
+
+    Returns:
+        A list of tuples of WRS [path, row] and the coverage of the MGRS tile,
+        sorted by coverage desc, e.g. [([45, 56], 45)]
+    """
+
+    t2t_request = _prepare_tile_to_tile_request(coverage, "s2.TILE_ID")
+
+    data = _select_on_attache_db(
+        {'l8tiles': _database_path(L8_TILE_DB), 's2tiles': _database_path(S2_TILE_DB)},
+        t2t_request.sql_request,
+        [mgrs_tile, t2t_request.coverage]
+    )
+    # Sort by coverage
+    data = sorted(data, key=lambda t: t[2], reverse=True)
+    result = [([int(i) for i in entry[1].split('_')], entry[2]) for entry in data]
+    return result
+
+
+def wrs_to_mgrs(wrs_path, coverage=None):
+    """Get MGRS tiles for which a WRS tile covers at least the MGRS tile by a coverage percentage
+
+    Args:
+        wrs_path (str): WRS path row
+        coverage (float): minimum MGRS percentage coverage by WRS tile
+
+    Returns:
+        Array of MGRS tile ids sorted by coverage desc
+    """
+
+    t2t_request = _prepare_tile_to_tile_request(coverage, "l8.PATH_ROW")
+
+    data = _select_on_attache_db(
+        {'l8tiles': _database_path(L8_TILE_DB), 's2tiles': _database_path(S2_TILE_DB)},
+        t2t_request.sql_request,
+        ["{}_{}".format(*wrs_path), t2t_request.coverage]
+    )
+    # Sort by coverage
+    data = sorted(data, key=lambda t: t[2], reverse=True)
+    result = [entry[0] for entry in data]
+    return result
+
+
+def get_coverage(wrs_path: tuple, mgrs_tile: str) -> float:
+    """Get the percentage coverage of an MGRS tile by a WRS tile
+
+    Args:
+        wrs_path (tuple): tuple of WRS path and row
+        mgrs_tile (str): MGRS tile id
+
+    Returns:
+        Percentage of MGRS tile coverage by WRS
+
+    """
+    # Open db
+    coverage = 0
+    select_l8tile_not_on_180th_meridian = SELECT_NOT_ON_180TH_MERIDIAN.format(
+        geo_col='geometry', table='l8tiles.l8tiles')
+    select_s2tile_not_on_180th_meridian = SELECT_NOT_ON_180TH_MERIDIAN.format(
+        geo_col='geometry', table='s2tiles.s2tiles')
+
+    sql_request = (
+        f"SELECT "
+        f"  (st_area(st_intersection(l8.geometry, s2.geometry)) / st_area(s2.geometry)) as Coverage "
+        f"FROM ({select_l8tile_not_on_180th_meridian}) as l8,"
+        f"({select_s2tile_not_on_180th_meridian}) as s2 "
+        f"WHERE s2.TILE_ID == ? "
+        f"AND l8.PATH_ROW == ? "
+        f"AND Coverage is not NULL "
+        f"AND cast(SUBSTR(s2.TILE_ID, 1, 2) as INTEGER ) == l8.UTM "
+    )
+    data = _select_on_attache_db(
+        {'l8tiles': _database_path(L8_TILE_DB), 's2tiles': _database_path(S2_TILE_DB)},
+        sql_request,
+        # pylint: disable=consider-using-f-string
+        [mgrs_tile, "{}_{}".format(*wrs_path)]
+    )
+    if len(data) > 0:
+        coverage = data[0][0]
+    return coverage
+
+
+def _select_tiles_by_spatial_relationships(relation, roi):
+    """Retrieve MGRS tiles having the given spatial relation with a ROI.
+    For now, tiles whose ids start with 01 or 60 are excluded.
+
+    Args:
+        relation (str): spatialite relation function name, e.g. 'intersects' or 'contains'
+        roi (str): the ROI as WKT
+
+    Returns:
+        list of tile ids
+    """
+    with sqlite3.connect(_database_path("s2tiles.db")) as connection:
+        logging.debug("ROI: %s", roi)
+        connection.enable_load_extension(True)
+        connection.load_extension("mod_spatialite")
+        sql = f"select TILE_ID from s2tiles where {relation}(s2tiles.geometry, GeomFromText('{roi}'))==1"
+        logging.debug("SQL request: %s", sql)
+        cur = connection.execute(sql)
+        # TODO: For now, first mgrs tile is excluded. To improve in a future version
+        # TODO: Add coverage
+        tiles = [tile[0] for tile in cur.fetchall() if not tile[0].startswith('01') and not tile[0].startswith('60')]
+        logging.debug("Tiles: %s", tiles)
+        return tiles
+
+
+def tiles_intersect_roi(roi):
+    """Retrieve MGRS tiles that intersect a ROI.
+    For now, tiles whose ids start with 01 or 60 are excluded.
+
+    Args:
+        roi (str): the ROI as WKT
+
+    Returns:
+        list of tile ids
+
+    """
+    return _select_tiles_by_spatial_relationships("intersects", roi)
+
+
+def tiles_contains_roi(roi):
+    """Retrieve MGRS tiles that completely contain a ROI.
+    For now, tiles whose ids start with 01 or 60 are excluded.
+
+    Args:
+        roi (str): the ROI as WKT
+
+    Returns:
+        list of tile ids
+
+    """
+    return _select_tiles_by_spatial_relationships("contains", roi)
+
+
+def mgrs_to_wkt(tile, utm=False):
+    """Get the MGRS tile geom as WKT in LL or UTM.
+
+    Args:
+        tile (str): tile id
+        utm (bool): if coordinates must be UTM or not
+
+    Returns:
+        tile geom as WKT or None if no tile match
+    """
+    with sqlite3.connect(_database_path("s2tiles.db")) as connection:
+        logging.debug("TILE: %s", tile)
+        sql = f"select {'UTM_WKT' if utm else 'LL_WKT'} from s2tiles where TILE_ID='{tile}'"
+        logging.debug("SQL request: %s", sql)
+        cur = connection.execute(sql)
+        res = cur.fetchall()
+        if len(res) > 0:
+            wkt = res[0][0]
+            logging.debug("TILE WKT: %s", wkt)
+        else:
+            wkt = None
+            logging.error("tile %s not found in database", tile)
+        return wkt
+
+
+def wrs_to_wkt(wrs_id: str):
+    """Get WRS tile geom as WKT
+
+    Args:
+        wrs_id (str): name of the WRS tile
+
+    Returns:
+        tile geom as WKT
+    """
+    with sqlite3.connect(_database_path("l8tiles.db")) as connection:
+        logging.debug("WRS: %s", wrs_id)
+        sql = f"select LL_WKT from l8tiles where PATH_ROW='{wrs_id}'"
+        logging.debug("SQL request: %s", sql)
+        cur = connection.execute(sql)
+        wkt = cur.fetchall()[0][0]
+        logging.debug("WRS WKT: %s", wkt)
+        return wkt
diff --git a/sen2like/sen2like/core/products/__init__.py b/sen2like/sen2like/core/products/__init__.py
index f28f61e..b3767d9 100644
--- a/sen2like/sen2like/core/products/__init__.py
+++ b/sen2like/sen2like/core/products/__init__.py
@@ -19,7 +19,7 @@ def is_product(item):
     return inspect.isclass(item) and issubclass(item, S2L_Product) and item.__name__ != S2L_Product.__name__


-def get_product_from_sensor_name(sensor):
+def get_s2l_product_class_from_sensor_name(sensor):
     """Get product corresponding to given sensor name.

     :param sensor: Product sensor name
@@ -28,10 +28,11 @@ def get_s2l_product_class_from_sensor_name(sensor):
     for current_product in PRODUCTS.values():
         if getattr(current_product, 'is_final', False) and sensor in getattr(current_product, 'supported_sensors', []):
             return current_product
+
     return None


-def get_product(product_path):
-    """Get product corresponding to given product.
+def get_s2l_product_class(product_path):
+    """Get S2L_Product children class corresponding to given product.

     :param product_path: Path of the product file to read
     :return:
@@ -40,17 +41,18 @@ def get_s2l_product_class(product_path):
     if len(products) == 1:
         return products[0]
     if len(products) > 1:
-        log.error('Multiple products reader compatible with %s' % product_path)
+        log.error('Multiple products reader compatible with %s', product_path)
     else:
-        log.error("No product reader compatible with %s" % product_path)
+        log.error("No product reader compatible with %s", product_path)
+
     return None


-def read_mapping(product_class):
+def read_mapping(product_class) -> OrderedDict:
     """Read bands mapping file for the given class."""
     directory = os.path.dirname(inspect.getfile(product_class))
     filename = os.path.join(directory, 'bands.csv')
     if not os.path.exists(filename):
-        log.error("Invalid mapping filename: %s" % filename)
+        log.error("Invalid mapping filename: %s", filename)
         return {}
     with open(filename, 'rt') as fp:
         csv_reader = csv.reader(fp)
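read_mapping (above) loads a per-product bands.csv into an OrderedDict. A sketch of the consuming side, assuming a simple two-column layout of product band vs. S2 band (the actual column layout is defined by each product's bands.csv, not shown here):

import csv
from collections import OrderedDict

def read_band_mapping(filename: str) -> OrderedDict:
    mapping = OrderedDict()
    with open(filename, "rt") as fp:
        for row in csv.reader(fp):
            if len(row) == 2:
                mapping[row[0]] = row[1]  # hypothetical layout, e.g. {"B4": "B04"}
    return mapping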
{}".format(self.sensor)) + self.s2l_product_class = get_s2l_product_class_from_sensor_name(self.sensor) + if self.s2l_product_class is None: + logger.error("Cannot determine Product associated to sensor %s", self.sensor) def get_band_file(self, band, plus=False): # get band @@ -81,7 +81,7 @@ def get_band_filepath(self, band, plus=False): filepath = '' if not len(filename) != 0 else filename[0] if os.path.exists(filepath): return filepath - logger.debug("Product band {} with res {} not found in {}".format(band, int(res), self.path)) + logger.debug("Product band %s with res %s not found in %s", band, int(res), self.path) logger.debug(filepath) return None @@ -100,7 +100,7 @@ def getMask(self): filepath = filename[0] if filename else '' if not os.path.exists(filepath): - logger.warning("Product mask not found at {}".format(filepath)) + logger.warning("Product mask not found at %s", filepath) # Trying to parse with old format filename = '{}_MSK.TIF'.format(self.name) filepath = os.path.join(self.path, filename) @@ -114,4 +114,4 @@ def getMask(self): @property def bands(self): - return self.product.bands + return self.s2l_product_class.bands diff --git a/sen2like/sen2like/core/products/landsat_8/landsat8.py b/sen2like/sen2like/core/products/landsat_8/landsat8.py index 8acac19..79aa12b 100644 --- a/sen2like/sen2like/core/products/landsat_8/landsat8.py +++ b/sen2like/sen2like/core/products/landsat_8/landsat8.py @@ -3,7 +3,7 @@ import re from typing import List -from core.product_archive.product_archive import InputProductArchive +import core.product_archive.tile_db as tile_db from core.products.product import S2L_Product @@ -46,7 +46,7 @@ def date_format(cls, name): def update_site_info(self, tile=None): if tile is None: - tiles = InputProductArchive.wrs_to_mgrs((self.mtl.path, self.mtl.row)) + tiles = tile_db.wrs_to_mgrs((self.mtl.path, self.mtl.row)) self.mtl.mgrs = tiles[0] if len(tiles) else "NO_TILE" else: self.mtl.mgrs = tile diff --git a/sen2like/sen2like/core/products/landsat_8_maja/landsat8_maja.py b/sen2like/sen2like/core/products/landsat_8_maja/landsat8_maja.py index 8d4fa01..8070f83 100644 --- a/sen2like/sen2like/core/products/landsat_8_maja/landsat8_maja.py +++ b/sen2like/sen2like/core/products/landsat_8_maja/landsat8_maja.py @@ -46,4 +46,3 @@ def can_handle(cls, product_name): @property def sensor_name(self): return self.sensor_names[self.sensor] - diff --git a/sen2like/sen2like/core/products/product.py b/sen2like/sen2like/core/products/product.py index 094bb45..516c25e 100644 --- a/sen2like/sen2like/core/products/product.py +++ b/sen2like/sen2like/core/products/product.py @@ -1,5 +1,4 @@ import datetime -import datetime as dt import logging import os import numpy as np @@ -7,12 +6,16 @@ from core import readers from core import S2L_config +from core.file_extractor.file_extractor import MaskInfo, extractor_class from core.image_file import S2L_ImageFile from core.products import read_mapping +from core.readers import BaseReader from core.toa_reflectance import convert_to_reflectance_from_reflectance_cal_product logger = logging.getLogger('Sen2Like') +DATE_WITH_MILLI_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" + class ClassPropertyDescriptor(object): @@ -31,8 +34,10 @@ def classproperty(func): return ClassPropertyDescriptor(func) +# FIXME : see to use ABC + -class S2L_Product(object): +class S2L_Product(): _bands = None brdf_coefficients = {} image30m = {} @@ -43,21 +48,27 @@ class S2L_Product(object): def __init__(self, path): # check if product exist if not os.path.isdir(path): - raise 
IOError("%s is not a valid directory." % path) + raise IOError(f"{path} is not a valid directory.") self.acqdate = None - self.mtl = None + # TODO : rename attribute, could be tricky as "mtl" could be use for + self.mtl: BaseReader = None self.filenames = {} self.path = path # product directory self.name = os.path.basename(path) # product name self.ndvi_filename = None self.fusion_auto_check_threshold_msk_file = None + self.mask_info = None + self.mask_filename = None + self.nodata_mask_filename = None + self.angles_file = None + self.roi_filename = None @staticmethod def date(name, regexp, date_format): match = regexp.match(os.path.basename(name)) if not match or not match.groups(): - logger.error("Cannot extract acquisition date from {}".format(name)) + logger.error("Cannot extract acquisition date from %s", name) return None return datetime.datetime.strptime(match.group(1), date_format) @@ -79,12 +90,13 @@ def reverse_bands_mapping(self): self._reverse_bands_mapping = {v: k for k, v in self.bands_mapping.items()} return self._reverse_bands_mapping - def read_metadata(self, granule_folder='GRANULE'): + def read_metadata(self): # extract metadata - self.mtl = readers.get_reader(self.path) - if self.mtl is None: + reader_class = readers.get_reader(self.path) + if reader_class is None: return - self.mtl = self.mtl(self.path) + # instantiate the reader + self.mtl = reader_class(self.path) try: self.update_site_info(S2L_config.config.get('tile', None)) @@ -98,22 +110,24 @@ def read_metadata(self, granule_folder='GRANULE'): if n < 6: # fill with zeros scene_center_time = self.mtl.scene_center_time.replace('Z', (6 - n) * '0' + 'Z') - self.acqdate = dt.datetime.strptime(self.mtl.observation_date + ' ' + scene_center_time, - "%Y-%m-%d %H:%M:%S.%fZ") + self.acqdate = datetime.datetime.strptime(self.mtl.observation_date + ' ' + scene_center_time, + "%Y-%m-%d %H:%M:%S.%fZ") if 'S2' in self.sensor or self.mtl.data_type in ['Level-2F', 'Level-2H']: # Sentinel 2 - self.dt_sensing_start = dt.datetime.strptime(self.mtl.dt_sensing_start, "%Y-%m-%dT%H:%M:%S.%fZ") - self.ds_sensing_start = dt.datetime.strptime(self.mtl.ds_sensing_start, "%Y-%m-%dT%H:%M:%S.%fZ") + self.dt_sensing_start = datetime.datetime.strptime(self.mtl.dt_sensing_start, DATE_WITH_MILLI_FORMAT) + self.ds_sensing_start = datetime.datetime.strptime(self.mtl.ds_sensing_start, DATE_WITH_MILLI_FORMAT) + + logger.debug("Datatake sensing start: %s", self.dt_sensing_start) + logger.debug("Datastrip sensing start: %s", self.ds_sensing_start) - logger.debug("Datatake sensing start: {}".format(self.dt_sensing_start)) - logger.debug("Datastrip sensing start: {}".format(self.ds_sensing_start)) + # TODO : understand and comment this if '.' 
in self.mtl.file_date: - self.file_date = dt.datetime.strptime(self.mtl.file_date, "%Y-%m-%dT%H:%M:%S.%fZ") + self.file_date = datetime.datetime.strptime(self.mtl.file_date, DATE_WITH_MILLI_FORMAT) else: - self.file_date = dt.datetime.strptime(self.mtl.file_date, "%Y-%m-%dT%H:%M:%SZ") + self.file_date = datetime.datetime.strptime(self.mtl.file_date, "%Y-%m-%dT%H:%M:%SZ") - logger.debug("Product generation time: {}".format(self.file_date)) - logger.debug("Acquisition Date: {}".format(self.acqdate)) + logger.debug("Product generation time: %s", self.file_date) + logger.debug("Acquisition Date: %s", self.acqdate) def get_band_filepath(self, band): """ @@ -129,14 +143,25 @@ def get_band_filepath(self, band): files = self.band_files(band) if len(files) > 0: return files[-1] - logger.warning("Product for {} band {} not found in {}".format(self.sensor, band, self.path)) + logger.warning("Product for %s band %s not found in %s", self.sensor, band, self.path) return None @classmethod def processing_level(cls, name): + # pylint:disable=unused-argument + # because need name for children classes return None - def get_band_file(self, band): + def get_band_file(self, band: str) -> S2L_ImageFile: + """Get the image band file as S2L_ImageFile. + Also Set the product band file path in filenames[band] + + Args: + band (str): name of the band + + Returns: + S2L_ImageFile: the product band image, None if the band is not found + """ # check if not already known if band not in self.filenames: filepath = self.get_band_filepath(band) @@ -158,7 +183,7 @@ def can_handle(product_path): @classmethod def read_bands_mapping(cls): - logger.debug("Reading bands mapping for %s" % cls) + logger.debug("Reading bands mapping for %s", cls.__name__) cls._bands_mapping = read_mapping(cls) @classmethod @@ -186,8 +211,8 @@ def get_ndvi_image(self, ndvi_filepath): B8A = skit_resize(B8A, B04.shape, order=3, preserve_range=True) # NDVI toujours basé sur les valeurs de reflectance - B04_index = list(self.bands).index(self.reverse_bands_mapping['B04']) - B8A_index = list(self.bands).index(self.reverse_bands_mapping['B8A']) + B04_index = list(self.bands).index(self.reverse_bands_mapping['B04']) + B8A_index = list(self.bands).index(self.reverse_bands_mapping['B8A']) B04 = convert_to_reflectance_from_reflectance_cal_product(self.mtl, B04, self.reverse_bands_mapping['B04']) B8A = convert_to_reflectance_from_reflectance_cal_product(self.mtl, B8A, self.reverse_bands_mapping['B8A']) ndvi_arr = (B8A - B04) / (B04 + B8A) @@ -197,3 +222,58 @@ def get_ndvi_image(self, ndvi_filepath): self.ndvi_filename = ndvi.filepath ndvi.write(DCmode=True, creation_options=['COMPRESS=LZW']) return True + + def get_valid_pixel_mask(self, mask_filename: str, roi_file_path: str) -> bool: + """Get validity and nodata masks from S2L Product. + Masks are generated in the dir of 'mask_filename'. + Mask information are computed for QI report needs + Set in the product: + - 'mask_filename' + - 'nodata_mask_filename' + - 'mask_info' + - 'roi_filename' if roi_file_path + + Args: + mask_filename (str): Validity mask file destination path + roi_file_path (str): Path to roi file to apply to the mask for ROI based mode. + Must be geojson with Polygon. Can be None if no ROI to apply. 
+ + Returns: + bool: if masks are valid, meaning extraction is successful + """ + + if roi_file_path: + if not os.path.isfile(roi_file_path): + raise AssertionError(f"roi_file_path param is not a file: {roi_file_path}") + + self.roi_filename = roi_file_path + + image_masks = extractor_class.get( + self.mtl.__class__.__name__)( + self.mtl).get_valid_pixel_mask( + mask_filename, roi_file_path) + + if not image_masks: + return False + + self.mask_filename = image_masks.validity_mask.mask_filename + self.nodata_mask_filename = image_masks.no_data_mask.mask_filename + + # compute MaskInfo + validity_mask = image_masks.validity_mask.mask_array + no_data_mask = image_masks.no_data_mask.mask_array + + self.mask_info = MaskInfo( + validity_mask.size, + np.count_nonzero(validity_mask), + no_data_mask.size - np.count_nonzero(no_data_mask)) + + return True + + def get_angle_images(self, out_file: str): + """"Extract angle image file from S2L Product from input product. + set 'angles_file' in the product + Args: + out_file (str): file path to extract + """ + self.angles_file = extractor_class.get(self.mtl.__class__.__name__)(self.mtl).get_angle_images(out_file) diff --git a/sen2like/sen2like/core/readers/__init__.py b/sen2like/sen2like/core/readers/__init__.py index 90a2ec4..e64a76c 100644 --- a/sen2like/sen2like/core/readers/__init__.py +++ b/sen2like/sen2like/core/readers/__init__.py @@ -25,9 +25,10 @@ def get_reader(product_path): if len(readers) == 1: return readers[0] if len(readers) > 1: - log.error('Multiple readers compatible with %s' % product_path) + log.error('Multiple readers compatible with %s', product_path) else: - log.error("No reader compatible with %s" % product_path) + log.error("No reader compatible with %s", product_path) + return None # Loads readers diff --git a/sen2like/sen2like/core/readers/landsat.py b/sen2like/sen2like/core/readers/landsat.py index a2676b2..6b1fa8f 100644 --- a/sen2like/sen2like/core/readers/landsat.py +++ b/sen2like/sen2like/core/readers/landsat.py @@ -4,19 +4,10 @@ import logging import os import re -import subprocess -import sys -import numpy as np -from fmask import config, landsatangles -from osgeo import gdal -from rios import fileinfo -from skimage.transform import resize as skit_resize - -from core.image_file import S2L_ImageFile -from core.metadata_extraction import reg_exp, compute_earth_solar_distance, get_in_band_solar_irrandiance_value +from core.metadata_extraction import NOT_FOUND, reg_exp, compute_earth_solar_distance, get_in_band_solar_irrandiance_value from core.readers.reader import BaseReader -from sen2like import BINDIR + log = logging.getLogger('Sen2Like') @@ -71,10 +62,9 @@ def __init__(self, product_path): try: mtl_file_name = md_list_1[0] - self.mask_filename = None self.product_directory_name = os.path.basename(self.product_path) self.mtl_file_name = mtl_file_name - with io.open(mtl_file_name, 'rU') as mtl_file: + with io.open(mtl_file_name, 'r') as mtl_file: mtl_text = mtl_file.read() self.product_name = os.path.basename( os.path.dirname(mtl_file_name)) # PRODUCT_NAME # VDE : linux compatible @@ -88,7 +78,7 @@ def __init__(self, product_path): self.landsat_scene_id = res[0].split('=')[1].replace('"', '').replace(' ', '') else: self.landsat_scene_id = os.path.basename(mtl_file_name).split('_')[0].replace(" ", "") - log.info(' -- Landsat_id : ' + self.landsat_scene_id) + log.info(' -- Landsat_id : %s', self.landsat_scene_id) string_to_search = 'LANDSAT_PRODUCT_ID =.*' self.product_id = reg_exp(mtl_text, string_to_search) @@ -110,13 
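read_metadata (above) parses metadata dates that may or may not carry a fractional-seconds part, which is why DATE_WITH_MILLI_FORMAT is only used when a '.' is present. The branch in isolation:

import datetime

DATE_WITH_MILLI_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"

def parse_metadata_date(value: str) -> datetime.datetime:
    # fall back to the second-resolution layout when no fractional part is present
    fmt = DATE_WITH_MILLI_FORMAT if "." in value else "%Y-%m-%dT%H:%M:%SZ"
    return datetime.datetime.strptime(value, fmt)

parse_metadata_date("2022-12-01T10:44:32.123456Z")
parse_metadata_date("2022-12-01T10:44:32Z")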
diff --git a/sen2like/sen2like/core/readers/landsat.py b/sen2like/sen2like/core/readers/landsat.py index a2676b2..6b1fa8f 100644 --- a/sen2like/sen2like/core/readers/landsat.py +++ b/sen2like/sen2like/core/readers/landsat.py @@ -4,19 +4,10 @@ import logging import os import re -import subprocess -import sys -import numpy as np -from fmask import config, landsatangles -from osgeo import gdal -from rios import fileinfo -from skimage.transform import resize as skit_resize - -from core.image_file import S2L_ImageFile -from core.metadata_extraction import reg_exp, compute_earth_solar_distance, get_in_band_solar_irrandiance_value +from core.metadata_extraction import NOT_FOUND, reg_exp, compute_earth_solar_distance, get_in_band_solar_irrandiance_value from core.readers.reader import BaseReader -from sen2like import BINDIR + log = logging.getLogger('Sen2Like') @@ -71,10 +62,9 @@ def __init__(self, product_path): try: mtl_file_name = md_list_1[0] - self.mask_filename = None self.product_directory_name = os.path.basename(self.product_path) self.mtl_file_name = mtl_file_name - with io.open(mtl_file_name, 'rU') as mtl_file: + with io.open(mtl_file_name, 'r') as mtl_file: mtl_text = mtl_file.read() self.product_name = os.path.basename( os.path.dirname(mtl_file_name)) # PRODUCT_NAME # VDE : linux compatible @@ -88,7 +78,7 @@ def __init__(self, product_path): self.landsat_scene_id = res[0].split('=')[1].replace('"', '').replace(' ', '') else: self.landsat_scene_id = os.path.basename(mtl_file_name).split('_')[0].replace(" ", "") - log.info(' -- Landsat_id : ' + self.landsat_scene_id) + log.info(' -- Landsat_id : %s', self.landsat_scene_id) string_to_search = 'LANDSAT_PRODUCT_ID =.*' self.product_id = reg_exp(mtl_text, string_to_search) @@ -110,13 +100,13 @@ def __init__(self, product_path): self.data_type = reg_exp(mtl_text, string_to_search) string_to_search = 'COLLECTION_CATEGORY =.*' self.collection = reg_exp(mtl_text, string_to_search) - if self.collection == 'not found': + if self.collection == NOT_FOUND: self.collection = 'Pre Collection' # SR level products are not indicated in the MTL (USGS) regex = 'L[O, M, T, C]0[1 - 8].*-SC.*' p0 = re.compile(regex) - log.debug('product name: ' + self.product_name) + log.debug('product name: %s', self.product_name) # For the match, the directory name (not the full path) must be provided # Conflict with radiometric processing - split required # In radiometric processing path with following type : @@ -126,8 +116,6 @@ def __init__(self, product_path): if p0.match(str(rec)): self.data_type = 'L2A' - self.angles_file = None - string_to_search = 'MODEL_FIT_TYPE =.*' # MODEL_FIT_TYPE = "L1T_SINGLESCENE_OPTIMAL" # MODEL_FIT_TYPE = "L1T_MULTISCENE_SUBOPTIMAL" @@ -194,7 +182,7 @@ def __init__(self, product_path): if self.processing_sw == "SLAP_03.04": if self.data_type == "L1T": - log.debug("GCP : ", self.gcp_filename) + log.debug("GCP : %s", self.gcp_filename) if self.gcp_filename != 'NotApplicable-geometricrefinementusingneighbouringscenes': self.model_fit = "L1T_SINGLESCENE_OPTIMAL" else: @@ -221,7 +209,6 @@ def __init__(self, product_path): string_to_search = 'GROUND_CONTROL_POINT_RESIDUALS_KURTOSIS_Y =.*' self.gcp_res_kurt_y = reg_exp(mtl_text, string_to_search) - self.mask_filename = None # INFORMATION ON FILE NAMES : if self.collection_number == '02': string_to_search = 'FILE_NAME_METADATA_ODL =.*' @@ -244,7 +231,7 @@ def __init__(self, product_path): string_to_search = 'SUN_AZIMUTH =.*' self.sun_azimuth_angle = reg_exp(mtl_text, string_to_search) string_to_search = 'SUN_ELEVATION =.*' - self.sun_zenith_angle = 90.0 - np.float(reg_exp(mtl_text, string_to_search)) + self.sun_zenith_angle = 90.0 - float(reg_exp(mtl_text, string_to_search)) string_to_search = 'UTM_ZONE =.*' self.utm_zone = reg_exp(mtl_text, string_to_search) string_to_search = 'MAP_PROJECTION =.*' @@ -267,14 +254,14 @@ def __init__(self, product_path): result = regex.findall(mtl_text) self.radiance_maximum = [] for k in result: - v = np.float(k.split('=')[1].replace(' ', '')) + v = float(k.split('=')[1].replace(' ', '')) self.radiance_maximum.append(v) regex = re.compile('RADIANCE_MINIMUM_.* =.*') result = regex.findall(mtl_text) self.radiance_minimum = [] for k in result: - v = np.float(k.split('=')[1].replace(' ', '')) + v = float(k.split('=')[1].replace(' ', '')) self.radiance_minimum.append(v) self.rad_radio_coefficient_dic = {} @@ -282,17 +269,16 @@ def __init__(self, product_path): result = regex.findall(mtl_text) self.rescaling_gain = [] for cpt, k in enumerate(result): - v = np.float(k.split('=')[1].replace(' ', '')) + v = float(k.split('=')[1].replace(' ', '')) self.rescaling_gain.append(v) band_id = k.split('_')[3].split('=')[0].replace(' ', '') - self.rad_radio_coefficient_dic[str(cpt)] = {"Band_id": str(band_id), - "Gain": v, "Offset": "0"} + self.rad_radio_coefficient_dic[str(cpt)] = {"Band_id": str(band_id), "Gain": v, "Offset": "0"} regex = re.compile('RADIANCE_ADD_BAND_.* =.*') result = regex.findall(mtl_text) self.rescaling_offset = [] for cpt, k in enumerate(result): - v = np.float((k.split('='))[1].replace(' ', '')) + v = float((k.split('='))[1].replace(' ', '')) band_id = k.split('_')[3].split('=')[0].replace(' ', '') for x in self.rad_radio_coefficient_dic: bd =
self.rad_radio_coefficient_dic[x]['Band_id'] @@ -306,9 +292,9 @@ def __init__(self, product_path): result = regex.findall(mtl_text) self.rho_rescaling_gain = [] for cpt, k in enumerate(result): - v = np.float(k.split('=')[1].replace(' ', '')) + v = float(k.split('=')[1].replace(' ', '')) self.rho_rescaling_gain.append(v) - band_id = np.int(k.split('_')[3].split('=')[0].replace(' ', '')) + band_id = int(k.split('_')[3].split('=')[0].replace(' ', '')) if band_id < 10: band_id_st = '0' + str(band_id) else: @@ -320,8 +306,8 @@ def __init__(self, product_path): result = (regex.findall(mtl_text)) self.rho_rescaling_offset = [] for cpt, k in enumerate(result): - v = np.float(k.split('=')[1].replace(' ', '')) - band_id = np.int(k.split('_')[3].split('=')[0].replace(' ', '')) + v = float(k.split('=')[1].replace(' ', '')) + band_id = int(k.split('_')[3].split('=')[0].replace(' ', '')) if band_id < 10: band_id_st = '0' + str(band_id) else: @@ -345,13 +331,19 @@ def __init__(self, product_path): string_to_search = 'FILE_NAME_QUALITY_L1_PIXEL =.*' else: string_to_search = 'FILE_NAME_BAND_QUALITY =.*' - self.bqa_filename = reg_exp(mtl_text, string_to_search) + + self.bqa_filename = None + bqa_filename = reg_exp(mtl_text, string_to_search) + if bqa_filename != NOT_FOUND: + self.bqa_filename = os.path.join(self.product_path, bqa_filename) if self.collection_number == '02': string_to_search = 'ANGLE_COEFFICIENT_FILE_NAME =.*' else: string_to_search = 'FILE_NAME_ANGLE_COEFFICIENT =.*' - self.ang_filename = reg_exp(mtl_text, string_to_search) + ang_filename = reg_exp(mtl_text, string_to_search) + if ang_filename != NOT_FOUND: + self.ang_filename = os.path.join(self.product_path, ang_filename) self.scl, self.scene_classif_band = self.get_scl_band() @@ -376,7 +368,7 @@ def __init__(self, product_path): # Assume that for L2A image_file are present self.missing_image_in_list = 'FALSE' - aerosol_file_list = fnmatch.fnmatch(os.listdir(self.product_path), '*sr_aerosol.tif') + aerosol_file_list = fnmatch.filter(os.listdir(self.product_path), '*sr_aerosol.tif') if aerosol_file_list: self.aerosol_band = os.path.join(self.product_path, aerosol_file_list[0]) log.info(' -- Aerosol image found ') @@ -436,170 +428,6 @@ def __init__(self, product_path): self.isValid = False self.mtl_file_name = '' - def get_valid_pixel_mask(self, mask_filename): - """ - Depending on collection / processing level, provide the cloud / sea mask - Set self.mask_filename - """ - - # Open QA Image - if self.bqa_filename != 'not found': - self.bqa_filename = os.path.join(self.product_path, self.bqa_filename) - log.info('Generating validity and nodata masks from BQA band') - log.debug(f'Read cloud mask: {self.bqa_filename}') - bqa = S2L_ImageFile(self.bqa_filename) - bqa_array = bqa.array - - # Process Pixel valid 'pre collection - # Process Land Water Mask 'collection 1 - if self.collection != 'Pre Collection': - th = 2720 # No land sea mask given with Collection products - log.debug(th) - else: - th = 20480 - - #TODO: Check th, 20480 not good for C-2 - if self.collection_number == '02': - th = 21824 - - valid_px_mask = np.zeros(bqa_array.shape, np.uint8) - valid_px_mask[bqa_array <= th] = 1 - valid_px_mask[bqa_array == 1] = 0 # Remove background - valid_px_mask[bqa_array > th] = 0 - - mask = bqa.duplicate(mask_filename, array=valid_px_mask) - mask.write(creation_options=['COMPRESS=LZW'], nodata_value=None) - self.mask_filename = mask_filename - - # nodata mask (not good when taking it from BQA, getting from B01): - mask_filename = 
os.path.join(os.path.dirname(mask_filename), 'nodata_pixel_mask.tif') - if self.data_type == 'L2A': - image_filename = self.surf_image_list[0] - else: - image_filename = self.dn_image_list[0] - image = S2L_ImageFile(image_filename) - array = image.array.clip(0, 1).astype(np.uint8) - mask = image.duplicate(mask_filename, array=array.astype(np.uint8)) - mask.write(creation_options=['COMPRESS=LZW'], nodata_value=None) - self.nodata_mask_filename = mask_filename - - return True - elif self.scl: - log.info('Generating validity and nodata masks from SCL band') - log.debug(f'Read SCL: {self.scene_classif_band}') - scl = S2L_ImageFile(self.scene_classif_band) - scl_array = scl.array - res = 30 - if scl.xRes != res: - shape = (int(scl_array.shape[0] * - scl.yRes / res), int(scl_array.shape[1] * scl.xRes / res)) - log.debug(shape) - scl_array = skit_resize(scl_array, shape, order=0, preserve_range=True).astype(np.uint8) - - valid_px_mask = np.zeros(scl_array.shape, np.uint8) - # Consider as valid pixels : - # VEGETATION et NOT_VEGETATED (valeurs 4 et 5) - # UNCLASSIFIED (7) - # excluded SNOW (11) - - valid_px_mask[scl_array == 4] = 1 - valid_px_mask[scl_array == 5] = 1 - valid_px_mask[scl_array == 7] = 1 - valid_px_mask[scl_array == 11] = 0 - - mask = scl.duplicate(mask_filename, array=valid_px_mask) - mask.write(creation_options=['COMPRESS=LZW']) - self.mask_filename = mask_filename - - # nodata mask - mask_filename = os.path.join(os.path.dirname(mask_filename), 'nodata_pixel_mask.tif') - nodata = np.ones(scl_array.shape, np.uint8) - nodata[scl_array == 0] = 0 - mask = scl.duplicate(mask_filename, array=nodata) - mask.write(creation_options=['COMPRESS=LZW']) - self.nodata_mask_filename = mask_filename - - return True - return False - - def get_angle_images(self, DST=None): - """ - :param DST: Optional name of the output tif containing all angles images - :return: set self.angles_file - Following band order : SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - The unit is RADIANS - """ - - # downsample factor - F = 10 - - if DST is not None: - out_file = DST - else: - out_file = os.path.join(self.product_path, 'tie_points.tif') - - if self.ang_filename != 'not found' and sys.platform == 'linux2': - self.ang_filename = os.path.join(self.product_path, self.ang_filename) - ls8_angles_exe = os.path.join(BINDIR, 'l8_angles', 'l8_angles') - args = [ls8_angles_exe, os.path.abspath(self.ang_filename), 'SATELLITE {} -b 1,2,3,4,5,6,7'.format(F)] - subprocess.check_call(' '.join(args), shell=True, cwd=os.path.dirname(out_file)) - args = [ls8_angles_exe, os.path.abspath(self.ang_filename), 'SOLAR {} -b 1'.format(F)] - subprocess.check_call(' '.join(args), shell=True, cwd=os.path.dirname(out_file)) - - mtlInfo = config.readMTLFile(self.mtl_file_name) - image = self.reflective_band_list[0] - - # downsample image for angle computation - dirname = os.path.dirname(out_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - coarseResImage = os.path.join(dirname, 'tie_points_coarseResImage.tif') - gdal.Translate(coarseResImage, image, xRes=30 * F, yRes=30 * F) - - imgInfo = fileinfo.ImageInfo(coarseResImage) - corners = landsatangles.findImgCorners(coarseResImage, imgInfo) - nadirLine = landsatangles.findNadirLine(corners) - extentSunAngles = landsatangles.sunAnglesForExtent(imgInfo, mtlInfo) - satAzimuth = landsatangles.satAzLeftRight(nadirLine) - # do not use fmask function but internal custom function - self.makeAnglesImage(coarseResImage, out_file, - nadirLine, extentSunAngles, satAzimuth, imgInfo) - 
log.info('SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - log.info('UNIT = DEGREES (scale: x100) :') - log.info(' ' + out_file) - self.angles_file = out_file - - def makeAnglesImage(self, template_img, outfile, nadirLine, extentSunAngles, satAzimuth, imgInfo): - """ - Make a single output image file of the sun and satellite angles for every - pixel in the template image. - - """ - imgInfo = fileinfo.ImageInfo(template_img) - - infiles = landsatangles.applier.FilenameAssociations() - outfiles = landsatangles.applier.FilenameAssociations() - otherargs = landsatangles.applier.OtherInputs() - controls = landsatangles.applier.ApplierControls() - - infiles.img = template_img - outfiles.angles = outfile - - (ctrLat, ctrLong) = landsatangles.getCtrLatLong(imgInfo) - otherargs.R = landsatangles.localRadius(ctrLat) - otherargs.nadirLine = nadirLine - otherargs.xMin = imgInfo.xMin - otherargs.xMax = imgInfo.xMax - otherargs.yMin = imgInfo.yMin - otherargs.yMax = imgInfo.yMax - otherargs.extentSunAngles = extentSunAngles - otherargs.satAltitude = 705000 # Landsat nominal altitude in metres - otherargs.satAzimuth = satAzimuth - otherargs.radianScale = 100 * 180 / np.pi # Store pixel values in degrees and scale factor of 100 - controls.setStatsIgnore(500) - controls.setCalcStats(False) - controls.setOutputDriverName('GTiff') - - landsatangles.applier.apply(landsatangles.makeAngles, infiles, outfiles, otherargs, controls=controls) - def _get_band(self, regex): image_list = [filename for filename in os.listdir(self.product_path) if re.search(regex, filename, re.IGNORECASE)] diff --git a/sen2like/sen2like/core/readers/landsat_maja.py b/sen2like/sen2like/core/readers/landsat_maja.py index 42be722..0cf2535 100644 --- a/sen2like/sen2like/core/readers/landsat_maja.py +++ b/sen2like/sen2like/core/readers/landsat_maja.py @@ -1,4 +1,3 @@ -import datetime import glob import logging import os @@ -7,17 +6,12 @@ from xml import parsers from xml.etree import ElementTree -import numpy import numpy as np from osgeo import ogr import shapely import shapely.geometry import shapely.wkt -from fmask import landsatangles -from osgeo import gdal -from rios import fileinfo -from core.image_file import S2L_ImageFile from core.metadata_extraction import compute_earth_solar_distance, get_in_band_solar_irrandiance_value, from_date_to_doy from core.readers.reader import BaseReader @@ -76,7 +70,7 @@ def __init__(self, product_path): root = ElementTree.parse(mtl_file_name) except parsers.expat.ExpatError as err: self.isValid = False - logging.error("Error during parsing of MTD product file: %s" % mtl_file_name) + logging.error("Error during parsing of MTD product file: %s", mtl_file_name) logging.error(err) sys.exit(-1) @@ -85,7 +79,7 @@ def __init__(self, product_path): self.product_name = os.path.basename(os.path.dirname(mtl_file_name)) # PRODUCT_NAME # VDE : linux compatible self.landsat_scene_id = root.findtext('.//Dataset_Identification/IDENTIFIER') - log.info(' -- Landsat_id : ' + self.landsat_scene_id) + log.info(' -- Landsat_id : %s', self.landsat_scene_id) self.product_id = root.findtext('.//Product_Characteristics/PRODUCT_ID') self.file_date = root.findtext('.//Product_Characteristics/ACQUISITION_DATE') @@ -97,8 +91,6 @@ def __init__(self, product_path): if self.collection == 'not found': self.collection = 'Pre Collection' - self.angles_file = None - self.spacecraft_id = root.findtext('.//Product_Characteristics/PLATFORM') self.mgrs = root.findtext('.//Dataset_Identification/GEOGRAPHICAL_ZONE') self.path = 
root.findtext('.//Product_Characteristics/ORBIT_NUMBER[@type="Path"]') @@ -110,10 +102,6 @@ def __init__(self, product_path): self.observation_date = observation_date.split('T')[0] self.scene_center_time = observation_date.split('T')[-1] - # Read masks - self.mask_filename = None - self.nodata_mask_filename = None - masks_nodes = root.findall('.//Mask_List/Mask') for mask_node in masks_nodes: if mask_node.findtext('.//Mask_Properties/NATURE') == 'Cloud': @@ -135,7 +123,7 @@ def __init__(self, product_path): self.map_projection = match.group(2) self.utm_zone = match.group(3) else: - log.warning('Cannot read Geographical zone : {}'.format(utm_zone)) + log.warning('Cannot read Geographical zone : %s', utm_zone) self.datum = self.utm_zone = self.map_projection = None bands_files = [] @@ -216,155 +204,6 @@ def __init__(self, product_path): if not os.path.isfile(self.l2a_qi_report_path): self.l2a_qi_report_path = None - def get_valid_pixel_mask(self, mask_filename): - """ - Depending on collection / processing level, provide the cloud / sea mask - Set self.mask_filename - """ - log.info('Read validity and nodata masks') - - # No data mask - edge = S2L_ImageFile(os.path.join(self.product_path, self.edge_mask)) - edge_arr = edge.array - - nodata = np.zeros(edge_arr.shape, np.uint8) - nodata[edge_arr == 1] = 1 - - del edge_arr - - nodata_mask_filename = os.path.join(os.path.dirname(mask_filename), 'nodata_pixel_mask.tif') - mask = edge.duplicate(nodata_mask_filename, array=nodata) - mask.write(creation_options=['COMPRESS=LZW']) - self.nodata_mask_filename = mask_filename - - # Validity mask - cloud = S2L_ImageFile(os.path.join(self.product_path, self.cloud_mask)) - cloud_arr = cloud.array - saturation = S2L_ImageFile(os.path.join(self.product_path, self.saturation_mask)) - saturation_arr = saturation.array - - valid_px_mask = np.ones(cloud_arr.shape, np.uint8) - valid_px_mask[cloud_arr == 1] = 0 - valid_px_mask[cloud_arr == 2] = 0 - valid_px_mask[cloud_arr == 4] = 0 - valid_px_mask[cloud_arr == 8] = 0 - valid_px_mask[saturation_arr == 1] = 0 - valid_px_mask[nodata == 1] = 0 - - mask = cloud.duplicate(mask_filename, array=valid_px_mask) - mask.write(creation_options=['COMPRESS=LZW']) - self.mask_filename = mask_filename - return True - - def get_angle_images(self, out_file=None): - """ - :param DST: Optional name of the output tif containing all angles images - :return: set self.angles_file - Following band order : SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - The unit is RADIANS - """ - - # downsample factor - F = 10 - - if out_file is None: - out_file = os.path.join(self.product_path, 'tie_points.tif') - - image = self.reflective_band_list[0] - - # downsample image for angle computation - dirname = os.path.dirname(out_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - coarse_res_image = os.path.join(dirname, 'tie_points_coarseResImage.tif') - gdal.Translate(coarse_res_image, image, xRes=30 * F, yRes=30 * F) - - img_info = fileinfo.ImageInfo(coarse_res_image) - corners = landsatangles.findImgCorners(coarse_res_image, img_info) - nadir_line = landsatangles.findNadirLine(corners) - extent_sun_angles = self.sunAnglesForExtent(img_info) - sat_azimuth = landsatangles.satAzLeftRight(nadir_line) - - # do not use fmask function but internal custom function - self.makeAnglesImage(coarse_res_image, out_file, nadir_line, extent_sun_angles, sat_azimuth, img_info) - - log.info('SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - log.info('UNIT = DEGREES (scale: x100) :') - log.info(' ' + out_file) - 
self.angles_file = out_file - - def makeAnglesImage(self, template_img, outfile, nadirLine, extentSunAngles, satAzimuth, imgInfo): - """ - Make a single output image file of the sun and satellite angles for every - pixel in the template image. - - """ - imgInfo = fileinfo.ImageInfo(template_img) - - infiles = landsatangles.applier.FilenameAssociations() - outfiles = landsatangles.applier.FilenameAssociations() - otherargs = landsatangles.applier.OtherInputs() - controls = landsatangles.applier.ApplierControls() - - infiles.img = template_img - outfiles.angles = outfile - - (ctrLat, ctrLong) = landsatangles.getCtrLatLong(imgInfo) - otherargs.R = landsatangles.localRadius(ctrLat) - otherargs.nadirLine = nadirLine - otherargs.xMin = imgInfo.xMin - otherargs.xMax = imgInfo.xMax - otherargs.yMin = imgInfo.yMin - otherargs.yMax = imgInfo.yMax - otherargs.extentSunAngles = extentSunAngles - otherargs.satAltitude = 705000 # Landsat nominal altitude in metres - otherargs.satAzimuth = satAzimuth - otherargs.radianScale = 100 * 180 / np.pi # Store pixel values in degrees and scale factor of 100 - controls.setStatsIgnore(500) - controls.setCalcStats(False) - controls.setOutputDriverName('GTiff') - - landsatangles.applier.apply(landsatangles.makeAngles, infiles, outfiles, otherargs, controls=controls) - - def sunAnglesForExtent(self, imgInfo): - """ - Return array of sun azimuth and zenith for each of the corners of the image - extent. Note that this is the raster extent, not the corners of the swathe. - - The algorithm used here has been copied from the 6S possol() subroutine. The - Fortran code I copied it from was .... up to the usual standard in 6S. So, the - notation is not always clear. - - """ - cornerLatLong = imgInfo.getCorners(outEPSG=4326) - (ul_long, ul_lat, ur_long, ur_lat, lr_long, lr_lat, ll_long, ll_lat) = cornerLatLong - pts = numpy.array([ - [ul_long, ul_lat], - [ur_long, ur_lat], - [ll_long, ll_lat], - [lr_long, lr_lat] - ]) - longDeg = pts[:, 0] - latDeg = pts[:, 1] - - # Date/time in UTC - dateStr = self.observation_date - timeStr = self.scene_center_time.replace('Z', '') - ymd = [int(i) for i in dateStr.split('-')] - dateObj = datetime.date(ymd[0], ymd[1], ymd[2]) - julianDay = (dateObj - datetime.date(ymd[0], 1, 1)).days + 1 - juldayYearEnd = (datetime.date(ymd[0], 12, 31) - datetime.date(ymd[0], 1, 1)).days + 1 - # Julian day as a proportion of the year - jdp = julianDay / juldayYearEnd - # Hour in UTC - hms = [float(x) for x in timeStr.split(':')] - hourGMT = hms[0] + hms[1] / 60.0 + hms[2] / 3600.0 - - (sunAz, sunZen) = landsatangles.sunAnglesForPoints(latDeg, longDeg, hourGMT, jdp) - - sunAngles = numpy.vstack((sunAz, sunZen)).T - return sunAngles - @staticmethod def can_read(product_name): return os.path.basename(product_name).startswith('LANDSAT8') or \ diff --git a/sen2like/sen2like/core/readers/reader.py b/sen2like/sen2like/core/readers/reader.py index 162e316..0ae0048 100644 --- a/sen2like/sen2like/core/readers/reader.py +++ b/sen2like/sen2like/core/readers/reader.py @@ -1,17 +1,26 @@ +"""Base MTL/MTD reader class + +Provides: + MaskImage: dataclass holding enough information to write a mask file + ImageMasks: dataclass combining the nodata and validity masks as 'MaskImage' + MaskInfo: dataclass holding enough information to compute mask QI info + BaseReader: base class for MTL/MTD readers +""" import logging - -import numpy as np from abc import ABC, abstractmethod +import numpy as np + logger = logging.getLogger("Sen2Like") class BaseReader(ABC): + """Base reader for image metadata extraction"""
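+ # NOTE: get_valid_pixel_mask / get_angle_images are no longer abstract
+ # methods of BaseReader: mask and angle extraction now lives in dedicated
+ # extractor classes that core.products.product selects from the reader
+ # class name (see the product-level methods added earlier in this patch).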
def __init__(self, product_path): self.product_path = product_path - logger.info("%s Class" % self.__class__.__name__) - logger.info("Product: %s" % self.product_path) + logger.info("%s Class", self.__class__.__name__) + logger.info("Product: %s", self.product_path) self.is_refined = False # Mandatory attributes @@ -21,19 +30,16 @@ def __init__(self, product_path): self.scene_boundary_lon = None self.absolute_orbit = 'N/A' self.sensor = None # Instrument - self.angles_file = None # All angles images self.data_type = None # Product level - self.mask_filename = None # Mask filename self.observation_date = None self.doy = 0 self.sun_zenith_angle = None self.mission = None # Mission name self.cloud_cover = None self.relative_orbit = None - self.nodata_mask_filename = None self.sun_azimuth_angle = None self.mtl_file_name = None - self.file_date = None # Product date + self.file_date = None # Product date self.tile_metadata = None self.scene_center_time = None self.l2a_qi_report_path = None @@ -53,17 +59,3 @@ def can_read(product_name): :return: """ return False - - @abstractmethod - def get_valid_pixel_mask(self, mask_filename): - pass - - @abstractmethod - def get_angle_images(self, DST=None): - """ - :param DST: Optional name of the output tif containing all angles images - :return: set self.angles_file - Following band order : SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - The unit is RADIANS - """ - pass diff --git a/sen2like/sen2like/core/readers/sentinel2.py b/sen2like/sen2like/core/readers/sentinel2.py index b159465..2a21981 100644 --- a/sen2like/sen2like/core/readers/sentinel2.py +++ b/sen2like/sen2like/core/readers/sentinel2.py @@ -2,18 +2,13 @@ import logging import os import re -import shutil import sys from xml import parsers as pars from xml.dom import minidom import numpy as np -from osgeo import gdal -from skimage.transform import resize as skit_resize import mgrs -from atmcor import get_s2_angles as s2_angles -from core.image_file import S2L_ImageFile from core.metadata_extraction import from_date_to_doy from core.readers.reader import BaseReader @@ -32,8 +27,7 @@ def node_value(dom, node_name): class Sentinel2MTL(BaseReader): - - # Object for metadata extraction + """Object for S2 product metadata extraction""" def __init__(self, product_path, mtd_file=None): super().__init__(product_path) @@ -77,12 +71,10 @@ def __init__(self, product_path, mtd_file=None): dom = minidom.parse(mtl_file_name) except pars.expat.ExpatError as err: self.isValid = False - logging.error("Error during parsing of MTD product file: %s" % mtl_file_name) - logging.error(err) + log.error("Error during parsing of MTD product file: %s", mtl_file_name) + log.error(err) sys.exit(-1) - self.mask_filename = None - self.nodata_mask_filename = None self.aerosol_band = None self.aerosol_value = None @@ -152,7 +144,7 @@ def __init__(self, product_path, mtd_file=None): else: file_path = os.path.join(self.product_path, 'GRANULE', self.granule_id, 'IMG_DATA', file_path + self.file_extension) - log.debug(f'{band_id} {file_path}') + log.debug('%s %s', band_id, file_path) self.bands[band_id] = file_path # Band name ordered by their integer id in datastrip (base on spectral information) spectral_information = dom.getElementsByTagName('Spectral_Information') @@ -167,24 +159,23 @@ def __init__(self, product_path, mtd_file=None): # Collection not applicable for Landsat self.collection = ' ' - self.radio_coefficient_dic = {} - self.radiometric_offset_dic = None + # RESCALING GAIN And OFFSET : - try: - self.quantification_value = 
node_value(dom, 'QUANTIFICATION_VALUE') - except IndexError: - self.quantification_value = node_value(dom, 'BOA_QUANTIFICATION_VALUE') - radio_add_offset_list = dom.getElementsByTagName('RADIO_ADD_OFFSET') - if len(radio_add_offset_list) > 0: - log.debug('Radiometric offsets are finded.') - self.radiometric_offset_dic = {} - for _, node in enumerate(radio_add_offset_list): - band_id = node.attributes['band_id'].value - radio_add_offset = node.childNodes[0].data - self.radiometric_offset_dic[int(band_id)] = radio_add_offset + for quantification_node_name in [ + 'QUANTIFICATION_VALUE', 'BOA_QUANTIFICATION_VALUE', 'L2A_BOA_QUANTIFICATION_VALUE']: + try: + self.quantification_value = node_value( + dom, quantification_node_name) + break + except IndexError: + pass + + self._set_radiometric_offset_dic(dom) + self.dE_S = node_value(dom, 'U') nodes = dom.getElementsByTagName('SOLAR_IRRADIANCE') + self.radio_coefficient_dic = {} self.band_sequence = [] for cpt, node in enumerate(nodes): band_id = node.attributes['bandId'] @@ -194,12 +185,13 @@ def __init__(self, product_path, mtd_file=None): "Gain": 0.00001, "Offset": 0.0, "Solar_irradiance": solar_irradiance } - self.band_sequence = [np.int(rec) + 1 for rec in self.band_sequence] + self.band_sequence = [int(rec) + 1 for rec in self.band_sequence] self.rescaling_gain = [0.00001] * len(self.band_sequence) self.rescaling_offset = [0] * len(self.band_sequence) tab = [self.radio_coefficient_dic[x]["Solar_irradiance"] for x in self.radio_coefficient_dic] self.solar_irradiance = [np.double(rec) for rec in tab] + try: self.cloud_cover = node_value(dom, 'Cloud_Coverage_Assessment') except IndexError: @@ -210,10 +202,10 @@ def __init__(self, product_path, mtd_file=None): scene_boundary_lat = [rec for j, rec in enumerate(pos_list) if j % 2 == 0] scene_boundary_lon = [rec for j, rec in enumerate(pos_list) if j % 2 == 1] self.scene_pos_list = pos_list - arr1 = np.asarray(scene_boundary_lat, np.float) + arr1 = np.asarray(scene_boundary_lat, float) arr1_r = np.roll(arr1, -1) # Retour d index - arr2 = np.asarray(scene_boundary_lon, np.float) + arr2 = np.asarray(scene_boundary_lon, float) arr2_r = np.roll(arr2, -1) x = arr1_r - arr1 # Vecteur X - latitude y = arr2_r - arr2 # Vecteur Y - longitude @@ -241,7 +233,7 @@ def __init__(self, product_path, mtd_file=None): except IndexError: # if not md_list: file_path = None - log.error(' -- Warning - no MTL file found') + log.error(' -- Warning - error with MTD', exc_info=1) log.error(' -- Procedure aborted') self.mtl_file_name = '' # Observation date of the GRANULE @@ -268,7 +260,6 @@ def __init__(self, product_path, mtd_file=None): try: self.doy = 0 - self.angles_file = None sun_node = dom.getElementsByTagName('Mean_Sun_Angle')[0] self.sun_zenith_angle = node_value(sun_node, 'ZENITH_ANGLE') self.sun_azimuth_angle = node_value(sun_node, 'AZIMUTH_ANGLE') @@ -287,7 +278,7 @@ def __init__(self, product_path, mtd_file=None): } # TO USE to set the angle files - self.angles_file = None + # self.angles_file = None node = dom.getElementsByTagName('Tile_Geocoding')[0] self.utm = node_value(node, 'HORIZONTAL_CS_NAME') @@ -329,25 +320,22 @@ def __init__(self, product_path, mtd_file=None): maskpath = os.path.join(product_path, 'GRANULE', self.granule_id, 'QI_DATA', maskpath) - log.debug(f'mask path: {maskpath}') - log.debug(f'mask type: {node.getAttribute("type")}') - if node.getAttribute('type') == 'MSK_CLOUDS': - self.cloudmask = maskpath - elif node.getAttribute('type') == 'MSK_CLASSI': + log.debug('mask path: %s', maskpath) + 
log.debug('mask type: %s', node.getAttribute("type")) + + _type = node.getAttribute('type') + if _type in ['MSK_CLOUDS', 'MSK_CLASSI']: self.cloudmask = maskpath - elif node.getAttribute('type') == 'MSK_NODATA': + elif _type in ['MSK_NODATA', 'MSK_QUALIT']: band = os.path.splitext(maskpath)[0][-3:] self.nodata_mask[band] = maskpath - elif node.getAttribute('type') == 'MSK_QUALIT': - band = os.path.splitext(maskpath)[0][-3:] - self.nodata_mask[band] = maskpath - elif node.getAttribute('type') == 'MSK_DETFOO': + elif _type == 'MSK_DETFOO': band = os.path.splitext(maskpath)[0][-3:] self.detfoo_mask[band] = maskpath - log.debug(f'Cloud Mask: {self.cloudmask}') - log.debug(f'No data mask: {self.nodata_mask}') - log.debug(f'Defective detector: {self.detfoo_mask}') + log.debug('Cloud Mask: %s', self.cloudmask) + log.debug('No data mask: %s', self.nodata_mask) + log.debug('Defective detector: %s', self.detfoo_mask) except IndexError: sys.exit(' TILE MTL Parsing Issue ') else: @@ -365,181 +353,6 @@ def __init__(self, product_path, mtd_file=None): if not os.path.isfile(self.l2a_qi_report_path): self.l2a_qi_report_path = None - def get_valid_pixel_mask(self, mask_filename, res=20): - """ - :param res: - :param mask_filename: - :return: - """ - - log.debug('get valid pixel mask') - if self.scene_classif_band: - log.info('Generating validity and nodata masks from SCL band') - log.debug(f'Read SCL: {self.scene_classif_band}') - scl = S2L_ImageFile(self.scene_classif_band) - scl_array = scl.array - if scl.xRes != res: - shape = (int(scl_array.shape[0] * - scl.yRes / res), int(scl_array.shape[1] * scl.xRes / res)) - log.debug(shape) - scl_array = skit_resize(scl_array, shape, order=0, preserve_range=True).astype(np.uint8) - - valid_px_mask = np.zeros(scl_array.shape, np.uint8) - # Consider as valid pixels : - # VEGETATION and NOT_VEGETATED (valeurs 4 et 5) - # UNCLASSIFIED (7) - # SNOW (11) - EXCLUDED - valid_px_mask[scl_array == 4] = 1 - valid_px_mask[scl_array == 5] = 1 - valid_px_mask[scl_array == 7] = 1 - #valid_px_mask[scl_array == 11] = 1 - - mask = scl.duplicate(mask_filename, array=valid_px_mask, res=res) - mask.write(creation_options=['COMPRESS=LZW']) - self.mask_filename = mask_filename - log.info('Written: {}'.format(mask_filename)) - - # nodata mask - mask_filename = os.path.join(os.path.dirname(mask_filename), 'nodata_pixel_mask.tif') - nodata = np.ones(scl_array.shape, np.uint8) - nodata[scl_array == 0] = 0 - mask = scl.duplicate(mask_filename, array=nodata, res=res) - mask.write(creation_options=['COMPRESS=LZW']) - self.nodata_mask_filename = mask_filename - log.info('Written: {}'.format(mask_filename)) - - return True - - # L1C case for instance -> No SCL, but NODATA and CLD mask - else: - log.debug('L1C Case') - # Nodata Mask - nodata_ref_band = 'B01' - band_path = self.bands[nodata_ref_band] - log.info(f'Generating nodata mask from band {nodata_ref_band}') - log.debug(f'Read band file: {band_path}') - image = S2L_ImageFile(band_path) - array = image.array - nodata_mask_filename = os.path.join(os.path.dirname(mask_filename), - f'nodata_pixel_mask_{nodata_ref_band}.tif') - nodata = np.ones(array.shape, np.uint8) - # shall be 0, but due to compression artefact, threshold increased to 4: - nodata[array <= 4] = 0 - - # resize nodata to output res - shape = (int(nodata.shape[0] * - image.yRes / res), int(nodata.shape[1] * image.xRes / res)) - log.debug(shape) - nodata = skit_resize(nodata, shape, order=0, preserve_range=True).astype(np.uint8) - - # save to image - mask = 
image.duplicate(nodata_mask_filename, array=nodata, res=res) - mask.write(creation_options=['COMPRESS=LZW'], nodata_value=None) - self.nodata_mask_filename = nodata_mask_filename - - if self.cloudmask: - # Cloud mask - rname, ext = os.path.splitext(self.cloudmask) - if ext == '.gml': - log.info('Generating validity mask from cloud mask') - log.debug(f'Read cloud mask: {self.cloudmask}') - # Check if any cloud feature in gml - dom = minidom.parse(self.cloudmask) - nClouds = len(dom.getElementsByTagName('eop:MaskFeature')) - - # rasterize - # make byte mask 0/1, LZW compression - if nClouds > 0: - outputBounds = [self.ULX, self.LRY, self.LRX, self.ULY] - if not os.path.exists(os.path.dirname(mask_filename)): - os.makedirs(os.path.dirname(mask_filename)) - gdal.Rasterize(mask_filename, self.cloudmask, outputType=gdal.GDT_Byte, - creationOptions=['COMPRESS=LZW'], - burnValues=0, initValues=1, outputBounds=outputBounds, outputSRS=self.epsg, - xRes=res, yRes=res) - - # apply nodata to validity mask - dataset = gdal.Open(mask_filename, gdal.GA_Update) - array = dataset.GetRasterBand(1).ReadAsArray() - array[nodata == 0] = 0 - dataset.GetRasterBand(1).WriteArray(array) - dataset = None - else: - # no cloud mask, copy nodata mask - shutil.copy(self.nodata_mask_filename, mask_filename) - log.info('Written: {}'.format(mask_filename)) - self.mask_filename = mask_filename - - elif ext =='.jp2': - log.info('Generating validity mask from cloud mask, baseline 4.0') - log.debug(f'no data mask: {self.nodata_mask_filename}') - log.debug(f'mask filename: {mask_filename}') - - log.debug(f'Read cloud mask: {self.cloudmask}') - dataset = gdal.Open(self.cloudmask, gdal.GA_ReadOnly) - clm_1 = dataset.GetRasterBand(1).ReadAsArray() - clm_2 = dataset.GetRasterBand(2).ReadAsArray() - clm_3 = dataset.GetRasterBand(3).ReadAsArray() - tot = clm_1 + clm_2 + clm_3 - valid_px_mask = np.zeros(clm_1.shape, np.uint8) - valid_px_mask[tot == 0] = 1 - # resize valid_px to output res: - shape = (int(valid_px_mask.shape[0] * - image.yRes / res), int(valid_px_mask.shape[1] * image.xRes / res)) - valid_px_mask = skit_resize(valid_px_mask, shape, order=0, preserve_range=True).astype(np.uint8) - #Applied no data mask: - valid_px_mask[nodata == 0] = 0 - - # save to image - mask = image.duplicate(mask_filename, array=valid_px_mask, res=res) - mask.write(creation_options=['COMPRESS=LZW'], nodata_value=None) - log.info('Written: {}'.format(mask_filename)) - self.mask_filename = mask_filename - - dataset = None - - return True - - def get_angle_images(self, DST=None): - """ - :param DST: OPptional name of the outptu tif containing all angle images - :return: set self.angles_file - Following band order : SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - The unit is DEGREES - """ - if DST is not None: - root_dir = os.path.dirname(DST) - else: - root_dir = os.path.dirname(self.tile_metadata) - - # Viewing Angles (SAT_AZ / SAT_ZENITH) - dst_file = os.path.join(root_dir, 'VAA.tif') - out_file_list = s2_angles.extract_viewing_angle(self.tile_metadata, dst_file, 'Azimuth') - - dst_file = os.path.join(root_dir, 'VZA.tif') - out_file_list.extend(s2_angles.extract_viewing_angle(self.tile_metadata, dst_file, 'Zenith')) - - # Solar Angles (SUN_AZ, SUN_ZENITH) - dst_file = os.path.join(root_dir, 'SAA.tif') - s2_angles.extract_sun_angle(self.tile_metadata, dst_file, 'Azimuth') - out_file_list.append(dst_file) - - dst_file = os.path.join(root_dir, 'SZA.tif') - s2_angles.extract_sun_angle(self.tile_metadata, dst_file, 'Zenith') - out_file_list.append(dst_file) 
- - out_vrt_file = os.path.join(root_dir, 'tie_points.vrt') - gdal.BuildVRT(out_vrt_file, out_file_list, separate=True) - - if DST is not None: - out_tif_file = DST - else: - out_tif_file = os.path.join(root_dir, 'tie_points.tif') - gdal.Translate(out_tif_file, out_vrt_file, format="GTiff") - - self.angles_file = out_vrt_file - log.info('SAT_AZIMUTH, SAT_ZENITH, SUN_AZIMUTH, SUN_ZENITH') - log.info('UNIT = DEGREES (scale: x100)') - log.info('Angles file: ' + out_tif_file) - self.angles_file = out_tif_file - @staticmethod def can_read(product_name): name = os.path.basename(product_name) @@ -557,3 +370,28 @@ def set_zero_in_band_name(self, band): if len(band) == 2: band = band[0] + '0' + band[1] return band + + def _set_radiometric_offset_dic(self, dom: minidom.Document): + """Set the 'radiometric_offset_dic' attribute from: + - RADIO_ADD_OFFSET if present in dom (L1) + - BOA_ADD_OFFSET_VALUES_LIST if present in dom (L2) + - otherwise set it to None + + Args: + dom (minidom.Document): document of L1 or L2 S2 MTD + """ + + # try L2 case first, never present in L1 + radio_add_offset_list = dom.getElementsByTagName('BOA_ADD_OFFSET_VALUES_LIST') + if len(radio_add_offset_list) == 0: + # L1 case, never present in L2 + radio_add_offset_list = dom.getElementsByTagName('RADIO_ADD_OFFSET') + + self.radiometric_offset_dic = None + if len(radio_add_offset_list) > 0: + log.debug('Radiometric offsets found.') + self.radiometric_offset_dic = {} + for node in radio_add_offset_list: + band_id = node.attributes['band_id'].value + radio_add_offset = node.childNodes[0].data + self.radiometric_offset_dic[int(band_id)] = radio_add_offset
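For context on the two MTD shapes handled by `_set_radiometric_offset_dic`: a self-contained illustration of the L1 `RADIO_ADD_OFFSET` variant follows (the XML snippet and its values are fabricated; the L2 `BOA_ADD_OFFSET_VALUES_LIST` case is parsed the same way once the node list is found). Note that the offsets are kept as strings, exactly as `childNodes[0].data` returns them.

```python
# Illustration only: parse RADIO_ADD_OFFSET nodes the way the new
# _set_radiometric_offset_dic does; snippet and values are made up.
from xml.dom import minidom

MTD_SNIPPET = """<Radiometric_Offset_List>
  <RADIO_ADD_OFFSET band_id="0">-1000</RADIO_ADD_OFFSET>
  <RADIO_ADD_OFFSET band_id="1">-1000</RADIO_ADD_OFFSET>
</Radiometric_Offset_List>"""

dom = minidom.parseString(MTD_SNIPPET)
radiometric_offset_dic = {
    int(node.attributes['band_id'].value): node.childNodes[0].data
    for node in dom.getElementsByTagName('RADIO_ADD_OFFSET')
}
print(radiometric_offset_dic)  # {0: '-1000', 1: '-1000'}
```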
diff --git a/sen2like/sen2like/core/readers/sentinel2_maja.py b/sen2like/sen2like/core/readers/sentinel2_maja.py index 9beb82d..715aa6a 100644 --- a/sen2like/sen2like/core/readers/sentinel2_maja.py +++ b/sen2like/sen2like/core/readers/sentinel2_maja.py @@ -10,13 +10,30 @@ from osgeo import gdal import mgrs -from atmcor.get_s2_angles import reduce_angle_matrix, from_values_list_to_array, get_angles_band_index -from core.image_file import S2L_ImageFile +from atmcor.get_s2_angles import reduce_angle_matrix, get_angles_band_index from core.metadata_extraction import from_date_to_doy from core.readers.reader import BaseReader log = logging.getLogger('Sen2Like') +def from_values_list_to_array(selected_node): + col_step = selected_node.findtext('COL_STEP') + row_step = selected_node.findtext('ROW_STEP') + + values_list = selected_node.find('.//Values_List').findall('.//VALUES') + + # x_size, y_size: size of the matrix + x_size = len(values_list[0].text.split()) + y_size = len(values_list) + + # Create a np array of size (x_size, y_size) for the values: + arr = np.empty([x_size, y_size], float) + for j in range(y_size): + a = np.asarray(values_list[j].text.split(), float) + arr[j] = a + + return x_size, y_size, col_step, row_step, arr + class Sentinel2MajaMTL(BaseReader): resolutions = {10: 'R1', 20: 'R2'} @@ -45,12 +62,10 @@ def __init__(self, product_path, mtd_file=None): root = ElementTree.parse(mtl_file_name) except pars.expat.ExpatError as err: self.isValid = False - logging.error("Error during parsing of MTD product file: %s" % mtl_file_name) + logging.error("Error during parsing of MTD product file: %s", mtl_file_name) logging.error(err) sys.exit(-1) - self.mask_filename = None - self.nodata_mask_filename = None self.aerosol_band = None self.aerosol_value = None @@ -82,7 +97,7 @@ def __init__(self, product_path, mtd_file=None): for band_file in bands_files: band_id = band_file.text.split('_')[-1].split('.')[0] file_path = os.path.join(self.product_path, band_file.text) - log.debug('{} {}'.format(band_id, file_path)) + log.debug('%s %s', band_id, file_path) self.bands[band_id] = file_path # Collection not applicable for Landsat @@ -103,7 +118,7 @@ def __init__(self, product_path, mtd_file=None): "Solar_irradiance": solar_irradiance } - # self.band_sequence = [np.int(rec) + 1 for rec in self.band_sequence] + # self.band_sequence = [int(rec) + 1 for rec in self.band_sequence] # self.rescaling_gain = [0.00001] * len(self.band_sequence) # self.rescaling_offset = [0] * len(self.band_sequence) @@ -158,7 +173,6 @@ def __init__(self, product_path, mtd_file=None): try: self.doy = 0 - self.angles_file = None self.sun_zenith_angle = root.findtext('.//Geometric_Informations/Mean_Value_List/Sun_Angles/ZENITH_ANGLE') self.sun_azimuth_angle = root.findtext('.//Geometric_Informations/Mean_Value_List/Sun_Angles/AZIMUTH_ANGLE') @@ -175,7 +189,7 @@ def __init__(self, product_path, mtd_file=None): } # TO USE to set the angle files - self.angles_file = None + # self.angles_file = None geoposition_node = root.find('Geoposition_Informations/Coordinate_Reference_System') self.utm = geoposition_node.findtext('HORIZONTAL_CS_NAME') @@ -220,99 +234,6 @@ def __init__(self, product_path, mtd_file=None): if not os.path.isfile(self.l2a_qi_report_path): self.l2a_qi_report_path = None - def get_valid_pixel_mask(self, mask_filename, res=20): - """ - :param res: - :param mask_filename: - :return: - """ - resolution = self.resolutions.get(res) - mask_band = self.classif_band.get(res) - - log.info('Read validity and nodata masks') - log.debug(f'Read mask: {mask_band}') - - # No data mask - edge = S2L_ImageFile(os.path.join(self.product_path, self.edge_mask[resolution])) - edge_arr = edge.array - defective = S2L_ImageFile(os.path.join(self.product_path, self.nodata_mask[mask_band])) - defective_arr = defective.array - - nodata = np.zeros(edge_arr.shape, np.uint8) - nodata[edge_arr == 1] = 1 - nodata[defective_arr == 1] = 1 - - del edge_arr - del defective_arr - - nodata_mask_filename = os.path.join(os.path.dirname(mask_filename), 'nodata_pixel_mask.tif') - mask = edge.duplicate(nodata_mask_filename, array=nodata) - mask.write(creation_options=['COMPRESS=LZW']) - self.nodata_mask_filename = mask_filename - - # Validity mask - cloud = S2L_ImageFile(os.path.join(self.product_path, self.cloud_mask[resolution])) - cloud_arr = cloud.array - saturation = S2L_ImageFile(os.path.join(self.product_path, self.saturation_mask[mask_band])) - saturation_arr = saturation.array - - valid_px_mask = np.ones(cloud_arr.shape, np.uint8) - valid_px_mask[cloud_arr == 1] = 0 - valid_px_mask[cloud_arr == 2] = 0 - valid_px_mask[cloud_arr == 4] = 0 - valid_px_mask[cloud_arr == 8] = 0 - valid_px_mask[saturation_arr == 1] = 0 - valid_px_mask[nodata == 1] = 0 - - mask = cloud.duplicate(mask_filename, array=valid_px_mask) - mask.write(creation_options=['COMPRESS=LZW']) - self.mask_filename = mask_filename - - return True - - def get_angle_images(self, DST=None): - """ - :param DST: Optional name of the output tif containing all angle images - :return: set self.angles_file - Following band order : SAT_AZ , SAT_ZENITH, SUN_AZ, SUN_ZENITH ') - The unit is DEGREES - """ - if DST is not None: - root_dir = os.path.dirname(DST) - else: - root_dir = os.path.dirname(self.tile_metadata) - - # Viewing Angles (SAT_AZ / SAT_ZENITH) - dst_file = os.path.join(root_dir, 'VAA.tif') - out_file_list = self.extract_viewing_angle(dst_file,
'Azimuth') - - dst_file = os.path.join(root_dir, 'VZA.tif') - out_file_list.extend(self.extract_viewing_angle(dst_file, 'Zenith')) - - # Solar Angles (SUN_AZ, SUN_ZENITH) - dst_file = os.path.join(root_dir, 'SAA.tif') - self.extract_sun_angle(dst_file, 'Azimuth') - out_file_list.append(dst_file) - - dst_file = os.path.join(root_dir, 'SZA.tif') - self.extract_sun_angle(dst_file, 'Zenith') - out_file_list.append(dst_file) - - out_vrt_file = os.path.join(root_dir, 'tie_points.vrt') - gdal.BuildVRT(out_vrt_file, out_file_list, separate=True) - - if DST is not None: - out_tif_file = DST - else: - out_tif_file = os.path.join(root_dir, 'tie_points.tif') - gdal.Translate(out_tif_file, out_vrt_file, format="GTiff") - - self.angles_file = out_vrt_file - log.info('SAT_AZ, SAT_ZENITH, SUN_AZ, SUN_ZENITH') - log.info('UNIT = DEGREES (scale: x100) :') - log.info(' ' + out_tif_file) - self.angles_file = out_tif_file - @staticmethod def can_read(product_name): name = os.path.basename(product_name) @@ -366,18 +287,18 @@ def extract_viewing_angle(self, dst_file, angle_type): arr[arr > 180] -= 360 # Create gdal dataset - x_res = np.int(x_size) - y_res = np.int(y_size) + x_res = int(x_size) + y_res = int(y_size) - x_pixel_size = np.int(col_step) - y_pixel_size = np.int(row_step) + x_pixel_size = int(col_step) + y_pixel_size = int(row_step) dst_file_bd = dst_file.replace('.tif', '_band_' + str(rec) + '.tif') out_list.append(dst_file_bd) - log.debug(' Save in {}'.format(dst_file_bd)) + log.debug(' Save in %s', dst_file_bd) target_ds = gdal.GetDriverByName('GTiff').Create(dst_file_bd, x_res, y_res, 1, gdal.GDT_Int16) target_ds.SetGeoTransform( - (np.int(np.float(ulx)), x_pixel_size, 0, np.int(np.float(uly)), 0, -y_pixel_size)) + (int(float(ulx)), x_pixel_size, 0, int(float(uly)), 0, -y_pixel_size)) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) band.SetDescription('Viewing_' + angle_type + '_band_' + str(rec)) # This sets the band name! @@ -386,7 +307,6 @@ def extract_viewing_angle(self, dst_file, angle_type): band = None target_ds = None arr = None - a = None return out_list @@ -411,7 +331,7 @@ def extract_sun_angle(self, dst_file, angle_type): wkt = srs.ExportToWkt() # Load xml file and extract parameter for sun zenith : - node_name = 'Sun_Angles_Grid' # Level-1C / Level-2A ? + # Level-1C / Level-2A ? 
sun_angle_node = root.find('.//Sun_Angles_Grids') selected_node = sun_angle_node.find(f'.//{angle_type}') @@ -423,15 +343,15 @@ def extract_sun_angle(self, dst_file, angle_type): arr[arr > 180] -= 360 # Create gdal dataset - x_res = np.int(x_size) - y_res = np.int(y_size) + x_res = int(x_size) + y_res = int(y_size) - x_pixel_size = np.int(col_step) - y_pixel_size = np.int(row_step) + x_pixel_size = int(col_step) + y_pixel_size = int(row_step) - log.debug(' Save in {}'.format(dst_file)) + log.debug(' Save in %s', dst_file) target_ds = gdal.GetDriverByName('GTiff').Create(dst_file, x_res, y_res, 1, gdal.GDT_Int16) - target_ds.SetGeoTransform((np.int(np.float(ulx)), x_pixel_size, 0, np.int(np.float(uly)), 0, -y_pixel_size)) + target_ds.SetGeoTransform((int(float(ulx)), x_pixel_size, 0, int(float(uly)), 0, -y_pixel_size)) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) band.SetDescription('Solar_' + angle_type) @@ -442,22 +362,3 @@ def get_scene_center_coordinates(self): m = mgrs.MGRS() lat, lon = m.toLatLon(self.mgrs + '5490045100') return lon, lat - - -def from_values_list_to_array(selected_node): - col_step = selected_node.findtext('COL_STEP') - row_step = selected_node.findtext('ROW_STEP') - - values_list = selected_node.find('.//Values_List').findall('.//VALUES') - - # x_size, y_size , size of the matrix - x_size = len(values_list[0].text.split()) - y_size = len(values_list) - - # Create np array of size (x_size, y_size) for values : - arr = np.empty([x_size, y_size], np.float) - for j in range(y_size): - a = np.asarray(values_list[j].text.split(), np.float) - arr[j] = a - - return x_size, y_size, col_step, row_step, arr diff --git a/sen2like/sen2like/core/toa_reflectance.py b/sen2like/sen2like/core/toa_reflectance.py index f95232d..9471dc7 100644 --- a/sen2like/sen2like/core/toa_reflectance.py +++ b/sen2like/sen2like/core/toa_reflectance.py @@ -20,16 +20,16 @@ def convert_to_reflectance_from_reflectance_cal_product(mtl, data_in, band): reflectance_data = None if mtl.sensor == 'OLI' or mtl.sensor == 'OLI_TIRS': # LANDSAT 8 - log.info("Sun Zenith angle : {} deg".format(mtl.sun_zenith_angle)) + log.info("Sun Zenith angle : %s deg", mtl.sun_zenith_angle) sun_elevation_angle = 90. 
- mtl.sun_zenith_angle - log.info("Sun Elevation angle : {} deg".format(sun_elevation_angle)) + log.info("Sun Elevation angle : %s deg", sun_elevation_angle) gain = offset = None for k, x in list(mtl.radio_coefficient_dic.items()): if 'B' + x['Band_id'] == band: gain = str(x['Gain']) offset = str(x['Offset']) - log.info('Band Id : {} Gain : {} / Offset : {}'.format(x['Band_id'], gain, offset)) + log.info('Band Id : %s Gain : %s / Offset : %s', x['Band_id'], gain, offset) if gain is not None and offset is not None: if 'L2' in mtl.data_type: # Level-2 product surface reflectance is independent from sun_elevation_angle reflectance_data = (np.float32(data_in) * np.float32(gain) + np.float32(offset)) diff --git a/sen2like/sen2like/generate_stac_files.py b/sen2like/sen2like/generate_stac_files.py index 7e11dec..207139f 100644 --- a/sen2like/sen2like/generate_stac_files.py +++ b/sen2like/sen2like/generate_stac_files.py @@ -7,7 +7,7 @@ import rasterio -from core.QI_MTD.stac_interface import STACWriter, S2LSTACCatalog, S2LSTACCatalog_Tile, S2LSTACCatalog_Product +from core.QI_MTD.stac_interface import S2LSTACCatalog, S2LSTACCatalog_Tile, S2LSTACCatalog_Product from core.products.hls_product import S2L_HLS_Product stats = {} @@ -75,12 +75,11 @@ def main(args): parser.add_argument("catalog_dir", help="Path to catalog output directory") parser.add_argument( "catalog_dir_url", - help="The base url call by stac client to get catalog directory " - "(exemple: if calalog url is http://sen2like.com/stac/catalog.json, the base url is http://sen2like.com/stac)") + help="The base url called by a STAC client to get the catalog directory " + "(example: if the catalog url is http://sen2like.com/stac/catalog.json, the base url is http://sen2like.com/stac)") parser.add_argument("s2l_out", help="The sen2like output directory") parser.add_argument("s2l_out_url", help="The base url to access the sen2like output directory") - # parser.add_argument("--is-tile", help="Indicates if the path is a tile path", action='store_true', dest='is_tile') parser.add_argument("--dry-run", help="Only list products.
Do not generate files.", action='store_true') parser.add_argument("--cog", help="Set image assets type to COG", action='store_true') diff --git a/sen2like/sen2like/grids/db_filter_utm.py b/sen2like/sen2like/grids/db_filter_utm.py deleted file mode 100644 index 480e443..0000000 --- a/sen2like/sen2like/grids/db_filter_utm.py +++ /dev/null @@ -1,47 +0,0 @@ -import sqlite3 - -import pandas as pd - -conn1 = sqlite3.connect('s2grid.db') -conn2 = sqlite3.connect('../core/product_archive/data/l8_s2_coverage.db') - -df = pd.read_sql_query('SELECT WRS_ID FROM l8tiles', conn1) -list_wrs = df['WRS_ID'].tolist() - -# parse db -print('parse db') -dataframes = [] -count = 0 -n = len(list_wrs) -for i, wrs_id in enumerate(list_wrs): - print(i, n) - # get utm - df = pd.read_sql_query(f'SELECT UTM FROM l8tiles WHERE WRS_ID=="{wrs_id}"', conn1) - utm = int(df['UTM']) - - # get l8_s2_coverage on this wrs id - df = pd.read_sql_query(f'SELECT * FROM l8_s2_coverage WHERE WRS_ID=="{wrs_id}"', conn2) - - # keep only tile with same utm - df = df[df['TILE_ID'].str.startswith(f'{utm}')] - dataframes.append(df) - """count += 1 - if count == 10: - break""" - -# close input database -conn1.close() -conn2.close() - -# concat, sort, clean -print('concat') -df = pd.concat(dataframes) -df.sort_values(by='TILE_ID', axis=0, inplace=True) -df.drop(columns=['index'], inplace=True) -df.reset_index(inplace=True, drop=True) - -# to sql -print('write new db') -conn = sqlite3.connect('../core/product_archive/data/l8_s2_coverage_new.db') -df.to_sql('l8_s2_coverage', conn) -conn.close() diff --git a/sen2like/sen2like/grids/kml2db.py b/sen2like/sen2like/grids/kml2db.py deleted file mode 100644 index 0a301f1..0000000 --- a/sen2like/sen2like/grids/kml2db.py +++ /dev/null @@ -1,75 +0,0 @@ -import sqlite3 -from collections import OrderedDict - -import pandas as pd -from pykml import parser - - -def readDescription(pm): - """ - read kml description and derive a dictionary - with key/value for each parameters - (expected keys: ['UTM_WKT', 'EPSG', 'TILE_ID', 'LL_WKT', 'MGRS_REF']) - """ - - # read string stream - lines = [] - line = '' - isText = False - for c in des: - if c == '<': - isText = False - if line.strip() != '': - lines.append(line) - line = '' - if isText: - line += c - if c == '>': - isText = True - - # fill dictionary - meta = {} - for i in range(0, len(lines), 2): - meta[lines[i]] = lines[i + 1] - - # return dictionary - return meta - - -# MAIN - -# KML can be downloaded on ESA website: -# https://sentinel.esa.int/documents/247904/1955685/S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml - -# read kml as string -with open('S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml') as f: - string = f.read() - -# parse kml string with pykml -root = parser.fromstring(string) -print('nb tiles:', len(root.Document.Folder.Placemark)) - -# create empty dic with keys -dic = OrderedDict() - -# now for each tile -for pm in root.Document.Folder.Placemark[0:]: - # get tileid and description - tileid = pm.name.text - des = pm.description.text - - # get key/values pairs from kml description - meta = readDescription(pm) - for key in ['TILE_ID', 'EPSG', 'UTM_WKT', 'MGRS_REF', 'LL_WKT']: - if key not in list(dic.keys()): - # init list - dic[key] = [] - # add values - dic[key].append(meta[key]) - -df = pd.DataFrame.from_dict(dic) -conn = sqlite3.connect('s2grid.db') -df.to_sql('s2tiles', conn) -conn.close() - -print('OK') diff --git a/sen2like/sen2like/grids/kml2s2tiles.py 
b/sen2like/sen2like/grids/kml2s2tiles.py index aef9631..fcc0532 100644 --- a/sen2like/sen2like/grids/kml2s2tiles.py +++ b/sen2like/sen2like/grids/kml2s2tiles.py @@ -55,9 +55,6 @@ def main(): # now for each tile for pm in root.Document.Folder.Placemark[0:]: - # get tileid and description - tileid = pm.name.text - # get key/values pairs from kml description meta = readDescription(pm) for key in ['TILE_ID', 'EPSG', 'UTM_WKT', 'MGRS_REF', 'LL_WKT']: @@ -72,8 +69,8 @@ def main(): conn.enable_load_extension(True) conn.load_extension("mod_spatialite") create_req = ( - f"CREATE TABLE s2tiles (" - f"TILE_ID VARCHAR(5), EPSG VARCHAR(5), UTM_WKT VARCHAR, MGRS_REF VARCHAR, LL_WKT VARCHAR, geometry POLYGON); " + "CREATE TABLE s2tiles (" + "TILE_ID VARCHAR(5), EPSG VARCHAR(5), UTM_WKT VARCHAR, MGRS_REF VARCHAR, LL_WKT VARCHAR, geometry POLYGON); " ) conn.execute(create_req) insert_req = ( diff --git a/sen2like/sen2like/grids/mgrs_framing.py b/sen2like/sen2like/grids/mgrs_framing.py index 2f0ecf7..5993c2c 100644 --- a/sen2like/sen2like/grids/mgrs_framing.py +++ b/sen2like/sen2like/grids/mgrs_framing.py @@ -30,7 +30,6 @@ def resample(imagein, res, filepath_out): # get input resolution input_res = imagein.xRes - dst_in = None # SCIKIT resampling fullRes = imagein.array @@ -60,7 +59,6 @@ def pixel_center(image, tilecode): inSR = osr.SpatialReference() inSR.ImportFromEPSG(int('32' + ('6' if orientation == 'N' else '7') + str(utm))) outSR = osr.SpatialReference(wkt=image.projection) - utm_offset = 0 if not inSR.IsSame(outSR): transformater = osr.CoordinateTransformation(inSR, outSR) northing, easting = transformater.TransformPoint((northing, easting)) @@ -138,8 +136,6 @@ def reframe(image, tilecode, filepath_out, dx=0., dy=0., order=3, dtype=None, ma def reframeMulti(filepath_in, tilecode, filepath_out, dx=0., dy=0., order=3): - from osgeo import gdal - from math import ceil # get roi from mgrs tilecode converter = grids.GridsConverter() diff --git a/sen2like/sen2like/grids/precompute.py b/sen2like/sen2like/grids/precompute.py deleted file mode 100644 index 32e3dea..0000000 --- a/sen2like/sen2like/grids/precompute.py +++ /dev/null @@ -1,81 +0,0 @@ -import sqlite3 -from collections import OrderedDict - -import pandas as pd - -from .grids import GridsConverter - -""" -### REQUIRES: ### -### TO ADD THIS CODE IN THE CLASS GridsConverter (grids.py) ### - - - -from shapely.wkt import loads - - - - def _get_l8tiles(self): - return pd.read_sql_query("SELECT * FROM l8tiles", self.conn) - - def close(self): - self.conn.close() - - def getOverlaps(self, tilecode, minCoverage=0): - #TODO: optimize because it is too slow. (Precompute for all S2 tiles? Use geodatabase like spatialite?)
- #TODO: should overlap only on same UTM zone - # get mgrs info for tilecode - mgrsinfo = self._get_roi(tilecode) - wkt1 = mgrsinfo['LL_WKT'].item() - g1 = loads(wkt1) - if not g1.is_valid: - print "Polygon geometry is not valid for tile {}".format(tilecode) - return None - - res = [] - l8tiles = self._get_l8tiles() - for index, row in l8tiles.iterrows(): - wkt2 = row['LL_WKT'] - g2 = loads(wkt2) - if g2.intersects(g1): - if not g2.is_valid: - print "Polygon geometry is not valid for tile {}".format(row['WRS_ID']) - else: - coverage = 100 * g2.intersection(g1).area / g1.area - if coverage >= minCoverage: - res.append((row['WRS_ID'], row['PATH'], row['ROW'], coverage)) - return res - -""" - -# init grids converter -converter = GridsConverter() - -dic = OrderedDict() -dic['TILE_ID'] = [] -dic['WRS_ID'] = [] -dic['Coverage'] = [] - -df = pd.read_sql_query('SELECT TILE_ID FROM s2tiles', converter.conn) -tilecodes = df['TILE_ID'].tolist() - -for tilecode in tilecodes: - print(tilecode) - # get WRS tiles that overlaps - res = converter.getOverlaps(tilecode) - for r in res: - dic['TILE_ID'].append(tilecode) - dic['WRS_ID'].append(r[0]) - dic['Coverage'].append(r[3]) - -# close DB -converter.close() - -# to pandas -df = pd.DataFrame.from_dict(dic) -dic = None - -# to sql -conn = sqlite3.connect('l8_s2_coverage.db') -df.to_sql('l8_s2_coverage', conn) -conn.close() diff --git a/sen2like/sen2like/grids/s2grid.db b/sen2like/sen2like/grids/s2grid.db deleted file mode 100644 index 0ab18ff..0000000 Binary files a/sen2like/sen2like/grids/s2grid.db and /dev/null differ diff --git a/sen2like/sen2like/grids/wrskml2db.py b/sen2like/sen2like/grids/wrskml2db.py deleted file mode 100644 index e826b14..0000000 --- a/sen2like/sen2like/grids/wrskml2db.py +++ /dev/null @@ -1,102 +0,0 @@ -import sqlite3 -from collections import OrderedDict -from math import floor - -import pandas as pd -from pykml import parser - - -def readDescription(des): - """ - read kml description and derive a dictionary - with key/value for each parameters - (expected keys: ['UTM_WKT', 'EPSG', 'TILE_ID', 'LL_WKT', 'MGRS_REF']) - """ - - # read string stream - lines = [] - line = '' - isText = False - for c in des: - if c == '<': - isText = False - if line.strip() != '': - lines.append(line) - line = '' - if isText: - line += c - if c == '>': - isText = True - - # fill dictionary - meta = {} - for i in range(0, len(lines), 2): - meta[lines[i]] = lines[i + 1] - - # return dictionary - return meta - - -# MAIN - -# KML can be downloaded on Landsat website: -# https://landsat.usgs.gov/pathrow-shapefiles - -# read kml as string -with open('WRS-2_bound_world.kml') as f: - string = f.read() - -# parse kml string with pykml -root = parser.fromstring(string) -print('nb tiles:', len(root.Document.Placemark)) - -# create empty dic with keys -dic = OrderedDict() - -# now for each tile -for pm in root.Document.Placemark[0:]: - meta = {'WRS_ID': pm.name.text} - # get tileid and description - [path, row] = meta['WRS_ID'].split('_') - meta['PATH'] = path - meta['ROW'] = row - coords_txt = pm.Polygon.outerBoundaryIs.LinearRing.coordinates.text.strip() - # -178.785,-80.929,6999.999999999999 -177.567, - # -82.68000000000001,6999.999999999999 169.654, - # -82.68000000000001,6999.999999999999 170.873, - # -80.929,6999.999999999999 -178.785, - # -80.929,6999.999999999999 - - coords = [] - for coord in coords_txt.split(' '): - coords.append(" ".join(coord.split(',')[:2])) # keeping x,y removing z - - # MULTIPOLYGON((( - # 179.938002683357 - 72.9727796803777, 
179.755750140428 - 73.9555330546422, -176.683165143952 - 73.9797063483524, - # -176.7009986188 - 72.9954805993986, 179.938002683357 - 72.9727796803777))) - - ll_wkt = "MULTIPOLYGON(((" + ",".join(coords) + ")))" - meta['LL_WKT'] = ll_wkt - """print path, row - print coords - print ll_wkt""" - - # get utm zone - ctr_lon = float(pm.description.text.split('CTR LON:')[-1].split('
')[0]) - utm_zone = floor((ctr_lon + 180) / 6) + 1 - meta['UTM'] = utm_zone - - # get key/values pairs from kml description - for key in ['WRS_ID', 'PATH', 'ROW', 'LL_WKT', 'UTM']: - if key not in list(dic.keys()): - # init list - dic[key] = [] - # add values - dic[key].append(meta[key]) - -df = pd.DataFrame.from_dict(dic) -dic = None - -conn = sqlite3.connect('s2grid.db') -df.to_sql('l8tiles', conn) -conn.close() diff --git a/sen2like/sen2like/grids/wrskml2l8tiles.py b/sen2like/sen2like/grids/wrskml2l8tiles.py index 7c31aec..932dcc9 100644 --- a/sen2like/sen2like/grids/wrskml2l8tiles.py +++ b/sen2like/sen2like/grids/wrskml2l8tiles.py @@ -92,8 +92,8 @@ def main(): conn.enable_load_extension(True) conn.load_extension("mod_spatialite") create_req = ( - f"CREATE TABLE l8tiles (" - f"PATH_ROW VARCHAR(7), PATH VARCHAR(3), ROW VARCHAR(3), LL_WKT VARCHAR, geometry POLYGON, UTM INTEGER); " + "CREATE TABLE l8tiles (" + "PATH_ROW VARCHAR(7), PATH VARCHAR(3), ROW VARCHAR(3), LL_WKT VARCHAR, geometry POLYGON, UTM INTEGER); " ) conn.execute(create_req) insert_req = ( diff --git a/sen2like/sen2like/misc/SCL_to_valid_pixel_mask.py b/sen2like/sen2like/misc/SCL_to_valid_pixel_mask.py index 448ddf6..5e806e4 100644 --- a/sen2like/sen2like/misc/SCL_to_valid_pixel_mask.py +++ b/sen2like/sen2like/misc/SCL_to_valid_pixel_mask.py @@ -5,6 +5,7 @@ import numpy as np from osgeo import gdal +from skimage.morphology import binary_closing, binary_opening, binary_dilation, disk, square # inputs # scl_image = sys.argv[1] @@ -75,7 +76,6 @@ def filter_isolated_cells(image, struct): """ # Closing -from skimage.morphology import binary_closing, binary_opening, binary_dilation, disk, square # valid_px_mask = closing(valid_px_mask, disk(5)) # valid_px_mask = erosion(valid_px_mask, square(3)) diff --git a/sen2like/sen2like/s2l_processes/S2L_Atmcor.py b/sen2like/sen2like/s2l_processes/S2L_Atmcor.py index 4892c82..3b9c006 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Atmcor.py +++ b/sen2like/sen2like/s2l_processes/S2L_Atmcor.py @@ -6,13 +6,16 @@ import logging import os -from osgeo import gdal import numpy as np from atmcor.atmospheric_parameters import ATMO_parameter from atmcor.cams_data_reader import ECMWF_Product from atmcor.smac import smac from core import S2L_config +from core.QI_MTD.mtd import metadata +from core.S2L_config import config +from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product from s2l_processes.S2L_Process import S2L_Process log = logging.getLogger("Sen2Like") @@ -40,45 +43,6 @@ def get_smac_coefficients(product, band): return None -def smac_correction_grid(obs_datetime, extent, hcs_code, resolution=120): - output_filename = 'output_file.tif' - - ecmwf_data = ECMWF_Product(cams_config=get_cams_configuration(), observation_datetime=obs_datetime) - - new_SRS = gdal.osr.SpatialReference() - new_SRS.ImportFromEPSG(int(4326)) - - if ecmwf_data.is_valid: - # Write cams file - cams_file = 'cams_file.tif' - etype = gdal.GDT_Float32 - driver = gdal.GetDriverByName('GTiff') - dst_ds = driver.Create(cams_file, xsize=ecmwf_data.longitude.size, - ysize=ecmwf_data.latitude.size, bands=4, eType=etype, options=[]) - dst_ds.SetProjection(new_SRS.ExportToWkt()) - x_res = (ecmwf_data.longitude.max() - ecmwf_data.longitude.min()) / ecmwf_data.longitude.size - y_res = (ecmwf_data.latitude.max() - ecmwf_data.latitude.min()) / ecmwf_data.latitude.size - geotranform = (ecmwf_data.longitude.min(), x_res, 0, ecmwf_data.latitude.max(), 0, -y_res) - dst_ds.SetGeoTransform(geotranform) - - 
dst_ds.GetRasterBand(1).WriteArray(ecmwf_data.aod550.astype(np.float))
-        dst_ds.GetRasterBand(2).WriteArray(ecmwf_data.tcwv.astype(np.float))
-        dst_ds.GetRasterBand(3).WriteArray(ecmwf_data.gtco3.astype(np.float))
-        dst_ds.GetRasterBand(4).WriteArray(ecmwf_data.msl.astype(np.float))
-
-        dst_ds.FlushCache()
-
-        # Warp cams data on input spatial extent
-        options = gdal.WarpOptions(srcSRS=dst_ds.GetProjection(), dstSRS=hcs_code, xRes=resolution,
-                                   yRes=resolution,
-                                   resampleAlg='cubicspline',
-                                   outputBounds=extent)
-        gdal.Warp(output_filename, cams_file, options=options)
-        dst_ds = None
-
-        return output_filename
-
-
 def smac_correction(product, array_in, extent, band):
     """
     Atmospheric correction with SMAC
@@ -223,18 +187,48 @@
 class S2L_Atmcor(S2L_Process):
+    """
+    Atmo Correction processing block class.
+    Only able to run the SMAC atmo corr, as sen2cor cannot be run band by band.
+    If use_sen2cor=True in the config, this class only sets the S2A_AC AC_PROCESSOR quality parameter.
+    If use_smac=True in the config, it runs the SMAC atmo corr and sets the S2A_AC quality parameters.
+    Notice that use_sen2cor and use_smac can be overridden depending on the type of product to process.
+    See the sen2like module for details.
+    """
-    def process(self, product, image, band):
+    def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile:
         log.info('Start')
-        # SMAC correction
-        extent = image.getCorners(outEPSG=4326)
-        array_in = image.array
-        array_out = smac_correction(product, array_in, extent, band)
-        image = image.duplicate(self.output_file(product, band), array_out)
-        if S2L_config.config.getboolean('generate_intermediate_products'):
-            image.write(creation_options=['COMPRESS=LZW'])
+        out_image = image
+
+        if config.getboolean('use_smac'):
+            # SMAC correction
+            extent = image.getCorners(outEPSG=4326)
+            array_in = image.array
+            array_out = smac_correction(product, array_in, extent, band)
+            out_image = image.duplicate(self.output_file(product, band), array_out)
+            if S2L_config.config.getboolean('generate_intermediate_products'):
+                image.write(creation_options=['COMPRESS=LZW'])
+        else:
+            log.info("Atmo corr already done with sen2cor")
         log.info('End')
-        return image
+        return out_image
+
+    def postprocess(self, product: S2L_Product):
+        """Set QI params
+
+        Args:
+            product (S2L_Product): product to post process
+        """
+        if config.getboolean('use_sen2cor'):
+            metadata.qi["AC_PROCESSOR"] = "SEN2COR"
+
+        elif config.getboolean('use_smac'):
+            metadata.qi["AC_PROCESSOR"] = "SMAC"
+            # TODO: put config param in self ?
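Editor's illustration, not part of the patch: a minimal sketch of the QI block this
postprocess method fills. The keys mirror the metadata.qi assignments just below;
every value is hypothetical.

    # Sketch only, assuming use_smac=True and the quoted config keys:
    expected_qi_smac = {
        "AC_PROCESSOR": "SMAC",
        "GRANULE_MEAN_WV": "2.3",    # from config key 'uH2O'
        "OZONE_VALUE": "0.33",       # from config key 'uO3'
        "PRESSURE": "1013.25",       # from config key 'pressure'
        "GRANULE_MEAN_AOT": "0.1",   # from config key 'taup550'
    }
    # With use_sen2cor=True instead, only {"AC_PROCESSOR": "SEN2COR"} is set.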
+ metadata.qi["GRANULE_MEAN_WV"] = S2L_config.config.get('uH2O') + metadata.qi["OZONE_VALUE"] = S2L_config.config.get('uO3') + metadata.qi["PRESSURE"] = S2L_config.config.get('pressure') + metadata.qi["GRANULE_MEAN_AOT"] = S2L_config.config.get('taup550') diff --git a/sen2like/sen2like/s2l_processes/S2L_Fusion.py b/sen2like/sen2like/s2l_processes/S2L_Fusion.py index 806e969..e5af15c 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Fusion.py +++ b/sen2like/sen2like/s2l_processes/S2L_Fusion.py @@ -17,6 +17,7 @@ from core import S2L_config from core.image_file import S2L_ImageFile from core.products.hls_product import S2L_HLS_Product +from core.products.product import S2L_Product from grids import mgrs_framing from s2l_processes.S2L_Process import S2L_Process from core.S2L_tools import out_stat @@ -75,26 +76,29 @@ class S2L_Fusion(S2L_Process): def initialize(self): self.reference_products = [] + self._predict_method = None - def preprocess(self, pd): + def preprocess(self, product: S2L_Product): + + log.info('Start') # check most recent HLS S2 products available archive_dir = S2L_config.config.get('archive_dir') - tsdir = join(archive_dir, pd.mtl.mgrs) + tsdir = join(archive_dir, product.mtl.mgrs) # list products with dates pdlist = [] for pdpath in sorted(glob.glob(tsdir + '/L2F_*_S2*')): pdname = basename(pdpath) date = dt.datetime.strptime(pdname.split('_')[2], '%Y%m%d').date() - if date <= pd.acqdate.date(): + if date <= product.acqdate.date(): pdlist.append([date, pdpath]) # Handle new format aswell for pdpath in sorted(glob.glob(tsdir + '/S2*L2F_*')): pdname = basename(pdpath) date = dt.datetime.strptime(os.path.splitext(pdname.split('_')[2])[0], '%Y%m%dT%H%M%S').date() - if date <= pd.acqdate.date(): + if date <= product.acqdate.date(): pdlist.append([date, pdpath]) # sort by date @@ -104,16 +108,18 @@ def preprocess(self, pd): self.reference_products = [] nb_products = int(S2L_config.config.get('predict_nb_products', 2)) for date, pdname in pdlist[-nb_products:]: - product = S2L_HLS_Product(pdname) - if product.product is not None: - self.reference_products.append(product) + ref_product = S2L_HLS_Product(pdname) + if ref_product.s2l_product_class is not None: + self.reference_products.append(ref_product) - for product in self.reference_products: - log.info('Selected product: {}'.format(product.name)) + for ref_product in self.reference_products: + log.info('Selected product: {}'.format(ref_product.name)) S2L_config.config.set('none_S2_product_for_fusion', len(self.reference_products) == 0) - def process(self, product, image, band): + log.info('End') + + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') if not S2L_config.config.getboolean('hlsplus'): @@ -147,14 +153,14 @@ def process(self, product, image, band): 'Not enough Sentinel2 products for the predict (only one product). 
Using last S2 product as ref.') predict_method = 'composite' + self._predict_method = predict_method # general info band_s2 = product.get_s2like_band(band) image_file_L2F = self.reference_products[0].get_band_file(band_s2, plus=True) output_shape = (image_file_L2F.ySize, image_file_L2F.xSize) # method: prediction (from the 2 most recent S2 products) - if predict_method == 'predict': - metadata.qi['PREDICTED_METHODE'] = 'predict' + if self._predict_method == 'predict': # Use QA (product selection) to apply Composting : qa_mask = self._get_qa_band(output_shape) @@ -168,8 +174,7 @@ def process(self, product, image, band): self._save_as_image_file(image_file_L2F, array_L2F_predict, product, band, '_FUSION_L2F_PREDICT.TIF') # method: composite (most recent valid pixels from N products) - elif predict_method == 'composite': - metadata.qi['PREDICTED_METHODE'] = 'composite' + elif self._predict_method == 'composite': # composite array_L2H_predict, array_L2F_predict = self._composite(product, band_s2, output_shape) @@ -180,11 +185,11 @@ def process(self, product, image, band): # method: unknown else: - log.error(f'Unknown predict method: {predict_method}. Please check your configuration.') + log.error('Unknown predict method: %s. Please check your configuration.', self._predict_method) return None # fusion L8/S2 - mask_filename = product.mtl.nodata_mask_filename + mask_filename = product.nodata_mask_filename array_out = self._fusion(image, array_L2H_predict, array_L2F_predict, mask_filename).astype(np.float32) image_out = self._save_as_image_file(image_file_L2F, array_out, product, band, '_FUSION_L2H_PREDICT.TIF') @@ -213,11 +218,26 @@ def process(self, product, image, band): threshold_msk.write(creation_options=['COMPRESS=LZW']) product.fusion_auto_check_threshold_msk_file = threshold_msk.filepath - log.info('End') return image_out + def postprocess(self, product: S2L_Product): + """Set QI params + + Args: + product (S2L_Product): product to post process + """ + + log.info('Start') + + metadata.qi["FUSION_AUTO_CHECK_THRESHOLD"] = S2L_config.config.getfloat( + 'fusion_auto_check_threshold') + + metadata.qi["PREDICTED_METHOD"] = self._predict_method + + log.info('End') + def _save_as_image_file(self, image_template, array, product, band, extension): path = os.path.join(S2L_config.config.get('wd'), product.name, product.get_band_file(band).rootname + extension) image_file = image_template.duplicate(path, array=array) @@ -342,7 +362,7 @@ def _predict(self, product, band_s2, qa_mask, output_shape): B = array2 - A * doy_2 # Compute Predicted Image at input_xdoy - array_dp_raw = A * (np.float(input_xdoy)) + B + array_dp_raw = A * (float(input_xdoy)) + B array_dp = array_dp_raw * M1 + array1 * M3 + array2 * M2 # + array_dp_raw [qa_mask == 0] diff --git a/sen2like/sen2like/s2l_processes/S2L_GeometryKLT.py b/sen2like/sen2like/s2l_processes/S2L_GeometryKLT.py index f6a4116..066bc21 100644 --- a/sen2like/sen2like/s2l_processes/S2L_GeometryKLT.py +++ b/sen2like/sen2like/s2l_processes/S2L_GeometryKLT.py @@ -13,6 +13,7 @@ from core import S2L_config from core.QI_MTD.mtd import metadata from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product from grids import mgrs_framing from s2l_processes.S2L_Process import S2L_Process @@ -65,6 +66,29 @@ def extract_features(data, ddepth=cv2.CV_8U, ksize=5, mask=None): return result +def reframe_mask(product: S2L_Product, product_mask_filename_attr: str, output_filename: str, **kwargs): + """Reframe a mask of a product and set product reader 
mask attr to the reframed mask + + Args: + product (S2L_Product): product having the mask + product_mask_filename_attr (str): mask file name attr in the product reader + output_filename (str): filename for the reframed mask + **kwargs: any args for 'S2L_ImageFile.write' except 'creation_options' + """ + filepath_out = os.path.join(S2L_config.config.get( + 'wd'), product.name, output_filename) + + mask_file_path = getattr(product, product_mask_filename_attr, None) + image = S2L_ImageFile(mask_file_path) + + out_image = mgrs_framing.reframe(image, product.mtl.mgrs, filepath_out, S2L_config.config.getfloat( + 'dx'), S2L_config.config.getfloat('dy'), order=0) + + out_image.write(creation_options=['COMPRESS=LZW'], **kwargs) + + setattr(product, product_mask_filename_attr, filepath_out) + + def KLT_Tracker(reference, imagedata, mask, matching_winsize=25): ## @@ -91,7 +115,7 @@ def KLT_Tracker(reference, imagedata, mask, matching_winsize=25): return None, None, 0 # define KLT parameters-for matching - log.info("Using window of size {} for matching.".format(matching_winsize)) + log.info("Using window of size %s for matching.", matching_winsize) # LSM input parameters - termination criteria for corner estimation/stopping criteria lk_params = dict(winSize=(matching_winsize, matching_winsize), maxLevel=1, @@ -108,7 +132,7 @@ def KLT_Tracker(reference, imagedata, mask, matching_winsize=25): d = abs(p0 - p0r).reshape(-1, 2).max(-1) st = d < back_threshold - logging.debug("Nb Bad Status: {}".format(len(st[st == 0]))) + logging.debug("Nb Bad Status: %s", len(st[st == 0])) p0 = p0[st] p1 = p1[st] @@ -131,7 +155,7 @@ def initialize(self): self._output_file = None self._tmp_stats = {} - def preprocess(self, product): + def preprocess(self, product: S2L_Product): # No geometric correction for refined products if product.mtl.is_refined: if S2L_config.config.getboolean('force_geometric_correction'): @@ -147,37 +171,23 @@ def preprocess(self, product): if product.sensor != 'S2': # Reframe angles and masks filepath_out = os.path.join(S2L_config.config.get('wd'), product.name, 'tie_points_REFRAMED.TIF') - mgrs_framing.reframeMulti(product.mtl.angles_file, product.mtl.mgrs, filepath_out, + mgrs_framing.reframeMulti(product.angles_file, product.mtl.mgrs, filepath_out, S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy'), order=0) - product.mtl.angles_file = filepath_out + # update product angles_images + product.angles_file = filepath_out # Reframe mask - if product.mtl.mask_filename: - filepath_out = os.path.join(S2L_config.config.get('wd'), product.name, 'valid_pixel_mask_REFRAMED.TIF') - image = S2L_ImageFile(product.mtl.mask_filename) - imageout = mgrs_framing.reframe(image, product.mtl.mgrs, filepath_out, S2L_config.config.getfloat('dx'), - S2L_config.config.getfloat('dy'), order=0) - imageout.write(creation_options=['COMPRESS=LZW']) - product.mtl.mask_filename = filepath_out + if product.mask_filename: + reframe_mask(product, "mask_filename", 'valid_pixel_mask_REFRAMED.TIF') # Reframe nodata mask - if product.mtl.nodata_mask_filename: - filepath_out = os.path.join(S2L_config.config.get('wd'), product.name, 'nodata_pixel_mask_REFRAMED.TIF') - image = S2L_ImageFile(product.mtl.nodata_mask_filename) - imageout = mgrs_framing.reframe(image, product.mtl.mgrs, filepath_out, S2L_config.config.getfloat('dx'), - S2L_config.config.getfloat('dy'), order=0) - imageout.write(creation_options=['COMPRESS=LZW']) - product.mtl.nodata_mask_filename = filepath_out + if product.nodata_mask_filename: + 
reframe_mask(product, "nodata_mask_filename", 'nodata_pixel_mask_REFRAMED.TIF') # Reframe NDVI if product.ndvi_filename is not None: - filepath_out = os.path.join(S2L_config.config.get('wd'), product.name, 'ndvi_REFRAMED.TIF') - image = S2L_ImageFile(product.ndvi_filename) - imageout = mgrs_framing.reframe(image, product.mtl.mgrs, filepath_out, S2L_config.config.getfloat('dx'), - S2L_config.config.getfloat('dy'), order=0) - imageout.write(creation_options=['COMPRESS=LZW'], DCmode=True) - product.ndvi_filename = filepath_out + reframe_mask(product, "ndvi_filename", 'ndvi_REFRAMED.TIF', DCmode=True) # Matching for dx/dy correction? band = S2L_config.config.get('reference_band', 'B04') @@ -189,7 +199,7 @@ def preprocess(self, product): S2L_config.config.set('freeze_dx_dy', True) metadata.qi.update({'COREGISTRATION_BEFORE_CORRECTION': self._tmp_stats.get('MEAN')}) - def process(self, product, image, band): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: # No geometric correction for refined products if product.mtl.is_refined: if S2L_config.config.getboolean('force_geometric_correction'): @@ -198,86 +208,106 @@ def process(self, product, image, band): log.info("Product is refined: no additional geometric correction.") return image - wd = os.path.join(S2L_config.config.get('wd'), product.name) self._output_file = self.output_file(product, band) self._tmp_stats = {} log.info('Start') # MGRS reframing for Landsat8 - if product.sensor in ('L8', 'L9'): - log.debug('{} {}'.format(S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy'))) + if product.sensor in ('L8', 'L9', 'S2'): + log.debug('%s %s', S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy')) image = self._reframe(product, image, S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy')) - # Resampling to 30m for S2 (HLS) - elif product.sensor == 'S2': - # refine geometry - # if config.getfloat('dx') > 0.3 or config.getfloat('dy') > 0.3: - log.debug("{} {}".format(S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy'))) - image = self._reframe(product, image, S2L_config.config.getfloat('dx'), - S2L_config.config.getfloat('dy')) - # matching for having QA stats if S2L_config.config.get('refImage'): # try to adapt resolution, changing end of reference filename - refImage_path = S2L_config.config.get('refImage') - if not os.path.exists(refImage_path): + ref_image_path = S2L_config.config.get('refImage') + if not os.path.exists(ref_image_path): return image - # open image ref - imageref = S2L_ImageFile(refImage_path) - - # if refImage resolution does not fit - if imageref.xRes != image.xRes: - # new refImage filepath - refImage_noext = os.path.splitext(refImage_path)[0] - if refImage_noext.endswith(f"_{int(imageref.xRes)}m"): - refImage_noext = refImage_noext[:-len(f"_{int(imageref.xRes)}m")] - refImage_path = refImage_noext + f"_{int(image.xRes)}m.TIF" - - # compute (resample), or load if exists - if not os.path.exists(refImage_path): - log.info("Resampling of the reference image") - # compute - imageref = mgrs_framing.resample(imageref, image.xRes, refImage_path) - # write for reuse - imageref.write(DCmode=True, creation_options=['COMPRESS=LZW']) - else: - # or load if exists - log.info("Change reference image to:" + refImage_path) - imageref = S2L_ImageFile(refImage_path) - - # open mask - mask = S2L_ImageFile(product.mtl.mask_filename) - if S2L_config.config.getboolean('freeze_dx_dy'): - # do Geometry Assessment only if required - 
assess_geometry_bands = S2L_config.config.get('doAssessGeometry', default='').split(',')
-                if product.sensor != 'S2':
-                    assess_geometry_bands = [product.reverse_bands_mapping.get(band) for band in assess_geometry_bands]
-                if assess_geometry_bands and band in assess_geometry_bands:
-                    log.info("Geometry assessment for band %s" % band)
-                    # Coarse resolution of correlation grid (only for stats)
-                    self._matching(imageref, image, wd, mask)
+            ref_image = self._get_ref_image(ref_image_path, image)

-            else:
-                # Fine resolution of correlation grid (for accurate dx dy computation)
-                dx, dy = self._matching(imageref, image, wd, mask)
-                # save values for correction on bands
-                S2L_config.config.set('dx', dx)
-                S2L_config.config.set('dy', dy)
-                log.info("Geometrical Offsets (DX/DY): {}m {}m".format(S2L_config.config.getfloat('dx'),
-                                                                       S2L_config.config.getfloat('dy')))
+            self._handle_matching(product, band, image, ref_image)

         # Append bands name to keys
-        for key, item in self._tmp_stats.items():
+        for key in self._tmp_stats:
             if S2L_config.config.get('reference_band') != band:
-                self._tmp_stats[key + '_{}'.format(band)] = self._tmp_stats.pop(key)
+                self._tmp_stats[f'{key}_{band}'] = self._tmp_stats.pop(key)

         metadata.qi.update(self._tmp_stats)

         log.info('End')

         return image

+    def _get_ref_image(self, ref_image_path: str, image: S2L_ImageFile) -> S2L_ImageFile:
+        """Get reference image file to use for matching
+
+        Args:
+            ref_image_path (str): reference image file path
+            image (S2L_ImageFile): image whose X resolution the reference image must match
+
+        Returns:
+            S2L_ImageFile: reference image denoted by 'ref_image_path',
+            or a new one resampled to the X resolution of 'image' if the resolutions differ
+        """
+        # open image ref
+        ref_image = S2L_ImageFile(ref_image_path)
+
+        # if refImage resolution does not fit
+        if ref_image.xRes != image.xRes:
+            # new refImage filepath
+            ref_image_no_ext = os.path.splitext(ref_image_path)[0]
+            if ref_image_no_ext.endswith(f"_{int(ref_image.xRes)}m"):
+                ref_image_no_ext = ref_image_no_ext[:-len(f"_{int(ref_image.xRes)}m")]
+            ref_image_path = ref_image_no_ext + f"_{int(image.xRes)}m.TIF"
+
+            # compute (resample), or load if exists
+            if not os.path.exists(ref_image_path):
+                log.info("Resampling of the reference image")
+                # compute
+                ref_image = mgrs_framing.resample(ref_image, image.xRes, ref_image_path)
+                # write for reuse
+                ref_image.write(DCmode=True, creation_options=['COMPRESS=LZW'])
+            else:
+                # or load if exists
+                log.info("Change reference image to: %s", ref_image_path)
+                ref_image = S2L_ImageFile(ref_image_path)
+
+        return ref_image
+
+    def _handle_matching(self, product: S2L_Product, band: str, image: S2L_ImageFile, ref_image: S2L_ImageFile):
+        """TODO : see with Vince
+        Update '_tmp_stats' only if freeze_dx_dy, otherwise also update dx and dy in config
+
+        Args:
+            product (S2L_Product): product currently being processed
+            band (str): name of the band currently being processed
+            image (S2L_ImageFile): image to match
+            ref_image (S2L_ImageFile): reference image to use for matching
+        """
+        work_dir = os.path.join(S2L_config.config.get('wd'), product.name)
+
+        # open mask
+        mask = S2L_ImageFile(product.mask_filename)
+        if S2L_config.config.getboolean('freeze_dx_dy'):
+            # do Geometry Assessment only if required
+            assess_geometry_bands = S2L_config.config.get('doAssessGeometry', default='').split(',')
+            if product.sensor != 'S2':
+                assess_geometry_bands = [product.reverse_bands_mapping.get(band) for band in assess_geometry_bands]
+            if assess_geometry_bands and band in assess_geometry_bands:
+                log.info("Geometry assessment for band %s", band)
+                # Coarse
resolution of correlation grid (only for stats) + self._matching(ref_image, image, work_dir, mask) + + else: + # Fine resolution of correlation grid (for accurate dx dy computation) + dx, dy = self._matching(ref_image, image, work_dir, mask) + # save values for correction on bands + S2L_config.config.set('dx', dx) + S2L_config.config.set('dy', dy) + log.info("Geometrical Offsets (DX/DY): %sm %sm", + S2L_config.config.getfloat('dx'), S2L_config.config.getfloat('dy')) + def _reframe(self, product, imagein, dx=0., dy=0.): log.info('MGRS Framing: Start...') @@ -319,14 +349,19 @@ def _matching(self, imageref, imagesec, wd, mask): dx = dx * imageref.xRes dy = dy * (- imageref.yRes) - log.debug("KLT Nb Points (init/final): {} / {}".format(Ninit, len(dx))) - log.debug("KLT (avgx, avgy): {}m {}m".format(dx.mean(), dy.mean())) + log.debug("KLT Nb Points (init/final): %s / %s", Ninit, len(dx)) + log.debug("KLT (avgx, avgy): %sm %sm", dx.mean(), dy.mean()) dist = np.sqrt(np.power(dx, 2) + np.power(dy, 2)).flatten() self._tmp_stats.update({'SKEW': np.round(skew(dist, axis=None), 1), 'KURTOSIS': np.round(kurtosis(dist, axis=None), 1), + 'REF_IMAGE': os.path.basename(S2L_config.config.get('refImage')), 'MEAN': np.round(np.mean(dist), 1), + 'MEAN_X': dx.mean(), + 'MEAN_Y': dy.mean(), 'STD': np.round(np.std(dist), 1), + 'STD_X': np.round(np.std(dx), 1), + 'STD_Y': np.round(np.std(dy), 1), 'RMSE': np.round(np.sqrt(np.mean(np.power(dist, 2))), 1), 'NB_OF_POINTS': len(dx)}) diff --git a/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py b/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py index eb6d1ee..2c13a02 100644 --- a/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py +++ b/sen2like/sen2like/s2l_processes/S2L_InterCalibration.py @@ -6,11 +6,25 @@ import numpy as np from core import S2L_config +from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product from s2l_processes.S2L_Process import S2L_Process -from core.QI_MTD.mtd import metadata log = logging.getLogger("Sen2Like") +COEFFICIENT = { + "Sentinel-2B": { + 'B01': {'coef': [1.011, 0]}, + 'B02': {'coef': [1.011, 0]}, + 'B03': {'coef': [1.011, 0]}, + 'B04': {'coef': [1.011, 0]}, + 'B05': {'coef': [1.011, 0]}, + 'B06': {'coef': [1.011, 0]}, + 'B07': {'coef': [1.011, 0]}, + 'B08': {'coef': [1.011, 0]}, + 'B8A': {'coef': [1.011, 0]}, + } +} class S2L_InterCalibration(S2L_Process): """ @@ -30,35 +44,28 @@ class S2L_InterCalibration(S2L_Process): (It is the SPACECRAFT_NAME (for sentinel) or SPACECRAFT_ID (for landsats)) """ - def process(self, product, image, band): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') - coeff = { - "Sentinel-2B": { - 'B01': {'coef': [1.011, 0]}, - 'B02': {'coef': [1.011, 0]}, - 'B03': {'coef': [1.011, 0]}, - 'B04': {'coef': [1.011, 0]}, - 'B05': {'coef': [1.011, 0]}, - 'B06': {'coef': [1.011, 0]}, - 'B07': {'coef': [1.011, 0]}, - 'B08': {'coef': [1.011, 0]}, - 'B8A': {'coef': [1.011, 0]}, - } - } - if product.mtl.mission in coeff: - if band in coeff[product.mtl.mission]: - slope, offset = coeff[product.mtl.mission][band]['coef'] + + if product.mtl.mission in COEFFICIENT: + if float(product.mtl.processing_sw) < 4.0: + if band in COEFFICIENT[product.mtl.mission]: + slope, offset = COEFFICIENT[product.mtl.mission][band]['coef'] + else: + log.info("No inter calibration coefficient defined for %s", band) + log.info('End') + return image else: - log.info("No inter calibration coefficient defined for {}".format(band)) + 
log.info("No inter calibration performed for Sentinel-2B Collection-1 products (PB >= 04.00) ") log.info('End') return image else: - log.info("No inter calibration coefficient defined for {} mission".format(product.mtl.mission)) + log.info("No inter calibration coefficient defined for %s mission", product.mtl.mission) log.info('End') return image if offset is not None and slope is not None: - log.debug(f"Applying InterCalibration : slope = {slope}, offset{offset}") + log.debug("Applying InterCalibration : slope = %s, offset = %s", slope, offset) new = image.array np.multiply(new, slope, out=new) np.add(new, offset, out=new) diff --git a/sen2like/sen2like/s2l_processes/S2L_Nbar.py b/sen2like/sen2like/s2l_processes/S2L_Nbar.py index 33d9cf6..31f0a75 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Nbar.py +++ b/sen2like/sen2like/s2l_processes/S2L_Nbar.py @@ -8,7 +8,6 @@ import os import glob import numpy as np -import re from osgeo import gdal from skimage.transform import resize as skit_resize from skimage.measure import block_reduce @@ -18,8 +17,9 @@ from core.QI_MTD.mtd import metadata from core import S2L_config from core.S2L_tools import out_stat -from s2l_processes.S2L_Process import S2L_Process +from core.products.product import S2L_Product from core.image_file import S2L_ImageFile +from s2l_processes.S2L_Process import S2L_Process log = logging.getLogger("Sen2Like") @@ -44,7 +44,7 @@ def li_sparse_kernel(theta_s, theta_v, phi): """ h_sur_b = 2 b_sur_r = 1 - ct = np.float(np.pi / 180.0) + ct = float(np.pi / 180.0) # Convert to radiance theta_s_r = np.multiply(ct, theta_s) theta_v_r = np.multiply(ct, theta_v) @@ -77,10 +77,9 @@ def li_sparse_kernel(theta_s, theta_v, phi): # Compute overlap value between the view and the solar shadow : overlap = np.divide(1.0, np.pi) * (t - sin_t * cos_t) * (sec_theta_s_p + sec_theta_v_p) # Compute KGEO : - t = - sec_theta_s_p - sec_theta_v_p + 0.5 * (1 + cos_zetha_p) * sec_theta_v_p * sec_theta_v_p - K_GEO = overlap - sec_theta_s_p - sec_theta_v_p + 0.5 * (1 + cos_zetha_p) * sec_theta_s_p * sec_theta_v_p + k_geo = overlap - sec_theta_s_p - sec_theta_v_p + 0.5 * (1 + cos_zetha_p) * sec_theta_s_p * sec_theta_v_p - return K_GEO + return k_geo def normalized_brdf(KVOL_norm, KGEO_norm, KVOL_input, KGEO_input, coef): @@ -119,7 +118,7 @@ def check(self): def get(self): return None - def get_cmatrix_full(self,KVOL_norm, KGEO_norm, KVOL_input, KGEO_input): + def get_cmatrix_full(self, KVOL_norm, KGEO_norm, KVOL_input, KGEO_input): return np.zeros(self.image.shape) def compute_Kvol(self, theta_s, theta_v, phi): @@ -138,10 +137,10 @@ def check(self): def get(self): brdf_coef_set = self.product.brdf_coefficients.get(self.band, {}).get("coef") - log.debug('BRDF Coefficient Set :{}'.format(brdf_coef_set)) + log.debug('BRDF Coefficient Set :%s', brdf_coef_set) return brdf_coef_set - def get_cmatrix_full(self,KVOL_norm, KGEO_norm, KVOL_input, KGEO_input): + def get_cmatrix_full(self, KVOL_norm, KGEO_norm, KVOL_input, KGEO_input): CMATRIX = normalized_brdf(KVOL_norm, KGEO_norm, KVOL_input, KGEO_input, self.get()) return skit_resize(CMATRIX, self.image.array.shape) @@ -152,7 +151,7 @@ def compute_Kvol(self, theta_s, theta_v, phi): But different from Roujean and Al """ - ct = np.float(np.pi / 180.0) + ct = float(np.pi / 180.0) # Convert to radiance theta_s_r = np.multiply(ct, theta_s) theta_v_r = np.multiply(ct, theta_v) @@ -171,59 +170,60 @@ class VJBMatriceBRDFCoefficient(BRDFCoefficient): vr_file_glob_path = '*_BRDFinputs.nc' mtd = 'Vermote, E., C.O. Justice, et F.-M. 
Breon. 2009' - def __init__(self, product, image, band, vr_matrice_file): + def __init__(self, product, image, band, vr_matrix_dir): super().__init__(product, image, band) - vr_files = glob.glob(os.path.join(vr_matrice_file, self.vr_file_glob_path)) - self.vr_matrice = None + vr_files = glob.glob(os.path.join(vr_matrix_dir, self.vr_file_glob_path)) + self.vr_matrix = None + self.vr_matrix_file = None self.tile = product.mtl.mgrs for file in vr_files: - vr_matrice = xr.open_dataset(file) - if vr_matrice.attrs['TILE'][-5:] == product.mtl.mgrs: - log.info(f"Find VJB matrices : {file}") - self.vr_matrice = vr_matrice - self.vr_matrice_resolution = int(self.vr_matrice.attrs['SPATIAL_RESOLUTION']) - self.vr_matrice_file = file + vr_matrix = xr.open_dataset(file) + if vr_matrix.attrs['TILE'][-5:] == product.mtl.mgrs: + log.info("Find VJB matrices : %s", file) + self.vr_matrix = vr_matrix + self.vr_matrix_resolution = int(self.vr_matrix.attrs['SPATIAL_RESOLUTION']) + self.vr_matrix_file = file self.band_names = { - k:v for k, v in zip(self.vr_matrice.attrs['BANDS_NUMBER'], self.vr_matrice.attrs['BANDS'])} + k: v for k, v in zip(self.vr_matrix.attrs['BANDS_NUMBER'], self.vr_matrix.attrs['BANDS'])} + break # Stop at the first correct file - vr_matrice.close() + vr_matrix.close() def check(self): - return self.vr_matrice is not None and self.band in self.vr_matrice.attrs['BANDS_NUMBER'] + return self.vr_matrix is not None and self.band in self.vr_matrix.attrs['BANDS_NUMBER'] def get(self): # Load datas if not self.check(): return None - V0 = self.vr_matrice['V0_tendency_' + self.band_names[self.band]] / 10000.0 - V1 = self.vr_matrice['V1_tendency_' + self.band_names[self.band]] / 10000.0 - R0 = self.vr_matrice['R0_tendency_' + self.band_names[self.band]] / 10000.0 - R1 = self.vr_matrice['R1_tendency_' + self.band_names[self.band]] / 10000.0 + V0 = self.vr_matrix['V0_tendency_' + self.band_names[self.band]] / 10000.0 + V1 = self.vr_matrix['V1_tendency_' + self.band_names[self.band]] / 10000.0 + R0 = self.vr_matrix['R0_tendency_' + self.band_names[self.band]] / 10000.0 + R1 = self.vr_matrix['R1_tendency_' + self.band_names[self.band]] / 10000.0 ndvi_img = S2L_ImageFile(self.product.ndvi_filename) # Resizing img_res = int(self.image.xRes) ndvi = _resize(ndvi_img.array, img_res / int(ndvi_img.xRes)) - log.debug(f'{img_res} {self.vr_matrice_resolution}') - V0 = _resize(V0.data, img_res / self.vr_matrice_resolution) - V1 = _resize(V1.data, img_res / self.vr_matrice_resolution) - R0 = _resize(R0.data, img_res / self.vr_matrice_resolution) - R1 = _resize(R1.data, img_res / self.vr_matrice_resolution) - ndvi_min = _resize(self.vr_matrice.ndvi_min.data, img_res / self.vr_matrice_resolution) / 10000.0 - ndvi_max = _resize(self.vr_matrice.ndvi_max.data, img_res / self.vr_matrice_resolution) / 10000.0 - - #Clip a tester: - ndvi = np.where(ndvi < ndvi_min, ndvi_min,ndvi) - ndvi = np.where(ndvi > ndvi_max, ndvi_max,ndvi) + log.debug("%s %s", img_res, self.vr_matrix_resolution) + V0 = _resize(V0.data, img_res / self.vr_matrix_resolution) + V1 = _resize(V1.data, img_res / self.vr_matrix_resolution) + R0 = _resize(R0.data, img_res / self.vr_matrix_resolution) + R1 = _resize(R1.data, img_res / self.vr_matrix_resolution) + ndvi_min = _resize(self.vr_matrix.ndvi_min.data, img_res / self.vr_matrix_resolution) / 10000.0 + ndvi_max = _resize(self.vr_matrix.ndvi_max.data, img_res / self.vr_matrix_resolution) / 10000.0 - #regarde definition de np.clip sur la doc. 
& tester
+        # Clip, to be tested:
+        ndvi = np.where(ndvi < ndvi_min, ndvi_min, ndvi)
+        ndvi = np.where(ndvi > ndvi_max, ndvi_max, ndvi)

-        #ndvi = np.clip(ndvi, ndvi_min, ndvi_max)
+        # check the definition of np.clip in the documentation & test
+        # ndvi = np.clip(ndvi, ndvi_min, ndvi_max)

-        out_stat(ndvi_min,log,'BRDF AUX - minimum ndvi')
-        out_stat(ndvi_max,log,'BRDF AUX - maximum ndvi')
-        out_stat(ndvi,log,'NDVI of input products')
+        out_stat(ndvi_min, log, 'BRDF AUX - minimum ndvi')
+        out_stat(ndvi_max, log, 'BRDF AUX - maximum ndvi')
+        out_stat(ndvi, log, 'NDVI of input products')

         if S2L_config.config.getboolean('generate_intermediate_products'):
             ndvi_clip_img_path = os.path.join(S2L_config.config.get("wd"), self.product.name, 'ndvi_clipped.tif')
@@ -238,8 +238,8 @@ def get(self):
         # Compute coefficient
         c_vol = V0 + V1 * ndvi  # c_vol = f_vol/f_iso
         c_geo = R0 + R1 * ndvi  # c_geo = f_geo/f_iso
-        log.debug(f"c_geo have {np.isnan(c_geo).sum()} NaN")
-        log.debug(f"c_vol have {np.isnan(c_vol).sum()} NaN")
+        log.debug("c_geo have %s NaN", np.isnan(c_geo).sum())
+        log.debug("c_vol have %s NaN", np.isnan(c_vol).sum())
         np.nan_to_num(c_geo, copy=False)
         np.nan_to_num(c_vol, copy=False)
         if S2L_config.config.getboolean('generate_intermediate_products'):
@@ -257,7 +257,7 @@ def get(self):
             c_vol_image.write(DCmode=True, creation_options=['COMPRESS=LZW'])
         return 1, c_geo, c_vol

-    def get_cmatrix_full(self,KVOL_norm, KGEO_norm, KVOL_input, KGEO_input):
+    def get_cmatrix_full(self, KVOL_norm, KGEO_norm, KVOL_input, KGEO_input):
         IM1 = self.image.array
         KVOL_NORM = skit_resize(KVOL_norm, IM1.shape)
         KGEO_NORM = skit_resize(KGEO_norm, IM1.shape)
@@ -276,7 +276,7 @@ def compute_Kvol(self, theta_s, theta_v, phi):
         F. Maignana, F.-M. Breon, R. Lacaze Remote Sensing of Environment 90 (2004) 210–220 (Equation 12)
         """
-        ct = np.float(np.pi / 180.0)
+        ct = float(np.pi / 180.0)
         # Convert to radians
         theta_s_r = np.multiply(ct, theta_s)
         theta_v_r = np.multiply(ct, theta_v)
@@ -286,12 +286,13 @@ def compute_Kvol(self, theta_s, theta_v, phi):
         zetha_0 = ct * 1.5
         numerator = (np.pi / 2.0 - zetha) * np.cos(zetha) + np.sin(zetha)
         denominator = np.cos(theta_v_r) + np.cos(theta_s_r)
-        hot_spot_factor = (1 + (1 + (zetha/zetha_0))**-1)
+        hot_spot_factor = (1 + (1 + (zetha/zetha_0))**-1)
         # Kvol = ( numerator / denominator ) - np.pi / 4.0
         Kvol = (4.0 / (3.0 * np.pi)) * (numerator / denominator) * hot_spot_factor - (1.0 / 3.0)

         return Kvol

+
 def get_mean_sun_angle(scene_center_latitude):
     # Polynomial coefficient to retrieve the mean sun zenith angle (SZA)
     # as a function of the central latitude (eq. 4)
@@ -314,7 +315,16 @@
 class S2L_Nbar(S2L_Process):

-    def process(self, product, image, band):
+    def __init__(self):
+        super().__init__()
+        self._theta_s = None
+        self._mean_delta_azimuth = []
+
+    def initialize(self):
+        self._theta_s = None
+        self._mean_delta_azimuth = []
+
+    def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile:

         log.info('Start')
@@ -325,19 +335,19 @@
         if not self.brdf_coeff.check():
             self.brdf_coeff = ROYBRDFCoefficient(product, image, band)
             log.info(
-                f"None VJB matrice for tile {product.mtl.mgrs} "
-                f"and band {band}, try to use ROY coeff in place"
+                "No VJB matrix for tile %s and band %s, trying to use ROY coefficients instead",
+                product.mtl.mgrs, band
             )
         else:
             self.brdf_coeff = ROYBRDFCoefficient(product, image, band)

         # coeff for this band?
if not self.brdf_coeff.check(): - log.info('No BRDF coefficient for {}'.format(band)) + log.info('No BRDF coefficient for %s', band) image_out = image else: if isinstance(self.brdf_coeff, VJBMatriceBRDFCoefficient): - log.info(f"Use VJB coefficient matrices in : {self.brdf_coeff.vr_matrice_file}") + log.info("Use VJB coefficient matrices in : %s", self.brdf_coeff.vr_matrix_file) else: log.info("Use ROY coefficients") @@ -356,16 +366,30 @@ def process(self, product, image, band): return image_out + def postprocess(self, product: S2L_Product): + """Set QI params + + Args: + product (S2L_Product): product to post process + """ + + metadata.qi['BRDF_METHOD'] = self.brdf_coeff.mtd + metadata.qi['CONSTANT_SOLAR_ZENITH_ANGLE'] = self._theta_s + metadata.qi['MEAN_DELTA_AZIMUTH'] = np.mean(self._mean_delta_azimuth) + + # TODO : manage it with an abstract method in BRDFCoefficient + if isinstance(self.brdf_coeff, VJBMatriceBRDFCoefficient) and self.brdf_coeff.vr_matrix_file: + metadata.qi["VJB_COEFFICIENTS_FILENAME"] = os.path.basename(self.brdf_coeff.vr_matrix_file) + def _computeKernels(self, product, band=None): lat = product.mtl.get_scene_center_coordinates()[1] scene_center_latitude = lat - theta_s = get_mean_sun_angle(scene_center_latitude) - metadata.qi['CONSTANT_SOLAR_ZENITH_ANGLE'] = theta_s - log.debug('theta_s: {}'.format(theta_s)) - metadata.qi['BRDF_METHOD'] = self.brdf_coeff.mtd + self._theta_s = get_mean_sun_angle(scene_center_latitude) + + log.debug('theta_s: %s', self._theta_s) # Read TP , unit = degree, scale=100 - src_ds = gdal.Open(product.mtl.angles_file) + src_ds = gdal.Open(product.angles_file) nBands = src_ds.RasterCount if nBands == 4: @@ -385,7 +409,8 @@ def _computeKernels(self, product, band=None): # close src_ds = None - metadata.qi['MEAN_DELTA_AZIMUTH'] = np.mean(SAA - VAA) % 360 + + self._mean_delta_azimuth.append(np.mean(SAA - VAA) % 360) if S2L_config.config.getboolean('debug'): out_stat(VAA, log, 'VAA') @@ -402,7 +427,7 @@ def _computeKernels(self, product, band=None): log.debug('------------- KVOL INPUT COMPUTATION ------------------------------') self.KVOL_INPUT = self.brdf_coeff.compute_Kvol(SZA, VZA, SAA - VAA) # Prepare KGEO Norm : - SZA_NORM = np.ones(VAA.shape) * theta_s + SZA_NORM = np.ones(VAA.shape) * self._theta_s VZA_NORM = np.zeros(VAA.shape) DPHI_NORM = np.zeros(VAA.shape) @@ -444,11 +469,11 @@ def _nbar(self, product, image, band): U = IM >= 0 OUT = CMATRIX_full * IM1 - #CORRECTION NBAR Limite a 20% - PDIFF = np.divide((IM1-OUT)*100,IM1) - #Difference Exceed + 20% : - OUT = np.where( PDIFF > 20, IM1 + 0.2*IM1,OUT) - OUT = np.where( PDIFF < -20, IM1 - 0.2*IM1,OUT) + # CORRECTION NBAR Limite a 20% + PDIFF = np.divide((IM1-OUT)*100, IM1) + # Difference Exceed + 20% : + OUT = np.where(PDIFF > 20, IM1 + 0.2*IM1, OUT) + OUT = np.where(PDIFF < -20, IM1 - 0.2*IM1, OUT) if S2L_config.config.getboolean('debug'): log.debug('---- IMAGE after correction ( before removing negative values ---') @@ -465,7 +490,7 @@ def _resize(array, resolution_ratio: float): if resolution_ratio == 1: return array elif resolution_ratio.is_integer(): - return block_reduce(array,(int(resolution_ratio), int(resolution_ratio)), func=np.mean) + return block_reduce(array, (int(resolution_ratio), int(resolution_ratio)), func=np.mean) else: out_shape = tuple(round(s / resolution_ratio) for s in array.shape) return skit_resize(array, out_shape, order=1, preserve_range=True) diff --git a/sen2like/sen2like/s2l_processes/S2L_Packager.py b/sen2like/sen2like/s2l_processes/S2L_Packager.py index 
6a7295a..7103e9e 100644
--- a/sen2like/sen2like/s2l_processes/S2L_Packager.py
+++ b/sen2like/sen2like/s2l_processes/S2L_Packager.py
@@ -9,6 +9,7 @@
 from core import S2L_config
 from core.S2L_tools import quicklook
 from core.image_file import S2L_ImageFile
+from core.products.product import S2L_Product
 from grids import mgrs_framing
 from s2l_processes.S2L_Process import S2L_Process
@@ -28,12 +29,12 @@ def base_path(product):
         tilecode = tilecode[1:]
         return "_".join(['L2F', tilecode, acqdate, product.sensor_name, 'R{:0>3}'.format(relative_orbit)]), tilecode

-    def process(self, pd, image, band):
+    def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile:
         """
         Write final product in the archive directory
         'archive_dir' is defined in config.ini file
         Naming convention from Design Document
-        :param pd: instance of S2L_Product class
+        :param product: instance of S2L_Product class
         :param image: input instance of S2L_ImageFile class
         :param band: band being processed
         :return: output instance of the S2L_ImageFile class
@@ -44,7 +45,7 @@ def process(self, pd, image, band):
         # /data/HLS_DATA/Archive/Site_Name/TILE_ID/S2L_DATEACQ_DATEPROD_SENSOR/S2L_DATEACQ_DATEPROD_SENSOR
         res = image.xRes
-        outdir, tilecode = self.base_path(pd)
+        outdir, tilecode = self.base_path(product)
         outfile = "_".join([outdir, band, '{}m'.format(int(res))]) + '.TIF'
         tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode)  # ts = temporal series
         newpath = os.path.join(tsdir, outdir, outfile)
@@ -62,7 +63,7 @@ def process(self, pd, image, band):
             res = 30
             outfile_30m = "_".join([outdir, band, '{}m'.format(int(res))]) + '.TIF'
             newpath_30m = os.path.join(tsdir, outdir, outfile_30m)
-            if pd.sensor == 'S2':
+            if product.sensor == 'S2':
                 # create 30m band as well
                 # resampling
                 log.info('Resampling to 30m: Start...')
@@ -70,15 +71,15 @@ def process(self, pd, image, band):
                 image_30m.write(creation_options=['COMPRESS=LZW'], DCmode=True)  # digital count
                 log.info('Resampling to 30m: End')
-            if pd.sensor in ('L8', 'L9') and band in pd.image30m:
+            if product.sensor in ('L8', 'L9') and band in product.image30m:
                 # copy 30m band as well
                 # write
-                pd.image30m[band].write(creation_options=['COMPRESS=LZW'], filepath=newpath_30m)
-                del pd.image30m[band]
+                product.image30m[band].write(creation_options=['COMPRESS=LZW'], filepath=newpath_30m)
+                del product.image30m[band]

         return image

-    def postprocess(self, pd):
+    def postprocess(self, product: S2L_Product):
         """
         Copy auxiliary files in the final output like mask, angle files
         Input product metadata file is also copied.
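Editor's illustration, not part of the patch: with the naming built by base_path and
process above, a packaged band file lands under 'archive_dir' as sketched below; every
concrete value here (tile, date, sensor, orbit, paths) is hypothetical.

    # base_path(product) -> ('L2F_31TFJ_20221205_S2A_R051', '31TFJ')  # invented values
    # with archive_dir='/data/HLS_DATA/Archive', band='B04', res=10:
    #   tsdir   = /data/HLS_DATA/Archive/31TFJ
    #   newpath = /data/HLS_DATA/Archive/31TFJ/L2F_31TFJ_20221205_S2A_R051/
    #             L2F_31TFJ_20221205_S2A_R051_B04_10m.TIF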
@@ -86,23 +87,23 @@ def postprocess(self, pd): """ # output directory - outdir, tilecode = self.base_path(pd) + outdir, tilecode = self.base_path(product) tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) # ts = temporal series # copy MTL files in final product - outfile = os.path.basename(pd.mtl.mtl_file_name) - shutil.copyfile(pd.mtl.mtl_file_name, os.path.join(tsdir, outdir, outfile)) - if pd.mtl.tile_metadata: - outfile = os.path.basename(pd.mtl.tile_metadata) - shutil.copyfile(pd.mtl.tile_metadata, os.path.join(tsdir, outdir, outfile)) + outfile = os.path.basename(product.mtl.mtl_file_name) + shutil.copyfile(product.mtl.mtl_file_name, os.path.join(tsdir, outdir, outfile)) + if product.mtl.tile_metadata: + outfile = os.path.basename(product.mtl.tile_metadata) + shutil.copyfile(product.mtl.tile_metadata, os.path.join(tsdir, outdir, outfile)) # copy angles file outfile = "_".join([outdir, 'ANG']) + '.TIF' - shutil.copyfile(pd.mtl.angles_file, os.path.join(tsdir, outdir, outfile)) + shutil.copyfile(product.angles_file, os.path.join(tsdir, outdir, outfile)) # copy valid pixel mask outfile = "_".join([outdir, 'MSK']) + '.TIF' - shutil.copyfile(pd.mtl.mask_filename, os.path.join(tsdir, outdir, outfile)) + shutil.copyfile(product.mask_filename, os.path.join(tsdir, outdir, outfile)) # QI directory qipath = os.path.join(tsdir, 'QI') @@ -112,32 +113,35 @@ def postprocess(self, pd): # save config file in QI cfgname = "_".join([outdir, 'INFO']) + '.cfg' cfgpath = os.path.join(tsdir, 'QI', cfgname) - S2L_config.config.savetofile(os.path.join(S2L_config.config.get('wd'), pd.name, cfgpath)) + S2L_config.config.savetofile(os.path.join(S2L_config.config.get('wd'), product.name, cfgpath)) # save correl file in QI - if os.path.exists(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt')): + if os.path.exists(os.path.join(S2L_config.config.get('wd'), product.name, 'correl_res.txt')): corrname = "_".join([outdir, 'CORREL']) + '.csv' corrpath = os.path.join(tsdir, 'QI', corrname) - shutil.copy(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt'), corrpath) + shutil.copy(os.path.join(S2L_config.config.get('wd'), product.name, 'correl_res.txt'), corrpath) if len(self.images.keys()) > 1: # true color QL band_list = ["B04", "B03", "B02"] qlname = "_".join([outdir, 'QL', 'B432']) + '.jpg' qlpath = os.path.join(tsdir, 'QI', 'QL_B432', qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) + quicklook(product, self.images, band_list, qlpath, S2L_config.config.get( + "quicklook_jpeg_quality", 95), offset=int(S2L_config.config.get('offset'))) # false color QL band_list = ["B12", "B11", "B8A"] qlname = "_".join([outdir, 'QL', 'B12118A']) + '.jpg' qlpath = os.path.join(tsdir, 'QI', 'QL_B12118A', qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) + quicklook(product, self.images, band_list, qlpath, S2L_config.config.get( + "quicklook_jpeg_quality", 95), offset=int(S2L_config.config.get('offset'))) else: # grayscale QL band_list = list(self.images.keys()) qlname = "_".join([outdir, 'QL', band_list[0]]) + '.jpg' qlpath = os.path.join(tsdir, 'QI', f'QL_{band_list[0]}', qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) + quicklook(product, self.images, band_list, qlpath, S2L_config.config.get( + "quicklook_jpeg_quality", 95), offset=int(S2L_config.config.get('offset'))) # Clear images as packager is the 
last process self.images.clear() diff --git a/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py b/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py index eeba7e3..1d0c203 100644 --- a/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py +++ b/sen2like/sen2like/s2l_processes/S2L_PackagerL2F.py @@ -2,328 +2,62 @@ # -*- coding: utf-8 -*- # G. Cavaro (TPZ-F) 2020 -import datetime as dt -import glob import logging import os import shutil -from xml.etree import ElementTree -import numpy as np -from skimage.transform import resize as skit_resize -import core.QI_MTD.S2_structure from core import S2L_config -from core.QI_MTD.QIreport import QiWriter -from core.QI_MTD.generic_writer import find_element_by_path from core.QI_MTD.mtd import metadata -from core.QI_MTD.mtd_writers import MTD_writer_S2, MTD_writer_LS8, MTD_tile_writer_S2, MTD_tile_writer_LS8 -from core.QI_MTD.stac_interface import STACWriter -from core.S2L_tools import quicklook -from core.image_file import S2L_ImageFile -from s2l_processes.S2L_Process import S2L_Process +from s2l_processes.S2L_Product_Packager import S2L_Product_Packager, PackagerConfig log = logging.getLogger("Sen2Like") +packager_config = PackagerConfig( + product_type_name='L2F', + mtd_mask_field='masks_F', + mtd_product_name_field='product_F_name', + mtd_granule_name_field='granule_F_name', + mtd_band_root_name_field='band_rootName_F', + mtd_band_path_field='bands_path_F', + mtd_quicklook_field='quicklooks_F', + mtd_bb_qi_path_field='bb_QIF_path', + mtd_qi_report_file_name_field='L2F_QUALITY.xml', + product_suffix='F', + mtd_product_qi_xsd_field='product_QIF_xsd', + tile_mtd_file_path='MTD_TL_L2F.xml' +) -class S2L_PackagerL2F(S2L_Process): - images = {} - out_variables = ['images'] - @staticmethod - def base_path_S2L(product): - """ - See https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/naming-convention - More information https://sentinel.esa.int/documents/247904/685211/Sentinel-2-Products-Specification-Document - at p74, p438 - Needed parameters : datastrip sensing start - datatake sensing start - absolute orbit - relative orbit - product generation time - Product baseline number - """ - - relative_orbit = S2L_config.config.get('relative_orbit') - file_date = dt.datetime.strftime(product.file_date, '%Y%m%dT%H%M%S') # generation time - - if product.sensor == 'S2': - datatake_sensing_start = dt.datetime.strftime(product.dt_sensing_start, '%Y%m%dT%H%M%S') - datastrip_sensing_start = dt.datetime.strftime(product.ds_sensing_start, '%Y%m%dT%H%M%S') - absolute_orbit = S2L_config.config.get('absolute_orbit') - else: - datatake_sensing_start = dt.datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') - datastrip_sensing_start = file_date - absolute_orbit = metadata.hardcoded_values.get('L8_absolute_orbit') - - PDGS = metadata.hardcoded_values.get('PDGS') - tilecode = product.mtl.mgrs - if tilecode.startswith('T'): - tilecode = tilecode[1:] - - sensor = product.mtl.sensor[0:3] # OLI / MSI / OLI_TIRS - product_name = "_".join([product.sensor_name, '{}L2F'.format(sensor), datatake_sensing_start, 'N' + PDGS, - 'R{:0>3}'.format(relative_orbit), 'T' + tilecode, file_date]) + '.SAFE' - granule_compact_name = "_".join(['L2F', 'T' + tilecode, 'A' + absolute_orbit, datastrip_sensing_start, - product.sensor_name, 'R{:0>3}'.format(relative_orbit)]) +class S2L_PackagerL2F(S2L_Product_Packager): + """ + S2F product packager + """ - return product_name, granule_compact_name, tilecode, datatake_sensing_start + def __init__(self): + super().__init__(packager_config) - 
@staticmethod - def band_path(tsdir, product_name, granule_name, outfile, native: bool = False): - if not native: - out_path = os.path.join(tsdir, product_name, 'GRANULE', granule_name, 'IMG_DATA', outfile) - else: - out_path = os.path.join(tsdir, product_name, 'GRANULE', granule_name, 'IMG_DATA', 'NATIVE', outfile) - return out_path - - def preprocess(self, product): - - if not self.guard(): - return - - product_name, granule_compact_name, tilecode, _ = self.base_path_S2L(product) - metadata.mtd['product_F_name'] = product_name - metadata.mtd['granule_F_name'] = granule_compact_name - metadata.mtd['product_creation_date'] = metadata.mtd.get('product_creation_date', dt.datetime.now()) - outdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) - - """ - # Creation of S2 folder tree structure - tree = core.QI_MTD.S2_structure.generate_S2_structure_XML(out_xml='', product_name=product_name, - tile_name=granule_compact_name, save_xml=False) - core.QI_MTD.S2_structure.create_architecture(outdir, tree, create_empty_files=True) + def postprocess_quicklooks(self, qi_data_dir, product): """ + Creates all QL as done by `2L_Product_Packager.postprocess_quicklooks` plus Fusion Mask QL if needed + Args: + qi_data_dir (str): path to quicklook output dir + product (): product - log.debug('Create folder : ' + os.path.join(outdir, product_name)) - change_nodes = {'PRODUCT_NAME': product_name, - 'TILE_NAME': granule_compact_name - } - core.QI_MTD.S2_structure.create_architecture(outdir, metadata.hardcoded_values.get('s2_struct_xml'), - change_nodes=change_nodes, create_empty_files=False) + Returns: - def process(self, pd, image, band): - """ - Write final product in the archive directory - 'archive_dir' is defined in S2L_config.config.ini file - Naming convention from Design Document - :param pd: instance of S2L_Product class - :param image: input instance of S2L_ImageFile class - :param band: band being processed - :return: output instance of instance of S2L_ImageFile class - """ - - if not self.guard(): - return image - log.info('Start process') - - # TODO : add production date? - - # /data/HLS_DATA/Archive/Site_Name/TILE_ID/S2L_DATEACQ_DATEPROD_SENSOR/S2L_DATEACQ_DATEPROD_SENSOR - res = image.xRes - product_name, granule_compact_name, tilecode, datatake_sensing_start = self.base_path_S2L(pd) - sensor = pd.sensor_name - relative_orbit = S2L_config.config.get('relative_orbit') - native = band in pd.native_bands - s2_band = pd.get_s2like_band(band) - if not native: - band = s2_band - band_rootName = "_".join( - ['L2F', 'T' + tilecode, datatake_sensing_start, sensor, 'R{:0>3}'.format(relative_orbit)]) - metadata.mtd['band_rootName_F'] = band_rootName - - output_format = S2L_config.config.get('output_format') - outfile = "_".join([band_rootName, band, '{}m'.format(int(res))]) + '.' 
+ S2L_ImageFile.FILE_EXTENSIONS[output_format] - # Naming convention from Sentinel-2-Products-Specification-Document (p294) - - tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) # ts = temporal series - newpath = self.band_path(tsdir, product_name, granule_compact_name, outfile, native=native) - - log.debug('New: ' + newpath) - creation_options=[] - if output_format in ('COG', 'GTIFF'): - creation_options.append('COMPRESS=LZW') - nodata_mask = S2L_ImageFile(pd.mtl.nodata_mask_filename).array - if nodata_mask.shape != image.array.shape: - nodata_mask = skit_resize( - nodata_mask.clip(min=-1.0, max=1.0), image.array.shape, order=0, preserve_range=True - ).astype(np.uint8) - image.write( - creation_options=creation_options, - filepath=newpath, - output_format=output_format, - band=band, - nodata_value=0, - no_data_mask=nodata_mask - ) - metadata.mtd.get('bands_path_F').append(newpath) - - # declare output internally - self.images[s2_band] = image.filepath - # declare output in config file - S2L_config.config.set('imageout_dir', image.dirpath) - S2L_config.config.set('imageout_' + band, image.filename) - - log.info('End process') - return image - - def postprocess(self, pd): - """ - Copy auxiliary files in the final output like mask, angle files - Input product metadata file is also copied. - :param pd: instance of S2L_Product class """ - - if not self.guard(): - return - log.info('Start postprocess') - - # output directory - product_name, granule_compact_name, tilecode, datatake_sensing_start = self.base_path_S2L(pd) - - tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) # ts = temporal series - outdir = product_name - product_path = os.path.join(tsdir, outdir) - qi_dir = os.path.join(product_path, 'GRANULE', granule_compact_name, 'QI_DATA') - - # copy angles file - outfile = "_".join([metadata.mtd.get('band_rootName_F'), 'ANG']) + '.TIF' - metadata.mtd['ang_filename'] = outfile - shutil.copyfile(pd.mtl.angles_file, os.path.join(qi_dir, outfile)) - - # copy mask files - if "S2" in pd.sensor and pd.mtl.tile_metadata is not None: - tree_in = ElementTree.parse(pd.mtl.tile_metadata) # Tree of the input mtd (S2 MTD.xml) - root_in = tree_in.getroot() - mask_elements = find_element_by_path(root_in, './Quality_Indicators_Info/Pixel_Level_QI/MASK_FILENAME') - for element in mask_elements: - mask_file = os.path.join(pd.path, element.text) - if os.path.exists(mask_file): - shutil.copyfile(mask_file, os.path.join(qi_dir, os.path.basename(mask_file))) - metadata.mtd.get('masks_F').append({"tag": "MASK_FILENAME", - "attribs": element.attrib, - "text": element.text}) - - # copy valid pixel mask - outfile = "_".join([metadata.mtd.get('band_rootName_F'), pd.sensor, 'MSK']) + '.TIF' - - fpath = os.path.join(qi_dir, outfile) - metadata.mtd.get('masks_F').append({"tag": "MASK_FILENAME", - "attribs": {"type": "MSK_VALPIX"}, - "text": os.path.relpath(fpath, product_path)}) - - if S2L_config.config.get('output_format') == 'COG': - img_object = S2L_ImageFile(pd.mtl.mask_filename, mode='r') - img_object.write(filepath=fpath, output_format='COG', band='MASK') - else: - shutil.copyfile(pd.mtl.mask_filename, fpath) - - # QI directory - qipath = os.path.join(tsdir, 'QI') - if not os.path.exists(qipath): - os.makedirs(qipath) - - # save config file in QI - cfgname = "_".join([outdir, 'INFO']) + '.cfg' - cfgpath = os.path.join(tsdir, 'QI', cfgname) - S2L_config.config.savetofile(os.path.join(S2L_config.config.get('wd'), pd.name, cfgpath)) - - # save correl file in QI - if 
os.path.exists(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt')): - corrname = "_".join([outdir, 'CORREL']) + '.csv' - corrpath = os.path.join(tsdir, 'QI', corrname) - shutil.copy(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt'), corrpath) - - if len(self.images.keys()) > 1: - # true color QL - band_list = ["B04", "B03", "B02"] - qlname = "_".join([metadata.mtd.get('band_rootName_F'), 'QL', 'B432']) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_F').append(qlpath) - - # false color QL - band_list = ["B12", "B11", "B8A"] - qlname = "_".join([metadata.mtd.get('band_rootName_F'), 'QL', 'B12118A']) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_F').append(qlpath) - else: - # grayscale QL - band_list = list(self.images.keys()) - qlname = "_".join([metadata.mtd.get('band_rootName_F'), 'QL', band_list[0]]) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_F').append(qlpath) - + super().postprocess_quicklooks(qi_data_dir, product) # Copy fusion auto check threshold mask - if pd.fusion_auto_check_threshold_msk_file is not None: - outfile = "_".join([metadata.mtd.get('band_rootName_F'), 'FCM']) + '.TIF' - fpath = os.path.join(qi_dir, outfile) - shutil.copyfile(pd.fusion_auto_check_threshold_msk_file, fpath) - metadata.mtd.get('quicklooks_F').append(fpath) - - # PVI - band_list = ["B04", "B03", "B02"] - pvi_filename = "_".join([metadata.mtd.get('band_rootName_F'), 'PVI']) + '.TIF' - qlpath = os.path.join(qi_dir, pvi_filename) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95), xRes=320, - yRes=320, - creationOptions=['COMPRESS=LZW'], format='GTIFF') - metadata.mtd.get('quicklooks_F').append(qlpath) - - # Clear images as packager is the last process - self.images.clear() - - # Write QI report as XML - bb_QI_path = metadata.hardcoded_values.get('bb_QIF_path') - out_QI_path = os.path.join(qi_dir, 'L2F_QI_Report.xml') - if pd.mtl.l2a_qi_report_path is not None: - log.info(f'QI report for input product found here : {pd.mtl.l2a_qi_report_path}') - Qi_Writer = QiWriter(bb_QI_path, outfile=out_QI_path, init_QI_path=pd.mtl.l2a_qi_report_path, H_F='F') - Qi_Writer.manual_replaces(pd) - Qi_Writer.write(pretty_print=True, json_print=False) - # TODO UNCOMMENT BELOW FOR XSD CHECK - product_QI_xsd = metadata.hardcoded_values.get('product_QIF_xsd') - log.info('QI Report is valid : {}'.format(Qi_Writer.validate_schema(product_QI_xsd, out_QI_path))) - - # Write tile MTD - bb_S2_tile = metadata.hardcoded_values.get('bb_S2F_tile') - bb_L8_tile = metadata.hardcoded_values.get('bb_L8F_tile') - tile_mtd_path = 'MTD_TL_L2F.xml' - tile_MTD_outpath = os.path.join(product_path, 'GRANULE', granule_compact_name, tile_mtd_path) - - mtd_tl_writer = MTD_tile_writer_S2(bb_S2_tile, pd.mtl.tile_metadata, H_F='F') if pd.sensor == 'S2' \ - else MTD_tile_writer_LS8(bb_L8_tile, H_F='F') - mtd_tl_writer.manual_replaces(pd) - - mtd_tl_writer.write(tile_MTD_outpath, pretty_print=True) - # TODO UNCOMMENT BELOW FOR XSD CHECK - # product_tl_xsd = metadata.hardcoded_values.get('product_tl_xsd') - # log.info('Tile MTD is valid : 
{}'.format(mtd_tl_writer.validate_schema(product_tl_xsd, tile_MTD_outpath))) - - # Write product MTD - bb_S2_product = metadata.hardcoded_values.get('bb_S2F_product') - bb_L8_product = metadata.hardcoded_values.get('bb_L8F_product') - product_mtd_path = 'MTD_{}L2F.xml'.format(pd.mtl.sensor[0:3]) # MSI / OLI/ OLI_TIRS - product_MTD_outpath = os.path.join(tsdir, product_name, product_mtd_path) - mtd_pd_writer = MTD_writer_S2(bb_S2_product, pd.mtl.mtl_file_name, H_F='F') if pd.sensor == 'S2' \ - else MTD_writer_LS8(bb_L8_product, H_F='F') - mtd_pd_writer.manual_replaces(pd) - mtd_pd_writer.write(product_MTD_outpath, pretty_print=True) - # TODO UNCOMMENT BELOW FOR XSD CHECK - # product_mtd_xsd = metadata.hardcoded_values.get('product_mtd_xsd') - # log.info('Product MTD is valid : {}'.format(mtd_pd_writer.validate_schema(product_mtd_xsd, product_MTD_outpath))) - - # Write stac - stac_writer = STACWriter() - stac_writer.write_product(pd, os.path.join(tsdir, product_name), metadata.mtd['bands_path_F'], - f"{metadata.mtd['band_rootName_F']}_QL_B432.jpg", granule_compact_name) - log.info('End postprocess') + if product.fusion_auto_check_threshold_msk_file is not None: + outfile = "_".join([metadata.mtd.get(self.mtd_band_root_name_field), 'FCM']) + '.TIF' + fpath = os.path.join(qi_data_dir, outfile) + shutil.copyfile(product.fusion_auto_check_threshold_msk_file, fpath) + metadata.mtd.get(self.mtd_quicklook_field).append(fpath) def guard(self): - """ Define required condition to algorithme execution + """ Define required condition to algorithm execution """ if S2L_config.config.getboolean('none_S2_product_for_fusion'): - log.info("Fusion hase not been done. So s2l don't write L2F product.") + log.info("Fusion has not been performed. So s2l does not write L2F product.") return False return True diff --git a/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py b/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py index 46e79cb..31dbdc3 100644 --- a/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py +++ b/sen2like/sen2like/s2l_processes/S2L_PackagerL2H.py @@ -2,307 +2,32 @@ # -*- coding: utf-8 -*- # G. 
Cavaro (TPZ-F) 2020 -import datetime as dt -import glob import logging -import os -import shutil -import numpy as np -from xml.etree import ElementTree -from skimage.transform import resize as skit_resize -import core.QI_MTD.S2_structure -from core import S2L_config -from core.QI_MTD.QIreport import QiWriter -from core.QI_MTD.generic_writer import find_element_by_path -from core.QI_MTD.mtd import metadata -from core.QI_MTD.mtd_writers import MTD_writer_S2, MTD_writer_LS8, MTD_tile_writer_S2, MTD_tile_writer_LS8 -from core.QI_MTD.stac_interface import STACWriter -from core.S2L_tools import quicklook -from core.image_file import S2L_ImageFile -from s2l_processes.S2L_Process import S2L_Process +from s2l_processes.S2L_Product_Packager import S2L_Product_Packager, PackagerConfig log = logging.getLogger("Sen2Like") - -class S2L_PackagerL2H(S2L_Process): - images = {} - out_variables = ['images'] - - @staticmethod - def base_path_S2L(product): - """ - See https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/naming-convention - More information https://sentinel.esa.int/documents/247904/685211/Sentinel-2-Products-Specification-Document - at p74, p438 - Needed parameters : datastrip sensing start - datatake sensing start - absolute orbit - relative orbit - product generation time - Product baseline number - """ - - relative_orbit = S2L_config.config.get('relative_orbit') - file_date = dt.datetime.strftime(product.file_date, '%Y%m%dT%H%M%S') # generation time - - if product.sensor == 'S2': - datatake_sensing_start = dt.datetime.strftime(product.dt_sensing_start, '%Y%m%dT%H%M%S') - datastrip_sensing_start = dt.datetime.strftime(product.ds_sensing_start, '%Y%m%dT%H%M%S') - absolute_orbit = S2L_config.config.get('absolute_orbit') - else: - datatake_sensing_start = dt.datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') - datastrip_sensing_start = file_date - absolute_orbit = metadata.hardcoded_values.get('L8_absolute_orbit') - - PDGS = metadata.hardcoded_values.get('PDGS') - tilecode = product.mtl.mgrs - if tilecode.startswith('T'): - tilecode = tilecode[1:] - - sensor = product.mtl.sensor[0:3] # OLI / MSI / OLI_TIRS - product_name = "_".join([product.sensor_name, '{}L2H'.format(sensor), datatake_sensing_start, 'N' + PDGS, - 'R{:0>3}'.format(relative_orbit), 'T' + tilecode, file_date]) + '.SAFE' - granule_compact_name = "_".join(['L2H', 'T' + tilecode, 'A' + absolute_orbit, datastrip_sensing_start, - product.sensor_name, 'R{:0>3}'.format(relative_orbit)]) - - return product_name, granule_compact_name, tilecode, datatake_sensing_start - - @staticmethod - def band_path(tsdir, product_name, granule_name, outfile, native: bool = False): - if not native: - out_path = os.path.join(tsdir, product_name, 'GRANULE', granule_name, 'IMG_DATA', outfile) - else: - out_path = os.path.join(tsdir, product_name, 'GRANULE', granule_name, 'IMG_DATA', 'NATIVE', outfile) - return out_path - - def preprocess(self, product): - - product_name, granule_compact_name, tilecode, _ = self.base_path_S2L(product) - metadata.mtd['product_H_name'] = product_name - metadata.mtd['granule_H_name'] = granule_compact_name - metadata.mtd['product_creation_date'] = metadata.mtd.get('product_creation_date', dt.datetime.now()) - outdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) - - """ - # Creation of S2 folder tree structure - tree = core.QI_MTD.S2_structure.generate_S2_structure_XML(out_xml='', product_name=product_name, - tile_name=granule_compact_name, save_xml=False) - 
core.QI_MTD.S2_structure.create_architecture(outdir, tree, create_empty_files=True) - """ - - log.debug('Create folder : ' + os.path.join(outdir, product_name)) - change_nodes = {'PRODUCT_NAME': product_name, - 'TILE_NAME': granule_compact_name, - } - core.QI_MTD.S2_structure.create_architecture(outdir, metadata.hardcoded_values.get('s2_struct_xml'), - change_nodes=change_nodes, create_empty_files=False) - - def process(self, pd, image, band): - """ - Write final product in the archive directory - 'archive_dir' is defined in S2L_config.config.ini file - Naming convention from Design Document - :param pd: instance of S2L_Product class - :param image: input instance of S2L_ImageFile class - :param band: band being processed - :return: output instance of instance of S2L_ImageFile class - """ - - # TODO : add production date? - - log.info('Start process') - - # /data/HLS_DATA/Archive/Site_Name/TILE_ID/S2L_DATEACQ_DATEPROD_SENSOR/S2L_DATEACQ_DATEPROD_SENSOR - res = image.xRes - product_name, granule_compact_name, tilecode, datatake_sensing_start = self.base_path_S2L(pd) - sensor = pd.sensor_name - relative_orbit = S2L_config.config.get('relative_orbit') - native = band in pd.native_bands - s2_band = pd.get_s2like_band(band) - if not native: - band = s2_band - band_rootName = "_".join( - ['L2H', 'T' + tilecode, datatake_sensing_start, sensor, 'R{:0>3}'.format(relative_orbit)]) - metadata.mtd['band_rootName_H'] = band_rootName - - output_format = S2L_config.config.get('output_format') - outfile = "_".join([band_rootName, band, '{}m'.format(int(res))]) + '.' + S2L_ImageFile.FILE_EXTENSIONS[output_format] - # Naming convention from Sentinel-2-Products-Specification-Document (p294) - - tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) # ts = temporal series - newpath = self.band_path(tsdir, product_name, granule_compact_name, outfile, native=native) - - log.debug('New: ' + newpath) - creation_options = [] - if output_format in ('COG', 'GTIFF'): - creation_options.append('COMPRESS=LZW') - nodata_mask = S2L_ImageFile(pd.mtl.nodata_mask_filename).array - if nodata_mask.shape != image.array.shape: - nodata_mask = skit_resize( - nodata_mask.clip(min=-1.0, max=1.0), image.array.shape, order=0, preserve_range=True - ).astype(np.uint8) - image.write( - creation_options=creation_options, - filepath=newpath, - output_format=output_format, - band=band, - nodata_value=0, - no_data_mask=nodata_mask - ) - metadata.mtd.get('bands_path_H').append(newpath) - - # declare output internally - self.images[s2_band] = image.filepath - # declare output in config file - S2L_config.config.set('imageout_dir', image.dirpath) - S2L_config.config.set('imageout_' + band, image.filename) - - log.info('End process') - - return image - - def postprocess(self, pd): - """ - Copy auxiliary files in the final output like mask, angle files - Input product metadata file is also copied. 
- :param pd: instance of S2L_Product class - """ - - log.info('Start postprocess') - # output directory - product_name, granule_compact_name, tilecode, datatake_sensing_start = self.base_path_S2L(pd) - - tsdir = os.path.join(S2L_config.config.get('archive_dir'), tilecode) # ts = temporal series - outdir = product_name - product_path = os.path.join(tsdir, outdir) - qi_dir = os.path.join(product_path, 'GRANULE', granule_compact_name, 'QI_DATA') - - # copy angles file - outfile = "_".join([metadata.mtd.get('band_rootName_H'), 'ANG']) + '.TIF' - metadata.mtd['ang_filename'] = outfile - shutil.copyfile(pd.mtl.angles_file, os.path.join(qi_dir, outfile)) - - # copy mask files - if "S2" in pd.sensor and pd.mtl.tile_metadata is not None: - tree_in = ElementTree.parse(pd.mtl.tile_metadata) # Tree of the input mtd (S2 MTD.xml) - root_in = tree_in.getroot() - mask_elements = find_element_by_path(root_in, './Quality_Indicators_Info/Pixel_Level_QI/MASK_FILENAME') - for element in mask_elements: - mask_file = os.path.join(pd.path, element.text) - if os.path.exists(mask_file): - shutil.copyfile(mask_file, os.path.join(qi_dir, os.path.basename(mask_file))) - metadata.mtd.get('masks_H').append({"tag": "MASK_FILENAME", - "attribs": element.attrib, - "text": element.text}) - - # copy valid pixel mask - outfile = "_".join([metadata.mtd.get('band_rootName_H'), pd.sensor, 'MSK']) + '.TIF' - - fpath = os.path.join(qi_dir, outfile) - metadata.mtd.get('masks_H').append({"tag": "MASK_FILENAME", - "attribs": {"type": "MSK_VALPIX"}, - "text": os.path.relpath(fpath, product_path)}) - - if S2L_config.config.get('output_format') == 'COG': - img_object = S2L_ImageFile(pd.mtl.mask_filename, mode='r') - img_object.write(filepath=fpath, output_format='COG', band='MASK') - else: - shutil.copyfile(pd.mtl.mask_filename, fpath) - - # QI directory - qipath = os.path.join(tsdir, 'QI') - if not os.path.exists(qipath): - os.makedirs(qipath) - - # save config file in QI - cfgname = "_".join([outdir, 'INFO']) + '.cfg' - cfgpath = os.path.join(tsdir, 'QI', cfgname) - S2L_config.config.savetofile(os.path.join(S2L_config.config.get('wd'), pd.name, cfgpath)) - - # save correl file in QI - if os.path.exists(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt')): - corrname = "_".join([outdir, 'CORREL']) + '.csv' - corrpath = os.path.join(tsdir, 'QI', corrname) - shutil.copy(os.path.join(S2L_config.config.get('wd'), pd.name, 'correl_res.txt'), corrpath) - - if len(self.images.keys()) > 1: - # true color QL - band_list = ["B04", "B03", "B02"] - qlname = "_".join([metadata.mtd.get('band_rootName_H'), 'QL', 'B432']) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_H').append(qlpath) - - # false color QL - band_list = ["B12", "B11", "B8A"] - qlname = "_".join([metadata.mtd.get('band_rootName_H'), 'QL', 'B12118A']) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_H').append(qlpath) - else: - # grayscale QL - band_list = list(self.images.keys()) - qlname = "_".join([metadata.mtd.get('band_rootName_H'), 'QL', band_list[0]]) + '.jpg' - qlpath = os.path.join(qi_dir, qlname) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95)) - metadata.mtd.get('quicklooks_H').append(qlpath) - - # PVI - band_list = ["B04", "B03", "B02"] - 
pvi_filename = "_".join([metadata.mtd.get('band_rootName_H'), 'PVI']) + '.TIF' - qlpath = os.path.join(qi_dir, pvi_filename) - quicklook(pd, self.images, band_list, qlpath, S2L_config.config.get("quicklook_jpeg_quality", 95), xRes=320, - yRes=320, - creationOptions=['COMPRESS=LZW'], format='GTIFF') - metadata.mtd.get('quicklooks_H').append(qlpath) - - # Clear images as packager is the last process - self.images.clear() - - # Write QI report as XML - bb_QI_path = metadata.hardcoded_values.get('bb_QIH_path') - out_QI_path = os.path.join(qi_dir, 'L2H_QI_Report.xml') - if pd.mtl.l2a_qi_report_path is not None: - log.info(f'QI report for input product found here : {pd.mtl.l2a_qi_report_path}') - Qi_Writer = QiWriter(bb_QI_path, outfile=out_QI_path, init_QI_path=pd.mtl.l2a_qi_report_path, H_F='H') - Qi_Writer.manual_replaces(pd) - Qi_Writer.write(pretty_print=True, json_print=False) - # TODO UNCOMMENT BELOW FOR XSD CHECK - product_QI_xsd = metadata.hardcoded_values.get('product_QIH_xsd') - log.info('QI Report is valid : {}'.format(Qi_Writer.validate_schema(product_QI_xsd, out_QI_path))) - - # Write tile MTD - bb_S2_tile = metadata.hardcoded_values.get('bb_S2H_tile') - bb_L8_tile = metadata.hardcoded_values.get('bb_L8H_tile') - tile_mtd_path = 'MTD_TL_L2H.xml' - tile_MTD_outpath = os.path.join(product_path, 'GRANULE', granule_compact_name, tile_mtd_path) - - mtd_tl_writer = MTD_tile_writer_S2(bb_S2_tile, pd.mtl.tile_metadata, H_F='H') if pd.sensor == 'S2' \ - else MTD_tile_writer_LS8(bb_L8_tile, H_F='H') - mtd_tl_writer.manual_replaces(pd) - mtd_tl_writer.write(tile_MTD_outpath, pretty_print=True) - # TODO UNCOMMENT BELOW FOR XSD CHECK - # product_tl_xsd = metadata.hardcoded_values.get('product_tl_xsd') - # log.info('Tile MTD is valid : {}'.format(mtd_tl_writer.validate_schema(product_tl_xsd, tile_MTD_outpath))) - - # Write product MTD - bb_S2_product = metadata.hardcoded_values.get('bb_S2H_product') - bb_L8_product = metadata.hardcoded_values.get('bb_L8H_product') - product_mtd_path = 'MTD_{}L2H.xml'.format(pd.mtl.sensor[0:3]) # MSI / OLI/ OLI_TIRS - product_MTD_outpath = os.path.join(tsdir, product_name, product_mtd_path) - mtd_pd_writer = MTD_writer_S2(bb_S2_product, pd.mtl.mtl_file_name, H_F='H') if pd.sensor == 'S2' \ - else MTD_writer_LS8(bb_L8_product, H_F='H') - mtd_pd_writer.manual_replaces(pd) - mtd_pd_writer.write(product_MTD_outpath, pretty_print=True) - # TODO UNCOMMENT BELOW FOR XSD CHECK - # product_mtd_xsd = metadata.hardcoded_values.get('product_mtd_xsd') - # log.info('Product MTD is valid : {}'.format(mtd_pd_writer.validate_schema(product_mtd_xsd, - # product_MTD_outpath))) - - # Write stac - stac_writer = STACWriter() - stac_writer.write_product(pd, os.path.join(tsdir, product_name), metadata.mtd['bands_path_H'], - f"{metadata.mtd['band_rootName_H']}_QL_B432.jpg", granule_compact_name) - - log.info('End postprocess') \ No newline at end of file +packager_config = PackagerConfig( + product_type_name='L2H', + mtd_mask_field='masks_H', + mtd_product_name_field='product_H_name', + mtd_granule_name_field='granule_H_name', + mtd_band_root_name_field='band_rootName_H', + mtd_band_path_field='bands_path_H', + mtd_quicklook_field='quicklooks_H', + mtd_bb_qi_path_field='bb_QIH_path', + mtd_qi_report_file_name_field='L2H_QUALITY.xml', + product_suffix='H', + mtd_product_qi_xsd_field='product_QIH_xsd', + tile_mtd_file_path='MTD_TL_L2H.xml' +) + + +class S2L_PackagerL2H(S2L_Product_Packager): + """ + S2H product packager + """ + + def __init__(self): + super().__init__(packager_config)
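# --------------------------------------------------------------------------
# [Editor's illustrative sketch, not part of the diff] With the refactoring
# above, a concrete packager shrinks to a PackagerConfig declaration plus a
# thin subclass of S2L_Product_Packager, exactly as S2L_PackagerL2H shows.
# The "L2X" names below are invented for illustration only; the PackagerConfig
# field names are the ones defined by this change.
from s2l_processes.S2L_Product_Packager import S2L_Product_Packager, PackagerConfig

l2x_config = PackagerConfig(
    product_type_name='L2X',                      # used in product/granule/band root names
    mtd_mask_field='masks_X',                     # metadata key collecting mask entries
    mtd_product_name_field='product_X_name',
    mtd_granule_name_field='granule_X_name',
    mtd_band_root_name_field='band_rootName_X',
    mtd_band_path_field='bands_path_X',
    mtd_quicklook_field='quicklooks_X',
    mtd_bb_qi_path_field='bb_QIX_path',
    mtd_qi_report_file_name_field='L2X_QUALITY.xml',
    product_suffix='X',
    mtd_product_qi_xsd_field='product_QIX_xsd',
    tile_mtd_file_path='MTD_TL_L2X.xml'
)


class S2L_PackagerL2X(S2L_Product_Packager):
    """Hypothetical L2X packager, declared the same way as S2L_PackagerL2H above."""

    def __init__(self):
        super().__init__(l2x_config)
# Design note: all product-type-specific behaviour lives in the config object,
# so the base class can stay free of L2H/L2F conditionals.
# --------------------------------------------------------------------------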
diff --git a/sen2like/sen2like/s2l_processes/S2L_Process.py b/sen2like/sen2like/s2l_processes/S2L_Process.py index c84ff09..e761862 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Process.py +++ b/sen2like/sen2like/s2l_processes/S2L_Process.py @@ -1,10 +1,19 @@ +"""S2L_Process abstraction definition +""" import os from abc import ABC, abstractmethod from core import S2L_config +from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product + class S2L_Process(ABC): + """S2L_Process abstract class. + Implementation MUST implement 'process' and SHOULD override 'preprocess' and 'postprocess' + """ + def __init__(self): self.ext = S2L_config.PROC_BLOCKS.get(self.__class__.__name__, {}).get('extension') self.initialize() @@ -12,10 +21,37 @@ def __init__(self): def initialize(self): return + def preprocess(self, product: S2L_Product): + """Do some preprocessing on / for the product + + Args: + product (S2L_Product): product to preprocess + """ + # deliberately empty + @abstractmethod - def process(self, pd, image, band: str): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: + """Process the product/image/band + + Args: + product (S2L_Product): product to process + image (S2L_ImageFile): image to use for the processing or to process + band (str): band to process + + Returns: + S2L_ImageFile: processing result image + """ return None + def postprocess(self, product: S2L_Product): + """Do some post-processing on / for the product. + This is also a good place to set process metadata.qi params + + Args: + product (S2L_Product): product to post process + """ + # deliberately empty + def output_file(self, product, band, extension=None): if extension is None: extension = self.ext
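# --------------------------------------------------------------------------
# [Editor's illustrative sketch, not part of the diff] The abstraction above
# gives every block the lifecycle: initialize() at construction, then
# preprocess(product) once, process(product, image, band) per band, and
# postprocess(product) once at the end. A minimal hypothetical block (the
# S2L_Identity name is invented) only has to implement 'process':
import logging

from core.image_file import S2L_ImageFile
from core.products.product import S2L_Product
from s2l_processes.S2L_Process import S2L_Process

log = logging.getLogger("Sen2Like")


class S2L_Identity(S2L_Process):
    """Hypothetical no-op block: hands each band image back unchanged."""

    def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile:
        # a real block would transform the image here and return the result
        log.info('Identity block, band %s left unchanged', band)
        return image
# --------------------------------------------------------------------------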
diff --git a/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py b/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py new file mode 100644 index 0000000..038ae9f --- /dev/null +++ b/sen2like/sen2like/s2l_processes/S2L_Product_Packager.py @@ -0,0 +1,439 @@ +"""S2L product packager base module""" + +import datetime as dt +import logging +import os +import shutil +from dataclasses import dataclass +from xml.etree import ElementTree +import numpy as np +from skimage.transform import resize as skit_resize + +import version +import core.QI_MTD.S2_structure +from core import S2L_config +from core.QI_MTD.QIreport import QiWriter +from core.QI_MTD.generic_writer import find_element_by_path +from core.QI_MTD.mtd import metadata +from core.QI_MTD.mtd_writers import get_product_mtl_writer_class, get_tile_mtl_writer_class +from core.QI_MTD.stac_interface import STACWriter +from core.S2L_tools import quicklook +from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product +from s2l_processes.S2L_Process import S2L_Process + +log = logging.getLogger("Sen2Like") + +DATE_FILE_FORMAT = "%Y%m%dT%H%M%S" + + +@dataclass +class PackagerConfig: + """ + Config class for concrete S2L Packager. + Most of them are mtd field names used to retrieve mtd values + """ + product_type_name: str + mtd_mask_field: str + mtd_product_name_field: str + mtd_granule_name_field: str + mtd_band_root_name_field: str + mtd_band_path_field: str + mtd_quicklook_field: str + mtd_bb_qi_path_field: str + mtd_qi_report_file_name_field: str + product_suffix: str + mtd_product_qi_xsd_field: str + tile_mtd_file_path: str + + +class S2L_Product_Packager(S2L_Process): + """Base class for S2L product packaging""" + + def __init__(self, config: PackagerConfig): + super().__init__() + self.images = {} + self.out_variables = ['images'] + self.product_type_name = config.product_type_name + self.mtd_mask_field = config.mtd_mask_field + self.mtd_product_name_field = config.mtd_product_name_field + self.mtd_granule_name_field = config.mtd_granule_name_field + self.mtd_band_root_name_field = config.mtd_band_root_name_field + self.mtd_band_path_field = config.mtd_band_path_field + self.mtd_quicklook_field = config.mtd_quicklook_field + self.mtd_bb_qi_path_field = config.mtd_bb_qi_path_field + self.mtd_qi_report_file_name_field = config.mtd_qi_report_file_name_field + self.product_suffix = config.product_suffix + self.mtd_product_qi_xsd_field = config.mtd_product_qi_xsd_field + self.tile_mtd_file_path = config.tile_mtd_file_path + + def base_path_product(self, product): + """ + See https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/naming-convention + More information https://sentinel.esa.int/documents/247904/685211/Sentinel-2-Products-Specification-Document + at p74, p438 + Needed parameters : datastrip sensing start + datatake sensing start + absolute orbit + relative orbit + product generation time + Product baseline number + """ + + relative_orbit = S2L_config.config.get('relative_orbit') + + # generation time + generation_time = dt.datetime.strftime( + metadata.mtd.get('product_creation_date', None), + DATE_FILE_FORMAT) + + if product.sensor == 'S2': + datatake_sensing_start = dt.datetime.strftime(product.dt_sensing_start, DATE_FILE_FORMAT) + datastrip_sensing_start = dt.datetime.strftime(product.ds_sensing_start, DATE_FILE_FORMAT) + absolute_orbit = S2L_config.config.get('absolute_orbit') + else: + datatake_sensing_start = dt.datetime.strftime(product.acqdate, DATE_FILE_FORMAT) + datastrip_sensing_start = dt.datetime.strftime(product.file_date, DATE_FILE_FORMAT) + absolute_orbit = metadata.hardcoded_values.get('L8_absolute_orbit') + + tile_code = product.mtl.mgrs + if tile_code.startswith('T'): + tile_code = tile_code[1:] + + sensor = product.mtl.sensor[0:3] # OLI / MSI / OLI_TIRS + + product_name = "_".join( + [product.sensor_name, f'{sensor}{self.product_type_name}', datatake_sensing_start, f'N{version.baseline}', + f'R{relative_orbit:0>3}', f'T{tile_code}', generation_time]) + '.SAFE' + + granule_compact_name = "_".join([self.product_type_name, f'T{tile_code}', f'A{absolute_orbit}', + datastrip_sensing_start, product.sensor_name, + f'R{relative_orbit:0>3}']) + + return product_name, granule_compact_name, tile_code, datatake_sensing_start
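# --------------------------------------------------------------------------
# [Editor's illustrative sketch, not part of the diff] base_path_product
# follows the Sentinel-2 SAFE naming convention referenced above. Reusing the
# exact join expression from the code, with invented sample values (including
# the baseline, which in the real code comes from version.baseline):
DATE_FILE_FORMAT = "%Y%m%dT%H%M%S"  # same format string as above

sensor_name, sensor, product_type = 'S2A', 'MSI', 'L2F'   # hypothetical inputs
baseline = '04.00'                                        # hypothetical baseline
relative_orbit, tile_code = 51, '31TFJ'
datatake_sensing_start, generation_time = '20221201T105321', '20221205T101010'

product_name = "_".join(
    [sensor_name, f'{sensor}{product_type}', datatake_sensing_start, f'N{baseline}',
     f'R{relative_orbit:0>3}', f'T{tile_code}', generation_time]) + '.SAFE'
# -> 'S2A_MSIL2F_20221201T105321_N04.00_R051_T31TFJ_20221205T101010.SAFE'
# --------------------------------------------------------------------------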
+ + @staticmethod + def band_path(ts_dir: str, product_name: str, granule_name: str, outfile: str, native: bool = False): + """ + Build band image file path of S2 product + + Args: + ts_dir (str): path of output tile directory + product_name (str): product name + granule_name (str): granule name + outfile (str): image band file name + native (bool): whether to put the file in the NATIVE sub dir + + Returns: + full path as done by `os.path.join` + + """ + if not native: + out_path = os.path.join(ts_dir, product_name, 'GRANULE', granule_name, 'IMG_DATA', outfile) + else: + out_path = os.path.join(ts_dir, product_name, 'GRANULE', granule_name, 'IMG_DATA', 'NATIVE', outfile) + return out_path + + def preprocess(self, product: S2L_Product): + + if not self.guard(): + log.info('Abort pre process due to execution condition') + return + + # set it first as it is used in base_path_product + metadata.mtd['product_creation_date'] = metadata.mtd.get('product_creation_date', dt.datetime.utcnow()) + + product_name, granule_compact_name, tile_code, _ = self.base_path_product(product) + + metadata.mtd[self.mtd_product_name_field] = product_name + metadata.mtd[self.mtd_granule_name_field] = granule_compact_name + + out_dir = os.path.join(S2L_config.config.get('archive_dir'), tile_code) + + # Creation of S2 folder tree structure + # tree = core.QI_MTD.S2_structure.generate_S2_structure_XML(out_xml='', product_name=product_name, + # tile_name=granule_compact_name, save_xml=False) + # core.QI_MTD.S2_structure.create_architecture(outdir, tree, create_empty_files=True) + + log.debug('Create folder : %s', os.path.join(out_dir, product_name)) + change_nodes = {'PRODUCT_NAME': product_name, + 'TILE_NAME': granule_compact_name, + } + core.QI_MTD.S2_structure.create_architecture(out_dir, metadata.hardcoded_values.get('s2_struct_xml'), + change_nodes=change_nodes, create_empty_files=False) + + # extract mask statistic for QI report + if product.mask_info: + metadata.qi["NODATA_PIX_PERCENTAGE"] = f'{product.mask_info.get_nodata_pixel_percentage():.6f}' + metadata.qi["VALID_PIX_PERCENTAGE"] = f'{product.mask_info.get_valid_pixel_percentage():.6f}' + + # extract ROI (ROI based mode) + if product.roi_filename: + metadata.qi["ROI_FILE"] = os.path.basename(product.roi_filename) + + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: + """ + Write final product in the archive directory + 'archive_dir' is defined in S2L_config.config.ini file + Naming convention from Design Document + :param product: instance of S2L_Product class + :param image: input instance of S2L_ImageFile class + :param band: band being processed + :return: output instance of S2L_ImageFile class + """ + + log.info('Start process') + if not self.guard(): + log.info('Abort process due to execution condition') + return image + + # TODO : add production date? + + # /data/HLS_DATA/Archive/Site_Name/TILE_ID/S2L_DATEACQ_DATEPROD_SENSOR/S2L_DATEACQ_DATEPROD_SENSOR + res = image.xRes + product_name, granule_compact_name, tile_code, datatake_sensing_start = self.base_path_product(product) + sensor = product.sensor_name + relative_orbit = S2L_config.config.get('relative_orbit') + native = band in product.native_bands + s2_band = product.get_s2like_band(band) + + if not native: + band = s2_band + + band_root_name = "_".join([self.product_type_name, 'T' + tile_code, + datatake_sensing_start, sensor, f'R{relative_orbit:0>3}']) + + metadata.mtd[self.mtd_band_root_name_field] = band_root_name + + output_format = S2L_config.config.get('output_format') + outfile = "_".join([band_root_name, band, f'{int(res)}m']) + '.'
+ S2L_ImageFile.FILE_EXTENSIONS[ + output_format] + # Naming convention from Sentinel-2-Products-Specification-Document (p294) + + ts_dir = os.path.join(S2L_config.config.get('archive_dir'), tile_code) # ts = temporal series + new_path = self.band_path(ts_dir, product_name, granule_compact_name, outfile, native=native) + + log.debug('New: %s', new_path) + creation_options = [] + + if output_format in ('COG', 'GTIFF'): + creation_options.append('COMPRESS=LZW') + + nodata_mask = S2L_ImageFile(product.nodata_mask_filename).array + + if nodata_mask.shape != image.array.shape: + nodata_mask = skit_resize( + nodata_mask.clip(min=-1.0, max=1.0), image.array.shape, order=0, preserve_range=True + ).astype(np.uint8) + + image.write( + creation_options=creation_options, + filepath=new_path, + output_format=output_format, + band=band, + nodata_value=0, + no_data_mask=nodata_mask + ) + + metadata.mtd.get(self.mtd_band_path_field).append(new_path) + + # declare output internally + self.images[s2_band] = image.filepath + # declare output in config file + S2L_config.config.set('imageout_dir', image.dirpath) + S2L_config.config.set('imageout_' + band, image.filename) + + log.info('End process') + return image + + def postprocess(self, product: S2L_Product): + """ + Copy auxiliary files in the final output like mask, angle files + Input product metadata file is also copied. + :param product: instance of S2L_Product class + """ + + log.info('Start postprocess') + if not self.guard(): + log.info('Abort post process due to execution condition') + return + + # output directory + product_name, granule_compact_name, tile_code, datatake_sensing_start = self.base_path_product(product) + + ts_dir = os.path.join(S2L_config.config.get('archive_dir'), tile_code) # ts = temporal series + product_path = os.path.join(ts_dir, product_name) + granule_dir = os.path.join(product_path, 'GRANULE', granule_compact_name) + qi_data_dir = os.path.join(granule_dir, 'QI_DATA') + + # copy angles file + self._copy_angles_file(product, qi_data_dir) + + # copy mask files + self._copy_masks(product, qi_data_dir, product_path) + + # ROI File (ROI based mode) + if product.roi_filename: + shutil.copyfile(product.roi_filename, os.path.join(qi_data_dir, os.path.basename(product.roi_filename))) + + # QI directory + qi_path = os.path.join(ts_dir, 'QI') + if not os.path.exists(qi_path): + os.makedirs(qi_path) + + product_working_dir = os.path.join(S2L_config.config.get('wd'), product.name) + + # save config file in QI + cfg_name = f'{product_name}_INFO.cfg' + cfg_path = os.path.join(qi_path, cfg_name) + S2L_config.config.savetofile(os.path.join(product_working_dir, cfg_path)) + + # save correl file in QI + if os.path.exists(os.path.join(product_working_dir, 'correl_res.txt')): + corr_name = f"{product_name}_CORREL.csv" + corr_path = os.path.join(qi_path, corr_name) + shutil.copy(os.path.join(product_working_dir, 'correl_res.txt'), corr_path) + + self.postprocess_quicklooks(qi_data_dir, product) + + # Clear images as packager is the last process + self.images.clear() + + # Write QI report as XML + self._write_qi_report(product, qi_data_dir) + + # Write tile MTD + self._write_tile_mtd(product, granule_dir) + + # Write product MTD + self._write_product_mtd(product, product_path) + + # Write stac + stac_writer = STACWriter() + stac_writer.write_product(product, product_path, metadata.mtd[self.mtd_band_path_field], + f"{metadata.mtd[self.mtd_band_root_name_field]}_QL_B432.jpg", granule_compact_name) + log.info('End postprocess') + + def
postprocess_quicklooks(self, qi_data_dir: str, product: S2L_Product): + """ + Creates all QL of the product: B432 & B12118A for multi-band processing, otherwise a greyscale QL of the unique band, + and the PVI + Args: + qi_data_dir (str): path to quicklook output dir + product (S2L_Product): product + """ + if len(self.images.keys()) > 1: + # true color QL + self.handle_product_quicklook(qi_data_dir, product, ["B04", "B03", "B02"], 'B432') + self.handle_product_quicklook(qi_data_dir, product, ["B12", "B11", "B8A"], 'B12118A') + else: + # grayscale QL + band_list = list(self.images.keys()) + self.handle_product_quicklook(qi_data_dir, product, band_list, band_list[0]) + + # PVI + band_list = ["B04", "B03", "B02"] + pvi_filename = f"{metadata.mtd.get(self.mtd_band_root_name_field)}_PVI.TIF" + ql_path = os.path.join(qi_data_dir, pvi_filename) + result_path = quicklook(product, self.images, band_list, ql_path, S2L_config.config.get( + "quicklook_jpeg_quality", 95), + xRes=320, yRes=320, creationOptions=['COMPRESS=LZW'], + out_format='GTIFF', offset=int(S2L_config.config.get('offset'))) + + if result_path is not None: + metadata.mtd.get(self.mtd_quicklook_field).append(ql_path) + + def handle_product_quicklook(self, qi_data_dir: str, product: S2L_Product, band_list: list, suffix: str): + """ + Creates a quicklook for the given bands + Args: + qi_data_dir (str): path to quicklook output dir + product (S2L_Product): product + band_list (list): list of band names of the product to use to generate the QL + suffix (str): quicklook filename suffix (before extension) + """ + ql_name = "_".join([metadata.mtd.get(self.mtd_band_root_name_field), 'QL', suffix]) + '.jpg' + ql_path = os.path.join(qi_data_dir, ql_name) + result_path = quicklook(product, self.images, band_list, ql_path, S2L_config.config.get( + "quicklook_jpeg_quality", 95), offset=int(S2L_config.config.get('offset'))) + + if result_path is not None: + metadata.mtd.get(self.mtd_quicklook_field).append(ql_path) + + def guard(self): + """ Define required condition to algorithm execution + """ + return True + + def _copy_masks(self, product, qi_data_dir, product_path): + if "S2" in product.sensor and product.mtl.tile_metadata is not None: + tree_in = ElementTree.parse(product.mtl.tile_metadata) # Tree of the input mtd (S2 MTD.xml) + root_in = tree_in.getroot() + mask_elements = find_element_by_path(root_in, './Quality_Indicators_Info/Pixel_Level_QI/MASK_FILENAME') + for element in mask_elements: + mask_file = os.path.join(product.path, element.text) + if os.path.exists(mask_file): + shutil.copyfile(mask_file, os.path.join(qi_data_dir, os.path.basename(mask_file))) + metadata.mtd.get(self.mtd_mask_field).append({"tag": "MASK_FILENAME", "attribs": element.attrib, + "text": element.text}) + + # copy valid pixel mask + outfile = "_".join([metadata.mtd.get(self.mtd_band_root_name_field), product.sensor, 'MSK']) + '.TIF' + + fpath = os.path.join(qi_data_dir, outfile) + metadata.mtd.get(self.mtd_mask_field).append({"tag": "MASK_FILENAME", "attribs": {"type": "MSK_VALPIX"}, + "text": os.path.relpath(fpath, product_path)}) + + if S2L_config.config.get('output_format') == 'COG': + img_object = S2L_ImageFile(product.mask_filename, mode='r') + img_object.write(filepath=fpath, output_format='COG', band='MASK') + else: + shutil.copyfile(product.mask_filename, fpath) + + def _copy_angles_file(self, product, qi_data_dir): + outfile = f"{metadata.mtd.get(self.mtd_band_root_name_field)}_ANG.TIF" + metadata.mtd['ang_filename'] = outfile + shutil.copyfile(product.angles_file,
os.path.join(qi_data_dir, outfile)) + + def _write_qi_report(self, product, qi_data_dir): + bb_qi_path = metadata.hardcoded_values.get(self.mtd_bb_qi_path_field) + out_qi_path = os.path.join(qi_data_dir, self.mtd_qi_report_file_name_field) + + if product.mtl.l2a_qi_report_path is not None: + log.info('QI report for input product found here : %s', product.mtl.l2a_qi_report_path) + + qi_writer = QiWriter(bb_qi_path, + outfile=out_qi_path, + init_qi_path=product.mtl.l2a_qi_report_path, + H_F=self.product_suffix) + qi_writer.manual_replaces(product) + qi_writer.write(pretty_print=True, json_print=False) + # validate against XSD + product_qi_xsd = metadata.hardcoded_values.get(self.mtd_product_qi_xsd_field) + log.info('QI Report is valid : %s', qi_writer.validate_schema(product_qi_xsd, out_qi_path)) + + def _write_tile_mtd(self, product, granule_dir): + + tile_mtd_out_path = os.path.join(granule_dir, self.tile_mtd_file_path) + + writer_class = get_tile_mtl_writer_class(product.sensor) + mtd_writer = writer_class(product.sensor, product.mtl.tile_metadata, self.product_suffix) + + mtd_writer.manual_replaces(product) + mtd_writer.write(tile_mtd_out_path, pretty_print=True) + # TODO UNCOMMENT BELOW FOR XSD CHECK + # product_tl_xsd = metadata.hardcoded_values.get('product_tl_xsd') + # log.info('Tile MTD is valid : {}'.format(mtd_tl_writer.validate_schema(product_tl_xsd, tile_MTD_outpath))) + + def _write_product_mtd(self, product, product_path): + product_mtd_file_name = f'MTD_{product.mtl.sensor[0:3]}{self.product_type_name}.xml' # MSI / OLI/ OLI_TIRS + product_mtd_out_path = os.path.join(product_path, product_mtd_file_name) + + writer_class = get_product_mtl_writer_class(product.sensor) + mtd_writer = writer_class(product.sensor, product.mtl.mtl_file_name, self.product_suffix) + + mtd_writer.manual_replaces(product) + mtd_writer.write(product_mtd_out_path, pretty_print=True) + # TODO UNCOMMENT BELOW FOR XSD CHECK + # product_mtd_xsd = metadata.hardcoded_values.get('product_mtd_xsd') + # log.info('Product MTD is valid : {}'.format(mtd_pd_writer.validate_schema(product_mtd_xsd, product_MTD_outpath))) diff --git a/sen2like/sen2like/s2l_processes/S2L_Sbaf.py b/sen2like/sen2like/s2l_processes/S2L_Sbaf.py index dbce5f1..7851500 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Sbaf.py +++ b/sen2like/sen2like/s2l_processes/S2L_Sbaf.py @@ -4,27 +4,41 @@ import logging +from dataclasses import dataclass import numpy as np from core import S2L_config from core.QI_MTD.mtd import metadata +from core.image_file import S2L_ImageFile from core.products.landsat_8.landsat8 import Landsat8Product +from core.products.product import S2L_Product from s2l_processes.S2L_Process import S2L_Process log = logging.getLogger("Sen2Like") +@dataclass +class SbafParams: + """Simple SBAF param storage + """ + coefficient: float + offset: float + + class S2L_Sbaf(S2L_Process): - def getSen2likeCoef(self, mission): + def initialize(self): + self._sbaf_params = {} + + def get_sen2like_coef(self, mission): """ Derived from value in HLS Guide v 1.4 Get Adjustement coefficient for SEN2LIKE processing, Coefficient applied to Landsat8/OLI towards Sentinel2A/MSI data Coef array definition [slope, intercept]""" - adj_coef = dict() + adj_coef = {} if mission in ('LANDSAT_8', 'LANDSAT_9'): adj_coef['B01'] = {'bandLabel': 'CA'} adj_coef['B02'] = {'bandLabel': 'BLUE'} @@ -35,19 +49,19 @@ def getSen2likeCoef(self, mission): adj_coef['B07'] = {'bandLabel': 'SWIR 2'} # compute coeff from Nasa SBAF values - adj_coef_L8_S2A =
self.getOLILikeCoef("Sentinel-2A") + adj_coef_l8_s2a = self.get_oli_like_coef("Sentinel-2A") for oli_band in adj_coef.keys(): s2_band = Landsat8Product.get_s2like_band(oli_band) if s2_band is None: continue - coef = adj_coef_L8_S2A[s2_band]['coef'] + coef = adj_coef_l8_s2a[s2_band]['coef'] a = 1 / coef[0] b = - coef[1] / coef[0] adj_coef[oli_band]['coef'] = [a, b] return adj_coef - def getOLILikeCoef(self, mission): + def get_oli_like_coef(self, mission): """S.Saunier 20/11/2018 Value in HLS Guide v 1.4 Get Adjustement coefficient for OLI LIKE processing, @@ -76,7 +90,7 @@ def getOLILikeCoef(self, mission): return adj_coef - def process(self, product, image, band): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') # init to None @@ -85,45 +99,34 @@ def process(self, product, image, band): if product.mtl.mission == "Sentinel-2A": # skip for S2A - metadata.qi['SBAF_COEFFICIENT_{}'.format(band)] = 1 - metadata.qi['SBAF_OFFSET_{}'.format(band)] = 0 + # set SBAF parameters for export in L2H/F_QUALITY.xml file + self._sbaf_params[band] = SbafParams(1, 0) log.info('Skip for Sentinel-2A') log.info("End") return image elif product.mtl.mission == "Sentinel-2B": - # S2B => L8 + L8 => S2A - adj_coef1 = self.getOLILikeCoef("Sentinel-2B") - adj_coef2 = self.getSen2likeCoef("LANDSAT_8") - band_sbaf1 = band - band_sbaf2 = Landsat8Product.get_band_from_s2(band) - if band_sbaf1 in adj_coef1 and band_sbaf2 in adj_coef2: - log.info(f'Sbaf coefficient find to {band}') - slope1, offset1 = adj_coef1[band_sbaf1]['coef'] - slope2, offset2 = adj_coef2[band_sbaf2]['coef'] - # merging coefficients - slope = slope2 * slope1 - offset = slope2 * offset1 + offset2 - log.info(f'slop = {slope}, offset = {offset}') - else: - log.info("No Sbaf coefficient defined for {}".format(band)) + # skip for S2B as S2B is intercalibrated with S2B in Collection-1 (PB >= 4.00) + # set SBAF parameters for export in L2H/F_QUALITY.xml file + self._sbaf_params[band] = SbafParams(1, 0) + log.info('Skip for Sentinel-2B, already intercalibrated') + log.info("End") + return image elif product.mtl.mission in ('LANDSAT_8', 'LANDSAT_9'): # L8 => S2A band_sbaf = band - adj_coef = self.getSen2likeCoef("LANDSAT_8") + adj_coef = self.get_sen2like_coef("LANDSAT_8") if band_sbaf in adj_coef: - log.info(f'Sbaf coefficient find to {band}') + log.info('Sbaf coefficient find to %s', band) slope, offset = adj_coef[band_sbaf]['coef'] - log.info(f'slop = {slope}, offset = {offset}') + log.info('slop = %s, offset = %s', slope, offset) else: - metadata.qi['SBAF_COEFFICIENT_{}'.format(band)] = 1 - metadata.qi['SBAF_OFFSET_{}'.format(band)] = 0 - log.info("No Sbaf coefficient defined for {}".format(band)) + self._sbaf_params[band] = SbafParams(1, 0) + log.info("No Sbaf coefficient defined for %s", band) return image - metadata.qi['SBAF_COEFFICIENT_{}'.format(band)] = slope - metadata.qi['SBAF_OFFSET_{}'.format(band)] = offset + self._sbaf_params[band] = SbafParams(slope, offset) # Apply SBAF if offset is not None and slope is not None: @@ -141,3 +144,13 @@ def process(self, product, image, band): log.info('End') return image + + def postprocess(self, product: S2L_Product): + """Set QI parameters + + Args: + product (S2L_Product): product to post process + """ + for band, params in self._sbaf_params.items(): + metadata.qi[f'SBAF_COEFFICIENT_{band}'] = params.coefficient + metadata.qi[f'SBAF_OFFSET_{band}'] = params.offset diff --git a/sen2like/sen2like/s2l_processes/S2L_Stitching.py 
diff --git a/sen2like/sen2like/s2l_processes/S2L_Stitching.py b/sen2like/sen2like/s2l_processes/S2L_Stitching.py index c7485f8..a15baa2 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Stitching.py +++ b/sen2like/sen2like/s2l_processes/S2L_Stitching.py @@ -8,8 +8,10 @@ from core import S2L_config from core.product_archive.product_archive import InputProductArchive from core.image_file import S2L_ImageFile +from core.products.product import S2L_Product from grids import mgrs_framing from s2l_processes.S2L_Process import S2L_Process +import core.product_archive.tile_db as tile_db log = logging.getLogger("Sen2Like") @@ -67,15 +69,15 @@ def _get_s2_new_product(self, product): def _get_l8_new_product(self, product): products = [] - log.debug("Product is located on [{}, {}]".format(product.mtl.path, product.mtl.row)) - log.debug(self.downloader.get_coverage((product.mtl.path, product.mtl.row), product.mtl.mgrs)) + log.debug("Product is located on [%s, %s]", product.mtl.path, product.mtl.row) + log.debug(tile_db.get_coverage((product.mtl.path, product.mtl.row), product.mtl.mgrs)) # Get previous_acquisition and test eligibility for row_offset in [-1, 1]: new_products = self.acquisition(product, row_offset=row_offset) - log.debug("products for row_offset: {}".format(row_offset)) + log.debug("products for row_offset: %s", row_offset) log.debug([p.path for p in new_products]) if len(new_products): - coverage = self.downloader.get_coverage((product.mtl.path, int(product.mtl.row) + row_offset), product.mtl.mgrs) + coverage = tile_db.get_coverage((product.mtl.path, int(product.mtl.row) + row_offset), product.mtl.mgrs) log.debug(coverage) if coverage > 0.001: products.append((new_products[0], coverage)) @@ -83,7 +85,7 @@ if len(products) > 0: products = sorted(products, key=lambda t: t[1], reverse=True) self.new_product = products[0][0] - log.info("Product found for stitching {}:".format(self.new_product.path)) + log.info("Product found for stitching %s:", self.new_product.path) else: log.info("No product found for stitching") self.new_product = None @@ -94,7 +96,7 @@ def get_new_product(self, product): elif product.sensor == 'S2': self._get_s2_new_product(product) else: - log.info("Product type not supported by stitching: {}".format(product.sensor)) + log.info("Product type not supported by stitching: %s", product.sensor) self.new_product = None def reframe(self, image, product, band=None, dtype=None): @@ -146,7 +148,7 @@ def stitch_multi(product, product_file, new_product_file): ds_dst = None return filepath_out - def preprocess(self, product): + def preprocess(self, product: S2L_Product): self.get_new_product(product) if self.new_product is None: return @@ -155,24 +157,26 @@ def preprocess(self, product): product_nodata_masks = [] product_angles = [] product_ndvi = [] - for _product in [product, self.new_product.reader(self.new_product.path)]: + for _product in [product, self.new_product.s2l_product_class(self.new_product.path)]: is_mask_valid = True # Validity mask - if _product.mtl.mask_filename is None: - is_mask_valid = _product.mtl.get_valid_pixel_mask( - os.path.join(S2L_config.config.get("wd"), _product.name, 'valid_pixel_mask.tif')) + if _product.mask_filename is None: + is_mask_valid = _product.get_valid_pixel_mask( + os.path.join(S2L_config.config.get("wd"), + _product.name, 'valid_pixel_mask.tif'), + product.roi_filename) if is_mask_valid: - product_validity_masks.append(self.reframe(S2L_ImageFile(_product.mtl.mask_filename), _product)) -
product_nodata_masks.append(self.reframe(S2L_ImageFile(_product.mtl.nodata_mask_filename), _product)) + product_validity_masks.append(self.reframe(S2L_ImageFile(_product.mask_filename), _product)) + product_nodata_masks.append(self.reframe(S2L_ImageFile(_product.nodata_mask_filename), _product)) # Angles - if _product.mtl.angles_file is None: - _product.mtl.get_angle_images(os.path.join(S2L_config.config.get("wd"), _product.name, 'tie_points.tif')) + if _product.angles_file is None: + _product.get_angle_images(os.path.join(S2L_config.config.get("wd"), _product.name, 'tie_points.tif')) filepath_out = os.path.join(S2L_config.config.get('wd'), _product.name, 'tie_points_PREREFRAMED.TIF') if product.sensor != 'S2': - mgrs_framing.reframeMulti(_product.mtl.angles_file, self.tile, filepath_out=filepath_out, order=0) + mgrs_framing.reframeMulti(_product.angles_file, self.tile, filepath_out=filepath_out, order=0) product_angles.append(filepath_out) else: - shutil.copyfile(_product.mtl.angles_file, filepath_out) + shutil.copyfile(_product.angles_file, filepath_out) product_angles.append(filepath_out) # NDVI if S2L_config.config.get('nbar_methode') == 'VJB': @@ -183,12 +187,12 @@ def preprocess(self, product): if None not in product_validity_masks: stitched_mask = self.stitch(product, product_validity_masks[0], product_validity_masks[1]) stitched_mask.write(creation_options=['COMPRESS=LZW']) - product.mtl.mask_filename = stitched_mask.filepath + product.mask_filename = stitched_mask.filepath if None not in product_nodata_masks: stitched_mask = self.stitch(product, product_nodata_masks[0], product_nodata_masks[1]) stitched_mask.write(creation_options=['COMPRESS=LZW']) - product.mtl.nodata_mask_filename = stitched_mask.filepath + product.nodata_mask_filename = stitched_mask.filepath if len(product_ndvi) > 0 and None not in product_ndvi: stitched_ndvi = self.stitch(product, product_ndvi[0], product_ndvi[1]) @@ -196,7 +200,7 @@ def preprocess(self, product): product.ndvi_filename = stitched_ndvi.filepath stitched_angles = self.stitch_multi(product, product_angles[0], product_angles[1]) - product.mtl.angles_file = stitched_angles + product.angles_file = stitched_angles # Stitch reference band (needed by geometry module) band = S2L_config.config.get('reference_band', 'B04') @@ -204,7 +208,7 @@ def preprocess(self, product): image = product.get_band_file(band) self.process(product, image, band) - def process(self, product, image, band): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') if self.new_product is None: log.info("None product found for stitching.") @@ -213,7 +217,7 @@ def process(self, product, image, band): # Reframe products product_image = self.reframe(image, product, band, dtype=np.float32) - new_product = self.new_product.reader(self.new_product.path) + new_product = self.new_product.s2l_product_class(self.new_product.path) new_product_image = self.reframe(new_product.get_band_file(band), new_product, band, dtype=np.float32) stitched_product_image = self.stitch(product, product_image, new_product_image, band) stitched_product_image.write(creation_options=['COMPRESS=LZW'], DCmode=True) diff --git a/sen2like/sen2like/s2l_processes/S2L_Toa.py b/sen2like/sen2like/s2l_processes/S2L_Toa.py index b5643ff..2e035e3 100644 --- a/sen2like/sen2like/s2l_processes/S2L_Toa.py +++ b/sen2like/sen2like/s2l_processes/S2L_Toa.py @@ -5,6 +5,8 @@ import logging from core import S2L_config +from core.image_file import S2L_ImageFile +from 
core.products.product import S2L_Product from s2l_processes.S2L_Process import S2L_Process from core.toa_reflectance import convert_to_reflectance_from_reflectance_cal_product @@ -13,7 +15,7 @@ class S2L_Toa(S2L_Process): - def process(self, product, image, band): + def process(self, product: S2L_Product, image: S2L_ImageFile, band: str) -> S2L_ImageFile: log.info('Start') # convert to TOA (gain + offset) diff --git a/sen2like/sen2like/sen2like.py b/sen2like/sen2like/sen2like.py index 378facc..848a65a 100644 --- a/sen2like/sen2like/sen2like.py +++ b/sen2like/sen2like/sen2like.py @@ -5,30 +5,34 @@ """Main entry point for the sen2like application.""" import datetime -import hashlib import importlib -import json import logging import os import shutil -import subprocess import sys import glob -from argparse import ArgumentParser +from argparse import Namespace from multiprocessing import Pool +from typing import Tuple, List from core import S2L_config, log from core.QI_MTD import mtd +from core.QI_MTD.mtd import Metadata from core.S2L_config import config +from core.argparser import S2LArgumentParser, Mode +from core.image_file import S2L_ImageFile +from core.readers import BaseReader from core.sen2cor_client.sen2cor_client import Sen2corClient, Sen2corError +from core.product_archive import product_selector +from s2l_processes.S2L_Process import S2L_Process try: from sen2like import BINDIR except ImportError: BINDIR = os.path.dirname(__file__) -import core.products # Todo: Try to get rid of that -from core.product_archive.product_archive import InputProductArchive, is_spatialite_supported, read_polygon_from_json +from core.product_archive.product_archive import InputProductArchive, InputProduct +from core.products.product import S2L_Product from version import __version__ # Add building blocks to Python path @@ -42,91 +46,133 @@ def get_scl_map(scl_dir, product): scl_map = None tilecode = product.mtl.mgrs - + if product.sensor == 'S2': - acqdate = datetime.datetime.strftime(product.dt_sensing_start, '%Y%m%dT%H%M%S') + acq_date = datetime.datetime.strftime(product.dt_sensing_start, '%Y%m%dT%H%M%S') else: - acqdate = datetime.datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') + acq_date = datetime.datetime.strftime(product.acqdate, '%Y%m%dT%H%M%S') - result = glob.glob(os.path.join(scl_dir, tilecode, f"T{tilecode}_{acqdate}_SCL_60m.tif")) + result = glob.glob(os.path.join(scl_dir, tilecode, f"T{tilecode}_{acq_date}_SCL_60m.tif")) if result: scl_map = result[0] - + if scl_map is not None: - logger.info('Auxiliary scene classification map found: {}'.format(scl_map)) + logger.info('Auxiliary scene classification map found: %s', scl_map) else: logger.info('Auxiliary scene classification map NOT found.') - - return scl_map - -def get_module(blockname): - """Get process class associated to blockname. + return scl_map + + +def get_module(block_name: str) -> S2L_Process: + """Get process class instance associated to block_name. + If instance does not yet exist, create it, save it and return, + otherwise, return existing instance. + + Args: + block_name (str): The name of the process to instantiate. - :param blockname: The name of the process to instanciate. 
-    :return: The instanciated process
+    Returns:
+        S2L_Process: S2L_Process instance
     """
     # import module and class
-    class_instance = PROCESS_INSTANCES.get(blockname)
+    class_instance = PROCESS_INSTANCES.get(block_name)
 
     if class_instance is None:
-        module = importlib.import_module(blockname)
-        class_instance = getattr(module, blockname)()
-        PROCESS_INSTANCES[blockname] = class_instance
+        module = importlib.import_module(block_name)
+        class_instance = getattr(module, block_name)()
+        PROCESS_INSTANCES[block_name] = class_instance
     return class_instance
 
 
-def generic_process_step(blockname, pd, process_step):
-    """From the name of the block, import the module, get the class,
+def generic_process_step(block_name, product, process_step):
+    """
+    From the name of the block, import the module, get the class,
     create object from class, run the process step method of object.
     This supposes that all the names are the same (e.g. S2L_GeometryKLT)
-    :param blockname: The block to process
-    :param pd:
+    :param block_name: The block to process
+    :param product: product to process
     :param process_step: The step to process
     :return:
     """
     # check if block is switch ON
-    if not config.getboolean('do' + blockname.split('_')[-1]):
+    if not config.getboolean('do' + block_name.split('_')[-1]):
         return
 
     # check if block is applicable to the sensor (L8, L9 or S2)
-    if pd.sensor not in S2L_config.PROC_BLOCKS[blockname]['applicability']:
+    if product.sensor not in S2L_config.PROC_BLOCKS[block_name]['applicability']:
         return
 
-    class_instance = get_module(blockname)
+    s2l_process = get_module(block_name)
 
     # create object and run process if method exists!
-    processus = getattr(class_instance, process_step, None)
-    if processus is not None:
-        return processus(pd)
+    func = getattr(s2l_process, process_step, None)
+    if func is not None:
+        return func(product)
 
 
-def generic_process_band(blockname, pd, image, band):
+def generic_process_band(block_name: str,
+                         product: S2L_Product,
+                         image: S2L_ImageFile,
+                         band: str) -> Tuple[S2L_ImageFile, S2L_Process]:
     """
-    from the name of the block, import the module, get the class,
-    create object from class, run the main method of object.
-    This supposes that there all the names are the same (e.g. S2L_GeometryKLT)
+    Execute the `S2L_Process.process` of the S2L_Process corresponding to the block_name if applicable.
+    The S2L_Process is applicable if its "doBlockName" config param is True
+    and configured as applicable for the product sensor.
+
+    Args:
+        block_name (str): The block name to execute
+        product (S2L_Product): product to process
+        image (S2L_ImageFile): image to process
+        band (str): band of the product to process
+
+    Returns:
+        Tuple[S2L_ImageFile, S2L_Process]: the process output image and executed process instance if applicable,
+        otherwise the input S2L_ImageFile and None.
     """
     # check if block is switch ON
-    logger.debug(config.getboolean('do' + blockname.split('_')[-1]))
-    if not config.getboolean('do' + blockname.split('_')[-1]):
+    logger.debug(config.getboolean('do' + block_name.split('_')[-1]))
+    if not config.getboolean('do' + block_name.split('_')[-1]):
         return image, None
 
     # check if block is applicable to the sensor (L8, L9 or S2)
-    if pd.sensor not in S2L_config.PROC_BLOCKS[blockname]['applicability']:
+    if product.sensor not in S2L_config.PROC_BLOCKS[block_name]['applicability']:
         return image, None
 
-    class_instance = get_module(blockname)
-    # create object and run it!
-    return class_instance.process(pd, image, band), class_instance
-
-
-def process_band(pd, band, list_of_blocks, _config, _metadata, _processus=None):
-    """Function for running all the blocks over one band.""";
-    logger.info(f'--- Process band {band} ---')
+    s2l_process = get_module(block_name)
+    return s2l_process.process(product, image, band), s2l_process
+
+
+def process_band(product: S2L_Product,
+                 band: str,
+                 list_of_blocks: tuple,
+                 _config: S2L_config,
+                 _metadata: Metadata,
+                 _processus=None) -> Tuple['str', 'dict', 'S2L_config', 'Metadata']:
+    """Run all the blocks over one band of a product.
+
+    Args:
+        product (S2L_Product): product to process
+        band (str): band to process
+        list_of_blocks (tuple): block names that should be executed
+        _config (S2L_config): TODO understand why
+        _metadata (Metadata): TODO understand why
+        _processus (_type_, optional): TODO understand why. Defaults to None.
+
+    Returns:
+        Tuple[str, dict, S2L_config, Metadata]:
+            - Last file path of the image generated by the processing chain
+            - dict of generated band images, indexed by packager block name and then by band name,
+              for each packager block executed by the block chain (see `S2L_Product_Packager.process`)
+            - config: TODO understand why
+            - metadata: TODO understand why
+
+        If there is no image for the band, all returned values are None.
+    """
+    logger.info('--- Process band %s ---', band)
     if S2L_config.config.parser is None:
         S2L_config.config = _config
         globals()['config'] = _config
@@ -137,14 +183,14 @@ def process_band(pd, band, list_of_blocks, _config, _metadata, _processus=None):
         PROCESS_INSTANCES = _processus
 
     # get band file path
-    image = pd.get_band_file(band)
+    image = product.get_band_file(band)
     if image is None:
         return None, None, None, None
 
     # iterate on blocks
     packager_images = {}
     for block_name in list_of_blocks:
-        image, block = generic_process_band(block_name, pd, image, band)
+        image, block = generic_process_band(block_name, product, image, band)
 
         # Special case for packager as we need to keep self.images
         if '_Packager' in block_name and block is not None:
@@ -154,234 +200,241 @@
 
     return image.filename, packager_images, config, mtd.metadata
 
 
-def compute_config_hash(args, _config):
-    """Compute hash from arguments and configuration.
-
-    :param args: Tool arguments.
-    :param _config: Configuration
-    :return: Hexdigest of the hash.
+def filter_product(product: S2L_Product):
+    """Filter a product, once created, based on its cloud cover.
+    :param product: a core.product.S2L_Product
+    :return: bool
     """
+    cloud_cover = config.getfloat('cloud_cover')
+    if float(product.mtl.cloud_cover) > cloud_cover:
+        logger.info('cloud cover > %s', cloud_cover)
+        return False
+    return True
 
-    # debug
-    import copy
-    exclude_list = ['parallelize_bands']
-    dc = copy.deepcopy(args.__dict__)
-    for exc in exclude_list:
-        dc.pop(exc)
-    dc = str(dc)
-
-    # Prod
-    # dc = str(args.__dict__)
-
-    # Configuration hash
-    if _config.parser.config_file is not None:
-        with open(_config.parser.config_file) as file:
-            file_content = file.read()
-        _hash = hashlib.md5(file_content.encode())
-        _hash.update(dc.encode())
-        return _hash.hexdigest()
-
-
-def update_configuration(args, tile=None):
-    # init S2L_config and save to wd
-    if not config.initialize(args.S2L_configfile):
-        return
-
-    if args.confParams is not None:
-        config.overload(args.confParams)
-    use_pid = False
-    if use_pid:
-        output_folder = str(os.getpid())
-    else:
-        date_now = datetime.datetime.now().strftime('%Y%m%dT_%H%M%S')
-        output_folder = f'{"" if args.no_log_date else f"{date_now}_"}{compute_config_hash(args, config)}'
-    config.set('wd', os.path.join(args.wd, output_folder))
-    references_map_file = config.get('references_map')
-    if args.refImage:
-        config.set('refImage', args.refImage)
-    elif references_map_file and tile:
-        if os.path.isfile(references_map_file):
-            # load dataset
-            with open(references_map_file) as j:
-                references_map = json.load(j)
-            config.set('refImage', references_map.get(tile))
-        else:
-            logger.warning(f"The reference path {references_map_file} doesn't exist. So it is considered as None.")
-            config.set('refImage', None)
-    else:
-        config.set('refImage', None)
-    config.set('hlsplus', config.getboolean('doPackager') or config.getboolean('doPackagerL2F'))
-    config.set('debug', args.debug)
-    config.set('generate_intermediate_products', args.generate_intermediate_products)
-    if hasattr(args, 'l2a'):
-        config.set('s2_processing_level', 'LEVEL2A' if args.l2a else "LEVEL1C")
-
-def configure_sen2like(args):
-    """Initialize application configuration.
-
-    :param args: The application parameters.
-    :return: The product to process
+def pre_process(product: InputProduct, s2l_product: S2L_Product, tile, do_atmcor: bool,
+                use_sen2cor_config: bool) -> S2L_Product:
     """
-    update_configuration(args)
-
-    # Are we in tile mode ?
-    if args.operational_mode in ['single-tile-mode', 'multi-tile-mode']:
-        start_date = datetime.datetime.strptime(args.start_date, "%Y-%m-%d") if args.start_date else args.start_date
-        end_date = datetime.datetime.strptime(args.end_date, "%Y-%m-%d") if args.end_date else args.end_date
-
-        if args.operational_mode == 'multi-tile-mode':
-            if not is_spatialite_supported():
-                logger.error("Spatialite support is not available. Cannot determine MGRS tiles from ROI.")
-                return
-            json_file = args.roi
-            polygon = read_polygon_from_json(json_file)
-            if polygon is not None:
-                tiles = InputProductArchive.roi_to_tiles(polygon)
-            else:
-                tiles = []
-        else:
-            polygon = None
-            tiles = [args.tile]
-
-        downloader = InputProductArchive(config, roi=polygon)
-        products = {tile: [url for url in downloader.get_products_url_from_tile(tile, start_date, end_date)] for tile in
-                    tiles}
-        if not products:
-            logger.error("No product found. Exiting application...")
-            return
+    Adapt processing parameters for the atmospheric correction to use.
+    THIS FUNCTION MODIFIES SOME CONFIG PARAMETERS (use_sen2cor, use_smac, doStitching, doInterCalibration)
+    Runs sen2cor if configured to (do_atmcor activated and use_sen2cor=True) and if the product is compatible.
+    Otherwise, if do_atmcor is activated, configures execution parameters to use SMAC if the product is compatible.
+    Args:
+        product (InputProduct): input product to instantiate S2L_Product after sen2cor execution
+        s2l_product (S2L_Product): s2l_product to check atmo corr compatibility and run sen2cor on
+        tile (str): tile name for sen2cor
+        do_atmcor (bool): whether atmospheric correction must be done
+        use_sen2cor_config (bool): whether sen2cor should be run
+
+    Returns:
+        s2l_product after sen2cor if executed, otherwise the provided s2l_product,
+        or None on failure or if the cloud cover is too high
+    """
+    use_sen2cor = do_atmcor and use_sen2cor_config
+    # sen2cor needs a Landsat collection product (numeric collection number)
+    if 'L8' in s2l_product.sensor and not s2l_product.mtl.collection_number.isdigit():
+        # can only use SMAC for these products, so force SMAC in case doAtmcor=True
+        use_sen2cor = False
+        config.overload('use_sen2cor=False')
+        config.overload('use_smac=True')
+        logger.info("For Landsat 8-9, apply sen2cor only on collection 1 & 2 products")
+
+    if use_sen2cor:
+        logger.info("Use sen2cor instead of Atmcor SMAC")
+        # Disable SMAC Atmospheric correction
+        config.overload('use_smac=False')
+        config.overload('doStitching=False')
+        config.overload('doInterCalibration=False')
+
+        sen2cor = Sen2corClient(os.path.abspath(config.get('sen2cor_path')), tile)
+
+        try:
+            orig_processing_sw = s2l_product.mtl.processing_sw
+            s2l_product = product.s2l_product_class(sen2cor.run(s2l_product))
+            # restore the L1 "orig" processing version (processing baseline for S2)
+            # because sen2cor sets the processing baseline to 99.99 by default,
+            # while the original L1 processing version may be needed by later processing blocks
+            # (e.g. intercalibration needs to know whether S2B intercalibration was already applied, so it is not applied twice)
+            s2l_product.mtl.processing_sw = orig_processing_sw
+        except Sen2corError:
+            logger.warning("sen2cor raises an error", exc_info=True)
+            return None
+    else:
+        logger.info("sen2cor disabled")
+
+    # FIXME: ask the team why we do this, as the same call is already done before
+    if s2l_product is None:
+        s2l_product = product.s2l_product_class(product.path)
+
+    if not filter_product(s2l_product):
+        return None
+
+    # Update processing configuration
+    config.set('productName', s2l_product.name)
+    config.set('sensor', s2l_product.sensor)
+    config.set('observation_date', s2l_product.mtl.observation_date)
+    config.set('relative_orbit', s2l_product.mtl.relative_orbit)
+    config.set('absolute_orbit', s2l_product.mtl.absolute_orbit)
+    config.set('mission', s2l_product.mtl.mission)
+    config.set('none_S2_product_for_fusion', False)
+
+    # Disable Atmospheric correction for Level-2A products
+    if s2l_product.mtl.data_type in ('Level-2A', 'L2TP', 'L2A'):
+        config.overload('s2_processing_level=LEVEL2A')
+        logger.info("Processing Level-2A product: Atmospheric correction is disabled.")
+        # do not run SMAC even if doAtmo=True
+        config.overload('use_smac=False')
+        config.overload('doInterCalibration=False')
     else:
-        start_date = end_date = None
-        products = {args.tile: [(args.product, 100)]}
-        tiles = [args.tile]
+        config.overload('s2_processing_level=LEVEL1C')
 
-    # Filter on original tiles:
-    products = {tile: item for (tile, item) in products.items() if tile in tiles}
-    return products, start_date, end_date
+    return s2l_product
 
 
-def filter_product(product):
-    """ Filter on product after creat them
-    :param product: a core.product.S2L_Product
-    :return: bool
+def process_no_run(tile: str, input_products: List[InputProduct]):
+    """No-run execution: only list the products that would be processed.
+
+    Args:
+        tile (str): tile name
+        input_products (List[InputProduct]): list of products that would be processed
     """
-    cloud_cover = config.getfloat('cloud_cover')
-    if float(product.mtl.cloud_cover) > cloud_cover:
-        logger.info(f'cloud cover > {cloud_cover}')
-        return False
-    return True
+    logger.info("Tile: %s", tile)
+    if not input_products:
+        logger.info("No products found.")
+    for product in input_products:
+        tile_message = f'[ Tile coverage = {100*product.tile_coverage:6.0f}% ]' \
+            if product.tile_coverage is not None else ''
+        cloud_message = f'[ Cloud coverage = {product.cloud_cover:6.0f}% ]' \
+            if product.cloud_cover is not None else ''
+        logger.info("%s %s %s", tile_message, cloud_message, product.path)
+
+
+def start_process(tile: str, product_urls: List['tuple'], args: Namespace, start_date: datetime.datetime,
+                  end_date: datetime.datetime):
+    """
+    Process products on the tile for a period.
+    Not all products are processed, only those within the period; see InputProductArchive.get_products_from_urls
+    Args:
+        tile (str): tile name
+        product_urls (list(tuple)): list of product URLs.
+        args (argparse.Namespace): program arguments
+        start_date (datetime.datetime): start date of the period to process
+        end_date (datetime.datetime): end date of the period to process (inclusive)
+    Returns:
 
-def start_process(tile, products, args, start_date, end_date):
-    update_configuration(args, tile)
+    """
+    config.update_with_args(args, tile)
     config.set('tile', tile)
-    logger.info("Processing tile {}".format(tile))
+    logger.info("Processing tile %s", tile)
     downloader = InputProductArchive(config)
-    _products = downloader.get_products_from_urls(products, start_date, end_date,
-                                                  product_mode=args.operational_mode == 'product-mode')
+    input_products_list = downloader.get_products_from_urls(
+        product_urls, start_date, end_date, product_mode=args.operational_mode == 'product-mode')
+
     if args.no_run:
-        logger.info("Tile: %s" % tile)
-        if not _products:
-            logger.info("No products found.")
-        for product in _products:
-            tile_message = f'[ Tile coverage = {product.tile_coverage:6.0f}% ]' \
-                if product.tile_coverage is not None else ''
-            cloud_message = f'[ Cloud coverage = {product.cloud_cover:6.0f}% ]' \
-                if product.cloud_cover is not None else ''
-            logger.info("%s %s %s" % (tile_message, cloud_message, product.path))
+        process_no_run(tile, input_products_list)
        return
 
-    for product in _products:
-        _product = product.reader(product.path)
-        atmcor = config.get('doAtmcor')
+    if len(input_products_list) == 0:
+        logger.error('No product for tile %s', tile)
+        return
+
+    for input_product in input_products_list:
+        # instantiate S2L_Product
+        s2l_product = input_product.s2l_product_class(input_product.path)
+
+        # Extract parameters for potential override and restore
         stitch = config.get('doStitching')
         intercalibration = config.get('doInterCalibration')
-
-        use_sen2cor = config.getboolean('use_sen2cor')
-        # only landsat collection 1
-        if 'L8' in _product.sensor:
-            if not _product.mtl.collection_number.isdigit() or int(_product.mtl.collection_number) > 1:
-                use_sen2cor = False
-                logger.info("For landsat 8, apply sen2cor only on collection 01 products")
-        if use_sen2cor:
-            # Disable Atmospheric correction
-            config.overload('doAtmcor=False')
-            config.overload('doStitching=False')
-            config.overload('doInterCalibration=False')
-
-            sen2cor = Sen2corClient(os.path.abspath(config.get('sen2cor_path')), tile)
-
-            try:
-                _product = product.reader(sen2cor.run(_product))
-            except Sen2corError:
-                continue
-
-        if _product is None:
-            _product = product.reader(product.path)
-
-        if not filter_product(_product):
+        do_atmcor = config.get('doAtmcor')
+        use_sen2cor_config = config.getboolean('use_sen2cor')
+        use_smac_config = config.get('use_smac')
+        # use_smac may be absent from conf; absence means we want to use it (default behavior)
+        if use_smac_config is None:
+            # force it to True so its status can be restored later and used in S2L_Atmcor
+            use_smac_config = True
+            # store it in conf so it can be overloaded later
+            config.set('use_smac', True)
+
+        # run sen2cor if applicable and prepare conf parameters
+        s2l_product = pre_process(input_product, s2l_product, tile, do_atmcor, use_sen2cor_config)
+
+        if not s2l_product:
             continue
 
-        # Update processing configuration
-        config.set('productName', _product.name)
-        config.set('sensor', _product.sensor)
-        config.set('observation_date', _product.mtl.observation_date)
-        config.set('relative_orbit', _product.mtl.relative_orbit)
-        config.set('absolute_orbit', _product.mtl.absolute_orbit)
-        config.set('mission', _product.mtl.mission)
-        config.set('none_S2_product_for_fusion', False)
-
-        # Disable Atmospheric correction for Level-2A products
-        if _product.mtl.data_type in ('Level-2A', 'L2TP', 'L2A'):
-            config.overload('s2_processing_level=LEVEL2A')
-            logger.info("Processing Level-2A product: Atmospheric correction is disabled.")
-            config.overload('doAtmcor=False')
-            config.overload('doInterCalibration=False')
-        else:
-            config.overload('s2_processing_level=LEVEL1C')
-
-        process(_product, args)
-
-        # Restore atmcor status
-        config.overload(f'doAtmcor={atmcor}')
+        # execute processing block on product
+        process(s2l_product, args)
+
+        # Restore potential overridden conf parameters (by pre_process)
+        config.overload(f'use_sen2cor={use_sen2cor_config}')
+        config.overload(f'use_smac={use_smac_config}')
         config.overload(f'doStitching={stitch}')
         config.overload(f'doInterCalibration={intercalibration}')
-        del _product
-    if len(_products) == 0:
-        logger.error('No product for tile %s' % tile)
+        del s2l_product
+
+
+def extract_product_files(product: S2L_Product, args: Namespace):
+    """Prepare the product before processing by extracting some files into the current working dir when possible.
+    Extracted data from the product are:
+        - product metadata file
+        - product tile metadata file
+        - angle images (see 'S2L_Product.get_angle_images' impl)
+        - valid and no data pixel masks (see 'S2L_Product.get_valid_pixel_mask' impl)
+        - NDVI image
+    Args:
+        product (S2L_Product): product to prepare
+        args (Namespace): program args
+    """
+    # copy MTL files into the working dir
+    working_dir = os.path.join(config.get("wd"), product.name)
+    product_reader: BaseReader = product.mtl
+
+    shutil.copyfile(product_reader.mtl_file_name,
+                    os.path.join(working_dir, os.path.basename(product_reader.mtl_file_name)))
+    if product_reader.tile_metadata:
+        shutil.copyfile(product_reader.tile_metadata,
+                        os.path.join(working_dir, os.path.basename(product_reader.tile_metadata)))
+
+    # Get scl map for valid pixel mask
+    scl_dir = config.get("scl_dir")
+    if scl_dir and (not config.getboolean('use_sen2cor')) and product_reader.data_type != 'Level-2A':
+        product_reader.scene_classif_band = get_scl_map(scl_dir, product)
+
+    # Angles extraction
+    product.get_angle_images(os.path.join(working_dir, 'tie_points.tif'))
+
+    # extract masks
+    roi_file = args.roi if args.operational_mode == Mode.ROI_BASED else None
+    product.get_valid_pixel_mask(
+        os.path.join(working_dir, 'valid_pixel_mask.tif'), roi_file)
+
+    # extract NDVI
+    if S2L_config.config.get('nbar_methode') == 'VJB':
+        product.get_ndvi_image(os.path.join(working_dir, 'ndvi.tif'))
 
 
-def process(product, args):
-    """Launch process on product."""
+def process(product: S2L_Product, args: Namespace):
+    """Launch process on product
+
+    Args:
+        product (S2L_Product): product to process
+        args (Namespace): program arguments
+    """
     bands = args.bands
 
     # create working directory and save conf (traceability)
-    if not os.path.exists(os.path.join(config.get("wd"), product.name)):
-        os.makedirs(os.path.join(config.get("wd"), product.name))
+    product_path = os.path.join(config.get("wd"), product.name)
+    if not os.path.exists(product_path):
+        os.makedirs(product_path)
 
     # displays
-    logger.info('='*50)
-    logger.info("Process : {} {}".format(product.sensor, product.path))
+    logger.info('=' * 50)
+    logger.info("Process : %s %s", product.sensor, product.path)
 
     # list of the blocks that are available
     list_of_blocks = tuple(S2L_config.PROC_BLOCKS.keys())
 
-    # copy MTL files in wd
-    wd = os.path.join(config.get("wd"), product.name)
-    # copy MTL files in final product
-    shutil.copyfile(product.mtl.mtl_file_name, os.path.join(wd, os.path.basename(product.mtl.mtl_file_name)))
-    if product.mtl.tile_metadata:
-        shutil.copyfile(product.mtl.tile_metadata, os.path.join(wd, os.path.basename(product.mtl.tile_metadata)))
-
-    # Get scl map for valid pixel mask
-    scl_dir = config.get("scl_dir")
-    if scl_dir and (not config.getboolean('use_sen2cor')) and product.mtl.data_type != 'Level-2A':
-        product.mtl.scene_classif_band = get_scl_map(scl_dir, product)
-
-    # Angles extraction
-    product.mtl.get_angle_images(os.path.join(config.get("wd"), product.name, 'tie_points.tif'))
-    product.mtl.get_valid_pixel_mask(os.path.join(config.get("wd"), product.name, 'valid_pixel_mask.tif'))
-    if S2L_config.config.get('nbar_methode') == 'VJB':
-        product.get_ndvi_image(os.path.join(config.get("wd"), product.name, 'ndvi.tif'))
+    # prepare the product
+    extract_product_files(product, args)
 
     # !! Initialization of each block
     for block_name in list_of_blocks:
@@ -431,7 +484,7 @@
 
     if bands_filenames == [None] * len(bands_filenames):
         logger.error("No valid band provided for input product.")
-        logger.error("Valids band for products are: %s" % str(list(product.bands)))
+        logger.error("Valid bands for product are: %s", str(list(product.bands)))
         return
     # !! Post processing !!
     # Run the postprocessing method of each block
@@ -445,113 +498,48 @@
     S2L_config.config.savetofile(os.path.join(S2L_config.config.get('wd'), product.name, 'processing_end.cfg'))
 
 
-def add_common_arguments(parser):
-    parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__)
-    parser.add_argument("--refImage", dest="refImage", type=str,
-                        help="Reference image (use as geometric reference)", metavar="PATH", default=None)
-    # parser.add_argument("--roi", dest="roi", type=str,
-    #                     help="region of interest (Json or Shapefile) [optional]", metavar="PATH", default=None)
-    parser.add_argument("--wd", dest="wd", type=str,
-                        help="Working directory (default : /data/production/wd)", metavar="PATH",
-                        default='/data/production/wd')
-    parser.add_argument("--conf", dest="S2L_configfile", type=str,
-                        help="S2L_configuration file (Default: SEN2LIKE_DIR/conf/S2L_config.ini)", metavar="PATH",
-                        default=os.path.join(BINDIR, '..', 'conf', 'config.ini'))
-    parser.add_argument("--confParams", dest="confParams", type=str,
-                        help="Overload parameter values (Default: None). Given as a \"key=value\" comma-separated list."
-                             "Example: --confParams \"doNbar=False,doSbaf=False\"",
-                        metavar="STRLIST", default=None)
-    parser.add_argument("--bands", dest="bands", type=str,
-                        help="S2 bands to process as coma separated list (Default: ALL bands)", metavar="STRLIST",
-                        default=None)
-    parser.add_argument("--no-run", dest="no_run", action="store_true",
-                        help="Do not start process and only list products (default: False)")
-    parser.add_argument("--intermediate-products", dest="generate_intermediate_products", action="store_true",
-                        help="Generate intermediate products (default: False)")
-    parser.add_argument("--parallelize-bands", action="store_true",
-                        help="Process bands in parallel (default: False)")
-    debug_group = parser.add_argument_group('Debug arguments')
-    debug_group.add_argument("--debug", "-d", dest="debug", action="store_true",
-                             help="Enable Debug mode (default: False)")
-    debug_group.add_argument("--no-log-date", dest="no_log_date", action="store_true",
-                             help="Do no store date in log (default: False)")
-    return parser
-
-
-def configure_arguments():
-    """S2L_configure arguments parser
-
-    :return: The S2L_configured arguments parser.
+def main(args, with_multiprocess_support=False):
+    """Sen2like entry point function
+
+    Args:
+        args: program arguments (argv-style list, without the program name)
+        with_multiprocess_support (bool): whether tiles can be processed in parallel (multi-tile-mode)
+
+    Returns:
+        int: 0 on success, 1 on error
     """
-    parser = ArgumentParser()
-    subparsers = parser.add_subparsers(dest='operational_mode', help="Operational mode")
-    add_common_arguments(parser)
-
-    # Product mode arguments
-    sp_product = subparsers.add_parser("product-mode", help="Process a single product")
-    sp_product.add_argument('product', help="Landsat8 L1 product path / or Sentinel2 L1C product path")
-    add_common_arguments(sp_product)
-    sp_product.add_argument("--tile", help="Id of the MGRS tile to process", required=True)
-
-    # Single tile mode arguments
-    sp_single_tile_mode = subparsers.add_parser('single-tile-mode', help='Process all products on a MGRS tile')
-    sp_single_tile_mode.add_argument("tile", help="Id of the MGRS tile to process")
-    sp_single_tile_mode.add_argument("--start-date", dest="start_date", help="Beginning of period (format YYYY-MM-DD)",
-                                     default='')
-    sp_single_tile_mode.add_argument("--end-date", dest="end_date", help="End of period (format YYYY-MM-DD)",
-                                     default='')
-    sp_single_tile_mode.add_argument("--l2a", help="Processing level Level-2A for S2 products if set (default: L1C)",
-                                     action='store_true')
-    add_common_arguments(sp_single_tile_mode)
-
-    # Multi tile mode arguments
-    sp_multi_tile_mode = subparsers.add_parser('multi-tile-mode', help='Process all products on a ROI')
-    sp_multi_tile_mode.add_argument("roi", help="Json file containing the ROI to process")
-    sp_multi_tile_mode.add_argument("--start-date", dest="start_date", help="Beginning of period (format YYYY-MM-DD)",
-                                    default='')
-    sp_multi_tile_mode.add_argument("--end-date", dest="end_date", help="End of period (format YYYY-MM-DD)",
-                                    default='')
-    sp_multi_tile_mode.add_argument("--jobs", "-j", dest="jobs", help="Number of tile to process in parallel",
-                                    default=None)
-    sp_multi_tile_mode.add_argument("--l2a", help="Processing level Level-2A for S2 products if set (default: L1C)",
-                                    action='store_true')
-    add_common_arguments(sp_multi_tile_mode)
-
-    return parser
-
-
-def main(with_multiprocess_support=False):
-    parser = configure_arguments()
-    args = parser.parse_args()
-
-    log.configure_loggers(log_path=args.wd, is_debug=args.debug, without_date=args.no_log_date)
+    parser = S2LArgumentParser(BINDIR)
+    args = parser.parse_args(args)
+
+    log.configure_loggers(logger, log_path=args.wd, is_debug=args.debug, without_date=args.no_log_date)
+
+    logger.info("Run Sen2like %s", __version__)
 
     if args.operational_mode is None:
         parser.print_help()
         return 1
 
-    # convert list of bands if provided
-    if args.bands is not None:
-        args.bands = args.bands.split(',')
-
-    products, start_date, end_date = configure_sen2like(args)
+    config.update_with_args(args)
+    date_range = parser.get_date_range()
+    products = product_selector.get_products(args, date_range)
     if products is None:
         return 1
 
+
     if args.operational_mode == 'multi-tile-mode' and with_multiprocess_support and not args.no_run:
         number_of_process = args.jobs
         if number_of_process is None:
            number_of_process = config.get('number_of_process', 1)
-        params = [(tile, _products, args, start_date, end_date) for tile, _products in products.items()]
+        params = [(tile, _products, args, date_range.start_date, date_range.end_date)
+                  for tile, _products in products.items()]
         with Pool(int(number_of_process)) as pool:
            pool.starmap(start_process, params)
     else:
         if args.no_run:
             logger.info("No-run mode: Products will only be listed")
         for tile, _products in products.items():
-            start_process(tile, _products, args, start_date, end_date)
+            start_process(tile, _products, args, date_range.start_date, date_range.end_date)
     return 0
 
 
 if __name__ == "__main__":
-    sys.exit(main(with_multiprocess_support=True))
+    sys.exit(main(sys.argv[1:], with_multiprocess_support=True))
diff --git a/sen2like/sen2like/version.py b/sen2like/sen2like/version.py
index 298a38b..48b36d1 100644
--- a/sen2like/sen2like/version.py
+++ b/sen2like/sen2like/version.py
@@ -1,3 +1,14 @@
 """Version of the Application."""
-__version__ = '4.0.2'
+__version__ = '4.1.1'
+
+_split_version = __version__.split('.')
+
+_major = f"{int(_split_version[0]):02d}"
+_minor = f"{int(_split_version[1]):02d}"
+
+# samples:
+# version = 4.1.0 => 04.01 | 0401
+# version = 4.12.0 => 04.12 | 0412
+baseline_dotted = f"{_major}.{_minor}"
+baseline = f"{_major}{_minor}"
diff --git a/sen2like/stac.md b/sen2like/stac.md
index 9bf22e7..1e2fd4b 100644
--- a/sen2like/stac.md
+++ b/sen2like/stac.md
@@ -75,7 +75,7 @@ Install node-modules
 ```npm install```
 
 Build with specification of catalog url and path proxy.
-```CATALOG_URL=http://45.130.29.32/stac/sen2like_catalog.json STAC_PROXY_URL="/data/S2L|http://45.130.29.32/stac/S2L" npm run build```
+```CATALOG_URL=http://45.130.29.32/stac/catalog.json npm run build```
 
 CATALOG_URL: The stac catalog URL.
 STAC_PROXY_URL: The original location and the proxy location separated by the | character, i.e. {original}|{proxy}.
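
The `version.py` change above derives zero-padded processing-baseline strings from the package version. A quick illustrative check of the expected values (sketch only: the bare `version` import path is an assumption and depends on where the script is run from):

```python
# Illustrative sketch: verifies the baseline strings that version.py derives.
# Assumes version.py is importable from the current working directory.
from version import __version__, baseline, baseline_dotted

# __version__ == '4.1.1' -> zero-padded major "04" and minor "01"
assert baseline_dotted == "04.01"
assert baseline == "0401"
print(__version__, baseline_dotted, baseline)
```
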
diff --git a/sen2like/tests/configuration/config.ini b/sen2like/tests/core/downloader/config.ini similarity index 94% rename from sen2like/tests/configuration/config.ini rename to sen2like/tests/core/downloader/config.ini index bb8ad2c..985d83e 100644 --- a/sen2like/tests/configuration/config.ini +++ b/sen2like/tests/core/downloader/config.ini @@ -2,11 +2,14 @@ doStitching = True doGeometryKLT = True doToa = True +doInterCalibration = True doAtmcor = True doNbar = True doSbaf = True doFusion = True -doPackager = True +doPackager = False +doPackagerL2H = True +doPackagerL2F = True [Directories] archive_dir = /data/HLS diff --git a/sen2like/tests/core/downloader/test_product_downloader.py b/sen2like/tests/core/downloader/test_product_downloader.py index bc7ea24..7d30497 100644 --- a/sen2like/tests/core/downloader/test_product_downloader.py +++ b/sen2like/tests/core/downloader/test_product_downloader.py @@ -5,87 +5,91 @@ from core.products.landsat_8.landsat8 import Landsat8Product from core.products.sentinel_2.sentinel2 import Sentinel2Product from sen2like.core.product_archive.product_archive import InputProductArchive, InputProduct +from core.S2L_config import config -configuration_file = os.path.join(os.path.dirname(__file__), '..', '..', 'conf', 'conf.ini') +configuration_file = os.path.join(os.path.dirname(__file__), 'config.ini') class TestProductDownloader(TestCase): def setUp(self) -> None: - self.downloader = InputProductArchive(configuration_file) + if not config.initialize(configuration_file): + raise Exception + config.set('tile', '31TFJ') + self.downloader = InputProductArchive(config) def test_filter_and_sort_products(self): # No hours, two products p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=10) + s2l_product_class=Landsat8Product, tile_coverage=10) p2 = InputProduct(path="S2A_MSIL1C_20170420T103021_N0204_R108_T31TFJ_20170420T103454.SAFE", - date=datetime.datetime(2020, 4, 30), reader=Sentinel2Product, tile_coverage=100) + date=datetime.datetime(2020, 4, 30), s2l_product_class=Sentinel2Product, tile_coverage=100) self.assertEqual([p2, p1], self.downloader.filter_and_sort_products([p1, p2])) self.assertEqual([p2, p1], self.downloader.filter_and_sort_products([p2, p1])) # No hours multiple products p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=10) + s2l_product_class=Landsat8Product, tile_coverage=10) p2 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=20) + s2l_product_class=Landsat8Product, tile_coverage=20) p3 = InputProduct(path="S2A_MSIL1C_20170420T103021_N0204_R108_T31TFJ_20170420T103454.SAFE", - date=datetime.datetime(2020, 4, 30), reader=Sentinel2Product, tile_coverage=100) + date=datetime.datetime(2020, 4, 30), s2l_product_class=Sentinel2Product, tile_coverage=100) self.assertEqual([p3, p2], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p3, p2], self.downloader.filter_and_sort_products([p3, p2, p1])) self.assertEqual([p3, p2], self.downloader.filter_and_sort_products([p2, p1, p3])) p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=10) + s2l_product_class=Landsat8Product, tile_coverage=10) p2 = InputProduct(path="LC08_L1TP_196030_20200429_20200429_01_T1", 
date=datetime.datetime(2020, 4, 29), - reader=Landsat8Product, tile_coverage=20) + s2l_product_class=Landsat8Product, tile_coverage=20) p3 = InputProduct(path="S2A_MSIL1C_20170420T103021_N0204_R108_T31TFJ_20170420T103454.SAFE", - date=datetime.datetime(2020, 4, 30), reader=Sentinel2Product, tile_coverage=100) + date=datetime.datetime(2020, 4, 30), s2l_product_class=Sentinel2Product, tile_coverage=100) self.assertEqual([p2, p3, p1], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p2, p3, p1], self.downloader.filter_and_sort_products([p3, p2, p1])) self.assertEqual([p2, p3, p1], self.downloader.filter_and_sort_products([p2, p1, p3])) p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=20) + s2l_product_class=Landsat8Product, tile_coverage=20) p2 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=10) + s2l_product_class=Landsat8Product, tile_coverage=10) p3 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=30) + s2l_product_class=Landsat8Product, tile_coverage=30) self.assertEqual([p3], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p3], self.downloader.filter_and_sort_products([p3, p2, p1])) self.assertEqual([p3], self.downloader.filter_and_sort_products([p2, p1, p3])) p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=20) + s2l_product_class=Landsat8Product, tile_coverage=20) p2 = InputProduct(path="LC08_L1TP_196030_20200429_20200429_01_T1", date=datetime.datetime(2020, 4, 29), - reader=Landsat8Product, tile_coverage=10) + s2l_product_class=Landsat8Product, tile_coverage=10) p3 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30), - reader=Landsat8Product, tile_coverage=30) + s2l_product_class=Landsat8Product, tile_coverage=30) self.assertEqual([p2, p3], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p2, p3], self.downloader.filter_and_sort_products([p3, p2, p1])) self.assertEqual([p2, p3], self.downloader.filter_and_sort_products([p2, p1, p3])) # With hour p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30, 10, 11), - reader=Landsat8Product, + s2l_product_class=Landsat8Product, tile_coverage=20) p2 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30, 11, 12), - reader=Landsat8Product, + s2l_product_class=Landsat8Product, tile_coverage=10) p3 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30, 9, 8), - reader=Landsat8Product, + s2l_product_class=Landsat8Product, tile_coverage=30) self.assertEqual([p3], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p3], self.downloader.filter_and_sort_products([p3, p2, p1])) self.assertEqual([p3], self.downloader.filter_and_sort_products([p2, p1, p3])) p1 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30, 10, 10), - reader=Landsat8Product, + s2l_product_class=Landsat8Product, tile_coverage=20) p2 = InputProduct(path="LC08_L1TP_196030_20200429_20200429_01_T1", date=datetime.datetime(2020, 4, 29, 10, 10), - reader=Landsat8Product, + 
s2l_product_class=Landsat8Product, tile_coverage=10) p3 = InputProduct(path="LC08_L1TP_196030_20200430_20200430_01_T1", date=datetime.datetime(2020, 4, 30, 9, 10), - reader=Landsat8Product, + s2l_product_class=Landsat8Product, tile_coverage=30) self.assertEqual([p2, p3], self.downloader.filter_and_sort_products([p1, p2, p3])) self.assertEqual([p2, p3], self.downloader.filter_and_sort_products([p3, p2, p1])) diff --git a/sen2like/tests/core/file_extractor/Arles-communes-13-bouches-du-rhone.geojson b/sen2like/tests/core/file_extractor/Arles-communes-13-bouches-du-rhone.geojson new file mode 100644 index 0000000..23e6551 --- /dev/null +++ b/sen2like/tests/core/file_extractor/Arles-communes-13-bouches-du-rhone.geojson @@ -0,0 +1 @@ +{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[4.84446,43.33233],[4.84121,43.33087],[4.83873,43.32914],[4.83549,43.32875],[4.83386,43.32905],[4.83257,43.33061],[4.82957,43.33215],[4.82319,43.33596],[4.81367,43.33963],[4.80197,43.34315],[4.79502,43.345],[4.78336,43.34719],[4.77376,43.3485],[4.76535,43.34928],[4.75879,43.35004],[4.75368,43.35034],[4.73787,43.35068],[4.73088,43.35067],[4.71774,43.35031],[4.71378,43.34898],[4.7126,43.349],[4.7057,43.3478],[4.70223,43.34877],[4.69222,43.34822],[4.68687,43.34746],[4.6807,43.34575],[4.67667,43.34602],[4.67105,43.3468],[4.66426,43.34682],[4.65913,43.34625],[4.65296,43.34815],[4.6488,43.34885],[4.64489,43.34994],[4.64342,43.34939],[4.64238,43.35036],[4.63789,43.35022],[4.63809,43.351],[4.62717,43.35193],[4.62427,43.35253],[4.6131,43.35377],[4.59597,43.35703],[4.5858,43.36007],[4.57792,43.36341],[4.57256,43.36599],[4.56746,43.36896],[4.5628,43.37214],[4.55819,43.37686],[4.55623,43.38],[4.55561,43.38282],[4.55616,43.3854],[4.55743,43.38832],[4.55907,43.38929],[4.56272,43.39225],[4.5657,43.39339],[4.56737,43.39087],[4.57437,43.39387],[4.58034,43.39727],[4.58212,43.39747],[4.5877,43.40071],[4.59103,43.40368],[4.59271,43.40625],[4.59502,43.40642],[4.59779,43.4062],[4.6073,43.41118],[4.6146,43.41518],[4.61709,43.4154],[4.6204,43.41799],[4.61912,43.41982],[4.6187,43.42621],[4.61978,43.42805],[4.61941,43.42895],[4.61586,43.43075],[4.61621,43.43753],[4.61509,43.44133],[4.6154,43.4461],[4.62059,43.44766],[4.6213,43.44814],[4.61984,43.45534],[4.61984,43.45863],[4.6224,43.45935],[4.62358,43.46062],[4.62222,43.46356],[4.61975,43.46611],[4.62121,43.4708],[4.62542,43.47354],[4.62644,43.47496],[4.62841,43.47632],[4.63156,43.47747],[4.63615,43.48024],[4.63707,43.48165],[4.63832,43.48153],[4.64129,43.48433],[4.64462,43.48548],[4.64241,43.49031],[4.64164,43.49447],[4.64223,43.49813],[4.6414,43.50161],[4.63583,43.50578],[4.63379,43.51057],[4.63312,43.51454],[4.63386,43.51961],[4.63495,43.52044],[4.63929,43.52746],[4.6417,43.53051],[4.64318,43.5349],[4.64233,43.53725],[4.63392,43.54198],[4.62995,43.54693],[4.62963,43.54945],[4.62752,43.55273],[4.62584,43.55412],[4.62383,43.55994],[4.62433,43.56491],[4.62485,43.56706],[4.62483,43.57008],[4.62379,43.5733],[4.62317,43.57358],[4.61514,43.57216],[4.60643,43.5702],[4.59986,43.56968],[4.59649,43.56899],[4.59017,43.56677],[4.58746,43.56605],[4.57971,43.56558],[4.57742,43.56562],[4.57158,43.56686],[4.56616,43.5672],[4.56216,43.56813],[4.55829,43.57068],[4.5492,43.5727],[4.53227,43.57047],[4.52827,43.57276],[4.51269,43.58022],[4.51183,43.58046],[4.50384,43.5804],[4.5044,43.5752],[4.50394,43.5729],[4.49878,43.5696],[4.49706,43.56786],[4.49677,43.56471],[4.4941,43.56098],[4.49,43.55692],[4.4886,43.55438],[4.48594,43.55519],[4.48554,43.55595],[4.4789,4
3.55766],[4.47728,43.55551],[4.46427,43.56157],[4.46231,43.56261],[4.46078,43.56742],[4.4567,43.5729],[4.4534,43.57827],[4.45117,43.57913],[4.44954,43.58132],[4.44889,43.58344],[4.45227,43.58424],[4.45549,43.5859],[4.46068,43.58926],[4.46289,43.59205],[4.46679,43.59914],[4.46927,43.60094],[4.47242,43.60254],[4.47414,43.60386],[4.4753,43.6065],[4.47523,43.6083],[4.47252,43.61204],[4.46961,43.61413],[4.46681,43.61521],[4.46222,43.61489],[4.45453,43.61139],[4.44836,43.61029],[4.44388,43.60996],[4.43975,43.61067],[4.43684,43.61235],[4.43165,43.61623],[4.42704,43.62068],[4.42609,43.62231],[4.42617,43.62448],[4.42845,43.62772],[4.43182,43.63014],[4.4343,43.63355],[4.43821,43.64438],[4.4408,43.64905],[4.44472,43.65475],[4.44703,43.65871],[4.4513,43.66413],[4.45424,43.66663],[4.45796,43.66801],[4.46464,43.66914],[4.47049,43.6697],[4.4755,43.67109],[4.47747,43.67251],[4.4784,43.67424],[4.47843,43.67685],[4.47637,43.68149],[4.47567,43.68446],[4.47771,43.6896],[4.47865,43.6913],[4.4813,43.69444],[4.48642,43.69885],[4.4912,43.70056],[4.49627,43.70168],[4.50368,43.70226],[4.51596,43.70229],[4.52439,43.70213],[4.52701,43.70262],[4.53166,43.70599],[4.533,43.70665],[4.53694,43.70751],[4.53985,43.70727],[4.54525,43.70547],[4.54772,43.70388],[4.55278,43.70131],[4.55811,43.6995],[4.56336,43.69858],[4.57635,43.69797],[4.5789,43.69751],[4.58188,43.69637],[4.5846,43.69411],[4.58681,43.69155],[4.58915,43.6897],[4.59303,43.68746],[4.59735,43.68652],[4.60191,43.68594],[4.60583,43.68608],[4.61305,43.68823],[4.61687,43.68885],[4.61858,43.68864],[4.62237,43.68728],[4.6251,43.68544],[4.62745,43.68543],[4.62797,43.68845],[4.62766,43.69054],[4.62543,43.6957],[4.62325,43.69915],[4.61601,43.70694],[4.61475,43.70935],[4.61304,43.71429],[4.61234,43.71867],[4.61228,43.72475],[4.61364,43.72982],[4.61647,43.73427],[4.62272,43.74324],[4.62566,43.74907],[4.62812,43.75771],[4.63265,43.75772],[4.63578,43.75995],[4.63946,43.76032],[4.64144,43.75371],[4.64205,43.75019],[4.64046,43.74602],[4.64282,43.74482],[4.64881,43.74305],[4.65038,43.74309],[4.65771,43.74093],[4.66237,43.74035],[4.66875,43.73573],[4.67434,43.73394],[4.67492,43.73294],[4.67885,43.72999],[4.67977,43.72861],[4.67299,43.72619],[4.66597,43.72244],[4.65942,43.71822],[4.65315,43.71326],[4.65234,43.71184],[4.6597,43.70736],[4.66122,43.70785],[4.66626,43.70857],[4.66654,43.70745],[4.66916,43.70753],[4.66901,43.70266],[4.66732,43.69955],[4.66686,43.69724],[4.66839,43.69726],[4.6694,43.69572],[4.68441,43.69496],[4.68973,43.69734],[4.69153,43.70069],[4.69631,43.70216],[4.70039,43.70161],[4.70852,43.70013],[4.71289,43.69849],[4.7547,43.69435],[4.75553,43.69525],[4.75863,43.69585],[4.75898,43.692],[4.76553,43.69241],[4.76717,43.68813],[4.76983,43.68741],[4.77171,43.68608],[4.77553,43.68445],[4.77748,43.68404],[4.77754,43.68285],[4.77527,43.68307],[4.77167,43.67377],[4.77258,43.67296],[4.7735,43.67013],[4.77359,43.66769],[4.76837,43.66812],[4.76601,43.66001],[4.767,43.65997],[4.76645,43.65552],[4.7654,43.65145],[4.76519,43.63943],[4.76686,43.63748],[4.76432,43.63463],[4.76197,43.63388],[4.75963,43.63395],[4.75582,43.63266],[4.75352,43.62833],[4.74879,43.626],[4.75043,43.62398],[4.75167,43.6213],[4.75179,43.61774],[4.75264,43.61618],[4.75415,43.61525],[4.75947,43.61001],[4.7609,43.60746],[4.76126,43.60542],[4.76141,43.59979],[4.76031,43.594],[4.76206,43.59009],[4.76388,43.58763],[4.76536,43.58431],[4.76725,43.58197],[4.77489,43.57523],[4.77919,43.57554],[4.80029,43.55852],[4.80654,43.55253],[4.81522,43.54243],[4.83418,43.52778],[4.85615,43.51033],[4.86206,43.50542],[4.87639,43.49
452],[4.87386,43.49263],[4.86861,43.48241],[4.86212,43.47439],[4.85564,43.46737],[4.85373,43.46588],[4.83814,43.46257],[4.82465,43.47303],[4.82282,43.47418],[4.81488,43.46833],[4.81185,43.4698],[4.80797,43.47312],[4.80636,43.47594],[4.8061,43.47729],[4.80471,43.47843],[4.8027,43.47871],[4.79887,43.4775],[4.7968,43.47808],[4.79515,43.47713],[4.79202,43.47441],[4.78519,43.47874],[4.78368,43.47927],[4.77966,43.47954],[4.77372,43.47759],[4.76961,43.4771],[4.764,43.47425],[4.76242,43.47223],[4.75694,43.46713],[4.75459,43.46367],[4.75232,43.46157],[4.74992,43.46167],[4.74952,43.46019],[4.74694,43.45452],[4.74311,43.44809],[4.74039,43.44454],[4.73786,43.44047],[4.73438,43.43418],[4.73375,43.43096],[4.73435,43.42783],[4.73671,43.42607],[4.74023,43.42415],[4.7482,43.41661],[4.75396,43.40873],[4.76342,43.40506],[4.76827,43.40251],[4.77731,43.39716],[4.78333,43.39289],[4.79023,43.39007],[4.79796,43.38664],[4.81378,43.37692],[4.82196,43.37213],[4.82663,43.37023],[4.83115,43.36869],[4.83329,43.36564],[4.83428,43.36087],[4.83488,43.35427],[4.83476,43.35046],[4.83413,43.34698],[4.83498,43.34298],[4.8378,43.33764],[4.8416,43.33346],[4.84446,43.33233]]]},"properties":{"code":"13004","nom":"Arles"}}]} \ No newline at end of file diff --git a/sen2like/tests/core/file_extractor/Avignon-communes-84-vaucluse.geojson b/sen2like/tests/core/file_extractor/Avignon-communes-84-vaucluse.geojson new file mode 100644 index 0000000..acb84f0 --- /dev/null +++ b/sen2like/tests/core/file_extractor/Avignon-communes-84-vaucluse.geojson @@ -0,0 +1 @@ +{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[4.79198,43.95227],[4.79278,43.95378],[4.79716,43.95672],[4.808,43.96003],[4.81156,43.9618],[4.81402,43.964],[4.81506,43.96763],[4.81381,43.97071],[4.81117,43.97464],[4.81015,43.97704],[4.81012,43.97957],[4.81126,43.98562],[4.81247,43.98766],[4.81598,43.9888],[4.8196,43.98838],[4.82996,43.98558],[4.83199,43.98526],[4.83766,43.98519],[4.84211,43.98647],[4.84438,43.98937],[4.84487,43.9916],[4.84472,43.99379],[4.84555,43.99668],[4.84764,43.99356],[4.85157,43.98911],[4.85376,43.98527],[4.85443,43.98319],[4.85436,43.98035],[4.8534,43.97542],[4.85398,43.97127],[4.85327,43.96768],[4.85181,43.9656],[4.84856,43.96234],[4.84675,43.96098],[4.84396,43.95987],[4.83914,43.95935],[4.83808,43.95246],[4.84257,43.95252],[4.84708,43.95147],[4.85599,43.95064],[4.85971,43.95082],[4.86246,43.95197],[4.87228,43.95321],[4.88397,43.95277],[4.88792,43.94976],[4.89064,43.94502],[4.89074,43.93848],[4.89393,43.93746],[4.89551,43.93653],[4.89949,43.93054],[4.89751,43.92365],[4.89697,43.91995],[4.89707,43.91772],[4.89789,43.9154],[4.89985,43.91432],[4.90577,43.91315],[4.91251,43.91053],[4.91756,43.90932],[4.92085,43.90741],[4.92718,43.90528],[4.9258,43.90287],[4.92423,43.90238],[4.92275,43.89997],[4.91459,43.89795],[4.91941,43.89416],[4.92326,43.88992],[4.92201,43.88676],[4.90816,43.88626],[4.90232,43.88793],[4.88638,43.89941],[4.88133,43.90156],[4.85309,43.91138],[4.82174,43.91283],[4.81097,43.91428],[4.7945,43.91673],[4.7736,43.91963],[4.75204,43.92261],[4.73906,43.92406],[4.74255,43.92775],[4.74449,43.92933],[4.75,43.93218],[4.75901,43.93459],[4.76153,43.93492],[4.76878,43.9338],[4.77306,43.9357],[4.77436,43.93587],[4.77902,43.93789],[4.78247,43.94144],[4.78616,43.94409],[4.78897,43.94733],[4.79198,43.95227]]]},"properties":{"code":"84007","nom":"Avignon"}}]} \ No newline at end of file diff --git a/sen2like/tests/core/file_extractor/abstract_extractor_test.py 
b/sen2like/tests/core/file_extractor/abstract_extractor_test.py
new file mode 100644
index 0000000..7091d3f
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/abstract_extractor_test.py
@@ -0,0 +1,111 @@
+"""abstract reader test module
+"""
+import os
+import pathlib
+import shutil
+import filecmp
+
+from unittest import TestCase
+
+from core.S2L_config import config
+from core.file_extractor.file_extractor import extractor_class, ImageMasks
+
+test_folder_path = os.path.dirname(__file__)
+configuration_file = os.path.join(test_folder_path, 'config.ini')
+
+
+class AbstractExtractorTestCase(TestCase):
+    """Base TestCase class for FileExtractor tests
+    """
+
+    def __init__(self, reader_class, roi_path_file, dataset, methodName):
+        """Init instance.
+        Loads a Config with the config file at the same file level.
+
+        Args:
+            reader_class (): 'Reader' concrete class to test
+            roi_path_file (_type_): roi path file for roi based tests
+            dataset (_type_): Dataset path to retrieve test data, relative to base_url config param
+            methodName (_type_): Method name (TestCase constructor param)
+
+        Raises:
+            Exception: if config cannot be loaded
+        """
+        super().__init__(methodName)
+        self._reader_class = reader_class
+        self.roi_path_file = roi_path_file
+        if not config.initialize(configuration_file):
+            raise Exception
+        self._product_path = os.path.join(config.get('base_url'), dataset)
+
+    def setUp(self):
+        config.set('tile', '31TFJ')
+
+    def tearDown(self):
+        test_dir = pathlib.Path(os.path.join(
+            test_folder_path, self._testMethodName))
+        shutil.rmtree(test_dir)
+        print(f"End of {self._testMethodName}")
+
+    def _verify(self, product_path: str, expected_no_data_file_name: str, test_method_name: str) -> ImageMasks:
+        """Read given product and do some verification
+
+        Args:
+            product_path (str): Product to load and read
+            expected_no_data_file_name (str): expected name of the no data file
+            test_method_name (str): test method name to generate the result folder and retrieve ref data
+
+        Returns:
+            ImageMasks: image masks produced by the extractor
+        """
+        # init reader
+        s2_reader = self._reader_class(
+            os.path.join(self._product_path, product_path))
+
+        mask_filename = os.path.join(
+            test_folder_path, test_method_name, f"{test_method_name}.tif")
+
+        image_masks = extractor_class.get(s2_reader.__class__.__name__)(s2_reader).get_valid_pixel_mask(mask_filename, self.roi_path_file)
+
+        # get masks
+        # s2_reader.get_valid_pixel_mask(os.path.join(
+        #     test_folder_path, test_method_name, f"{test_method_name}.tif"))
+
+        # verify nodata mask
+        nodata_mask_path = os.path.join(
+            test_folder_path, test_method_name, expected_no_data_file_name)
+        nodata_mask_ref_path = os.path.join(
+            test_folder_path, "ref_data", test_method_name, expected_no_data_file_name)
+        self._compare(nodata_mask_path, nodata_mask_ref_path)
+
+        # verify validity mask
+        validity_mask_path = os.path.join(
+            test_folder_path, test_method_name, f"{test_method_name}.tif")
+        validity_mask_ref_path = os.path.join(
+            test_folder_path, "ref_data", test_method_name, f"{test_method_name}.tif")
+        self._compare(validity_mask_path, validity_mask_ref_path)
+
+        # FOR COVERAGE ONLY
+        angle_file = os.path.join(
+            test_folder_path, test_method_name, f"{test_method_name}_tie_points.tif")
+        extractor_class.get(s2_reader.__class__.__name__)(s2_reader).get_angle_images(angle_file)
+
+        return image_masks
+
+    def _compare(self, file_path: str, ref_file_path: str):
+        """Compare the 2 given files
+
+        Args:
+            file_path (str): file to verify
+            ref_file_path (str): reference file
+        """
+        self.assertTrue(
+            os.path.exists(file_path),
+            msg=f"File '{file_path}' does not exist",
+        )
+        # same?
+        self.assertTrue(
+            filecmp.cmp(file_path, ref_file_path, shallow=False),
+            msg=f"File differs : \n'{file_path}'\n'{ref_file_path}'",
+        )
+        filecmp.clear_cache()
diff --git a/sen2like/tests/core/file_extractor/config.ini b/sen2like/tests/core/file_extractor/config.ini
new file mode 100644
index 0000000..fdadb1d
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/config.ini
@@ -0,0 +1,62 @@
+[Processing]
+doStitching = True
+doGeometryKLT = True
+doToa = True
+doInterCalibration = True
+doAtmcor = True
+doNbar = True
+doSbaf = True
+doFusion = True
+doPackager = False
+doPackagerL2H = True
+doPackagerL2F = True
+
+[Directories]
+archive_dir = /data/HLS
+cams_dir = /data/CAMS
+
+[Downloader]
+coverage = 0.1
+# Local
+base_url = /data/PRODUCTS
+;url_parameters_pattern_Sentinel2 = {base_url}/{mission}/{tile}
+;url_parameters_pattern_Landsat8 = {base_url}/{mission}/{path}/{row}
+;url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row}
+
+# Creodias
+# base_url = https://finder.creodias.eu/resto/api/collections
+# cloud_cover = 100
+# location_Landsat8 = path={path}&row={row}
+# location_Sentinel2 = processingLevel=LEVEL1C&productIdentifier=%25{tile}%25
+# url_parameters_pattern = {base_url}/{mission}/search.json?maxRecords=100&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET
+# thumbnail_property = properties/productIdentifier
+# cloud_cover_property = properties/cloudCover
+
+
+[Geometry]
+reference_band = B04
+doMatchingCorrection = True
+
+[Atmcor]
+use_sen2cor = True
+sen2cor_path = ../sen2cor/process.py
+
+[fusion]
+# predict_method: predict or composite (most recent valid pixels)
+predict_method = predict
+predict_nb_products = 2
+
+[Stitching]
+reframe_margin = 50
+
+[OutputFormat]
+gain = 10000
+offset = 0
+
+[Multiprocessing]
+number_of_process = 5
+
+[RunTime]
+dx = 0
+dy = 0
+freeze_dx_dy = True
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_landsat/nodata_pixel_mask.tif b/sen2like/tests/core/file_extractor/ref_data/test_landsat/nodata_pixel_mask.tif
new file mode 100644
index 0000000..c75b124
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_landsat/nodata_pixel_mask.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_landsat/test_landsat.tif b/sen2like/tests/core/file_extractor/ref_data/test_landsat/test_landsat.tif
new file mode 100644
index 0000000..33b6793
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_landsat/test_landsat.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/nodata_pixel_mask.tif b/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/nodata_pixel_mask.tif
new file mode 100644
index 0000000..0a29f94
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/nodata_pixel_mask.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/test_landsat_maja.tif b/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/test_landsat_maja.tif
new file mode 100644
index 0000000..3eae13b
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_landsat_maja/test_landsat_maja.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/nodata_pixel_mask_B01.tif b/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/nodata_pixel_mask_B01.tif
new file mode 100644
index 0000000..218f9c2
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/nodata_pixel_mask_B01.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/test_s2_l1c.tif b/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/test_s2_l1c.tif
new file mode 100644
index 0000000..f1d0a7c
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_s2_l1c/test_s2_l1c.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/nodata_pixel_mask.tif b/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/nodata_pixel_mask.tif
new file mode 100644
index 0000000..218f9c2
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/nodata_pixel_mask.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/test_s2_l2a.tif b/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/test_s2_l2a.tif
new file mode 100644
index 0000000..c409d8a
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_s2_l2a/test_s2_l2a.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/nodata_pixel_mask.tif b/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/nodata_pixel_mask.tif
new file mode 100644
index 0000000..31a26d1
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/nodata_pixel_mask.tif differ
diff --git a/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/test_sentinel2_maja.tif b/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/test_sentinel2_maja.tif
new file mode 100644
index 0000000..dc8fcc0
Binary files /dev/null and b/sen2like/tests/core/file_extractor/ref_data/test_sentinel2_maja/test_sentinel2_maja.tif differ
diff --git a/sen2like/tests/core/file_extractor/test.geojson b/sen2like/tests/core/file_extractor/test.geojson
new file mode 100644
index 0000000..9dd27c6
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/test.geojson
@@ -0,0 +1,7 @@
+{
+"type": "FeatureCollection",
+"name": "test",
+"features": [
+{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.47218893375359, 43.804113298213515 ], [ 4.517506651387607, 43.600762122517217 ], [ 4.878754375098853, 43.532916050669037 ], [ 4.984071424564012, 43.68084789624546 ], [ 4.95976814577441, 43.832154288708018 ], [ 4.745415704398131, 43.964117656378917 ], [ 4.745415704398131, 43.964117656378917 ], [ 4.47218893375359, 43.804113298213515 ] ] ] } }
+]
+}
diff --git a/sen2like/tests/core/file_extractor/test_landsat.py b/sen2like/tests/core/file_extractor/test_landsat.py
new file mode 100644
index 0000000..f58cf89
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/test_landsat.py
@@ -0,0 +1,24 @@
+"""Landsat Reader test module
+"""
+import os
+from core.readers.landsat import LandsatMTL
+from abstract_extractor_test import AbstractExtractorTestCase
+
+
+test_folder_path = os.path.dirname(__file__)
+roi_path_file = os.path.join(
+    test_folder_path, 'Avignon-communes-84-vaucluse.geojson')
+
+
+class TestLandsatReader(AbstractExtractorTestCase):
+    """LandsatMTL test class
+    """
+
+    def __init__(self, methodName="unitTest"):
+        super().__init__(LandsatMTL, roi_path_file, 'Landsat8', methodName)
+
+    def test_landsat(self):
+        """test Landsat 8 L1
+        """
+        self._verify("196/29/LC81960292017318MTI00",
+                     "nodata_pixel_mask.tif", self._testMethodName)
diff --git a/sen2like/tests/core/file_extractor/test_landsat_maja.py b/sen2like/tests/core/file_extractor/test_landsat_maja.py
new file mode 100644
index 0000000..7e3020d
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/test_landsat_maja.py
@@ -0,0 +1,24 @@
+"""Landsat Maja Reader test module
+"""
+import os
+from core.readers.landsat_maja import LandsatMajaMTL
+from abstract_extractor_test import AbstractExtractorTestCase
+
+
+test_folder_path = os.path.dirname(__file__)
+roi_path_file = os.path.join(
+    test_folder_path, 'Avignon-communes-84-vaucluse.geojson')
+
+
+class TestLandsatMajaExtractor(AbstractExtractorTestCase):
+    """LandsatMajaMTL test class
+    """
+
+    def __init__(self, methodName="unitTest"):
+        super().__init__(LandsatMajaMTL, roi_path_file, 'L2A_MAJA', methodName)
+
+    def test_landsat_maja(self):
+        """test Landsat 8 MAJA L2A
+        """
+        self._verify("LANDSAT8-OLITIRS-XS_20210821-102351-515_L2A_T31TFJ_C_V2-2",
+                     "nodata_pixel_mask.tif", self._testMethodName)
diff --git a/sen2like/tests/core/file_extractor/test_sentinel2.py b/sen2like/tests/core/file_extractor/test_sentinel2.py
new file mode 100644
index 0000000..971afa2
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/test_sentinel2.py
@@ -0,0 +1,37 @@
+"""Sentinel 2 Reader test module
+"""
+import os
+from core.readers.sentinel2 import Sentinel2MTL
+from abstract_extractor_test import AbstractExtractorTestCase
+
+
+test_folder_path = os.path.dirname(__file__)
+roi_path_file = os.path.join(
+    test_folder_path, 'Arles-communes-13-bouches-du-rhone.geojson')
+
+
+class TestSentinel2Reader(AbstractExtractorTestCase):
+    """Sentinel2MTL test class
+    """
+
+    def __init__(self, methodName="unitTest"):
+        super().__init__(Sentinel2MTL, roi_path_file, 'Sentinel2', methodName)
+
+    def test_s2_l1c(self):
+        """test S2 L1C
+        """
+        image_masks = self._verify("31TFJ/S2A_MSIL1C_20171030T104151_N0206_R008_T31TFJ_20171030T160027.SAFE",
+                                   "nodata_pixel_mask_B01.tif", self._testMethodName)
+
+        # self.assertEqual(s2_reader.mask_info.mask_size, 30140100)
+        # self.assertEqual(s2_reader.mask_info.nb_valid_pixel, 1855204)
+        # self.assertEqual(s2_reader.mask_info.nb_nodata_pixel, 28247958)
+
+        # self.assertEqual(s2_reader.mask_info.get_valid_pixel_percentage(), 98.04782093521523)
+        # self.assertEqual(s2_reader.mask_info.get_nodata_pixel_percentage(), 93.72217743139538)
+
+    def test_s2_l2a(self):
+        """test S2 L2A
+        """
+        self._verify("31TFJ/S2A_MSIL2A_20171030T104151_N9999_R008_T31TFJ_20200519T152631.SAFE",
+                     "nodata_pixel_mask.tif", self._testMethodName)
diff --git a/sen2like/tests/core/file_extractor/test_sentinel2_maja.py b/sen2like/tests/core/file_extractor/test_sentinel2_maja.py
new file mode 100644
index 0000000..22bc166
--- /dev/null
+++ b/sen2like/tests/core/file_extractor/test_sentinel2_maja.py
@@ -0,0 +1,24 @@
+"""Sentinel 2 Maja Reader test module
+"""
+import os
+from core.readers.sentinel2_maja import Sentinel2MajaMTL
+from abstract_extractor_test import AbstractExtractorTestCase
+
+
+test_folder_path = os.path.dirname(__file__)
+roi_path_file = os.path.join(
+    test_folder_path, 'Avignon-communes-84-vaucluse.geojson')
+
+
+class TestSentinel2MajaReader(AbstractExtractorTestCase):
+    """Sentinel2MajaMTL test class
+    """
+
+    def __init__(self, methodName="unitTest"):
+        super().__init__(Sentinel2MajaMTL, roi_path_file, 'L2A_MAJA', methodName)
+
+    def test_sentinel2_maja(self):
+        """test S2 MAJA L2A
+        """
+        self._verify("SENTINEL2A_20220815-104902-689_L2A_T31TFJ_C_V3-0",
+                     "nodata_pixel_mask.tif", self._testMethodName)
diff --git 
a/sen2like/tests/core/product_archive/Avignon-communes-84-vaucluse.geojson b/sen2like/tests/core/product_archive/Avignon-communes-84-vaucluse.geojson new file mode 100644 index 0000000..acb84f0 --- /dev/null +++ b/sen2like/tests/core/product_archive/Avignon-communes-84-vaucluse.geojson @@ -0,0 +1 @@ +{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[4.79198,43.95227],[4.79278,43.95378],[4.79716,43.95672],[4.808,43.96003],[4.81156,43.9618],[4.81402,43.964],[4.81506,43.96763],[4.81381,43.97071],[4.81117,43.97464],[4.81015,43.97704],[4.81012,43.97957],[4.81126,43.98562],[4.81247,43.98766],[4.81598,43.9888],[4.8196,43.98838],[4.82996,43.98558],[4.83199,43.98526],[4.83766,43.98519],[4.84211,43.98647],[4.84438,43.98937],[4.84487,43.9916],[4.84472,43.99379],[4.84555,43.99668],[4.84764,43.99356],[4.85157,43.98911],[4.85376,43.98527],[4.85443,43.98319],[4.85436,43.98035],[4.8534,43.97542],[4.85398,43.97127],[4.85327,43.96768],[4.85181,43.9656],[4.84856,43.96234],[4.84675,43.96098],[4.84396,43.95987],[4.83914,43.95935],[4.83808,43.95246],[4.84257,43.95252],[4.84708,43.95147],[4.85599,43.95064],[4.85971,43.95082],[4.86246,43.95197],[4.87228,43.95321],[4.88397,43.95277],[4.88792,43.94976],[4.89064,43.94502],[4.89074,43.93848],[4.89393,43.93746],[4.89551,43.93653],[4.89949,43.93054],[4.89751,43.92365],[4.89697,43.91995],[4.89707,43.91772],[4.89789,43.9154],[4.89985,43.91432],[4.90577,43.91315],[4.91251,43.91053],[4.91756,43.90932],[4.92085,43.90741],[4.92718,43.90528],[4.9258,43.90287],[4.92423,43.90238],[4.92275,43.89997],[4.91459,43.89795],[4.91941,43.89416],[4.92326,43.88992],[4.92201,43.88676],[4.90816,43.88626],[4.90232,43.88793],[4.88638,43.89941],[4.88133,43.90156],[4.85309,43.91138],[4.82174,43.91283],[4.81097,43.91428],[4.7945,43.91673],[4.7736,43.91963],[4.75204,43.92261],[4.73906,43.92406],[4.74255,43.92775],[4.74449,43.92933],[4.75,43.93218],[4.75901,43.93459],[4.76153,43.93492],[4.76878,43.9338],[4.77306,43.9357],[4.77436,43.93587],[4.77902,43.93789],[4.78247,43.94144],[4.78616,43.94409],[4.78897,43.94733],[4.79198,43.95227]]]},"properties":{"code":"84007","nom":"Avignon"}}]} \ No newline at end of file diff --git a/sen2like/tests/core/product_archive/on_rome.geojson b/sen2like/tests/core/product_archive/on_rome.geojson new file mode 100644 index 0000000..5d09092 --- /dev/null +++ b/sen2like/tests/core/product_archive/on_rome.geojson @@ -0,0 +1,48 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 12.389144897460938, + 41.85319643776675 + ], + [ + 12.434463500976562, + 41.748518877483804 + ], + [ + 12.580718994140625, + 41.74544507131366 + ], + [ + 12.623977661132812, + 41.82122266302155 + ], + [ + 12.636680603027344, + 41.864703086040585 + ], + [ + 12.545700073242186, + 41.908153959965105 + ], + [ + 12.472572326660156, + 41.90764294500524 + ], + [ + 12.389144897460938, + 41.85319643776675 + ] + ] + ] + } + } + ] + } \ No newline at end of file diff --git a/sen2like/tests/core/product_archive/test_tile_db.py b/sen2like/tests/core/product_archive/test_tile_db.py new file mode 100644 index 0000000..7236afb --- /dev/null +++ b/sen2like/tests/core/product_archive/test_tile_db.py @@ -0,0 +1,49 @@ +import os +import unittest + +from core.product_archive import tile_db +from core.product_archive.product_selector import _read_polygon_from_json + + +class TestTileDb(unittest.TestCase): + + def test_mgrs_to_wrs(self): + res = 
+        # [([196, 30], 0.7600012569702809), ([196, 29], 0.41728420731535404), ([197, 29], 0.23638134146149337),
+        #  ([197, 30], 0.17748451765693712)]
+        self.assertEqual(len(res), 4)
+        self.assertEqual(res[0], ([196, 30], 0.7600012569702809))
+        self.assertEqual(res[1], ([196, 29], 0.41728420731535404))
+        self.assertEqual(res[2], ([197, 29], 0.23638134146149337))
+        self.assertEqual(res[3], ([197, 30], 0.17748451765693712))
+
+    def test_wrs_to_mgrs(self):
+        res = tile_db.wrs_to_mgrs("196_30")
+        # ['25XDA', '25XEA', '25WEV', '25WDV', '25XDB']
+        self.assertEqual(len(res), 5)
+        self.assertEqual(res[0], '25XDA')
+        self.assertEqual(res[1], '25XEA')
+        self.assertEqual(res[2], '25WEV')
+        self.assertEqual(res[3], '25WDV')
+        self.assertEqual(res[4], '25XDB')
+
+    def test_one_tile_contains_roi(self):
+        test_folder_path = os.path.dirname(__file__)
+        roi_path_file = os.path.join(test_folder_path, 'Avignon-communes-84-vaucluse.geojson')
+        polygon = _read_polygon_from_json(roi_path_file)
+        res = tile_db.tiles_contains_roi(polygon)
+        self.assertEqual(res[0], "31TFJ")
+        self.assertEqual(len(res), 1)
+
+    def test_multiple_tiles_contains_roi(self):
+        test_folder_path = os.path.dirname(__file__)
+        roi_path_file = os.path.join(test_folder_path, 'on_rome.geojson')
+        polygon = _read_polygon_from_json(roi_path_file)
+        res = tile_db.tiles_contains_roi(polygon)
+        res = sorted(res)
+        self.assertEqual(res[0], "32TQM")
+        self.assertEqual(res[1], "33TTG")
+        self.assertEqual(len(res), 2)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/sen2like/tests/core/products/landsat_8/test_landsat8.py b/sen2like/tests/core/products/landsat_8/test_landsat8.py
index c26081d..3665b59 100644
--- a/sen2like/tests/core/products/landsat_8/test_landsat8.py
+++ b/sen2like/tests/core/products/landsat_8/test_landsat8.py
@@ -1,11 +1,11 @@
 from unittest import TestCase
 
-from core.products import get_product
+from core.products import get_s2l_product_class
 
 
 class TestLandsat8Product(TestCase):
     def get_best_product(self, products, reference):
-        reader = get_product(products[0])
+        reader = get_s2l_product_class(products[0])
         self.assertEqual([reference], reader.best_product(products))
 
     def test_best_product(self):
diff --git a/sen2like/tests/s2l_processes/config.ini b/sen2like/tests/s2l_processes/config.ini
new file mode 100644
index 0000000..fdadb1d
--- /dev/null
+++ b/sen2like/tests/s2l_processes/config.ini
@@ -0,0 +1,62 @@
+[Processing]
+doStitching = True
+doGeometryKLT = True
+doToa = True
+doInterCalibration = True
+doAtmcor = True
+doNbar = True
+doSbaf = True
+doFusion = True
+doPackager = False
+doPackagerL2H = True
+doPackagerL2F = True
+
+[Directories]
+archive_dir = /data/HLS
+cams_dir = /data/CAMS
+
+[Downloader]
+coverage = 0.1
+# Local
+base_url = /data/PRODUCTS
+;url_parameters_pattern_Sentinel2 = {base_url}/{mission}/{tile}
+;url_parameters_pattern_Landsat8 = {base_url}/{mission}/{path}/{row}
+;url_parameters_pattern_Landsat9 = {base_url}/{mission}/{path}/{row}
+
+# Creodias
+# base_url = https://finder.creodias.eu/resto/api/collections
+# cloud_cover = 100
+# location_Landsat8 = path={path}&row={row}
+# location_Sentinel2 = processingLevel=LEVEL1C&productIdentifier=%25{tile}%25
+# url_parameters_pattern = {base_url}/{mission}/search.json?maxRecords=100&_pretty=true&cloudCover=%5B0%2C{cloud_cover}%5D&startDate={start_date}&completionDate={end_date}&sortParam=startDate&sortOrder=ascending&status=all&{location}&dataset=ESA-DATASET
+# thumbnail_property = properties/productIdentifier
+# cloud_cover_property = properties/cloudCover
+
+
+[Geometry]
+reference_band = B04
+doMatchingCorrection = True
+
+[Atmcor]
+use_sen2cor = True
+sen2cor_path = ../sen2cor/process.py
+
+[fusion]
+# predict_method: predict or composite (most recent valid pixels)
+predict_method = predict
+predict_nb_products = 2
+
+[Stitching]
+reframe_margin = 50
+
+[OutputFormat]
+gain = 10000
+offset = 0
+
+[Multiprocessing]
+number_of_process = 5
+
+[RunTime]
+dx = 0
+dy = 0
+freeze_dx_dy = True
diff --git a/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py b/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py
new file mode 100644
index 0000000..6c67d6a
--- /dev/null
+++ b/sen2like/tests/s2l_processes/test_S2L_InterCalibration.py
@@ -0,0 +1,75 @@
+import os
+from unittest import TestCase
+
+from core.S2L_config import config
+from core.image_file import S2L_ImageFile
+from core.products.sentinel_2.sentinel2 import Sentinel2Product
+from core.products.landsat_8.landsat8 import Landsat8Product
+
+from s2l_processes.S2L_InterCalibration import S2L_InterCalibration
+from s2l_processes.S2L_Toa import S2L_Toa
+
+test_folder_path = os.path.dirname(__file__)
+configuration_file = os.path.join(test_folder_path, 'config.ini')
+
+
+class TestS2L_InterCalibration(TestCase):
+
+    def __init__(self, methodName):
+        super().__init__(methodName)
+        if not config.initialize(configuration_file):
+            raise Exception(f"Cannot initialize configuration from {configuration_file}")
+
+        config.set('wd', os.path.join(test_folder_path, methodName))
+
+    def test_S2B_band01(self):
+        _product_path = os.path.join(config.get('base_url'), 'Sentinel2', '31TFJ',
+                                     'S2B_MSIL1C_20171114T104259_N0206_R008_T31TFJ_20171114T124011.SAFE')
+        product = Sentinel2Product(_product_path)
+        image = S2L_ImageFile(
+            os.path.join(
+                _product_path, "GRANULE/L1C_T31TFJ_A003609_20171114T104257/IMG_DATA/T31TFJ_20171114T104259_B01.jp2"))
+
+        # MUST RUN TOA before inter calibration
+        block = S2L_Toa()
+        image = block.process(product, image, "B01")
+
+        block = S2L_InterCalibration()
+        result_image = block.process(product, image, "B01")
+
+        self.assertNotEqual(image.filepath, result_image.filepath,
+                            "Result image should be different from the input image for S2B")
+
+    def test_S2B_band09(self):
+        _product_path = os.path.join(config.get('base_url'), 'Sentinel2', '31TFJ',
+                                     'S2B_MSIL1C_20171114T104259_N0206_R008_T31TFJ_20171114T124011.SAFE')
+        product = Sentinel2Product(_product_path)
+        image = S2L_ImageFile(
+            os.path.join(
+                _product_path, "GRANULE/L1C_T31TFJ_A003609_20171114T104257/IMG_DATA/T31TFJ_20171114T104259_B09.jp2"))
+
+        # MUST RUN TOA before inter calibration
+        block = S2L_Toa()
+        image = block.process(product, image, "B09")
+
+        block = S2L_InterCalibration()
+        result_image = block.process(product, image, "B09")
+
+        self.assertEqual(image.filepath, result_image.filepath, "Result image should be the same")
+
+    def test_landsat(self):
+        _product_path = os.path.join(config.get('base_url'), 'Landsat8',
+                                     '196/29/LC81960292017318MTI00')
+        product = Landsat8Product(_product_path)
+        image = S2L_ImageFile(
+            os.path.join(
+                _product_path, "LC81960292017318MTI00_B3.TIF"))
+
+        # MUST RUN TOA before inter calibration
+        block = S2L_Toa()
+        image = block.process(product, image, "B3")
+
+        block = S2L_InterCalibration()
+        result_image = block.process(product, image, "B3")
+
+        self.assertEqual(image.filepath, result_image.filepath, "Result image should be the same")
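
The test_tile_db.py cases above double as a reference for the MGRS/WRS-2 conversion helpers. The following is a minimal sketch of driving them directly, assuming the sen2like sources are on PYTHONPATH and the bundled tile database is available; the ROI file name and the printed formatting are illustrative only:

```python
# Sketch only: same imports as test_tile_db.py above.
from core.product_archive import tile_db
from core.product_archive.product_selector import _read_polygon_from_json

# MGRS tile -> overlapping Landsat WRS-2 [path, row] pairs with a coverage
# ratio, ordered by decreasing coverage (see test_mgrs_to_wrs above).
for path_row, coverage in tile_db.mgrs_to_wrs("31TFJ"):
    print(path_row, f"{coverage:.2%}")

# WRS-2 "path_row" string -> list of intersecting MGRS tiles.
print(tile_db.wrs_to_mgrs("196_30"))

# ROI GeoJSON polygon -> MGRS tiles needed to cover it (one tile for the
# Avignon ROI, two for the Rome ROI that straddles a tile boundary).
polygon = _read_polygon_from_json("Avignon-communes-84-vaucluse.geojson")
print(tile_db.tiles_contains_roi(polygon))
```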
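The inter-calibration tests also document the processing-block contract: a block is constructed with no arguments and applied via process(product, image, band), TOA conversion must run first, and per the assertions only Sentinel-2B bands subject to inter-calibration (e.g. B01) yield a new image, while B09 and Landsat 8 bands pass through unchanged. Below is a hedged sketch of the same chaining outside the test harness; the config, product, and image paths are placeholders, not real data:

```python
# Sketch only: assumes a valid config.ini and products laid out as in the
# tests above, with sen2like on PYTHONPATH.
import os

from core.S2L_config import config
from core.image_file import S2L_ImageFile
from core.products.sentinel_2.sentinel2 import Sentinel2Product
from s2l_processes.S2L_Toa import S2L_Toa
from s2l_processes.S2L_InterCalibration import S2L_InterCalibration

config.initialize("config.ini")              # processing switches, directories
config.set('wd', "/tmp/s2l_wd")              # working dir for intermediate files

product = Sentinel2Product("/path/to/S2B_MSIL1C_product.SAFE")  # placeholder
image = S2L_ImageFile("/path/to/T31TFJ_B01.jp2")                # placeholder

# Each block returns an S2L_ImageFile; chain TOA first, then inter-calibration.
image = S2L_Toa().process(product, image, "B01")
image = S2L_InterCalibration().process(product, image, "B01")
print(image.filepath)
```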