diff --git a/.github/workflows/pylint_check.yaml b/.github/workflows/pylint_check.yaml deleted file mode 100644 index d4fafcc..0000000 --- a/.github/workflows/pylint_check.yaml +++ /dev/null @@ -1,79 +0,0 @@ -name: Enforcing pylint checks -on: - push: - branches: - - master - - develop - pull_request: - branches: - - master - - develop - tags: - - v* - -jobs: - update_python: - runs-on: ubuntu-latest - name: Running pylint checks - steps: - - name: Current python version - run: python3 --version || echo python3 not installed - - name: Install Python 3.7 - run: sudo apt-get install -y --no-install-recommends python3.7 python3-pip && sudo ln -sfn /usr/bin/python3.7 /usr/bin/python3 - id: install_python_3_7 - - name: Updated python version - run: python3 --version - - name: PYTHONPATH environment variable - run: echo ${PYTHONPATH} - - name: Update pip - run: python3 -m pip install --upgrade --no-cache-dir pip - id: pip-install - - name: Fetch/update setuptools - run: python3 -m pip install --upgrade --no-cache-dir setuptools - id: setuptools-install - - name: Install python-apt - run: sudo apt-get install -y python-apt - - name: HACK to fix apt-get update problem w/ different python versions - run: 'cd /usr/lib/python3/dist-packages && sudo cp apt_pkg.cpython-36m-x86_64-linux-gnu.so apt_pkg.so' - - name: Update apt-get - run: sudo apt-get update - - name: Fetch/update pylint and pytest - run: python3 -m pip install --upgrade --no-cache-dir pylint pytest - id: pylint-install - - name: Fetch source code - uses: actions/checkout@v2 - id: fetch-source - - name: Finding files - run: find . -type f -name "*.py" | grep -v 'tests/' > action_pylint_files.txt - id: find-python-files - - name: Install system requirements - shell: bash - run: 'sudo apt-get install -y python3-gdal gdal-bin libgdal-dev gcc g++ python3.7-dev' - id: install-system-reqs - - name: Install Python numpy - shell: bash - run: 'python3 -m pip install --upgrade --no-cache-dir numpy wheel' - id: install-python-numpy - - name: Install Python pygdal - shell: bash - run: 'sudo python3 -m pip install --no-cache-dir pygdal==2.2.3.5' - id: install-python-pygdal - - name: Run action pylint script - shell: bash - run: '[ -s "action_pylint.sh" ] && (chmod +x "action_pylint.sh" && ./action_pylint.sh) || (echo "Not running special action script - empty or not found")' - id: run-special-action-script - - name: Fetching pylint.rc file - run: wget https://raw.githubusercontent.com/AgPipeline/Organization-info/master/pylint.rc - id: fetch_pylint_resource - - name: Listing - run: ls -la - - name: Files to be linted - run: cat action_pylint_files.txt - - name: Running pylint - run: cat action_pylint_files.txt | xargs python3 -m pylint --rcfile ./pylint.rc - - name: Running pylint - run: find ./tests | grep '\.py' | xargs python3 -m pylint -d duplicate-code --rcfile ./pylint.rc - - name: Set execution permission for PyTest - run: chmod +x betydb2geojson.py - - name: run pytest - run: python3 -m pytest diff --git a/.github/workflows/shellscript_checks.yaml b/.github/workflows/shellscript_checks.yaml new file mode 100644 index 0000000..8c49473 --- /dev/null +++ b/.github/workflows/shellscript_checks.yaml @@ -0,0 +1,41 @@ +name: Enforcing shell script tests +on: + push: + branches: + - master + - develop + pull_request: + branches: + - master + - develop + tags: + - v* + +jobs: + testing: + runs-on: ubuntu-latest + name: Running testing + strategy: + matrix: + app: [shellcheck, shfmt] + include: + - app: shellcheck + shellcheck_opts: + 
shellcheck_disable: false + shfmt_disable: true + - app: shfmt + shfmt_opts: -i 2 -ci -w + shellcheck_disable: true + shfmt_disable: false + steps: + - name: Fetch source code + uses: actions/checkout@v2 + - name: shell check + uses: luizm/action-sh-checker@v0.1.8 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SHELLCHECK_OPTS: ${{ matrix.shellcheck_opts }} + SHFMT_OPTS: ${{ matrix.shfmt_opts }} + with: + sh_checker_shellcheck_disable: ${{ matrix.shellcheck_disable }} + sh_checker_shfmt_disable: ${{ matrix.shfmt_disable }} diff --git a/.github/workflows/short_workflow_check.sh b/.github/workflows/short_workflow_check.sh index e45b1e0..24a8b2d 100755 --- a/.github/workflows/short_workflow_check.sh +++ b/.github/workflows/short_workflow_check.sh @@ -18,45 +18,45 @@ CHECK_FILE="${TARGET_FOLDER}/orthomosaic_mask.tif" if [[ -f "${CHECK_FILE}" ]]; then echo "Mask file found: ${CHECK_FILE}" else - echo "Unable to find mask file: ${CHECK_FILE}"; - exit 1; + echo "Unable to find mask file: ${CHECK_FILE}" + exit 1 fi # Get all the folders and check the count -FOLDER_LIST=(`find "${TARGET_FOLDER}/" -maxdepth 1 -type d`) +# shellcheck disable=SC2207 +FOLDER_LIST=($(find "${TARGET_FOLDER}/" -maxdepth 1 -type d)) if [[ "${#FOLDER_LIST[@]}" == "${EXPECTED_NUM_FOLDERS}" ]]; then echo "Found expected number of folders: ${EXPECTED_NUM_FOLDERS}" else echo "Expected ${EXPECTED_NUM_FOLDERS} folders and found ${#FOLDER_LIST[@]}" - for i in $(seq 0 $(( ${#FOLDER_LIST[@]} - 1 ))) - do - echo "$(( ${i} + 1 )): ${FOLDER_LIST[$i]}" + for i in $(seq 0 $((${#FOLDER_LIST[@]} - 1))); do + echo "$((i + 1)): ${FOLDER_LIST[$i]}" done exit 10 fi # Check the expected number of output files -EXPECTED_CSV=(`find "${TARGET_FOLDER}/" -type f | grep 'canopycover\.csv'`) +# shellcheck disable=SC2207 +EXPECTED_CSV=($(find "${TARGET_FOLDER}/" -type f | grep 'canopycover\.csv')) if [[ "${#EXPECTED_CSV[@]}" == "${EXPECTED_NUM_CANOPYCOVER_CSV}" ]]; then echo "Found expected number of canopycover.csv files: ${EXPECTED_NUM_CANOPYCOVER_CSV}" else echo "Expected ${EXPECTED_NUM_CANOPYCOVER_CSV} canopycover.csv files but found ${#EXPECTED_CSV[@]}" - for i in $(seq 0 $(( ${#EXPECTED_CSV[@]} - 1 ))) - do - echo "$(( ${i} + 1 )): ${EXPECTED_CSV[$i]}" + for i in $(seq 0 $((${#EXPECTED_CSV[@]} - 1))); do + echo "$((i + 1)): ${EXPECTED_CSV[$i]}" done exit 20 fi # Check the expected number of image mask files -EXPECTED_MASK=(`find "${TARGET_FOLDER}/" -type f | grep 'orthomosaic_mask\.tif'`) +# shellcheck disable=SC2207 +EXPECTED_MASK=($(find "${TARGET_FOLDER}/" -type f | grep 'orthomosaic_mask\.tif')) if [[ "${#EXPECTED_MASK[@]}" == "${EXPECTED_NUM_MASK_TIF}" ]]; then echo "Found expected number of orthomosaic_mask.tif files: ${EXPECTED_NUM_MASK_TIF}" else echo "Expected ${EXPECTED_NUM_MASK_TIF} orthomosaic_mask.tif files but found ${#EXPECTED_MASK[@]}" - for i in $(seq 0 $(( ${#EXPECTED_MASK[@]} - 1 ))) - do - echo "$(( ${i} + 1 )): ${EXPECTED_MASK[$i]}" + for i in $(seq 0 $((${#EXPECTED_MASK[@]} - 1))); do + echo "$((i + 1)): ${EXPECTED_MASK[$i]}" done exit 30 fi diff --git a/.github/workflows/testing_checks.yaml b/.github/workflows/testing_checks.yaml new file mode 100644 index 0000000..343ba37 --- /dev/null +++ b/.github/workflows/testing_checks.yaml @@ -0,0 +1,93 @@ +name: Enforcing tests +on: + push: + branches: + - master + - develop + pull_request: + branches: + - master + - develop + tags: + - v* + +jobs: + testing: + runs-on: ubuntu-latest + name: Running testing + strategy: + matrix: + app: [pylint, pytest] + include: + - app: 
pylint + pip_installs: pylint pytest + test_command: (cat action_pylint_files.txt | xargs python3 -m pylint --rcfile ./pylint.rc) && (find ./tests | grep '\.py' | xargs python3 -m pylint -d duplicate-code --rcfile ./pylint.rc) + - app: pytest + pip_installs: pytest pytest-cov + test_command: python3 -m pytest --cov=. -rpP --cov-report=xml > coverage.txt + artifacts: coverage.txt + steps: + - name: Current python version + run: python3 --version || echo python3 not installed + - name: Install Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Updated python version + run: python3 --version + - name: PYTHONPATH environment variable + run: echo "PYTHONPATH is ${PYTHONPATH}" + - name: Update pip + run: python3 -m pip install --upgrade --no-cache-dir pip + - name: Fetch/update setuptools + run: python3 -m pip install --upgrade --no-cache-dir setuptools + - name: Install python-apt + run: sudo apt-get install -y python-apt + - name: HACK to fix apt-get update problem w/ different python versions + run: 'cd /usr/lib/python3/dist-packages && sudo cp apt_pkg.cpython-36m-x86_64-linux-gnu.so apt_pkg.so' + - name: Update apt-get + run: sudo apt-get update + - name: Fetch/update testing pip installations + run: python3 -m pip install --upgrade --no-cache-dir ${{ matrix.pip_installs }} + - name: Fetch source code + uses: actions/checkout@v2 + - name: Finding files + run: find . -type f -name "*.py" | grep -v 'tests/' > action_pylint_files.txt + - name: Install system requirements + shell: bash + run: 'sudo apt-get install -y python3-gdal gdal-bin libgdal-dev gcc g++ python3.7-dev' + - name: Install Python numpy and other modules + shell: bash + run: 'python3 -m pip install --upgrade --no-cache-dir numpy wheel requests' + - name: Install Python pygdal + shell: bash + run: 'python3 -m pip install --no-cache-dir pygdal==2.2.3.5' + - name: Install system requirements from source + shell: bash + run: 'if [ -s "packages.txt" ]; then (cat packages.txt | sudo xargs apt-get install -y --no-install-recommends) || (echo "Failed to install additional packages" && exit 1); fi' + - name: Install Python requirements from source + shell: bash + run: 'if [ -s "requirements.txt" ]; then (python3 -m pip install --no-cache-dir -r requirements.txt) || (echo "Failed to install Python requirements" && exit 1); fi' + - name: Run action pylint script + shell: bash + run: 'if [ -s ".github/workflows/action_pylint.sh" ]; then (chmod +x ".github/workflows/action_pylint.sh" && ./.github/workflows/action_pylint.sh) || (echo "Error running shell script" && exit 1); fi' + - name: Fetching pylint.rc file + run: wget https://raw.githubusercontent.com/AgPipeline/Organization-info/master/pylint.rc + if: ${{ matrix.app == 'pylint' }} + - name: Set execution permission for testing + run: chmod +x betydb2geojson.py + - name: Listing + run: ls -la + - name: Files to be tested + run: cat action_pylint_files.txt + - name: Running test + run: ${{ matrix.test_command }} + - name: Upload testing artifact + uses: actions/upload-artifact@v2 + with: + name: testing_artifacts + path: ${{ matrix.artifacts }} + if: ${{ matrix.artifacts }} + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + if: ${{ matrix.app == 'pytest' }} diff --git a/README.md b/README.md index c3ed64c..6feb4e7 100644 --- a/README.md +++ b/README.md @@ -11,32 +11,55 @@ After ODM has created the orthomosaic, the file is processed to produce plot-lev The [Scientific Filesystem](https://sci-f.github.io/) is used to provide the
entry points for the different tasks available (known as "apps" in the Scientific Filesystem). These apps are used by the above workflows and can be used to create custom workflows outside of what's provided. -## Running the workflow - -This section contains different ways of executing an existing Docker workflow container. - -### Terms used +## Table of contents +- [Terms used](#terms) +- [Running the workflow](#run_workflow) + - [Prerequisites](#prerequisites) + - [Workflow](#workflows) + - [Canopy Cover: Orthomosaic and plot boundaries](#om_can_shp) + - [Canopy Cover: OpenDroneMap and plot boundaries](#opendm_can_shp) + - [Clean](#workflow_clean) +- [Running Other Apps](#apps) +- [Build The Container](#build) +- [Monitoring the Workflow](#monitoring-the-workflow) +- [A Note On Docker Sibling Containers](#docker_sibling_containers) +- [Acceptance Testing](#acceptance_testing) + - [PyLint and PyTest](#pylint_pytest) + - [shellcheck and shfmt](#shellcheck_shfmt) + - [Docker Testing](#test_docker) + +## Terms used Here are the definitions of some of the terms we use, with links to additional information -* BETYdb +* apps +This term refers to the entry points in a [Scientific Filesystem](#def_scif) solution. + +* BETYdb [BETYdb](https://www.betydb.org/) is a database that can be used to store trait and yield data. It can be used in the processing pipeline as a source of plot geometry for clipping. -* GeoJSON +* GeoJSON [GeoJSON](https://datatracker.ietf.org/doc/rfc7946/) is a JSON format for specifying geographic shape information. This is the default format for specifying plot geometries. -* Scientific Filesystem +* Makeflow +We use [Makeflow](https://cctools.readthedocs.io/en/latest/man_pages/makeflow/) to run the apps defined with the Scientific Filesystem. +This tool enables error recovery, automatic retries, distributed computing, and many other features. + +* Scientific Filesystem We use the [Scientific Filesystem](https://sci-f.github.io/) to organize our applications, provide ease of execution, and assist in reproducibility. -* Shapefile +* Shapefile In this document we use the term "shapefile" to refer to all the files ending in `.shp`, `.shx`, `.dbf`, and `.prj` that have the same name. It can be used to specify geographic information and shapes associated with plot geometries. -### Prerequisites +## Running the workflow -- Docker needs to be installed to run the workflows. [Get Docker](https://docs.docker.com/get-docker/) +This section contains different ways of executing an existing Docker workflow container. + +### Prerequisites + +- Docker needs to be installed to run the workflows. How to [get Docker](https://docs.docker.com/get-docker/) - Create an `inputs` folder in the current working directory (or other folder of your choice) ```bash mkdir -p "${PWD}/inputs" ``` @@ -45,26 +68,32 @@ mkdir -p "${PWD}/inputs" ``` ```bash mkdir -p "${PWD}/outputs" ``` -- Create an output folder. -The `checkpoints` folder will contain the generated workflow checkpoint data allowing easy recovery from an error and helps prevent re-running an already completed workflow. -Removing the workflow checkpoint files will enable a complete re-run of the workflow: +- Create a `checkpoints` folder. +The `checkpoints` folder will contain the generated workflow checkpoint data, allowing easy error recovery and preventing re-runs of an already completed workflow. 
+Removing the workflow checkpoint files will enable a complete re-run of the workflow: ```bash mkdir -p "${PWD}/checkpoints" ``` -### Canopy Cover: Orthomosaic and plot boundaries +### Workflow + +There are workflow [apps](#def_apps) available that run a predefined series of apps to completion. +This section describes how to run these workflows using sample data. +It's expected that, after running these examples, the workflow commands can be customized and run in other situations. + +#### Canopy Cover: Orthomosaic and plot boundaries The following steps are used to generate plot-level canopy cover values for a georeferenced orthomosaic image and plot boundaries using geographic information. We will first present the steps and then provide an example. 1. Create a folder and copy the orthomosaic into it -2. If using a [shapefile](#shapefile) or [GeoJSON](#geojson) file, copy those into the same folder as the orthomosaic image +2. If using a [shapefile](#def_shapefile) or [GeoJSON](#def_geojson) file, copy those into the same folder as the orthomosaic image 3. Create another folder for the output folders and files -4. Run the docker container's `short_workflow` app specifying the name of the orthomosaic and either the name of the shapefile or geojson file, or the URL of they [BETYdb](#betydb) instance to query for plot boundaries +4. Run the docker container's `short_workflow` app specifying the name of the orthomosaic and either the name of the shapefile or geojson file, or the URL of the [BETYdb](#def_betydb) instance to query for plot boundaries _NOTE_: the orthomosaic must be specified as the file name without any extension; in other words, leave off the `.tif` when specifying it on the Docker command line. -#### For example: +##### For example: You can download a sample dataset of files (archived) with names corresponding to those listed here from CyVerse using the following command. ```bash @@ -94,7 +123,7 @@ Each plot will contain two key outputs of interest: * [In the future](https://github.com/AgPipeline/issues-and-projects/issues/210), these CSV files will be aggregated into a single file for each run. The file with "geostreams" in its name can be uploaded to TERRAREF's Geostreams database. -### Canopy Cover: OpenDroneMap and plot boundaries +#### Canopy Cover: OpenDroneMap and plot boundaries The following steps are used when you want OpenDroneMap (ODM) to create the orthomosaic image that's then used to calculate the canopy cover values. As with the [previous example](#om_can_shp), we will list the steps and then provide an example. @@ -104,15 +133,15 @@ Please read our section on [Docker Sibling Containers](#docker_sibling_container 1. Create two named Docker volumes to use for processing data; one for input files and one for output files - the same volume can be used for both if desired 2. Copy the source drone images into a folder -3. If using a [shapefile](#shapefile) or [GeoJSON](#geojson) file, copy those into the same folder as the drone images +3. If using a [shapefile](#def_shapefile) or [GeoJSON](#def_geojson) file, copy those into the same folder as the drone images 4. Copy the experiment metadata file into the same folder as the drone images 5. Copy the folder contents of the drone images folder that was just prepared onto the input named volume 6. Create another folder for the output folders and files -7. 
Run the docker container's `odm_workflow` app specifying either the name of the shapefile or geojson file, or the URL of they [BETYdb](#betydb) instance to query for plot boundaries, and the two named volumes +7. Run the docker container's `odm_workflow` app specifying either the name of the shapefile or geojson file, or the URL of the [BETYdb](#def_betydb) instance to query for plot boundaries, and the two named volumes 8. Copy the resulting files off the output named volume to the local folder 9. Clean up the named volumes -#### For example: +##### For example: You can download a sample dataset of files (archived) with names corresponding to those listed here from CyVerse using the following command. ```bash @@ -143,7 +172,7 @@ docker run --rm -v "${PWD}/inputs:/sources" -v my_input:/input --entrypoint bash In step 4 we run the workflow to generate the orthomosaic image using ODM (OpenDroneMap) and calculate plot-level canopy cover: ```bash -docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}/inputs:/scif/data/odm_workflow/images" -v my_output:/output -v "${PWD}/checkpoints:/scif/data/odm_workflow" -e INPUT_VOLUME=my_input -e OUTPUT_VOLUME=my_output -e "INPUT_IMAGE_FOLDER=/images" -e "OUTPUT_FOLDER=/output" agdrone/canopycover-workflow:latest run odm_workflow plot_shapes.shp my_input my_output +docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}/inputs:/scif/data/odm_workflow/images" -v my_output:/output -v "${PWD}/checkpoints:/scif/data/odm_workflow" -e INPUT_VOLUME=my_input -e OUTPUT_VOLUME=my_output -e "INPUT_IMAGE_FOLDER=/images" -e "OUTPUT_FOLDER=/output" agdrone/canopycover-workflow:latest run odm_workflow plot_shapes.shp ``` and we wait until it's finished. @@ -158,13 +187,15 @@ Finally, in step 6 we clean up the named volumes by deleting everything on them: docker run --rm -v my_input:/input -v my_output:/output --entrypoint bash agdrone/canopycover-workflow:latest -c 'rm -r /input/* && rm -r /output/*' ``` -### Clean +### Clean runs Cleaning up a workflow run will delete workflow-generated files and folders. Be sure to copy the data you want to a safe place before cleaning. By adding the `--clean` flag to the end of the command line used to execute the workflow, the artifacts of a previous run will be cleaned up. +It's recommended, but not required, to run the clean app between processing runs, either with this command or through other means. + **Example:** The following docker command line will clean up the files generated using the [Canopy Cover: Orthomosaic and Shapefile](#om_can_shp) example above. @@ -173,7 +204,11 @@ docker run --rm -v "${PWD}/inputs:/scif/data/odm_workflow/images" -v "${PWD}/out ``` Notice the additional parameter at the end of the command line (`--clean`). -## Build the container +## Running Other Apps + +TBD + +## Build The Container This section describes how the Docker container can be built. Please refer to the [Docker](https://www.docker.com/) documentation for more information on building Docker containers. ```bash cp jx-args.json.example jx-args.json docker build --progress=plain -t agdrone/canopycover-workflow:latest . ``` +## Monitoring the Workflow + +To monitor the running workflows, you will need to use the `checkpoints` folder as described in the [Prerequisites](#prerequisites) section. + +[Makeflow](#def_makeflow) has [monitoring tools](https://cctools.readthedocs.io/en/latest/man_pages/) available that can be used to follow the progress of the workflows. +The [makeflow_monitor](https://cctools.readthedocs.io/en/latest/man_pages/makeflow_monitor/) tool can be a good starting point.
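+For example, the following is a minimal monitoring sketch (hedged: it assumes the cctools suite that provides `makeflow_monitor` is installed on the host, and that the `checkpoints` folder is mounted as in the examples above, so the Makeflow log appears at `checkpoints/workflow.jx.makeflowlog`): +```bash +# Follow the workflow's progress by watching its Makeflow log +makeflow_monitor "${PWD}/checkpoints/workflow.jx.makeflowlog" +```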
+ ## A Note On Docker Sibling Containers The OpenDroneMap workflow uses sibling containers. @@ -190,3 +232,66 @@ This is a technique for having one Docker container start another Docker contain We plan to find a secure alternative for future releases (see [AgPipeline/issues-and-projects#240](https://github.com/AgPipeline/issues-and-projects/issues/240)), primarily because of a potential security risk that makes this approach not suitable for shared cluster computing environments (it is also a concern for containers such as websites and databases that are exposed to the internet, but that is not the case here). You can just as safely run these workflows on your own computer as you can any trusted Docker container. However, with sibling containers the second container requires administrator ("root") privileges - please see [Docker documentation](https://docs.docker.com/engine/security/security/) for more details. + +## Acceptance Testing + +There are automated test suites that are run via [GitHub Actions](https://docs.github.com/en/actions). +In this section we provide details on these tests so that they can be run locally as well. + +These tests are run when a [Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) or [push](https://docs.github.com/en/github/using-git/pushing-commits-to-a-remote-repository) occurs on the `develop` or `master` branches. +There may be other instances when these tests are automatically run, but these are considered the mandatory events and branches. + +### PyLint and PyTest + +These tests are run against any Python scripts that are in the repository. + +[PyLint](https://www.pylint.org/) is used both to check that Python code conforms to the recommended coding style and to check for syntax errors. +The default behavior of PyLint is modified by the `pylint.rc` file in the [Organization-info](https://github.com/AgPipeline/Organization-info) repository. +Please also refer to our [Coding Standards](https://github.com/AgPipeline/Organization-info#python) for information on how we use [pylint](https://www.pylint.org/). + +The following command can be used to fetch the `pylint.rc` file: +```bash +wget https://raw.githubusercontent.com/AgPipeline/Organization-info/master/pylint.rc +``` + +Assuming the `pylint.rc` file is in the current folder, the following command can be used against the `betydb2geojson.py` file: +```bash +# Assumes Python3.7+ is default Python version +python -m pylint --rcfile ./pylint.rc betydb2geojson.py +``` + +[PyTest](https://docs.pytest.org/en/stable/) is used to run unit and integration testing. +The following command can be used to run the test suite: +```bash +# Assumes Python3.7+ is default Python version +python -m pytest -rpP +``` + +If [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/) is installed, it can be used to generate a code coverage report as part of running PyTest. +The code coverage report shows how much of the code has been tested; it doesn't indicate **how well** that code has been tested. +The modified PyTest command line including coverage is: +```bash +# Assumes Python3.7+ is default Python version +python -m pytest --cov=. -rpP +``` + +### shellcheck and shfmt + +These tests are run against shell scripts within the repository. +Shell scripts are expected to pass both tools with no reported issues. 
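+ +Both tools can also be run over every shell script at once; the following is a hedged convenience sketch (it assumes all of the repository's shell scripts carry the `.sh` extension): +```bash +# Lint all scripts, then show formatting differences without rewriting any files +find . -type f -name '*.sh' -print0 | xargs -0 shellcheck +find . -type f -name '*.sh' -print0 | xargs -0 shfmt -i 2 -ci -d +```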
+ +[shellcheck](https://www.shellcheck.net/) is used to enforce modern shell script coding practices. +The following command runs `shellcheck` against the "prep-canopy-cover.sh" bash shell script: +```bash +shellcheck prep-canopy-cover.sh +``` + +[shfmt](https://github.com/mvdan/sh#shfmt) is used to ensure scripts conform to Google's shell script [style guide](https://google.github.io/styleguide/shellguide.html). +The following command runs `shfmt` against the "prep-canopy-cover.sh" bash shell script: +```bash +shfmt -i 2 -ci -w prep-canopy-cover.sh +``` + +### Docker Testing + +The Docker testing workflow replicates the examples in this document to ensure they continue to work. diff --git a/cyverse_short_workflow.sh b/cyverse_short_workflow.sh index 71e11b8..44c56f8 100644 --- a/cyverse_short_workflow.sh +++ b/cyverse_short_workflow.sh @@ -1,6 +1,6 @@ #!/bin/bash -WORKING_DIR=`pwd` +WORKING_DIR=$(pwd) # Copy the files to where we want them to be echo "Copying source files from current folder (${WORKING_DIR}) to target location" @@ -13,31 +13,29 @@ mkdir -p /output echo "Discovering source image file and possible plot shapes file (.shp, or .json)" SOURCE_IMAGE="" PLOT_SHAPE="" -for ONE_FILE in `find "${WORKING_DIR}" -type f` -do +while IFS= read -r -d '' ONE_FILE; do case "${ONE_FILE: -4}" in - ".tif") - SOURCE_IMAGE="${ONE_FILE}" - ;; - ".shp") - PLOT_SHAPE=${ONE_FILE#"$(dirname ${ONE_FILE})/"} - ;; + ".tif") + SOURCE_IMAGE="${ONE_FILE}" + ;; + ".shp") + PLOT_SHAPE=${ONE_FILE#"$(dirname "${ONE_FILE}")/"} + ;; esac case "${ONE_FILE: -5}" in - ".tiff") - SOURCE_IMAGE="${ONE_FILE}" - ;; - ".json") - PLOT_SHAPE=${ONE_FILE#"$(dirname ${ONE_FILE})/"} - ;; + ".tiff") + SOURCE_IMAGE="${ONE_FILE}" + ;; + ".json") + PLOT_SHAPE=${ONE_FILE#"$(dirname "${ONE_FILE}")/"} + ;; esac -done +done < <(find "${WORKING_DIR}" -type f -print0) # Determine if there's a URL in the command line if there isn't a file if [[ "${PLOT_SHAPE}" == "" ]]; then echo "Searching parameters for BETYdb URL" - for ONE_PARAM in "${@}" - do + for ONE_PARAM in "${@}"; do if [[ "${ONE_PARAM:0:4}" == "http" ]]; then PLOT_SHAPE="${ONE_PARAM}" fi @@ -47,7 +45,7 @@ fi # Get the desired file name format for the image echo "Manipulating image file name for processing: ${SOURCE_IMAGE}" if [[ "${SOURCE_IMAGE}" != "" ]]; then - BASE_NAME=${SOURCE_IMAGE#"$(dirname ${SOURCE_IMAGE})/"} + BASE_NAME=${SOURCE_IMAGE#"$(dirname "${SOURCE_IMAGE}")/"} echo " BASE NAME: ${BASE_NAME}" SOURCE_IMAGE=${BASE_NAME%.*} echo " final: ${SOURCE_IMAGE}" diff --git a/generate_geojson.sh b/generate_geojson.sh index 1889fe1..f493cd4 100755 --- a/generate_geojson.sh +++ b/generate_geojson.sh @@ -13,6 +13,7 @@ if [[ "${3}" != *"--clean"* ]]; then elif [[ ${FILE_PARAM} == *.shp ]]; then scif run shp2geojson "${FILE_PARAM}" "${DESTINATION_FILE}" elif [[ ${FILE_PARAM} == *.json || ${FILE_PARAM} == *.geojson ]]; then + # shellcheck disable=SC2154 cp "${SCIF_APPDATA_odm_workflow}/images/${FILE_PARAM}" "${DESTINATION_FILE}" else echo "Unknown plot geometries file specified: \"${FILE_PARAM}\"" @@ -20,4 +21,4 @@ if [[ "${3}" != *"--clean"* ]]; then fi else rm "${DESTINATION_FILE}" -fi \ No newline at end of file +fi diff --git a/prep-canopy-cover.sh b/prep-canopy-cover.sh index fde7ad0..ee36890 100644 --- a/prep-canopy-cover.sh +++ b/prep-canopy-cover.sh @@ -2,24 +2,24 @@ ORTHOMOSAIC_NAME="${1}_mask.tif" OUTPUT_FOLDER="/output" echo "Orthomosaic name to look for: ${ORTHOMOSAIC_NAME}" -clips=( "${OUTPUT_FOLDER}/*" ) -echo ${clips} +# shellcheck disable=SC2206 +clips=(${OUTPUT_FOLDER}/*) +echo 
"${clips[@]}" found_files=0 -echo "{\"CANOPYCOVER_FILE_LIST\": [" >> "${OUTPUT_FOLDER}/canopycover_fileslist.json" +echo "{\"CANOPYCOVER_FILE_LIST\": [" >>"${OUTPUT_FOLDER}/canopycover_fileslist.json" sep="" -for entry in ${clips[@]} -do +for entry in "${clips[@]}"; do possible="${entry}/${ORTHOMOSAIC_NAME}" echo "Checking possible ${possible}" if [ -f "${possible}" ]; then - echo "${sep}{\"FILE\": \"${possible}\"," >> "${OUTPUT_FOLDER}/canopycover_fileslist.json" - echo "\"DIR\": \"${entry}/\"}" >> "${OUTPUT_FOLDER}/canopycover_fileslist.json" + echo "${sep}{\"FILE\": \"${possible}\"," >>"${OUTPUT_FOLDER}/canopycover_fileslist.json" + echo "\"DIR\": \"${entry}/\"}" >>"${OUTPUT_FOLDER}/canopycover_fileslist.json" sep="," ((found_files++)) fi done -echo "]}" >> "${OUTPUT_FOLDER}/canopycover_fileslist.json" +echo "]}" >>"${OUTPUT_FOLDER}/canopycover_fileslist.json" if [ "$found_files" -eq "0" ]; then rm "${OUTPUT_FOLDER}/canopycover_fileslist.json" diff --git a/scif_app_recipes/canopycover_v0.0.1_ubuntu16.04.scif b/scif_app_recipes/canopycover_v0.0.1_ubuntu16.04.scif index a1fee99..8ba92d6 100644 --- a/scif_app_recipes/canopycover_v0.0.1_ubuntu16.04.scif +++ b/scif_app_recipes/canopycover_v0.0.1_ubuntu16.04.scif @@ -1,8 +1,8 @@ %appinstall canopycover # Download canopycover code. In the future use pip/conda install. - wget -O canopycover.tar.gz https://github.com/AgPipeline/transformer-canopycover/archive/v1.2.tar.gz + wget -O canopycover.tar.gz https://github.com/AgPipeline/transformer-canopycover/archive/v1.3.tar.gz tar xvf canopycover.tar.gz - mv transformer-canopycover-1.2 src + mv transformer-canopycover-1.3 src # Install packages needed python3 -m venv --system-site-packages .venv diff --git a/scif_app_recipes/ndcctools_v7.1.2_ubuntu16.04.scif b/scif_app_recipes/ndcctools_v7.1.2_ubuntu16.04.scif index 27ed7d4..8eb22ec 100644 --- a/scif_app_recipes/ndcctools_v7.1.2_ubuntu16.04.scif +++ b/scif_app_recipes/ndcctools_v7.1.2_ubuntu16.04.scif @@ -3,7 +3,7 @@ conda create --prefix "${PWD}/conda" --yes -c conda-forge ndcctools %apprun odm_workflow - echo "{\"INPUT_IMAGE_FOLDER\": \"${INPUT_IMAGE_FOLDER}\", \"OUTPUT_FOLDER\": \"/output\", \"INPUT_VOLUME\": \"${2}\", \"OUTPUT_VOLUME\": \"${3}\" }" > ${SCIF_APPDATA}/jx-params.json + echo "{\"INPUT_IMAGE_FOLDER\": \"${INPUT_IMAGE_FOLDER}\", \"OUTPUT_FOLDER\": \"/output\", \"INPUT_VOLUME\": \"${INPUT_VOLUME}\", \"OUTPUT_VOLUME\": \"${OUTPUT_VOLUME}\" }" > ${SCIF_APPDATA}/jx-params.json if [ ! -f "/output/plots.json" ]; then ${SCIF_APPROOT_odm_workflow}/src/generate_geojson.sh "${1}" "${SCIF_APPDATA_odm_workflow}/plots.json"; fi conda run --prefix "${SCIF_APPROOT}/conda" makeflow \ --jx \ @@ -14,7 +14,7 @@ --change-directory="${SCIF_APPDATA}" \ --makeflow-log="${SCIF_APPDATA}/workflow.jx.makeflowlog" \ --batch-log="${SCIF_APPDATA}/workflow.jx.batchlog" \ - ${4} \ + ${2} \ "/scif/apps/odm_workflow/src/workflow.jx" if [ ! 
-f "/output/canopycover_fileslist.json" ]; then "/scif/apps/odm_workflow/src/prep-canopy-cover.sh" "orthomosaic"; fi conda run --prefix "${SCIF_APPROOT}/conda" makeflow \ @@ -26,10 +26,10 @@ --change-directory="${SCIF_APPDATA}" \ --makeflow-log="${SCIF_APPDATA}/canopy-cover.jx.makeflowlog" \ --batch-log="${SCIF_APPDATA}/canopy-cover.jx.batchlog" \ - ${4} \ + ${2} \ "${SCIF_APPROOT}/src/canopy-cover.jx" - if [ -f "/output/plots.json" ] && [[ "${4}" =~ "--clean" ]]; then echo "Removing plots.json" && rm "/output/plots.json"; fi - if [ -f "/output/canopycover_fileslist.json" ] && [[ "${4}" =~ "--clean" ]]; then echo "Removing fileslist.json" && rm "/output/canopycover_fileslist.json"; fi + if [ -f "/output/plots.json" ] && [[ "${2}" =~ "--clean" ]]; then echo "Removing plots.json" && rm "/output/plots.json"; fi + if [ -f "/output/canopycover_fileslist.json" ] && [[ "${2}" =~ "--clean" ]]; then echo "Removing fileslist.json" && rm "/output/canopycover_fileslist.json"; fi %apphelp odm_workflow This app provides an entrypoint to the ODM (OpenDroneMap) makeflow app. diff --git a/scif_app_recipes/plotclip_v0.0.1_ubuntu16.04.scif b/scif_app_recipes/plotclip_v0.0.1_ubuntu16.04.scif index 6b8cbeb..5fa17fb 100644 --- a/scif_app_recipes/plotclip_v0.0.1_ubuntu16.04.scif +++ b/scif_app_recipes/plotclip_v0.0.1_ubuntu16.04.scif @@ -1,8 +1,8 @@ %appinstall plotclip # Download plotclip code. In the future use pip/conda install. - wget -O plotclip.tar.gz https://github.com/AgPipeline/transformer-plotclip/archive/v2.3.tar.gz + wget -O plotclip.tar.gz https://github.com/AgPipeline/transformer-plotclip/archive/v2.4.tar.gz tar xvf plotclip.tar.gz - mv transformer-plotclip-2.3 src + mv transformer-plotclip-2.4 src # Install packages needed python3 -m venv --system-site-packages .venv diff --git a/scif_app_recipes/soilmask_v0.0.1_ubuntu16.04.scif b/scif_app_recipes/soilmask_v0.0.1_ubuntu16.04.scif index 4c9d550..d7b40b6 100644 --- a/scif_app_recipes/soilmask_v0.0.1_ubuntu16.04.scif +++ b/scif_app_recipes/soilmask_v0.0.1_ubuntu16.04.scif @@ -1,8 +1,8 @@ %appinstall soilmask # Download soilmask code. In the future use pip/conda install. - wget -O soilmask.tar.gz https://github.com/AgPipeline/transformer-soilmask/archive/v2.1.tar.gz + wget -O soilmask.tar.gz https://github.com/AgPipeline/transformer-soilmask/archive/v2.2.tar.gz tar xvzf soilmask.tar.gz - mv transformer-soilmask-2.1 src + mv transformer-soilmask-2.2 src # Install packages needed python3 -m venv --system-site-packages .venv