diff --git a/.github/workflows/testing_checks.yaml b/.github/workflows/testing_checks.yaml
index dd730aa..23808d9 100644
--- a/.github/workflows/testing_checks.yaml
+++ b/.github/workflows/testing_checks.yaml
@@ -51,7 +51,7 @@ jobs:
       - name: Fetch source code
         uses: actions/checkout@v2
       - name: Finding files to process
-        run: find . -type f -name "*.py" | grep -v '/tests/' > action_test_files.txt
+        run: find . -type f -name "*.py" > action_test_files.txt
       - name: Install system requirements
         shell: bash
         run: 'sudo apt-get install -y python3-gdal gdal-bin libgdal-dev gcc g++ python3.8-dev'
@@ -69,6 +69,7 @@ jobs:
         run: 'if [ -s "requirements.txt" ]; then (python3 -m pip install --no-cache-dir -r requirements.txt) || (echo "Failed to install Python requirements" && exit 1); fi'
       - name: Run action pylint script
         shell: bash
+        if: ${{ matrix.app == 'pylint' }}
         run: 'if [ -s ".github/workflows/action_pylint.sh" ]; then (chmod +x ".github/workflows/action_pylint.sh" && ./.github/workflows/action_pylint.sh) || (echo "Error running shell script" && exit 1); fi'
       - name: Fetching pylint.rc file
         if: ${{ matrix.app == 'pylint' }}
diff --git a/canopycover.py b/canopycover.py
index 98cea7c..5c6ed5a 100755
--- a/canopycover.py
+++ b/canopycover.py
@@ -10,6 +10,7 @@ import subprocess
 import tempfile
 from typing import Union
 
+import cv2
 import numpy as np
 from agpypeline import entrypoint, algorithm, geoimage
 from agpypeline.checkmd import CheckMD
@@ -88,6 +89,24 @@
     return pixels
 
 
+def _add_image_mask_non_geo(pxarray: np.ndarray) -> np.ndarray:
+    """Adds an alpha channel to an image that isn't geo-referenced
+    Arguments:
+        pxarray: the image array to add an alpha channel to
+    Return:
+        Returns the image with an alpha channel added
+    Note: no check is made to see if the image already has an alpha channel
+    """
+    rolled_image = np.rollaxis(pxarray, 0, 3)
+    channel1 = rolled_image[:, :, 0]
+    channel2 = rolled_image[:, :, 1]
+    channel3 = rolled_image[:, :, 2]
+    alpha = np.ones(channel1.shape, dtype=channel1.dtype) * 255
+    # Disable this warning since 'cv2.merge' exists
+    # pylint: disable=no-member
+    return np.rollaxis(cv2.merge((channel1, channel2, channel3, alpha)), 2, 0)
+
+
 def get_fields() -> list:
     """Returns the supported field names as a list
     """
@@ -357,9 +376,9 @@ perform_process(self, environment: Environment, check_md: CheckMD, transform
                 continue
 
             image_bounds = geoimage.get_image_bounds(one_file)
-            if not image_bounds:
-                logging.info("Image file does not appear to be geo-referenced '%s'", one_file)
-                continue
+            # if not image_bounds:
+            #     logging.info("Image file does not appear to be geo-referenced '%s'", one_file)
+            #     continue
 
             overlap_plots = [os.path.basename(os.path.dirname(one_file))]
 
@@ -380,7 +399,7 @@
                 image_to_use = pxarray
             else:
                 logging.info('Adding missing alpha channel to loaded image from "%s"', one_file)
-                image_to_use = _add_image_mask(one_file)
+                image_to_use = _add_image_mask(one_file) if image_bounds else _add_image_mask_non_geo(pxarray)
             del pxarray  # Potentially free up memory
 
             logging.debug("Calculating canopy cover")
diff --git a/requirements.txt b/requirements.txt
index f22bb9d..989ade2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,2 @@
+opencv-contrib-python-headless
 agpypeline
\ No newline at end of file
diff --git a/test_data/three_channel_mask.tif b/test_data/three_channel_mask.tif
new file mode 100644
index 0000000..4765bb1
Binary files /dev/null and b/test_data/three_channel_mask.tif differ
diff --git a/tests/test_canopy_cover.py b/tests/test_canopy_cover.py
index e3ef004..4320d52 100644
--- a/tests/test_canopy_cover.py
+++ b/tests/test_canopy_cover.py
@@ -23,6 +23,7 @@
 # Path to files to use for testing
 META = os.path.abspath(os.path.join(TESTING_JSON_FILE_PATH, 'meta.yaml'))
 INPUT1 = os.path.abspath(os.path.join(TESTING_JSON_FILE_PATH, 'rgb_17_7_W.tif'))
+INPUT_NO_ALPHA = os.path.abspath(os.path.join(TESTING_JSON_FILE_PATH, 'three_channel_mask.tif'))
 
 
 def random_string():
@@ -83,7 +84,8 @@ def test_no_metadata():
         results = os.path.join(out_dir, 'result.json')
         assert os.path.isfile(results)
 
-        result = json.load(open(results))
+        with open(results) as res_file:
+            result = json.load(res_file)
         assert 'files' in result
         out_files = [f['path'] for f in result['files']]
 
@@ -92,16 +94,60 @@
         assert os.path.isfile(canopycover)
 
-        canopy = csv.DictReader(open(canopycover))
-        canopy_flds = [
-            'local_datetime', 'canopy_cover', 'species', 'site', 'method'
-        ]
-        assert canopy.fieldnames == canopy_flds
+        with open(canopycover) as cc_file:
+            canopy = csv.DictReader(cc_file)
+            canopy_flds = [
+                'local_datetime', 'canopy_cover', 'species', 'site', 'method'
+            ]
+            assert canopy.fieldnames == canopy_flds
 
-        canopy_data = list(canopy)
-        assert len(canopy_data) == 1
+            canopy_data = list(canopy)
+            assert len(canopy_data) == 1
 
-        assert canopy_data[0]['canopy_cover'] == '99.8'
+            assert canopy_data[0]['canopy_cover'] == '99.8'
+
+    finally:
+        if os.path.isdir(out_dir):
+            rmtree(out_dir)
+
+
+def test_no_metadata_no_alpha():
+    """ Run with a file that doesn't have an alpha channel, and with no metadata"""
+    out_dir = random_string()
+
+    # This ought not be necessary as the program *should*
+    # create it; for now, we'll create the output dir.
+    os.makedirs(out_dir)
+
+    try:
+        cmd = f'{SOURCE_PATH} --working_space {out_dir} {INPUT_NO_ALPHA}'
+        ret_val, _ = getstatusoutput(cmd)
+        assert ret_val == 0
+
+        results = os.path.join(out_dir, 'result.json')
+        assert os.path.isfile(results)
+
+        with open(results) as res_file:
+            result = json.load(res_file)
+        assert 'files' in result
+        out_files = [f['path'] for f in result['files']]
+
+        canopycover = f'{out_dir}/canopycover.csv'
+        assert canopycover in out_files
+
+        assert os.path.isfile(canopycover)
+
+        with open(canopycover) as cc_file:
+            canopy = csv.DictReader(cc_file)
+            canopy_flds = [
+                'local_datetime', 'canopy_cover', 'species', 'site', 'method'
+            ]
+            assert canopy.fieldnames == canopy_flds
+
+            canopy_data = list(canopy)
+            assert len(canopy_data) == 1
+
+            assert canopy_data[0]['canopy_cover'] == '1.05'
 
     finally:
         if os.path.isdir(out_dir):
             rmtree(out_dir)
@@ -199,7 +245,8 @@ def test_good_input():
         results = os.path.join(out_dir, 'result.json')
         assert os.path.isfile(results)
 
-        result = json.load(open(results))
+        with open(results) as res_file:
+            result = json.load(res_file)
         assert 'files' in result
         out_files = [f['path'] for f in result['files']]
 
@@ -208,16 +255,17 @@
         assert os.path.isfile(canopycover)
 
-        canopy = csv.DictReader(open(canopycover))
-        canopy_flds = [
-            'local_datetime', 'canopy_cover', 'species', 'site', 'method'
-        ]
-        assert canopy.fieldnames == canopy_flds
+        with open(canopycover) as cc_file:
+            canopy = csv.DictReader(cc_file)
+            canopy_flds = [
+                'local_datetime', 'canopy_cover', 'species', 'site', 'method'
+            ]
+            assert canopy.fieldnames == canopy_flds
 
-        canopy_data = list(canopy)
-        assert len(canopy_data) == 1
+            canopy_data = list(canopy)
+            assert len(canopy_data) == 1
 
-        assert canopy_data[0]['canopy_cover'] == '99.8'
+            assert canopy_data[0]['canopy_cover'] == '99.8'
 
     finally:
         if os.path.isdir(out_dir):