diff --git a/README.rst b/README.rst
index 4d7f0b8..6fcb131 100644
--- a/README.rst
+++ b/README.rst
@@ -36,7 +36,7 @@ Features
 * Interpret and upload csv, rasters, uavsar files,
 * Full provenance for download and upload
 * Manage Site, Point, Profile and Raster Data
-* PostgreSQL Database end point for researchers
+* PostgreSQL database endpoint for researchers
 
 
 Installing
@@ -49,7 +49,7 @@ Mac OS
 
 First ensure you have following prerequisites:
 
-* Python3.7 +
+* Python 3.8+
 * HomeBrew
 
 Then to install the postgres database with postgis functionality run:
@@ -64,7 +64,7 @@ Ubuntu
 
 First ensure you have following prerequisites:
 
-* Python3.7 +
+* Python 3.8+
 * wget
 
 Then to install the postgres database with postgis functionality run:
@@ -121,7 +121,7 @@ Quickly test your installation by running:
 This will run a series of tests that create a small database and confirm
 that samples of the data sets references in `./scripts/upload` folder can be
 uploaded seamlessly. These tests can serve as a nice way to see how to
-interact with the database but also serve to confirm our reproduciblity.
+interact with the database but also serve to confirm our reproducibility.
 
 The goal of this project is to have high fidelity in data
 interpretation/submission to the database. To see the current
diff --git a/scripts/download/nsidc_sources.txt b/scripts/download/nsidc_sources.txt
index 3b67c5b..2f3ea51 100644
--- a/scripts/download/nsidc_sources.txt
+++ b/scripts/download/nsidc_sources.txt
@@ -9,3 +9,5 @@ https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_SD_TLI.001/2019.09.29/SNEX20_SD_TLI
 https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_TS_SP.002/
 https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX21_TS_SP.001/
 https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX_Met.001/
+https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX23_Lidar.001/2022.10.24/SNEX23_Lidar_FLCF_CH_0.25M_20221024_V01.0.tif
+https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX23_Lidar.001/2023.03.11/SNEX23_Lidar_FLCF_SD_0.25M_20230311_V01.0.tif
\ No newline at end of file
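Editor's note: the two SNEX23 lidar GeoTIFFs added to nsidc_sources.txt above sit behind an Earthdata login, like the other NSIDC URLs in that list. As a minimal, hypothetical sketch of fetching them outside the repo's own download tooling (assuming valid Earthdata credentials in ~/.netrc; this helper is not part of the changeset):

"""Hypothetical standalone fetch of the new SNEX23 lidar rasters.

Assumes ~/.netrc contains:
    machine urs.earthdata.nasa.gov login <user> password <pass>
"""
from pathlib import Path

import requests

URLS = [
    'https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX23_Lidar.001/2022.10.24/SNEX23_Lidar_FLCF_CH_0.25M_20221024_V01.0.tif',
    'https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX23_Lidar.001/2023.03.11/SNEX23_Lidar_FLCF_SD_0.25M_20230311_V01.0.tif',
]

out_dir = Path('../download/data/SNOWEX/SNEX23_Lidar.001')  # assumed layout

with requests.Session() as session:
    for url in URLS:
        # requests falls back to ~/.netrc when the Earthdata
        # redirect asks for authentication
        resp = session.get(url, allow_redirects=True, timeout=300)
        resp.raise_for_status()
        dest = out_dir.joinpath(url.rsplit('/', 1)[-1])
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest.write_bytes(resp.content)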
diff --git a/scripts/upload/add_AK_lidar.py b/scripts/upload/add_AK_lidar.py
new file mode 100644
index 0000000..0dfa5c0
--- /dev/null
+++ b/scripts/upload/add_AK_lidar.py
@@ -0,0 +1,77 @@
+"""
+Upload the SnowEx Alaska lidar products from Farmers Loop and Creamers Field
+(for now).
+
+# To run with all the scripts
+python run.py
+
+# To run individually
+python add_AK_lidar.py
+"""
+import os
+from datetime import date
+from pathlib import Path
+from subprocess import Popen
+
+from snowex_db.batch import UploadRasterBatch
+
+
+def main():
+    """
+    Uploader script for a subset of the SnowEx Alaska lidar products
+    """
+
+    # Typical kwargs for the dataset
+    kwargs = {'instrument': 'lidar',
+              'observers': 'chris larsen',
+              'description': '0.5m products',  # Datasheet says 0.25 m but the data are actually 0.5 m
+              'tiled': True,
+              'epsg': 26906,  # NAD83 / UTM zone 6N (Alaska)
+              'no_data': -9999,
+              'in_timezone': 'AKST',
+              'doi': 'https://doi.org/10.5067/BV4D8RRU1H7U',
+              "site_name": "farmers-creamers"
+              }
+    # Build a list of uploaders and then execute them
+    uploaders = []
+
+    # Directory of SNOWEX products
+    lidar_dir = Path('../download/data/SNOWEX/SNEX23_Lidar.001/')
+    reprojected = lidar_dir.joinpath('reprojected')
+
+    if not reprojected.is_dir():
+        os.mkdir(reprojected)
+
+    # Reproject using GDAL
+    print('Reprojecting files...')
+    raw_files = lidar_dir.glob('*/*.tif')
+    for f in raw_files:
+        # Watch out for files already in the reprojected folder
+        if f.parent != reprojected:
+            output = reprojected.joinpath(f.name)
+            cmd = f'gdalwarp -overwrite -t_srs EPSG:{kwargs["epsg"]} {f} {output}'
+            print(cmd)
+            p = Popen(cmd, shell=True)
+            p.wait()
+
+    ############################ Farmers Loop / Creamers Field (FLCF) ############################
+    # Snow off - canopy height
+    f = reprojected.joinpath("SNEX23_Lidar_FLCF_CH_0.25M_20221024_V01.0.tif")
+    uploaders.append(UploadRasterBatch([f], date=date(2022, 10, 24),
+                                       type="canopy_height", units="meters", **kwargs))
+
+    # Snow depth
+    f = reprojected.joinpath("SNEX23_Lidar_FLCF_SD_0.25M_20230311_V01.0.tif")
+    uploaders.append(UploadRasterBatch([f], date=date(2023, 3, 11),
+                                       type="depth", units="meters", **kwargs))
+
+    errors = 0
+    for u in uploaders:
+        u.push()
+        errors += len(u.errors)
+    return errors
+
+
+# Add this so you can run this script directly without running run.py
+if __name__ == '__main__':
+    main()
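Editor's note: add_AK_lidar.py shells out to the gdalwarp CLI via Popen(shell=True). If the GDAL Python bindings happen to be installed alongside the CLI (an assumption; the script itself only requires the command-line tool), the same EPSG:26906 reprojection could be done in-process, roughly:

# Sketch of the same reprojection via the GDAL Python bindings,
# assuming the osgeo package is available.
from pathlib import Path

from osgeo import gdal

gdal.UseExceptions()

lidar_dir = Path('../download/data/SNOWEX/SNEX23_Lidar.001/')
reprojected = lidar_dir.joinpath('reprojected')
reprojected.mkdir(exist_ok=True)

for f in lidar_dir.glob('*/*.tif'):
    # Skip anything already in the reprojected folder
    if f.parent != reprojected:
        gdal.Warp(str(reprojected.joinpath(f.name)), str(f), dstSRS='EPSG:26906')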
diff --git a/scripts/upload/add_csu_gpr_AK.py b/scripts/upload/add_csu_gpr_AK.py
new file mode 100644
index 0000000..d56bb7b
--- /dev/null
+++ b/scripts/upload/add_csu_gpr_AK.py
@@ -0,0 +1,66 @@
+"""
+Read in the SnowEx 2023 CSU GPR data collected at Farmers Loop and Creamers Field.
+
+1. Data is preliminary and currently only available via email from Randall B.
+2A. python run.py            # To run all the upload scripts at once
+2B. python add_csu_gpr_AK.py # To run individually
+
+"""
+
+from pathlib import Path
+from snowexsql.db import get_db
+from snowex_db.upload import PointDataCSV
+import pandas as pd
+
+
+def main():
+    file = Path('../download/data/SnowEx223_FLCF_1GHz_GPR_CSU.csv')
+
+    # Read as strings so quirks can be fixed without altering the data
+    df = pd.read_csv(file, dtype=str)
+
+    # The uploader is not able to handle the Notes column, so remove it for now
+    modified = file.parent.joinpath(file.stem + f'_mod{file.suffix}')
+    print(f"Removing Notes column prior to upload. Writing to {modified}")
+
+    coi = [c for c in df.columns if c != 'Notes']
+
+    # Missing times are a problem. Use noon AKST == 21:00 UTC
+    df['Time[HHMM]'] = '21:00'
+
+    # Write out the modified version
+    df[coi].to_csv(modified, index=False)
+
+
+    kwargs = {
+        # Keyword argument to upload depth measurements
+        'depth_is_metadata': False,
+
+        # Constant metadata for the GPR data
+        'site_name': 'farmers-creamers',
+        'observers': 'Randall Bonnell',
+        'instrument': 'pulseEkko pro 1 GHz GPR',
+        'in_timezone': 'UTC',
+        'out_timezone': 'UTC',
+        'doi': None,  # Data is preliminary
+        'epsg': 26906
+    }
+
+    # Grab a db connection to a local db named snowex
+    db_name = 'localhost/snowex'
+    engine, session = get_db(db_name, credentials='./credentials.json')
+
+    # Instantiate the point uploader
+    csv = PointDataCSV(modified, **kwargs)
+    # Push it to the database
+    csv.submit(session)
+
+    # Close out the session with the DB
+    session.close()
+
+    # Return the number of errors so run.py can report it
+    return len(csv.errors)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/snowex_db/metadata.py b/snowex_db/metadata.py
index 7674efc..e90e4f0 100644
--- a/snowex_db/metadata.py
+++ b/snowex_db/metadata.py
@@ -336,7 +336,7 @@ class ExtendedSnowExProfileVariables(SnowExProfileVariables):
     )
     ELEVATION = MeasurementDescription(
         'elevation', "Elevation",
-        ['elev_m', 'elevation']
+        ['elev_m', 'elevation', 'elevationwgs84']
     )
     EQUIPMENT = MeasurementDescription(
         'equipment', "Equipment",
diff --git a/snowex_db/upload.py b/snowex_db/upload.py
index 53ee6c3..b52a6d7 100644
--- a/snowex_db/upload.py
+++ b/snowex_db/upload.py
@@ -515,6 +515,7 @@ def persist_cog(self):
         if exists(self._cog_path):
             if self.use_s3:
                 self._key_name = join(self.s3_prefix, self._cog_path.name)
+                LOG.info(f'Uploading {self._cog_path} to {self.s3_bucket}/{self._key_name}')
                 s3 = boto3.resource('s3', region_name=self.AWS_REGION)
                 s3.meta.client.upload_file(
                     str(self._cog_path),  # local file
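Editor's note: both new upload scripts return their error count from main() so a driver can tally them. run.py is not part of this diff, so its real interface is unknown; a hypothetical driver that aggregates those counts might look like:

# Hypothetical sketch in the spirit of scripts/upload/run.py
import add_AK_lidar
import add_csu_gpr_AK

total_errors = 0
for module in (add_AK_lidar, add_csu_gpr_AK):
    # Each script's main() returns its number of upload errors
    total_errors += module.main() or 0

print(f'Finished with {total_errors} errors.')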