diff --git a/gee_scripts/lake_classification.js b/gee_scripts/lake_classification.js
index b5b56ca..3e7824e 100644
--- a/gee_scripts/lake_classification.js
+++ b/gee_scripts/lake_classification.js
@@ -3,8 +3,8 @@ print('Commencing script run');
 
 //--------------------------------------------------------------------------
 // DEFINE VARIABLES
-var date1='2017-07-01';
-var date2='2017-08-31';
+var date1='2016-07-01';
+var date2='2016-08-31';
 
 // Bounds filter as rectangle
 //var pos=[-49.5854, 70.7360, -55.5516, 59.9702];
@@ -13,6 +13,7 @@ var date2='2017-08-31';
 
 // Bounds as feature
 var aoi = ee.FeatureCollection('users/pennyruthhow/greenland_coastline');
+var featcol = ee.FeatureCollection('users/pennyruthhow/greenland_rectangle_mask');
 
 print('Searching for images... ','Date range', date1, date2, 'Cloud percentage', 20)
 
@@ -35,13 +36,18 @@ function maskS2clouds(image) {
 }
 
 // Search for images
-var dataset = ee.ImageCollection('COPERNICUS/S2')
+var dataset = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
   .filterDate(date1, date2)
   .filterBounds(aoi)
   // Pre-filter to get less cloudy granules.
   .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
   .map(maskS2clouds);
 
+//var rgb = dataset.select(['B2', 'B3', 'B4']);
+//var mosaic = rgb.mosaic();
+//Map.setCenter(-71.12532, 42.3712, 12);
+//Map.addLayer(mosaic, {}, 'spatial mosaic');
+
 var blue='B2';
 var green='B3';
 var red='B4';
diff --git a/gee_scripts/lake_classification.py b/gee_scripts/lake_classification.py
new file mode 100644
index 0000000..cf0103f
--- /dev/null
+++ b/gee_scripts/lake_classification.py
@@ -0,0 +1,168 @@
+import ee
+
+# Initialize the Earth Engine API
+ee.Initialize()
+
+print('Commencing script run')
+
+#--------------------------------------------------------------------------
+# DEFINE VARIABLES
+
+date1 = '2016-07-01'
+date2 = '2016-08-31'
+
+# Define the Area of Interest (AOI)
+aoi = ee.FeatureCollection('users/pennyruthhow/greenland_coastline')
+featcol = ee.FeatureCollection('users/pennyruthhow/greenland_rectangle_mask')
+
+print(f'Searching for images... '
+      f'Date range: {date1} - {date2}, Cloud percentage: 20')
+
+#--------------------------------------------------------------------------
+# Sentinel-2 classification
+
+# Function to mask clouds from Sentinel-2 imagery
+def maskS2clouds(image):
+    qa = image.select('QA60')
+    cloudBitMask = 1 << 10
+    cirrusBitMask = 1 << 11
+
+    # Both cloud and cirrus flags should be 0 for clear conditions
+    mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(qa.bitwiseAnd(cirrusBitMask).eq(0))
+    return image.updateMask(mask).divide(10000)
+
+# Search for Sentinel-2 images
+dataset_s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED') \
+    .filterDate(date1, date2) \
+    .filterBounds(aoi) \
+    .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20)) \
+    .map(maskS2clouds)
+
+# Select bands
+blue = 'B2'
+green = 'B3'
+red = 'B4'
+vnir = 'B8'
+swir1 = 'B11'
+swir2 = 'B12'
+
+# Mean reducer
+image = dataset_s2.select([blue, green, red, vnir, swir1, swir2]).reduce(ee.Reducer.mean())
+
+# Resample SWIR bands
+sw = image.select([swir1 + '_mean', swir2 + '_mean']).resample('bilinear')\
+    .reproject(crs='EPSG:4326', scale=10).rename(['B11_mean_1', 'B12_mean_1'])
+
+# Add bands and print band names
+image = image.addBands(sw)
+print(image.bandNames().getInfo())
+
+# Create water indices
+ndwi = image.normalizedDifference([green + '_mean', vnir + '_mean'])
+mndwi = image.normalizedDifference([green + '_mean', swir1 + '_mean_1'])
+
+AWEIsh = image.expression(
+    'BLUE + 2.5 * GREEN - 1.5 * (VNIR + SWIR1) - 0.25 * SWIR2', {
+        'BLUE': image.select(blue + '_mean'),
+        'GREEN': image.select(green + '_mean'),
+        'SWIR1': image.select(swir1 + '_mean_1'),
+        'VNIR': image.select(vnir + '_mean'),
+        'SWIR2': image.select(swir2 + '_mean_1')
+    })
+
+AWEInsh = image.expression(
+    '4.0 * (GREEN - SWIR1) - (0.25 * VNIR + 2.75 * SWIR2)', {
+        'GREEN': image.select(green + '_mean'),
+        'SWIR1': image.select(swir1 + '_mean_1'),
+        'VNIR': image.select(vnir + '_mean'),
+        'SWIR2': image.select(swir2 + '_mean_1')
+    })
+
+bright = image.expression(
+    '(RED + GREEN + BLUE) / 3', {
+        'BLUE': image.select(blue + '_mean'),
+        'GREEN': image.select(green + '_mean'),
+        'RED': image.select(red + '_mean')
+    })
+
+# Classify Sentinel-2 lakes
+s2_lakes = image.expression(
+    "(BRIGHT > 5000) ? 0 : (NDWI > 0.3) ? 1 : (MNDWI < 0.1) ? 0 : "
+    "(AWEISH < 2000) ? 0 : (AWEISH > 5000) ? 0 : "
+    "(AWEINSH < 4000) ? 0 : (AWEINSH > 6000) ? "
+    "0 : 1", {
+        'NDWI': ndwi,
+        'MNDWI': mndwi,
+        'AWEISH': AWEIsh,
+        'AWEINSH': AWEInsh,
+        'BRIGHT': bright
+    }).rename('s2_lakes').toByte()
+
+s2_lakes = s2_lakes.updateMask(s2_lakes)
+
+print('S2 scenes classified')
+
+#--------------------------------------------------------------------------
+# Sentinel-1 lakes classification
+
+# Search for Sentinel-1 images
+dataset_s1 = ee.ImageCollection('COPERNICUS/S1_GRD') \
+    .filterDate(date1, date2) \
+    .filterBounds(aoi) \
+    .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HH')) \
+    .filter(ee.Filter.eq('instrumentMode', 'IW'))
+
+# Create smooth mosaic
+aver = dataset_s1.select('HH').mosaic()
+smooth = aver.focal_median(50, 'circle', 'meters')
+
+# Classify lakes based on Sentinel-1
+s1_lakes = smooth.lt(-20).rename('s1_lakes').toByte()
+s1_lakes = s1_lakes.updateMask(s1_lakes)
+
+print('S1 scenes classified')
+
+#--------------------------------------------------------------------------
+# DEM lakes classification
+
+# Load DEM data and process
+dem = ee.Image('UMN/PGC/ArcticDEM/V3/2m_mosaic').clip(aoi)
+elev = dem.select('elevation').focal_median(110, 'circle', 'meters').toInt64()
+
+fill = ee.Terrain.fillMinima(elev, 10, 50)
+diff = fill.subtract(elev)
+dem_lakes = diff.gt(0).rename('dem_lakes').toByte()
+
+dem_lakes = dem_lakes.focal_median(50, 'circle', 'meters').updateMask(dem_lakes)
+
+print('DEM scenes classified')
+
+#--------------------------------------------------------------------------
+# Combine all lakes and export
+
+# Combine layers
+all_lakes = s2_lakes.addBands([s1_lakes.select('s1_lakes'), dem_lakes.select('dem_lakes')])
+print('All lakes combined')
+
+# Clip and reproject the combined image
+clip_lakes = all_lakes.clip(featcol.first()).reproject(crs='EPSG:3413', scale=10)
+
+# Get projection info
+projection = clip_lakes.select('s2_lakes').projection().getInfo()
+print('All lakes clipped and reprojected')
+print(projection)
+
+# Export the image to Google Drive
+# Note: crs and crsTransform fully define the output pixel grid, so scale is not passed as well
+task = ee.batch.Export.image.toDrive(
+    image=clip_lakes,
+    description='lakes',
+    folder='out',
+    region=ee.Feature(featcol.first()).geometry(),
+    crs=projection['crs'],
+    crsTransform=projection['transform'],
+    maxPixels=1e13
+)
+task.start()
+
+#--------------------------------------------------------------------------
+print('Run finished')
+
diff --git a/gee_scripts/lake_temperature_estimation.js b/gee_scripts/lake_temperature_estimation.js
index db25ccf..d8b79d5 100644
--- a/gee_scripts/lake_temperature_estimation.js
+++ b/gee_scripts/lake_temperature_estimation.js
@@ -65,7 +65,6 @@ function getImages(aoi) {
   return L9.merge(L8).merge(L7).merge(L5).merge(L4);
 }
 
-
 //********* DERIVE ST FUNCTION ******************//
 function applyWaterCorrection(image) {
   var waterBands = (image.select('ST').multiply(0.00341802).add(149.0)) // Scale factor
@@ -74,50 +73,66 @@ function applyWaterCorrection(image) {
   return image.addBands(waterBands, null, true);
 }
 
-
-//************ DERIVE AND EXPORT FUNCTION ******************//
+//************ DERIVE AND ACCUMULATE FUNCTION ******************//
 function getST(feature, id) {
-
-  var aoi = feature.geometry()
-  var pt = aoi.buffer(30);
+  var aoi = feature.geometry();
+  var pt = aoi.buffer(-30);
 
   var landsatWT = getImages(aoi).map(applyWaterCorrection);
 
-  var values = landsatWT.map(function(image){
+  var values = landsatWT.map(function(image) {
     return ee.Feature(null, image.reduceRegion(ee.Reducer.mean(), pt, 30))
       .set('lake_id', id)
      .set('system:time_start', image.get('system:time_start'))
      .set('system:index', image.get('system:index'))
-      .set('date', ee.Date(image.get('system:time_start')).format('yyy-MM-dd'))
+      .set('date', ee.Date(image.get('system:time_start')).format('yyyy-MM-dd'))
       .set('ST_max', image.reduceRegion(ee.Reducer.max(), pt, 30).values(['ST']))
       .set('ST_min', image.reduceRegion(ee.Reducer.min(), pt, 30).values(['ST']))
       .set('ST_stddev', image.reduceRegion(ee.Reducer.stdDev(), pt, 30).values(['ST']))
-      .set('ST_count', image.reduceRegion(ee.Reducer.count(), pt, 30).values(['ST']))
+      .set('ST_count', image.reduceRegion(ee.Reducer.count(), pt, 30).values(['ST']));
+  });
+
+  return values;
+}
+
+//************ ACCUMULATE ALL FEATURES INTO A SINGLE COLLECTION (Batch Processing) ******************//
+
+// Function to process a batch of lakes and export with a unique filename
+function processBatch(ids, batchNumber) {
+  var allValues = ee.FeatureCollection([]);
+
+  ids.map(function(id) {
+    var t = table.filter(ee.Filter.eq('lake_id', id));
+    var values = getST(t, id);
+    allValues = allValues.merge(values);
   });
+
+  // Generate a unique filename for each batch using the batchNumber
+  var fileName = 'ST_lake_timeseries_batch_' + batchNumber;
 
-  var name_file = id + '_lake_timeseries'
   Export.table.toDrive({
-    collection: values,
-    description: name_file,
-    folder: 'ST_IIML_poly',
+    collection: allValues,
+    description: fileName, // Use the unique fileName for each batch
+    folder: 'out',
     fileFormat: 'CSV',
-    selectors: ['lake_id','system:time_start','date','system:index','ST','ST_max','ST_min','ST_stddev','ST_count','QA_PIXEL']
+    selectors: ['lake_id', 'system:time_start', 'date', 'system:index', 'ST', 'ST_max', 'ST_min', 'ST_stddev', 'ST_count', 'QA_PIXEL']
   });
-
 }
 
-//************ ITERATE OVER ALL PTS ******************//
-print(table)
+//************ ITERATE OVER ALL LAKES IN BATCHES AND EXPORT WITH UNIQUE NAMES ******************//
+
+// Define batch size (number of lakes per batch)
+var batchSize = 50; // You can adjust this batch size based on your testing and performance
 
-//var first = table.aggregate_array('LakeID').get(0);
-//print(first);
-//var t = table.filter(ee.Filter.eq('LakeID', first));
-//getST(t, first);
+table.sort('lake_id').aggregate_array('lake_id').evaluate(function(ids) {
+  var totalLakes = ids.length;
+  var batchNumber = 0;
 
-table.sort('LakeID').aggregate_array('LakeID').evaluate(function (ids) {
-  ids.map(function (id) {
-    var t = table.filter(ee.Filter.eq('LakeID', id))
-    getST(t, id)
-  })
+  // Loop over the lakes, processing them in batches
+  for (var i = 0; i < totalLakes; i += batchSize) {
+    var batchIds = ids.slice(i, i + batchSize);
+    batchNumber++;
+    processBatch(batchIds, batchNumber); // Pass batchNumber to create unique filenames for each batch
+  }
 });
diff --git a/gee_scripts/lake_temperature_estimation.py b/gee_scripts/lake_temperature_estimation.py
new file mode 100644
index 0000000..7bb6970
--- /dev/null
+++ b/gee_scripts/lake_temperature_estimation.py
@@ -0,0 +1,111 @@
+import ee
+
+# Initialize the Earth Engine API
+ee.Initialize()
+
+#****************** CLOUD MASK FUNCTION *****************#
+def cloudMask(image):
+    qa = image.select('QA_PIXEL')
+    mask = qa.bitwiseAnd(1 << 3).Or(qa.bitwiseAnd(1 << 4))
+    return image.updateMask(mask.Not())
+
+#****************** GET IMAGERY FUNCTION *****************#
+def getImages(aoi):
+    L9 = ee.ImageCollection('LANDSAT/LC09/C02/T1_L2')\
+        .select(['ST_B10', 'QA_PIXEL'], ['ST', 'QA_PIXEL'])\
+        .filterBounds(aoi)\
+        .filter(ee.Filter.lt('CLOUD_COVER', 20))\
+        .map(cloudMask)
+
+    L8 = ee.ImageCollection('LANDSAT/LC08/C02/T1_L2')\
+        .select(['ST_B10', 'QA_PIXEL'], ['ST', 'QA_PIXEL'])\
+        .filterBounds(aoi)\
+        .filter(ee.Filter.lt('CLOUD_COVER', 20))\
+        .map(cloudMask)
+
+    L7 = ee.ImageCollection('LANDSAT/LE07/C02/T1_L2')\
+        .select(['ST_B6', 'QA_PIXEL'], ['ST', 'QA_PIXEL'])\
+        .filterBounds(aoi)\
+        .filter(ee.Filter.lt('CLOUD_COVER', 20))\
+        .map(cloudMask)
+
+    L5 = ee.ImageCollection('LANDSAT/LT05/C02/T1_L2')\
+        .select(['ST_B6', 'QA_PIXEL'], ['ST', 'QA_PIXEL'])\
+        .filterBounds(aoi)\
+        .filter(ee.Filter.lt('CLOUD_COVER', 20))\
+        .map(cloudMask)
+
+    L4 = ee.ImageCollection('LANDSAT/LT04/C02/T1_L2')\
+        .select(['ST_B6', 'QA_PIXEL'], ['ST', 'QA_PIXEL'])\
+        .filterBounds(aoi)\
+        .filter(ee.Filter.lt('CLOUD_COVER', 20))\
+        .map(cloudMask)
+
+    return L9.merge(L8).merge(L7).merge(L5).merge(L4)
+
+#********* DERIVE ST FUNCTION ******************#
+def applyWaterCorrection(image):
+    waterBands = image.select('ST').multiply(0.00341802).add(149.0)\
+        .multiply(0.806).add(54.37).subtract(273.15)
+    return image.addBands(waterBands, None, True)
+
+#************ DERIVE AND ACCUMULATE FUNCTION ******************#
+def getST(feature, lake_id):
+    aoi = feature.geometry()
+    pt = aoi.buffer(-30)
+
+    landsatWT = getImages(aoi).map(applyWaterCorrection)
+
+    def process_image(image):
+        return ee.Feature(None, image.reduceRegion(ee.Reducer.mean(), pt, 30))\
+            .set('lake_id', lake_id)\
+            .set('system:time_start', image.get('system:time_start'))\
+            .set('system:index', image.get('system:index'))\
+            .set('date', ee.Date(image.get('system:time_start')).format('yyyy-MM-dd'))\
+            .set('ST_max', image.reduceRegion(ee.Reducer.max(), pt, 30).get('ST'))\
+            .set('ST_min', image.reduceRegion(ee.Reducer.min(), pt, 30).get('ST'))\
+            .set('ST_stddev', image.reduceRegion(ee.Reducer.stdDev(), pt, 30).get('ST'))\
+            .set('ST_count', image.reduceRegion(ee.Reducer.count(), pt, 30).get('ST'))
+
+    values = landsatWT.map(process_image)
+    return values
+
+#************ BATCH PROCESSING FUNCTION ******************#
+def process_batch(lake_ids, batch_number):
+    all_values = ee.FeatureCollection([])
+
+    for lake_id in lake_ids:
+        # first() returns an ee.Element, so cast to ee.Feature before geometry() is called in getST
+        feature = ee.Feature(table.filter(ee.Filter.eq('lake_id', lake_id)).first())
+        values = getST(feature, lake_id)
+        all_values = all_values.merge(values)
+
+    # Generate a unique filename for each batch
+    file_name = f'lake_timeseries_batch_{batch_number}'
+
+    task = ee.batch.Export.table.toDrive(
+        collection=all_values,
+        description=file_name,
+        folder='ST_IIML_poly',
+        fileFormat='CSV',
+        selectors=['lake_id', 'system:time_start', 'date', 'system:index', 'ST', 'ST_max', 'ST_min', 'ST_stddev', 'ST_count', 'QA_PIXEL']
+    )
+    task.start()
+
+#************ ITERATE OVER ALL LAKES IN BATCHES AND EXPORT WITH UNIQUE NAMES ******************#
+def export_in_batches():
+    # Fetch the lake IDs from the table
+    lake_ids = table.aggregate_array('lake_id').getInfo()
+    batch_size = 50  # Adjust batch size as necessary
+    batch_number = 0
+
+    for i in range(0, len(lake_ids), batch_size):
+        batch_ids = lake_ids[i:i + batch_size]
+        batch_number += 1
+        process_batch(batch_ids, batch_number)
+
+# Load your FeatureCollection (replace with the appropriate variable or asset path)
+table = ee.FeatureCollection('users/your_username/your_asset')  # Example asset path
+
+# Run the batch export
+export_in_batches()