python versions added
PennyHow committed Nov 4, 2024
1 parent 6cb3eb3 commit f749624
Showing 4 changed files with 329 additions and 29 deletions.
12 changes: 9 additions & 3 deletions gee_scripts/lake_classification.js
@@ -3,8 +3,8 @@ print('Commencing script run');
//--------------------------------------------------------------------------
// DEFINE VARIABLES

-var date1='2017-07-01';
-var date2='2017-08-31';
+var date1='2016-07-01';
+var date2='2016-08-31';

// Bounds filter as rectangle
//var pos=[-49.5854, 70.7360, -55.5516, 59.9702];
@@ -13,6 +13,7 @@ var date2='2017-08-31';

// Bounds as feature
var aoi = ee.FeatureCollection('users/pennyruthhow/greenland_coastline');
+var featcol = ee.FeatureCollection('users/pennyruthhow/greenland_rectangle_mask');

print('Searching for images... ','Date range', date1, date2, 'Cloud percentage', 20)

@@ -35,13 +36,18 @@ function maskS2clouds(image) {
}

// Search for images
-var dataset = ee.ImageCollection('COPERNICUS/S2')
+var dataset = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
    .filterDate(date1, date2)
    .filterBounds(aoi)
    // Pre-filter to get less cloudy granules.
    .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
    .map(maskS2clouds);

+//var rgb = dataset.select(['B2', 'B3', 'B4']);
+//var mosaic = rgb.mosaic();
+//Map.setCenter(-71.12532, 42.3712, 12);
+//Map.addLayer(mosaic, {}, 'spatial mosaic');

var blue='B2';
var green='B3';
var red='B4';
168 changes: 168 additions & 0 deletions gee_scripts/lake_classification.py
@@ -0,0 +1,168 @@
import ee

# Initialize the Earth Engine API
ee.Initialize()
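
# Note (assumption about the runtime environment): on a machine without cached
# Earth Engine credentials, ee.Authenticate() needs to be run once before
# ee.Initialize() will succeed.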

print('Commencing script run')

#--------------------------------------------------------------------------
# DEFINE VARIABLES

date1 = '2016-07-01'
date2 = '2016-08-31'

# Define the Area of Interest (AOI)
aoi = ee.FeatureCollection('users/pennyruthhow/greenland_coastline')
featcol = ee.FeatureCollection('users/pennyruthhow/greenland_rectangle_mask')

print(f'Searching for images... Date range: {date1} - {date2}, Cloud percentage: 20')

#--------------------------------------------------------------------------
# Sentinel-2 classification

# Function to mask clouds from Sentinel-2 imagery
def maskS2clouds(image):
    qa = image.select('QA60')
    cloudBitMask = 1 << 10
    cirrusBitMask = 1 << 11

    # Both cloud and cirrus flags should be 0 for clear conditions
    mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(qa.bitwiseAnd(cirrusBitMask).eq(0))
    return image.updateMask(mask).divide(10000)

# Search for Sentinel-2 images
dataset_s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED') \
    .filterDate(date1, date2) \
    .filterBounds(aoi) \
    .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20)) \
    .map(maskS2clouds)
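
# Optional sanity check (a sketch, not part of the original script): confirm
# the date, bounds and cloud filters actually matched some scenes before
# reducing the collection. size() is evaluated server-side.
print('Matching S2 scenes:', dataset_s2.size().getInfo())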

# Select bands
blue = 'B2'
green = 'B3'
red = 'B4'
vnir = 'B8'
swir1 = 'B11'
swir2 = 'B12'

# Mean reducer
image = dataset_s2.select([blue, green, red, vnir, swir1, swir2]).reduce(ee.Reducer.mean())
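# Note: reduce(ee.Reducer.mean()) appends the reducer name to each band, so
# 'B2' becomes 'B2_mean' and so on; the selections below use the suffixed names.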

# Resample SWIR bands
sw = image.select([swir1 + '_mean', swir2 + '_mean']).resample('bilinear') \
    .reproject(crs='EPSG:4326', scale=10).rename(['B11_mean_1', 'B12_mean_1'])

# Add bands and print band names
image = image.addBands(sw)
print(image.bandNames().getInfo())

# Create water indices
ndwi = image.normalizedDifference([green + '_mean', vnir + '_mean'])
mndwi = image.normalizedDifference([green + '_mean', swir1 + '_mean_1'])
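
# normalizedDifference([a, b]) computes (a - b) / (a + b), giving the standard
# NDWI (green vs NIR) and MNDWI (green vs SWIR) water index formulations.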

AWEIsh = image.expression(
    'BLUE + 2.5 * GREEN - 1.5 * (VNIR + SWIR1) - 0.25 * SWIR2', {
        'BLUE': image.select(blue + '_mean'),
        'GREEN': image.select(green + '_mean'),
        'SWIR1': image.select(swir1 + '_mean_1'),
        'VNIR': image.select(vnir + '_mean'),
        'SWIR2': image.select(swir2 + '_mean_1')
    })

AWEInsh = image.expression(
    '4.0 * (GREEN - SWIR1) - (0.25 * VNIR + 2.75 * SWIR2)', {
        'GREEN': image.select(green + '_mean'),
        'SWIR1': image.select(swir1 + '_mean_1'),
        'VNIR': image.select(vnir + '_mean'),
        'SWIR2': image.select(swir2 + '_mean_1')
    })
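
# The two expressions above follow the shadow (AWEIsh) and no-shadow (AWEInsh)
# coefficient sets of the Automated Water Extraction Index (Feyisa et al., 2014).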

bright = image.expression(
    '(RED + GREEN + BLUE) / 3', {
        'BLUE': image.select(blue + '_mean'),
        'GREEN': image.select(green + '_mean'),
        'RED': image.select(red + '_mean')
    })

# Classify Sentinel-2 lakes
s2_lakes = image.expression(
    "(BRIGHT > 5000) ? 0 : (NDWI > 0.3) ? 1 : (MNDWI < 0.1) ? 0 : "
    "(AWEISH < 2000) ? 0 : (AWEISH > 5000) ? 0 : "
    "(AWEINSH < 4000) ? 0 : (AWEINSH > 6000) ? 0 : 1", {
        'NDWI': ndwi,
        'MNDWI': mndwi,
        'AWEISH': AWEIsh,
        'AWEINSH': AWEInsh,
        'BRIGHT': bright
    }).rename('s2_lakes').toByte()

s2_lakes = s2_lakes.updateMask(s2_lakes)
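# Masking the layer with itself hides the zero-valued (non-lake) pixels, so
# only classified lake pixels remain.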

print('S2 scenes classified')

#--------------------------------------------------------------------------
# Sentinel-1 lakes classification

# Search for Sentinel-1 images
dataset_s1 = ee.ImageCollection('COPERNICUS/S1_GRD') \
    .filterDate(date1, date2) \
    .filterBounds(aoi) \
    .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HH')) \
    .filter(ee.Filter.eq('instrumentMode', 'IW'))

# Create smooth mosaic
aver = dataset_s1.select('HH').mosaic()
smooth = aver.focal_median(50, 'circle', 'meters')
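# The focal median acts as a simple speckle filter on the SAR mosaic before
# thresholding.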

# Classify lakes based on Sentinel-1
s1_lakes = smooth.lt(-20).rename('s1_lakes').toByte()
s1_lakes = s1_lakes.updateMask(s1_lakes)
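# Calm open water behaves as a near-specular reflector at C-band, so HH
# backscatter below roughly -20 dB is a common (scene-dependent) water threshold.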

print('S1 scenes classified')

#--------------------------------------------------------------------------
# DEM lakes classification

# Load DEM data and process
dem = ee.Image('UMN/PGC/ArcticDEM/V3/2m_mosaic').clip(aoi)
elev = dem.select('elevation').focal_median(110, 'circle', 'meters').toInt64()

fill = ee.Terrain.fillMinima(elev, 10, 50)
diff = fill.subtract(elev)
dem_lakes = diff.gt(0).rename('dem_lakes').toByte()
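
# Filling local minima and subtracting the original surface leaves positive
# values only inside closed depressions, i.e. candidate lake basins.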

dem_lakes = dem_lakes.focal_median(50, 'circle', 'meters').updateMask(dem_lakes)

print('DEM scenes classified')

#--------------------------------------------------------------------------
# Combine all lakes and export

# Combine layers
all_lakes = s2_lakes.addBands([s1_lakes.select('s1_lakes'), dem_lakes.select('dem_lakes')])
print('All lakes combined')

# Clip and reproject the combined image
clip_lakes = all_lakes.clip(featcol.first()).reproject(crs='EPSG:3413', scale=10)
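
# EPSG:3413 (NSIDC polar stereographic north) is the projection typically used
# for Greenland-wide products; reprojecting here fixes the 10 m output grid.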

# Get projection info
projection = clip_lakes.select('s2_lakes').projection().getInfo()
print('All lakes clipped and reprojected')
print(projection)

# Export the image to Google Drive. Earth Engine exports treat 'scale' and
# 'crsTransform' as mutually exclusive ways of defining the pixel grid, so
# only the transform (which already encodes the 10 m pixel size) is passed.
task = ee.batch.Export.image.toDrive(
    image=clip_lakes,
    description='lakes',
    folder='out',
    region=featcol.first().geometry(),
    crs=projection['crs'],
    crsTransform=projection['transform'],
    maxPixels=1e13
)
task.start()
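
# Optional sketch (an assumption, not part of the original script): exports run
# asynchronously, so the task can be polled until it finishes. Uncomment to
# block until the export leaves the READY/RUNNING states.
# import time
# while task.status()['state'] in ('READY', 'RUNNING'):
#     time.sleep(30)
# print(task.status()['state'])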

#--------------------------------------------------------------------------
print('Run finished')

67 changes: 41 additions & 26 deletions gee_scripts/lake_temperature_estimation.js
@@ -65,7 +65,6 @@ function getImages(aoi) {
  return L9.merge(L8).merge(L7).merge(L5).merge(L4);
}

//********* DERIVE ST FUNCTION ******************//
function applyWaterCorrection(image) {
  var waterBands = (image.select('ST').multiply(0.00341802).add(149.0)) // Scale factor
@@ -74,50 +73,66 @@ function applyWaterCorrection(image) {
  return image.addBands(waterBands, null, true);
}

-//************ DERIVE AND EXPORT FUNCTION ******************//
+//************ DERIVE AND ACCUMULATE FUNCTION ******************//
function getST(feature, id) {

-  var aoi = feature.geometry()
-  var pt = aoi.buffer(30);
+  var aoi = feature.geometry();
+  var pt = aoi.buffer(-30);

  var landsatWT = getImages(aoi).map(applyWaterCorrection);

-  var values = landsatWT.map(function(image){
+  var values = landsatWT.map(function(image) {
    return ee.Feature(null, image.reduceRegion(ee.Reducer.mean(), pt, 30))
      .set('lake_id', id)
      .set('system:time_start', image.get('system:time_start'))
      .set('system:index', image.get('system:index'))
-      .set('date', ee.Date(image.get('system:time_start')).format('yyy-MM-dd'))
+      .set('date', ee.Date(image.get('system:time_start')).format('yyyy-MM-dd'))
      .set('ST_max', image.reduceRegion(ee.Reducer.max(), pt, 30).values(['ST']))
      .set('ST_min', image.reduceRegion(ee.Reducer.min(), pt, 30).values(['ST']))
      .set('ST_stddev', image.reduceRegion(ee.Reducer.stdDev(), pt, 30).values(['ST']))
-      .set('ST_count', image.reduceRegion(ee.Reducer.count(), pt, 30).values(['ST']))
+      .set('ST_count', image.reduceRegion(ee.Reducer.count(), pt, 30).values(['ST']));
  });

+  return values;
+}

+//************ ACCUMULATE ALL FEATURES INTO A SINGLE COLLECTION (Batch Processing) ******************//
+
+// Function to process a batch of lakes and export with a unique filename
+function processBatch(ids, batchNumber) {
+  var allValues = ee.FeatureCollection([]);
+
+  ids.map(function(id) {
+    var t = table.filter(ee.Filter.eq('lake_id', id));
+    var values = getST(t, id);
+    allValues = allValues.merge(values);
+  });
+
+  // Generate a unique filename for each batch using the batchNumber
+  var fileName = 'ST_lake_timeseries_batch_' + batchNumber;
+
-  var name_file = id + '_lake_timeseries'
  Export.table.toDrive({
-    collection: values,
-    description: name_file,
-    folder: 'ST_IIML_poly',
+    collection: allValues,
+    description: fileName, // Use the unique fileName for each batch
+    folder: 'out',
    fileFormat: 'CSV',
-    selectors: ['lake_id','system:time_start','date','system:index','ST','ST_max','ST_min','ST_stddev','ST_count','QA_PIXEL']
+    selectors: ['lake_id', 'system:time_start', 'date', 'system:index', 'ST', 'ST_max', 'ST_min', 'ST_stddev', 'ST_count', 'QA_PIXEL']
  });
}

-//************ ITERATE OVER ALL PTS ******************//
-print(table)
+//************ ITERATE OVER ALL LAKES IN BATCHES AND EXPORT WITH UNIQUE NAMES ******************//

+// Define batch size (number of lakes per batch)
+var batchSize = 50; // You can adjust this batch size based on your testing and performance

-//var first = table.aggregate_array('LakeID').get(0);
-//print(first);
-//var t = table.filter(ee.Filter.eq('LakeID', first));
-//getST(t, first);
+table.sort('lake_id').aggregate_array('lake_id').evaluate(function(ids) {
+  var totalLakes = ids.length;
+  var batchNumber = 0;

-table.sort('LakeID').aggregate_array('LakeID').evaluate(function (ids) {
-  ids.map(function (id) {
-    var t = table.filter(ee.Filter.eq('LakeID', id))
-    getST(t, id)
-  })
+  // Loop over the lakes, processing them in batches
+  for (var i = 0; i < totalLakes; i += batchSize) {
+    var batchIds = ids.slice(i, i + batchSize);
+    batchNumber++;
+    processBatch(batchIds, batchNumber); // Pass batchNumber to create unique filenames for each batch
+  }
});
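
// A practical aside (an observation, not part of the committed script): each
// processBatch() call queues its own Export task, and Code Editor exports
// still have to be started from the Tasks tab, so a smaller batchSize means
// fewer rows per CSV but more tasks to launch by hand.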
