diff --git a/code/adamont_forcings.py b/code/adamont_forcings.py
index 2b54e58..f9430ff 100644
--- a/code/adamont_forcings.py
+++ b/code/adamont_forcings.py
@@ -129,9 +129,7 @@ def main(compute):
     # Folders
     workspace = str(Path(os.getcwd()).parent) + '\\'
     # Path to be updated with location of the ADAMONT forcings
-    path_adamont_forcings = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\treated\\'
-#    path_adamont_forcings = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\'
-#    path_adamont_forcings = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\subset_AGU\\'
+    path_adamont_forcings = settings.path_adamont
     path_smb = workspace + 'glacier_data\\smb\\'
     path_smb_function_adamont = path_smb + 'smb_function\\ADAMONT\\'
 #    path_glacier_coordinates = workspace + 'glacier_data\\glacier_coordinates\\'
diff --git a/code/alpgm_interface.py b/code/alpgm_interface.py
index e45824c..2381ec5 100644
--- a/code/alpgm_interface.py
+++ b/code/alpgm_interface.py
@@ -43,15 +43,20 @@
 # 'ann_no_weights': Deep Artificial Neural Network without sample weights
 # 'ann_weights': Deep Artificial Neural Network with sample weights
 # 'lasso': Lasso linear model
-settings.init(historical_forcing, projection_forcing, simulation_type, smb_model = 'ann_no_weights')
+settings.init(historical_forcing,
+              projection_forcing,
+              simulation_type,
+              smb_model = 'ann_no_weights',
+              cluster = False)

 ########## WORKFLOW ################################################################################

 ########## SMB SIMULATION ######################################

 # SMB machine learning models generation
-settings.train_smb_model(historical_forcing, compute_forcing = False,  # Compute historical climate forcings
-                         train_model = False)  # Re-train SMB machine learning models
+settings.train_smb_model(historical_forcing,
+                         compute_forcing = False,  # Compute historical climate forcings
+                         train_model = False)      # Re-train SMB machine learning models

 ########## DELTA H FUNCTIONS GENERATION #######################
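# A hedged sketch of what a settings.init() along the lines of the call above
# can look like: module-level globals configured once, with the new `cluster`
# flag switching the forcings location. The path values below are placeholders,
# not the repository's real paths.
import os

def init(historical_forcing, projection_forcing, simulation_type,
         smb_model='ann_no_weights', cluster=False):
    global path_adamont, smb_model_type
    smb_model_type = smb_model
    if cluster:
        # Placeholder path for a Linux cluster deployment
        path_adamont = '/scratch/data/ADAMONT/treated/'
    else:
        # Placeholder path for a local workstation
        path_adamont = os.path.join('..', 'data', 'ADAMONT', 'treated')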
diff --git a/code/glacier_evolution.py b/code/glacier_evolution.py
index 3b8c23e..b06904b 100644
--- a/code/glacier_evolution.py
+++ b/code/glacier_evolution.py
@@ -15,8 +15,7 @@
 import matplotlib.pyplot as plt
 from matplotlib_scalebar.scalebar import ScaleBar
 import numpy as np
-# To ignore errors when raster has only one pixel and normalization is divided by 0
-np.seterr(divide='ignore', invalid='ignore')
+import xarray as xr
 from numpy import genfromtxt
 from numba import jit
 import unicodedata
@@ -24,6 +23,7 @@
 import os
 import shutil
 import sys
+import time
 from osgeo import gdal, ogr, osr
 import copy
 from difflib import SequenceMatcher
@@ -35,6 +35,10 @@
 from keras import backend as K
 from keras.models import load_model

+
+# To ignore errors when raster has only one pixel and normalization is divided by 0
+np.seterr(divide='ignore', invalid='ignore')
+
 #import tensorflow as tf
 os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
@@ -45,52 +49,45 @@
 os.environ['SHAPE_ENCODING'] = "latin-1"

-
-
 ###### FILE PATHS #######

 # Folders
-
-workspace = str(Path(os.getcwd()).parent) + '\\'
+workspace = Path(os.getcwd()).parent
 #workspace = 'C:\\Jordi\\PhD\\Python\\'
-path_smb = workspace + 'glacier_data\\smb\\'
-path_glacier_evolution = workspace + 'glacier_data\\glacier_evolution\\'
+path_smb = os.path.join(workspace, 'glacier_data', 'smb')
+path_glacier_evolution = os.path.join(workspace, 'glacier_data', 'glacier_evolution')
 # Delta h parameterization functions
-path_delta_h_param = workspace + "glacier_data\\delta_h_param\\"
+path_delta_h_param = os.path.join(workspace, "glacier_data", "delta_h_param")
 # Shapefiles
-path_glacier_shapefiles = workspace + 'glacier_data\\glacier_shapefiles\\'
-path_glacier_2003_shapefiles = workspace + 'glacier_data\\glacier_shapefiles\\2003\\'
-path_glacier_2015_shapefiles = workspace + 'glacier_data\\glacier_shapefiles\\2015\\'
-path_glacier_flowlines_shapefile = path_glacier_2003_shapefiles + 'GLIMS_flowlines_2003' + '.shp'
+path_glacier_shapefiles = os.path.join(workspace, 'glacier_data', 'glacier_shapefiles')
+path_glacier_2003_shapefiles = os.path.join(workspace, 'glacier_data', 'glacier_shapefiles', '2003')
+path_glacier_2015_shapefiles = os.path.join(workspace, 'glacier_data', 'glacier_shapefiles', '2015')
+path_glacier_flowlines_shapefile = os.path.join(path_glacier_2003_shapefiles, 'GLIMS_flowlines_2003.shp')
 # Rasters
-path_glacier_ID_rasters = workspace + 'glacier_data\\glacier_rasters\\glacier_thickness\\thickness_tif\\'
-path_glacier_DEM_rasters = workspace + 'glacier_data\\glacier_rasters\\glacier_thickness\\dem_tif\\'
-path_glacier_evolution_DEM_rasters = path_glacier_DEM_rasters + 'glacier_evolution\\'
-path_glacier_evolution_ID_rasters = path_glacier_ID_rasters + 'glacier_evolution\\'
+path_glacier_ID_rasters = os.path.join(workspace, 'glacier_data', 'glacier_rasters', 'glacier_thickness', 'thickness_tif')
+path_glacier_DEM_rasters = os.path.join(workspace, 'glacier_data', 'glacier_rasters', 'glacier_thickness', 'dem_tif')
+path_glacier_evolution_DEM_rasters = os.path.join(path_glacier_DEM_rasters, 'glacier_evolution')
+path_glacier_evolution_ID_rasters = os.path.join(path_glacier_ID_rasters, 'glacier_evolution')
 # Glacier evolution files
-path_glacier_evolution_plots = path_glacier_evolution + 'plots\\'
-path_glacier_area = path_glacier_evolution + 'glacier_area\\'
-path_glacier_volume = path_glacier_evolution + 'glacier_volume\\'
-path_glacier_zmean = path_glacier_evolution + 'glacier_zmean\\'
-path_glacier_slope20 = path_glacier_evolution + 'glacier_slope20\\'
-path_glacier_melt_years = path_glacier_evolution + 'glacier_melt_years\\'
-path_glacier_w_errors = path_glacier_evolution + 'glacier_w_errors\\'
-path_glacier_CPDDs = path_glacier_evolution + 'glacier_CPDDs\\'
-path_glacier_snowfall = path_glacier_evolution + 'glacier_snowfall\\'
+path_glacier_evolution_plots = os.path.join(path_glacier_evolution, 'plots')
+path_glacier_area = os.path.join(path_glacier_evolution, 'glacier_area')
+path_glacier_volume = os.path.join(path_glacier_evolution, 'glacier_volume')
+path_glacier_zmean = os.path.join(path_glacier_evolution, 'glacier_zmean')
+path_glacier_slope20 = os.path.join(path_glacier_evolution, 'glacier_slope20')
+path_glacier_melt_years = os.path.join(path_glacier_evolution, 'glacier_melt_years')
+path_glacier_w_errors = os.path.join(path_glacier_evolution, 'glacier_w_errors')
+path_glacier_CPDDs = os.path.join(path_glacier_evolution, 'glacier_CPDDs')
+path_glacier_snowfall = os.path.join(path_glacier_evolution, 'glacier_snowfall')
 # GLIMS data
-path_glims = workspace + 'glacier_data\\GLIMS\\'
-# SAFRAN climate forcings
-path_safran_forcings = 'C:\\Jordi\\PhD\\Data\\SAFRAN-Nivo-2017\\'
+path_glims = os.path.join(workspace, 'glacier_data', 'GLIMS')
+
 global path_smb_function_safran
-path_smb_function_safran = path_smb + 'smb_function\\SAFRAN\\'
+path_smb_function_safran = os.path.join(path_smb, 'smb_function', 'SAFRAN')
 global path_smb_function_adamont
-path_smb_function_adamont = path_smb + 'smb_function\\ADAMONT\\'
-# Path to be updated with ADAMONT forcings local path
-path_adamont_forcings = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\treated\\'
-#path_adamont_forcings = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\\FORCING_ADAMONT_IGE_BERGER\\projections\\'
+path_smb_function_adamont = os.path.join(path_smb, 'smb_function', 'ADAMONT')
 # SMB simulation files
-path_smb_simulations = path_smb + 'smb_simulations\\'
-path_smb_function = path_smb + 'smb_function\\'
+path_smb_simulations = os.path.join(path_smb, 'smb_simulations')
+path_smb_function = os.path.join(path_smb, 'smb_function')
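# Why the refactor above replaces '\\'-concatenation with os.path.join: the
# hard-coded backslashes only work on Windows, while os.path.join (and pathlib)
# pick the right separator for the current OS, which matters once the model
# runs on a Linux cluster. A minimal, self-contained illustration:
import os
from pathlib import Path

workspace = Path(os.getcwd()).parent
# os.path.join accepts path-like objects such as Path and returns a str
path_smb = os.path.join(workspace, 'glacier_data', 'smb')
# The equivalent pure-pathlib spelling
path_smb_alt = workspace / 'glacier_data' / 'smb'
assert str(path_smb) == str(path_smb_alt)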
@@ -149,12 +146,12 @@ def store_file(data, path, midfolder, file_description, glimsID, year_start, yea
     year_range = np.asarray(range(year_start, year))
     data = np.asarray(data).reshape(-1,1)
     data_w_years = np.column_stack((year_range, data))
-    path_midfolder = path + midfolder
+    path_midfolder = os.path.join(path, midfolder)
     if not os.path.exists(path_midfolder):
         os.makedirs(path_midfolder)
 #    file_name = path + midfolder + glimsID + "_" + str(file_description) + '.csv'
-    file_name_h = path + midfolder + str(glimsID) + "_"
+    file_name_h = os.path.join(path, midfolder, str(glimsID) + "_")
     file_name_t = str(file_description) + '.csv'
     # We save the file with an unexisting name
     automatic_file_name_save(file_name_h, file_name_t, data_w_years, 'csv')
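# A hedged sketch of the "save the file with an unexisting name" idea behind
# automatic_file_name_save() above; this helper is an illustration, not the
# repository's actual implementation.
import os
import numpy as np

def save_with_unused_name(file_name_h, file_name_t, data):
    # file_name_h: path head (folder + glacier ID prefix)
    # file_name_t: file tail (description + '.csv')
    file_name = file_name_h + file_name_t
    version = 1
    # Append a version suffix until we find a name that is still free
    while os.path.exists(file_name):
        version = version + 1
        file_name = file_name_h + "v" + str(version) + "_" + file_name_t
    np.savetxt(file_name, data, delimiter=";")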
@@ -235,10 +232,14 @@ def preload_ensemble_SMB_models():
     path_CV_ensemble = settings.path_cv_ann
     path_CV_ensemble_members = np.asarray(os.listdir(path_CV_ensemble))

+    print("\nTaking CV ensemble models from: " + str(path_CV_ensemble))
+
     # Full model ensemble
     path_ensemble = settings.path_ensemble_ann
     path_ensemble_members = np.asarray(os.listdir(path_ensemble))

+    print("\nTaking full ensemble models from: " + str(path_ensemble))
+
     CV_ensemble_members = np.ndarray(path_CV_ensemble_members.shape, dtype=np.object)
     ensemble_members = np.ndarray(path_ensemble_members.shape, dtype=np.object)

@@ -246,22 +247,22 @@ def preload_ensemble_SMB_models():
     print("\nPreloading CV ensemble SMB models...")
     for path_CV_member in path_CV_ensemble_members:
         # We retrieve the ensemble member ANN model
-        ann_CV_member_model = load_model(path_CV_ensemble + path_CV_member, custom_objects={"r2_keras": r2_keras, "root_mean_squared_error": root_mean_squared_error}, compile=False)
+        ann_CV_member_model = load_model(os.path.join(path_CV_ensemble, path_CV_member), custom_objects={"r2_keras": r2_keras, "root_mean_squared_error": root_mean_squared_error}, compile=False)
 #        CV_ensemble_members.append(ann_CV_member_model)
         CV_ensemble_members[member_idx] = ann_CV_member_model
         print("|", end="", flush=True)
         member_idx = member_idx+1

-    member_idx = 0
-    print("\n\nPreloading ensemble full SMB models...")
-    for path_member in path_ensemble_members:
-        # We retrieve the ensemble member ANN model
-        ann_member_model = load_model(path_ensemble + path_member + '\\ann_glacier_model.h5', custom_objects={"r2_keras": r2_keras, "root_mean_squared_error": root_mean_squared_error}, compile=False)
-#        ensemble_members.append(ann_member_model)
-        ensemble_members[member_idx] = ann_member_model
-        print("|", end="", flush=True)
-        member_idx = member_idx+1
-
+#    member_idx = 0
+#    print("\n\nPreloading ensemble full SMB models...")
+#    for path_member in path_ensemble_members:
+#        # We retrieve the ensemble member ANN model
+#        ann_member_model = load_model(os.path.join(path_ensemble, path_member, 'ann_glacier_model.h5'), custom_objects={"r2_keras": r2_keras, "root_mean_squared_error": root_mean_squared_error}, compile=False)
+##        ensemble_members.append(ann_member_model)
+#        ensemble_members[member_idx] = ann_member_model
+#        print("|", end="", flush=True)
+#        member_idx = member_idx+1
+
     CV_ensemble_members = np.asarray(CV_ensemble_members)
     ensemble_members = np.asarray(ensemble_members)
     print("\n")
@@ -306,16 +307,16 @@ def make_ensemble_simulation(ensemble_SMB_models, x_ann, batch_size, glimsID, gl
     member_idx = 0

     # We compute the quadratic slope difference
-    slope_dist = (training_slopes - ref_slope)**2
+#    slope_dist = (training_slopes - ref_slope)**2

     # Depending if glacier is present in training dataset we use the CV or full model
-    if(np.any(glims_rabatel['GLIMS_ID'] == glimsID.encode('utf-8'))):
+    if(not evolution and np.any(glims_rabatel['GLIMS_ID'] == glimsID.encode('utf-8'))):
         SMB_ensemble_members = ensemble_SMB_models['full']
         print("\nFull ensemble models")
     else:
-       SMB_ensemble_members = ensemble_SMB_models['CV']
-       CV_ensemble = True
-       print("\nCross-validation ensemble models")
+        SMB_ensemble_members = ensemble_SMB_models['CV']
+        CV_ensemble = True
+        print("\nCross-validation ensemble models")

     # We iterate the previously loaded ensemble models
     for ensemble_model in SMB_ensemble_members:
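# A generic sketch of the preload-once / predict-many ensemble pattern used by
# preload_ensemble_SMB_models() and make_ensemble_simulation() above, assuming
# a folder of .h5 Keras models (the folder layout is illustrative).
import os
import numpy as np
from keras.models import load_model

def preload_ensemble(path_ensemble):
    # compile=False skips restoring optimizer/loss state: the models are only
    # used for inference, so this makes loading noticeably faster
    members = []
    for member_file in sorted(os.listdir(path_ensemble)):
        members.append(load_model(os.path.join(path_ensemble, member_file), compile=False))
    return members

def ensemble_mean_prediction(members, x, batch_size=32):
    # The ensemble SMB estimate is the average of the members' predictions
    predictions = [member.predict(x, batch_size=batch_size).flatten() for member in members]
    return np.mean(predictions, axis=0)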
") @@ -531,8 +532,8 @@ def crop_inital_rasters_to_GLIMS(path_glacier_ID_rasters, path_glacier_DEM_raste elif(year_start == 2019): if(glacierID == 3651): # If Tré la Tête glacier, use field data - path_glacier_ID_GLIMS = path_glacier_ID_rasters + "interp_cleaned_masked_newh_25m.tif" - path_glacier_DEM_GLIMS = path_glacier_DEM_rasters + "masked_dem_lidar_25m.tif" + path_glacier_ID_GLIMS = os.path.join(path_glacier_ID_rasters, "interp_cleaned_masked_newh_25m.tif") + path_glacier_DEM_GLIMS = os.path.join(path_glacier_DEM_rasters, "masked_dem_lidar_25m.tif") path_glacier_DEM_2003 = '' path_glacier_ID_2003 = '' else: @@ -591,7 +592,7 @@ def get_slope20(_masked_DEM_current_glacier_u, DEM_sorted_current_glacier_u, gla raster_current_DEM = gdal.Open(path_raster_current_DEM) - path_temp_shapefile = path_glacier_DEM_rasters + "aux_vector_" + str(glacierName) + ".shp" + path_temp_shapefile = os.path.join(path_glacier_DEM_rasters, "aux_vector_" + str(glacierName) + ".shp") flowline_altitudes_full, flowline_coordinates = get_point_values(flowline, raster_current_DEM) if(flowline_altitudes_full.size == 0): print("[ ERROR ] No match between DEM and flowline. Skipping glacier...") @@ -666,7 +667,7 @@ def find_glacier_coordinates(massif_number, zs, aspects, glims_data): def get_SAFRAN_glacier_coordinates(glims_dataset): # We read the first year to get some basic information - dummy_SAFRAN_forcing = Dataset(path_safran_forcings + '84\\FORCING.nc') + dummy_SAFRAN_forcing = Dataset(os.path.join(path_safran_forcings, '84', 'FORCING.nc')) aspects = dummy_SAFRAN_forcing.variables['aspect'][:] zs = dummy_SAFRAN_forcing.variables['ZS'][:] @@ -676,78 +677,11 @@ def get_SAFRAN_glacier_coordinates(glims_dataset): return all_glacier_coordinates -@jit -# Mean daily temperature -def get_mean_temps(datetimes, hourly_data): - ref_day = -9 - daily_data = [] - idx, day_idx = 0, 0 - first = True - for time_hour in datetimes: - current_day = time_hour.astype(object).timetuple().tm_yday - if(current_day == ref_day): - daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]/24.0 - else: - ref_day = current_day - daily_data.append(hourly_data[idx]/24.0) - if(not first): - day_idx = day_idx + 1 - else: - first = False - - idx = idx + 1 - - return np.asarray(daily_data) - -@jit -# Daily precipitations -def get_precips(datetimes, hourly_data): - ref_day = -9 - daily_data = [] - idx, day_idx = 0, 0 - isFirst = True - for time_hour in datetimes: - current_day = time_hour.astype(object).timetuple().tm_yday - if(current_day == ref_day): - daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx] - else: - ref_day = current_day - daily_data.append(hourly_data[idx]) - if(not isFirst): - day_idx = day_idx + 1 - else: - isFirst = False - idx = idx + 1 - return np.asarray(daily_data) - -def get_monthly_temps(daily_data, daily_datetimes): +def compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow, meteo_refs): - d = {'Dates': daily_datetimes, 'Temps': daily_data} - df_datetimes = pd.DataFrame(data=d) - df_datetimes.set_index('Dates', inplace=True) - df_datetimes.index = pd.to_datetime(df_datetimes.index) - df_datetimes = df_datetimes.resample('M').mean() - - monthly_avg_data = df_datetimes.Temps.to_numpy() - - return monthly_avg_data[:12] - -def get_monthly_snow(daily_data, daily_datetimes): - d = {'Dates': daily_datetimes, 'Temps': daily_data} - df_datetimes = pd.DataFrame(data=d) - df_datetimes.set_index('Dates', inplace=True) - df_datetimes.index = pd.to_datetime(df_datetimes.index) - df_datetimes = 
@@ -666,7 +667,7 @@ def find_glacier_coordinates(massif_number, zs, aspects, glims_data):

 def get_SAFRAN_glacier_coordinates(glims_dataset):
     # We read the first year to get some basic information
-    dummy_SAFRAN_forcing = Dataset(path_safran_forcings + '84\\FORCING.nc')
+    dummy_SAFRAN_forcing = Dataset(os.path.join(path_safran_forcings, '84', 'FORCING.nc'))

     aspects = dummy_SAFRAN_forcing.variables['aspect'][:]
     zs = dummy_SAFRAN_forcing.variables['ZS'][:]
@@ -676,78 +677,11 @@ def get_SAFRAN_glacier_coordinates(glims_dataset):
     return all_glacier_coordinates

-@jit
-# Mean daily temperature
-def get_mean_temps(datetimes, hourly_data):
-    ref_day = -9
-    daily_data = []
-    idx, day_idx = 0, 0
-    first = True
-    for time_hour in datetimes:
-        current_day = time_hour.astype(object).timetuple().tm_yday
-        if(current_day == ref_day):
-            daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]/24.0
-        else:
-            ref_day = current_day
-            daily_data.append(hourly_data[idx]/24.0)
-            if(not first):
-                day_idx = day_idx + 1
-            else:
-                first = False
-
-        idx = idx + 1
-
-    return np.asarray(daily_data)
-
-@jit
-# Daily precipitations
-def get_precips(datetimes, hourly_data):
-    ref_day = -9
-    daily_data = []
-    idx, day_idx = 0, 0
-    isFirst = True
-    for time_hour in datetimes:
-        current_day = time_hour.astype(object).timetuple().tm_yday
-        if(current_day == ref_day):
-            daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]
-        else:
-            ref_day = current_day
-            daily_data.append(hourly_data[idx])
-            if(not isFirst):
-                day_idx = day_idx + 1
-            else:
-                isFirst = False
-        idx = idx + 1
-    return np.asarray(daily_data)
-
-def get_monthly_temps(daily_data, daily_datetimes):
+def compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow, meteo_refs):

-    d = {'Dates': daily_datetimes, 'Temps': daily_data}
-    df_datetimes = pd.DataFrame(data=d)
-    df_datetimes.set_index('Dates', inplace=True)
-    df_datetimes.index = pd.to_datetime(df_datetimes.index)
-    df_datetimes = df_datetimes.resample('M').mean()
-
-    monthly_avg_data = df_datetimes.Temps.to_numpy()
-
-    return monthly_avg_data[:12]
-
-def get_monthly_snow(daily_data, daily_datetimes):
-    d = {'Dates': daily_datetimes, 'Temps': daily_data}
-    df_datetimes = pd.DataFrame(data=d)
-    df_datetimes.set_index('Dates', inplace=True)
-    df_datetimes.index = pd.to_datetime(df_datetimes.index)
-    df_datetimes = df_datetimes.resample('M').sum()
-
-    monthly_avg_data = df_datetimes.Temps.to_numpy()
-
-    return monthly_avg_data[:12]
-
-def compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow, meteo_anomalies):
-
-    local_CPDD_anomaly = glacier_CPDD - meteo_anomalies['CPDD']
-    local_w_snow_anomaly = glacier_winter_snow - meteo_anomalies['w_snow']
-    local_s_snow_anomaly = glacier_summer_snow - meteo_anomalies['s_snow']
+    local_CPDD_anomaly = glacier_CPDD - meteo_refs['CPDD']
+    local_w_snow_anomaly = glacier_winter_snow - meteo_refs['w_snow']
+    local_s_snow_anomaly = glacier_summer_snow - meteo_refs['s_snow']

     return local_CPDD_anomaly, local_w_snow_anomaly, local_s_snow_anomaly
@@ -761,11 +695,11 @@ def compute_monthly_anomalies(mon_temps, mon_snow, mon_temp_ref, mon_snow_ref):

 # Fetches the preprocessed SAFRAN daily data
 def get_default_SAFRAN_forcings(safran_start, safran_end):

-    path_temps = path_smb_function_safran +'daily_temps_years_' + str(safran_start) + '-' + str(safran_end) + '.txt'
-    path_snow = path_smb_function_safran +'daily_snow_years_' + str(safran_start) + '-' + str(safran_end) + '.txt'
-    path_rain = path_smb_function_safran +'daily_rain_years_' + str(safran_start) + '-' + str(safran_end) + '.txt'
-    path_dates = path_smb_function_safran +'daily_dates_years_' + str(safran_start) + '-' + str(safran_end) + '.txt'
-    path_zs = path_smb_function_safran +'zs_years' + str(safran_start) + '-' + str(safran_end) + '.txt'
+    path_temps = os.path.join(path_smb_function_safran, 'daily_temps_years_' + str(safran_start) + '-' + str(safran_end) + '.txt')
+    path_snow = os.path.join(path_smb_function_safran, 'daily_snow_years_' + str(safran_start) + '-' + str(safran_end) + '.txt')
+    path_rain = os.path.join(path_smb_function_safran, 'daily_rain_years_' + str(safran_start) + '-' + str(safran_end) + '.txt')
+    path_dates = os.path.join(path_smb_function_safran, 'daily_dates_years_' + str(safran_start) + '-' + str(safran_end) + '.txt')
+    path_zs = os.path.join(path_smb_function_safran, 'zs_years' + str(safran_start) + '-' + str(safran_end) + '.txt')

     if(os.path.exists(path_temps) & os.path.exists(path_snow) & os.path.exists(path_rain) & os.path.exists(path_dates) & os.path.exists(path_zs)):
         print("\nFetching SAFRAN forcings...")
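# The deleted pandas helpers used resample('M') (month-end labels), while the
# xarray rewrite below uses resample(time="1MS") (month-start labels); the
# aggregated values are the same. A small self-contained check on synthetic
# daily data:
import numpy as np
import pandas as pd
import xarray as xr

dates = pd.date_range('2014-08-01', '2015-07-31', freq='D')
temps = np.sin(np.linspace(0.0, 4.0, dates.size))

monthly_pd = pd.Series(temps, index=dates).resample('MS').mean().to_numpy()
monthly_xr = xr.DataArray(temps, coords=[dates], dims=['time']).resample(time='1MS').mean().data
assert np.allclose(monthly_pd, monthly_xr)  # twelve identical monthly means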
@@ -788,126 +722,60 @@

     return daily_meteo_data

-# Adjusts the daily SAFRAN data for each glacier
-def get_adjusted_glacier_SAFRAN_forcings(year, year_start, glacier_mean_altitude, SAFRAN_idx, daily_meteo_data, meteo_anomalies):
+# Adjusts the daily SAFRAN data for each glacier for a specific year
+def get_adjusted_glacier_SAFRAN_forcings(year, year_start, glacier_mean_altitude, SAFRAN_idx, daily_meteo_data, meteo_refs):
+
     # We also need to fetch the previous year since data goes from 1st of August to 31st of July
-    idx = year - year_start +1
+    idx = year - year_start
     glacier_idx = int(SAFRAN_idx)
-    print("Year idx: " + str(idx))
-
-    daily_temps_years = daily_meteo_data['temps']
-    daily_snow_years = daily_meteo_data['snow']
-    daily_rain_years = daily_meteo_data['rain']
-    zs = daily_meteo_data['zs']
-    daily_dates_years = daily_meteo_data['dates']
+    t_lim = 2.0
+#    print("Year idx: " + str(idx))

-    daily_temps_mean_1 = copy.deepcopy(daily_temps_years[idx-1]) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-    daily_temps_mean = copy.deepcopy(daily_temps_years[idx]) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
+#    import pdb; pdb.set_trace()

-    snow_sum_1 = copy.deepcopy(daily_snow_years[idx-1])
-    snow_sum = copy.deepcopy(daily_snow_years[idx])
+    # Retrieve raw meteo data for the current year
+    zs = daily_meteo_data['zs'][idx][0]
+    dates = daily_meteo_data['dates'][idx]

-    rain_sum_1 = copy.deepcopy(daily_rain_years[idx-1])
-    rain_sum = copy.deepcopy(daily_rain_years[idx])
+    safran_tmean_d = xr.DataArray(daily_meteo_data['temps'][idx], coords=[dates, zs], dims=['time', 'zs'])
+    safran_snow_d = xr.DataArray(daily_meteo_data['snow'][idx], coords=[dates, zs], dims=['time', 'zs'])
+    safran_rain_d = xr.DataArray(daily_meteo_data['rain'][idx], coords=[dates, zs], dims=['time', 'zs'])

-    # We get the daily datetimes
-    daily_datetimes_1 = copy.deepcopy(daily_dates_years[idx-1])
-    daily_datetimes = copy.deepcopy(daily_dates_years[idx])
+    # Re-scale temperature at glacier's actual altitude
+    safran_tmean_d_g = copy.deepcopy(safran_tmean_d[:, glacier_idx] + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0)

-    #### We compute the monthly average data
-    # Monthly average temperature
-    glacier_temps_1 = daily_temps_mean_1[:, glacier_idx]
-    glacier_temps = daily_temps_mean[:, glacier_idx]
-    mon_temps_1 = get_monthly_temps(glacier_temps_1, daily_datetimes_1)
-    mon_temps = get_monthly_temps(glacier_temps, daily_datetimes)
-    mon_temp_year = np.append(mon_temps_1[2:], mon_temps[:2])
-
-    # Monthly average snowfall
     # We adjust the snowfall rate at the glacier's altitude
-    glacier_snow_1 = snow_sum_1[:, glacier_idx]
-    glacier_snow = snow_sum[:, glacier_idx]
-    glacier_rain_1 = rain_sum_1[:, glacier_idx]
-    glacier_rain = rain_sum[:, glacier_idx]
-    glacier_snow_1 = np.where(glacier_temps_1 > 0.0, 0.0, glacier_snow_1)
-    glacier_snow_1 = np.where(((glacier_temps_1 < 0.0) & (glacier_snow_1 == 0.0)), glacier_rain_1, glacier_snow_1)
-    glacier_snow = np.where(glacier_temps > 0.0, 0.0, glacier_snow)
-    glacier_snow = np.where(((glacier_temps < 0.0) & (glacier_snow == 0.0)), glacier_rain, glacier_snow)
-
-    mon_snow_1 = get_monthly_snow(glacier_snow_1, daily_datetimes_1)
-    mon_snow = get_monthly_snow(glacier_snow, daily_datetimes)
-    mon_snow_year = np.append(mon_snow_1[2:], mon_snow[:2])
-
-    year_1_offset = 213
-    year_offset = 152
-    temp_year = np.append(daily_temps_mean_1[-year_1_offset:, glacier_idx], daily_temps_mean[:year_offset, glacier_idx])
-    pos_temp_year = np.where(temp_year < 0, 0, temp_year)
-    integ_temp = np.cumsum(pos_temp_year)
-
-    start_div, end_div = 30, 10
-
-    ### Dynamic temperature ablation period
-    start_y_ablation = np.where(integ_temp > integ_temp.max()/start_div)[0]
-#    start_y_ablation = np.where(integ_temp > 30)[0]
-    end_y_ablation = np.where(integ_temp > (integ_temp.max() - integ_temp.max()/end_div))[0]
-
-    start_ablation = start_y_ablation[0] + (daily_temps_mean_1[:,glacier_idx].size - year_1_offset)
-    end_ablation = end_y_ablation[0] - year_1_offset
-    if(start_ablation > 366):
-        start_ablation = 366
-
-    # We get the dynamic indexes for the ablation and accumulation periods for temperature
-    ablation_temp_idx_1 = range(start_ablation, daily_datetimes_1.size)
-    ablation_temp_idx = range(0, end_ablation)
-#    accum_temp_idx_1 = range(end_ablation+1, start_ablation-1)
-
-    # We get the indexes for the ablation and accumulation periods for snowfall
-    # Classic 120-270 ablation period
-    ablation_idx_1 = range(daily_datetimes_1.size-95, daily_datetimes_1.size)
-    ablation_idx = range(0, 55)
-    accum_idx_1 = range(56, daily_datetimes_1.size-96)
-
-    glacier_ablation_temps = np.append(daily_temps_mean_1[ablation_temp_idx_1, glacier_idx], daily_temps_mean[ablation_temp_idx, glacier_idx])
-#    glacier_accumulation_temps = daily_temps_mean_1[accum_temp_idx_1, glacier_idx]
-    dummy_glacier_ablation_temps = np.append(daily_temps_mean_1[ablation_idx_1, glacier_idx], daily_temps_mean[ablation_idx, glacier_idx])
-    dummy_glacier_accumulation_temps = daily_temps_mean_1[accum_idx_1, glacier_idx]
-
-#    glacier_ablation_temps = glacier_ablation_temps + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-#    glacier_accumulation_temps = glacier_accumulation_temps + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-
-    glacier_year_pos_temps = np.where(glacier_ablation_temps < 0, 0, glacier_ablation_temps)
-#    glacier_accum_pos_temps = np.where(glacier_accumulation_temps < 0, 0, glacier_accumulation_temps)
-    dummy_glacier_ablation_pos_temps = np.where(dummy_glacier_ablation_temps < 0, 0, dummy_glacier_ablation_temps)
-    dummy_glacier_accum_pos_temps = np.where(dummy_glacier_accumulation_temps < 0, 0, dummy_glacier_accumulation_temps)
-
-    glacier_accum_snow = snow_sum_1[accum_idx_1, glacier_idx]
-    glacier_accum_rain = rain_sum_1[accum_idx_1, glacier_idx]
+    safran_snow_d_g = copy.deepcopy(safran_snow_d[:, glacier_idx])
+    safran_rain_d_g = copy.deepcopy(safran_rain_d[:, glacier_idx])
+    safran_snow_d_g.data = np.where(safran_tmean_d_g.data > t_lim, 0.0, safran_snow_d_g.data)
+    safran_snow_d_g.data = np.where(safran_tmean_d_g.data < t_lim, safran_snow_d_g.data + safran_rain_d_g.data, safran_snow_d_g.data)

-    glacier_ablation_snow = np.append(snow_sum_1[ablation_idx_1, glacier_idx], snow_sum[ablation_idx, glacier_idx])
-    glacier_ablation_rain = np.append(rain_sum_1[ablation_idx_1, glacier_idx], rain_sum[ablation_idx, glacier_idx])
+    # Monthly data during the current hydrological year
+    safran_tmean_m_g = safran_tmean_d_g.resample(time="1MS").mean().data
+    safran_snow_m_g = safran_snow_d_g.resample(time="1MS").sum().data

-    # We recompute the rain/snow limit with the new adjusted temperatures
-    glacier_accum_snow = np.where(dummy_glacier_accum_pos_temps > 0.0, 0.0, glacier_accum_snow)
-    glacier_accum_snow = np.where(((dummy_glacier_accumulation_temps < 0.0) & (glacier_accum_snow == 0.0)), glacier_accum_rain, glacier_accum_snow)
-    glacier_ablation_snow = np.where(dummy_glacier_ablation_pos_temps > 0.0, 0.0, glacier_ablation_snow)
-    glacier_ablation_snow = np.where(((dummy_glacier_ablation_temps < 0.0) & (glacier_ablation_snow == 0.0)), glacier_ablation_rain, glacier_ablation_snow)
+    # Compute CPDD
+    # Compute dask arrays prior to storage
+    glacier_CPDD = np.sum(np.where(safran_tmean_d_g.data < 0, 0, safran_tmean_d_g.data))

-    # We compute the cumulative yearly CPDD and snowfall
-    glacier_CPDD = np.sum(glacier_year_pos_temps)
-    glacier_winter_snow = np.sum(glacier_accum_snow)
-    glacier_summer_snow = np.sum(glacier_ablation_snow)
+    # Compute snowfall
+    # Compute dask arrays prior to storage
+    glacier_winter_snow = np.sum(safran_snow_d_g.sel(time = slice(str(year-1)+'-10-01', str(year)+'-03-31')).data)
+    glacier_summer_snow = np.sum(safran_snow_d_g.sel(time = slice(str(year)+'-04-01', str(year)+'-07-31')).data)

     # We compute the seasonal anomalies
-    CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly = compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow,
-                                                                                                   meteo_anomalies)
+    CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly = compute_local_anomalies(glacier_CPDD,
+                                                                                                    glacier_winter_snow,
+                                                                                                    glacier_summer_snow,
+                                                                                                    meteo_refs)

     # We compute the monthly anomalies
-    mon_temp_anomaly, mon_snow_anomaly = compute_monthly_anomalies(mon_temp_year, mon_snow_year, meteo_anomalies['mon_temp'], meteo_anomalies['mon_snow'])
-
+    mon_temp_anomaly, mon_snow_anomaly = compute_monthly_anomalies(safran_tmean_m_g, safran_snow_m_g,
+                                                                   meteo_refs['mon_temp'], meteo_refs['mon_snow'])

     season_anomalies_y = {'CPDD': CPDD_LocalAnomaly, 'winter_snow':winter_snow_LocalAnomaly, 'summer_snow': summer_snow_LocalAnomaly}
     monthly_anomalies_y = {'temps': mon_temp_anomaly, 'snow': mon_snow_anomaly}
-
     return season_anomalies_y, monthly_anomalies_y
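# A self-contained sketch of the CPDD and seasonal snowfall computations that
# the rewritten function performs, on synthetic daily data for one
# hydrological year (values are illustrative):
import numpy as np
import pandas as pd
import xarray as xr

year = 2015
dates = pd.date_range(str(year-1) + '-10-01', str(year) + '-09-30', freq='D')
tmean = xr.DataArray(10.0*np.sin(np.linspace(0.0, 2.0*np.pi, dates.size)), coords=[dates], dims=['time'])
snow = xr.DataArray(np.abs(np.sin(np.linspace(0.0, 12.0, dates.size))), coords=[dates], dims=['time'])

# Cumulative positive degree-days: clip negative temperatures to zero, then sum
cpdd = np.sum(np.where(tmean.data < 0, 0, tmean.data))
# Winter (October-March) and summer (April-July) snowfall via time slicing
winter_snow = np.sum(snow.sel(time=slice(str(year-1) + '-10-01', str(year) + '-03-31')).data)
summer_snow = np.sum(snow.sel(time=slice(str(year) + '-04-01', str(year) + '-07-31')).data)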
@@ -925,17 +793,17 @@
     return ADAMONT_idx

-def get_default_ADAMONT_forcings(year_start, year_end, midfolder):
+def get_default_ADAMONT_forcings(year_start, year_end, midfolder, overwrite):

-    path_temps = path_smb_function_adamont + midfolder + 'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt'
-    path_snow = path_smb_function_adamont + midfolder + 'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt'
-    path_rain = path_smb_function_adamont + midfolder + 'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt'
-    path_dates = path_smb_function_adamont + midfolder + 'daily_datetimes_' + str(year_start) + '-' + str(year_end) + '.txt'
-    path_zs = path_smb_function_adamont + midfolder + 'zs_' + str(year_start) + '-' + str(year_end) + '.txt'
-    path_massif = path_smb_function_adamont + midfolder + 'massif_number.txt'
-    path_aspect = path_smb_function_adamont + midfolder + 'aspects.txt'
+    path_temps = os.path.join(path_smb_function_adamont, midfolder, 'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt')
+    path_snow = os.path.join(path_smb_function_adamont, midfolder, 'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt')
+    path_rain = os.path.join(path_smb_function_adamont, midfolder, 'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt')
+    path_dates = os.path.join(path_smb_function_adamont, midfolder, 'daily_datetimes_' + str(year_start) + '-' + str(year_end) + '.txt')
+    path_zs = os.path.join(path_smb_function_adamont, midfolder, 'zs_' + str(year_start) + '-' + str(year_end) + '.txt')
+    path_massif = os.path.join(path_smb_function_adamont, midfolder, 'massif_number.txt')
+    path_aspect = os.path.join(path_smb_function_adamont, midfolder, 'aspects.txt')

-    if(os.path.exists(path_temps) & os.path.exists(path_snow) & os.path.exists(path_rain) & os.path.exists(path_dates) & os.path.exists(path_zs) & os.path.exists(path_massif) & os.path.exists(path_aspect)):
+    if((os.path.exists(path_temps) & os.path.exists(path_snow) & os.path.exists(path_rain) & os.path.exists(path_dates) & os.path.exists(path_zs) & os.path.exists(path_massif) & os.path.exists(path_aspect))):
         print("Fetching ADAMONT forcings...")
         with open(path_temps, 'rb') as temps_f:
             daily_temps_years = np.load(temps_f, allow_pickle=True)
@@ -955,244 +823,129 @@
         daily_meteo_data = {'temps':daily_temps_years, 'snow': daily_snow_years, 'rain': daily_rain_years, 'dates': daily_datetimes, 'zs': zs}
     else:
         # We read all the files
-        print("Re-computing ADAMONT forcings...")
+        print("\nRe-computing ADAMONT forcings...")
+
         forcing_daymean = settings.current_ADAMONT_model_daymean
         forcing_daysum = settings.current_ADAMONT_model_daysum
-        print("Current ADAMONT combination: " + str(forcing_daymean) + "\\n")
-
-        file_forcing_daymean = Dataset(path_adamont_forcings + forcing_daymean)
-        file_forcing_daysum = Dataset(path_adamont_forcings + forcing_daysum)
+        print("\nCurrent ADAMONT combination: " + str(forcing_daymean) + "\n")

-        # lat = file_forcing_daymean.variables['LAT'][:]
-        # lon = file_forcing_daymean.variables['LON'][:]
+        start = time.time()
+        # We load the two ADAMONT files
+        adamont_mean_climate = xr.open_dataset(forcing_daymean)
+        adamont_sum_climate = xr.open_dataset(forcing_daysum)

-        zs = file_forcing_daymean.variables['ZS'][:]
-        # TODO: Change depending on the ADAMONT version
-#        massif_number = file_forcing_daymean.variables['massif_number'][:]
-        massif_number = file_forcing_daymean.variables['MASSIF_NUMBER'][:]
+        # Rename the time coordinates to match the ADAMONT format
+        adamont_mean_climate = adamont_mean_climate.rename({"TIME": "time"})
+        adamont_sum_climate = adamont_sum_climate.rename({"TIME": "time"})
+
+        end = time.time()
+        print("\n-> open ADAMONT dataset processing time: " + str(end - start) + " s")

-        # Aspects commented until they are available for ADAMONT
-#        aspects = file_forcing_daymean.variables['aspect'][:]
+        zs = adamont_mean_climate['ZS'].compute()
+        massif_number = adamont_mean_climate['MASSIF_NUMBER'].compute()
         aspects = np.repeat(-1, len(zs))

-        # Temperatures (from K to C)
-        temps_mean = file_forcing_daymean.variables['Tair'][:] -273.15
-
-        rain_sum = file_forcing_daysum.variables['RAIN'][:]
-#        rain_sum = file_forcing_daysum.variables['Rainf'][:] * 3600
-
-        snow_sum = file_forcing_daysum.variables['SNOW'][:]
-#        snow_sum = file_forcing_daysum.variables['Snowf'][:] * 3600
-
-        times = file_forcing_daysum.variables['TIME'][:]
-#        times = file_forcing_daysum.variables['time'][:]
-        start = np.datetime64('2005-08-01 06:00:00')
-        datetimes = np.array([start + np.timedelta64(np.int32(time), 'h') for time in times])
-        yeartimes = []
-        for h in datetimes:
-            yeartimes.append(h.astype(object).timetuple().tm_year)
-        yeartimes = np.asarray(yeartimes)
-
-        # We also need to fetch the previous year since data goes from 1st of August to 31st of July
         daily_temps_years, daily_snow_years, daily_rain_years, daily_datetimes = [],[],[],[]
-        adamont_year_range = range(year_start-1, year_end+1)
-        for year in adamont_year_range:
-            current_year_idx = np.where(yeartimes == year)
+        for year in range(year_start, year_end+1):
+            print("Hydrological year: " + str(year-1) + "-" + str(year))

-            if(len(current_year_idx) != 0):
-                daily_temps_mean = temps_mean[current_year_idx,:]
-                daily_snow_sum = snow_sum[current_year_idx,:]
-                daily_rain_sum = rain_sum[current_year_idx,:]
-
-                daily_temps_mean = np.asarray(daily_temps_mean[0,:,:])
-
-                if(len(daily_temps_mean) >= 365):
-                    daily_temps_years.append(daily_temps_mean)
-                    daily_snow_sum = np.asarray(daily_snow_sum[0,:,:])
-                    daily_snow_years.append(daily_snow_sum)
-                    daily_rain_sum = np.asarray(daily_rain_sum[0,:,:])
-                    daily_rain_years.append(daily_rain_sum)
-                    daily_datetimes.append(datetimes[current_year_idx])
-                else:
-                    # We change the end year for the forcings who finish at 2098
-                    print("End year at " + str(year-1))
-                    year_end = year-1
-
-        daily_temps_years = np.asarray(daily_temps_years)
-        daily_snow_years = np.asarray(daily_snow_years)
-        daily_rain_years = np.asarray(daily_rain_years)
+            start = time.time()
+            # We load into memory only the current hydrological year to speed things up
+            # Computing the dask arrays in memory makes the subsequent operations faster
+            adamont_tmean_d = (adamont_mean_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['Tair'].resample(time="1D").mean() -273.15).compute()
+            adamont_snow_d = (adamont_sum_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['SNOW'].resample(time="1D").sum()).compute()
+            adamont_rain_d = (adamont_sum_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['RAIN'].resample(time="1D").sum()).compute()

-        zs = np.asarray(zs)
+            # Store daily raw data for future re-processing
+            daily_temps_years.append(adamont_tmean_d.data)
+            daily_snow_years.append(adamont_snow_d.data)
+            daily_rain_years.append(adamont_rain_d.data)
+            daily_datetimes.append(adamont_tmean_d.time.data)

         daily_meteo_data = {'temps':daily_temps_years, 'snow': daily_snow_years, 'rain': daily_rain_years, 'dates': daily_datetimes, 'zs': zs}

         # We create the folder if it's not there
-        if(not os.path.exists(path_smb_function_adamont+midfolder)):
-            os.makedirs(path_smb_function_adamont+midfolder)
+        if(not os.path.exists(os.path.join(path_smb_function_adamont, midfolder))):
+            os.makedirs(os.path.join(path_smb_function_adamont, midfolder))

-        with open(path_smb_function_adamont+midfolder+'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt', 'wb') as dtemp_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt'), 'wb') as dtemp_f:
             np.save(dtemp_f, daily_temps_years)
-        with open(path_smb_function_adamont+midfolder+'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt', 'wb') as dsnow_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt'), 'wb') as dsnow_f:
             np.save(dsnow_f, daily_snow_years)
-        with open(path_smb_function_adamont+midfolder+'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt', 'wb') as drain_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt'), 'wb') as drain_f:
             np.save(drain_f, daily_rain_years)
-        with open(path_smb_function_adamont+midfolder+'daily_datetimes_' + str(year_start) + '-' + str(year_end) + '.txt', 'wb') as ddates_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'daily_datetimes_' + str(year_start) + '-' + str(year_end) + '.txt'), 'wb') as ddates_f:
             np.save(ddates_f, daily_datetimes)
-        with open(path_smb_function_adamont+midfolder+'zs_' + str(year_start) + '-' + str(year_end) + '.txt', 'wb') as dzs_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'zs_' + str(year_start) + '-' + str(year_end) + '.txt'), 'wb') as dzs_f:
             np.save(dzs_f, zs)
-        with open(path_smb_function_adamont+midfolder+'massif_number.txt', 'wb') as massif_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'massif_number.txt'), 'wb') as massif_f:
             np.save(massif_f, massif_number.data)
-        with open(path_smb_function_adamont+midfolder+'aspects.txt', 'wb') as aspects_f:
+        with open(os.path.join(path_smb_function_adamont, midfolder, 'aspects.txt'), 'wb') as aspects_f:
             np.save(aspects_f, aspects.data)

     return daily_meteo_data, massif_number, aspects, year_end
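# Why the rewrite above slices before calling .compute(): xr.open_dataset is
# lazy, so selecting a single hydrological year first keeps memory bounded,
# and computing it afterwards turns the selection into fast in-memory numpy
# arrays. A sketch with a placeholder file name:
import xarray as xr

ds = xr.open_dataset('ADAMONT_daymean.nc')   # placeholder, opened lazily
ds = ds.rename({'TIME': 'time'})
year = 2050
tair_y = (ds.sel(time=slice(str(year-1) + '-10-01', str(year) + '-09-30'))['Tair']
            .resample(time='1D').mean() - 273.15).compute()   # K -> degrees C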

def get_adjusted_glacier_ADAMONT_forcings(year, year_start, glacier_mean_altitude, ADAMONT_idx, daily_meteo_data, meteo_anomalies):
     # We also need to fetch the previous year since data goes from 1st of August to 31st of July
-    idx = year - year_start + 1
+    idx = year - year_start
 #    print("ADAMONT_idx: " + str(ADAMONT_idx))
     glacier_idx = int(ADAMONT_idx)
+    t_lim = 0.0

-    daily_temps_years = daily_meteo_data['temps']
-    daily_snow_years = daily_meteo_data['snow']
-    daily_rain_years = daily_meteo_data['rain']
-    zs = daily_meteo_data['zs']
-    daily_datetimes_years = daily_meteo_data['dates']
-
-    # We convert the temperature to daily accumulated snowfall
-    daily_temps_mean_1 = copy.deepcopy(daily_temps_years[idx-1][:,glacier_idx].flatten()) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-    daily_temps_mean = copy.deepcopy(daily_temps_years[idx][:,glacier_idx].flatten()) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-
-    # We convert the snow to daily accumulated snowfall
-    daily_snow_sum_1 = copy.deepcopy(daily_snow_years[idx-1][:,glacier_idx].flatten())
-    daily_snow_sum = copy.deepcopy(daily_snow_years[idx][:,glacier_idx].flatten())
-
-    daily_rain_sum_1 = copy.deepcopy(daily_rain_years[idx-1][:,glacier_idx].flatten())
-    daily_rain_sum = copy.deepcopy(daily_rain_years[idx][:,glacier_idx].flatten())
-    # We get the daily datetimes
-    daily_datetimes_1 = copy.deepcopy(daily_datetimes_years[idx-1])
-    daily_datetimes = copy.deepcopy(daily_datetimes_years[idx])
-
-    #### We compute the monthly average data
-    # Monthly average temperature
-    mon_temps_1 = get_monthly_temps(daily_temps_mean_1, daily_datetimes_1)
-    mon_temps = get_monthly_temps(daily_temps_mean, daily_datetimes)
-    mon_temp_year = np.append(mon_temps_1[9:], mon_temps[:9])
-
-    # We adjust the snowfall rate at the glacier's altitude
-    glacier_snow_1 = np.where(daily_temps_mean_1 > 0.0, 0.0, daily_snow_sum_1)
-    glacier_snow_1 = np.where(((daily_temps_mean_1 < 2.0) & (glacier_snow_1 == 0.0)), daily_rain_sum_1, glacier_snow_1)
-    glacier_snow = np.where(daily_temps_mean > 0.0, 0.0, daily_snow_sum)
-    glacier_snow = np.where(((daily_temps_mean < 0.0) & (glacier_snow == 0.0)), daily_rain_sum, glacier_snow)
-
-    mon_snow_1 = get_monthly_snow(glacier_snow_1, daily_datetimes_1)
-    mon_snow = get_monthly_snow(glacier_snow, daily_datetimes)
-    mon_snow_year = np.append(mon_snow_1[9:], mon_snow[:9])
-
-    # We get the indexes for the ablation and accumulation periods
-    start_div, end_div = 30, 10
-#    temp_year = np.append(daily_temps_mean_1[-year_1_offset:], daily_temps_mean[:year_offset])
-    pos_temp_year = np.where(daily_temps_mean < 0, 0, daily_temps_mean)
-    pos_temp_year_1 = np.where(daily_temps_mean_1 < 0, 0, daily_temps_mean_1)
-    integ_temp = np.cumsum(pos_temp_year)
-    integ_temp_1 = np.cumsum(pos_temp_year_1)
-
-    if(np.all(integ_temp == 0)):
-        print("-- Year without ablation --")
-        # No ablation during this year
-        end_y_ablation_1 = np.where(integ_temp_1 > (integ_temp_1.max() - integ_temp_1.max()/end_div))[0]
-        if(end_y_ablation_1.size > 0):
-            end_ablation_1 = end_y_ablation_1[0]
-        else:
-            end_ablation_1 = -1
-
-        ablation_temp_idx = []
-        accum_temp_idx_1 = range(end_ablation_1+1, daily_temps_mean_1.size)
-        accum_temp_idx = range(0, daily_temps_mean.size)
-    else:
-        if(np.all(integ_temp_1 == 0)):
-            start_y_ablation = np.where(integ_temp > integ_temp.max()/start_div)[0]
-            end_y_ablation = np.where(integ_temp > (integ_temp.max() - integ_temp.max()/end_div))[0]
-
-#            print("start_y_ablation: " + str(start_y_ablation))
-#            print("end_y_ablation: " + str(end_y_ablation))
-
-            start_ablation = start_y_ablation[0]
-            end_ablation = end_y_ablation[0]
-
-            # We get the indexes for the ablation and accumulation periods
-            #Dynamic ablation period
-            ablation_temp_idx = range(start_ablation, end_ablation+1)
-            accum_temp_idx_1 = range(0, daily_temps_mean_1.size)
-            accum_temp_idx = range(0, start_ablation)
-        else:
-            start_y_ablation = np.where(integ_temp > integ_temp.max()/start_div)[0]
-#            start_y_ablation_1 = np.where(integ_temp_1 > integ_temp_1.max()/start_div)[0]
-#            start_y_ablation = np.where(integ_temp > 30)[0]
-            end_y_ablation = np.where(integ_temp > (integ_temp.max() - integ_temp.max()/end_div))[0]
-            end_y_ablation_1 = np.where(integ_temp_1 > (integ_temp_1.max() - integ_temp_1.max()/end_div))[0]
+    # We create a dataset for the current year
+    adamont_ds = xr.Dataset({'temperature': (['time','space'], daily_meteo_data['temps'][idx]),
+                             'snow': (['time','space'], daily_meteo_data['snow'][idx]),
+                             'rain': (['time','space'], daily_meteo_data['rain'][idx])},
+                            coords={'zs': daily_meteo_data['zs'],
+                                    'time': daily_meteo_data['dates'][idx]})

-#            print("start_y_ablation: " + str(start_y_ablation))
-#            print("end_y_ablation: " + str(end_y_ablation))
-
-            start_ablation = start_y_ablation[0]
-            end_ablation = end_y_ablation[0]
-#            start_ablation_1 = start_y_ablation_1[0]
-            end_ablation_1 = end_y_ablation_1[0]
-
-            # We get the indexes for the ablation and accumulation periods
-            #Dynamic ablation period
-            ablation_temp_idx = range(start_ablation, end_ablation+1)
-            accum_temp_idx_1 = range(end_ablation_1+1, daily_temps_mean_1.size)
-            accum_temp_idx = range(0, start_ablation)
-
-    # Classic 120-270 ablation period for the snow
-    ablation_idx = range(120, 271)
-    accum_idx_1 = range(271, daily_temps_mean_1.size)
-    accum_idx = range(0, 120)
+    # Re-scale temperature at glacier's actual altitude
+    adamont_tmean_d_g = copy.deepcopy(adamont_ds['temperature'][:, glacier_idx] + ((adamont_ds['zs'][glacier_idx].data - glacier_mean_altitude)/1000.0)*6.0)

-    glacier_ablation_temps = daily_temps_mean[ablation_temp_idx]
-    glacier_accumulation_temps = np.append(daily_temps_mean_1[accum_temp_idx_1], daily_temps_mean[accum_temp_idx])
-
-    dummy_glacier_ablation_temps = daily_temps_mean[ablation_idx]
-    dummy_glacier_accumulation_temps = np.append(daily_temps_mean_1[accum_idx_1], daily_temps_mean[accum_idx])
+    # We adjust the snowfall rate at the glacier's altitude
+    adamont_snow_d_g = copy.deepcopy(adamont_ds['snow'][:, glacier_idx])
+    adamont_rain_d_g = copy.deepcopy(adamont_ds['rain'][:, glacier_idx])

-    glacier_year_pos_temps = np.where(glacier_ablation_temps < 0, 0, glacier_ablation_temps)
-    dummy_glacier_ablation_pos_temps = np.where(dummy_glacier_ablation_temps < 0, 0, dummy_glacier_ablation_temps)
-    dummy_glacier_accum_pos_temps = np.where(dummy_glacier_accumulation_temps < 0, 0, dummy_glacier_accumulation_temps)
-
-    glacier_accum_snow = np.append(daily_snow_sum_1[accum_idx_1], daily_snow_sum[accum_idx])
-    glacier_accum_rain = np.append(daily_rain_sum_1[accum_idx_1], daily_rain_sum[accum_idx])
+    adamont_snow_d_g.data = np.where(adamont_tmean_d_g.data > t_lim, 0.0,
+                                     adamont_snow_d_g.data)
+    adamont_snow_d_g.data = np.where((adamont_tmean_d_g.data < t_lim), adamont_snow_d_g.data + adamont_rain_d_g.data,
+                                     adamont_snow_d_g.data)

-    glacier_ablation_snow = daily_snow_sum[ablation_idx]
-    glacier_ablation_rain = daily_rain_sum[ablation_idx]
-    # We recompute the rain/snow limit with the new adjusted temperatures
-    glacier_accum_snow = np.where(dummy_glacier_accum_pos_temps > 0.0, 0.0, glacier_accum_snow)
-    glacier_accum_snow = np.where(((dummy_glacier_accumulation_temps < 0.0) & (glacier_accum_snow == 0.0)), glacier_accum_rain, glacier_accum_snow)
-    glacier_ablation_snow = np.where(dummy_glacier_ablation_pos_temps > 0.0, 0.0, glacier_ablation_snow)
-    glacier_ablation_snow = np.where(((dummy_glacier_ablation_temps < 0.0) & (glacier_ablation_snow == 0.0)), glacier_ablation_rain, glacier_ablation_snow)
+    # Monthly data during the current hydrological year
+    adamont_tmean_m_g = adamont_tmean_d_g.resample(time="1MS").mean().data
+    adamont_snow_m_g = adamont_snow_d_g.resample(time="1MS").sum().data

+    #### /!\ If climate data is not complete for year 2099, skip this year #######
+    if(adamont_tmean_m_g.size != 12):
+        return {'CPDD': []}, {'temps': []}

-#    glacier_ablation_season = end_y_ablation[0] - start_y_ablation[0]
-#    glacier_ablation_season = len(ablation_temp_idx) + len(ablation_temp_idx)
-#    print("Ablation season length: " + str(glacier_ablation_season)
+    # Compute CPDD
+    glacier_CPDD = np.sum(np.where(adamont_tmean_d_g.data < 0, 0,
+                                   adamont_tmean_d_g.data))

-    glacier_CPDD = np.sum(glacier_year_pos_temps)
-    glacier_winter_snow = np.sum(glacier_accum_snow)
-    glacier_summer_snow = np.sum(glacier_ablation_snow)
+    # Compute snowfall
+    glacier_winter_snow = np.sum(adamont_snow_d_g.sel(time = slice(str(year-1)+'-10-01', str(year)+'-03-31')).data)
+    glacier_summer_snow = np.sum(adamont_snow_d_g.sel(time = slice(str(year)+'-04-01', str(year)+'-07-31')).data)

     # Seasonal anomalies
-    CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly = compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow,
+    CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly = compute_local_anomalies(glacier_CPDD,
+                                                                                                    glacier_winter_snow,
+                                                                                                    glacier_summer_snow,
                                                                                                     meteo_anomalies)

     # Monthly anomalies
-    mon_temp_anomaly, mon_snow_anomaly = compute_monthly_anomalies(mon_temp_year, mon_snow_year, meteo_anomalies['mon_temp'], meteo_anomalies['mon_snow'])
+    mon_temp_anomaly, mon_snow_anomaly = compute_monthly_anomalies(adamont_tmean_m_g,
+                                                                   adamont_snow_m_g,
+                                                                   meteo_anomalies['mon_temp'],
+                                                                   meteo_anomalies['mon_snow'])

-    season_anomalies_y = {'CPDD': CPDD_LocalAnomaly, 'winter_snow':winter_snow_LocalAnomaly, 'summer_snow': summer_snow_LocalAnomaly}
-    monthly_anomalies_y = {'temps': mon_temp_anomaly, 'snow': mon_snow_anomaly}
+    season_anomalies_y = {'CPDD': CPDD_LocalAnomaly,
+                          'winter_snow':winter_snow_LocalAnomaly,
+                          'summer_snow': summer_snow_LocalAnomaly}
+    monthly_anomalies_y = {'temps': mon_temp_anomaly,
+                           'snow': mon_snow_anomaly}

 #    print("\nCPDD_LocalAnomaly: " + str(CPDD_LocalAnomaly))
 #    print("winter_snow_LocalAnomaly: " + str(winter_snow_LocalAnomaly))
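# A minimal, runnable sketch of the xr.Dataset construction pattern used in
# the ADAMONT rewrite above: one shared 'time' coordinate, a 'space' dimension
# for the grid points, and the station altitudes attached explicitly to the
# 'space' dimension (shapes and values are synthetic):
import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range('2049-10-01', '2050-09-30', freq='D')
n_points = 5
adamont_ds = xr.Dataset({'temperature': (['time', 'space'], np.zeros((time.size, n_points))),
                         'snow': (['time', 'space'], np.zeros((time.size, n_points))),
                         'rain': (['time', 'space'], np.zeros((time.size, n_points)))},
                        coords={'zs': ('space', np.linspace(1500.0, 3300.0, n_points)),
                                'time': time})
# Per-glacier series are then plain 1-D selections, e.g. adamont_ds['temperature'][:, 2]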
@@ -1285,7 +1038,7 @@ def store_plot(masked_ID_current_glacier, masked_DEM_current_glacier_u, masked_I
 #        plt.show(block=False)

         # We store the plots as images
-        plt.savefig(newpath + "Glacier " + glacierName + "_" + str(year))
+        plt.savefig(os.path.join(newpath, "Glacier " + glacierName + "_" + str(year)))
         plt.close()

         nfigure = nfigure+1
@@ -1295,12 +1048,12 @@ def store_plot(masked_ID_current_glacier, masked_DEM_current_glacier_u, masked_I
     return nfigure, isNotFirst

 def store_rasters(masked_DEM_current_glacier_u, masked_ID_current_glacier_u, midfolder, glacierID, year):
-    path_DEM_raster_year = path_glacier_evolution_DEM_rasters + midfolder + "DEM_Glacier_0" + str(glacierID) + "_" + str(year) + ".tif"
-    if not os.path.exists(path_glacier_evolution_DEM_rasters + midfolder):
-        os.makedirs(path_glacier_evolution_DEM_rasters + midfolder)
-    path_ID_raster_year = path_glacier_evolution_ID_rasters + midfolder + "IceDepth_Glacier_0" + str(glacierID) + "_" + str(year) + ".tif"
-    if not os.path.exists(path_glacier_evolution_ID_rasters + midfolder):
-        os.makedirs(path_glacier_evolution_ID_rasters + midfolder)
+    path_DEM_raster_year = os.path.join(path_glacier_evolution_DEM_rasters, midfolder, "DEM_Glacier_0" + str(glacierID) + "_" + str(year) + ".tif")
+    if not os.path.exists(os.path.join(path_glacier_evolution_DEM_rasters, midfolder)):
+        os.makedirs(os.path.join(path_glacier_evolution_DEM_rasters, midfolder))
+    path_ID_raster_year = os.path.join(path_glacier_evolution_ID_rasters, midfolder, "IceDepth_Glacier_0" + str(glacierID) + "_" + str(year) + ".tif")
+    if not os.path.exists(os.path.join(path_glacier_evolution_ID_rasters, midfolder)):
+        os.makedirs(os.path.join(path_glacier_evolution_ID_rasters, midfolder))
     array2raster(path_DEM_raster_year, r_origin, r_pixelwidth, r_pixelheight, masked_DEM_current_glacier_u)
     array2raster(path_ID_raster_year, r_origin, r_pixelwidth, r_pixelheight, masked_ID_current_glacier_u)
@@ -1341,7 +1094,7 @@ def glacier_evolution(masked_DEM_current_glacier, masked_ID_current_glacier,
     # We shorten the name of the glacier if it's too long
     glacierName = shorten_name(glacierName)

-    newpath = path_glacier_evolution_plots + midfolder + strip_accents(massif) + '\\' + "Glacier " + strip_accents(glacierName) + "\\"
+    newpath = os.path.join(path_glacier_evolution_plots, midfolder, strip_accents(massif), "Glacier " + strip_accents(glacierName))

     path_raster_current_DEM = current_glacier_DEM
@@ -1383,6 +1136,10 @@ def glacier_evolution(masked_DEM_current_glacier, masked_ID_current_glacier,
             season_anomalies_y, monthly_anomalies_y = get_adjusted_glacier_ADAMONT_forcings(year, year_start, masked_DEM_current_glacier_u.compressed().mean(), SAFRAN_idx, daily_meteo_data, meteo_anomalies)

+            ### Check if data is not available for year 2099 ###
+            if(len(monthly_anomalies_y['temps']) == 0):
+                # Skip year
+                break
         #### CREATION OF THE MODEL TEST DATASET ####
         x_lasso, x_ann = create_input_array(season_anomalies_y, monthly_anomalies_y, mean_glacier_alt, max_glacier_alt, slope20, current_glacierArea, lat, lon, aspect)
@@ -1496,9 +1253,9 @@
         store_file(yearly_glacier_slope20, path_glacier_slope20, midfolder, "slope20", glimsID, year_start, year)
         # Melt year (if available)
         if(glacier_melted_flag):
-            if not os.path.exists(path_glacier_melt_years + midfolder):
-                os.makedirs(path_glacier_melt_years + midfolder)
-            file_name_h = path_glacier_melt_years + midfolder + str(glimsID) + '_'
+            if not os.path.exists(os.path.join(path_glacier_melt_years, midfolder)):
+                os.makedirs(os.path.join(path_glacier_melt_years, midfolder))
+            file_name_h = os.path.join(path_glacier_melt_years, midfolder, str(glimsID) + '_')
             file_name_t = '_melt_year.csv'
             glacier_melt_year = np.asarray(glacier_melt_year)
             automatic_file_name_save(file_name_h, file_name_t, glacier_melt_year, 'txt')
@@ -1516,7 +1273,7 @@

-def main(compute, ensemble_SMB_models, overwrite_flag, counter_threshold, thickness_idx):
+def main(compute, ensemble_SMB_models, overwrite_flag, use_cluster, counter_threshold, thickness_idx):

 ##################################################################################
 ##################                 MAIN                     #####################
@@ -1530,29 +1287,32 @@
     # We close all previous plots
     plt.close('all')

-    path_smb = workspace + 'glacier_data\\smb\\'
-    path_smb_function = path_smb + 'smb_function\\'
-    path_glacier_outlines_shapefile = path_glacier_2003_shapefiles + 'GLIMS_glaciers_2003_ID_massif' + '.shp'
+    path_smb = os.path.join(workspace, 'glacier_data', 'smb')
+    path_smb_function = os.path.join(path_smb, 'smb_function')
+    path_glacier_outlines_shapefile = os.path.join(path_glacier_2003_shapefiles, 'GLIMS_glaciers_2003_ID_massif.shp')
+    # SAFRAN climate forcings
     path_ann = settings.path_ann
-    path_safran_forcings = path_smb_function + 'SAFRAN\\'
+    path_safran_forcings = os.path.join(path_smb_function, 'SAFRAN')
+    # Path to be updated with ADAMONT forcings local path
+    path_adamont_forcings = settings.path_adamont

-    if(settings.smb_model_type == 'ann_no_weights'):
-        path_ann_train = path_smb + 'ANN\\LSYGO\\no_weights\\'
-        path_cv_ann = path_ann_train + 'CV\\'
-    elif(settings.smb_model_type == 'ann_weights'):
-        path_ann_train = path_smb + 'ANN\\LSYGO\\weights\\'
-        path_cv_ann = path_ann_train + 'CV\\'
+#    if(settings.smb_model_type == 'ann_no_weights'):
+#        path_ann_train = path_smb + 'ANN\\LSYGO\\no_weights\\'
+#        path_cv_ann = path_ann_train + 'CV\\'
+#    elif(settings.smb_model_type == 'ann_weights'):
+#        path_ann_train = path_smb + 'ANN\\LSYGO\\weights\\'
+#        path_cv_ann = path_ann_train + 'CV\\'

     ### We detect the forcing between SAFRAN or ADAMONT
     forcing = settings.projection_forcing
 #    print("forcing: " + str(forcing))

     # We determine the path depending on the forcing
-    path_smb_function_forcing = path_smb_function + forcing + "\\"
+    path_smb_function_forcing = os.path.join(path_smb_function, forcing)

 #    glims_2015 = genfromtxt(path_glims + 'GLIMS_2015_massif.csv', delimiter=';', skip_header=1, dtype=[('Area', '

         if(glaciers_with_errors.size > 0):
-            np.savetxt(path_glacier_w_errors_current_combination + "glaciers_w_errors_" + str(year_start)+ "_" + str(year_end) + '.csv', glaciers_with_errors, delimiter=";", fmt="%s")
+            np.savetxt(os.path.join(path_glacier_w_errors_current_combination, "glaciers_w_errors_" + str(year_start)+ "_" + str(year_end) + '.csv'), glaciers_with_errors, delimiter=";", fmt="%s")
         if(melted_glaciers.size > 0):
-            np.savetxt(path_melt_years_current_combination + "melted_glaciers_" + str(year_start)+ "_" + str(year_end) + '.csv', melted_glaciers, delimiter=";", fmt="%s")
+            np.savetxt(os.path.join(path_melt_years_current_combination, "melted_glaciers_" + str(year_start)+ "_" + str(year_end) + '.csv'), melted_glaciers, delimiter=";", fmt="%s")
     except IOError:
         print("File currently opened. Please close it to proceed.")
         os.system('pause')
         # We try again
         try:
             if(glaciers_with_errors.size > 0):
-                np.savetxt(path_glacier_evolution + midfolder + "glaciers_w_errors_" + str(year_start)+ "_" + str(year_end) + '.csv', glaciers_with_errors, delimiter=";", fmt="%s")
+                np.savetxt(os.path.join(path_glacier_evolution, midfolder, "glaciers_w_errors_" + str(year_start)+ "_" + str(year_end) + '.csv'), glaciers_with_errors, delimiter=";", fmt="%s")
             if(melted_glaciers.size > 0):
-                np.savetxt(path_glacier_evolution + midfolder + "melted_glaciers_" + str(year_start)+ "_" + str(year_end) + '.csv', melted_glaciers, delimiter=";", fmt="%s")
+                np.savetxt(os.path.join(path_glacier_evolution, midfolder, "melted_glaciers_" + str(year_start)+ "_" + str(year_end) + '.csv'), melted_glaciers, delimiter=";", fmt="%s")
         except IOError:
             print("File still not available. Aborting simulations.")
diff --git a/code/safran_forcings.py b/code/safran_forcings.py
index 8468c82..b6fee63 100644
--- a/code/safran_forcings.py
+++ b/code/safran_forcings.py
@@ -10,7 +10,6 @@
 """
 ## Dependencies: ##
-from netCDF4 import Dataset
 import numpy as np
 from numpy import genfromtxt
 from numba import jit
@@ -19,8 +18,12 @@
 import os
 from difflib import SequenceMatcher
 import math
-import pandas as pd
+import xarray as xr
+#import dask as da
 from pathlib import Path
+import time
+
+import settings

 ### FUNCTIONS ####
@@ -35,6 +38,7 @@ def similar(a, b):
     ratios = np.asarray(ratios)
     return ratios.max(), ratios.argmax()

+@jit
 def find_glacier_idx(glacier_massif, massif_number, altitudes, glacier_altitude, aspects, glacier_aspect):
     massif_altitudes_idx = np.where(massif_number == float(glacier_massif))[0]
     glacier_aspect_idx = np.where(aspects == float(glacier_aspect))[0]
@@ -48,6 +52,7 @@ def find_glacier_idx(glacier_massif, massif_number, altitudes, glacier_altitude,
 def get_SAFRAN_glacier_coordinates(massif_number, zs, aspects, glims_data, glims_rabatel):
     glacier_centroid_altitude = glims_data['MEAN_Pixel']
     GLIMS_IDs = glims_data['GLIMS_ID']
+    RGI_IDs = glims_data['ID']
     glacier_massifs = glims_data['Massif_SAFRAN']
     glacier_names = glims_data['Glacier']
     glacier_aspects = glims_data['Aspect_num']
@@ -59,123 +64,44 @@ def get_SAFRAN_glacier_coordinates(massif_number, zs, aspects, glims_data, glims

     # All glaciers loop
-    for glims_id, glacier_name, glacier_massif, glacier_altitude, glacier_aspect in zip(GLIMS_IDs, glacier_names, glacier_massifs, glacier_centroid_altitude, glacier_aspects):
-        all_glacier_coordinates.append([glacier_name, find_glacier_idx(glacier_massif, massif_number, zs, glacier_altitude, aspects, glacier_aspect), float(glacier_altitude), glims_id, int(glacier_massif)])
+    for glims_id, rgi_id, glacier_name, glacier_massif, glacier_altitude, glacier_aspect in zip(GLIMS_IDs, RGI_IDs, glacier_names, glacier_massifs, glacier_centroid_altitude, glacier_aspects):
+        all_glacier_coordinates.append([glacier_name, find_glacier_idx(glacier_massif, massif_number, zs, glacier_altitude, aspects, glacier_aspect), float(glacier_altitude), glims_id, int(glacier_massif), rgi_id])

     return np.asarray(glacier_SMB_coordinates), np.asarray(all_glacier_coordinates)

-@jit
-# Computes daily temps from hourly data
-def get_mean_temps(datetimes, hourly_data):
-    ref_day = -9
-    daily_data = []
-    idx, day_idx = 0, 0
-    first = True
-    for time_hour in datetimes:
-        current_day = time_hour.astype(object).timetuple().tm_yday
-        if(current_day == ref_day):
-            daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]/24.0
-        else:
-            ref_day = current_day
-            daily_data.append(hourly_data[idx]/24.0)
-            if(not first):
-                day_idx = day_idx + 1
-            else:
-                first = False
-
-        idx = idx + 1
-
-    return np.asarray(daily_data)
-
-@jit
-# Computes daily precipitations from hourly data
-def get_precips(datetimes, hourly_data):
-    ref_day = -9
-    daily_data = []
-    idx, day_idx = 0, 0
-    isFirst = True
-    for time_hour in datetimes:
-        current_day = time_hour.astype(object).timetuple().tm_yday
-        if(current_day == ref_day):
-            daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]
-        else:
-            ref_day = current_day
-            daily_data.append(hourly_data[idx])
-            if(not isFirst):
-                day_idx = day_idx + 1
-            else:
-                isFirst = False
-        idx = idx + 1
-    return np.asarray(daily_data)
-
-@jit
-# Computes monthly temperature from daily data
-def get_monthly_temps(daily_data, daily_datetimes):
-    d = {'Dates': daily_datetimes, 'Temps': daily_data}
-    df_datetimes = pd.DataFrame(data=d)
-    df_datetimes.set_index('Dates', inplace=True)
-    df_datetimes.index = pd.to_datetime(df_datetimes.index)
-    df_datetimes = df_datetimes.resample('M').mean()
-
- monthly_avg_data = df_datetimes.Temps.to_numpy() - - return monthly_avg_data[:12] - - -@jit -# Computes monthly snowfall from daily data -def get_monthly_snow(daily_data, daily_datetimes): - d = {'Dates': daily_datetimes, 'Temps': daily_data} - df_datetimes = pd.DataFrame(data=d) - df_datetimes.set_index('Dates', inplace=True) - df_datetimes.index = pd.to_datetime(df_datetimes.index) - df_datetimes = df_datetimes.resample('M').sum() - - monthly_avg_data = df_datetimes.Temps.to_numpy() - - return monthly_avg_data[:12] - # Computes seasonal meteo anomalies at glacier scale def compute_local_anomalies(idx, glacier_CPDDs, glacier_winter_snow, glacier_summer_snow, - local_CPDD_anomalies, raw_local_CPDD_anomalies, - local_w_snow_anomalies, raw_local_w_snow_anomalies, - local_s_snow_anomalies, raw_local_s_snow_anomalies): - -# import pdb; pdb.set_trace() + local_anomalies, raw_local_anomalies): - # TODO: Test to use 1967-2015 anomalies # The anomalies are always computed with respect to the 1967-2015 mean (the last 49 hydrological years) glacier_CPDDs[idx]['CPDD'] = np.asarray(glacier_CPDDs[idx]['CPDD']) -# glacier_CPDDs_training = glacier_CPDDs[idx]['CPDD'][1::2][-32:] - glacier_CPDDs_training = glacier_CPDDs[idx]['CPDD'][1::2][-49:] + glacier_CPDDs_training = glacier_CPDDs[idx]['CPDD'][-49:] glacier_CPDDs[idx]['Mean'] = glacier_CPDDs_training.mean() glacier_winter_snow[idx]['snow'] = np.asarray(glacier_winter_snow[idx]['snow']) -# glacier_winter_snow_training = glacier_winter_snow[idx]['snow'][1::2][-32:] - glacier_winter_snow_training = glacier_winter_snow[idx]['snow'][1::2][-49:] + glacier_winter_snow_training = glacier_winter_snow[idx]['snow'][-49:] glacier_winter_snow[idx]['Mean'] = glacier_winter_snow_training.mean() glacier_summer_snow[idx]['snow'] = np.asarray(glacier_summer_snow[idx]['snow']) -# glacier_summer_snow_training = glacier_summer_snow[idx]['snow'][1::2][-32:] - glacier_summer_snow_training = glacier_summer_snow[idx]['snow'][1::2][-49:] + glacier_summer_snow_training = glacier_summer_snow[idx]['snow'][-49:] glacier_summer_snow[idx]['Mean'] = glacier_summer_snow_training.mean() - local_CPDD_anomalies.append(copy.deepcopy(glacier_CPDDs[idx])) - local_CPDD_anomalies[-1]['CPDD'][1::2] = glacier_CPDDs[idx]['CPDD'][1::2] - glacier_CPDDs[idx]['Mean'] - raw_local_CPDD_anomalies.append(local_CPDD_anomalies[-1]['CPDD'][1::2]) + local_anomalies['CPDD'].append(copy.deepcopy(glacier_CPDDs[idx])) + local_anomalies['years'].append(copy.deepcopy(glacier_CPDDs[idx]['years'])) + local_anomalies['CPDD'][-1]['CPDD'] = glacier_CPDDs[idx]['CPDD'] - glacier_CPDDs[idx]['Mean'] + raw_local_anomalies['CPDD'].append(local_anomalies['CPDD'][-1]['CPDD']) - local_w_snow_anomalies.append(copy.deepcopy(glacier_winter_snow[idx])) - local_w_snow_anomalies[-1]['snow'][1::2] = glacier_winter_snow[idx]['snow'][1::2] - glacier_winter_snow[idx]['Mean'] - raw_local_w_snow_anomalies.append(local_w_snow_anomalies[-1]['snow'][1::2]) + local_anomalies['w_snow'].append(copy.deepcopy(glacier_winter_snow[idx])) + local_anomalies['w_snow'][-1]['w_snow'] = glacier_winter_snow[idx]['snow'] - glacier_winter_snow[idx]['Mean'] + raw_local_anomalies['w_snow'].append(local_anomalies['w_snow'][-1]['w_snow']) - local_s_snow_anomalies.append(copy.deepcopy(glacier_summer_snow[idx])) + local_anomalies['s_snow'].append(copy.deepcopy(glacier_summer_snow[idx])) + 
local_anomalies['s_snow'][-1]['s_snow'] = glacier_summer_snow[idx]['snow'] - glacier_summer_snow[idx]['Mean'] + raw_local_anomalies['s_snow'].append(local_anomalies['s_snow'][-1]['s_snow']) - return glacier_CPDDs, glacier_winter_snow, glacier_summer_snow, local_CPDD_anomalies, raw_local_CPDD_anomalies, local_w_snow_anomalies, raw_local_w_snow_anomalies, local_s_snow_anomalies, raw_local_s_snow_anomalies + return glacier_CPDDs, glacier_winter_snow, glacier_summer_snow, local_anomalies, raw_local_anomalies # Computes monthly meteo anomalies at glacier scale def compute_monthly_anomalies(idx, glacier_mon_temp, glacier_mon_snow, - local_mon_temp_anomalies, local_mon_snow_anomalies, - _raw_local_mon_temp_anomalies, _raw_local_mon_snow_anomalies): + local_mon_anomalies, raw_local_mon_anomalies): # The monthly meteo anomalies, as well as the seasonal ones, are always computed with respect to the 1967-2015 period mon_range = range(0, 12) @@ -188,24 +114,20 @@ def compute_monthly_anomalies(idx, glacier_mon_temp, glacier_mon_snow, mon_avg_temp = np.asarray(mon_avg_temp) mon_avg_snow = np.asarray(mon_avg_snow) - # TODO: test to use 1967-2015 anomalies -# glacier_mon_temp[idx]['mon_means'].append(mon_avg_temp[-32:].mean()) -# glacier_mon_snow[idx]['mon_means'].append(mon_avg_snow[-32:].mean()) - glacier_mon_temp[idx]['mon_means'].append(mon_avg_temp[-49:].mean()) glacier_mon_snow[idx]['mon_means'].append(mon_avg_snow[-49:].mean()) - local_mon_temp_anomalies.append(copy.deepcopy(glacier_mon_temp[idx])) - local_mon_snow_anomalies.append(copy.deepcopy(glacier_mon_snow[idx])) + local_mon_anomalies['temp'] = np.append(local_mon_anomalies['temp'], copy.deepcopy(glacier_mon_temp[idx])) + local_mon_anomalies['snow'] = np.append(local_mon_anomalies['snow'], copy.deepcopy(glacier_mon_snow[idx])) for mon_idx in mon_range: year_idx = 0 - for glacier_temp, glacier_snow in zip(local_mon_temp_anomalies[-1]['mon_temp'], local_mon_snow_anomalies[-1]['mon_snow']): - local_mon_temp_anomalies[-1]['mon_temp'][year_idx][mon_idx] = local_mon_temp_anomalies[-1]['mon_temp'][year_idx][mon_idx] - local_mon_temp_anomalies[-1]['mon_means'][mon_idx] - local_mon_snow_anomalies[-1]['mon_snow'][year_idx][mon_idx] = local_mon_snow_anomalies[-1]['mon_snow'][year_idx][mon_idx] - local_mon_snow_anomalies[-1]['mon_means'][mon_idx] + for glacier_temp, glacier_snow in zip(local_mon_anomalies['temp'][-1]['mon_temp'], local_mon_anomalies['snow'][-1]['mon_snow']): + local_mon_anomalies['temp'][-1]['mon_temp'][year_idx][mon_idx] = local_mon_anomalies['temp'][-1]['mon_temp'][year_idx][mon_idx] - local_mon_anomalies['temp'][-1]['mon_means'][mon_idx] + local_mon_anomalies['snow'][-1]['mon_snow'][year_idx][mon_idx] = local_mon_anomalies['snow'][-1]['mon_snow'][year_idx][mon_idx] - local_mon_anomalies['snow'][-1]['mon_means'][mon_idx] year_idx = year_idx+1 - return glacier_mon_temp, glacier_mon_snow, local_mon_temp_anomalies, local_mon_snow_anomalies + return glacier_mon_temp, glacier_mon_snow, local_mon_anomalies, raw_local_mon_anomalies @jit @@ -216,41 +138,55 @@ def get_SMB_glaciers(glacier_SMB_coordinates): return np.asarray(smb_glaciers) # Sorts the temperature and snow data in the same order as the GLIMS Rabatel 2003 file -def sort_SMB_rabatel_data(season_meteo_SMB, season_meteo_anomalies_SMB, season_raw_meteo_anomalies_SMB, monthly_meteo_SMB, monthly_meteo_anomalies_SMB, glims_rabatel): +#@jit +def sort_SMB_rabatel_data(i_season_meteo_SMB, local_anomalies_SMB, raw_local_anomalies_SMB, i_monthly_meteo_SMB, local_mon_anomalies_SMB, 
glims_rabatel): glacier_order = [] + # Seasonal + season_meteo_SMB = {'CPDD':[], 'winter_snow':[], 'summer_snow':[]} + season_meteo_anomalies_SMB = {'CPDD':[], 'winter_snow':[], 'summer_snow':[]} + season_raw_meteo_anomalies_SMB = {'CPDD':[], 'winter_snow':[], 'summer_snow':[]} + + # Monthly + monthly_meteo_SMB = {'temp':[], 'snow':[]} + monthly_meteo_anomalies_SMB = {'temp':[], 'snow':[]} + # We capture the glacier order and apply it directly while appending for glims_id in glims_rabatel['GLIMS_ID']: glacier_idx = 0 - for cpdd_smb_glacier in season_meteo_SMB['CPDD']: + for cpdd_smb_glacier in i_season_meteo_SMB['CPDD']: if(glims_id == cpdd_smb_glacier['GLIMS_ID']): + + # Seasonal + season_meteo_SMB['CPDD'].append(i_season_meteo_SMB['CPDD'][glacier_idx]) + season_meteo_SMB['winter_snow'].append(i_season_meteo_SMB['winter_snow'][glacier_idx]) + season_meteo_SMB['summer_snow'].append(i_season_meteo_SMB['summer_snow'][glacier_idx]) + + season_meteo_anomalies_SMB['CPDD'].append(local_anomalies_SMB['CPDD'][glacier_idx]) + season_meteo_anomalies_SMB['winter_snow'].append(local_anomalies_SMB['w_snow'][glacier_idx]) + season_meteo_anomalies_SMB['summer_snow'].append(local_anomalies_SMB['s_snow'][glacier_idx]) + + season_raw_meteo_anomalies_SMB['CPDD'].append(raw_local_anomalies_SMB['CPDD'][glacier_idx]) + season_raw_meteo_anomalies_SMB['winter_snow'].append(raw_local_anomalies_SMB['w_snow'][glacier_idx]) + season_raw_meteo_anomalies_SMB['summer_snow'].append(raw_local_anomalies_SMB['s_snow'][glacier_idx]) + + # Monthly + monthly_meteo_SMB['temp'].append(i_monthly_meteo_SMB['temp'][glacier_idx]) + monthly_meteo_SMB['snow'].append(i_monthly_meteo_SMB['snow'][glacier_idx]) + + monthly_meteo_anomalies_SMB['temp'].append(local_mon_anomalies_SMB['temp'][glacier_idx]) + monthly_meteo_anomalies_SMB['snow'].append(local_mon_anomalies_SMB['snow'][glacier_idx]) + glacier_order.append(glacier_idx) glacier_idx = glacier_idx+1 - # And then we apply it to all the datasets - glacier_order = np.asarray(glacier_order, dtype=np.int32) - season_meteo_SMB['CPDD'] = season_meteo_SMB['CPDD'][glacier_order] - season_meteo_SMB['winter_snow'] = season_meteo_SMB['winter_snow'][glacier_order] - season_meteo_SMB['summer_snow'] = season_meteo_SMB['summer_snow'][glacier_order] - - season_meteo_anomalies_SMB['CPDD'] = season_meteo_anomalies_SMB['CPDD'][glacier_order] - season_meteo_anomalies_SMB['winter_snow'] = season_meteo_anomalies_SMB['winter_snow'][glacier_order] - season_meteo_anomalies_SMB['summer_snow'] = season_meteo_anomalies_SMB['summer_snow'][glacier_order] - - season_raw_meteo_anomalies_SMB['CPDD'] = season_raw_meteo_anomalies_SMB['CPDD'][glacier_order] - season_raw_meteo_anomalies_SMB['winter_snow'] = season_raw_meteo_anomalies_SMB['winter_snow'][glacier_order] - season_raw_meteo_anomalies_SMB['summer_snow'] = season_raw_meteo_anomalies_SMB['summer_snow'][glacier_order] - - monthly_meteo_SMB['temp'] = monthly_meteo_SMB['temp'][glacier_order] - monthly_meteo_SMB['snow'] = monthly_meteo_SMB['snow'][glacier_order] - - monthly_meteo_anomalies_SMB['temp'] = monthly_meteo_anomalies_SMB['temp'][glacier_order] - monthly_meteo_anomalies_SMB['snow'] = monthly_meteo_anomalies_SMB['snow'][glacier_order] - return season_meteo_SMB, season_meteo_anomalies_SMB, season_raw_meteo_anomalies_SMB, monthly_meteo_SMB, monthly_meteo_anomalies_SMB # Interpolates topo data to build the training matrix +@jit def interpolate_extended_glims_variable(variable_name, glims_glacier, glims_2003, glims_1985, glims_1967): # In case there are multiple results, we choose the one with the 
most similar area idx_2003 = np.where(glims_2003['GLIMS_ID'] == glims_glacier['GLIMS_ID'])[0] @@ -284,6 +220,7 @@ def interpolate_extended_glims_variable(variable_name, glims_glacier, glims_2003 return interp_1959_2015 # Interpolates and generates the glacier mean altitudes from the topo inventories +@jit def generate_glacier_altitudes(glacier, glims_1967, glims_1985, glims_2003, glims_2015, glacier_altitudes): glacier_name = glacier[0] glacier_alt = float(glacier[2]) @@ -346,56 +283,74 @@ def main(compute): ###### FILE PATHS ####### # Folders # workspace = 'C:\\Jordi\\PhD\\Python\\' - workspace = str(Path(os.getcwd()).parent) + '\\' - path_safran_forcings = 'C:\\Jordi\\PhD\\Data\\SAFRAN-Nivo-2017\\' - path_smb = workspace + 'glacier_data\\smb\\' - path_smb_function_safran = path_smb + 'smb_function\\SAFRAN\\' - path_glims = workspace + 'glacier_data\\GLIMS\\' + workspace = Path(os.getcwd()).parent + path_safran_forcings = settings.path_safran + path_smb = os.path.join(workspace, 'glacier_data', 'smb') + path_smb_function_safran = os.path.join(path_smb, 'smb_function', 'SAFRAN') + path_glims = os.path.join(workspace, 'glacier_data', 'GLIMS') + + #### Flags ##### + bypass_glacier_data = False + t_lim = 0.0 # year_start = 1984 # year_start = 1967 year_start = 1959 +# year_start = 2014 year_end = 2015 # year_end = 2014 - path_temps = path_smb_function_safran +'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_snow = path_smb_function_safran +'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_rain = path_smb_function_safran +'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_dates = path_smb_function_safran +'daily_dates_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_zs = path_smb_function_safran +'zs_years' + str(year_start) + '-' + str(year_end) + '.txt' + path_temps = os.path.join(path_smb_function_safran, 'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt') + path_snow = os.path.join(path_smb_function_safran, 'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt') + path_rain = os.path.join(path_smb_function_safran, 'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt') + path_dates = os.path.join(path_smb_function_safran, 'daily_dates_years_' + str(year_start) + '-' + str(year_end) + '.txt') + path_zs = os.path.join(path_smb_function_safran, 'zs_years' + str(year_start) + '-' + str(year_end) + '.txt') #### GLIMS data for 1985, 2003 and 2015 - glims_2015 = genfromtxt(path_glims + 'GLIMS_2015.csv', delimiter=';', skip_header=1, dtype=[('Area', ' open SAFRAN dataset processing time: " + str(end - start) + " s") + + for year in year_period: print("Hydrological year: " + str(year-1) + "-" + str(year)) - # We also need to fetch the previous year since data goes from 1st of August to 31st of July - SAFRAN_forcing_1 = Dataset(path_safran_forcings + str(year-1)[-2:] + '\\FORCING.nc') - current_SAFRAN_forcing = Dataset(path_safran_forcings + str(year)[-2:] + '\\FORCING.nc') - - zs = current_SAFRAN_forcing.variables['ZS'][:] - zs_years.append(zs) - - massif_number = current_SAFRAN_forcing.variables['massif_number'][:] - - # Temperatures (from K to C) - temps_mean_1 = SAFRAN_forcing_1.variables['Tair'][:] -273.15 - temps_mean = current_SAFRAN_forcing.variables['Tair'][:] -273.15 + start = time.time() + # We load into memory only the current hydrological year to speed things up + # The year spans two calendar years; its dask arrays are computed in memory so computations are faster + 
safran_tmean_d = (safran_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['Tair'].resample(time="1D").mean() -273.15).compute() + safran_snow_d = (safran_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['Snowf'].resample(time="1D").sum()*3600).compute() + safran_rain_d = (safran_climate.sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30'))['Rainf'].resample(time="1D").sum()*3600).compute() - snow_h_1 = SAFRAN_forcing_1.variables['Snowf'][:]*3600 - snow_h = current_SAFRAN_forcing.variables['Snowf'][:]*3600 + zs = safran_climate['ZS'].sel(time = slice(str(year-1)+'-10-01', str(year)+'-09-30')).compute() - rain_h_1 = SAFRAN_forcing_1.variables['Rainf'][:]*3600 - rain_h = current_SAFRAN_forcing.variables['Rainf'][:]*3600 + # Store daily raw data for future re-processing + daily_temps_years.append(safran_tmean_d.data) + daily_snow_years.append(safran_snow_d.data) + daily_rain_years.append(safran_rain_d.data) + daily_dates_years.append(safran_tmean_d.time.data) + zs_years.append(zs.data) - times_1 = current_SAFRAN_forcing.variables['time'][:] - start_1 = np.datetime64(str(year-1) + '-08-01 06:00:00') - datetimes_1 = np.array([start_1 + np.timedelta64(np.int32(time), 'h') for time in times_1]) - times = current_SAFRAN_forcing.variables['time'][:] - start = np.datetime64(str(year) + '-08-01 06:00:00') - datetimes = np.array([start + np.timedelta64(np.int32(time), 'h') for time in times]) + if(not bypass_glacier_data): - # We convert the temperature to daily accumulated snowfall - daily_temps_mean_1 = get_mean_temps(datetimes_1, temps_mean_1) - daily_temps_mean = get_mean_temps(datetimes, temps_mean) - - # We convert the snow to daily accumulated snowfall - snow_sum_1 = get_precips(datetimes_1, snow_h_1) - snow_sum = get_precips(datetimes, snow_h) - rain_sum_1 = get_precips(datetimes_1, rain_h_1) - rain_sum = get_precips(datetimes, rain_h) - - # We get the daily datetimes - daily_datetimes_1 = datetimes_1[::24] - daily_datetimes = datetimes[::24] - - if(year == year_start): - daily_temps_years.append(daily_temps_mean_1) - daily_snow_years.append(snow_sum_1) - daily_rain_years.append(rain_sum_1) - daily_dates_years.append(daily_datetimes_1) - - daily_temps_years.append(daily_temps_mean) - daily_snow_years.append(snow_sum) - daily_rain_years.append(rain_sum) - daily_dates_years.append(daily_datetimes) - - # We get the indexes for the precipitation ablation and accumulation periods - # Classic 120-270 ablation period - ablation_idx_1 = range(daily_datetimes_1.size-95, daily_datetimes_1.size) - ablation_idx = range(0, 55) - accum_idx_1 = range(56, daily_datetimes_1.size-96) - - year_CPDD, year_w_snow, year_s_snow, year_ablation_season = 0,0,0,0 - year_CPDD_SMB, year_w_snow_SMB, year_s_snow_SMB, year_ablation_season_SMB = 0,0,0,0 - - i, j = 0, 0 - start_div, end_div = 30, 10 - - for glacier_coords, glacier_alt in zip(all_glacier_coordinates, glacier_altitudes): - - # Reset the indexes - glacier_name = glacier_coords[0] - glacier_idx = int(glacier_coords[1]) -# glims_ID = glacier_coords[3] -# glacier_massif = int(glacier_coords[4]) - if(len(glacier_alt) > 1): - glacier_alt_y = glacier_alt[np.where(year == np.asarray(year_period))[0][0]] - else: - glacier_alt_y = glacier_alt - - #### We compute the monthly average data - - # Monthly average temperature - glacier_temps_1 = daily_temps_mean_1[:, glacier_idx] + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0 - glacier_temps = daily_temps_mean[:, glacier_idx] + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0 - 
mon_temps_1 = get_monthly_temps(glacier_temps_1, daily_datetimes_1) - mon_temps = get_monthly_temps(glacier_temps, daily_datetimes) - mon_temp_year = np.append(mon_temps_1[2:], mon_temps[:2]) - - # Monthly average snowfall - # We adjust the snowfall rate at the glacier's altitude - glacier_snow_1 = snow_sum_1[:, glacier_idx] - glacier_snow = snow_sum[:, glacier_idx] - glacier_rain_1 = rain_sum_1[:, glacier_idx] - glacier_rain = rain_sum[:, glacier_idx] - glacier_snow_1 = np.where(glacier_temps_1 > 0.0, 0.0, glacier_snow_1) - glacier_snow_1 = np.where(((glacier_temps_1 < 0.0) & (glacier_snow_1 == 0.0)), glacier_rain_1, glacier_snow_1) - glacier_snow = np.where(glacier_temps > 0.0, 0.0, glacier_snow) - glacier_snow = np.where(((glacier_temps < 0.0) & (glacier_snow == 0.0)), glacier_rain, glacier_snow) - - mon_snow_1 = get_monthly_snow(glacier_snow_1, daily_datetimes_1) - mon_snow = get_monthly_snow(glacier_snow, daily_datetimes) - mon_snow_year = np.append(mon_snow_1[2:], mon_snow[:2]) - - year_1_offset = 213 - year_offset = 152 - - temp_year = np.append(daily_temps_mean_1[-year_1_offset:, glacier_idx], daily_temps_mean[:year_offset, glacier_idx]) + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0 - - pos_temp_year = np.where(temp_year < 0, 0, temp_year) - integ_temp = np.cumsum(pos_temp_year) - - ### Dynamic temperature ablation period - start_y_ablation = np.where(integ_temp > integ_temp.max()/start_div)[0] - # start_y_ablation = np.where(integ_temp > 30)[0] - end_y_ablation = np.where(integ_temp > (integ_temp.max() - integ_temp.max()/end_div))[0] - - start_ablation = start_y_ablation[0] + (daily_temps_mean_1[:,glacier_idx].size - year_1_offset) - end_ablation = end_y_ablation[0] - year_1_offset - - # Plot ablation season evolution -# if(j == 432): -# print("Glacier: " + str(glacier_name)) -# print("Year: " + str(year)) -# -# plt.figure(nfigure) -# plt.title("Ablation season length", fontsize=20) -# plt.ylabel('CPDD', fontsize=15) -# plt.xlabel('Day of year', fontsize=15) -# plt.tick_params(labelsize=15) -# plt.legend() -# plt.plot(integ_temp, color='maroon', linewidth=4) -# plt.axvspan(start_y_ablation[0], end_y_ablation[0], color='lightcoral') -# nfigure = nfigure+1 -# plt.show() - - if(start_ablation > 366): - start_ablation = 366 - # - # We get the indexes for the ablation and accumulation periods - ablation_temp_idx_1 = range(start_ablation, daily_datetimes_1.size) - ablation_temp_idx = range(0, end_ablation) - - # We correct the glacier's temperature depending on the glacier's altitude - # We use deepcopy in order to update the temperature values - glacier_ablation_temps = copy.deepcopy(np.append(daily_temps_mean_1[ablation_temp_idx_1, glacier_idx], daily_temps_mean[ablation_temp_idx, glacier_idx])) + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0 - - dummy_glacier_ablation_temps = copy.deepcopy(np.append(daily_temps_mean_1[ablation_idx_1, glacier_idx], daily_temps_mean[ablation_idx, glacier_idx]) + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0) - dummy_glacier_accumulation_temps = copy.deepcopy(daily_temps_mean_1[accum_idx_1, glacier_idx] + ((zs[glacier_idx] - glacier_alt_y)/1000.0)*6.0) - - glacier_year_pos_temps = np.where(glacier_ablation_temps < 0, 0, glacier_ablation_temps) - dummy_glacier_ablation_pos_temps = np.where(dummy_glacier_ablation_temps < 0, 0, dummy_glacier_ablation_temps) - dummy_glacier_accum_pos_temps = np.where(dummy_glacier_accumulation_temps < 0, 0, dummy_glacier_accumulation_temps) - - glacier_accum_snow = snow_sum_1[accum_idx_1, glacier_idx] - 
glacier_accum_rain = rain_sum_1[accum_idx_1, glacier_idx] + # Initialize year and glacier indexes + i, j = 0, 0 - glacier_ablation_snow = np.append(snow_sum_1[ablation_idx_1, glacier_idx], snow_sum[ablation_idx, glacier_idx]) - glacier_ablation_rain = np.append(rain_sum_1[ablation_idx_1, glacier_idx], rain_sum[ablation_idx, glacier_idx]) - - # We recompute the rain/snow limit with the new adjusted temperatures - glacier_accum_snow = np.where(dummy_glacier_accum_pos_temps > 0.0, 0.0, glacier_accum_snow) - glacier_accum_snow = np.where(((dummy_glacier_accumulation_temps < 0.0) & (glacier_accum_snow == 0.0)), glacier_accum_rain, glacier_accum_snow) - glacier_ablation_snow = np.where(dummy_glacier_ablation_pos_temps > 0.0, 0.0, glacier_ablation_snow) - glacier_ablation_snow = np.where(((dummy_glacier_ablation_temps < 0.0) & (glacier_ablation_snow == 0.0)), glacier_ablation_rain, glacier_ablation_snow) - - glacier_ablation_season = len(ablation_idx_1) + len(ablation_idx) - year_ablation_season = year_ablation_season + glacier_ablation_season - - glacier_CPDD = np.sum(glacier_year_pos_temps) - glacier_CPDDs_all[j]['CPDD'].append(year) - glacier_CPDDs_all[j]['CPDD'].append(glacier_CPDD) - year_CPDD = year_CPDD+glacier_CPDD - - glacier_year_accum_snow = np.sum(glacier_accum_snow) - glacier_year_ablation_snow = np.sum(glacier_ablation_snow) - glacier_winter_snow_all[j]['snow'].append(year) - glacier_winter_snow_all[j]['snow'].append(glacier_year_accum_snow) - year_w_snow = year_w_snow + glacier_year_accum_snow - glacier_summer_snow_all[j]['snow'].append(year) - glacier_summer_snow_all[j]['snow'].append(glacier_year_ablation_snow) - year_s_snow = year_s_snow + glacier_year_ablation_snow - - glacier_mon_temp_all[j]['years'].append(year) - glacier_mon_temp_all[j]['mon_temp'].append(mon_temp_year) - glacier_mon_snow_all[j]['mon_snow'].append(mon_snow_year) - - # Now we store the data for the sub-dataset of glaciers with SMB data - if(np.any(smb_glaciers == glacier_name)): - glacier_CPDDs_SMB[i]['CPDD'].append(year) - glacier_CPDDs_SMB[i]['CPDD'].append(glacier_CPDD) - year_CPDD_SMB = year_CPDD_SMB+glacier_CPDD - glacier_winter_snow_SMB[i]['snow'].append(year) - glacier_winter_snow_SMB[i]['snow'].append(glacier_year_accum_snow) - year_w_snow_SMB = year_w_snow_SMB + glacier_year_accum_snow - glacier_summer_snow_SMB[i]['snow'].append(year) - glacier_summer_snow_SMB[i]['snow'].append(glacier_year_ablation_snow) - year_s_snow_SMB = year_s_snow_SMB + glacier_year_ablation_snow - year_ablation_season_SMB = year_ablation_season_SMB + glacier_ablation_season + for glacier_coords, glacier_alt in zip(all_glacier_coordinates, glacier_altitudes): + + # Reset the indexes + glacier_name = glacier_coords[0] + glacier_idx = int(glacier_coords[1]) + # glims_ID = glacier_coords[3] + # glacier_massif = int(glacier_coords[4]) - glacier_mon_temp_SMB[i]['years'].append(year) - glacier_mon_temp_SMB[i]['mon_temp'].append(mon_temp_year) - glacier_mon_snow_SMB[i]['mon_snow'].append(mon_snow_year) + if(len(glacier_alt) > 1): + glacier_alt_y = glacier_alt[np.where(year == np.asarray(year_period))[0][0]] + else: + glacier_alt_y = glacier_alt[0] + # Re-scale temperature at glacier's actual altitude + safran_tmean_d_g = copy.deepcopy(safran_tmean_d[:, glacier_idx] + ((zs[0,glacier_idx].data - glacier_alt_y)/1000.0)*6.0) - # If we reach the end of the time period, we compute the local anomalies and means - if(year == year_period[-1]): - glacier_CPDDs_all, glacier_winter_snow_all, glacier_summer_snow_all, CPDD_LocalAnomaly_all, 
raw_CPDD_LocalAnomaly_all, winter_snow_LocalAnomaly_all, winter_raw_snow_LocalAnomaly_all, summer_snow_LocalAnomaly_all, summer_raw_snow_LocalAnomaly_all = compute_local_anomalies(j, glacier_CPDDs_all, glacier_winter_snow_all, glacier_summer_snow_all, - local_CPDD_anomalies, raw_local_CPDD_anomalies, - local_w_snow_anomalies, raw_local_w_snow_anomalies, - local_s_snow_anomalies, raw_local_s_snow_anomalies) + # We adjust the snowfall rate at the glacier's altitude + safran_snow_d_g = copy.deepcopy(safran_snow_d[:, glacier_idx]) + safran_rain_d_g = copy.deepcopy(safran_rain_d[:, glacier_idx]) + safran_snow_d_g.data = np.where(safran_tmean_d_g.data > t_lim, 0.0, safran_snow_d_g.data) + safran_snow_d_g.data = np.where(safran_tmean_d_g.data < t_lim, safran_snow_d_g.data + safran_rain_d_g.data, safran_snow_d_g.data) - glacier_mon_temp_all, glacier_mon_snow_all, local_mon_temp_anomalies, local_mon_snow_anomalies = compute_monthly_anomalies(j, glacier_mon_temp_all, glacier_mon_snow_all, - local_mon_temp_anomalies, local_mon_snow_anomalies, - raw_local_mon_temp_anomalies, raw_local_mon_snow_anomalies) + # Monthly data during the current hydrological year + # Compute dask arrays prior to storage + safran_tmean_m_g = safran_tmean_d_g.resample(time="1MS").mean().data + safran_snow_m_g = safran_snow_d_g.resample(time="1MS").sum().data + # Compute CPDD + # Compute dask arrays prior to storage + glacier_CPDD = np.sum(np.where(safran_tmean_d_g.data < 0, 0, safran_tmean_d_g.data)) + glacier_CPDDs_all[j]['years'].append(year) + glacier_CPDDs_all[j]['CPDD'].append(glacier_CPDD) + # Compute snowfall + # Compute dask arrays prior to storage + glacier_year_accum_snow = np.sum(safran_snow_d_g.sel(time = slice(str(year-1)+'-10-01', str(year)+'-03-31')).data) + glacier_year_ablation_snow = np.sum(safran_snow_d_g.sel(time = slice(str(year)+'-04-01', str(year)+'-07-31')).data) - # Glaciers with SMB data - if(np.any(smb_glaciers == glacier_name)): - glacier_CPDDs_SMB, glacier_winter_snow_SMB, glacier_summer_snow_SMB, CPDD_SMB_LocalAnomaly, raw_CPDD_SMB_LocalAnomaly, winter_snow_SMB_LocalAnomaly, raw_winter_snow_SMB_LocalAnomaly, summer_snow_SMB_LocalAnomaly, raw_summer_snow_SMB_LocalAnomaly = compute_local_anomalies(i, glacier_CPDDs_SMB, glacier_winter_snow_SMB, glacier_summer_snow_SMB, - local_CPDD_anomalies_SMB, raw_local_CPDD_anomalies_SMB, - local_w_snow_anomalies_SMB, raw_local_w_snow_anomalies_SMB, - local_s_snow_anomalies_SMB, raw_local_s_snow_anomalies_SMB) - glacier_mon_temp_SMB, glacier_mon_snow_SMB, local_mon_temp_anomalies_SMB, local_mon_snow_anomalies_SMB = compute_monthly_anomalies(i, glacier_mon_temp_SMB, glacier_mon_snow_SMB, - local_mon_temp_anomalies_SMB, local_mon_snow_anomalies_SMB, - raw_local_mon_temp_anomalies_SMB, raw_local_mon_snow_anomalies_SMB) + glacier_winter_snow_all[j]['years'].append(year) + glacier_winter_snow_all[j]['snow'].append(glacier_year_accum_snow) + glacier_summer_snow_all[j]['years'].append(year) + glacier_summer_snow_all[j]['snow'].append(glacier_year_ablation_snow) + glacier_mon_temp_all[j]['years'].append(year) + glacier_mon_temp_all[j]['mon_temp'].append(safran_tmean_m_g) + glacier_mon_snow_all[j]['mon_snow'].append(safran_snow_m_g) + # Now we store the data for the sub-dataset of glaciers with SMB data + if(np.any(smb_glaciers == glacier_name)): + glacier_CPDDs_SMB[i]['years'].append(year) + glacier_CPDDs_SMB[i]['CPDD'].append(glacier_CPDD) + + glacier_winter_snow_SMB[i]['years'].append(year) + glacier_winter_snow_SMB[i]['snow'].append(glacier_year_accum_snow) + + 
glacier_summer_snow_SMB[i]['years'].append(year) + glacier_summer_snow_SMB[i]['snow'].append(glacier_year_ablation_snow) + + glacier_mon_temp_SMB[i]['years'].append(year) + glacier_mon_temp_SMB[i]['mon_temp'].append(safran_tmean_m_g) + glacier_mon_snow_SMB[i]['mon_snow'].append(safran_snow_m_g) + + # If we reach the end of the time period, we compute the local anomalies and means + if(year == year_period[-1]): + + glacier_CPDDs_all, glacier_winter_snow_all, glacier_summer_snow_all, local_anomalies, raw_local_anomalies = compute_local_anomalies(j, glacier_CPDDs_all, + glacier_winter_snow_all, + glacier_summer_snow_all, + local_anomalies, + raw_local_anomalies) + + glacier_mon_temp_all, glacier_mon_snow_all, local_mon_anomalies, raw_local_mon_anomalies = compute_monthly_anomalies(j, glacier_mon_temp_all, + glacier_mon_snow_all, + local_mon_anomalies, + raw_local_mon_anomalies) + + + + # Glaciers with SMB data + if(np.any(smb_glaciers == glacier_name)): + glacier_CPDDs_SMB, glacier_winter_snow_SMB, glacier_summer_snow_SMB, local_anomalies_SMB, raw_local_anomalies_SMB = compute_local_anomalies(i, glacier_CPDDs_SMB, + glacier_winter_snow_SMB, + glacier_summer_snow_SMB, + local_anomalies_SMB, + raw_local_anomalies_SMB) + + glacier_mon_temp_SMB, glacier_mon_snow_SMB, local_mon_anomalies_SMB, raw_local_mon_anomalies_SMB = compute_monthly_anomalies(i, glacier_mon_temp_SMB, + glacier_mon_snow_SMB, + local_mon_anomalies_SMB, + raw_local_mon_anomalies_SMB) + + ### End of glacier loop ### - - - # We iterate the independent indexes - j = j+1 - if(np.any(smb_glaciers == glacier_name)): - i = i+1 - ### End of years loop ### - - raw_yearly_mean_CPDD.append(year_CPDD/all_glacier_coordinates.shape[0]) - raw_yearly_mean_CPDD_SMB.append(year_CPDD_SMB/glacier_SMB_coordinates.shape[0]) - raw_yearly_mean_winter_snow.append(year_w_snow/all_glacier_coordinates.shape[0]) - raw_yearly_mean_winter_snow_SMB.append(year_w_snow_SMB/glacier_SMB_coordinates.shape[0]) - raw_yearly_mean_summer_snow.append(year_s_snow/all_glacier_coordinates.shape[0]) - raw_yearly_mean_summer_snow_SMB.append(year_s_snow_SMB/glacier_SMB_coordinates.shape[0]) - raw_yearly_mean_ablation_season.append(year_ablation_season/all_glacier_coordinates.shape[0]) - raw_yearly_mean_ablation_season_SMB.append(year_ablation_season_SMB/glacier_SMB_coordinates.shape[0]) + + # We iterate the independent indexes + j = j+1 + if(np.any(smb_glaciers == glacier_name)): + i = i+1 + + end = time.time() + print("-> processing time: " + str(end - start) + " s") - # We combine the meteo SMB dataset forcings - # Seasonal - season_meteo_SMB = {'CPDD':np.asarray(glacier_CPDDs_SMB), 'winter_snow':np.asarray(glacier_winter_snow_SMB), 'summer_snow':np.asarray(glacier_summer_snow_SMB)} - season_meteo_anomalies_SMB = {'CPDD':np.asarray(CPDD_SMB_LocalAnomaly), 'winter_snow':np.asarray(winter_snow_SMB_LocalAnomaly), 'summer_snow':np.asarray(summer_snow_SMB_LocalAnomaly)} - season_raw_meteo_anomalies_SMB = {'CPDD':np.asarray(raw_CPDD_SMB_LocalAnomaly), 'winter_snow':np.asarray(raw_winter_snow_SMB_LocalAnomaly), 'summer_snow':np.asarray(raw_summer_snow_SMB_LocalAnomaly)} - # Monthly - monthly_meteo_SMB = {'temp':np.asarray(glacier_mon_temp_SMB), 'snow':np.asarray(glacier_mon_snow_SMB)} - monthly_meteo_anomalies_SMB = {'temp':np.asarray(local_mon_temp_anomalies_SMB), 'snow':np.asarray(local_mon_snow_anomalies_SMB)} + ### End of years loop ### - # We sort the SMB meteo forcings data to fit the order to the GLIMS Rabatel file - season_meteo_SMB, season_meteo_anomalies_SMB, 
season_raw_meteo_anomalies_SMB, monthly_meteo_SMB, monthly_meteo_anomalies_SMB = sort_SMB_rabatel_data(season_meteo_SMB, - season_meteo_anomalies_SMB, - season_raw_meteo_anomalies_SMB, - monthly_meteo_SMB, - monthly_meteo_anomalies_SMB, - glims_rabatel) - # We combine the full meteo dataset forcings for all glaciers - # Seasonal - season_meteo = {'CPDD':np.asarray(glacier_CPDDs_all), 'winter_snow':np.asarray(glacier_winter_snow_all), 'summer_snow':np.asarray(glacier_summer_snow_all)} - season_meteo_anomalies = {'CPDD':np.asarray(CPDD_LocalAnomaly_all), 'winter_snow':np.asarray(winter_snow_LocalAnomaly_all), 'summer_snow':np.asarray(summer_snow_LocalAnomaly_all)} - season_raw_meteo_anomalies = {'CPDD':np.asarray(raw_CPDD_LocalAnomaly_all), 'winter_snow':np.asarray(winter_raw_snow_LocalAnomaly_all), 'summer_snow':np.asarray(summer_raw_snow_LocalAnomaly_all)} - # Monthly - monthly_meteo = {'temp':np.asarray(glacier_mon_temp_all), 'snow':np.asarray(glacier_mon_snow_all)} - monthly_meteo_anomalies = {'temp':np.asarray(local_mon_temp_anomalies), 'snow':np.asarray(local_mon_snow_anomalies)} - + if(not bypass_glacier_data): + # We combine the meteo SMB dataset forcings + # Seasonal + season_meteo_SMB = {'CPDD':glacier_CPDDs_SMB, 'winter_snow':glacier_winter_snow_SMB, 'summer_snow':glacier_summer_snow_SMB} + + # Monthly + monthly_meteo_SMB = {'temp':glacier_mon_temp_SMB, 'snow':glacier_mon_snow_SMB} + + # We sort the SMB meteo forcings data to fit the order to the GLIMS Rabatel file + season_meteo_SMB, season_meteo_anomalies_SMB, season_raw_meteo_anomalies_SMB, monthly_meteo_SMB, monthly_meteo_anomalies_SMB = sort_SMB_rabatel_data(season_meteo_SMB, + local_anomalies_SMB, + raw_local_anomalies_SMB, + monthly_meteo_SMB, + local_mon_anomalies_SMB, + glims_rabatel) + # We combine the full meteo dataset forcings for all glaciers + # Seasonal + season_meteo = {'CPDD':glacier_CPDDs_all, 'winter_snow':glacier_winter_snow_all, 'summer_snow':glacier_summer_snow_all} + season_meteo_anomalies = {'CPDD':local_anomalies['CPDD'], 'winter_snow':local_anomalies['w_snow'], 'summer_snow':local_anomalies['s_snow']} + season_raw_meteo_anomalies = {'CPDD':raw_local_anomalies['CPDD'], 'winter_snow':raw_local_anomalies['w_snow'], 'summer_snow':raw_local_anomalies['s_snow']} + # Monthly + monthly_meteo = {'temp':glacier_mon_temp_all, 'snow':glacier_mon_snow_all} + monthly_meteo_anomalies = {'temp':local_mon_anomalies['temp'], 'snow':local_mon_anomalies['snow']} + + # If the forcing folder does not exist, we create it + if not os.path.exists(path_smb_function_safran): + # We create a new folder in order to store the meteo forcings + os.makedirs(path_smb_function_safran) + + # We store the compacted seasonal and monthly meteo forcings + # Glaciers with SMB data (machine learning model training) + with open(os.path.join(path_smb_function_safran, 'season_meteo_SMB.txt'), 'wb') as season_f: + np.save(season_f, season_meteo_SMB) + with open(os.path.join(path_smb_function_safran, 'season_meteo_anomalies_SMB.txt'), 'wb') as season_a_f: + np.save(season_a_f, season_meteo_anomalies_SMB) + with open(os.path.join(path_smb_function_safran, 'season_raw_meteo_anomalies_SMB.txt'), 'wb') as season_raw_a_f: + np.save(season_raw_a_f, season_raw_meteo_anomalies_SMB) + with open(os.path.join(path_smb_function_safran, 'monthly_meteo_SMB.txt'), 'wb') as mon_f: + np.save(mon_f, monthly_meteo_SMB) + with open(os.path.join(path_smb_function_safran, 'monthly_meteo_anomalies_SMB.txt'), 'wb') as mon_a_f: + np.save(mon_a_f, monthly_meteo_anomalies_SMB) + 
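+ # Note: np.save pickles these forcing dictionaries (they are not plain arrays), so reading any of these .txt files back requires allow_pickle=True plus .item() to recover the dict. A minimal sketch for the file stored above: + # with open(os.path.join(path_smb_function_safran, 'season_meteo_SMB.txt'), 'rb') as season_f: + # season_meteo_SMB = np.load(season_f, allow_pickle=True).item() + 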
+ # All glaciers + with open(os.path.join(path_smb_function_safran, 'season_meteo.txt'), 'wb') as season_f: + np.save(season_f, season_meteo) + with open(os.path.join(path_smb_function_safran, 'season_meteo_anomalies.txt'), 'wb') as season_a_f: + np.save(season_a_f, season_meteo_anomalies) + with open(os.path.join(path_smb_function_safran, 'season_raw_meteo_anomalies.txt'), 'wb') as season_raw_a_f: + np.save(season_raw_a_f, season_raw_meteo_anomalies) + with open(os.path.join(path_smb_function_safran, 'monthly_meteo.txt'), 'wb') as mon_f: + np.save(mon_f, monthly_meteo) + with open(os.path.join(path_smb_function_safran, 'monthly_meteo_anomalies.txt'), 'wb') as mon_a_f: + np.save(mon_a_f, monthly_meteo_anomalies) + # We store the base SAFRAN data for future runs with open(path_temps, 'wb') as dtemp_f: @@ -766,41 +649,7 @@ def main(compute): np.save(ddates_f, daily_dates_years) with open(path_zs, 'wb') as dzs_f: np.save(dzs_f, zs_years) - - # If the forcing folder is not created we create it - if not os.path.exists(path_smb_function_safran): - # We create a new folder in order to store the raster plots - os.makedirs(path_smb_function_safran) - - # We store the compacted seasonal and monthly meteo forcings - # Glaciers with SMB data (machine learning model training) - with open(path_smb_function_safran+'season_meteo_SMB.txt', 'wb') as season_f: - np.save(season_f, season_meteo_SMB) - with open(path_smb_function_safran+'season_meteo_anomalies_SMB.txt', 'wb') as season_a_f: - np.save(season_a_f, season_meteo_anomalies_SMB) - with open(path_smb_function_safran+'season_raw_meteo_anomalies_SMB.txt', 'wb') as season_raw_a_f: - np.save(season_raw_a_f, season_raw_meteo_anomalies_SMB) - with open(path_smb_function_safran+'monthly_meteo_SMB.txt', 'wb') as mon_f: - np.save(mon_f, monthly_meteo_SMB) - with open(path_smb_function_safran+'monthly_meteo_anomalies_SMB.txt', 'wb') as mon_a_f: - np.save(mon_a_f, monthly_meteo_anomalies_SMB) - # All glaciers - with open(path_smb_function_safran+'season_meteo.txt', 'wb') as season_f: - np.save(season_f, season_meteo) - with open(path_smb_function_safran+'season_meteo_anomalies.txt', 'wb') as season_a_f: - np.save(season_a_f, season_meteo_anomalies) - with open(path_smb_function_safran+'season_raw_meteo_anomalies.txt', 'wb') as season_raw_a_f: - np.save(season_raw_a_f, season_raw_meteo_anomalies) - with open(path_smb_function_safran+'monthly_meteo.txt', 'wb') as mon_f: - np.save(mon_f, monthly_meteo) - with open(path_smb_function_safran+'monthly_meteo_anomalies.txt', 'wb') as mon_a_f: - np.save(mon_a_f, monthly_meteo_anomalies) - - # Ablation season length - with open(path_smb_function_safran+'raw_yearly_mean_ablation_season.txt', 'wb') as rym_as_f: - np.save(rym_as_f, raw_yearly_mean_ablation_season_SMB) - else: print("Skipping...") ### End of main function ### \ No newline at end of file diff --git a/code/settings.py b/code/settings.py index 4d07808..b5a7058 100644 --- a/code/settings.py +++ b/code/settings.py @@ -21,25 +21,36 @@ from pathlib import Path -workspace = str(Path(os.getcwd()).parent) + '\\' -path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\treated\\' +workspace = str(Path(os.getcwd()).parent) + #path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\normal\\' #path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\INERIS\\' #path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\HIRHAM5\\' #path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\projections\\' 
#path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\FORCING_ADAMONT_IGE_BERGER\\subset_AGU\\projections\\' -path_smb = workspace + 'glacier_data\\smb\\' +path_smb = os.path.join(workspace, 'glacier_data', 'smb') -def init(hist_forcing, proj_forcing, simu_type, smb_model): +def init(hist_forcing, proj_forcing, simu_type, smb_model, cluster): print("Applying settings...") + global ADAMONT_proj_filepaths + global path_safran + global path_adamont + global use_cluster + use_cluster = cluster + if(use_cluster): + path_adamont = '/bettik/jordibn/python/Data/ADAMONT/' + path_safran = '/bettik/jordibn/python/Data/SAFRAN/' + else: + path_adamont = 'C:\\Jordi\\PhD\\Data\\ADAMONT\\treated\\' + path_safran = 'C:\\Jordi\\PhD\\Data\\SAFRAN-Nivo-2017\\' + global historical_forcing historical_forcing = hist_forcing global projection_forcing projection_forcing = proj_forcing global simulation_type simulation_type = simu_type - global ADAMONT_proj_filepaths ADAMONT_proj_filepaths = np.asarray(os.listdir(path_adamont)) global current_ADAMONT_combination @@ -48,16 +59,20 @@ def init(hist_forcing, proj_forcing, simu_type, smb_model): global path_ensemble_ann global smb_model_type if(smb_model == 'ann_no_weights'): - path_ann = path_smb + 'ANN\\LSYGO\\' + if(simulation_type == 'historical'): + path_ann = os.path.join(path_smb, 'ANN', 'LSYGO_soft') + elif(simulation_type == 'future'): + path_ann = os.path.join(path_smb, 'ANN', 'LSYGO_hard') +# path_ann = os.path.join(path_smb, 'ANN', 'LSYGO') # path_ann = path_smb + 'ANN\\LOGO\\' # path_ann = path_smb + 'ANN\\LOYO\\' - path_cv_ann = path_ann + 'CV\\' - path_ensemble_ann = path_ann + 'ensemble\\' + path_cv_ann = os.path.join(path_ann, 'CV') + path_ensemble_ann = os.path.join(path_ann, 'ensemble') smb_model_type = smb_model elif(smb_model == 'ann_weights'): - path_ann = path_smb + 'ANN\\LSYGO\\weights\\' - path_cv_ann = path_ann + 'CV\\' - path_ensemble_ann = path_ann + 'ensemble\\' + path_ann = os.path.join(path_smb, 'ANN', 'LSYGO', 'weights') + path_cv_ann = os.path.join(path_ann, 'CV') + path_ensemble_ann = os.path.join(path_ann, 'ensemble') smb_model_type = smb_model elif(smb_model == 'lasso'): smb_model_type = smb_model @@ -85,22 +100,23 @@ def adamont_simulation(simulation_type, compute_projection_forcings, compute_evo global current_ADAMONT_forcing_sum global current_ADAMONT_model_daymean global current_ADAMONT_model_daysum + counter = 0 forcing_threshold = 0 if(simulation_type == 'future'): # Preload the ensemble SMB models to speed up simulations ensemble_SMB_models = glacier_evolution.preload_ensemble_SMB_models() -# thickness_idx = 0 - for thickness_idx in range(0,2): - for i in range(0, ADAMONT_proj_filepaths.size, 2): - if(forcing_threshold <= counter): - current_ADAMONT_model_daymean = str(ADAMONT_proj_filepaths[i]) - current_ADAMONT_model_daysum = str(ADAMONT_proj_filepaths[i+1]) - current_ADAMONT_forcing_mean = 'projections\\' + ADAMONT_proj_filepaths[i] - current_ADAMONT_forcing_sum = 'projections\\' + ADAMONT_proj_filepaths[i+1] - adamont_forcings.main(compute_projection_forcings) - glacier_evolution.main(compute_evolution, ensemble_SMB_models, overwrite, counter_threshold, thickness_idx) - counter = counter+1 + thickness_idx = 0 +# for thickness_idx in range(0,2): + for i in range(0, ADAMONT_proj_filepaths.size, 2): + if(forcing_threshold <= counter): + current_ADAMONT_model_daymean = str(os.path.join(path_adamont, ADAMONT_proj_filepaths[i])) + current_ADAMONT_model_daysum = str(os.path.join(path_adamont, ADAMONT_proj_filepaths[i+1])) + 
current_ADAMONT_forcing_mean = os.path.join('projections', ADAMONT_proj_filepaths[i]) + current_ADAMONT_forcing_sum = os.path.join('projections', ADAMONT_proj_filepaths[i+1]) + adamont_forcings.main(compute_projection_forcings) + glacier_evolution.main(compute_evolution, ensemble_SMB_models, overwrite, use_cluster, counter_threshold, thickness_idx) + counter = counter+1 def glacier_simulation(simulation_type, counter_threshold, validate_SMB, compute_projection_forcings, compute_evolution, reconstruct, overwrite): imp.reload(adamont_forcings) @@ -117,7 +133,7 @@ def glacier_simulation(simulation_type, counter_threshold, validate_SMB, compute ensemble_SMB_models = glacier_evolution.preload_ensemble_SMB_models() # for thickness_idx in range(0,3): # 0 = original ice thickness / 1 = 1.3*ice thickness / 2 = 0.7*ice thickness - glacier_evolution.main(compute_evolution, ensemble_SMB_models, overwrite, counter_threshold, 0) # thickness idx = 0 by default + glacier_evolution.main(compute_evolution, ensemble_SMB_models, overwrite, use_cluster, counter_threshold, 0) # thickness idx = 0 by default else: print("\n[ERROR] Wrong type of projection!") diff --git a/code/smb_model_training.py b/code/smb_model_training.py index e3e1fc6..3f62d59 100644 --- a/code/smb_model_training.py +++ b/code/smb_model_training.py @@ -153,12 +153,8 @@ def create_spatiotemporal_matrix(SMB_raw, season_raw_meteo_anomalies_SMB, mon_te for SMB_glacier, CPDD_glacier, winter_snow_glacier, summer_snow_glacier, mon_temp_glacier, mon_snow_glacier, mean_alt, slope20, max_alt, area, lon, lat, aspect in zip(SMB_raw, season_raw_meteo_anomalies_SMB['CPDD'], season_raw_meteo_anomalies_SMB['winter_snow'], season_raw_meteo_anomalies_SMB['summer_snow'], mon_temp_anomalies, mon_snow_anomalies, glacier_mean_altitude, slopes20, max_altitudes, glacier_area, lons, lats, aspects): year = 1959 - for SMB_y, cpdd_y, w_snow_y, s_snow_y, mon_temp_y, mon_snow_y, mean_alt_y, area_y in zip(SMB_glacier, CPDD_glacier[first_year:], winter_snow_glacier[first_year:], summer_snow_glacier[first_year:], mon_temp_glacier[first_year:], mon_snow_glacier[first_year:], mean_alt, area): - mon_temp_y = mon_temp_y[:12] - mon_snow_y = mon_snow_y[:12] - # We get the current iteration combination input_variables_array = np.array([cpdd_y, w_snow_y, s_snow_y, mean_alt_y, max_alt, slope20, area_y, lon, lat, aspect, mean_alt_y*cpdd_y, slope20*cpdd_y, max_alt*cpdd_y, area_y*cpdd_y, lat*cpdd_y, lon*cpdd_y, aspect*cpdd_y, mean_alt_y*w_snow_y, slope20*w_snow_y, max_alt*w_snow_y, area_y*w_snow_y, lat*w_snow_y, lon*w_snow_y, aspect*w_snow_y, mean_alt_y*s_snow_y, slope20*s_snow_y, max_alt*s_snow_y, area_y*s_snow_y, lat*s_snow_y, lon*s_snow_y, aspect*s_snow_y]) if(spatiotemporal_flag == 'spatial'): @@ -778,14 +776,13 @@ def main(compute): mon_temp_anomalies = get_raw_mon_data(monthly_meteo_anomalies_SMB['temp'], 'mon_temp') mon_snow_anomalies = get_raw_mon_data(monthly_meteo_anomalies_SMB['snow'], 'mon_snow') - year_start = int(season_meteo_anomalies_SMB['CPDD'][0]['CPDD'][0]) + year_start = int(season_meteo_anomalies_SMB['CPDD'][0]['years'][0]) # year_end = int(season_meteo_anomalies_SMB['CPDD'][0]['CPDD'][-2]) first_SMB_year = 1959 # first_SMB_year = 1984 first_year = 
first_SMB_year - year_start - ########### PLOTS ######################################################## fig_idx = 1 diff --git a/code/smb_validation.py b/code/smb_validation.py index 6f1fcf6..f59faed 100644 --- a/code/smb_validation.py +++ b/code/smb_validation.py @@ -28,7 +28,7 @@ import sys import pandas as pd from pathlib import Path -from glacier_evolution import store_file, get_aspect_deg, empty_folder, make_ensemble_simulation, preload_ensemble_SMB_models, automatic_file_name_save +from glacier_evolution import store_file, get_aspect_deg, empty_folder, make_ensemble_simulation, preload_ensemble_SMB_models, automatic_file_name_save, get_adjusted_glacier_SAFRAN_forcings from sklearn.metrics import mean_squared_error from sklearn.metrics import r2_score @@ -48,25 +48,24 @@ # Folders #workspace = 'C:\\Jordi\\PhD\\Python\\' workspace = Path(os.getcwd()).parent -root = str(workspace.parent) + '\\' -workspace = str(workspace) + '\\' -path_smb = workspace + 'glacier_data\\smb\\' -path_smb_validations = path_smb + 'smb_simulations\\validation\\' -path_smb_function = path_smb + 'smb_function\\' + cv -path_smb_simulations = path_smb + 'smb_simulations\\' -path_glims = workspace + 'glacier_data\\GLIMS\\' -path_glacier_coordinates = workspace + 'glacier_data\\glacier_coordinates\\' -path_safran_forcings = 'C:\\Jordi\\PhD\\Data\\SAFRAN-Nivo-2017\\' -path_smb_function_safran = path_smb + 'smb_function\\SAFRAN\\' -path_smb_all_glaciers = path_smb + 'smb_simulations\\SAFRAN\\1\\all_glaciers_1967_2015\\' -path_smb_all_glaciers_smb = path_smb + 'smb_simulations\\SAFRAN\\1\\all_glaciers_1967_2015\\smb\\' -path_smb_all_ensemble_smb = path_smb + 'smb_simulations\\SAFRAN\\1\\all_glaciers_1967_2015\\ensemble_smb\\' -path_smb_all_glaciers_area = path_smb + 'smb_simulations\\SAFRAN\\1\\all_glaciers_1967_2015\\area\\' -path_smb_all_glaciers_slope = path_smb + 'smb_simulations\\SAFRAN\\1\\all_glaciers_1967_2015\\slope\\' -path_training_data = workspace + 'glacier_data\\glacier_evolution\\training\\' -path_training_glacier_info = path_training_data + 'glacier_info\\' +root = workspace.parent +#workspace = str(workspace) + '\\' +path_smb = os.path.join(workspace, 'glacier_data', 'smb') +path_smb_validations = os.path.join(path_smb, 'smb_simulations', 'validation') +path_smb_function = os.path.join(path_smb, 'smb_function', cv) +path_smb_simulations = os.path.join(path_smb, 'smb_simulations') +path_glims = os.path.join(workspace, 'glacier_data', 'GLIMS') +path_glacier_coordinates = os.path.join(workspace, 'glacier_data', 'glacier_coordinates') +path_smb_function_safran = os.path.join(path_smb, 'smb_function', 'SAFRAN') +path_smb_all_glaciers = os.path.join(path_smb, 'smb_simulations', 'SAFRAN', '1', 'all_glaciers_1967_2015') +path_smb_all_glaciers_smb = os.path.join(path_smb, 'smb_simulations', 'SAFRAN', '1', 'all_glaciers_1967_2015', 'smb') +path_smb_all_ensemble_smb = os.path.join(path_smb, 'smb_simulations', 'SAFRAN', '1', 'all_glaciers_1967_2015', 'ensemble_smb') +path_smb_all_glaciers_area = os.path.join(path_smb, 'smb_simulations', 'SAFRAN', '1', 'all_glaciers_1967_2015', 'area') +path_smb_all_glaciers_slope = os.path.join(path_smb, 'smb_simulations', 'SAFRAN', '1', 'all_glaciers_1967_2015', 'slope') +path_training_data = os.path.join(workspace, 'glacier_data', 'glacier_evolution', 'training') +path_training_glacier_info = os.path.join(path_training_data, 'glacier_info') global path_slope20 -path_slope20 = workspace + 'glacier_data\\glacier_evolution\\glacier_slope20\\SAFRAN\\1\\' +path_slope20 = 
os.path.join(workspace, 'glacier_data', 'glacier_evolution', 'glacier_slope20', 'SAFRAN', '1') ###### FUNCTIONS ###### @@ -105,7 +104,7 @@ def find_nearest(array,value): def store_glacier_info(glacier_info, path): if not os.path.exists(path): os.makedirs(path) - path = path + 'glacier_info_' + str(glacier_info['glimsID']) + path = os.path.join(path, 'glacier_info_' + str(glacier_info['glimsID'])) with open(path, 'wb') as glacier_info_f: np.save(glacier_info_f, glacier_info) # print("File saved: " + str(path)) @@ -202,14 +201,14 @@ def get_slope20(glims_glacier): # Load the file if(str(glacier_name[-1]).isdigit() and str(glacier_name[-1]) != '1'): appendix = str(glacier_name[-1]) - if(os.path.isfile(path_slope20 + glimsID + '_' + appendix + '_slope20.csv')): - slope20_array = genfromtxt(path_slope20 + glimsID + '_' + appendix + '_slope20.csv', delimiter=';', dtype=np.dtype('float32')) - elif(os.path.isfile(path_slope20 + glimsID + '_slope20.csv')): - slope20_array = genfromtxt(path_slope20 + glimsID + '_slope20.csv', delimiter=';', dtype=np.dtype('float32')) + if(os.path.isfile(os.path.join(path_slope20, glimsID + '_' + appendix + '_slope20.csv'))): + slope20_array = genfromtxt(os.path.join(path_slope20, glimsID + '_' + appendix + '_slope20.csv'), delimiter=';', dtype=np.dtype('float32')) + elif(os.path.isfile(os.path.join(path_slope20, glimsID + '_slope20.csv'))): + slope20_array = genfromtxt(os.path.join(path_slope20, glimsID + '_slope20.csv'), delimiter=';', dtype=np.dtype('float32')) else: slope20_array = np.array([]) - elif(os.path.isfile(path_slope20 + glimsID + '_slope20.csv')): - slope20_array = genfromtxt(path_slope20 + glimsID + '_slope20.csv', delimiter=';', dtype=np.dtype('float32')) + elif(os.path.isfile(os.path.join(path_slope20, glimsID + '_slope20.csv'))): + slope20_array = genfromtxt(os.path.join(path_slope20, glimsID + '_slope20.csv'), delimiter=';', dtype=np.dtype('float32')) else: slope20_array = np.array([]) ### Retrieve the 20% slope @@ -227,7 +226,7 @@ def get_slope20(glims_glacier): # Limit slope with random variance to avoid unrealistic slopes slope20 = random.uniform(40.0, 55.0) - print("Glacier tongue slope: " + str(slope20)) +# print("Glacier tongue slope: " + str(slope20)) return slope20 @@ -242,22 +241,21 @@ def create_spatiotemporal_matrix(season_anomalies, mon_anomalies, glims_glacier, lat = glims_glacier['y_coord'] aspect = np.cos(get_aspect_deg(glims_glacier['Aspect'].decode('ascii'))) -# print('max_alt: ' + str(max_alt)) -# print('slope20: ' + str(slope20)) -# print("glacier_area: " + str(glacier_area)) -# print("glacier_mean_altitude: " + str(glacier_mean_altitude)) -## -# print("season_anomalies: " + str(season_anomalies)) -# print("mon_anomalies: " + str(mon_anomalies)) - count, n_model = 0,0 for model in best_models: x_reg, x_reg_nn, x_reg_full = [],[],[] x_reg_idxs = eval(model['f1']) - for cpdd_y, w_snow_y, s_snow_y, mon_temp_y, mon_snow_y, mean_alt_y, area_y in zip(season_anomalies['CPDD'], season_anomalies['w_snow'], season_anomalies['s_snow'], mon_anomalies['temp'], mon_anomalies['snow'], glacier_mean_altitude, glacier_area): + for cpdd_y, w_snow_y, s_snow_y, mon_temp_y, mon_snow_y, mean_alt_y, area_y in zip(season_anomalies['CPDD'], season_anomalies['w_snow'], + season_anomalies['s_snow'], mon_anomalies['temp'], + mon_anomalies['snow'], glacier_mean_altitude, glacier_area): # We get the current iteration combination - input_variables_array = np.array([cpdd_y, w_snow_y, s_snow_y, mean_alt_y, max_alt, slope20, area_y, lon, lat, aspect, 
mean_alt_y*cpdd_y, slope20*cpdd_y, max_alt*cpdd_y, area_y*cpdd_y, lat*cpdd_y, lon*cpdd_y, aspect*cpdd_y, mean_alt_y*w_snow_y, slope20*w_snow_y, max_alt*w_snow_y, area_y*w_snow_y, lat*w_snow_y, lon*w_snow_y, aspect*w_snow_y, mean_alt_y*s_snow_y, slope20*s_snow_y, max_alt*s_snow_y, area_y*s_snow_y, lat*s_snow_y, lon*s_snow_y, aspect*s_snow_y]) + input_variables_array = np.array([cpdd_y, w_snow_y, s_snow_y, mean_alt_y, max_alt, slope20, area_y, lon, lat, aspect, + mean_alt_y*cpdd_y, slope20*cpdd_y, max_alt*cpdd_y, area_y*cpdd_y, lat*cpdd_y, lon*cpdd_y, + aspect*cpdd_y, mean_alt_y*w_snow_y, slope20*w_snow_y, max_alt*w_snow_y, area_y*w_snow_y, + lat*w_snow_y, lon*w_snow_y, aspect*w_snow_y, mean_alt_y*s_snow_y, slope20*s_snow_y, + max_alt*s_snow_y, area_y*s_snow_y, lat*s_snow_y, lon*s_snow_y, aspect*s_snow_y]) + x_reg.append(input_variables_array[x_reg_idxs]) input_full_array = np.append(input_variables_array, mon_temp_y) input_full_array = np.append(input_full_array, mon_snow_y) @@ -308,14 +306,17 @@ def find_glacier_coordinates(massif_number, zs, aspects, glims_data): all_glacier_coordinates = [] # All glaciers loop - for glims_id, glacier_name, glacier_massif, glacier_altitude, glacier_aspect in zip(GLIMS_IDs, glacier_names, glacier_massifs, glacier_centroid_altitude, glacier_aspects): - all_glacier_coordinates.append([glacier_name, find_glacier_idx(glacier_massif, massif_number, zs, glacier_altitude, aspects, glacier_aspect), float(glacier_altitude), glims_id, int(glacier_massif)]) + for glims_id, glacier_name, glacier_massif, glacier_altitude, glacier_aspect in zip(GLIMS_IDs, glacier_names, glacier_massifs, + glacier_centroid_altitude, glacier_aspects): + + all_glacier_coordinates.append([glacier_name, find_glacier_idx(glacier_massif, massif_number, zs, glacier_altitude, aspects, glacier_aspect), + float(glacier_altitude), glims_id, int(glacier_massif)]) return np.asarray(all_glacier_coordinates) def get_SAFRAN_glacier_coordinates(glims_dataset): # We read the first year to get some basic information - dummy_SAFRAN_forcing = Dataset(path_safran_forcings + '84\\FORCING.nc') + dummy_SAFRAN_forcing = Dataset(os.path.join(path_safran_forcings, '84', 'FORCING.nc')) aspects = dummy_SAFRAN_forcing.variables['aspect'][:] zs = dummy_SAFRAN_forcing.variables['ZS'][:] @@ -325,85 +326,32 @@ def get_SAFRAN_glacier_coordinates(glims_dataset): return all_glacier_coordinates -@jit -def get_mean_temps(datetimes, hourly_data): - ref_day = -9 - daily_data = [] - idx, day_idx = 0, 0 - first = True - for time_hour in datetimes: - current_day = time_hour.astype(object).timetuple().tm_yday - if(current_day == ref_day): - daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx]/24.0 - else: - ref_day = current_day - daily_data.append(hourly_data[idx]/24.0) - if(not first): - day_idx = day_idx + 1 - else: - first = False - - idx = idx + 1 - - return np.asarray(daily_data) -@jit -def get_precips(datetimes, hourly_data): - ref_day = -9 - daily_data = [] - idx, day_idx = 0, 0 - isFirst = True - for time_hour in datetimes: - current_day = time_hour.astype(object).timetuple().tm_yday - if(current_day == ref_day): - daily_data[day_idx] = daily_data[day_idx] + hourly_data[idx] - else: - ref_day = current_day - daily_data.append(hourly_data[idx]) - if(not isFirst): - day_idx = day_idx + 1 - else: - isFirst = False - idx = idx + 1 - return np.asarray(daily_data) - -def compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow, - CPDD_ref, w_snow_ref, s_snow_ref): - - local_CPDD_anomaly = 
glacier_CPDD - CPDD_ref - local_w_snow_anomaly = glacier_winter_snow - w_snow_ref - local_s_snow_anomaly = glacier_summer_snow - s_snow_ref +def get_default_SAFRAN_forcings(year_start, year_end): - return local_CPDD_anomaly, local_w_snow_anomaly, local_s_snow_anomaly - -def compute_monthly_anomalies(mon_temps, mon_snow, mon_temp_ref, mon_snow_ref): + year_start_f = 1959 + year_end_f = 2015 - mon_temp_anomaly = mon_temps - mon_temp_ref - mon_snow_anomaly = mon_snow - mon_snow_ref + path_temps = os.path.join(path_smb_function_safran, 'daily_temps_years_' + str(year_start_f) + '-' + str(year_end_f) + '.txt') + path_snow = os.path.join(path_smb_function_safran, 'daily_snow_years_' + str(year_start_f) + '-' + str(year_end_f) + '.txt') + path_rain = os.path.join(path_smb_function_safran, 'daily_rain_years_' + str(year_start_f) + '-' + str(year_end_f) + '.txt') + path_dates = os.path.join(path_smb_function_safran, 'daily_dates_years_' + str(year_start_f) + '-' + str(year_end_f) + '.txt') + path_zs = os.path.join(path_smb_function_safran, 'zs_years' + str(year_start_f) + '-' + str(year_end_f) + '.txt') - return mon_temp_anomaly, mon_snow_anomaly - -def get_default_SAFRAN_forcings(year_start, year_end): - - path_temps = path_smb_function_safran +'daily_temps_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_snow = path_smb_function_safran +'daily_snow_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_rain = path_smb_function_safran +'daily_rain_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_dates = path_smb_function_safran +'daily_dates_years_' + str(year_start) + '-' + str(year_end) + '.txt' - path_zs = path_smb_function_safran +'zs_years' + str(year_start) + '-' + str(year_end) + '.txt' if(os.path.exists(path_temps) & os.path.exists(path_snow) & os.path.exists(path_rain) & os.path.exists(path_dates) & os.path.exists(path_zs)): print("Fetching SAFRAN forcings...") print("\nTaking forcings from " + str(path_temps)) with open(path_temps, 'rb') as temps_f: - daily_temps_years = np.load(temps_f, encoding='latin1') + daily_temps_years = np.load(temps_f, encoding='latin1', allow_pickle=True) with open(path_snow, 'rb') as snow_f: - daily_snow_years = np.load(snow_f, encoding='latin1') + daily_snow_years = np.load(snow_f, encoding='latin1', allow_pickle=True) with open(path_rain, 'rb') as rain_f: - daily_rain_years = np.load(rain_f, encoding='latin1') + daily_rain_years = np.load(rain_f, encoding='latin1', allow_pickle=True) with open(path_dates, 'rb') as dates_f: - daily_dates_years = np.load(dates_f, encoding='latin1') + daily_dates_years = np.load(dates_f, encoding='latin1', allow_pickle=True) with open(path_zs, 'rb') as zs_f: - zs_years = np.load(zs_f, encoding='latin1') + zs_years = np.load(zs_f, encoding='latin1', allow_pickle=True) else: sys.exit("\n[ ERROR ] SAFRAN base forcing files not found. 
Please execute SAFRAN forcing module before") @@ -411,115 +359,6 @@ def get_default_SAFRAN_forcings(year_start, year_end): return daily_temps_years, daily_snow_years, daily_rain_years, daily_dates_years, zs_years -def get_adjusted_glacier_SAFRAN_forcings(year, year_start, glacier_mean_altitude, SAFRAN_idx, daily_temps_years, daily_snow_years, daily_rain_years, daily_dates_years, zs_years, CPDD_ref, w_snow_ref, s_snow_ref, mon_temp_ref, mon_snow_ref): - # We also need to fetch the previous year since data goes from 1st of August to 31st of July - idx = year - (year_start-1) - glacier_idx = int(SAFRAN_idx) - - zs = zs_years[idx-1] - - daily_temps_mean_1 = copy.deepcopy(daily_temps_years[idx-1]) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0 - daily_temps_mean = copy.deepcopy(daily_temps_years[idx]) + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0 - - snow_sum_1 = copy.deepcopy(daily_snow_years[idx-1]) - snow_sum = copy.deepcopy(daily_snow_years[idx]) - - rain_sum_1 = copy.deepcopy(daily_rain_years[idx-1]) - rain_sum = copy.deepcopy(daily_rain_years[idx]) - - # We get the daily datetimes - daily_datetimes_1 = copy.deepcopy(daily_dates_years[idx-1]) - daily_datetimes = copy.deepcopy(daily_dates_years[idx]) - - #### We compute the monthly average data - # Monthly average temperature - glacier_temps_1 = daily_temps_mean_1[:, glacier_idx] - glacier_temps = daily_temps_mean[:, glacier_idx] - mon_temps_1 = get_monthly_temps(glacier_temps_1, daily_datetimes_1) - mon_temps = get_monthly_temps(glacier_temps, daily_datetimes) - mon_temp_year = np.append(mon_temps_1[2:], mon_temps[:2]) - - # Monthly average snowfall - # We adjust the snowfall rate at the glacier's altitude - glacier_snow_1 = snow_sum_1[:, glacier_idx] - glacier_snow = snow_sum[:, glacier_idx] - glacier_rain_1 = rain_sum_1[:, glacier_idx] - glacier_rain = rain_sum[:, glacier_idx] - glacier_snow_1 = np.where(glacier_temps_1 > 2.0, 0.0, glacier_snow_1) - glacier_snow_1 = np.where(((glacier_temps_1 < 2.0) & (glacier_snow_1 == 0.0)), glacier_rain_1, glacier_snow_1) - glacier_snow = np.where(glacier_temps > 2.0, 0.0, glacier_snow) - glacier_snow = np.where(((glacier_temps < 2.0) & (glacier_snow == 0.0)), glacier_rain, glacier_snow) - - mon_snow_1 = get_monthly_snow(glacier_snow_1, daily_datetimes_1) - mon_snow = get_monthly_snow(glacier_snow, daily_datetimes) - mon_snow_year = np.append(mon_snow_1[2:], mon_snow[:2]) - - year_1_offset = 213 - year_offset = 152 - temp_year = np.append(daily_temps_mean_1[-year_1_offset:, glacier_idx], daily_temps_mean[:year_offset, glacier_idx]) - pos_temp_year = np.where(temp_year < 0, 0, temp_year) - integ_temp = np.cumsum(pos_temp_year) - - start_div, end_div = 30, 10 - - ### Dynamic temperature ablation period - start_y_ablation = np.where(integ_temp > integ_temp.max()/start_div)[0] -# start_y_ablation = np.where(integ_temp > 30)[0] - end_y_ablation = np.where(integ_temp > (integ_temp.max() - integ_temp.max()/end_div))[0] - - start_ablation = start_y_ablation[0] + (daily_temps_mean_1[:,glacier_idx].size - year_1_offset) - end_ablation = end_y_ablation[0] - year_1_offset - if(start_ablation > 366): - start_ablation = 366 - - # We get the dynamic indexes for the ablation and accumulation periods for temperature - ablation_temp_idx_1 = range(start_ablation, daily_datetimes_1.size) - ablation_temp_idx = range(0, end_ablation) -# accum_temp_idx_1 = range(end_ablation+1, start_ablation-1) - - # We get the indexes for the ablation and accumulation periods for snowfall - # Classic 120-270 ablation 
period
-    ablation_idx_1 = range(daily_datetimes_1.size-95, daily_datetimes_1.size)
-    ablation_idx = range(0, 55)
-    accum_idx_1 = range(56, daily_datetimes_1.size-96)
-    
-    glacier_ablation_temps = np.append(daily_temps_mean_1[ablation_temp_idx_1, glacier_idx], daily_temps_mean[ablation_temp_idx, glacier_idx])
-#    glacier_accumulation_temps = daily_temps_mean_1[accum_temp_idx_1, glacier_idx]
-    dummy_glacier_ablation_temps = np.append(daily_temps_mean_1[ablation_idx_1, glacier_idx], daily_temps_mean[ablation_idx, glacier_idx])
-    dummy_glacier_accumulation_temps = daily_temps_mean_1[accum_idx_1, glacier_idx]
-    
-#    glacier_ablation_temps = glacier_ablation_temps + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-#    glacier_accumulation_temps = glacier_accumulation_temps + ((zs[glacier_idx] - glacier_mean_altitude)/1000.0)*6.0
-    
-    glacier_year_pos_temps = np.where(glacier_ablation_temps < 0, 0, glacier_ablation_temps)
-#    glacier_accum_pos_temps = np.where(glacier_accumulation_temps < 0, 0, glacier_accumulation_temps)
-    dummy_glacier_ablation_pos_temps = np.where(dummy_glacier_ablation_temps < 0, 0, dummy_glacier_ablation_temps)
-    dummy_glacier_accum_pos_temps = np.where(dummy_glacier_accumulation_temps < 0, 0, dummy_glacier_accumulation_temps)
-    
-    glacier_accum_snow = snow_sum_1[accum_idx_1, glacier_idx]
-    glacier_accum_rain = rain_sum_1[accum_idx_1, glacier_idx]
-    
-    glacier_ablation_snow = np.append(snow_sum_1[ablation_idx_1, glacier_idx], snow_sum[ablation_idx, glacier_idx])
-    glacier_ablation_rain = np.append(rain_sum_1[ablation_idx_1, glacier_idx], rain_sum[ablation_idx, glacier_idx])
-    
-    # We recompute the rain/snow limit with the new adjusted temperatures
-    glacier_accum_snow = np.where(dummy_glacier_accum_pos_temps > 2.0, 0.0, glacier_accum_snow)
-    glacier_accum_snow = np.where(((dummy_glacier_accumulation_temps < 2.0) & (glacier_accum_snow == 0.0)), glacier_accum_rain, glacier_accum_snow)
-    glacier_ablation_snow = np.where(dummy_glacier_ablation_pos_temps > 2.0, 0.0, glacier_ablation_snow)
-    glacier_ablation_snow = np.where(((dummy_glacier_ablation_temps < 2.0) & (glacier_ablation_snow == 0.0)), glacier_ablation_rain, glacier_ablation_snow)
-    
-    # We compute the cumulative yearly CPDD and snowfall
-    glacier_CPDD = np.sum(glacier_year_pos_temps)
-    glacier_winter_snow = np.sum(glacier_accum_snow)
-    glacier_summer_snow = np.sum(glacier_ablation_snow)
-    
-    CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly = compute_local_anomalies(glacier_CPDD, glacier_winter_snow, glacier_summer_snow,
-                                                                                                   CPDD_ref, w_snow_ref, s_snow_ref)
-    
-    mon_temp_anomaly, mon_snow_anomaly = compute_monthly_anomalies(mon_temp_year, mon_snow_year, mon_temp_ref, mon_snow_ref)
-
-
-    return CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly, mon_temp_anomaly, mon_snow_anomaly
 
 def get_meteo_references(season_meteo_SMB, monthly_meteo_SMB, glimsID, glacierName):
     found = False
@@ -533,57 +372,32 @@ def get_meteo_references(season_meteo_SMB, monthly_meteo_SMB, glimsID, glacierNa
         if(cpdd['GLIMS_ID'] == glimsID):
             if((found and cpdd['Glacier'] == glacierName) or not found):
-                CPDD_ref = cpdd['Mean']
-                w_snow_ref = w_snow['Mean']
-                s_snow_ref = s_snow['Mean']
-                
-                mon_temp_ref = mon_temps['mon_means']
-                mon_snow_ref = mon_snow['mon_means']
+                meteo_refs = {'CPDD': cpdd['Mean'],
+                              'w_snow': w_snow['Mean'],
+                              's_snow': s_snow['Mean'],
+                              'mon_temp': mon_temps['mon_means'],
+                              'mon_snow': mon_snow['mon_means']}
                 found = True
-    return CPDD_ref, w_snow_ref, s_snow_ref, mon_temp_ref, mon_snow_ref
+    return meteo_refs
 
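# ---------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the patch): the refactored
# get_adjusted_glacier_SAFRAN_forcings() is assumed to bundle the seasonal and
# monthly anomalies into the two dicts consumed in compute_SAFRAN_anomalies()
# below, replacing the removed compute_local_anomalies() and
# compute_monthly_anomalies() helpers. The key names are taken from the call
# sites; the surrounding variable names are hypothetical:
#
#     season_anomalies = {'CPDD': glacier_CPDD - meteo_refs['CPDD'],
#                         'winter_snow': glacier_winter_snow - meteo_refs['w_snow'],
#                         'summer_snow': glacier_summer_snow - meteo_refs['s_snow']}
#     monthly_anomalies = {'temps': mon_temp_year - meteo_refs['mon_temp'],
#                          'snow': mon_snow_year - meteo_refs['mon_snow']}
# ---------------------------------------------------------------------------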
-def get_monthly_temps(daily_data, daily_datetimes):
-    
-    d = {'Dates': daily_datetimes, 'Temps': daily_data}
-    df_datetimes = pd.DataFrame(data=d)
-    df_datetimes.set_index('Dates', inplace=True)
-    df_datetimes.index = pd.to_datetime(df_datetimes.index)
-    df_datetimes = df_datetimes.resample('M').mean()
-    
-    monthly_avg_data = df_datetimes.Temps.to_numpy()
-    
-    return monthly_avg_data[:12]
-
-def get_monthly_snow(daily_data, daily_datetimes):
-    d = {'Dates': daily_datetimes, 'Temps': daily_data}
-    df_datetimes = pd.DataFrame(data=d)
-    df_datetimes.set_index('Dates', inplace=True)
-    df_datetimes.index = pd.to_datetime(df_datetimes.index)
-    df_datetimes = df_datetimes.resample('M').sum()
-    
-    monthly_avg_data = df_datetimes.Temps.to_numpy()
-    
-    return monthly_avg_data[:12]
 
 def compute_SAFRAN_anomalies(glacier_info, year_range, year_start, all_glacier_coordinates, season_meteo, monthly_meteo, daily_meteo_data):
     # We get the glacier indexes
     SAFRAN_idx = all_glacier_coordinates[np.where(all_glacier_coordinates[:,3] == glacier_info['glimsID'])[0]][0][1]
     
     # We extract the meteo references for the simulation period
-    CPDD_ref, w_snow_ref, s_snow_ref, mon_temp_ref, mon_snow_ref = get_meteo_references(season_meteo[()], monthly_meteo[()], glacier_info['glimsID'], glacier_info['name'])
+    meteo_refs = get_meteo_references(season_meteo[()], monthly_meteo[()], glacier_info['glimsID'], glacier_info['name'])
     
     CPDD_LocalAnomaly, winter_snow_LocalAnomaly, summer_snow_LocalAnomaly, mon_temp_anomaly, mon_snow_anomaly = [],[],[],[],[]
     
     # We compute the meteo anomalies year by year to reproduce the main flow of the model
     for year, z_mean in zip(year_range, glacier_info['mean_altitude']):
-        CPDD_LocalAnomaly_y, winter_snow_LocalAnomaly_y, summer_snow_LocalAnomaly_y, mon_temp_anomaly_y, mon_snow_anomaly_y = get_adjusted_glacier_SAFRAN_forcings(year, year_start, z_mean, SAFRAN_idx,
-                                             daily_meteo_data['temps'], daily_meteo_data['snow'], daily_meteo_data['rain'], daily_meteo_data['dates'],
-                                             daily_meteo_data['zs'], CPDD_ref, w_snow_ref, s_snow_ref, mon_temp_ref, mon_snow_ref)
-        CPDD_LocalAnomaly.append(CPDD_LocalAnomaly_y)
-        winter_snow_LocalAnomaly.append(winter_snow_LocalAnomaly_y)
-        summer_snow_LocalAnomaly.append(summer_snow_LocalAnomaly_y)
-        mon_temp_anomaly.append(mon_temp_anomaly_y)
-        mon_snow_anomaly.append(mon_snow_anomaly_y)
+
+        season_anomalies_y, monthly_anomalies_y = get_adjusted_glacier_SAFRAN_forcings(year, year_start,
+                                                                                       z_mean, SAFRAN_idx,
+                                                                                       daily_meteo_data,
+                                                                                       meteo_refs)
+        CPDD_LocalAnomaly.append(season_anomalies_y['CPDD'])
+        winter_snow_LocalAnomaly.append(season_anomalies_y['winter_snow'])
+        summer_snow_LocalAnomaly.append(season_anomalies_y['summer_snow'])
+        mon_temp_anomaly.append(monthly_anomalies_y['temps'])
+        mon_snow_anomaly.append(monthly_anomalies_y['snow'])
     
     CPDD_LocalAnomaly = np.asarray(CPDD_LocalAnomaly)
     winter_snow_LocalAnomaly = np.asarray(winter_snow_LocalAnomaly)
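# ---------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the patch): the daily_meteo_data
# argument is assumed to be packed from the arrays returned by
# get_default_SAFRAN_forcings(), with the key names matching the former call
# site removed above:
#
#     daily_temps, daily_snow, daily_rain, daily_dates, zs = \
#         get_default_SAFRAN_forcings(year_start, year_end)
#     daily_meteo_data = {'temps': daily_temps, 'snow': daily_snow,
#                         'rain': daily_rain, 'dates': daily_dates, 'zs': zs}
# ---------------------------------------------------------------------------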
@@ -608,25 +422,28 @@ def main(compute, reconstruct):
     print("-----------------------------------------------\n")
     
     if(compute):
+        global path_safran_forcings
+        path_safran_forcings = settings.path_safran
         if(reconstruct):
-            path_ann = path_smb + 'ANN\\LOGO\\no_weights\\'
-            path_cv_ann = path_ann + 'CV\\'
+            # TODO: this branch currently has no effect of its own; it sets
+            # the same LOGO paths as the 'ann_no_weights' case below
+            path_ann = os.path.join(path_smb, 'ANN', 'LOGO')
+            path_cv_ann = os.path.join(path_ann, 'CV')
         else:
            # Set LOGO for model validation
            if(settings.smb_model_type == 'ann_no_weights'):
-                path_ann = path_smb + 'ANN\\LOGO\\no_weights\\'
-                path_cv_ann = path_ann + 'CV\\'
+                path_ann = os.path.join(path_smb, 'ANN', 'LOGO')
+                path_cv_ann = os.path.join(path_ann, 'CV')
             elif(settings.smb_model_type == 'ann_weights'):
-                path_ann = path_smb + 'ANN\\LOGO\\weights\\'
-                path_cv_ann = path_ann + 'CV\\'
+                path_ann = os.path.join(path_smb, 'ANN', 'LOGO', 'weights')
+                path_cv_ann = os.path.join(path_ann, 'CV')
         
         # Close all previous open plots in the workflow
         plt.close('all')
         
         # We read the SMB from the csv file
-        SMB_raw = genfromtxt(path_smb + 'SMB_raw_extended.csv', delimiter=';', dtype=float)
-        SMB_uncertainties = genfromtxt(path_glacier_coordinates + 'glaciers_rabatel_uncertainties.csv', delimiter=';', dtype=float)
+        SMB_raw = genfromtxt(os.path.join(path_smb, 'SMB_raw_extended.csv'), delimiter=';', dtype=float)
+        SMB_uncertainties = genfromtxt(os.path.join(path_glacier_coordinates, 'glaciers_rabatel_uncertainties.csv'), delimiter=';', dtype=float)
         
         ### We detect the forcing between SPAZM, SAFRAN or ADAMONT
         if(settings.simulation_type == "future"):
@@ -635,60 +452,60 @@ def main(compute, reconstruct):
         forcing = settings.historical_forcing
         
         # We determine the path depending on the forcing
-        path_smb_function_forcing = path_smb + 'smb_function\\' + forcing + "\\"
+        path_smb_function_forcing = os.path.join(path_smb, 'smb_function', forcing)
        
        #### GLIMS data for 1985, 2003 and 2015
-        glims_2015 = genfromtxt(path_glims + 'GLIMS_2015.csv', delimiter=';', skip_header=1, dtype=[('Area', '
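# ---------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the patch): the pattern applied
# throughout these hunks replaces hard-coded Windows '\\' concatenation with
# os.path.join, which picks the correct separator for the host OS, so the
# same code runs both on Windows and on Linux. For example:
#
#     >>> import os
#     >>> os.path.join('glacier_data', 'smb', 'smb_function', 'SAFRAN')
#     'glacier_data/smb/smb_function/SAFRAN'    # on Linux/macOS
#     'glacier_data\\smb\\smb_function\\SAFRAN'  # on Windows
# ---------------------------------------------------------------------------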