Commit: revert
PennyHow committed Jun 4, 2024
1 parent 1b355a7 · commit af09818
Showing 9 changed files with 129 additions and 303 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/process_test.yml
@@ -38,7 +38,7 @@ jobs:
run: |
mkdir $GITHUB_WORKSPACE/out/
for i in $(echo ${{ env.TEST_STATION }} | tr ' ' '\n'); do
python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2.py -v $GITHUB_WORKSPACE/main/src/pypromice/process/variables.csv -m $GITHUB_WORKSPACE/main/src/pypromice/process/metadata.csv -c $GITHUB_WORKSPACE/aws-l0/raw/config/$i.toml -i $GITHUB_WORKSPACE/aws-l0/raw -o $GITHUB_WORKSPACE/out/
python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l3.py -v $GITHUB_WORKSPACE/main/src/pypromice/process/variables.csv -m $GITHUB_WORKSPACE/main/src/pypromice/process/metadata.csv -c $GITHUB_WORKSPACE/aws-l0/raw/config/$i.toml -i $GITHUB_WORKSPACE/aws-l0/raw -o $GITHUB_WORKSPACE/out/
done
- name: Upload test output
uses: actions/upload-artifact@v3
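The test step above invokes get_l3.py once per station listed in TEST_STATION. A rough Python equivalent of that loop for local runs is sketched below; the workspace path and station IDs are placeholders, and only the flag names come from the workflow step itself.

# Sketch: mirror the CI loop for local testing. Paths and station
# IDs are hypothetical; the flags match the workflow step above.
import subprocess

workspace = "/tmp/workspace"        # stand-in for $GITHUB_WORKSPACE
test_stations = ["TEST1", "TEST2"]  # hypothetical station IDs

for station in test_stations:
    subprocess.run(
        [
            "python3", f"{workspace}/main/src/pypromice/process/get_l3.py",
            "-v", f"{workspace}/main/src/pypromice/process/variables.csv",
            "-m", f"{workspace}/main/src/pypromice/process/metadata.csv",
            "-c", f"{workspace}/aws-l0/raw/config/{station}.toml",
            "-i", f"{workspace}/aws-l0/raw",
            "-o", f"{workspace}/out/",
        ],
        check=True,  # fail fast, like a failing CI step
    )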
1 change: 0 additions & 1 deletion .gitignore
@@ -29,4 +29,3 @@ src/pypromice/postprocess/positions.csv

# sqlite db files
*.db
*.bak
3 changes: 1 addition & 2 deletions setup.py
@@ -42,9 +42,8 @@
'console_scripts': [
'get_promice_data = pypromice.get.get_promice_data:get_promice_data',
'get_l0tx = pypromice.tx.get_l0tx:get_l0tx',
'get_l2 = pypromice.process.get_l2:get_l2',
'join_l2 = pypromice.process.join_l2:join_l2',
'get_l3 = pypromice.process.get_l3:get_l3',
'join_l3 = pypromice.process.join_l3:join_l3',
'get_watsontx = pypromice.tx.get_watsontx:get_watsontx',
'get_bufr = pypromice.postprocess.get_bufr:main',
'get_msg = pypromice.tx.get_msg:get_msg'
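Each console_scripts entry binds a shell command to a Python callable that is invoked with no arguments and parses sys.argv itself. Below is a minimal, hypothetical sketch of a target such as join_l3; the real function and its options live in pypromice.process.join_l3.

# Hypothetical sketch of a console_scripts target like
# 'join_l3 = pypromice.process.join_l3:join_l3'. Argument names
# here are illustrative, not the real CLI.
import argparse

def join_l3():
    parser = argparse.ArgumentParser(description="Join L3 AWS files")
    parser.add_argument("-i", "--input", nargs="+", help="input L3 files")
    parser.add_argument("-o", "--output", help="output path")
    args = parser.parse_args()
    print(f"joining {args.input} -> {args.output}")

if __name__ == "__main__":
    join_l3()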
67 changes: 8 additions & 59 deletions src/pypromice/process/L1toL2.py
@@ -30,18 +30,7 @@ def toL2(
eps_clear=9.36508e-6,
emissivity=0.97,
) -> xr.Dataset:
'''Process one Level 1 (L1) product to Level 2.
In this step we do:
- manual flagging and adjustments
- automated QC: persistence, percentile
- custom filter: gps_alt filter, NaN t_rad removed from dlr & ulr
- smoothing of tilt and rot
- calculation of rh with regard to ice in subfreezing conditions
- calculation of cloud coverage
- correction of dsr and usr for tilt
- filtering of dsr based on a theoretical TOA irradiance and grazing light
- calculation of albedo
- calculation of directional wind speed
'''Process one Level 1 (L1) product to Level 2
Parameters
----------
@@ -96,20 +85,10 @@ def toL2(
ds['dlr'] = ds.dlr.where(ds.t_rad.notnull())
ds['ulr'] = ds.ulr.where(ds.t_rad.notnull())

# calculating relative humidity with regard to ice
T_100 = _getTempK(T_0)
ds['rh_u_cor'] = correctHumidity(ds['rh_u'], ds['t_u'],
T_0, T_100, ews, ei0)

if ds.attrs['number_of_booms']==2:
ds['rh_l_cor'] = correctHumidity(ds['rh_l'], ds['t_l'],
T_0, T_100, ews, ei0)

if hasattr(ds,'t_i'):
if ~ds['t_i'].isnull().all():
ds['rh_i_cor'] = correctHumidity(ds['rh_i'], ds['t_i'],
T_0, T_100, ews, ei0)

# Determine cloud cover for on-ice stations
cc = calcCloudCoverage(ds['t_u'], T_0, eps_overcast, eps_clear, # Calculate cloud coverage
ds['dlr'], ds.attrs['station_id'])
@@ -197,52 +176,22 @@ def toL2(
ds['precip_u_cor'], ds['precip_u_rate'] = correctPrecip(ds['precip_u'],
ds['wspd_u'])
if ds.attrs['number_of_booms']==2:
ds['rh_l_cor'] = correctHumidity(ds['rh_l'], ds['t_l'], # Correct relative humidity
T_0, T_100, ews, ei0)

if ~ds['precip_l'].isnull().all() and precip_flag: # Correct precipitation
ds['precip_l_cor'], ds['precip_l_rate']= correctPrecip(ds['precip_l'],
ds['wspd_l'])

# Get directional wind speed
ds['wdir_u'] = ds['wdir_u'].where(ds['wspd_u'] != 0)
ds['wspd_x_u'], ds['wspd_y_u'] = calcDirWindSpeeds(ds['wspd_u'], ds['wdir_u'])

if ds.attrs['number_of_booms']==2:
ds['wdir_l'] = ds['wdir_l'].where(ds['wspd_l'] != 0)
ds['wspd_x_l'], ds['wspd_y_l'] = calcDirWindSpeeds(ds['wspd_l'], ds['wdir_l'])

if hasattr(ds, 'wdir_i'):
if ~ds['wdir_i'].isnull().all() and ~ds['wspd_i'].isnull().all():
ds['wdir_i'] = ds['wdir_i'].where(ds['wspd_i'] != 0)
ds['wspd_x_i'], ds['wspd_y_i'] = calcDirWindSpeeds(ds['wspd_i'], ds['wdir_i'])

if hasattr(ds,'t_i'):
if ~ds['t_i'].isnull().all(): # Instantaneous msg processing
ds['rh_i_cor'] = correctHumidity(ds['rh_i'], ds['t_i'], # Correct relative humidity
T_0, T_100, ews, ei0)

ds = clip_values(ds, vars_df)
return ds


def calcDirWindSpeeds(wspd, wdir, deg2rad=np.pi/180):
'''Calculate directional wind speed from wind speed and direction
Parameters
----------
wspd : xr.DataArray
Wind speed data array
wdir : xr.DataArray
Wind direction data array
deg2rad : float
Degree to radians coefficient. The default is np.pi/180
Returns
-------
wspd_x : xr.DataArray
Wind speed in X direction
wspd_y : xr.DataArray
Wind speed in Y direction
'''
wspd_x = wspd * np.sin(wdir * deg2rad)
wspd_y = wspd * np.cos(wdir * deg2rad)
return wspd_x, wspd_y


def calcCloudCoverage(T, T_0, eps_overcast, eps_clear, dlr, station_id):
'''Calculate cloud cover from T and T_0
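The calcCloudCoverage routine, whose definition begins just above, scales measured downwelling longwave radiation between theoretical clear-sky and overcast limits. With the toL2 defaults eps_overcast=1 and the Swinbank-style eps_clear=9.36508e-6, a plausible sketch of that scaling is shown below; the exact formulation is in the function itself.

# Sketch of longwave-based cloud cover, assuming a black-body
# overcast sky and a Swinbank-style clear sky (emissivity ~ T^2);
# the authoritative version is pypromice's calcCloudCoverage.
import numpy as np

SIGMA = 5.67e-8          # Stefan-Boltzmann constant, W m-2 K-4
EPS_OVERCAST = 1.0
EPS_CLEAR = 9.36508e-6   # Swinbank (1963) style coefficient

def cloud_cover(dlr, t_k):
    dlr_overcast = EPS_OVERCAST * SIGMA * t_k**4
    dlr_clear = EPS_CLEAR * SIGMA * t_k**6  # extra T^2 from the emissivity
    cc = (dlr - dlr_clear) / (dlr_overcast - dlr_clear)
    return np.clip(cc, 0.0, 1.0)            # fraction in [0, 1]

print(cloud_cover(dlr=250.0, t_k=263.15))   # ~0.77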
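Similarly, for the rh_*_cor lines shuffled earlier in this file: the sensors report relative humidity with respect to water, and in subfreezing conditions it is rescaled to be with respect to ice. A sketch of the idea using Magnus-type saturation vapour pressures follows; this is an illustration only, since correctHumidity works from the constants passed into toL2 (T_0, T_100, ews, ei0).

# Sketch of RH correction with respect to ice below freezing.
# Magnus-type coefficients are illustrative; pypromice's
# correctHumidity uses its own saturation-pressure constants.
import numpy as np

def rh_wrt_ice(rh_wrt_water, t_c):
    es_water = 6.112 * np.exp(17.62 * t_c / (243.12 + t_c))  # hPa, over water
    es_ice = 6.112 * np.exp(22.46 * t_c / (272.62 + t_c))    # hPa, over ice
    # Below 0 C, rescale RH by the water/ice saturation ratio
    return np.where(t_c < 0.0, rh_wrt_water * es_water / es_ice, rh_wrt_water)

print(rh_wrt_ice(90.0, -20.0))  # ~109.7: RH w.r.t. ice exceeds RH w.r.t. water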
40 changes: 36 additions & 4 deletions src/pypromice/process/L2toL3.py
@@ -7,10 +7,7 @@

def toL3(L2, T_0=273.15, z_0=0.001, R_d=287.05, eps=0.622, es_0=6.1071,
es_100=1013.246):
'''Process one Level 2 (L2) product to Level 3 (L3), i.e. calculate all
derived variables:
- Sensible fluxes
'''Process one Level 2 (L2) product to Level 3 (L3)
Parameters
----------
@@ -35,6 +32,9 @@ def toL3(L2, T_0=273.15, z_0=0.001, R_d=287.05, eps=0.622, es_0=6.1071,

T_100 = _getTempK(T_0) # Get steam point temperature as K

ds['wdir_u'] = ds['wdir_u'].where(ds['wspd_u'] != 0) # Get directional wind speed
ds['wspd_x_u'], ds['wspd_y_u'] = calcDirWindSpeeds(ds['wspd_u'], ds['wdir_u'])

# Upper boom bulk calculation
T_h_u = ds['t_u'].copy() # Copy for processing
p_h_u = ds['p_u'].copy()
Expand Down Expand Up @@ -85,9 +85,41 @@ def toL3(L2, T_0=273.15, z_0=0.001, R_d=287.05, eps=0.622, es_0=6.1071,
q_h_l = cleanSpHumid(q_h_l, T_h_l, Tsurf_h, p_h_l, RH_cor_h_l) # Clean sp.humid values
ds['qh_l'] = (('time'), q_h_l.data)

ds['wdir_l'] = ds['wdir_l'].where(ds['wspd_l'] != 0) # Get directional wind speed
ds['wspd_x_l'], ds['wspd_y_l'] = calcDirWindSpeeds(ds['wspd_l'], ds['wdir_l'])

if hasattr(ds, 'wdir_i'):
if ~ds['wdir_i'].isnull().all() and ~ds['wspd_i'].isnull().all(): # Instantaneous msg processing
ds['wdir_i'] = ds['wdir_i'].where(ds['wspd_i'] != 0) # Get directional wind speed
ds['wspd_x_i'], ds['wspd_y_i'] = calcDirWindSpeeds(ds['wspd_i'], ds['wdir_i'])

return ds


def calcDirWindSpeeds(wspd, wdir, deg2rad=np.pi/180):
'''Calculate directional wind speed from wind speed and direction
Parameters
----------
wspd : xr.DataArray
Wind speed data array
wdir : xr.DataArray
Wind direction data array
deg2rad : float
Degree to radians coefficient. The default is np.pi/180
Returns
-------
wspd_x : xr.DataArray
Wind speed in X direction
wspd_y : xr.DataArray
Wind speed in Y direction
'''
wspd_x = wspd * np.sin(wdir * deg2rad)
wspd_y = wspd * np.cos(wdir * deg2rad)
return wspd_x, wspd_y
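A quick worked usage of calcDirWindSpeeds as moved into this file: decomposing speed and direction into orthogonal components lets winds be averaged without wrap-around artefacts at 0/360 degrees.

# Worked example: a 5 m/s wind from due west (wdir = 270 deg) gives
# wspd_x = -5 and wspd_y ~ 0 with the convention above.
import numpy as np

wspd, wdir = 5.0, 270.0
deg2rad = np.pi / 180
wspd_x = wspd * np.sin(wdir * deg2rad)  # -5.0
wspd_y = wspd * np.cos(wdir * deg2rad)  # ~0.0 (floating-point residue)

# Components average safely and convert back to a mean direction:
mean_dir = np.degrees(np.arctan2(wspd_x, wspd_y)) % 360  # ~270.0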


def calcHeatFlux(T_0, T_h, Tsurf_h, rho_atm, WS_h, z_WS, z_T, nu, q_h, p_h,
kappa=0.4, WS_lim=1., z_0=0.001, g=9.82, es_0=6.1071, eps=0.622,
gamma=16., L_sub=2.83e6, L_dif_max=0.01, c_pd=1005., aa=0.7,
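The calcHeatFlux signature above carries measurement heights (z_WS, z_T), roughness length z_0, the von Karman constant kappa and several stability parameters. As a zeroth-order illustration only, here is the neutral-stability limit of a bulk sensible heat flux; the real routine applies stability corrections (gamma, L_dif_max, ...), so treat this as a sketch, not the implementation.

# Neutral-stability sketch of a bulk sensible heat flux. The actual
# calcHeatFlux applies Monin-Obukhov-style corrections; this is the
# uncorrected, zeroth-order form only.
import numpy as np

def shf_neutral(rho_atm, ws, t_air, t_surf, z_ws=2.7, z_t=2.6,
                z_0=0.001, kappa=0.4, c_pd=1005.0):
    # Neutral transfer coefficient from log wind/temperature profiles
    c_h = kappa**2 / (np.log(z_ws / z_0) * np.log(z_t / z_0))
    # Positive flux warms the surface (air warmer than surface)
    return rho_atm * c_pd * c_h * ws * (t_air - t_surf)  # W m-2

print(shf_neutral(rho_atm=1.3, ws=5.0, t_air=-10.0, t_surf=-12.0))  # ~34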
109 changes: 32 additions & 77 deletions src/pypromice/process/aws.py
@@ -91,79 +91,44 @@ def getL1(self):
logger.info('Level 1 processing...')
self.L0 = [addBasicMeta(item, self.vars) for item in self.L0]
self.L1 = [toL1(item, self.vars) for item in self.L0]

if self.merge_flag:
self.L1A = self.hard_merge(self.L1)
else:
self.L1A = reduce(xr.Dataset.combine_first, self.L1)
self.L1A = reduce(xr.Dataset.combine_first, self.L1)
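In getL1 above, reduce(xr.Dataset.combine_first, self.L1) folds the list of L1 datasets together: earlier datasets take precedence and later ones only fill gaps. A minimal demonstration of that precedence:

# Minimal demonstration of the reduce/combine_first fold in getL1:
# values from earlier datasets win; later ones fill missing times.
from functools import reduce
import numpy as np
import pandas as pd
import xarray as xr

ds_a = xr.Dataset({"t_u": ("time", [1.0, np.nan])},
                  coords={"time": pd.date_range("2024-06-01", periods=2, freq="h")})
ds_b = xr.Dataset({"t_u": ("time", [9.0, 2.0, 3.0])},
                  coords={"time": pd.date_range("2024-06-01", periods=3, freq="h")})

merged = reduce(xr.Dataset.combine_first, [ds_a, ds_b])
print(merged["t_u"].values)  # [1. 2. 3.]: ds_a kept, gaps filled from ds_b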

def getL2(self):
'''Perform L1 to L2 data processing'''
logger.info('Level 2 processing...')
self.L2 = toL2(self.L1A, vars_df=self.vars)
self.L2 = self.resample(self.L2)
self.L2 = reformat_time(self.L2)

# Switch gps_lon to negative (degrees_east)
# Do this here, and NOT in addMeta, otherwise we switch back to positive
# when calling getMeta in joinL2! PJW
if self.L2.attrs['station_id'] not in ['UWN', 'Roof_GEUS', 'Roof_PROMICE']:
self.L2['gps_lon'] = self.L2['gps_lon'] * -1

# Add variable attributes and metadata
self.L2 = self.addAttributes(self.L2)

# Round all values to specified decimal places
self.L2 = roundValues(self.L2, self.vars)

def getL3(self):
'''Perform L2 to L3 data processing, including resampling and metadata
and attribute population'''
logger.info('Level 3 processing...')
self.L3 = toL3(self.L2)

def resample(self, dataset):
'''Resample dataset to specific temporal resolution (based on input
data type)'''
# Resample L3 product
f = [l.attrs['format'] for l in self.L0]
if 'raw' in f or 'STM' in f:
logger.info('Resampling to 10 minute')
resampled = resampleL2(dataset, '10min')
self.L3 = resampleL3(self.L3, '10min')
else:
resampled = resampleL2(dataset, '60min')
self.L3 = resampleL3(self.L3, '60min')
logger.info('Resampling to hour')
return resampled

def merge_flag(self):
'''Determine if hard merging is needed, based on whether a hard
merge_type flag is defined in any of the configs'''
f = [l.attrs['merge_type'] for l in self.L0]
if 'hard' in f:
return True
else:
return False

def hard_merge(self, dataset_list):
'''Determine positions where hard merging should occur, combine
data and append to list of combined data chunks, then hard merge all
combined data chunks. This should be called in instances where there
needs to be a clear break between input datasets, such as when a station
is moved (and we do not want the GPS position jumping)'''
# Define positions where hard merging should occur
m=[]
f = [l.attrs['merge_type'] for l in self.L0]
[m.append(i) for i, item in enumerate(f) if item=='hard']

# Perform combine between hard merge breaks and append to list of combined data
combined=[]
for i in range(len(m[:-1])):
combined.append(reduce(xr.Dataset.combine_first, dataset_list[m[i]:m[i+1]]))
combined.append(reduce(xr.Dataset.combine_first, dataset_list[m[-1]:]))

# Hard merge all combined datasets together
return reduce(xr.Dataset.update, combined)
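hard_merge splits the dataset list at every config flagged merge_type 'hard' and combines only within those breaks, so a station move does not blend GPS positions across the break. The slicing can be sketched with plain lists (strings standing in for datasets); note that the original builds m through a side-effect list comprehension, which a plain comprehension expresses more directly.

# Sketch of the breakpoint slicing in hard_merge, with strings as
# stand-ins for the xarray datasets.
merge_types = ["hard", "append", "append", "hard", "append"]
datasets = ["ds0", "ds1", "ds2", "ds3", "ds4"]

m = [i for i, t in enumerate(merge_types) if t == "hard"]  # [0, 3]

chunks = []
for start, stop in zip(m, m[1:]):
    chunks.append(datasets[start:stop])  # combined between breaks
chunks.append(datasets[m[-1]:])          # tail chunk after last break

print(chunks)  # [['ds0', 'ds1', 'ds2'], ['ds3', 'ds4']]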



# Re-format time
t = self.L3['time'].values
self.L3['time'] = list(t)

# Switch gps_lon to negative (degrees_east)
# Do this here, and NOT in addMeta, otherwise we switch back to positive
# when calling getMeta in joinL3! PJW
if self.L3.attrs['station_id'] not in ['UWN', 'Roof_GEUS', 'Roof_PROMICE']:
self.L3['gps_lon'] = self.L3['gps_lon'] * -1

# Add variable attributes and metadata
self.L3 = self.addAttributes(self.L3)

# Round all values to specified decimal places
self.L3 = roundValues(self.L3, self.vars)

def addAttributes(self, L3):
'''Add variable and attribute metadata
Expand Down Expand Up @@ -400,12 +365,6 @@ def getL0(infile, nodata, cols, skiprows, file_version,
ds = xr.Dataset.from_dataframe(df)
return ds

def reformat_time(dataset):
'''Re-format time'''
t = dataset['time'].values
dataset['time'] = list(t)
return dataset

def addBasicMeta(ds, vars_df):
''' Use a variable lookup table DataFrame to add the basic metadata
to the xarray dataset. This is later amended to finalise L3
Expand Down Expand Up @@ -753,8 +712,8 @@ def getMeta(m_file=None, delimiter=','):
pass
return meta

def resampleL2(ds_h, t):
'''Resample L2 AWS data, e.g. hourly to daily average. This uses pandas
def resampleL3(ds_h, t):
'''Resample L3 AWS data, e.g. hourly to daily average. This uses pandas
DataFrame resampling at the moment as a work-around to the xarray Dataset
resampling. As stated, xarray resampling is a lengthy process that takes
~2-3 minutes per operation: ds_d = ds_h.resample({'time':"1D"}).mean()
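As the docstring notes, xarray's native resample is slow on these series, so the data round-trips through pandas. The pattern in miniature is shown below; the real function also restores attributes and handles non-numeric variables.

# Miniature of the pandas round-trip described in the docstring:
# to_dataframe -> resample -> back to xarray.
import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range("2024-06-01", periods=6, freq="10min")
ds_10min = xr.Dataset({"t_u": ("time", np.arange(6.0))}, coords={"time": time})

df_h = ds_10min.to_dataframe().resample("60min").mean()  # fast pandas path
ds_h = xr.Dataset.from_dataframe(df_h)
print(ds_h["t_u"].values)  # [2.5], the hourly mean of six 10-min samples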
Expand Down Expand Up @@ -922,7 +881,7 @@ def testAddAll(self):
self.assertTrue(d.attrs['station_id']=='TEST')
self.assertIsInstance(d.attrs['references'], str)

def testL0toL2(self):
def testL0toL3(self):
'''Test L0 to L3 processing'''
try:
import pypromice
@@ -931,23 +890,19 @@ def testL0toL2(self):
except:
pAWS = AWS('../test/test_config1.toml', '../test/')
pAWS.process()
self.assertIsInstance(pAWS.L2, xr.Dataset)
self.assertTrue(pAWS.L2.attrs['station_id']=='TEST1')

def testCLIgetl2(self):
'''Test get_l2 CLI'''
exit_status = os.system('get_l2 -h')
self.assertEqual(exit_status, 0)
self.assertIsInstance(pAWS.L3, xr.Dataset)
self.assertTrue(pAWS.L3.attrs['station_id']=='TEST1')

def testCLIjoinl2(self):
'''Test join_l2 CLI'''
exit_status = os.system('join_l2 -h')
self.assertEqual(exit_status, 0)

def testCLIgetl3(self):
'''Test get_l3 CLI'''
exit_status = os.system('get_l3 -h')
self.assertEqual(exit_status, 0)

def testCLIjoinl3(self):
'''Test join_l3 CLI'''
exit_status = os.system('join_l3 -h')
self.assertEqual(exit_status, 0)

#------------------------------------------------------------------------------

if __name__ == "__main__":
