Skip to content

Commit

Permalink
files
Browse files Browse the repository at this point in the history
  • Loading branch information
Sceki committed Dec 2, 2024
1 parent 2a0d6f3 commit eb6161f
Show file tree
Hide file tree
Showing 2 changed files with 8,663 additions and 6 deletions.
15 changes: 9 additions & 6 deletions karman/density_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

from . import nn
from . import util
from importlib.resources import files

# Absolute directory containing this module.
# NOTE(review): resource loading below appears to have migrated to
# importlib.resources.files("karman"); confirm script_dir is still used
# elsewhere before removing it.
script_dir = os.path.dirname(os.path.abspath(__file__))
Expand All @@ -19,17 +20,19 @@
# Module-level loading of fitted scalers and normalization dictionaries.
# All artifacts are shipped inside the "karman" package and located via
# importlib.resources so the package works when installed (not only when
# run from a source checkout).
scalers_dict = {}
keys_time_series_data = ['omni_indices', 'omni_solar_wind', 'omni_magnetic_field', 'soho', 'msise']
for key in keys_time_series_data:
    # One pickled scaler per time-series data source.
    with files("karman").joinpath(f"scaler_{key}.pk").open("rb") as f:
        scalers_dict[key] = pk.load(f)

# Normalization statistics for the time-series inputs.
with files("karman").joinpath("normalization_dict_ts.pk").open("rb") as f:
    _normalization_dict_ts = pk.load(f)

# Normalization statistics for the static (non-time-series) inputs.
with files("karman").joinpath("normalization_dict.pk").open("rb") as f:
    _normalization_dict = pk.load(f)

# we also load the data for the space-weather indices, in case needed:
# NOTE(review): the CSV is resolved from the package root rather than
# data/merged_datasets/ — confirm the file is bundled at that location.
file_path = files("karman").joinpath("satellites_data_subsampled_1d.csv")
df_sw = pd.read_csv(file_path)

class ForecastingModel():
def __init__(self,
Expand Down
Loading

0 comments on commit eb6161f

Please sign in to comment.