Commit

removing matplotlib for testing
lionel42 committed Nov 15, 2023
1 parent 899250c commit 4d8481f
Showing 1 changed file with 9 additions and 10 deletions.
19 changes: 9 additions & 10 deletions tests/test_tno_inv.py
@@ -1,25 +1,26 @@
 """Test file for tno inventory"""
-#%% Imports
+# %% Imports
 from emiproc.inventories.tno import TNO_Inventory
 from emiproc.inventories.utils import group_categories
 from emiproc.profiles.vertical_profiles import check_valid_vertical_profile
 from pathlib import Path
-import matplotlib.pyplot as plt
 import pytest
 
 # TODO change the pth of that if you want to test it
-tno_path = Path(r"C:\Users\coli\Documents\emiproc\files\TNO_6x6_GHGco_v4_0\TNO_GHGco_v4_0_year2018.nc")
+tno_path = Path(
+    r"C:\Users\coli\Documents\emiproc\files\TNO_6x6_GHGco_v4_0\TNO_GHGco_v4_0_year2018.nc"
+)
 
 
 # make this test only if the tno inventory is available
 # otherwise skip it
 @pytest.mark.slow
 @pytest.mark.skipif(not tno_path.exists(), reason="TNO inventory not found")
 def test_loading_and_grouping():
 
     # %% Test vertical profiles on the TNO inventory
     if not tno_path.exists():
         raise ValueError("The path to the TNO inventory is not correct")
 
     inv_tno = TNO_Inventory(tno_path)
     # Check the vertical profiles
     check_valid_vertical_profile(inv_tno.v_profiles)
@@ -31,9 +32,7 @@ def test_loading_and_grouping():
     # test that we have the same number of point sources in both inventories
     # the number of point source is the number of rows of each of the gdfs
 
-    assert sum([len(gdf) for gdf in inv_tno.gdfs.values()]) == len(groupped_tno.gdfs['all'])
-    # %% plot the vertical profiles from the groupped inventory
-    fig, ax = plt.subplots()
-    for r in groupped_tno.v_profiles.ratios:
-        ax.plot(groupped_tno.v_profiles.height, r)
+    assert sum([len(gdf) for gdf in inv_tno.gdfs.values()]) == len(
+        groupped_tno.gdfs["all"]
+    )
     # %%
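
The dropped plotting block was the only place this test touched matplotlib. If a vertical-profile plot were ever worth keeping in the suite, one alternative to deleting it outright is pytest's importorskip, which turns a missing optional dependency into a skipped test rather than a collection-time import error. A minimal sketch, not part of this commit; the test name and the dummy height/ratio values below are illustrative only:

import pytest


def test_optional_profile_plot():
    # Skip cleanly when matplotlib is not installed instead of failing at import time.
    plt = pytest.importorskip("matplotlib.pyplot")
    fig, ax = plt.subplots()
    # Dummy heights and ratios standing in for groupped_tno.v_profiles in the real test.
    ax.plot([0.0, 100.0, 500.0], [0.2, 0.5, 0.3])
    plt.close(fig)

With that pattern the hard matplotlib dependency removed here would stay out of environments that do not have it installed, while the plot could still be exercised where it is available.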
