Merge pull request #356 from euroargodev/bgc-2024
Work on BGC from 2024 LOV visit
gmaze authored Sep 25, 2024
2 parents 1e79ec0 + 2d4785d commit 549d8c3
Showing 710 changed files with 100,810 additions and 758 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/pytests-upstream.yml
@@ -70,7 +70,7 @@ jobs:
     defaults:
       run:
         shell: bash -l {0}
-    timeout-minutes: 45
+    # timeout-minutes: 45
     strategy:
       fail-fast: true
       matrix:
@@ -196,7 +196,7 @@ jobs:
     defaults:
       run:
         shell: bash -l {0}
-    timeout-minutes: 45
+    # timeout-minutes: 45
     strategy:
       fail-fast: true
       matrix:
4 changes: 2 additions & 2 deletions .github/workflows/pytests.yml
@@ -47,7 +47,7 @@ jobs:
       run:
         shell: bash -l {0}
     continue-on-error: ${{ matrix.experimental }}
-    timeout-minutes: 45
+    # timeout-minutes: 45
     strategy:
       max-parallel: 12
       fail-fast: false
@@ -169,7 +169,7 @@ jobs:
       run:
         shell: bash -l {0}
     continue-on-error: ${{ matrix.experimental }}
-    timeout-minutes: 45
+    # timeout-minutes: 45
     strategy:
       max-parallel: 12
       fail-fast: false
5 changes: 5 additions & 0 deletions argopy/__init__.py
@@ -42,6 +42,7 @@
 from .utils import MonitoredThreadPoolExecutor  # noqa: E402, F401
 from .utils import monitor_status as status  # noqa: E402
 from .related import TopoFetcher, OceanOPSDeployments, ArgoNVSReferenceTables, ArgoDocs, ArgoDOI  # noqa: E402
+from .extensions import CanyonMED


 #
@@ -77,6 +78,10 @@
     "ArgoColors",  # Class
     "stores",
     "tutorial",
+
+    # Argo xarray accessor extensions
+    "CanyonMED",
+
     # Constants
     "__version__"
 )
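This hunk promotes the CANYON-MED neural network extension to a top-level export. A minimal sketch of what the new import path enables; the region box, the CanyonMED(ds) constructor and its predict() method are assumptions for illustration, not confirmed by this diff:

import argopy
from argopy import CanyonMED  # top-level export added by this commit

# Hypothetical usage on a BGC dataset fetched over the western Mediterranean:
ds = argopy.DataFetcher(ds="bgc", params="all").region([5, 6, 40, 41, 0, 100]).to_xarray()
predicted = CanyonMED(ds).predict()  # assumed signature, shown for illustration only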
32 changes: 27 additions & 5 deletions argopy/data_fetchers/argovis_data.py
File mode changed: 100644 → 100755
@@ -82,6 +82,7 @@ def __init__(
         """
         self.definition = "Argovis Argo data fetcher"
         self.dataset_id = OPTIONS["dataset"] if ds == "" else ds
+        self.user_mode = kwargs["mode"] if "mode" in kwargs else OPTIONS["mode"]
         self.server = kwargs["server"] if "server" in kwargs else api_server
         timeout = OPTIONS["api_timeout"] if api_timeout == 0 else api_timeout
         self.store_opts = {
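The new user_mode attribute resolves the same way server does just below it: an explicit keyword argument wins, otherwise the package-level option applies. A self-contained sketch of that precedence, with an illustrative stand-in for argopy's global OPTIONS:

OPTIONS = {"mode": "standard"}  # illustrative stand-in for argopy's global options

def resolve_user_mode(**kwargs):
    # An explicit 'mode' kwarg takes precedence over the global default
    return kwargs["mode"] if "mode" in kwargs else OPTIONS["mode"]

assert resolve_user_mode() == "standard"
assert resolve_user_mode(mode="expert") == "expert"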
@@ -379,14 +380,20 @@ def to_xarray(self, errors: str = "ignore"):
         ds = ds[np.sort(ds.data_vars)]
         return ds

+    def transform_data_mode(self, ds: xr.Dataset, **kwargs):
+        # Argovis data are already curated !
+        if ds.argo._type == "point":
+            ds["N_POINTS"] = np.arange(0, len(ds["N_POINTS"]))
+        return ds
+
     def filter_data_mode(self, ds: xr.Dataset, **kwargs):
-        # Argovis data already curated !
+        # Argovis data are already curated !
         if ds.argo._type == "point":
             ds["N_POINTS"] = np.arange(0, len(ds["N_POINTS"]))
         return ds

     def filter_qc(self, ds: xr.Dataset, **kwargs):
-        # Argovis data already curated !
+        # Argovis data are already curated !
         if ds.argo._type == "point":
             ds["N_POINTS"] = np.arange(0, len(ds["N_POINTS"]))
         return ds
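The new transform_data_mode and the two retouched filters all end with the same re-indexing step. A self-contained sketch of what that line does, on a toy point-type dataset with illustrative values:

import numpy as np
import xarray as xr

# After subsetting, the N_POINTS index can be non-contiguous, e.g. [3, 7, 12]
ds = xr.Dataset(coords={"N_POINTS": [3, 7, 12]})
# Reset it to a clean 0..N-1 range, exactly as the methods above do
ds["N_POINTS"] = np.arange(0, len(ds["N_POINTS"]))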
@@ -396,7 +403,7 @@ def filter_researchmode(self, ds: xr.Dataset, *args, **kwargs) -> xr.Dataset:

         This filter will select only QC=1 delayed mode data with pressure errors smaller than 20db

-        Use this filter instead of filter_data_mode and filter_qc
+        Use this filter instead of transform_data_mode and filter_qc
         """
         ds = ds.argo.filter_researchmode()
         if ds.argo._type == "point":
@@ -521,7 +528,12 @@ def uri(self):
                )
                boxes = self.Chunker.fit_transform()
                for box in boxes:
-                   urls.append(Fetch_box(box=box, ds=self.dataset_id).get_url())
+                   opts = {
+                       "ds": self.dataset_id,
+                       "fs": self.fs,
+                       "server": self.server,
+                   }
+                   urls.append(Fetch_box(box=box, **opts).get_url())
            else:
                urls.append(self.get_url())
        else:
@@ -549,6 +561,16 @@ def uri(self):
                )
                boxes = self.Chunker.fit_transform()
                for box in boxes:
-                   urls.append(Fetch_box(box=box, ds=self.dataset_id).get_url())
+                   opts = {
+                       "ds": self.dataset_id,
+                       "fs": self.fs,
+                       "server": self.server,
+                   }
+                   urls.append(
+                       Fetch_box(
+                           box=box,
+                           **opts,
+                       ).get_url()
+                   )

        return self.url_encode(urls)
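Both uri() hunks apply the same fix: each per-chunk Fetch_box now inherits the parent fetcher's dataset id, file store and server instead of falling back to module defaults. A reduced sketch of the pattern; the Fetch_box class body and server URL below are illustrative stand-ins, not the real implementation:

class Fetch_box:
    def __init__(self, box, ds, fs, server):
        self.box, self.ds, self.fs, self.server = box, ds, fs, server

    def get_url(self):
        return f"{self.server}/query?ds={self.ds}&box={self.box}"

# Build the shared options once, then unpack them into every chunked fetcher
opts = {"ds": "argovis", "fs": None, "server": "https://example-api.org"}
urls = [Fetch_box(box=box, **opts).get_url() for box in [(0, 1), (1, 2)]]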