diff --git a/.github/workflows/pytests-upstream.yml b/.github/workflows/pytests-upstream.yml index 60ecd1f3..d6239df3 100644 --- a/.github/workflows/pytests-upstream.yml +++ b/.github/workflows/pytests-upstream.yml @@ -80,7 +80,7 @@ jobs: steps: - name: Energy Estimation - Initialize if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 with: task: start-measurement company-uuid: ${{ secrets.CARBONDB_COMPANY_UUID }} @@ -127,7 +127,7 @@ jobs: - name: Energy Estimation - Measure Tests Setup if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: @@ -147,7 +147,7 @@ jobs: - name: Energy Estimation - Measure Tests Exec if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: @@ -173,7 +173,7 @@ jobs: - name: Energy Estimation - Show Results if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: @@ -208,7 +208,7 @@ jobs: steps: - name: Energy Estimation - Initialize if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 with: task: start-measurement company-uuid: ${{ secrets.CARBONDB_COMPANY_UUID }} @@ -255,7 +255,7 @@ jobs: - name: Energy Estimation - Measure Tests Setup if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: 
@@ -275,7 +275,7 @@ jobs: - name: Energy Estimation - Measure Tests Exec if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: @@ -292,7 +292,7 @@ jobs: - name: Energy Estimation - Show Results if: ${{matrix.os == 'ubuntu-latest'}} - uses: green-coding-berlin/eco-ci-energy-estimation@v3 + uses: green-coding-berlin/eco-ci-energy-estimation@v4 env: ELECTRICITY_MAPS_TOKEN: ${{ secrets.ELECTRICITY_MAPS_TOKEN }} with: diff --git a/.github/workflows/pytests.yml b/.github/workflows/pytests.yml index b9e5cae5..177227ea 100644 --- a/.github/workflows/pytests.yml +++ b/.github/workflows/pytests.yml @@ -132,7 +132,7 @@ jobs: continue-on-error: true - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5.1.1 with: token: ${{ secrets.CODECOV_TOKEN }} file: ./cov.xml @@ -261,7 +261,7 @@ jobs: continue-on-error: true - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5.1.1 with: token: ${{ secrets.CODECOV_TOKEN }} file: ./cov.xml diff --git a/argopy/data_fetchers/erddap_data_processors.py b/argopy/data_fetchers/erddap_data_processors.py index 58c50e21..f63a1381 100644 --- a/argopy/data_fetchers/erddap_data_processors.py +++ b/argopy/data_fetchers/erddap_data_processors.py @@ -76,7 +76,6 @@ def pre_process( Fetched_url = this_ds.attrs.get("Fetched_uri") else: Fetched_url = this_ds.attrs.get("history", "").split('\n')[-1].split(' ')[-1] - Fetched_constraints = UriCName(Fetched_url) # Finally overwrite erddap attributes with those from argopy: raw_attrs = this_ds.attrs.copy() @@ -104,7 +103,7 @@ def pre_process( this_ds.attrs["Fetched_date"] = pd.to_datetime("now", utc=True).strftime( "%Y/%m/%d" ) - this_ds.attrs["Fetched_constraints"] = Fetched_constraints + this_ds.attrs["Fetched_constraints"] = 
UriCName(Fetched_url).cname this_ds.attrs["Fetched_uri"] = Fetched_url this_ds = this_ds[np.sort(this_ds.data_vars)] diff --git a/argopy/data_fetchers/gdac_data_processors.py b/argopy/data_fetchers/gdac_data_processors.py index c4d76bcc..a56f5d7d 100644 --- a/argopy/data_fetchers/gdac_data_processors.py +++ b/argopy/data_fetchers/gdac_data_processors.py @@ -1,6 +1,10 @@ import numpy as np import xarray as xr from typing import Literal +import logging + + +log = logging.getLogger("argopy.gdac.data") def pre_process_multiprof( diff --git a/argopy/extensions/params_data_mode.py b/argopy/extensions/params_data_mode.py index 46000bff..45c2f0c6 100644 --- a/argopy/extensions/params_data_mode.py +++ b/argopy/extensions/params_data_mode.py @@ -6,7 +6,11 @@ import copy from ..utils import to_list, list_core_parameters -from ..utils.transform import split_data_mode, merge_param_with_param_adjusted, filter_param_by_data_mode +from ..utils.transform import ( + split_data_mode, + merge_param_with_param_adjusted, + filter_param_by_data_mode, +) from ..stores import ( indexstore_pd as ArgoIndex, ) # make sure we work with a Pandas index store @@ -43,10 +47,12 @@ class ParamsDataMode(ArgoAccessorExtension): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - def compute(self, indexfs: Union[None, ArgoIndex]) -> xr.Dataset: # noqa: C901 - """Compute and add _DATA_MODE variables to a xarray dataset + def _compute_from_ArgoIndex( + self, indexfs: Union[None, ArgoIndex] + ) -> xr.Dataset: # noqa: C901 + """Compute _DATA_MODE variables from ArgoIndex - This method consume a collection of points. + This method consumes a collection of points. 
Parameters ---------- @@ -55,9 +61,9 @@ def compute(self, indexfs: Union[None, ArgoIndex]) -> xr.Dataset: # noqa: C901 Returns ------- - :class:`xr.Dataset` + :class:`xarray.Dataset` """ - idx = copy.copy(indexfs) if isinstance(indexfs, ArgoIndex) else ArgoIndex() + idx = indexfs.copy(deep=True) if isinstance(indexfs, ArgoIndex) else ArgoIndex() def complete_df(this_df, params): """Add 'wmo', 'cyc' and '_data_mode' columns to this dataframe""" @@ -103,6 +109,7 @@ def print_etime(txt, t0): profiles = self._argo.list_WMO_CYC idx.search_wmo(self._argo.list_WMO) + params = [ p for p in idx.read_params() @@ -168,10 +175,30 @@ def print_etime(txt, t0): self._obj = self._obj[np.sort(self._obj.data_vars)] return self._obj - def split(self): + def compute(self, indexfs: Union[None, ArgoIndex]) -> xr.Dataset: + """Compute _DATA_MODE variables""" + if "STATION_PARAMETERS" in self._obj and "PARAMETER_DATA_MODE" in self._obj: + return split_data_mode(self._obj) + else: + return self._compute_from_ArgoIndex(indexfs=indexfs) + + def split(self) -> xr.Dataset: + """Convert PARAMETER_DATA_MODE(N_PROF, N_PARAM) into several _DATA_MODE(N_PROF) variables + + Using the list of *PARAM* found in ``STATION_PARAMETERS``, this method will create ``N_PARAM`` + new variables in the dataset ``_DATA_MODE(N_PROF)``. + + The variable ``PARAMETER_DATA_MODE`` is drop from the dataset at the end of the process. 
+ + Returns + ------- + :class:`xarray.Dataset` + """ return split_data_mode(self._obj) - def merge(self, params: Union[str, List[str]] = "all", errors: str = "raise") -> xr.Dataset: + def merge( + self, params: Union[str, List[str]] = "all", errors: str = "raise" + ) -> xr.Dataset: """Merge and _ADJUSTED variables according to DATA_MODE or _DATA_MODE Merging is done as follows: @@ -251,7 +278,7 @@ def filter( logical: str = "and", mask: bool = False, errors: str = "raise", - ): + ) -> xr.Dataset: """Filter measurements according to parameters data mode Filter the dataset to keep points where all or some of the parameters are in any of the data mode specified. diff --git a/argopy/fetchers.py b/argopy/fetchers.py index a86e70b7..6f441530 100755 --- a/argopy/fetchers.py +++ b/argopy/fetchers.py @@ -177,6 +177,8 @@ def __init__(self, mode: str = "", src: str = "", ds: str = "", **fetcher_kwargs raise OptionValueError( "The 'argovis' data source fetching is only available in 'standard' user mode" ) + if self._src == "gdac" and "bgc" in self._dataset_id: + warnings.warn("BGC data support with the 'gdac' data source is still in Work In Progress") @property def _icon_user_mode(self): diff --git a/argopy/stores/argo_index.py b/argopy/stores/argo_index.py index 85b103e2..d5e0cfb1 100644 --- a/argopy/stores/argo_index.py +++ b/argopy/stores/argo_index.py @@ -76,6 +76,8 @@ class ArgoIndex(indexstore): >>> idx.read_wmo >>> idx.read_params >>> idx.records_per_wmo + >>> idx.copy(deep=False) + """ diff --git a/argopy/stores/argo_index_proto.py b/argopy/stores/argo_index_proto.py index 0079f484..6e39a84a 100644 --- a/argopy/stores/argo_index_proto.py +++ b/argopy/stores/argo_index_proto.py @@ -2,6 +2,8 @@ Argo file index store prototype """ + +import copy import numpy as np import pandas as pd import logging @@ -11,6 +13,13 @@ from urllib.parse import urlparse from typing import Union from pathlib import Path +import sys + + +if sys.version_info >= (3, 11): + from typing import 
Self +else: + from typing_extensions import Self from ..options import OPTIONS from ..errors import GdacPathError, S3PathError, InvalidDataset, OptionValueError @@ -64,37 +73,44 @@ def __init__( cachedir: str = "", timeout: int = 0, **kwargs, - ) -> object: + ): """Create an Argo index file store Parameters ---------- host: str, default: ``https://data-argo.ifremer.fr`` - Local or remote (ftp, https or s3) path to a `dac` folder (GDAC structure compliant). This takes values - like: - - ``https://data-argo.ifremer.fr`` - - ``ftp://ftp.ifremer.fr/ifremer/argo`` - - ``s3://argo-gdac-sandbox/pub/idx`` - - a local absolute path + Local or remote (ftp, https or s3) path to a `dac` folder (GDAC structure compliant). + + This parameter takes values like: + + - ``https://data-argo.ifremer.fr`` + - ``ftp://ftp.ifremer.fr/ifremer/argo`` + - ``s3://argo-gdac-sandbox/pub/idx`` + - a local absolute path You can also use the following keywords: ``http``/``https``, ``ftp`` and ``s3``/``aws``, respectively. index_file: str, default: ``ar_index_global_prof.txt`` Name of the csv-like text file with the index. - Possible values are standard file name: ``ar_index_global_prof.txt``, + Possible values are the standard file names: ``ar_index_global_prof.txt``, ``argo_bio-profile_index.txt``, ``argo_synthetic-profile_index.txt`` - or ``etc/argo-index/argo_aux-profile_index.txt`` + or ``etc/argo-index/argo_aux-profile_index.txt``. You can also use the following keywords: ``core``, ``bgc-b``, ``bgc-s`` and ``aux``. convention: str, default: None - Set the expected format convention of the index file. This is useful when trying to load index file with custom name. If set to ``None``, we'll try to infer the convention from the ``index_file`` value. - Possible values: ``ar_index_global_prof``, ``argo_bio-profile_index``, ``argo_synthetic-profile_index`` or ``argo_aux-profile_index``. + Set the expected format convention of the index file. 
+ + This is useful when trying to load an index file with a custom name. + If set to ``None``, we'll try to infer the convention from the ``index_file`` value. + + Possible values: ``ar_index_global_prof``, ``argo_bio-profile_index``, ``argo_synthetic-profile_index`` + or ``argo_aux-profile_index``. You can also use the following keywords: ``core``, ``bgc-s``, ``bgc-b`` and ``aux``. cache : bool, default: False Use cache or not. cachedir: str, default: OPTIONS['cachedir'] - Folder where to store cached files + Folder where to store cached files. timeout: int, default: OPTIONS['api_timeout'] Time out in seconds to connect to a remote host (ftp or http). """ @@ -127,7 +143,7 @@ def __init__( # Create a File Store to access index file: self.cache = cache self.cachedir = OPTIONS["cachedir"] if cachedir == "" else cachedir - timeout = OPTIONS["api_timeout"] if timeout == 0 else timeout + self.timeout = OPTIONS["api_timeout"] if timeout == 0 else timeout self.fs = {} if split_protocol(host)[0] is None: self.fs["src"] = filestore(cache=cache, cachedir=cachedir) @@ -152,7 +168,7 @@ def __init__( port=0 if urlparse(self.host).port is None else urlparse(self.host).port, cache=cache, cachedir=cachedir, - timeout=timeout, + timeout=self.timeout, block_size=1000 * (2**20), ) @@ -170,7 +186,8 @@ def __init__( raise S3PathError("This host (%s) is not alive !" % self.host) self.fs["src"] = s3store( - cache=cache, cachedir=cachedir, + cache=cache, + cachedir=cachedir, anon=not has_aws_credentials(), ) self.skip_rows = 10 @@ -231,13 +248,15 @@ def __init__( if self.fs["src"].exists(self.index_path + ".gz"): self.index_file += ".gz" - if isinstance(self.fs['src'], s3store): + if isinstance(self.fs["src"], s3store): # If the index host is on a S3 store, we add another file system that will bypass some # search methods to improve performances. 
self.fs["s3"] = get_a_s3index(self.convention) # Adjust S3 bucket name and key with host and index file names: self.fs["s3"].bucket_name = Path(split_protocol(self.host)[1]).parts[0] - self.fs["s3"].key = str(Path(*Path(split_protocol(self.host)[1]).parts[1:]) / self.index_file) + self.fs["s3"].key = str( + Path(*Path(split_protocol(self.host)[1]).parts[1:]) / self.index_file + ) # # CNAME internal manager to be able to chain search methods: # self._cname = None @@ -249,8 +268,10 @@ def __repr__(self): summary.append("Convention: %s (%s)" % (self.convention, self.convention_title)) if hasattr(self, "index"): summary.append("In memory: True (%i records)" % self.N_RECORDS) - elif 's3' in self.host: - summary.append("In memory: False [But there's no need to load the full index with a S3 host to make a search]") + elif "s3" in self.host: + summary.append( + "In memory: False [But there's no need to load the full index with a S3 host to make a search]" + ) else: summary.append("In memory: False") @@ -420,7 +441,7 @@ def N_RECORDS(self): # Must work for all internal storage type (:class:`pyarrow.Table` or :class:`pandas.DataFrame`) if hasattr(self, "index"): return self.index.shape[0] - elif 's3' in self.host: + elif "s3" in self.host: return np.Inf else: raise InvalidDataset("Load the index first !") @@ -477,7 +498,9 @@ def _write(self, fs, path, obj, fmt="pq"): if fmt == "parquet": fmt = "pq" if isinstance(fs, memorystore): - fs.fs.touch(this_path) # Fix for https://github.com/euroargodev/argopy/issues/345 + fs.fs.touch( + this_path + ) # Fix for https://github.com/euroargodev/argopy/issues/345 # fs.fs.touch(this_path) # Fix for https://github.com/euroargodev/argopy/issues/345 # This is an f* mystery to me, why do we need 2 calls to trigger file creation FOR REAL ???? 
# log.debug("memorystore touched this path before open context: '%s'" % this_path) @@ -519,7 +542,7 @@ def _read(self, fs, path, fmt="pq"): # log.debug("_read this path: '%s'" % this_path) return obj - def clear_cache(self): + def clear_cache(self) -> Self: """Clear cache registry and files associated with this store instance.""" self.fs["src"].clear_cache() self.fs["client"].clear_cache() @@ -609,7 +632,7 @@ def get_filename(s, index): from ..related import load_dict, mapp_dict if nrows is not None: - df = df.loc[0: nrows - 1].copy() + df = df.loc[0 : nrows - 1].copy() if "index" in df: df.drop("index", axis=1, inplace=True) @@ -903,7 +926,9 @@ def search_params(self, PARAMs: Union[str, list], logical: str): raise NotImplementedError("Not implemented") @abstractmethod - def search_parameter_data_mode(self, PARAMs: dict, logical: bool = 'and', nrows=None): + def search_parameter_data_mode( + self, PARAMs: dict, logical: bool = "and", nrows=None + ): """Search index for profiles with a parameter in a specific data mode Parameters @@ -997,3 +1022,80 @@ def _insert_header(self, originalfile): f.write(data) return originalfile + + def _copy( + self, + deep: bool = True, + ) -> Self: + cls = self.__class__ + + if deep: + # Ensure complete independence between the original and the copied index: + obj = cls.__new__(cls) + obj.__init__( + host=copy.deepcopy(self.host), + index_file=copy.deepcopy(self.index_file), + timeout=copy.deepcopy(self.timeout), + cache=copy.deepcopy(self.cache), + cachedir=copy.deepcopy(self.cachedir), + ) + if hasattr(self, "index"): + obj._nrows_index = copy.deepcopy(self._nrows_index) + obj.index = copy.deepcopy(self.index) + if self.cache: + obj.index_path_cache = copy.deepcopy(self.index_path_cache) + + else: + obj = cls.__new__(cls) + obj.__init__( + host=copy.copy(self.host), + index_file=copy.copy(self.index_file), + timeout=copy.copy(self.timeout), + cache=copy.copy(self.cache), + cachedir=copy.copy(self.cachedir), + ) + if hasattr(self, 
"index"): + obj._nrows_index = copy.copy(self._nrows_index) + obj.index = copy.copy(self.index) + if self.cache: + obj.index_path_cache = copy.copy(self.index_path_cache) + + if hasattr(self, "search"): + obj.search_type = copy.copy(self.search_type) + obj.search_filter = copy.copy(self.search_filter) + obj.search = copy.copy(self.search) + if obj.cache: + obj.search_path_cache = copy.copy(self.search_path_cache) + + return obj + + def __copy__(self) -> Self: + return self._copy(deep=False) + + def __deepcopy__(self) -> Self: + return self._copy(deep=True) + + def copy( + self, + deep: bool = True, + ) -> Self: + """Returns a copy of this :class:`ArgoIndex` instance + + A copy is a new instance based on similar parameters (e.g. ``host`` and ``index_file``). + + A deep copy ensure complete independence between the original and the copied index. + If the index was loaded, a new view is returned with the copied index, but search parameters and results are lost. + + A shallow copy preserves the index array, search parameters and results. + + Parameters + ---------- + deep: bool, optional, default=True + + Whether the search parameters and results are copied onto the new ArgoIndex instance. 
+ + Returns + ------- + :class:`ArgoIndex` + """ + return self._copy(deep=deep) diff --git a/argopy/stores/filesystems.py b/argopy/stores/filesystems.py index c79a493a..c35e9e1c 100644 --- a/argopy/stores/filesystems.py +++ b/argopy/stores/filesystems.py @@ -2133,7 +2133,6 @@ def connect(self): try: payload = self._login_payload.copy() payload["password"] = "*" * len(payload["password"]) - log.info("Try to log-in to '%s' page with %s" % (self._login_page, payload)) self.fs.info(self._login_page) self._connected = True except ErddapHTTPUnauthorized: diff --git a/argopy/utils/transform.py b/argopy/utils/transform.py index 87d55f31..be0f1e58 100644 --- a/argopy/utils/transform.py +++ b/argopy/utils/transform.py @@ -4,6 +4,7 @@ import numpy as np import xarray as xr +import pandas as pd import logging from typing import List, Union @@ -349,6 +350,7 @@ def filter_param_by_data_mode( return ds.loc[dict(N_POINTS=filter)] if len(filter) > 0 else ds + def split_data_mode(ds: xr.Dataset) -> xr.Dataset: """Convert PARAMETER_DATA_MODE(N_PROF, N_PARAM) into several _DATA_MODE(N_PROF) variables @@ -361,6 +363,11 @@ def split_data_mode(ds: xr.Dataset) -> xr.Dataset: ------- :class:`xr.Dataset` """ + if ds.argo._type != "profile": + raise InvalidDatasetStructure( + "Method only available to a collection of profiles" + ) + if "STATION_PARAMETERS" in ds and "PARAMETER_DATA_MODE" in ds: # Ensure N_PROF is a coordinate @@ -372,19 +379,38 @@ def split_data_mode(ds: xr.Dataset) -> xr.Dataset: u64 = lambda s: "%s%s" % (s, " " * (64 - len(s))) # noqa: E731 params = [p.strip() for p in np.unique(ds["STATION_PARAMETERS"])] + def read_data_mode_for(ds: xr.Dataset, param: str) -> xr.DataArray: + """Return data mode of a given parameter""" + da_masked = ds['PARAMETER_DATA_MODE'].where(ds['STATION_PARAMETERS'] == u64(param)) + + def _dropna(x): + # x('N_PARAM') is reduced to the first non nan value, a scalar, no dimension + y = pd.Series(x).dropna().tolist() + if len(y) == 0: + return "" + 
else: + return y[0] + + kwargs = dict( + dask="parallelized", + input_core_dims=[["N_PARAM"]], # Function takes N_PARAM as input + output_core_dims=[[]], # Function reduces to a scalar (no dimension) + vectorize=True # Apply function element-wise along the other dimensions + ) + + dm = xr.apply_ufunc(_dropna, da_masked, **kwargs) + dm = dm.rename("%s_DATA_MODE" % param) + dm.attrs = ds['PARAMETER_DATA_MODE'].attrs + return dm + for param in params: name = "%s_DATA_MODE" % param.replace("_PARAMETER", "").replace( "PARAMETER_", "" ) - mask = ds["STATION_PARAMETERS"] == xr.full_like( - ds["STATION_PARAMETERS"], - u64(param), - dtype=ds["STATION_PARAMETERS"].dtype, - ) - da = ds["PARAMETER_DATA_MODE"].where(mask, drop=True).isel(N_PARAM=0) - da = da.rename(name) - da = da.astype(ds["PARAMETER_DATA_MODE"].dtype) - ds[name] = da + if name == "_DATA_MODE": + log.error("This dataset has an error in 'STATION_PARAMETERS': it contains an empty string") + else: + ds[name] = read_data_mode_for(ds, param) ds = ds.drop_vars("PARAMETER_DATA_MODE") ds.argo.add_history("Transformed with 'split_data_mode'") diff --git a/argopy/xarray.py b/argopy/xarray.py index 900d8e5c..26cca9a3 100644 --- a/argopy/xarray.py +++ b/argopy/xarray.py @@ -402,7 +402,7 @@ def point2profile(self, drop: bool = False) -> xr.Dataset: # noqa: C901 Returns ------- - :class:`xr.dataset` + :class:`xr.Dataset` See Also -------- @@ -563,9 +563,11 @@ def profile2point(self) -> xr.Dataset: - A "point" is a location with unique (N_PROF, N_LEVELS) indexes - A "profile" is a collection of points with an unique UID based on WMO, CYCLE_NUMBER and DIRECTION + Note that this method will systematically apply the :meth:`datamode.split` method. 
+ Returns ------- - :class:`xr.dataset` + :class:`xr.Dataset` Warnings -------- diff --git a/docs/_static/argopy_logo_long_5years.png b/docs/_static/argopy_logo_long_5years.png new file mode 100644 index 00000000..00b3d251 Binary files /dev/null and b/docs/_static/argopy_logo_long_5years.png differ diff --git a/docs/api-hidden.rst b/docs/api-hidden.rst index 7867be7f..1e1f3b1e 100644 --- a/docs/api-hidden.rst +++ b/docs/api-hidden.rst @@ -296,6 +296,8 @@ argopy.ArgoIndex.search_parameter_data_mode argopy.ArgoIndex.to_dataframe argopy.ArgoIndex.to_indexfile + argopy.ArgoIndex.copy + argopy.stores.argo_index_proto_s3.s3index argopy.stores.argo_index_proto_s3.s3index_core diff --git a/docs/api.rst b/docs/api.rst index 09d71a2a..a2e7d3ad 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -159,6 +159,7 @@ Data Transformation Dataset.argo.interp_std_levels Dataset.argo.groupby_pressure_bins Dataset.argo.datamode.merge + Dataset.argo.datamode.split Data Filters diff --git a/docs/cheatsheet.rst b/docs/cheatsheet.rst index ec280d78..e36ddd3e 100644 --- a/docs/cheatsheet.rst +++ b/docs/cheatsheet.rst @@ -3,7 +3,7 @@ Cheat sheet =========== -.. admonition:: Last version available v0.1.17 +.. admonition:: Last version available v1.0.0 .. centered:: :download:`Get most of the argopy API in a small pdf <_static/argopy-cheatsheet.pdf>` diff --git a/docs/conf.py b/docs/conf.py index bd1427fe..4c0988f2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -232,7 +232,7 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
-html_logo = "_static/argopy_logo_long.png" +html_logo = "_static/argopy_logo_long_5years.png" html_favicon = '_static/argopy.ico' # html_title = "My site title" @@ -263,8 +263,11 @@ 'collapse_navigation': False, # https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/navigation.html#remove-reveal-buttons-for-sidebar-items # 'show_toc_level': 3, # https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/page-toc.html#show-more-levels-of-the-in-page-toc-by-default # 'launch_buttons': { "thebe": True} + # "announcement": ( + # "๐ŸŽ“ 2025 argopy training camps are open: pre-register here ๐ŸŽ“" + # ), "announcement": ( - "๐ŸŽ“ 2025 argopy training camps are open: pre-register here ๐ŸŽ“" + "๐ŸŽ‰ argopy turns 5! Check full details here ๐ŸŽŠ" ), "icon_links": [ { @@ -285,6 +288,12 @@ "icon": "fa-solid fa-graduation-cap", "type": "fontawesome", }, + { + "name": "5 years anniversary events", + "url": "https://euroargodev.github.io/argopy-5years", + "icon": "fa-solid fa-cake-candles", + "type": "fontawesome", + }, ] } diff --git a/docs/index.rst b/docs/index.rst index 360a2136..95cabd3b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,13 +9,30 @@ for standard users as well as Argo experts. |License| |Python version| |Anaconda-Server Badge| +.. admonition:: ๐ŸŽ‰ argopy turns 5! ๐ŸŽ‰ + + Join us as we celebrate this milestone with exciting activities: + + - ๐Ÿš€ `Coding Challenges `_. Test your skills and creativity with a set of exciting Argo related challenges designed for all levels. Compete for bragging rights and prizes! + - ๐ŸŽฎ `Online Game Contest `_. Join the community for a fun-filled competition that blends tech and play. Perfect for taking a break and get a special price if you make it to the top 3. + - ๐Ÿ“‹ `User Survey `_. Share your feedback and ideas to help shape the future of argopy. Your input means the world to us. + - ๐Ÿ“š `Free Training Camp `_. Expand your knowledge with expert-led sessions on making the most of argopy. 
Perfect for new and experienced users alike! + + Weโ€™d love to have you join us in celebrating this milestone. Whether youโ€™ve been with us since day one or just started using argopy, your involvement makes a difference + + ๐Ÿ‘‰ ``_ + + **Thank you for your support and for being an essential part of our journey. Hereโ€™s to the next five years of innovation, learning, and collaboration!** + + + .. admonition:: 2025 argopy training camps ๐ŸŽ“ - The argopy team is contemplating to organise "training camps" in 2025: + The argopy team will organise "training camps" in 2025: - At least one event would be in-person and anoter online. + At least one event would be in-person and another online. - Overall, a training camp should be about 1 day long. + Overall, a training camp should be no more than 1 day long. The goal of these events is to train users with all the argopy features. Whether you're a standard, research or expert users, argopy has features for you ! diff --git a/docs/whats-new.rst b/docs/whats-new.rst index ca002d70..faa4cb03 100644 --- a/docs/whats-new.rst +++ b/docs/whats-new.rst @@ -8,6 +8,22 @@ What's New |pypi dwn| |conda dwn| +Coming up next +-------------- + +Internals +^^^^^^^^^ + +- New :meth:`ArgoIndex.copy` method (:pr:`418`) by |gmaze|. This copy allows for a: + + - deep copy, i.e. a new instance with same parameters (e.g. ``index_file``) and cleared search, + - shallow copy, i.e. a new instance with same parameters and search results if any. + +- Fix bug raising an error for ``STATION_PARAMETERS`` with a blank entry, with ``bgc`` dataset and ``gdac`` data source (well spotted |quai20|). (:pr:`418`) by |gmaze|. + +- Fix bug raising an error when exporting a dataset to netcdf after erddap fetch, :issue:`412`. (:pr:`413`) by |gmaze|. + + v1.0.0 (16 Oct. 2024) --------------------- @@ -18,7 +34,7 @@ v1.0.0 (16 Oct. 2024) This version comes with improved performances and support for the BGC-Argo dataset. 
But since this is a major, we also introduces breaking changes and significant internal refactoring possibly with un-expected side effects ! So don't hesitate to `report issues on the source code repository `_. - +.. _v1.0.0-features: Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -30,7 +46,7 @@ Features and front-end API - **Improved support for BGC** - **argopy now support `standard` and `research` user modes** with the `bgc` dataset. These new user modes follows the last available ADMT recommendations to bring users a finely tuned set of BGC parameters. Details of the BGC data processing chain for each user modes can be found in the :ref:`user-mode-definition` section. - - **Predict nutrients and carbonates in the Mediterranean Sea** with the new BGC method :class:`Dataset.argo.canyon_med`. The new method allows to make predictions of the water-Column nutrient concentrations and carbonate system variables in the Mediterranean Sea with the CANYON-MED model. This model can be used to predict PO4, NO3, DIC, SiOH4, AT and pHT. (:pr:`364`) by `G. Maze `_. + - **Predict nutrients and carbonates in the Mediterranean Sea** with the new BGC method :class:`Dataset.argo.canyon_med`. The new method allows to make predictions of the water-Column nutrient concentrations and carbonate system variables in the Mediterranean Sea with the CANYON-MED model. This model can be used to predict PO4, NO3, DIC, SiOH4, AT and pHT. (:pr:`364`) by |gmaze|. .. currentmodule:: argopy @@ -43,7 +59,7 @@ Features and front-end API ds.argo.canyon_med.predict() ds.argo.canyon_med.predict('PO4') - - **More BGC expert features** with support for the *auxiliary* index file with :class:`argopy.ArgoIndex`. Simply use the keyword `aux`. (:pr:`356`) by `G. Maze `_. + - **More BGC expert features** with support for the *auxiliary* index file with :class:`argopy.ArgoIndex`. Simply use the keyword `aux`. (:pr:`356`) by |gmaze|. .. 
code-block:: python @@ -52,7 +68,7 @@ Features and front-end API - **More scalable data fetching using multi-processing or a Dask Cluster**. -It is now possible to use multi-processing with all data fetchers and even possibly a Dask client object. This is set with the ``parallel`` option. In doing so, the Argo data pre-processing steps (download and conformation to internal conventions) will be distributed to all available resources, significantly improving performances for fetching large selection of Argo data. (:pr:`392`) by `G. Maze `_. +It is now possible to use multi-processing with all data fetchers and even possibly a Dask client object. This is set with the ``parallel`` option. In doing so, the Argo data pre-processing steps (download and conformation to internal conventions) will be distributed to all available resources, significantly improving performances for fetching large selection of Argo data. (:pr:`392`) by |gmaze|. Check the documentation on :ref:`Parallelization methods` for all the details. @@ -72,7 +88,7 @@ Check the documentation on :ref:`Parallelization methods` for all the details. - **Xarray argo accessor extensions mechanism**. -This should allows users to easily develop their own Argo dataset methods. This is possible thanks to a new class decorator :class:`argopy.extensions.register_argo_accessor` that allows to register a class as a property to the :class:`Dataset.argo` accessor. (:pr:`364`) by `G. Maze `_. +This should allows users to easily develop their own Argo dataset methods. This is possible thanks to a new class decorator :class:`argopy.extensions.register_argo_accessor` that allows to register a class as a property to the :class:`Dataset.argo` accessor. (:pr:`364`) by |gmaze|. Example: @@ -103,19 +119,21 @@ This makes syntax like this possible: .. currentmodule:: argopy +.. _v1.0.0-breaking: + Breaking changes ^^^^^^^^^^^^^^^^ .. currentmodule:: xarray -- In the :class:`Dataset.argo` accessor (:pr:`356`) by `G. 
Maze `_: +- In the :class:`Dataset.argo` accessor (:pr:`356`) by |gmaze|: - the :meth:`Dataset.argo.filter_data_mode` has been deprecated and replaced by :meth:`Dataset.argo.datamode.merge` method. To actually implement a real filter of data points on data mode values, i.e. to keep points with specific data mode values, use the :meth:`Dataset.argo.datamode.filter` method. .. currentmodule:: argopy -- The option name "ftp" is now replaced by "gdac" (:pr:`389`) by `G. Maze `_ +- The option name "ftp" is now renamed "gdac" (:pr:`389`) by |gmaze|. -- The option name "dataset" is now replaced by "ds" (:pr:`389`) by `G. Maze `_ +- The option name "dataset" is now renamed "ds" (:pr:`389`) by |gmaze|. - It is highly probable that more changes in this major v1.0.0 lead to breaking changes not listed here. Don't hesitate to `report them on the repository issue section `_. @@ -129,7 +147,7 @@ v0.1.17 (20 Sep. 2024) Comping up soon by the end of October the first **major argopy release: v1.0.0** -.. important:: List of deprecations before the upcoming major release v1.0.0. (:pr:`389`) by `G. Maze `_. +.. important:: List of deprecations before the upcoming major release v1.0.0. (:pr:`389`) by |gmaze|. .. currentmodule:: xarray @@ -140,12 +158,16 @@ v0.1.17 (20 Sep. 2024) - Refactor option "dataset" into "ds", see :class:`argopy.set_options` - Refactor option "ftp" into "gdac", see :class:`argopy.set_options` +.. _v0.1.17-internals: + Internals ^^^^^^^^^ -- Refactor Argovis CI tests to use mocked http server (:pr:`383`) by `G. Maze `_ +- Refactor Argovis CI tests to use mocked http server (:pr:`383`) by |gmaze| + +- Improve error and warning messages from mocked http server to address :issue:`381` (:pr:`382`) by |gmaze| -- Improve error and warning messages from mocked http server to address :issue:`381` (:pr:`382`) by `G. Maze `_ +.. _v0.1.17-energy: Energy ^^^^^^ @@ -155,10 +177,12 @@ Considering `energy used by CI tests `_. 
The `ADMT working group discussion items are listed here `_. Both CORE and BGC index files are supported. The new :class:`ArgoIndex` not only support access to the AWS S3 index files but also implement improved performances for search methods on WMO and cycle numbers, using :class:`boto3.client.select_object_content` SQL queries. Indeed, the ``https`` and ``ftp`` default GDAC server index files are downloaded and loaded in memory before being searched. But with ``s3``, index files can directly be queried on the server using SQL syntax; the full index is not necessarily downloaded. (:pr:`326`) by `G. Maze `_ +- **Support for AWS S3 index files**. This support is experimental and is primarily made available for benchmarking as part of the `ADMT working group on Argo cloud format activities `_. The `ADMT working group discussion items are listed here `_. Both CORE and BGC index files are supported. The new :class:`ArgoIndex` not only support access to the AWS S3 index files but also implement improved performances for search methods on WMO and cycle numbers, using :class:`boto3.client.select_object_content` SQL queries. Indeed, the ``https`` and ``ftp`` default GDAC server index files are downloaded and loaded in memory before being searched. But with ``s3``, index files can directly be queried on the server using SQL syntax; the full index is not necessarily downloaded. (:pr:`326`) by |gmaze| .. code-block:: python @@ -175,33 +199,37 @@ Features and front-end API - **argovis** data source now support the new `API server `_. This upgrade comes with a new option to define the optional API KEY to use. You can `get a free key here `_. (:pr:`371`) by `Bill Katie-Anne Mills `_. -- **argopy** is concerned about its environmental impact and we'd like to understand and optimize the carbon emissions of our digital activities. Starting June 1st 2024, we use `Green Coding `_ tools to assess energy consumption and CO2eq emissions from our activities on Github infrastructure. 
All results and data are available on the new dedicated web page: :ref:`Carbon emissions`. (:pr:`354`) by `G. Maze `_. +- **argopy** is concerned about its environmental impact and we'd like to understand and optimize the carbon emissions of our digital activities. Starting June 1st 2024, we use `Green Coding `_ tools to assess energy consumption and CO2eq emissions from our activities on Github infrastructure. All results and data are available on the new dedicated web page: :ref:`Carbon emissions`. (:pr:`354`) by |gmaze|. + +.. _v0.1.16-internals: Internals ^^^^^^^^^ -- Drop support for Python 3.8, add support for Python 3.10. (:pr:`379`) by `G. Maze `_ +- Drop support for Python 3.8, add support for Python 3.10. (:pr:`379`) by |gmaze| -- Update :class:`argopy.ArgoNVSReferenceTables` to handle new NVS server output format. (:pr:`378`) by `G. Maze `_. +- Update :class:`argopy.ArgoNVSReferenceTables` to handle new NVS server output format. (:pr:`378`) by |gmaze|. -- Update Ifremer erddap server information. The Argo reference for DMQC (returned by the :class:`DataFetcher` fetcher with ``ds='ref'`` argument ) and Argo CTD-reference for DQMC (returned by the :class:`CTDRefDataFetcher` fetcher) now indicate the dataset version used. (:pr:`344`) by `G. Maze `_. +- Update Ifremer erddap server information. The Argo reference for DMQC (returned by the :class:`DataFetcher` fetcher with ``ds='ref'`` argument ) and Argo CTD-reference for DQMC (returned by the :class:`CTDRefDataFetcher` fetcher) now indicate the dataset version used. (:pr:`344`) by |gmaze|. -- Pin upper bound on xarray < 2024.3 to fix failing upstream tests because of ``AttributeError: 'ScipyArrayWrapper' object has no attribute 'oindex'``, `reported here `_. (:pr:`326`) by `G. Maze `_ +- Pin upper bound on xarray < 2024.3 to fix failing upstream tests because of ``AttributeError: 'ScipyArrayWrapper' object has no attribute 'oindex'``, `reported here `_. 
(:pr:`326`) by |gmaze| -- Fix :class:`argopy.ArgoDocs` that was not working with new Archimer webpage design, :issue:`351`. (:pr:`352`) by `G. Maze `_. +- Fix :class:`argopy.ArgoDocs` that was not working with new Archimer webpage design, :issue:`351`. (:pr:`352`) by |gmaze|. -- Fix bug with ArgoIndex cache, :issue:`345`. (:pr:`346`) by `G. Maze `_. +- Fix bug with ArgoIndex cache, :issue:`345`. (:pr:`346`) by |gmaze|. -- Keep dependencies up to date. (:pr:`333`, :pr:`337`) by `G. Maze `_. +- Keep dependencies up to date. (:pr:`333`, :pr:`337`) by |gmaze|. -- Update :class:`argopy.ArgoDocs` with last BGC cookbooks on pH. (:pr:`321`) by `G. Maze `_. +- Update :class:`argopy.ArgoDocs` with last BGC cookbooks on pH. (:pr:`321`) by |gmaze|. -- Fix for fsspec > 2023.10.0. (:pr:`318`) by `G. Maze `_. +- Fix for fsspec > 2023.10.0. (:pr:`318`) by |gmaze|. + +.. _v0.1.16-breaking: Breaking changes ^^^^^^^^^^^^^^^^ -- Drop support for erddapy < v0.8.0 (:pr:`344`) by `G. Maze `_. +- Drop support for erddapy < v0.8.0 (:pr:`344`) by |gmaze|. v0.1.15 (12 Dec. 2023) @@ -210,7 +238,7 @@ v0.1.15 (12 Dec. 2023) Internals ^^^^^^^^^ -- Fix bug whereby user name could not be retrieved using :func:`getpass.getuser`. This closes :issue:`310` and allows argopy to be integrated into the EU Galaxy tools for `ecology `_. (:pr:`311`) by `G. Maze `_. +- Fix bug whereby user name could not be retrieved using :func:`getpass.getuser`. This closes :issue:`310` and allows argopy to be integrated into the EU Galaxy tools for `ecology `_. (:pr:`311`) by |gmaze|. v0.1.14 (29 Sep. 2023) @@ -223,7 +251,7 @@ v0.1.14 (29 Sep. 2023) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- **argopy now support BGC dataset in `expert` user mode for the `erddap` data source**. The BGC-Argo content of synthetic multi-profile files is now available from the Ifremer erddap. Like for the core dataset, you can fetch data for a region, float(s) or profile(s). 
One novelty with regard to core, is that you can restrict data fetching to some parameters and furthermore impose no-NaNs on some of these parameters. Check out the new documentation page for :ref:`data-set`. (:pr:`278`) by `G. Maze `_ +- **argopy now support BGC dataset in `expert` user mode for the `erddap` data source**. The BGC-Argo content of synthetic multi-profile files is now available from the Ifremer erddap. Like for the core dataset, you can fetch data for a region, float(s) or profile(s). One novelty with regard to core, is that you can restrict data fetching to some parameters and furthermore impose no-NaNs on some of these parameters. Check out the new documentation page for :ref:`data-set`. (:pr:`278`) by |gmaze| .. code-block:: python @@ -246,7 +274,7 @@ Features and front-end API DataFetcher(ds='bgc', params='all', measured='all') # Return the smallest possible dataset DataFetcher(ds='bgc', params='all', measured=['DOXY', 'BBP700']) # Return all possible params for points where DOXY and BBP700 are not NaN -- **New methods in the ArgoIndex for BGC**. The :class:`ArgoIndex` has now full support for the BGC profile index files, both bio and synthetic index. In particular it is possible to search for profiles with specific data modes on parameters. (:pr:`278`) by `G. Maze `_ +- **New methods in the ArgoIndex for BGC**. The :class:`ArgoIndex` has now full support for the BGC profile index files, both bio and synthetic index. In particular it is possible to search for profiles with specific data modes on parameters. (:pr:`278`) by |gmaze| .. code-block:: python @@ -259,7 +287,7 @@ Features and front-end API idx.search_parameter_data_mode({'DOXY': ['R', 'A']}) idx.search_parameter_data_mode({'DOXY': 'D', 'CDOM': 'D'}, logical='or') -- **New xarray argo accessor features**. Easily retrieve an Argo sample index and domain extent with the ``index`` and ``domain`` properties. 
Get a list with all possible (PLATFORM_NUMBER, CYCLE_NUMBER) with the ``list_WMO_CYC`` method. (:pr:`278`) by `G. Maze `_ +- **New xarray argo accessor features**. Easily retrieve an Argo sample index and domain extent with the ``index`` and ``domain`` properties. Get a list with all possible (PLATFORM_NUMBER, CYCLE_NUMBER) with the ``list_WMO_CYC`` method. (:pr:`278`) by |gmaze| - **New search methods for Argo reference tables**. It is now possible to search for a string in tables title and/or description using the :meth:`related.ArgoNVSReferenceTables.search` method. @@ -278,9 +306,9 @@ Features and front-end API :align: center :target: _static/argopy-cheatsheet.pdf -- **Our internal Argo index store is promoted as a frontend feature**. The :class:`IndexFetcher` is a user-friendly **fetcher** built on top of our internal Argo index file store. But if you are familiar with Argo index files and/or cares about performances, you may be interested in using directly the Argo index **store**. We thus decided to promote this internal feature as a frontend class :class:`ArgoIndex`. See :ref:`Store: Low-level Argo Index access`. (:pr:`270`) by `G. Maze `_ +- **Our internal Argo index store is promoted as a frontend feature**. The :class:`IndexFetcher` is a user-friendly **fetcher** built on top of our internal Argo index file store. But if you are familiar with Argo index files and/or cares about performances, you may be interested in using directly the Argo index **store**. We thus decided to promote this internal feature as a frontend class :class:`ArgoIndex`. See :ref:`Store: Low-level Argo Index access`. (:pr:`270`) by |gmaze| -- **Easy access to all Argo manuals from the ADMT**. More than 20 pdf manuals have been produced by the Argo Data Management Team. Using the new :class:`ArgoDocs` class, it's now easier to navigate this great database for Argo experts. All details in :ref:`ADMT Documentation`. (:pr:`268`) by `G. 
Maze `_ +- **Easy access to all Argo manuals from the ADMT**. More than 20 pdf manuals have been produced by the Argo Data Management Team. Using the new :class:`ArgoDocs` class, it's now easier to navigate this great database for Argo experts. All details in :ref:`ADMT Documentation`. (:pr:`268`) by |gmaze| .. code-block:: python @@ -297,16 +325,16 @@ Features and front-end API ArgoDocs().search("CDOM") -- **New 'research' user mode**. This new feature implements automatic filtering of Argo data following international recommendations for research/climate studies. With this user mode, only Delayed Mode with good QC data are returned. Check out the :ref:`user-mode` section for all the details. (:pr:`265`) by `G. Maze `_ +- **New 'research' user mode**. This new feature implements automatic filtering of Argo data following international recommendations for research/climate studies. With this user mode, only Delayed Mode with good QC data are returned. Check out the :ref:`user-mode-definition` section for all the details. (:pr:`265`) by |gmaze| -- **argopy now provides a specific xarray engine to properly read Argo netcdf files**. Using ``engine='argo'`` in :func:`xarray.open_dataset`, all variables will properly be casted, i.e. returned with their expected data types, which is not the case otherwise. This works with *ALL* Argo netcdf file types (as listed in the `Reference table R01 `_). Some details in here: :class:`argopy.xarray.ArgoEngine` (:pr:`208`) by `G. Maze `_ +- **argopy now provides a specific xarray engine to properly read Argo netcdf files**. Using ``engine='argo'`` in :func:`xarray.open_dataset`, all variables will properly be casted, i.e. returned with their expected data types, which is not the case otherwise. This works with *ALL* Argo netcdf file types (as listed in the `Reference table R01 `_). Some details in here: :class:`argopy.xarray.ArgoEngine` (:pr:`208`) by |gmaze| .. 
code-block:: python import xarray as xr ds = xr.open_dataset("dac/aoml/1901393/1901393_prof.nc", engine='argo') -- **argopy now can provide authenticated access to the Argo CTD reference database for DMQC**. Using user/password new **argopy** options, it is possible to fetch the `Argo CTD reference database `_, with the :class:`CTDRefDataFetcher` class. (:pr:`256`) by `G. Maze `_ +- **argopy now can provide authenticated access to the Argo CTD reference database for DMQC**. Using user/password new **argopy** options, it is possible to fetch the `Argo CTD reference database `_, with the :class:`CTDRefDataFetcher` class. (:pr:`256`) by |gmaze| .. code-block:: python @@ -325,9 +353,9 @@ Features and front-end API Internals ^^^^^^^^^ -- Utilities refactoring. All classes and functions have been refactored to more appropriate locations like ``argopy.utils`` or ``argopy.related``. A deprecation warning message should be displayed every time utilities are being used from the deprecated locations. (:pr:`290`) by `G. Maze `_ +- Utilities refactoring. All classes and functions have been refactored to more appropriate locations like ``argopy.utils`` or ``argopy.related``. A deprecation warning message should be displayed every time utilities are being used from the deprecated locations. (:pr:`290`) by |gmaze| -- Fix bugs due to fsspec new internal cache handling and Windows specifics. (:pr:`293`) by `G. Maze `_ +- Fix bugs due to fsspec new internal cache handling and Windows specifics. (:pr:`293`) by |gmaze| - New utility class :class:`utils.MonitoredThreadPoolExecutor` to handle parallelization with a multi-threading Pool that provide a notebook or terminal computation progress dashboard. This class is used by the httpstore open_mfdataset method for erddap requests. @@ -347,9 +375,9 @@ Internals - And misc. bug and warning fixes all over the code. -- Update new argovis dashboard links for floats and profiles. (:pr:`271`) by `G. 
Maze `_ +- Update new argovis dashboard links for floats and profiles. (:pr:`271`) by |gmaze| -- **Index store can now export search results to standard Argo index file format**. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`260`) by `G. Maze `_ +- **Index store can now export search results to standard Argo index file format**. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`260`) by |gmaze| .. code-block:: python @@ -363,7 +391,7 @@ Internals idx.to_indexfile('short_index.txt') # export search results as standard Argo index csv file -- **Index store can now load/search the Argo Bio and Synthetic profile index files**. Simply gives the name of the Bio or Synthetic Profile index file and retrieve the full index. This store also comes with a new search criteria for BGC: by parameters. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`261`) by `G. Maze `_ +- **Index store can now load/search the Argo Bio and Synthetic profile index files**. Simply gives the name of the Bio or Synthetic Profile index file and retrieve the full index. This store also comes with a new search criteria for BGC: by parameters. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`261`) by |gmaze| .. code-block:: python @@ -376,8 +404,8 @@ Internals idx = indexstore(index_file="argo_bio-profile_index.txt").load() idx.search_params(['C1PHASE_DOXY', 'DOWNWELLING_PAR']) -- Use a mocked server for all http and GDAC ftp requests in CI tests (:pr:`249`, :pr:`252`, :pr:`255`) by `G. Maze `_ -- Removed support for minimal dependency requirements and for python 3.7. (:pr:`252`) by `G. Maze `_ +- Use a mocked server for all http and GDAC ftp requests in CI tests (:pr:`249`, :pr:`252`, :pr:`255`) by |gmaze| +- Removed support for minimal dependency requirements and for python 3.7. 
(:pr:`252`) by |gmaze| - Changed License from Apache to `EUPL 1.2 `_ Breaking changes @@ -385,7 +413,7 @@ Breaking changes - Some documentation pages may have moved to new urls. -- The legacy index store is deprecated, now available in argopy.stores.argo_index_deprec.py only (:pr:`270`) by `G. Maze `_ +- The legacy index store is deprecated, now available in argopy.stores.argo_index_deprec.py only (:pr:`270`) by |gmaze| v0.1.14rc2 (27 Jul. 2023) @@ -394,7 +422,7 @@ v0.1.14rc2 (27 Jul. 2023) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- **argopy now support BGC dataset in `expert` user mode for the `erddap` data source**. The BGC-Argo content of synthetic multi-profile files is now available from the Ifremer erddap. Like for the core dataset, you can fetch data for a region, float(s) or profile(s). One novelty with regard to core, is that you can restrict data fetching to some parameters and furthermore impose no-NaNs on some of these parameters. Check out the new documentation page for :ref:`data-set`. (:pr:`278`) by `G. Maze `_ +- **argopy now support BGC dataset in `expert` user mode for the `erddap` data source**. The BGC-Argo content of synthetic multi-profile files is now available from the Ifremer erddap. Like for the core dataset, you can fetch data for a region, float(s) or profile(s). One novelty with regard to core, is that you can restrict data fetching to some parameters and furthermore impose no-NaNs on some of these parameters. Check out the new documentation page for :ref:`data-set`. (:pr:`278`) by |gmaze| .. code-block:: python @@ -418,7 +446,7 @@ Features and front-end API DataFetcher(ds='bgc', params='all', measured=['DOXY', 'BBP700']) # Return all possible params for points where DOXY and BBP700 are not NaN -- **New methods in the ArgoIndex for BGC**. The :class:`ArgoIndex` has now full support for the BGC profile index files, both bio and synthetic index. 
In particular it is possible to search for profiles with specific data modes on parameters. (:pr:`278`) by `G. Maze `_ +- **New methods in the ArgoIndex for BGC**. The :class:`ArgoIndex` has now full support for the BGC profile index files, both bio and synthetic index. In particular it is possible to search for profiles with specific data modes on parameters. (:pr:`278`) by |gmaze| .. code-block:: python @@ -432,7 +460,7 @@ Features and front-end API idx.search_parameter_data_mode({'DOXY': 'D', 'CDOM': 'D'}, logical='or') -- **New xarray argo accessor features**. Easily retrieve an Argo sample index and domain extent with the ``index`` and ``domain`` properties. Get a list with all possible (PLATFORM_NUMBER, CYCLE_NUMBER) with the ``list_WMO_CYC`` method. (:pr:`278`) by `G. Maze `_ +- **New xarray argo accessor features**. Easily retrieve an Argo sample index and domain extent with the ``index`` and ``domain`` properties. Get a list with all possible (PLATFORM_NUMBER, CYCLE_NUMBER) with the ``list_WMO_CYC`` method. (:pr:`278`) by |gmaze| - **New search methods for Argo reference tables**. It is now possible to search for a string in tables title and/or description using the :meth:`related.ArgoNVSReferenceTables.search` method. @@ -484,9 +512,9 @@ Features and front-end API :align: center :target: _static/argopy-cheatsheet.pdf -- **Our internal Argo index store is promoted as a frontend feature**. The :class:`IndexFetcher` is a user-friendly **fetcher** built on top of our internal Argo index file store. But if you are familiar with Argo index files and/or cares about performances, you may be interested in using directly the Argo index **store**. We thus decided to promote this internal feature as a frontend class :class:`ArgoIndex`. See :ref:`Store: Low-level Argo Index access`. (:pr:`270`) by `G. Maze `_ +- **Our internal Argo index store is promoted as a frontend feature**. 
The :class:`IndexFetcher` is a user-friendly **fetcher** built on top of our internal Argo index file store. But if you are familiar with Argo index files and/or cares about performances, you may be interested in using directly the Argo index **store**. We thus decided to promote this internal feature as a frontend class :class:`ArgoIndex`. See :ref:`Store: Low-level Argo Index access`. (:pr:`270`) by |gmaze| -- **Easy access to all Argo manuals from the ADMT**. More than 20 pdf manuals have been produced by the Argo Data Management Team. Using the new :class:`ArgoDocs` class, it's now easier to navigate this great database for Argo experts. All details in :ref:`ADMT Documentation`. (:pr:`268`) by `G. Maze `_ +- **Easy access to all Argo manuals from the ADMT**. More than 20 pdf manuals have been produced by the Argo Data Management Team. Using the new :class:`ArgoDocs` class, it's now easier to navigate this great database for Argo experts. All details in :ref:`ADMT Documentation`. (:pr:`268`) by |gmaze| .. code-block:: python @@ -503,16 +531,16 @@ Features and front-end API ArgoDocs().search("CDOM") -- **New 'research' user mode**. This new feature implements automatic filtering of Argo data following international recommendations for research/climate studies. With this user mode, only Delayed Mode with good QC data are returned. Check out the :ref:`user-mode` section for all the details. (:pr:`265`) by `G. Maze `_ +- **New 'research' user mode**. This new feature implements automatic filtering of Argo data following international recommendations for research/climate studies. With this user mode, only Delayed Mode with good QC data are returned. Check out the :ref:`user-mode-definition` section for all the details. (:pr:`265`) by |gmaze| -- **argopy now provides a specific xarray engine to properly read Argo netcdf files**. Using ``engine='argo'`` in :func:`xarray.open_dataset`, all variables will properly be casted, i.e. 
returned with their expected data types, which is not the case otherwise. This works with *ALL* Argo netcdf file types (as listed in the `Reference table R01 `_). Some details in here: :class:`argopy.xarray.ArgoEngine` (:pr:`208`) by `G. Maze `_ +- **argopy now provides a specific xarray engine to properly read Argo netcdf files**. Using ``engine='argo'`` in :func:`xarray.open_dataset`, all variables will properly be casted, i.e. returned with their expected data types, which is not the case otherwise. This works with *ALL* Argo netcdf file types (as listed in the `Reference table R01 `_). Some details in here: :class:`argopy.xarray.ArgoEngine` (:pr:`208`) by |gmaze| .. code-block:: python import xarray as xr ds = xr.open_dataset("dac/aoml/1901393/1901393_prof.nc", engine='argo') -- **argopy now can provide authenticated access to the Argo CTD reference database for DMQC**. Using user/password new **argopy** options, it is possible to fetch the `Argo CTD reference database `_, with the :class:`CTDRefDataFetcher` class. (:pr:`256`) by `G. Maze `_ +- **argopy now can provide authenticated access to the Argo CTD reference database for DMQC**. Using user/password new **argopy** options, it is possible to fetch the `Argo CTD reference database `_, with the :class:`CTDRefDataFetcher` class. (:pr:`256`) by |gmaze| .. code-block:: python @@ -530,9 +558,9 @@ Features and front-end API Internals ^^^^^^^^^ -- Update new argovis dashboard links for floats and profiles. (:pr:`271`) by `G. Maze `_ +- Update new argovis dashboard links for floats and profiles. (:pr:`271`) by |gmaze| -- **Index store can now export search results to standard Argo index file format**. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`260`) by `G. Maze `_ +- **Index store can now export search results to standard Argo index file format**. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`260`) by |gmaze| .. 
code-block:: python @@ -546,7 +574,7 @@ Internals idx.to_indexfile('short_index.txt') # export search results as standard Argo index csv file -- **Index store can now load/search the Argo Bio and Synthetic profile index files**. Simply gives the name of the Bio or Synthetic Profile index file and retrieve the full index. This store also comes with a new search criteria for BGC: by parameters. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`261`) by `G. Maze `_ +- **Index store can now load/search the Argo Bio and Synthetic profile index files**. Simply gives the name of the Bio or Synthetic Profile index file and retrieve the full index. This store also comes with a new search criteria for BGC: by parameters. See all details in :ref:`Store: Low-level Argo Index access`. (:pr:`261`) by |gmaze| .. code-block:: python @@ -559,14 +587,14 @@ Internals idx = indexstore(index_file="argo_bio-profile_index.txt").load() idx.search_params(['C1PHASE_DOXY', 'DOWNWELLING_PAR']) -- Use a mocked server for all http and GDAC ftp requests in CI tests (:pr:`249`, :pr:`252`, :pr:`255`) by `G. Maze `_ -- Removed support for minimal dependency requirements and for python 3.7. (:pr:`252`) by `G. Maze `_ +- Use a mocked server for all http and GDAC ftp requests in CI tests (:pr:`249`, :pr:`252`, :pr:`255`) by |gmaze| +- Removed support for minimal dependency requirements and for python 3.7. (:pr:`252`) by |gmaze| - Changed License from Apache to `EUPL 1.2 `_ Breaking changes ^^^^^^^^^^^^^^^^ -- The legacy index store is deprecated, now available in argopy.stores.argo_index_deprec.py only (:pr:`270`) by `G. Maze `_ +- The legacy index store is deprecated, now available in argopy.stores.argo_index_deprec.py only (:pr:`270`) by |gmaze| v0.1.13 (28 Mar. 2023) @@ -575,7 +603,7 @@ v0.1.13 (28 Mar. 
2023) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- **New utility class to retrieve the Argo deployment plan from the Ocean-OPS api.** This is the utility class :class:`OceanOPSDeployments`. See the new documentation section on :ref:`Deployment Plan` for more. (:pr:`244`) by `G. Maze `_ +- **New utility class to retrieve the Argo deployment plan from the Ocean-OPS api.** This is the utility class :class:`OceanOPSDeployments`. See the new documentation section on :ref:`Deployment Plan` for more. (:pr:`244`) by |gmaze| .. code-block:: python @@ -592,7 +620,7 @@ Features and front-end API .. image:: _static/scatter_map_deployment_status.png -- **New scatter map utility for easy Argo-related variables plotting.** The new :meth:`argopy.plot.scatter_map` utility function is dedicated to making maps with Argo profiles positions coloured according to specific variables: a scatter map. Profiles colouring is finely tuned for some variables: QC flags, Data Mode and Deployment Status. By default, floats trajectories are always shown, but this can be changed. See the new documentation section on :ref:`Scatter Maps` for more. (:pr:`245`) by `G. Maze `_ +- **New scatter map utility for easy Argo-related variables plotting.** The new :meth:`argopy.plot.scatter_map` utility function is dedicated to making maps with Argo profiles positions coloured according to specific variables: a scatter map. Profiles colouring is finely tuned for some variables: QC flags, Data Mode and Deployment Status. By default, floats trajectories are always shown, but this can be changed. See the new documentation section on :ref:`Scatter Maps` for more. (:pr:`245`) by |gmaze| .. code-block:: python @@ -604,7 +632,7 @@ Features and front-end API .. image:: _static/scatter_map_qcflag.png -- **New Argo colors utility to manage segmented colormaps and pre-defined Argo colors set.** The new :class:`argopy.plot.ArgoColors` utility class aims to easily provide colors for Argo-related variables plot. 
See the new documentation section on :ref:`Argo colors` for more (:pr:`245`) by `G. Maze `_ +- **New Argo colors utility to manage segmented colormaps and pre-defined Argo colors set.** The new :class:`argopy.plot.ArgoColors` utility class aims to easily provide colors for Argo-related variables plot. See the new documentation section on :ref:`Argo colors` for more (:pr:`245`) by |gmaze| .. code-block:: python @@ -623,16 +651,16 @@ Features and front-end API Internals ^^^^^^^^^ -- Because of the new :class:`argopy.plot.ArgoColors`, the `argopy.plot.discrete_coloring` utility is deprecated in 0.1.13. Calling it will raise an error after argopy 0.1.14. (:pr:`245`) by `G. Maze `_ +- Because of the new :class:`argopy.plot.ArgoColors`, the `argopy.plot.discrete_coloring` utility is deprecated in 0.1.13. Calling it will raise an error after argopy 0.1.14. (:pr:`245`) by |gmaze| -- New method to check status of web API: now allows for a keyword check rather than a simple url ping. This comes with 2 new utilities functions :meth:`utilities.urlhaskeyword` and :meth:`utilities.isalive`. (:pr:`247`) by `G. Maze `_. +- New method to check status of web API: now allows for a keyword check rather than a simple url ping. This comes with 2 new utilities functions :meth:`utilities.urlhaskeyword` and :meth:`utilities.isalive`. (:pr:`247`) by |gmaze|. -- Removed dependency to Scikit-learn LabelEncoder (:pr:`239`) by `G. Maze `_ +- Removed dependency to Scikit-learn LabelEncoder (:pr:`239`) by |gmaze| Breaking changes ^^^^^^^^^^^^^^^^ -- Data source ``localftp`` is deprecated and removed from **argopy**. It's been replaced by the ``gdac`` data source with the appropriate ``ftp`` option. See :ref:`Data sources`. (:pr:`240`) by `G. Maze `_ +- Data source ``localftp`` is deprecated and removed from **argopy**. It's been replaced by the ``gdac`` data source with the appropriate ``ftp`` option. See :ref:`Data sources`. 
(:pr:`240`) by |gmaze| - :class:`argopy.utilities.ArgoNVSReferenceTables` methods ``all_tbl`` and ``all_tbl_name`` are now properties, not methods. @@ -643,7 +671,7 @@ v0.1.12 (16 May 2022) Internals ^^^^^^^^^ -- Update ``erddap`` server from https://www.ifremer.fr/erddap to https://erddap.ifremer.fr/erddap. (:commit:`af5692f9f7b236c5cd62c202252074cccec97c34`) by `G. Maze `_ +- Update ``erddap`` server from https://www.ifremer.fr/erddap to https://erddap.ifremer.fr/erddap. (:commit:`af5692f9f7b236c5cd62c202252074cccec97c34`) by |gmaze| v0.1.11 (13 Apr. 2022) @@ -652,7 +680,7 @@ v0.1.11 (13 Apr. 2022) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- **New data source ``gdac`` to retrieve data from a GDAC compliant source**, for DataFetcher and IndexFetcher. You can specify the FTP source with the ``ftp`` fetcher option or with the argopy global option ``ftp``. The FTP source support http, ftp or local files protocols. This fetcher is optimised if pyarrow is available, otherwise pandas dataframe are used. See update on :ref:`Data sources`. (:pr:`157`) by `G. Maze `_ +- **New data source ``gdac`` to retrieve data from a GDAC compliant source**, for DataFetcher and IndexFetcher. You can specify the FTP source with the ``ftp`` fetcher option or with the argopy global option ``ftp``. The FTP source support http, ftp or local files protocols. This fetcher is optimised if pyarrow is available, otherwise pandas dataframe are used. See update on :ref:`Data sources`. (:pr:`157`) by |gmaze| .. code-block:: python @@ -674,7 +702,7 @@ Features and front-end API Since the new ``gdac`` fetcher can use a local copy of the GDAC ftp server, the legacy ``localftp`` fetcher is now deprecated. Using it will raise a error up to v0.1.12. It will then be removed in v0.1.13. -- **New dashboard for profiles and new 3rd party dashboards**. Calling on the data fetcher dashboard method will return the Euro-Argo profile page for a single profile. 
Very useful to look at the data before load. This comes with 2 new utilities functions to get Coriolis ID of profiles (:meth:`utilities.get_coriolis_profile_id`) and to return the list of profile webpages (:meth:`utilities.get_ea_profile_page`). (:pr:`198`) by `G. Maze `_. +- **New dashboard for profiles and new 3rd party dashboards**. Calling on the data fetcher dashboard method will return the Euro-Argo profile page for a single profile. Very useful to look at the data before load. This comes with 2 new utilities functions to get Coriolis ID of profiles (:meth:`utilities.get_coriolis_profile_id`) and to return the list of profile webpages (:meth:`utilities.get_ea_profile_page`). (:pr:`198`) by |gmaze|. .. code-block:: python @@ -703,7 +731,7 @@ We added the Ocean-OPS (former JCOMMOPS) dashboard for all floats and the BGC-Ar # or argopy.dashboard(5904797, 12, type='bgc') -- **New utility :class:`argopy.utilities.ArgoNVSReferenceTables` to retrieve Argo Reference Tables**. (:commit:`cc8fdbe132874b71b35203053626cc29ae7d19c4`) by `G. Maze `_. +- **New utility :class:`argopy.utilities.ArgoNVSReferenceTables` to retrieve Argo Reference Tables**. (:commit:`cc8fdbe132874b71b35203053626cc29ae7d19c4`) by |gmaze|. .. code-block:: python @@ -716,7 +744,7 @@ We added the Ocean-OPS (former JCOMMOPS) dashboard for all floats and the BGC-Ar Internals ^^^^^^^^^ -- ``gdac`` and ``localftp`` data fetchers can return an index without loading the data. (:pr:`157`) by `G. Maze `_ +- ``gdac`` and ``localftp`` data fetchers can return an index without loading the data. (:pr:`157`) by |gmaze| .. code-block:: python @@ -735,16 +763,16 @@ Internals idx.N_MATCH # Return number of search results idx.to_dataframe() # Convert search results to a dataframe -- Refactoring of CI tests to use more fixtures and pytest parametrize. (:pr:`157`) by `G. Maze `_ +- Refactoring of CI tests to use more fixtures and pytest parametrize. 
(:pr:`157`) by |gmaze| -- Fix bug in erddap fata fetcher that was causing a `profile` request to do not account for cycle numbers. (:commit:`301e557fdec1f2d536841464b383edc3a4c4a62d`) by `G. Maze `_. +- Fix bug in erddap data fetcher that was causing a `profile` request to not account for cycle numbers. (:commit:`301e557fdec1f2d536841464b383edc3a4c4a62d`) by |gmaze|. Breaking changes ^^^^^^^^^^^^^^^^ -- Index fetcher for local FTP no longer support the option ``index_file``. The name of the file index is internally determined using the dataset requested: ``ar_index_global_prof.txt`` for ``ds='phy'`` and ``argo_synthetic-profile_index.txt`` for ``ds='bgc'``. Using this option will raise a deprecation warning up to v0.1.12 and will then raise an error. (:pr:`157`) by `G. Maze `_ +- Index fetcher for local FTP no longer supports the option ``index_file``. The name of the file index is internally determined using the dataset requested: ``ar_index_global_prof.txt`` for ``ds='phy'`` and ``argo_synthetic-profile_index.txt`` for ``ds='bgc'``. Using this option will raise a deprecation warning up to v0.1.12 and will then raise an error. (:pr:`157`) by |gmaze| -- Complete refactoring of the ``argopy.plotters`` module into ``argopy.plot``. (:pr:`198`) by `G. Maze `_. +- Complete refactoring of the ``argopy.plotters`` module into ``argopy.plot``. (:pr:`198`) by |gmaze|. - Remove deprecation warnings for: 'plotters.plot_dac', 'plotters.plot_profilerType'. These now raise an error. @@ -764,7 +792,7 @@ v0.1.9 (19 Jan. 2022) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- **New method to preprocess data for OWC software**. This method can preprocessed Argo data and possibly create float_source/.mat files to be used as inputs for OWC implementations in `Matlab `_ and `Python `_. See the :ref:`Salinity calibration` documentation page for more. (:pr:`142`) by `G. Maze `_. +- **New method to preprocess data for OWC software**.
This method can preprocess Argo data and possibly create float_source/.mat files to be used as inputs for OWC implementations in `Matlab `_ and `Python `_. See the :ref:`Salinity calibration` documentation page for more. (:pr:`142`) by |gmaze|. .. code-block:: python @@ -786,7 +814,7 @@ This new method comes with others methods and improvements: .. currentmodule:: argopy -- **New dataset properties** accessible from the `argo` xarray accessor: ``N_POINTS``, ``N_LEVELS``, ``N_PROF``. Note that depending on the format of the dataset (a collection of points or of profiles) these values do or do not take into account NaN. These information are also visible by a simple print of the accessor. (:pr:`142`) by `G. Maze `_. +- **New dataset properties** accessible from the `argo` xarray accessor: ``N_POINTS``, ``N_LEVELS``, ``N_PROF``. Note that depending on the format of the dataset (a collection of points or of profiles) these values do or do not take into account NaN. These information are also visible by a simple print of the accessor. (:pr:`142`) by |gmaze|. .. code-block:: python @@ -798,7 +826,7 @@ This new method comes with others methods and improvements: ds.argo -- **New plotter function** :meth:`argopy.plotters.open_sat_altim_report` to insert the CLS Satellite Altimeter Report figure in a notebook cell. (:pr:`159`) by `G. Maze `_. +- **New plotter function** :meth:`argopy.plotters.open_sat_altim_report` to insert the CLS Satellite Altimeter Report figure in a notebook cell. (:pr:`159`) by |gmaze|. .. code-block:: python @@ -816,7 +844,7 @@ This new method comes with others methods and improvements: IndexFetcher().float([6902745, 6902746]).plot('qc_altimetry') -- **New utility method to retrieve topography**. The :class:`argopy.TopoFetcher` will load the `GEBCO topography `_ for a given region. (:pr:`150`) by `G. Maze `_. +- **New utility method to retrieve topography**. The :class:`argopy.TopoFetcher` will load the `GEBCO topography `_ for a given region.
(:pr:`150`) by |gmaze|. .. code-block:: python @@ -838,11 +866,11 @@ For convenience we also added a new property to the data fetcher that return the Internals ^^^^^^^^^ -- Uses a new API endpoint for the ``argovis`` data source when fetching a ``region``. `More on this issue here `_. (:pr:`158`) by `G. Maze `_. +- Uses a new API endpoint for the ``argovis`` data source when fetching a ``region``. `More on this issue here `_. (:pr:`158`) by |gmaze|. -- Update documentation theme, and pages now use the `xarray accessor sphinx extension `_. (:pr:`104`) by `G. Maze `_. +- Update documentation theme, and pages now use the `xarray accessor sphinx extension `_. (:pr:`104`) by |gmaze|. -- Update Binder links to work without the deprecated Pangeo-Binder service. (:pr:`164`) by `G. Maze `_. +- Update Binder links to work without the deprecated Pangeo-Binder service. (:pr:`164`) by |gmaze|. v0.1.8 (2 Nov. 2021) @@ -851,7 +879,7 @@ v0.1.8 (2 Nov. 2021) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- Improve plotting functions. All functions are now available for both the index and data fetchers. See the :ref:`data-viz` page for more details. Reduced plotting dependencies to `Matplotlib `_ only. **Argopy** will use `Seaborn `_ and/or `Cartopy `_ if available. (:pr:`56`) by `G. Maze `_. +- Improve plotting functions. All functions are now available for both the index and data fetchers. See the :ref:`data-viz` page for more details. Reduced plotting dependencies to `Matplotlib `_ only. **Argopy** will use `Seaborn `_ and/or `Cartopy `_ if available. (:pr:`56`) by |gmaze|. .. code-block:: python @@ -869,7 +897,7 @@ Features and front-end API fig, ax = obj.plot('profiler') -- New methods and properties for data and index fetchers. (:pr:`56`) by `G. Maze `_. The :meth:`argopy.DataFetcher.load` and :meth:`argopy.IndexFetcher.load` methods internally call on the `to_xarray()` methods and store results in the fetcher instance. 
The :meth:`argopy.DataFetcher.to_xarray` will trigger a fetch on every call, while the :meth:`argopy.DataFetcher.load` will not. +- New methods and properties for data and index fetchers. (:pr:`56`) by |gmaze|. The :meth:`argopy.DataFetcher.load` and :meth:`argopy.IndexFetcher.load` methods internally call on the `to_xarray()` methods and store results in the fetcher instance. The :meth:`argopy.DataFetcher.to_xarray` will trigger a fetch on every call, while the :meth:`argopy.DataFetcher.load` will not. .. code-block:: python @@ -887,20 +915,20 @@ Features and front-end API indexer.load() indexer.index -- Add optional speed of sound computation to xarray accessor teos10 method. (:pr:`90`) by `G. Maze `_. +- Add optional speed of sound computation to xarray accessor teos10 method. (:pr:`90`) by |gmaze|. - Code spell fixes (:pr:`89`) by `K. Schwehr `_. Internals ^^^^^^^^^ -- Check validity of access points options (WMO and box) in the facade, no checks at the fetcher level. (:pr:`92`) by `G. Maze `_. +- Check validity of access points options (WMO and box) in the facade, no checks at the fetcher level. (:pr:`92`) by |gmaze|. -- More general options. Fix :issue:`91`. (:pr:`102`) by `G. Maze `_. +- More general options. Fix :issue:`91`. (:pr:`102`) by |gmaze|. - ``trust_env`` to allow for local environment variables to be used by fsspec to connect to the internet. Useful for those using a proxy. -- Documentation on `Read The Docs` now uses a pip environment and get rid of memory eager conda. (:pr:`103`) by `G. Maze `_. +- Documentation on `Read The Docs` now uses a pip environment and get rid of memory eager conda. (:pr:`103`) by |gmaze|. - :class:`xarray.Dataset` argopy accessor ``argo`` has a clean documentation. @@ -909,12 +937,12 @@ Breaking changes - Drop support for python 3.6 and older. Lock range of dependencies version support. -- In the plotters module, the ``plot_dac`` and ``plot_profilerType`` functions have been replaced by ``bar_plot``. (:pr:`56`) by `G. 
Maze `_. +- In the plotters module, the ``plot_dac`` and ``plot_profilerType`` functions have been replaced by ``bar_plot``. (:pr:`56`) by |gmaze|. Internals ^^^^^^^^^ -- Internal logging available and upgrade dependencies version support (:pr:`56`) by `G. Maze `_. To see internal logs, you can set-up your application like this: +- Internal logging available and upgrade dependencies version support (:pr:`56`) by |gmaze|. To see internal logs, you can set-up your application like this: .. code-block:: python @@ -936,7 +964,7 @@ Long due release ! Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- Live monitor for the status (availability) of data sources. See documentation page on :ref:`api-status`. (:pr:`36`) by `G. Maze `_. +- Live monitor for the status (availability) of data sources. See documentation page on :ref:`api-status`. (:pr:`36`) by |gmaze|. .. code-block:: python @@ -948,7 +976,7 @@ Features and front-end API .. image:: _static/status_monitor.png :width: 350 -- Optimise large data fetching with parallelization, for all data fetchers (erddap, localftp and argovis). See documentation page on :ref:`parallel`. Two parallel methods are available: multi-threading or multi-processing. (:pr:`28`) by `G. Maze `_. +- Optimise large data fetching with parallelization, for all data fetchers (erddap, localftp and argovis). See documentation page on :ref:`parallel`. Two parallel methods are available: multi-threading or multi-processing. (:pr:`28`) by |gmaze|. .. code-block:: python @@ -970,15 +998,15 @@ Breaking changes Internals ^^^^^^^^^ -- New ``open_mfdataset`` and ``open_mfjson`` methods in Argo stores. These can be used to open, pre-process and concatenate a collection of paths both in sequential or parallel order. (:pr:`28`) by `G. Maze `_. +- New ``open_mfdataset`` and ``open_mfjson`` methods in Argo stores. These can be used to open, pre-process and concatenate a collection of paths both in sequential or parallel order. (:pr:`28`) by |gmaze|. 
-- Unit testing is now done on a controlled conda environment. This allows to more easily identify errors coming from development vs errors due to dependencies update. (:pr:`65`) by `G. Maze `_. +- Unit testing is now done on a controlled conda environment. This allows to more easily identify errors coming from development vs errors due to dependencies update. (:pr:`65`) by |gmaze|. v0.1.6 (31 Aug. 2020) --------------------- -- **JOSS paper published**. You can now cite argopy with a clean reference. (:pr:`30`) by `G. Maze `_ and `K. Balem `_. +- **JOSS paper published**. You can now cite argopy with a clean reference. (:pr:`30`) by |gmaze| and |quai20|. Maze G. and Balem K. (2020). argopy: A Python library for Argo ocean data analysis. *Journal of Open Source Software*, 5(52), 2425 doi: `10.21105/joss.02425 `_. @@ -989,7 +1017,7 @@ v0.1.5 (10 July 2020) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- A new data source with the **argovis** data fetcher, all access points available (:pr:`24`). By `T. Tucker `_ and `G. Maze `_. +- A new data source with the **argovis** data fetcher, all access points available (:pr:`24`). By `T. Tucker `_ and |gmaze|. .. code-block:: python @@ -999,7 +1027,7 @@ Features and front-end API loader.profile(6902746, 12).to_xarray() loader.region([-85,-45,10.,20.,0,1000.,'2012-01','2012-02']).to_xarray() -- Easily compute `TEOS-10 `_ variables with new argo accessor function **teos10**. This needs `gsw `_ to be installed. (:pr:`37`) By `G. Maze `_. +- Easily compute `TEOS-10 `_ variables with new argo accessor function **teos10**. This needs `gsw `_ to be installed. (:pr:`37`) By |gmaze|. .. code-block:: python @@ -1033,7 +1061,7 @@ v0.1.4 (24 June 2020) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- Standard levels interpolation method available in **standard** user mode (:pr:`23`). By `K. Balem `_. +- Standard levels interpolation method available in **standard** user mode (:pr:`23`). By |quai20|. .. 
code-block:: python @@ -1041,7 +1069,7 @@ Features and front-end API ds = ds.argo.point2profile() ds_interp = ds.argo.interp_std_levels(np.arange(0,900,50)) -- Insert in a Jupyter notebook cell the `Euro-Argo fleet monitoring `_ dashboard page, possibly for a specific float (:pr:`20`). By `G. Maze `_. +- Insert in a Jupyter notebook cell the `Euro-Argo fleet monitoring `_ dashboard page, possibly for a specific float (:pr:`20`). By |gmaze|. .. code-block:: python @@ -1050,7 +1078,7 @@ Features and front-end API # or argopy.dashboard(wmo=6902746) -- The ``localftp`` index and data fetcher now have the ``region`` and ``profile`` access points available (:pr:`25`). By `G. Maze `_. +- The ``localftp`` index and data fetcher now have the ``region`` and ``profile`` access points available (:pr:`25`). By |gmaze|. Breaking changes ^^^^^^^^^^^^^^^^ @@ -1060,7 +1088,7 @@ Breaking changes Internals ^^^^^^^^^ -- Now uses `fsspec `_ as file system for caching as well as accessing local and remote files (:pr:`19`). This closes issues :issue:`12`, :issue:`15` and :issue:`17`. **argopy** fetchers must now use (or implement if necessary) one of the internal file systems available in the new module ``argopy.stores``. By `G. Maze `_. +- Now uses `fsspec `_ as file system for caching as well as accessing local and remote files (:pr:`19`). This closes issues :issue:`12`, :issue:`15` and :issue:`17`. **argopy** fetchers must now use (or implement if necessary) one of the internal file systems available in the new module ``argopy.stores``. By |gmaze|. - Erddap fetcher now uses netcdf format to retrieve data (:pr:`19`). @@ -1071,7 +1099,7 @@ v0.1.3 (15 May 2020) Features and front-end API ^^^^^^^^^^^^^^^^^^^^^^^^^^ -- New ``index`` fetcher to explore and work with meta-data (:pr:`6`). By `K. Balem `_. +- New ``index`` fetcher to explore and work with meta-data (:pr:`6`). By |quai20|. .. 
code-block:: python @@ -1092,7 +1120,7 @@ The ``index`` fetcher comes with basic plotting functionalities with the :func:` The design of plotting and visualisation features in ``argopy`` is constantly evolving, so this may change in future releases. -- Real documentation written and published (:pr:`13`). By `G. Maze `_. +- Real documentation written and published (:pr:`13`). By |gmaze|. - The :class:`argopy.DataFetcher` now has a :func:`argopy.DataFetcher.to_dataframe` method to return a :class:`pandas.DataFrame`. @@ -1200,6 +1228,9 @@ v0.1.0 (17 Mar. 2020) - Erddap data fetcher +.. |gmaze| replace:: `G. Maze `__ +.. |quai20| replace:: `K. Balem `__ + .. |pypi dwn| image:: https://img.shields.io/pypi/dm/argopy?label=Pypi%20downloads :target: //pypi.org/project/argopy/ .. |conda dwn| image:: https://img.shields.io/conda/dn/conda-forge/argopy?label=Conda%20downloads