
Commit

codespell [skip-ci]
gmaze committed Apr 13, 2022
1 parent c1119b2 commit 728c6ff
Showing 8 changed files with 22 additions and 22 deletions.
2 changes: 1 addition & 1 deletion argopy/data_fetchers/proto.py
@@ -96,7 +96,7 @@ def _cname(self) -> str:

@property
def sha(self) -> str:
""" Returns a unique SHA for a specifc cname / fetcher implementation"""
""" Returns a unique SHA for a specific cname / fetcher implementation"""
path = "%s-%s" % (self.definition, self.cname())
return hashlib.sha256(path.encode()).hexdigest()
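
For illustration only, the hashing pattern above can be reproduced standalone; the definition and cname strings below are hypothetical placeholders, not values taken from argopy.

import hashlib

# Hypothetical stand-ins for a fetcher's self.definition and self.cname()
definition = "Ifremer erddap Argo data fetcher"
cname = "phy;WMO6902746"

# Same pattern as the property above: hash "<definition>-<cname>" into a stable key
path = "%s-%s" % (definition, cname)
print(hashlib.sha256(path.encode()).hexdigest())  # 64-character hex digest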

2 changes: 1 addition & 1 deletion argopy/plot/dashboards.py
@@ -117,7 +117,7 @@ def open_dashboard(
* "ea", "data": the `Euro-Argo data selection dashboard <https://dataselection.euro-argo.eu>`_
* "meta": the `Euro-Argo fleet monitoring dashboard <https://fleetmonitoring.euro-argo.eu>`_
* "op", "ocean-ops": the `Ocean-OPS Argo dashboard <https://www.ocean-ops.org/board?t=argo>`_
* "bgc": the `Argo-BGC specific dashbaord <https://maps.biogeochemical-argo.com/bgcargo>`_
* "bgc": the `Argo-BGC specific dashboard <https://maps.biogeochemical-argo.com/bgcargo>`_
* "argovis": the `Colorado Argovis dashboard <https://argovis.colorado.edu>`_
url_only: bool, optional, default: False
If set to True, will only return the URL toward the dashboard
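
As a usage sketch only: the call below assumes ``open_dashboard`` is importable from ``argopy.plot.dashboards`` (the module touched in this diff) and that the dashboard kind is selected with a ``type`` keyword; both assumptions should be checked against the installed argopy release.

from argopy.plot.dashboards import open_dashboard  # import path as shown in this diff

# Keyword names are assumed ("type", "url_only"); only "url_only" is documented above
url = open_dashboard(type="bgc", url_only=True)
print(url)  # URL of the Argo-BGC dashboard, without opening it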
8 changes: 4 additions & 4 deletions argopy/tests/test_fetchers_data_gdac.py
@@ -93,7 +93,7 @@ def core(fargs, apts):
return fct_safe_to_server_errors(core)(fetcher_args, access_point, xfail=xfail)


- def assert_fetcher(this_fetcher, cachable=False):
+ def assert_fetcher(this_fetcher, cacheable=False):
"""Assert a data fetcher.
This should be used by all tests
@@ -102,7 +102,7 @@ def assert_fetcher(this_fetcher, cachable=False):
assert is_list_of_strings(this_fetcher.uri)
assert (this_fetcher.N_RECORDS >= 1) # Make sure we loaded the index file content
assert (this_fetcher.N_FILES >= 1) # Make sure we found results
- if cachable:
+ if cacheable:
assert is_list_of_strings(this_fetcher.cachepath)


@@ -184,7 +184,7 @@ def test_hosts_invalid(self, ftp_host):
def test_fetching(self, _make_a_fetcher):
@safe_to_server_errors
def test(this_fetcher):
- assert_fetcher(this_fetcher, cachable=False)
+ assert_fetcher(this_fetcher, cacheable=False)
test(_make_a_fetcher)

# @skip_for_debug
@@ -193,7 +193,7 @@ def test_fetching_cached(self, _make_a_cached_fetcher):
@safe_to_server_errors
def test(this_fetcher):
# Assert the fetcher (this trigger data fetching, hence caching as well):
- assert_fetcher(this_fetcher, cachable=True)
+ assert_fetcher(this_fetcher, cacheable=True)

# Make sure we can clear the cache:
this_fetcher.clear_cache()
8 changes: 4 additions & 4 deletions argopy/tests/test_fetchers_index_gdac.py
@@ -78,13 +78,13 @@ def core(fargs, apts):
return fct_safe_to_server_errors(core)(fetcher_args, access_point, xfail=xfail)


- def assert_fetcher(this_fetcher, cachable=False):
+ def assert_fetcher(this_fetcher, cacheable=False):
""" Assert structure of a fetcher """
assert isinstance(this_fetcher.to_dataframe(), pd.core.frame.DataFrame)
# assert is_list_of_strings(this_fetcher.uri)
assert (this_fetcher.N_RECORDS >= 1) # Make sure we loaded the index file content
assert (this_fetcher.N_FILES >= 1) # Make sure we found results
- if cachable:
+ if cacheable:
assert is_list_of_strings(this_fetcher.cachepath)


@@ -167,7 +167,7 @@ def test_hosts_invalid(self, ftp_host):
def test_fetching(self, _make_a_fetcher):
@safe_to_server_errors
def test(this_fetcher):
- assert_fetcher(this_fetcher, cachable=False)
+ assert_fetcher(this_fetcher, cacheable=False)
test(_make_a_fetcher)

# @skip_for_debug
@@ -176,7 +176,7 @@ def test_fetching_cached(self, _make_a_cached_fetcher):
@safe_to_server_errors
def test(this_fetcher):
# Assert the fetcher (this trigger data fetching, hence caching as well):
- assert_fetcher(this_fetcher, cachable=True)
+ assert_fetcher(this_fetcher, cacheable=True)

# Make sure we can clear the cache:
this_fetcher.clear_cache()
18 changes: 9 additions & 9 deletions argopy/tests/test_stores.py
@@ -170,7 +170,7 @@ def test_nocache(self):
with pytest.raises(FileSystemHasNoCache):
fs.cachepath("dummy_uri")

- def test_cachable(self):
+ def test_cacheable(self):
fs = httpstore(cache=True)
assert isinstance(fs.fs, fsspec.implementations.cached.WholeFileCacheFileSystem)
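
The cached filesystem class asserted here comes straight from fsspec; below is a minimal sketch of the same mechanism outside argopy (the protocol and cache directory are illustrative, not argopy defaults).

import fsspec
from fsspec.implementations.cached import WholeFileCacheFileSystem

# "filecache" wraps a target filesystem and keeps whole-file copies on local disk
fs = fsspec.filesystem("filecache", target_protocol="https", cache_storage="/tmp/fsspec_demo_cache")
assert isinstance(fs, WholeFileCacheFileSystem)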

@@ -265,7 +265,7 @@ def test_nocache(self):
with pytest.raises(FileSystemHasNoCache):
fs.cachepath("dummy_uri")

- def test_cachable(self):
+ def test_cacheable(self):
fs = memorystore(cache=True)
assert isinstance(fs.fs, fsspec.implementations.cached.WholeFileCacheFileSystem)

@@ -519,20 +519,20 @@ def a_search(self, request):
srch = request.param[1]
yield run_a_search(self.new_idx, {'host': host, 'cache': True}, srch)

- def assert_index(self, this_idx, cachable=False):
+ def assert_index(self, this_idx, cacheable=False):
assert hasattr(this_idx, 'index')
assert this_idx.shape[0] == this_idx.index.shape[0]
assert this_idx.N_RECORDS == this_idx.index.shape[0]
assert is_list_of_strings(this_idx.uri_full_index) and len(this_idx.uri_full_index) == this_idx.N_RECORDS
- if cachable:
+ if cacheable:
assert is_list_of_strings(this_idx.cachepath('index'))

- def assert_search(self, this_idx, cachable=False):
+ def assert_search(self, this_idx, cacheable=False):
assert hasattr(this_idx, 'search')
assert this_idx.N_MATCH == this_idx.search.shape[0]
assert this_idx.N_FILES == this_idx.N_MATCH
assert is_list_of_strings(this_idx.uri) and len(this_idx.uri) == this_idx.N_MATCH
- if cachable:
+ if cacheable:
assert is_list_of_strings(this_idx.cachepath('search'))

# @skip_this
@@ -572,7 +572,7 @@ def new_idx():
def test_search(self, a_search):
@safe_to_server_errors
def test(this_searched_store):
- self.assert_search(this_searched_store, cachable=False)
+ self.assert_search(this_searched_store, cacheable=False)
test(a_search)

# @skip_this
Expand Down Expand Up @@ -606,14 +606,14 @@ def test_to_dataframe_search(self):

def test_caching_index(self):
idx = self.new_idx(cache=True)
- self.assert_index(idx, cachable=True)
+ self.assert_index(idx, cacheable=True)

# @skip_this
def test_caching_search(self):
idx = self.new_idx(cache=True)
wmo = [s['wmo'] for s in valid_searches if 'wmo' in s.keys()][0]
idx.search_wmo(wmo)
- self.assert_search(idx, cachable=True)
+ self.assert_search(idx, cacheable=True)

# @skip_this
def test_read_wmo(self):
2 changes: 1 addition & 1 deletion argopy/utilities.py
@@ -2142,7 +2142,7 @@ def load(self, errors: str = "ignore"):


def argo_split_path(this_path): # noqa C901
""" Split path from a GDAC ftp style Argo netcdf file and return informations
""" Split path from a GDAC ftp style Argo netcdf file and return information
>>> argo_split_path('coriolis/6901035/profiles/D6901035_001D.nc')
>>> argo_split_path('https://data-argo.ifremer.fr/dac/csiro/5903939/profiles/D5903939_103.nc')
2 changes: 1 addition & 1 deletion docs/whats-new.rst
@@ -27,7 +27,7 @@ v0.1.11 (13 Apr. 2022)
.. note::

- The new ``gdac`` fetcher uses Argo index to determine which profile files to load. Hence, this fetcher may show poor performances when used with a ``region`` access point. Don't hesitate to check :ref:`Performances` to try to improve performances, otherwise, we recommand to use a webAPI access point (``erddap`` or ``argovis``).
+ The new ``gdac`` fetcher uses Argo index to determine which profile files to load. Hence, this fetcher may show poor performances when used with a ``region`` access point. Don't hesitate to check :ref:`Performances` to try to improve performances, otherwise, we recommend to use a webAPI access point (``erddap`` or ``argovis``).
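
For example, a ``region`` request is typically better served by a webAPI source; a minimal sketch (the region bounds and dates below are placeholders):

import argopy

# webAPI data source, as recommended above; triggers a fetch from the erddap server
ds = argopy.DataFetcher(src="erddap").region([-75, -45, 20, 30, 0, 100, "2011-01", "2011-06"]).to_xarray()
# The file-based equivalent would use src="gdac" with the same access point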

.. warning::

2 changes: 1 addition & 1 deletion requirements.txt
@@ -6,6 +6,6 @@ dask>=2.9 # This could go away ?
toolz>=0.8.2
erddapy>=0.6 # This could go away ?
fsspec>=0.7.4
- gsw<=3.4.0 # Used by xarray accessor to compute new variables, so not necessary to core functionnalities
+ gsw<=3.4.0 # Used by xarray accessor to compute new variables, so not necessary to core functionalities
aiohttp>=3.6.2
packaging>= 20.4 # Using 'version' to make API compatible with several fsspec releases
