Skip to content

Commit

Permalink
updatedocs
Browse files Browse the repository at this point in the history
  • Loading branch information
matthiasprobst committed Apr 19, 2024
1 parent 9b5f745 commit 59ba7cf
Show file tree
Hide file tree
Showing 13 changed files with 1,478 additions and 3,482 deletions.
174 changes: 83 additions & 91 deletions docs/gettingstarted/quickoverview.ipynb

Large diffs are not rendered by default.

146 changes: 73 additions & 73 deletions docs/practical_examples/metadata4ing.ipynb

Large diffs are not rendered by default.

2,151 changes: 1,071 additions & 1,080 deletions docs/practical_examples/nexus.ipynb

Large diffs are not rendered by default.

2,171 changes: 5 additions & 2,166 deletions docs/practical_examples/photon_hdf5.ipynb

Large diffs are not rendered by default.

90 changes: 33 additions & 57 deletions docs/userguide/convention/examples/standard_name_convention.ipynb

Large diffs are not rendered by default.

180 changes: 180 additions & 0 deletions docs/userguide/misc/UserDirectories.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "e93ec3be-9e45-4540-88fc-370310b3273b",
"metadata": {},
"source": [
"# User directories\n",
"\n",
"The `h5rdmtoolbox` uses a couple of local directories to store temporary files or manage cache files."
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "2190644a-d72a-4aa4-b6b3-e61fe9f496e1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Failed to import module h5tbx\n"
]
}
],
"source": [
"import h5rdmtoolbox as h5tbx"
]
},
{
"cell_type": "markdown",
"id": "c6a80526-de72-4c29-b484-82ba6ae4ad15",
"metadata": {},
"source": [
    "The `UserDir` object manages these paths. Its `__repr__` lists the different directories:"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "f9b2d695-b91e-486a-89d9-02d3b6a16c59",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"DirManger(root, tmp, convention, layouts, repository, standard_name_tables, cache)"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"h5tbx.UserDir"
]
},
{
"cell_type": "markdown",
"id": "6efea3fa-b401-49d9-92b1-7c54456413f8",
"metadata": {},
"source": [
"## Examples\n",
    "Temporary files, e.g. those created for empty `h5tbx.File` objects, will be put here:"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "8eb9f616-2efc-463a-9915-b668f64958e0",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"WindowsPath('C:/Users/Matthias/AppData/Local/h5rdmtoolbox/h5rdmtoolbox/tmp/tmp_0')"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"h5tbx.UserDir['tmp']"
]
},
{
"cell_type": "markdown",
"id": "3c6b0e27-5236-4102-9fbe-ea0873bba947",
"metadata": {},
"source": [
    "Let's check it:"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "a59dc11f-85b3-4db1-9642-5daa7a05307e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"WindowsPath('C:/Users/Matthias/AppData/Local/h5rdmtoolbox/h5rdmtoolbox/tmp/tmp_0/tmp2.hdf')"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"with h5tbx.File() as h5:\n",
" filename = h5.hdf_filename\n",
"filename"
]
},
{
"cell_type": "markdown",
"id": "c50b5275-ce80-403b-91ce-88c904497b4d",
"metadata": {},
"source": [
"Tokens for repositories, e.g. Zenodo, should be put here:"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "ca53b6db-6f20-4500-91a7-1f738fd6bb5d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"WindowsPath('C:/Users/Matthias/AppData/Local/h5rdmtoolbox/h5rdmtoolbox/repository')"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"h5tbx.UserDir['repository']"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3f703cb7-7f4d-49b0-a12f-18b3a9fa9cd6",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.19"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
3 changes: 2 additions & 1 deletion docs/userguide/misc/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@ of the sub-package implementations. This section outlines them.
identifiers.ipynb
Extensions.ipynb
Visualization.ipynb
Time.ipynb
Time.ipynb
UserDirectories.ipynb
10 changes: 7 additions & 3 deletions h5rdmtoolbox/layout/core.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
"""Layout validation module"""
import enum
import h5py
import logging
import pathlib
import types
import uuid
import warnings
from typing import Dict, Union, List, Protocol, Optional, Callable, Tuple

import h5py

import h5rdmtoolbox as h5tbx

logger = logging.getLogger('h5rdmtoolbox')
Expand Down Expand Up @@ -106,7 +107,7 @@ class LayoutSpecification:

def __init__(self,
func: QueryCallable,
kwargs,
kwargs: Dict,
n: Union[int, None, Dict],
description: Optional[str] = None,
parent: Optional["LayoutSpecification"] = None):
Expand Down Expand Up @@ -327,7 +328,7 @@ def __call__(self, target: Union[h5py.Group, h5py.Dataset]):
def add(self,
func: QueryCallable,
*,
n: Union[int, None, Dict],
n: Optional[Union[int, None, Dict]] = None,
description: Optional[str] = None,
**kwargs):
"""
Expand Down Expand Up @@ -566,6 +567,9 @@ class Layout(LayoutSpecification):
def __init__(self):
self.specifications: List[LayoutSpecification] = []

def __repr__(self):
    """Return the class name as the object's representation."""
    # The original wrapped the name in an f-string (f'{...}'), which is
    # redundant: __name__ is already a str. Return it directly.
    return self.__class__.__name__

def __eq__(self, other):
if not isinstance(other, Layout):
return False
Expand Down
13 changes: 8 additions & 5 deletions h5rdmtoolbox/wrapper/jsonld.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
import h5py
import json
import logging
import pathlib
import warnings
from typing import Dict, List
from typing import Optional, Union, Iterable, Tuple, Any

import h5py
import numpy as np
import ontolutils
import pathlib
import rdflib
import warnings
from ontolutils.classes.utils import split_URIRef
from rdflib import Graph, URIRef, Literal, BNode, XSD, RDF
from rdflib.plugins.shared.jsonld.context import Context
from typing import Dict, List
from typing import Optional, Union, Iterable, Tuple, Any

from h5rdmtoolbox.convention import hdf_ontology
from .core import Dataset, File
Expand Down Expand Up @@ -661,6 +662,8 @@ def _add_hdf_node(name, obj, ctx):
list_node = build_node_list(g, av.tolist())
else:
list_node = build_node_list(g, av)
elif isinstance(av, (h5py.Dataset, h5py.Group)):
attr_literal = rdflib.Literal(av.name)
else:
try:
attr_literal = rdflib.Literal(json.dumps(av))
Expand Down
3 changes: 2 additions & 1 deletion requirements-doc.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ sphinx_book_theme
sphinx-copybutton
sphinx-design
myst-nb
sphinxcontrib-bibtex
sphinxcontrib-bibtex
pivmetalib @ git+https://github.com/matthiasprobst/pivmetalib@main
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ python-gitlab
pypandoc>=1.11
pydantic >= 2.3.0
# other:
ontolutils
ontolutils >= 0.2.20
3 changes: 2 additions & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ install_requires =
pint_xarray # >= 0.2.1
regex # >= 2023
packaging
ontolutils
ontolutils>=0.2.20
python-forge # == 18.6.0
requests
pydantic >= 2.3.0
Expand Down Expand Up @@ -80,6 +80,7 @@ docs =
simplejson
myst-nb
sphinxcontrib-bibtex
pivmetalib @ git+https://github.com/matthiasprobst/pivmetalib@main
complete =
%(gui)s
%(test)s
Expand Down
14 changes: 11 additions & 3 deletions tests/layouts/test_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,19 +110,27 @@ def test_alternative_specification(self):
def test_is_single_result(self):
self.assertEqual(is_single_result(None), False)

lay = layout.Layout()
spec = lay.add(hdfdb.ObjDB.find_one, flt={'$name': '/u'}, n=None)
lay1 = layout.Layout()
lay2 = layout.Layout()
spec = lay1.add(hdfdb.ObjDB.find_one, flt={'$name': '/u'}, n=None)
sub_spec = spec.add(hdfdb.ObjDB.find, flt={'$shape': (3, 4)}, n=None)

# note, in the following find_one is used also for the sub_spec:
spec2 = lay2.add(hdfdb.FileDB.find, flt={'$name': '/u'}, n=1)
sub_spec2 = spec2.add(hdfdb.ObjDB.find_one, flt={'$shape': (3, 4)}, n=1)

self.assertFalse(spec.is_valid())
self.assertFalse(sub_spec.is_valid())
self.assertEqual([], spec.get_valid())

with h5tbx.File() as h5:
h5.create_dataset('u', shape=(3, 4), dtype='float32')

res = lay.validate(h5)
res = lay1.validate(h5)
self.assertTrue(res.is_valid())
res2 = lay2.validate(h5)
self.assertTrue(res2.is_valid())

self.assertTrue(spec.is_valid())
self.assertTrue(sub_spec.is_valid())
self.assertEqual([spec, sub_spec], spec.get_valid())
Expand Down

0 comments on commit 59ba7cf

Please sign in to comment.