bugfixes and cleanup
matthiasprobst committed Sep 26, 2023
1 parent b812be2 commit f74b385
Showing 7 changed files with 50 additions and 77 deletions.
4 changes: 2 additions & 2 deletions docs/conventions/examples/EngMeta.ipynb
@@ -322,8 +322,8 @@
"\n",
" <ul style=\"list-style-type: none;\" class=\"h5grp-sections\">\n",
" <li>\n",
-" <input id=\"group-ds--5757953000\" type=\"checkbox\" checked>\n",
-" <label style=\"font-weight: bold\" for=\"group-ds--5757953000\">\n",
+" <input id=\"group-ds--9176614900\" type=\"checkbox\" checked>\n",
+" <label style=\"font-weight: bold\" for=\"group-ds--9176614900\">\n",
" /<span>(0)</span></label>\n",
" \n",
"\n",
2 changes: 1 addition & 1 deletion docs/conventions/examples/EngMeta.yaml
@@ -89,7 +89,7 @@ description:
keywords:
description: List of describing keywords.
target_method: __init__
-validator: $list_of_str
+validator: List[str]
default_value: $NONE

$subject:
32 changes: 16 additions & 16 deletions docs/howto/ht004.ipynb
@@ -6,7 +6,7 @@
"metadata": {},
"source": [
"# ... add an affix-function to a standard name table during runtime?\n",
-"If you are using the [standard name convention](../conventions/standard_name_convention.ipynb) you are familiar with transformation functions like \"derivative_of_\\<SN1\\>_wrt_\\<SN2\\>\" or \"square_of_\\<SN\\>\".\n",
+"If you are using the [standard name convention](../conventions/standard_attributes_and_conventions.ipynb) you are familiar with transformation functions like \"derivative_of_\\<SN1\\>_wrt_\\<SN2\\>\" or \"square_of_\\<SN\\>\".\n",
"\n",
"Now, you want to add your custom transformation function without changing the package code. Say, you want to add the transformation \"maximum_of_<StdName>\", referring to the maximum value of your data.\n",
"\n",
@@ -15,7 +15,7 @@
},
{
"cell_type": "code",
-"execution_count": 2,
+"execution_count": 1,
"id": "791af21d-0c99-4d0b-bb0b-b5f59ecb1c8c",
"metadata": {},
"outputs": [
@@ -59,7 +59,7 @@
},
{
"cell_type": "code",
-"execution_count": 3,
+"execution_count": 2,
"id": "dc0db89c-c3b9-4c41-87dc-6a5ae91fc356",
"metadata": {},
"outputs": [],
@@ -83,7 +83,7 @@
},
{
"cell_type": "code",
-"execution_count": 4,
+"execution_count": 3,
"id": "e10d2b65-b6ea-42ce-909d-a375a0ba71ed",
"metadata": {},
"outputs": [],
@@ -101,7 +101,7 @@
},
{
"cell_type": "code",
-"execution_count": 5,
+"execution_count": 4,
"id": "9f9d5fa2-9dfa-46ff-bc9b-8b9db8f5870a",
"metadata": {},
"outputs": [
@@ -111,7 +111,7 @@
"True"
]
},
-"execution_count": 5,
+"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
@@ -122,7 +122,7 @@
},
{
"cell_type": "code",
-"execution_count": 6,
+"execution_count": 5,
"id": "f54490f6-0272-4483-86f2-71fba12207c0",
"metadata": {},
"outputs": [
@@ -132,7 +132,7 @@
"False"
]
},
-"execution_count": 6,
+"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -151,7 +151,7 @@
},
{
"cell_type": "code",
-"execution_count": 7,
+"execution_count": 6,
"id": "a5ae8c3a-3115-400f-bd02-3d75578c707a",
"metadata": {},
"outputs": [],
@@ -161,7 +161,7 @@
},
{
"cell_type": "code",
-"execution_count": 8,
+"execution_count": 7,
"id": "835d7ccb-afca-429a-a5f1-38b8a18e4230",
"metadata": {},
"outputs": [
@@ -170,8 +170,8 @@
"text/html": [
"<ul style=\"list-style-type: none;\" class=\"h5grp-sections\">\n",
" <li>\n",
-" <input id=\"group-maximum_of_static_pressure22629808100\" type=\"checkbox\" checked>\n",
-" <label style=\"font-weight: bold\" for=\"group-maximum_of_static_pressure22629808100\">\n",
+" <input id=\"group-maximum_of_static_pressure6668567300\" type=\"checkbox\" checked>\n",
+" <label style=\"font-weight: bold\" for=\"group-maximum_of_static_pressure6668567300\">\n",
" maximum_of_static_pressure</label>\n",
" <ul class=\"h5tb-attr-list\">\n",
" <li style=\"list-style-type: none; font-style: italic\">units : Pa</li> <li style=\"list-style-type: none; font-style: italic\">description : Maximum of static_pressure. Static pressure refers to the force per unit area exerted by a fluid. Pressure is a scalar quantity.</li> </il>\n",
@@ -182,7 +182,7 @@
"<StandardName: \"maximum_of_static_pressure\" [Pa] Maximum of static_pressure. Static pressure refers to the force per unit area exerted by a fluid. Pressure is a scalar quantity.>"
]
},
-"execution_count": 8,
+"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -193,7 +193,7 @@
},
{
"cell_type": "code",
-"execution_count": 9,
+"execution_count": 8,
"id": "ccc894ff-2748-4e1b-9677-5b37d0b7d792",
"metadata": {},
"outputs": [
@@ -203,7 +203,7 @@
"True"
]
},
-"execution_count": 9,
+"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
@@ -237,7 +237,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
-"version": "3.8.17"
+"version": "3.8.18"
}
},
"nbformat": 4,
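The notebook cell changed above describes registering a custom "maximum_of_<StdName>" affix with a standard name table at runtime, so that a name such as "maximum_of_static_pressure" resolves to the base entry's units plus a "Maximum of ..." description, as the cell outputs show. The following is only a rough standalone sketch of that idea, not the h5rdmtoolbox API; maximum_of and the toy table lookup are invented for illustration.

import re

# Affix pattern discussed in the notebook: "maximum_of_<standard_name>".
MAX_PATTERN = re.compile(r"^maximum_of_(?P<sn>.+)$")


def maximum_of(name: str, lookup: dict) -> dict:
    """Resolve 'maximum_of_<sn>' against a plain dict standing in for the table."""
    match = MAX_PATTERN.match(name)
    if match is None:
        raise KeyError(f"{name!r} does not match the 'maximum_of_' affix")
    sn = match.group("sn")
    base = lookup[sn]
    # Keep the base units and prefix the description, mirroring the notebook
    # output for "maximum_of_static_pressure".
    return {"name": name,
            "units": base["units"],
            "description": f"Maximum of {sn}. {base['description']}"}


table = {"static_pressure": {"units": "Pa",
                             "description": "Static pressure refers to the force per unit area exerted by a fluid."}}
print(maximum_of("maximum_of_static_pressure", table))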
43 changes: 0 additions & 43 deletions h5rdmtoolbox/conventions/references.py
@@ -1,15 +1,4 @@
-import json
import requests
-import warnings
-from typing import Union
-
-BIBTEX_ENTRY_TYPES = ('article',
-                      'book',
-                      'booklet',
-                      'conference',
-                      'inbook',
-                      'incollection',
-                      'inproceedings',)


def validate_url(url: str) -> bool:
@@ -32,35 +21,3 @@ def validate_url(url: str) -> bool:
    if response.status_code == 200:
        return True
    return False
-
-
-def validate_bibtex(bibtex: Union[str, dict]) -> bool:
-    """Validate BibTeX entry based on the mandatory keys.
-    Example entry:
-        bibtex = {'article': {'title': 'Title',
-                              'author': 'Author',
-                              'year': '2020',
-                              ...}
-                  }
-    """
-    if isinstance(bibtex, str):
-        bibtex = json.loads(bibtex)
-    mandatory_keys = ['title', 'author', 'year']
-    for entry_type, fields in bibtex.items():
-        if entry_type[0] == '@':
-            entry_type = entry_type[1:]
-        if entry_type.lower() not in BIBTEX_ENTRY_TYPES:
-            warnings.warn(f'Invalid BibTeX entry type: {entry_type}. Expected types: {BIBTEX_ENTRY_TYPES}')
-            return False
-        if not all(k in fields for k in mandatory_keys):
-            return False
-    return True
-
-
-def validate_reference(reference: str) -> bool:
-    if isinstance(reference, dict):
-        return validate_bibtex(reference)
-    if isinstance(reference, str) and reference[0] == '{':
-        return validate_bibtex(json.loads(reference))
-    return validate_url(reference)
10 changes: 0 additions & 10 deletions h5rdmtoolbox/conventions/toolbox_validators.py
@@ -11,15 +11,6 @@
from h5rdmtoolbox import get_ureg, errors


-def __validate_list_of_str(value, handler, info):
-    if not isinstance(value, list):
-        raise TypeError(f'Expected a list but got {type(value)}')
-    for v in value:
-        if not isinstance(v, str):
-            raise TypeError(f'Value {v} is not a string: {type(v)}')
-    return value


def __validate_orcid(value, handler, info):
    from h5rdmtoolbox import orcid
    if not isinstance(value, str):
@@ -190,7 +181,6 @@ class IntValidator(BaseModel):
    value: Annotated[str, WrapValidator(_get_validate_type(int))]


-list_of_str = Annotated[str, WrapValidator(__validate_list_of_str)]
units = Annotated[str, WrapValidator(__validate_units)]
dateFormat = Annotated[str, WrapValidator(__validate_date_format)]
quantity = Annotated[str, WrapValidator(__validate_quantity)]
22 changes: 22 additions & 0 deletions tests/conventions/test_references.py
@@ -0,0 +1,22 @@
+"""Testing the standard attributes"""
+import pathlib
+import unittest
+
+from h5rdmtoolbox.conventions import references
+
+__this_dir__ = pathlib.Path(__file__).parent
+
+bibtext = """@ONLINE{hdf5group,
+author = {{The HDF Group}},
+title = "{Hierarchical Data Format, version 5}",
+year = {1997-NNNN},
+note = {https://www.hdfgroup.org/HDF5/},
+addendum = "(accessed: 25.09.2023)",
+}"""
+
+
+class TestReferences(unittest.TestCase):
+
+    def test_url(self):
+        self.assertFalse(references.validate_url(123))
+
14 changes: 9 additions & 5 deletions tests/conventions/test_toolbox_validators.py
@@ -4,6 +4,8 @@
import pint
import unittest
from pydantic import BaseModel
+from pydantic import ValidationError
+from typing import List, Union

import h5rdmtoolbox as h5tbx
from h5rdmtoolbox.conventions import toolbox_validators
@@ -15,14 +17,16 @@ class TestTbxValidators(unittest.TestCase):

    def test_validate_list_of_str(self):
        class MyStr(BaseModel):
-            los: toolbox_validators.list_of_str
+            los: Union[str, List[str]]

-        with self.assertRaises(TypeError):
+        with self.assertRaises(ValidationError):
            MyStr(los=3.4)
-        with self.assertRaises(TypeError):
-            MyStr(los='3.4')

+        MyStr(los='3.4')

        MyStr(los=['3.4', 'str'])
-        with self.assertRaises(TypeError):

+        with self.assertRaises(ValidationError):
            MyStr(los=['3.4', 3.4])

        cv = h5tbx.conventions.from_yaml(__this_dir__ / 'ListOfStr.yaml', overwrite=True)
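The updated test above drops the custom toolbox_validators.list_of_str type in favour of a plain Union[str, List[str]] annotation, so pydantic itself rejects non-string items and raises ValidationError rather than the TypeError of the old helper. A minimal standalone sketch of the behaviour the test expects (assuming pydantic v2 semantics; Keywords is an invented model name):

from typing import List, Union

from pydantic import BaseModel, ValidationError


class Keywords(BaseModel):
    # Either a single keyword or a list of keywords; no custom validator needed.
    los: Union[str, List[str]]


Keywords(los='piv')              # a single string validates
Keywords(los=['piv', 'vortex'])  # a list of strings validates

try:
    # A float is not coerced to str, so both union members fail.
    Keywords(los=['piv', 3.4])
except ValidationError as exc:
    print(exc)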
