diff --git a/taipy/core/config/checkers/_data_node_config_checker.py b/taipy/core/config/checkers/_data_node_config_checker.py index f81b65dae0..27eb270ba4 100644 --- a/taipy/core/config/checkers/_data_node_config_checker.py +++ b/taipy/core/config/checkers/_data_node_config_checker.py @@ -10,7 +10,7 @@ # specific language governing permissions and limitations under the License. from datetime import timedelta -from typing import Dict, List, cast +from typing import Any, Callable, Dict, List, Tuple, cast from taipy.common.config._config import _Config from taipy.common.config.checker._checkers._config_checker import _ConfigChecker @@ -23,6 +23,27 @@ class _DataNodeConfigChecker(_ConfigChecker): + _PROPERTIES_TYPES: Dict[str, List[Tuple[Any, List[str]]]] = { + DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: [ + ( + Callable, + [ + DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY, + DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY, + ], + ) + ], + DataNodeConfig._STORAGE_TYPE_VALUE_SQL: [ + ( + Callable, + [ + DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY, + DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY, + ], + ), + ], + } + def __init__(self, config: _Config, collector: IssueCollector): super().__init__(config, collector) @@ -46,7 +67,7 @@ def _check(self) -> IssueCollector: self._check_scope(data_node_config_id, data_node_config) self._check_validity_period(data_node_config_id, data_node_config) self._check_required_properties(data_node_config_id, data_node_config) - self._check_callable(data_node_config_id, data_node_config) + self._check_class_type(data_node_config_id, data_node_config) self._check_generic_read_write_fct_and_args(data_node_config_id, data_node_config) self._check_exposed_type(data_node_config_id, data_node_config) return self._collector @@ -196,28 +217,25 @@ def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_ f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable 
function.", ) - def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeConfig): - properties_to_check = { - DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: [ - DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY, - DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY, - ], - DataNodeConfig._STORAGE_TYPE_VALUE_SQL: [ - DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY, - DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY, - ], - } - - if data_node_config.storage_type in properties_to_check.keys(): - for prop_key in properties_to_check[data_node_config.storage_type]: - prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None - if prop_value and not callable(prop_value): - self._error( - prop_key, - prop_value, - f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be" - f" populated with a Callable function.", - ) + def _check_class_type(self, data_node_config_id: str, data_node_config: DataNodeConfig): + if data_node_config.storage_type in self._PROPERTIES_TYPES.keys(): + for class_type, prop_keys in self._PROPERTIES_TYPES[data_node_config.storage_type]: + for prop_key in prop_keys: + prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None + if prop_value and not isinstance(prop_value, class_type): + self._error( + prop_key, + prop_value, + f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be" + f" populated with a {'Callable' if class_type == Callable else class_type.__name__}.", + ) + if class_type == Callable and callable(prop_value) and prop_value.__name__ == "<lambda>": + self._error( + prop_key, + prop_value, + f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be" + f" populated with a serializable Callable function but not a lambda.", + ) def _check_exposed_type(self, data_node_config_id: str, data_node_config: DataNodeConfig): if not isinstance(data_node_config.exposed_type, str): diff --git 
a/taipy/core/data/_data_converter.py b/taipy/core/data/_data_converter.py index 10442e626e..d934d42497 100644 --- a/taipy/core/data/_data_converter.py +++ b/taipy/core/data/_data_converter.py @@ -12,6 +12,7 @@ from copy import copy from datetime import datetime, timedelta from pydoc import locate +from typing import Dict from .._repository._abstract_converter import _AbstractConverter from ..common._utils import _load_fct @@ -120,14 +121,13 @@ def __serialize_exposed_type(properties: dict, exposed_type_key: str, valid_str_ for v in properties[exposed_type_key] ] else: - properties[ - exposed_type_key - ] = f"{properties[exposed_type_key].__module__}.{properties[exposed_type_key].__qualname__}" + properties[exposed_type_key] = ( + f"{properties[exposed_type_key].__module__}.{properties[exposed_type_key].__qualname__}" + ) return properties @classmethod - def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel: - properties = data_node._properties.data.copy() + def _serialize_properties(cls, data_node: DataNode, properties: Dict) -> Dict: if data_node.storage_type() == GenericDataNode.storage_type(): properties = cls.__serialize_generic_dn_properties(properties) @@ -144,6 +144,11 @@ def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel: properties = cls.__serialize_exposed_type( properties, cls._EXPOSED_TYPE_KEY, cls._VALID_STRING_EXPOSED_TYPES ) + return properties + + @classmethod + def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel: + properties = cls._serialize_properties(data_node, data_node._properties.data.copy()) return _DataNodeModel( data_node.id, @@ -273,25 +278,28 @@ def __deserialize_exposed_type(properties: dict, exposed_type_key: str, valid_st return properties @classmethod - def _model_to_entity(cls, model: _DataNodeModel) -> DataNode: - data_node_properties = model.data_node_properties.copy() - + def _deserialize_properties(cls, model: _DataNodeModel, properties: Dict) -> Dict: if model.storage_type == 
GenericDataNode.storage_type(): - data_node_properties = cls.__deserialize_generic_dn_properties(data_node_properties) + properties = cls.__deserialize_generic_dn_properties(properties) if model.storage_type == JSONDataNode.storage_type(): - data_node_properties = cls.__deserialize_json_dn_properties(data_node_properties) + properties = cls.__deserialize_json_dn_properties(properties) if model.storage_type == SQLDataNode.storage_type(): - data_node_properties = cls.__deserialize_sql_dn_model_properties(data_node_properties) + properties = cls.__deserialize_sql_dn_model_properties(properties) if model.storage_type == MongoCollectionDataNode.storage_type(): - data_node_properties = cls.__deserialize_mongo_collection_dn_model_properties(data_node_properties) + properties = cls.__deserialize_mongo_collection_dn_model_properties(properties) - if cls._EXPOSED_TYPE_KEY in data_node_properties.keys(): - data_node_properties = cls.__deserialize_exposed_type( - data_node_properties, cls._EXPOSED_TYPE_KEY, cls._VALID_STRING_EXPOSED_TYPES + if cls._EXPOSED_TYPE_KEY in properties.keys(): + properties = cls.__deserialize_exposed_type( + properties, cls._EXPOSED_TYPE_KEY, cls._VALID_STRING_EXPOSED_TYPES ) + return properties + + @classmethod + def _model_to_entity(cls, model: _DataNodeModel) -> DataNode: + data_node_properties = cls._deserialize_properties(model, model.data_node_properties.copy()) validity_period = None if model.validity_seconds is not None and model.validity_days is not None: diff --git a/tests/core/config/checkers/test_data_node_config_checker.py b/tests/core/config/checkers/test_data_node_config_checker.py index 9a4037b6b0..7c9c3f93b5 100644 --- a/tests/core/config/checkers/test_data_node_config_checker.py +++ b/tests/core/config/checkers/test_data_node_config_checker.py @@ -513,12 +513,12 @@ def test_check_callable_properties(self, caplog): Config.check() assert len(Config._collector.errors) == 2 expected_error_message_1 = ( - "`write_query_builder` of 
DataNodeConfig `new` must be populated with a Callable function." + "`write_query_builder` of DataNodeConfig `new` must be populated with a Callable." " Current value of property `write_query_builder` is 1." ) assert expected_error_message_1 in caplog.text expected_error_message_2 = ( - "`append_query_builder` of DataNodeConfig `new` must be populated with a Callable function." + "`append_query_builder` of DataNodeConfig `new` must be populated with a Callable." " Current value of property `append_query_builder` is 2." ) assert expected_error_message_2 in caplog.text @@ -530,7 +530,7 @@ def test_check_callable_properties(self, caplog): Config.check() assert len(Config._collector.errors) == 1 expected_error_messages = [ - "`write_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value" + "`write_fct` of DataNodeConfig `new` must be populated with a Callable. Current value" " of property `write_fct` is 12.", ] assert all(message in caplog.text for message in expected_error_messages) @@ -542,7 +542,7 @@ def test_check_callable_properties(self, caplog): Config.check() assert len(Config._collector.errors) == 1 expected_error_messages = [ - "`read_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value" + "`read_fct` of DataNodeConfig `new` must be populated with a Callable. Current value" " of property `read_fct` is 5.", ] assert all(message in caplog.text for message in expected_error_messages) @@ -554,9 +554,9 @@ def test_check_callable_properties(self, caplog): Config.check() assert len(Config._collector.errors) == 2 expected_error_messages = [ - "`write_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value" + "`write_fct` of DataNodeConfig `new` must be populated with a Callable. Current value" " of property `write_fct` is 9.", - "`read_fct` of DataNodeConfig `new` must be populated with a Callable function. 
Current value" + "`read_fct` of DataNodeConfig `new` must be populated with a Callable. Current value" " of property `read_fct` is 5.", ] assert all(message in caplog.text for message in expected_error_messages) @@ -581,6 +581,22 @@ def test_check_callable_properties(self, caplog): Config.check() assert len(Config._collector.errors) == 0 + config._sections[DataNodeConfig.name]["new"].storage_type = "generic" + config._sections[DataNodeConfig.name]["new"].properties = {"write_fct": lambda x: x, "read_fct": lambda y: y} + with pytest.raises(SystemExit): + Config._collector = IssueCollector() + Config.check() + assert len(Config._collector.errors) == 2 + expected_error_messages = [ + "`write_fct` of DataNodeConfig `new` must be populated with a serializable Callable function but" + " not a lambda. Current value of property `write_fct` is <function", + "`read_fct` of DataNodeConfig `new` must be populated with a serializable Callable function but" + " not a lambda. Current value of property `read_fct` is <function", + ] + assert all(message in caplog.text for message in expected_error_messages) + def test_check_read_write_fct_args(self, caplog): config = Config._applied_config Config._compile_configs()