Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/#521 refactor code for databricks integration #2284

Merged
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 33 additions & 18 deletions taipy/core/config/checkers/_data_node_config_checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
# specific language governing permissions and limitations under the License.

from datetime import timedelta
from typing import Dict, List, cast
from typing import Any, Callable, Dict, List, Tuple, cast

from taipy.common.config._config import _Config
from taipy.common.config.checker._checkers._config_checker import _ConfigChecker
Expand Down Expand Up @@ -46,7 +46,7 @@ def _check(self) -> IssueCollector:
self._check_scope(data_node_config_id, data_node_config)
self._check_validity_period(data_node_config_id, data_node_config)
self._check_required_properties(data_node_config_id, data_node_config)
self._check_callable(data_node_config_id, data_node_config)
self._check_class_type(data_node_config_id, data_node_config)
self._check_generic_read_write_fct_and_args(data_node_config_id, data_node_config)
self._check_exposed_type(data_node_config_id, data_node_config)
return self._collector
Expand Down Expand Up @@ -196,28 +196,43 @@ def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_
f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
)

def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeConfig):
properties_to_check = {
@staticmethod
def _get_class_type_and_properties() -> Dict[str, List[Tuple[Any, List[str]]]]:
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This can be turned into a class attribute instead of a function; since we were already using a function, I just went with it so I could implement it quickly.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I agree. We can implement it as a dictionary class attribute of the DataNodeConfig class. Just like we have _REQUIRED_PROPERTIES: Dict[str, List] or _OPTIONAL_PROPERTIES: Dict[str, Dict[str, Any]], we could have something like :

_PROPERTIES_TYPES: Dict[str, Dict[str, type]] = {
    DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: {
        DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY: Callable,
        DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY: Callable},
    DataNodeConfig._STORAGE_TYPE_VALUE_SQL: {
        DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY: Callable,
        DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY: Callable}
}

By the way, we could use it to check the types of all properties (both required and optional), not only for callables.
Then, this dictionary could be extended to enterprise.

Does it make sense?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yep, that's my initial intention with this refactoring. Trying to reuse the code for different types of instances. Though I'd say we should find a different ticket for that 😅

return {
DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: [
DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
(
Callable,
toan-quach marked this conversation as resolved.
Show resolved Hide resolved
[
DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
],
)
],
DataNodeConfig._STORAGE_TYPE_VALUE_SQL: [
DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY,
(
Callable,
[
DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
DataNodeConfig._OPTIONAL_APPEND_QUERY_BUILDER_SQL_PROPERTY,
],
),
],
}

if data_node_config.storage_type in properties_to_check.keys():
for prop_key in properties_to_check[data_node_config.storage_type]:
prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None
if prop_value and not callable(prop_value):
self._error(
prop_key,
prop_value,
f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be"
f" populated with a Callable function.",
)
def _check_class_type(self, data_node_config_id: str, data_node_config: DataNodeConfig):
    """Check that typed properties of *data_node_config* hold the expected class.

    Only the properties relevant to this config's own storage type are
    validated. Iterating every storage type in the mapping (as the refactor
    did) would flag properties that are meaningless for this config — the
    pre-refactor `_check_callable` gated on the config's storage type, and
    that behavior is preserved here.
    """
    properties_to_check = self._get_class_type_and_properties()

    for class_type, prop_keys in properties_to_check.get(data_node_config.storage_type, []):
        for prop_key in prop_keys:
            # Guard: `properties` may be None/empty on a bare config.
            prop_value = data_node_config.properties.get(prop_key) if data_node_config.properties else None
            # Falsy values (None / unset) are skipped; required-ness is
            # enforced by _check_required_properties, not here.
            if prop_value and not isinstance(prop_value, class_type):
                self._error(
                    prop_key,
                    prop_value,
                    f"`{prop_key}` of DataNodeConfig `{data_node_config_id}` must be"
                    f" populated with a {class_type.__name__}.",
                )

def _check_exposed_type(self, data_node_config_id: str, data_node_config: DataNodeConfig):
if not isinstance(data_node_config.exposed_type, str):
Expand Down
38 changes: 23 additions & 15 deletions taipy/core/data/_data_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from copy import copy
from datetime import datetime, timedelta
from pydoc import locate
from typing import Dict

from .._repository._abstract_converter import _AbstractConverter
from ..common._utils import _load_fct
Expand Down Expand Up @@ -120,14 +121,13 @@ def __serialize_exposed_type(properties: dict, exposed_type_key: str, valid_str_
for v in properties[exposed_type_key]
]
else:
properties[
exposed_type_key
] = f"{properties[exposed_type_key].__module__}.{properties[exposed_type_key].__qualname__}"
properties[exposed_type_key] = (
f"{properties[exposed_type_key].__module__}.{properties[exposed_type_key].__qualname__}"
)
return properties

@classmethod
def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel:
properties = data_node._properties.data.copy()
def _serialize_properties(cls, data_node: DataNode, properties: Dict) -> Dict:
if data_node.storage_type() == GenericDataNode.storage_type():
properties = cls.__serialize_generic_dn_properties(properties)

Expand All @@ -144,6 +144,11 @@ def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel:
properties = cls.__serialize_exposed_type(
properties, cls._EXPOSED_TYPE_KEY, cls._VALID_STRING_EXPOSED_TYPES
)
return properties

@classmethod
def _entity_to_model(cls, data_node: DataNode) -> _DataNodeModel:
properties = cls._serialize_properties(data_node, data_node._properties.data.copy())

return _DataNodeModel(
data_node.id,
Expand Down Expand Up @@ -273,25 +278,28 @@ def __deserialize_exposed_type(properties: dict, exposed_type_key: str, valid_st
return properties

@classmethod
def _deserialize_properties(cls, model: _DataNodeModel, properties: Dict) -> Dict:
    """Convert model-level property values back into their entity-level form.

    Dispatches on ``model.storage_type`` to undo the storage-specific
    serialization (generic read/write functions, JSON encoders/decoders,
    SQL query builders, Mongo custom documents), then restores the
    ``exposed_type`` entry when present. Returns the (mutated) properties
    dict for convenience.
    """
    if model.storage_type == GenericDataNode.storage_type():
        properties = cls.__deserialize_generic_dn_properties(properties)

    if model.storage_type == JSONDataNode.storage_type():
        properties = cls.__deserialize_json_dn_properties(properties)

    if model.storage_type == SQLDataNode.storage_type():
        properties = cls.__deserialize_sql_dn_model_properties(properties)

    if model.storage_type == MongoCollectionDataNode.storage_type():
        properties = cls.__deserialize_mongo_collection_dn_model_properties(properties)

    # Idiomatic membership test — `in properties` rather than `in properties.keys()`.
    if cls._EXPOSED_TYPE_KEY in properties:
        properties = cls.__deserialize_exposed_type(
            properties, cls._EXPOSED_TYPE_KEY, cls._VALID_STRING_EXPOSED_TYPES
        )
    return properties

@classmethod
def _model_to_entity(cls, model: _DataNodeModel) -> DataNode:
data_node_properties = cls._deserialize_properties(model, model.data_node_properties.copy())

validity_period = None
if model.validity_seconds is not None and model.validity_days is not None:
Expand Down
12 changes: 6 additions & 6 deletions tests/core/config/checkers/test_data_node_config_checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -513,12 +513,12 @@ def test_check_callable_properties(self, caplog):
Config.check()
assert len(Config._collector.errors) == 2
expected_error_message_1 = (
"`write_query_builder` of DataNodeConfig `new` must be populated with a Callable function."
"`write_query_builder` of DataNodeConfig `new` must be populated with a Callable."
" Current value of property `write_query_builder` is 1."
)
assert expected_error_message_1 in caplog.text
expected_error_message_2 = (
"`append_query_builder` of DataNodeConfig `new` must be populated with a Callable function."
"`append_query_builder` of DataNodeConfig `new` must be populated with a Callable."
" Current value of property `append_query_builder` is 2."
)
assert expected_error_message_2 in caplog.text
Expand All @@ -530,7 +530,7 @@ def test_check_callable_properties(self, caplog):
Config.check()
assert len(Config._collector.errors) == 1
expected_error_messages = [
"`write_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value"
"`write_fct` of DataNodeConfig `new` must be populated with a Callable. Current value"
" of property `write_fct` is 12.",
]
assert all(message in caplog.text for message in expected_error_messages)
Expand All @@ -542,7 +542,7 @@ def test_check_callable_properties(self, caplog):
Config.check()
assert len(Config._collector.errors) == 1
expected_error_messages = [
"`read_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value"
"`read_fct` of DataNodeConfig `new` must be populated with a Callable. Current value"
" of property `read_fct` is 5.",
]
assert all(message in caplog.text for message in expected_error_messages)
Expand All @@ -554,9 +554,9 @@ def test_check_callable_properties(self, caplog):
Config.check()
assert len(Config._collector.errors) == 2
expected_error_messages = [
"`write_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value"
"`write_fct` of DataNodeConfig `new` must be populated with a Callable. Current value"
" of property `write_fct` is 9.",
"`read_fct` of DataNodeConfig `new` must be populated with a Callable function. Current value"
"`read_fct` of DataNodeConfig `new` must be populated with a Callable. Current value"
" of property `read_fct` is 5.",
]
assert all(message in caplog.text for message in expected_error_messages)
Expand Down
Loading