
Merge branch 'develop' into feature/#2071-rank-datanodes-in-selector
jrobinAV authored Nov 27, 2024
2 parents f55f699 + 45cb211 commit 4d43793
Showing 10 changed files with 1,033 additions and 60 deletions.
76 changes: 53 additions & 23 deletions taipy/core/data/_file_datanode_mixin.py
@@ -21,12 +21,19 @@

 from .._entity._reload import _self_reload
 from ..common._utils import _normalize_path
-from ..reason import InvalidUploadFile, NoFileToDownload, NotAFile, ReasonCollection, UploadFileCanNotBeRead
+from ..reason import (
+    DataNodeEditInProgress,
+    InvalidUploadFile,
+    NoFileToDownload,
+    NotAFile,
+    ReasonCollection,
+    UploadFileCanNotBeRead,
+)
 from .data_node import DataNode
 from .data_node_id import Edit


-class _FileDataNodeMixin(object):
+class _FileDataNodeMixin:
     """Mixin class designed to handle file-based data nodes."""

     __EXTENSION_MAP = {"csv": "csv", "excel": "xlsx", "parquet": "parquet", "pickle": "p", "json": "json"}
@@ -102,53 +109,76 @@ def _get_downloadable_path(self) -> str:

return ""

def _upload(self, path: str, upload_checker: Optional[Callable[[str, Any], bool]] = None) -> ReasonCollection:
def _upload(self,
path: str,
upload_checker: Optional[Callable[[str, Any], bool]] = None,
editor_id: Optional[str] = None,
comment: Optional[str] = None,
**kwargs: Any) -> ReasonCollection:
"""Upload a file data to the data node.
Arguments:
path (str): The path of the file to upload to the data node.
upload_checker (Optional[Callable[[str, Any], bool]]): A function to check if the upload is allowed.
The function takes the title of the upload data and the data itself as arguments and returns
True if the upload is allowed, otherwise False.
upload_checker (Optional[Callable[[str, Any], bool]]): A function to check if the
upload is allowed. The function takes the title of the upload data and the data
itself as arguments and returns True if the upload is allowed, otherwise False.
editor_id (Optional[str]): The ID of the user who is uploading the file.
comment (Optional[str]): A comment to add to the edit history of the data node.
**kwargs: Additional keyword arguments. These arguments are stored in the edit
history of the data node. In particular, an `editor_id` or a `comment` can be
passed. The `editor_id` is the ID of the user who is uploading the file, and the
`comment` is a comment to add to the edit history.
Returns:
True if the upload was successful, otherwise False.
True if the upload was successful, the reasons why the upload was not successful
otherwise.
"""
         from ._data_manager_factory import _DataManagerFactory

-        reason_collection = ReasonCollection()
-
-        upload_path = pathlib.Path(path)
+        reasons = ReasonCollection()
+        if (editor_id
+                and self.edit_in_progress  # type: ignore[attr-defined]
+                and self.editor_id != editor_id  # type: ignore[attr-defined]
+                and (not self.editor_expiration_date  # type: ignore[attr-defined]
+                     or self.editor_expiration_date > datetime.now())):  # type: ignore[attr-defined]
+            reasons._add_reason(self.id, DataNodeEditInProgress(self.id))  # type: ignore[attr-defined]
+            return reasons
+
+        up_path = pathlib.Path(path)
         try:
-            upload_data = self._read_from_path(str(upload_path))
+            upload_data = self._read_from_path(str(up_path))
         except Exception as err:
-            self.__logger.error(f"Error while uploading {upload_path.name} to data node {self.id}:")  # type: ignore[attr-defined]
+            self.__logger.error(f"Error uploading `{up_path.name}` to data "
+                                f"node `{self.id}`:")  # type: ignore[attr-defined]
             self.__logger.error(f"Error: {err}")
-            reason_collection._add_reason(self.id, UploadFileCanNotBeRead(upload_path.name, self.id))  # type: ignore[attr-defined]
-            return reason_collection
+            reasons._add_reason(self.id, UploadFileCanNotBeRead(up_path.name, self.id))  # type: ignore[attr-defined]
+            return reasons

         if upload_checker is not None:
             try:
-                can_upload = upload_checker(upload_path.name, upload_data)
+                can_upload = upload_checker(up_path.name, upload_data)
             except Exception as err:
                 self.__logger.error(
-                    f"Error while checking if {upload_path.name} can be uploaded to data node {self.id}"  # type: ignore[attr-defined]
-                    f" using the upload checker {upload_checker.__name__}: {err}"
-                )
+                    f"Error with the upload checker `{upload_checker.__name__}` "
+                    f"while checking `{up_path.name}` file for upload to the data "
+                    f"node `{self.id}`:")  # type: ignore[attr-defined]
+                self.__logger.error(f"Error: {err}")
                 can_upload = False

             if not can_upload:
-                reason_collection._add_reason(self.id, InvalidUploadFile(upload_path.name, self.id))  # type: ignore[attr-defined]
-                return reason_collection
+                reasons._add_reason(self.id, InvalidUploadFile(up_path.name, self.id))  # type: ignore[attr-defined]
+                return reasons

-        shutil.copy(upload_path, self.path)
+        shutil.copy(up_path, self.path)

-        self.track_edit(timestamp=datetime.now())  # type: ignore[attr-defined]
+        self.track_edit(timestamp=datetime.now(),  # type: ignore[attr-defined]
+                        editor_id=editor_id,
+                        comment=comment, **kwargs)
         self.unlock_edit()  # type: ignore[attr-defined]

         _DataManagerFactory._build_manager()._set(self)  # type: ignore[arg-type]

-        return reason_collection
+        return reasons

     def _read_from_path(self, path: Optional[str] = None, **read_kwargs) -> Any:
         raise NotImplementedError
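Review note: the heart of this change is the ownership guard at the top of _upload(): when a different editor holds an unexpired lock on the data node, the upload is refused with a DataNodeEditInProgress reason instead of overwriting the file. A minimal sketch of that condition, extracted for readability (the Node dataclass and the helper name are hypothetical, not taipy API):

    from dataclasses import dataclass
    from datetime import datetime, timedelta
    from typing import Optional

    @dataclass
    class Node:
        edit_in_progress: bool = False
        editor_id: Optional[str] = None
        editor_expiration_date: Optional[datetime] = None

    def locked_by_another_editor(node: Node, editor_id: Optional[str]) -> bool:
        """True when a different editor holds a lock that has not expired."""
        if not editor_id or not node.edit_in_progress:
            return False  # anonymous callers and unlocked nodes are never blocked
        if node.editor_id == editor_id:
            return False  # the caller owns the lock
        # A missing expiration date means the lock never expires on its own.
        return node.editor_expiration_date is None or node.editor_expiration_date > datetime.now()

    node = Node(edit_in_progress=True, editor_id="alice",
                editor_expiration_date=datetime.now() + timedelta(minutes=30))
    assert locked_by_another_editor(node, "bob")        # refused: alice holds the lock
    assert not locked_by_another_editor(node, "alice")  # the owner may proceed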
27 changes: 22 additions & 5 deletions taipy/core/data/data_node.py
@@ -422,37 +422,54 @@ def read(self) -> Any:
             )
         return None

-    def append(self, data, editor_id: Optional[str] = None, **kwargs: Any):
+    def append(self, data, editor_id: Optional[str] = None, comment: Optional[str] = None, **kwargs: Any):
         """Append some data to this data node.

         Arguments:
             data (Any): The data to write to this data node.
             editor_id (str): An optional identifier of the editor.
+            comment (str): An optional comment to attach to the edit document.
             **kwargs (Any): Extra information to attach to the edit document
                 corresponding to this write.
         """
         from ._data_manager_factory import _DataManagerFactory

+        if (editor_id
+                and self.edit_in_progress
+                and self.editor_id != editor_id
+                and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())):
+            raise DataNodeIsBeingEdited(self.id, self.editor_id)
         self._append(data)
-        self.track_edit(editor_id=editor_id, **kwargs)
+        self.track_edit(editor_id=editor_id, comment=comment, **kwargs)
         self.unlock_edit()
         _DataManagerFactory._build_manager()._set(self)

-    def write(self, data, job_id: Optional[JobId] = None, **kwargs: Any):
+    def write(self,
+              data,
+              job_id: Optional[JobId] = None,
+              editor_id: Optional[str] = None,
+              comment: Optional[str] = None,
+              **kwargs: Any):
         """Write some data to this data node.

         Once the data is written, the data node is unlocked and the edit is tracked.

         Arguments:
             data (Any): The data to write to this data node.
             job_id (JobId): An optional identifier of the job writing the data.
+            editor_id (str): An optional identifier of the editor writing the data.
+            comment (str): An optional comment to attach to the edit document.
             **kwargs (Any): Extra information to attach to the edit document
                 corresponding to this write.
         """
         from ._data_manager_factory import _DataManagerFactory

+        if (editor_id
+                and self.edit_in_progress
+                and self.editor_id != editor_id
+                and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())):
+            raise DataNodeIsBeingEdited(self.id, self.editor_id)
         self._write(data)
-        self.track_edit(job_id=job_id, **kwargs)
+        self.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
         self.unlock_edit()
         _DataManagerFactory._build_manager()._set(self)

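Review note: write() and append() apply the same guard but raise DataNodeIsBeingEdited rather than returning reasons, since a programmatic caller should not lose a write silently. A self-contained toy model of the lock protocol, mirroring the committed behavior (ToyNode and its members are illustrative, not taipy's API surface):

    from datetime import datetime
    from typing import Any, List, Optional

    class DataNodeIsBeingEdited(Exception):
        """Stand-in for taipy's exception of the same name."""

    class ToyNode:
        def __init__(self) -> None:
            self.edit_in_progress = False
            self.editor_id: Optional[str] = None
            self.editor_expiration_date: Optional[datetime] = None
            self.edits: List[dict] = []
            self._data: Any = None

        def lock_edit(self, editor_id: str) -> None:
            self.edit_in_progress, self.editor_id = True, editor_id

        def unlock_edit(self) -> None:
            self.edit_in_progress, self.editor_id = False, None

        def write(self, data: Any, editor_id: Optional[str] = None,
                  comment: Optional[str] = None, **kwargs: Any) -> None:
            # Reject the write when another editor holds an unexpired lock.
            if (editor_id and self.edit_in_progress and self.editor_id != editor_id
                    and (not self.editor_expiration_date
                         or self.editor_expiration_date > datetime.now())):
                raise DataNodeIsBeingEdited(self.editor_id)
            self._data = data  # the write itself
            self.edits.append({"editor_id": editor_id, "comment": comment, **kwargs})
            self.unlock_edit()

    node = ToyNode()
    node.lock_edit("alice")
    try:
        node.write([1], editor_id="bob", comment="refresh")  # rejected: alice holds the lock
    except DataNodeIsBeingEdited:
        pass
    node.write([1], editor_id="alice", comment="refresh")  # allowed, tracked, then unlocked
    assert node.edits[-1]["comment"] == "refresh" and not node.edit_in_progress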
21 changes: 13 additions & 8 deletions taipy/gui_core/_context.py
@@ -1020,10 +1020,10 @@ def get_data_node_history(self, id: str):

     def __check_readable_editable(self, state: State, id: str, ent_type: str, var: t.Optional[str]):
         if not (reason := is_readable(t.cast(ScenarioId, id))):
-            _GuiCoreContext.__assign_var(state, var, f"{ent_type} {id} is not readable: {_get_reason(reason)}.")
+            _GuiCoreContext.__assign_var(state, var, f"{ent_type} {id} is not readable: {_get_reason(reason)}")
             return False
         if not (reason := is_editable(t.cast(ScenarioId, id))):
-            _GuiCoreContext.__assign_var(state, var, f"{ent_type} {id} is not editable: {_get_reason(reason)}.")
+            _GuiCoreContext.__assign_var(state, var, f"{ent_type} {id} is not editable: {_get_reason(reason)}")
             return False
         return True

@@ -1033,7 +1033,7 @@ def update_data(self, state: State, id: str, payload: t.Dict[str, str]):
         if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict):
             return
         data = t.cast(dict, args[0])
-        error_var = payload.get("error_id")
+        error_var = data.get("error_id")
         entity_id = t.cast(str, data.get(_GuiCoreContext.__PROP_ENTITY_ID))
         if not self.__check_readable_editable(state, entity_id, "Data node", error_var):
             return
@@ -1049,9 +1049,9 @@ def update_data(self, state: State, id: str, payload: t.Dict[str, str]):
                     else float(val)
                     if data.get("type") == "float"
                     else data.get("value"),
+                    editor_id=self.gui._get_client_id(),
                     comment=t.cast(dict, data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT)),
                 )
-                entity.unlock_edit(self.gui._get_client_id())
                 _GuiCoreContext.__assign_var(state, error_var, "")
             except Exception as e:
                 _GuiCoreContext.__assign_var(state, error_var, f"Error updating Data node value. {e}")
@@ -1135,7 +1135,9 @@ def tabular_data_edit(self, state: State, var_name: str, payload: dict):  # noqa
"Error updating data node tabular value: type does not support at[] indexer.",
)
if new_data is not None:
datanode.write(new_data, comment=user_data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT))
datanode.write(new_data,
editor_id=self.gui._get_client_id(),
comment=user_data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT))
_GuiCoreContext.__assign_var(state, error_var, "")
except Exception as e:
_GuiCoreContext.__assign_var(state, error_var, f"Error updating data node tabular value. {e}")
@@ -1222,18 +1224,19 @@ def on_dag_select(self, state: State, id: str, payload: t.Dict[str, str]):

     def on_file_action(self, state: State, id: str, payload: t.Dict[str, t.Any]):
         args = t.cast(list, payload.get("args"))
         if args is None or not isinstance(args, list) or len(args) < 1 or not isinstance(args[0], dict):
             return
         act_payload = t.cast(t.Dict[str, str], args[0])
         dn_id = t.cast(DataNodeId, act_payload.get("id"))
         error_id = act_payload.get("error_id", "")
         if reason := is_readable(dn_id):
             try:
                 dn = t.cast(_FileDataNodeMixin, core_get(dn_id))
                 if act_payload.get("action") == "export":
-                    path = dn._get_downloadable_path()
-                    if path:
+                    if reason := dn.is_downloadable():
+                        path = dn._get_downloadable_path()
                         self.gui._download(Path(path), dn_id)
                     else:
-                        reason = dn.is_downloadable()
                         state.assign(
                             error_id,
                             "Data unavailable: "
@@ -1246,6 +1249,8 @@ def on_file_action(self, state: State, id: str, payload: t.Dict[str, t.Any]):
                     reason := dn._upload(
                         act_payload.get("path", ""),
                         t.cast(t.Callable[[str, t.Any], bool], checker) if callable(checker) else None,
+                        editor_id=self.gui._get_client_id(),
+                        comment=None,
                     )
                 ):
                     state.assign(error_id, f"Data unavailable: {reason.reasons}")
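Review note: on_file_action now asks is_downloadable() before resolving the path, so the blocking reason reaches the error variable instead of being recomputed in the failure branch, and uploads are attributed to the client through editor_id. A condensed sketch of the reworked export branch (dn is assumed to mix in _FileDataNodeMixin; notify and download stand in for state.assign and self.gui._download):

    def export(dn, notify, download):
        # An empty ReasonCollection is truthy, meaning the download is allowed.
        if reason := dn.is_downloadable():
            download(dn._get_downloadable_path())
        else:
            notify(f"Data unavailable: {reason.reasons}")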
21 changes: 19 additions & 2 deletions tests/core/data/test_csv_data_node.py
@@ -251,6 +251,22 @@ def test_upload(self, csv_file, tmpdir_factory):
         assert dn.last_edit_date > old_last_edit_date
         assert dn.path == _normalize_path(old_csv_path)  # The path of the dn should not change

+    def test_upload_fails_if_data_node_locked(self, csv_file, tmpdir_factory):
+        old_csv_path = tmpdir_factory.mktemp("data").join("df.csv").strpath
+        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])
+
+        dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": old_csv_path, "exposed_type": "pandas"})
+        dn.write(old_data)
+        upload_content = pd.read_csv(csv_file)
+        dn.lock_edit("editor_id_1")
+
+        reasons = dn._upload(csv_file, editor_id="editor_id_2")
+        assert not reasons
+
+        assert dn._upload(csv_file, editor_id="editor_id_1")
+
+        assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
+
     def test_upload_with_upload_check_with_exception(self, csv_file, tmpdir_factory, caplog):
         old_csv_path = tmpdir_factory.mktemp("data").join("df.csv").strpath
         dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": old_csv_path, "exposed_type": "pandas"})
@@ -261,8 +277,9 @@ def check_with_exception(upload_path, upload_data):
         reasons = dn._upload(csv_file, upload_checker=check_with_exception)
         assert bool(reasons) is False
         assert (
-            f"Error while checking if df.csv can be uploaded to data node {dn.id} using "
-            "the upload checker check_with_exception: An error with check_with_exception" in caplog.text
+            f"Error with the upload checker `check_with_exception` "
+            f"while checking `df.csv` file for upload to the data "
+            f"node `{dn.id}`:" in caplog.text
         )

     def test_upload_with_upload_check_pandas(self, csv_file, tmpdir_factory):
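Review note: the new test leans on the ReasonCollection convention this commit uses throughout: an empty collection means "allowed" and is truthy, while a populated one is falsy, so "assert not reasons" asserts that the upload was refused. A small sketch of the convention (import path inferred from the relative imports in the first file of this diff; _add_reason is an internal API, used here only for illustration):

    from taipy.core.reason import DataNodeEditInProgress, ReasonCollection

    rc = ReasonCollection()
    assert bool(rc)  # no reasons recorded: the guarded operation is allowed
    rc._add_reason("DATANODE_demo_id", DataNodeEditInProgress("DATANODE_demo_id"))
    assert not rc    # at least one reason recorded: refused; details in rc.reasons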
