From 7b70dad46bbadac461d880c1d1dc87ba174be26a Mon Sep 17 00:00:00 2001 From: Igor Davidyuk Date: Thu, 12 Sep 2024 16:24:31 +0300 Subject: [PATCH] Project access refactoring (#481) * fixes to import export module Signed-off-by: Igor-Davidyuk * add option to get project by id only Signed-off-by: Igor-Davidyuk * tabulate projects options output Signed-off-by: Igor-Davidyuk * refactor geti class Signed-off-by: Igor-Davidyuk * update project client Signed-off-by: Igor-Davidyuk * fix unit tests Signed-off-by: Igor-Davidyuk * update benchmarker Signed-off-by: Igor-Davidyuk * fix integration tests Signed-off-by: Igor-Davidyuk * fixes to notebooks Signed-off-by: Igor-Davidyuk * fixture fixes Signed-off-by: Igor-Davidyuk * fix test project finalizers Signed-off-by: Igor-Davidyuk * fix nightly tests Signed-off-by: Igor-Davidyuk * fix example scripts Signed-off-by: Igor-Davidyuk * Update geti_sdk/geti.py Co-authored-by: Ludo Cornelissen * Update geti_sdk/rest_clients/project_client/project_client.py Co-authored-by: Ludo Cornelissen * expand updated methods docstrings Signed-off-by: Igor-Davidyuk --------- Signed-off-by: Igor-Davidyuk Co-authored-by: Ludo Cornelissen --- examples/upload_and_predict_from_numpy.py | 6 +- .../upload_and_predict_media_from_folder.py | 4 +- geti_sdk/benchmarking/benchmarker.py | 13 +- geti_sdk/geti.py | 103 +++++------- .../import_export/import_export_module.py | 16 +- geti_sdk/import_export/tus_uploader.py | 9 +- .../project_client/project_client.py | 155 ++++++++++-------- notebooks/001_create_project.ipynb | 6 +- notebooks/003_upload_and_predict_image.ipynb | 2 +- notebooks/005_modify_image.ipynb | 2 +- notebooks/006_reconfigure_task.ipynb | 4 +- notebooks/007_train_project.ipynb | 4 +- notebooks/011_benchmarking_models.ipynb | 2 +- notebooks/014_asynchronous_inference.ipynb | 2 +- tests/fixtures/demos.py | 18 +- tests/fixtures/projects.py | 4 +- tests/fixtures/unit_tests/benchmarker.py | 10 +- tests/helpers/finalizers.py | 11 +- tests/helpers/project_helpers.py | 2 +- tests/helpers/project_service.py | 4 +- tests/nightly/demos/test_demo_projects.py | 3 +- tests/nightly/test_anomaly_classification.py | 3 +- tests/nightly/test_classification.py | 3 +- tests/nightly/test_nightly_project.py | 6 +- tests/pre-merge/integration/test_geti.py | 33 ++-- .../unit/benchmarking/test_benchmarker.py | 25 ++- tests/pre-merge/unit/test_geti_unit.py | 2 +- 27 files changed, 214 insertions(+), 238 deletions(-) diff --git a/examples/upload_and_predict_from_numpy.py b/examples/upload_and_predict_from_numpy.py index 4e73adf6..7d9a4811 100644 --- a/examples/upload_and_predict_from_numpy.py +++ b/examples/upload_and_predict_from_numpy.py @@ -62,7 +62,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray: rotated_image = rotate_image(image=numpy_image, angle=20) # Make sure that the project exists - ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME) + project = ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME) print( "Uploading and predicting example image now... 
The prediction results will be " @@ -71,7 +71,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray: # We can upload and predict the resulting array directly: sc_image, image_prediction = geti.upload_and_predict_image( - project_name=PROJECT_NAME, + project=project, image=rotated_image, visualise_output=False, delete_after_prediction=DELETE_AFTER_PREDICTION, @@ -100,7 +100,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray: print("Video generated, retrieving predictions...") # Create video, upload and predict from the list of frames sc_video, video_frames, frame_predictions = geti.upload_and_predict_video( - project_name=PROJECT_NAME, + project=project, video=rotation_video, frame_stride=1, visualise_output=False, diff --git a/examples/upload_and_predict_media_from_folder.py b/examples/upload_and_predict_media_from_folder.py index fb954d77..34407b64 100644 --- a/examples/upload_and_predict_media_from_folder.py +++ b/examples/upload_and_predict_media_from_folder.py @@ -38,11 +38,11 @@ # -------------------------------------------------- # Make sure that the specified project exists on the server - ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME) + project = ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME) # Upload the media in the folder and generate predictions geti.upload_and_predict_media_folder( - project_name=PROJECT_NAME, + project=project, media_folder=FOLDER_WITH_MEDIA, delete_after_prediction=DELETE_AFTER_PREDICTION, output_folder=OUTPUT_FOLDER, diff --git a/geti_sdk/benchmarking/benchmarker.py b/geti_sdk/benchmarking/benchmarker.py index bce2f332..3527b71a 100644 --- a/geti_sdk/benchmarking/benchmarker.py +++ b/geti_sdk/benchmarking/benchmarker.py @@ -53,7 +53,7 @@ class Benchmarker: def __init__( self, geti: Geti, - project: Union[str, Project], + project: Project, precision_levels: Optional[Sequence[str]] = None, models: Optional[Sequence[Model]] = None, algorithms: Optional[Sequence[str]] = None, @@ -83,7 +83,7 @@ def __init__( be called after initialization. :param geti: Geti instance on which the project to use for benchmarking lives - :param project: Project or project name to use for the benchmarking. The + :param project: Project to use for the benchmarking. The project must exist on the specified Geti instance :param precision_levels: List of model precision levels to run the benchmarking for. Throughput will be measured for each precision level @@ -111,11 +111,8 @@ def __init__( on. """ self.geti = geti - if isinstance(project, str): - project_name = project - else: - project_name = project.name - self.project = geti.get_project(project_name) + # Update project object to get the latest project details + self.project = self.geti.get_project(project_id=project.id) logging.info( f"Setting up Benchmarker for Intel® Geti™ project `{self.project.name}`." 
) @@ -501,7 +498,7 @@ def prepare_benchmark(self, working_directory: os.PathLike = "."): output_folder = os.path.join(working_directory, f"deployment_{index}") with suppress_log_output(): self.geti.deploy_project( - project_name=self.project.name, + project=self.project, output_folder=output_folder, models=opt_models, ) diff --git a/geti_sdk/geti.py b/geti_sdk/geti.py index f1d96b34..0715c373 100644 --- a/geti_sdk/geti.py +++ b/geti_sdk/geti.py @@ -252,19 +252,28 @@ def credit_balance(self) -> Optional[int]: return balance.available if balance is not None else None def get_project( - self, project_name: str, project_id: Optional[str] = None + self, + project_name: Optional[str] = None, + project_id: Optional[str] = None, + project: Optional[Project] = None, ) -> Project: """ - Return the Intel® Geti™ project named `project_name`, if any. If no project by - that name is found on the Intel® Geti™ server, this method will raise a - KeyError. - - :param project_name: Name of the project to retrieve - :raises: KeyError if project named `project_name` is not found on the server - :return: Project identified by `project_name` + Return the Intel® Geti™ project by name or ID, if any. + If a project object is passed, the method will return the updated object. + If no project by that name is found on the Intel® Geti™ server, + this method will raise a KeyError. + + :param project_name: Name of the project to retrieve. + :param project_id: ID of the project to retrieve. If not specified, the + project with name `project_name` will be retrieved. + :param project: Project object to update. If provided, the associated `project_id` + will be used to update the project object. + :raises: KeyError if the project identified by one of the arguments is not found on the server + :raises: ValueError if there are several projects on the server named `project_name` + :return: Project identified by one of the arguments. """ - project = self.project_client.get_project_by_name( - project_name=project_name, project_id=project_id + project = self.project_client.get_project( + project_name=project_name, project_id=project_id, project=project ) if project is None: raise KeyError( @@ -275,8 +284,7 @@ def get_project( def download_project_data( self, - project_name: str, - project_id: Optional[str] = None, + project: Project, target_folder: Optional[str] = None, include_predictions: bool = False, include_active_models: bool = False, @@ -332,7 +340,7 @@ def download_project_data( Downloading a project may take a substantial amount of time if the project dataset is large. - :param project_name: Name of the project to download + :param project: Project object to download :param target_folder: Path to the local folder in which the project data should be saved. If not specified, a new directory will be created inside the current working directory. 
The name of the resulting directory will be @@ -354,7 +362,7 @@ def download_project_data( regarding the downloaded project """ project = self.import_export_module.download_project_data( - project=self.get_project(project_name=project_name, project_id=project_id), + project=project, target_folder=target_folder, include_predictions=include_predictions, include_active_models=include_active_models, @@ -363,7 +371,7 @@ def download_project_data( # Download deployment if include_deployment: logging.info("Creating deployment for project...") - self.deploy_project(project.name, output_folder=target_folder) + self.deploy_project(project, output_folder=target_folder) logging.info(f"Project '{project.name}' was downloaded successfully.") return project @@ -459,8 +467,7 @@ def upload_all_projects(self, target_folder: str) -> List[Project]: def export_project( self, filepath: os.PathLike, - project_name: str, - project_id: Optional[str] = None, + project: Project, ) -> None: """ Export a project with name `project_name` to the file specified by `filepath`. @@ -468,19 +475,15 @@ def export_project( and metadata required for project import to another instance of the Intel® Geti™ platform. :param filepath: Path to the file to save the project to - :param project_name: Name of the project to export - :param project_id: Optional ID of the project to export. If not specified, the - project with name `project_name` will be exported. + :param project: Project object to export """ - if project_id is None: - project_id = self.get_project(project_name=project_name).id - if project_id is None: + if project.id is None: raise ValueError( - f"Could not retrieve project ID for project '{project_name}'." - "Please specify the project ID explicitly." + f"Could not retrieve project ID for project '{project.name}'." + "Please reinitialize the project object." ) self.import_export_module.export_project( - project_id=project_id, filepath=filepath + project_id=project.id, filepath=filepath ) def import_project( @@ -523,7 +526,7 @@ def export_dataset( in the dataset, False to only include media with annotations. Defaults to False. """ - if type(export_format) is str: + if isinstance(export_format, str): export_format = DatasetFormat[export_format] self.import_export_module.export_dataset( project=project, @@ -858,7 +861,7 @@ def create_task_chain_project_from_dataset( def upload_and_predict_media_folder( self, - project_name: str, + project: Project, media_folder: str, output_folder: Optional[str] = None, delete_after_prediction: bool = False, @@ -867,7 +870,7 @@ def upload_and_predict_media_folder( ) -> bool: """ Upload a folder with media (images, videos or both) from local disk at path - `target_folder` to the project with name `project_name` on the Intel® Geti™ + `target_folder` to the project provided with the `project` argument on the Intel® Geti™ server. After the media upload is complete, predictions will be downloaded for all media in the folder. This method will create a 'predictions' directory in @@ -877,7 +880,7 @@ def upload_and_predict_media_folder( removed from the project on the Intel® Geti™ server after the predictions have been downloaded. - :param project_name: Name of the project to upload media to + :param project: Project object to upload the media to :param media_folder: Path to the folder to upload media from :param output_folder: Path to save the predictions to. 
If not specified, this method will create a folder named '_predictions' on @@ -892,16 +895,6 @@ def upload_and_predict_media_folder( :return: True if all media was uploaded, and predictions for all media were successfully downloaded. False otherwise """ - # Obtain project details from cluster - try: - project = self.get_project(project_name=project_name) - except ValueError: - logging.info( - f"Project '{project_name}' was not found on the cluster. Aborting " - f"media upload." - ) - return False - # Upload images image_client = ImageClient( session=self.session, workspace_id=self.workspace_id, project=project @@ -927,7 +920,7 @@ def upload_and_predict_media_folder( ) if not prediction_client.ready_to_predict: logging.info( - f"Project '{project_name}' is not ready to make predictions, likely " + f"Project '{project.name}' is not ready to make predictions, likely " f"because one of the tasks in the task chain does not have a " f"trained model yet. Aborting prediction." ) @@ -965,17 +958,17 @@ def upload_and_predict_media_folder( def upload_and_predict_image( self, - project_name: str, + project: Project, image: Union[np.ndarray, Image, VideoFrame, str, os.PathLike], visualise_output: bool = True, delete_after_prediction: bool = False, dataset_name: Optional[str] = None, ) -> Tuple[Image, Prediction]: """ - Upload a single image to a project named `project_name` on the Intel® Geti™ + Upload a single image to a project on the Intel® Geti™ server, and return a prediction for it. - :param project_name: Name of the project to upload the image to + :param project: Project object to upload the image to :param image: Image, numpy array representing an image, or filepath to an image to upload and get a prediction for :param visualise_output: True to show the resulting prediction, overlayed on @@ -989,8 +982,6 @@ def upload_and_predict_image( - Image object representing the image that was uploaded - Prediction for the image """ - project = self.get_project(project_name=project_name) - # Get the dataset to upload to dataset: Optional[Dataset] = None if dataset_name is not None: @@ -1030,7 +1021,7 @@ def upload_and_predict_image( ) if not prediction_client.ready_to_predict: raise ValueError( - f"Project '{project_name}' is not ready to make predictions. At least " + f"Project '{project.name}' is not ready to make predictions. At least " f"one of the tasks in the task chain does not have any models trained." ) prediction = prediction_client.get_image_prediction(uploaded_image) @@ -1048,21 +1039,21 @@ def upload_and_predict_image( def upload_and_predict_video( self, - project_name: str, + project: Project, video: Union[Video, str, os.PathLike, Union[Sequence[np.ndarray], np.ndarray]], frame_stride: Optional[int] = None, visualise_output: bool = True, delete_after_prediction: bool = False, ) -> Tuple[Video, MediaList[VideoFrame], List[Prediction]]: """ - Upload a single video to a project named `project_name` on the Intel® Geti™ + Upload a single video to a project on the Intel® Geti™ server, and return a list of predictions for the frames in the video. The parameter 'frame_stride' is used to control the stride for frame extraction. Predictions are only generated for the extracted frames. So to get predictions for all frames, `frame_stride=1` can be passed. - :param project_name: Name of the project to upload the image to + :param project: Project to upload the video to :param video: Video or filepath to a video to upload and get predictions for. 
Can also be a 4D numpy array or a list of 3D numpy arrays, shaped such that the array dimensions represent `frames x width x height x channels`, @@ -1081,8 +1072,6 @@ def upload_and_predict_video( have been generated - List of Predictions for the Video """ - project = self.get_project(project_name=project_name) - # Upload the video video_client = VideoClient( session=self.session, workspace_id=self.workspace_id, project=project @@ -1105,7 +1094,7 @@ def upload_and_predict_video( else: video_data = video if needs_upload: - logging.info(f"Uploading video to project '{project_name}'...") + logging.info(f"Uploading video to project '{project.name}'...") uploaded_video = video_client.upload_video(video=video_data) else: uploaded_video = video @@ -1116,7 +1105,7 @@ def upload_and_predict_video( ) if not prediction_client.ready_to_predict: raise ValueError( - f"Project '{project_name}' is not ready to make predictions. At least " + f"Project '{project.name}' is not ready to make predictions. At least " f"one of the tasks in the task chain does not have any models trained." ) if frame_stride is None: @@ -1141,7 +1130,7 @@ def upload_and_predict_video( def deploy_project( self, - project_name: str, + project: Project, output_folder: Optional[Union[str, os.PathLike]] = None, models: Optional[Sequence[BaseModel]] = None, enable_explainable_ai: bool = False, @@ -1156,7 +1145,7 @@ def deploy_project( for each task in the project. However, it is possible to specify a particular model to use, by passing it in the list of `models` as input to this method. - :param project_name: Name of the project to deploy + :param project: Project object to deploy :param output_folder: Path to a folder on local disk to which the Deployment should be downloaded. If no path is specified, the deployment will not be saved. @@ -1174,8 +1163,6 @@ def deploy_project( launch an OVMS container serving the models. :return: Deployment for the project """ - project = self.get_project(project_name=project_name) - deployment_client = self._deployment_clients.get(project.id, None) if deployment_client is None: # Create deployment client and add to cache. diff --git a/geti_sdk/import_export/import_export_module.py b/geti_sdk/import_export/import_export_module.py index 0f3b8ac6..fa871744 100644 --- a/geti_sdk/import_export/import_export_module.py +++ b/geti_sdk/import_export/import_export_module.py @@ -75,7 +75,7 @@ def download_project_data( # Download project creation parameters: self.project_client.download_project_info( - project_name=project.name, path_to_folder=target_folder + project=project, path_to_folder=target_folder ) # Download images @@ -280,7 +280,7 @@ def upload_project_data( return project def download_all_projects( - self, target_folder: str, include_predictions: bool = True + self, target_folder: str = "./projects", include_predictions: bool = True ) -> List[Project]: """ Download all projects from the Geti Platform. 
@@ -293,8 +293,6 @@ def download_all_projects( projects = self.project_client.get_all_projects() # Validate or create target_folder - if target_folder is None: - target_folder = os.path.join(".", "projects") os.makedirs(target_folder, exist_ok=True, mode=0o770) logging.info( f"Found {len(projects)} projects in the designated workspace on the " @@ -332,7 +330,7 @@ def upload_all_projects(self, target_folder: str) -> List[Project]: project_folders = [ folder for folder in candidate_project_folders - if ProjectClient.is_project_dir(folder) + if ProjectClient._is_project_dir(folder) ] logging.info( f"Found {len(project_folders)} project data folders in the target " @@ -435,8 +433,7 @@ def import_dataset_as_new_project( logging.info( f"Project '{project_name}' was successfully imported from the dataset." ) - imported_project = self.project_client.get_project_by_name( - project_name=project_name, + imported_project = self.project_client.get_project( project_id=job.metadata.project_id, ) if imported_project is None: @@ -481,8 +478,7 @@ def import_project( ) job = monitor_job(session=self.session, job=job, interval=5) - imported_project = self.project_client.get_project_by_name( - project_name=project_name, + imported_project = self.project_client.get_project( project_id=job.metadata.project_id, ) if imported_project is None: @@ -505,7 +501,7 @@ def _tus_upload_file(self, upload_endpoint: str, filepath: os.PathLike) -> str: ) tus_uploader.upload() file_id = tus_uploader.get_file_id() - if file_id is None or len(file_id) < 2: + if file_id is None: raise RuntimeError("Failed to get file id for project {project_name}.") return file_id diff --git a/geti_sdk/import_export/tus_uploader.py b/geti_sdk/import_export/tus_uploader.py index b68c45ef..a020b7ec 100644 --- a/geti_sdk/import_export/tus_uploader.py +++ b/geti_sdk/import_export/tus_uploader.py @@ -169,9 +169,14 @@ def get_file_id(self) -> Optional[str]: :return: File id. """ - if self.upload_url is None: + if ( + self.upload_url is None + or len(file_id := self.upload_url.split("/")[-1]) < 2 + ): + # We get the file_id from the upload url. If the url is not set or the file_id + # is not valid (may be an empty string if the url is not valid), we return None. return - return self.upload_url.split("/")[-1] + return file_id def upload_chunk(self): """ diff --git a/geti_sdk/rest_clients/project_client/project_client.py b/geti_sdk/rest_clients/project_client/project_client.py index 14b60565..308c40ab 100644 --- a/geti_sdk/rest_clients/project_client/project_client.py +++ b/geti_sdk/rest_clients/project_client/project_client.py @@ -17,6 +17,7 @@ import logging import os import time +import warnings from typing import Any, Dict, List, Optional, Tuple, Union from geti_sdk.data_models import Project, Task, TaskType @@ -97,7 +98,7 @@ def get_all_projects( project_detail_list: List[Project] = [] for project in project_list: try: - project_detail_list.append(self._get_project_detail(project)) + project_detail_list.append(self.get_project_by_id(project.id)) except GetiRequestException as e: if e.status_code == 403: logging.info( @@ -109,57 +110,47 @@ def get_all_projects( return project_list def get_project_by_name( - self, project_name: str, project_id: Optional[str] = None + self, + project_name: str, ) -> Optional[Project]: """ Get a project from the Intel® Geti™ server by project_name. + If multiple projects with the same name exist on the server, this method will + raise a ValueError. 
In that case, please use the `ProjectClient.get_project()` + method and provide a `project_id` to uniquely identify the project. + :param project_name: Name of the project to get - :param project_id: Optional ID of the project to get. Only used if more than - one project named `project_name` exists in the workspace. :raises: ValueError in case multiple projects with the specified name exist on the server, and no `project_id` is provided in order to allow unique identification of the project. :return: Project object containing the data of the project, if the project is - found on the server. Returns None if the project doesn't exist + found on the server. Returns None if the project doesn't exist. """ project_list = self.get_all_projects(get_project_details=False) matches = [project for project in project_list if project.name == project_name] if len(matches) == 1: - return self._get_project_detail(matches[0]) + return self.get_project_by_id(matches[0].id) elif len(matches) > 1: - if project_id is None: - detailed_matches = [ - self._get_project_detail(match) for match in matches - ] - projects_info = [ - ( - f"Name: {p.name}, Type: {p.project_type}, ID: {p.id}, " - f"creation_date: {p.creation_time}" - ) - for p in detailed_matches - ] - raise ValueError( - f"A total of {len(matches)} projects named `{project_name}` were " - f"found in the workspace. Unable to uniquely identify the " - f"desired project. Please provide a `project_id` to ensure the " - f"proper project is returned. The following projects were found:" - f"{projects_info}" - ) - else: - matched_project = next( - (project for project in matches if project.id == project_id), None + detailed_matches = [self.get_project_by_id(match.id) for match in matches] + projects_info = [ + ( + f"Name: {p.name},\t Type: {p.project_type},\t ID: {p.id},\t " + f"creation_date: {p.creation_time}\n" ) - if matched_project is None: - logging.info( - f"Projects with name `{project_name}` were found, but none of " - f"the project ID's `{[p.id for p in matches]}` matches the " - f"requested id `{project_id}`." - ) - return None - else: - return self._get_project_detail(matched_project) + for p in detailed_matches + ] + raise ValueError( + f"A total of {len(matches)} projects named `{project_name}` were " + f"found in the workspace. Unable to uniquely identify the " + f"desired project. Please provide a `project_id` to ensure the " + f"proper project is returned. The following projects were found:" + f"{projects_info}" + ) else: + warnings.warn( + f"Project with name {project_name} was not found on the server." + ) return None def get_or_create_project( @@ -212,40 +203,37 @@ def create_project( project_template = self._create_project_template( project_name=project_name, project_type=project_type, labels=labels ) - project = self.session.get_rest_response( + project_dict = self.session.get_rest_response( url=f"{self.base_url}projects", method="POST", data=project_template ) logging.info("Project created successfully.") - project = ProjectRESTConverter.from_dict(project) + project = ProjectRESTConverter.from_dict(project_dict) self._await_project_ready(project=project) return project - def download_project_info(self, project_name: str, path_to_folder: str) -> None: + def download_project_info(self, project: Project, path_to_folder: str) -> None: """ - Get the project data that can be used for project creation for a project on - the Intel® Geti™ server, named `project_name`. 
From the returned data, the + Get the project data that can be used for project creation on + the Intel® Geti™ server. From the returned data, the method `ProjectClient.get_or_create_project` can create a project on the Intel® Geti™ server. The data is retrieved from the cluster and saved in the target folder `path_to_folder`. - :param project_name: Name of the project to retrieve the data for + :param project: Project to download the data for :param path_to_folder: Target folder to save the project data to. Data will be saved as a .json file named "project.json" :raises ValueError: If the project with `project_name` is not found on the cluster """ - project = self.get_project_by_name(project_name) - if project is None: - raise ValueError( - f"Project with name {project_name} was not found on the server." - ) + # Update the project state + project = self.get_project_by_id(project.id) project_data = ProjectRESTConverter.to_dict(project) os.makedirs(path_to_folder, exist_ok=True, mode=0o770) project_config_path = os.path.join(path_to_folder, "project.json") with open(project_config_path, "w") as file: json.dump(project_data, file, indent=4) logging.info( - f"Project parameters for project '{project_name}' were saved to file " + f"Project parameters for project '{project.name}' were saved to file " f"{project_config_path}." ) @@ -368,7 +356,7 @@ def create_project_from_folder( return created_project @staticmethod - def is_project_dir(path_to_folder: str) -> bool: + def _is_project_dir(path_to_folder: str) -> bool: """ Check if the folder specified in `path_to_folder` is a directory containing valid Intel® Geti™ project data that can be used to upload to an @@ -493,29 +481,22 @@ def _ensure_unique_task_name( return task_name def delete_project( - self, project: Union[str, Project], requires_confirmation: bool = True + self, project: Project, requires_confirmation: bool = True ) -> None: """ - Delete a project. The `project` to delete can either by a Project object or a - string containing the name of the project to delete. + Delete a project. By default, this method will ask for user confirmation before deleting the project. This can be overridden by passing `requires_confirmation = False`. - :param project: Project to delete, either a string containing the project - name or a Project instance + :param project: Project to delete :param requires_confirmation: True to ask for user confirmation before deleting the project, False to delete without confirmation. Defaults to True """ - if isinstance(project, str): - project = self.get_project_by_name(project_name=project) - if not isinstance(project, Project): - raise TypeError(f"{type(project)} is not a valid project type.") - if requires_confirmation: - # Update project details - project = self._get_project_detail(project) + # Update the project details + project = self.get_project_by_id(project.id) if project.datasets is None: project.datasets = [] image_count = 0 @@ -680,19 +661,49 @@ def _await_project_ready( f"seconds)." ) from error - def _get_project_detail(self, project: Union[Project, str]) -> Project: + def get_project_by_id(self, project_id: str) -> Optional[Project]: """ - Fetch the most recent project details from the Intel® Geti™ server + Get a project from the Intel® Geti™ server by project_id. - :param project: Name of the project or Project object representing the project - to get detailed information for. 
- :return: Updated Project object + :param project_id: ID of the project to get + :return: Project object containing the data of the project, if the project is + found on the server. Returns None if the project doesn't exist """ - if isinstance(project, str): - project = self.get_project_by_name(project_name=project) - return project + response = self.session.get_rest_response( + url=f"{self.base_url}projects/{project_id}", method="GET" + ) + return ProjectRESTConverter.from_dict(response) + + def get_project( + self, + project_name: Optional[str] = None, + project_id: Optional[str] = None, + project: Optional[Project] = None, + ) -> Optional[Project]: + """ + Get a project from the Intel® Geti™ server by project_name or project_id, or + update a provided Project object with the latest data from the server. + + :param project_name: Name of the project to get + :param project_id: ID of the project to get + :param project: Project object to update with the latest data from the server + :return: Project object containing the data of the project, if the project is + found on the server. Returns None if the project doesn't exist + """ + # The method prioritize the parameters in the following order: + if project_id is not None: + return self.get_project_by_id(project_id) + elif project is not None: + if project.id is not None: + return self.get_project_by_id(project.id) + else: + return self.get_project_by_name(project_name=project.name) + elif project_name is not None: + return self.get_project_by_name(project_name=project_name) else: - response = self.session.get_rest_response( - url=f"{self.base_url}projects/{project.id}", method="GET" + # No parameters provided + # Warn the user and return None + warnings.warn( + "At least one of the parameters `project_name`, `project_id`, or " + "`project` must be provided." ) - return ProjectRESTConverter.from_dict(response) diff --git a/notebooks/001_create_project.ipynb b/notebooks/001_create_project.ipynb index 781fb572..4d2f5dd2 100644 --- a/notebooks/001_create_project.ipynb +++ b/notebooks/001_create_project.ipynb @@ -225,7 +225,7 @@ "id": "aa289ae0-36bb-40db-afb3-d1c89fb2a9e1", "metadata": {}, "source": [ - "The `project` object that was created by the `project_client.create_project()` method can also be retrieved by calling `project_client.get_project_by_name()`. This is useful if you do not want to create a new project, but would like to interact with an existing project instead" + "The `project` object that was created by the `project_client.create_project()` method can also be retrieved by calling `project_client.get_project()`. 
This is useful if you do not want to create a new project, but would like to interact with an existing project instead" ] }, { @@ -235,7 +235,7 @@ "metadata": {}, "outputs": [], "source": [ - "project = project_client.get_project_by_name(project_name=PROJECT_NAME)\n", + "project = project_client.get_project(project_name=PROJECT_NAME)\n", "print(project.summary)" ] }, @@ -395,7 +395,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.19" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/notebooks/003_upload_and_predict_image.ipynb b/notebooks/003_upload_and_predict_image.ipynb index eb064fee..630e384a 100644 --- a/notebooks/003_upload_and_predict_image.ipynb +++ b/notebooks/003_upload_and_predict_image.ipynb @@ -84,7 +84,7 @@ "metadata": {}, "outputs": [], "source": [ - "project = project_client.get_project_by_name(PROJECT_NAME)\n", + "project = project_client.get_project(project_name=PROJECT_NAME)\n", "image_client = ImageClient(\n", " session=geti.session, workspace_id=geti.workspace_id, project=project\n", ")\n", diff --git a/notebooks/005_modify_image.ipynb b/notebooks/005_modify_image.ipynb index 31b81001..c8a246d1 100644 --- a/notebooks/005_modify_image.ipynb +++ b/notebooks/005_modify_image.ipynb @@ -116,7 +116,7 @@ "metadata": {}, "outputs": [], "source": [ - "project = project_client.get_project_by_name(project_name=\"COCO horse detection demo\")" + "project = project_client.get_project(project_name=\"COCO horse detection demo\")" ] }, { diff --git a/notebooks/006_reconfigure_task.ipynb b/notebooks/006_reconfigure_task.ipynb index 2f61fbc3..b5dc4fc0 100644 --- a/notebooks/006_reconfigure_task.ipynb +++ b/notebooks/006_reconfigure_task.ipynb @@ -48,7 +48,7 @@ "PROJECT_NAME = \"COCO multitask animal demo\"\n", "projects = project_client.list_projects()\n", "\n", - "project = project_client.get_project_by_name(PROJECT_NAME)" + "project = project_client.get_project(PROJECT_NAME)" ] }, { @@ -233,7 +233,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.19" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/notebooks/007_train_project.ipynb b/notebooks/007_train_project.ipynb index a021b1fd..9ae790ba 100644 --- a/notebooks/007_train_project.ipynb +++ b/notebooks/007_train_project.ipynb @@ -65,7 +65,7 @@ "source": [ "PROJECT_NAME = \"COCO multitask animal demo\"\n", "\n", - "project = project_client.get_project_by_name(project_name=PROJECT_NAME)" + "project = project_client.get_project(project_name=PROJECT_NAME)" ] }, { @@ -297,7 +297,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.17" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/notebooks/011_benchmarking_models.ipynb b/notebooks/011_benchmarking_models.ipynb index 197170fa..a2151bdb 100644 --- a/notebooks/011_benchmarking_models.ipynb +++ b/notebooks/011_benchmarking_models.ipynb @@ -44,7 +44,7 @@ "outputs": [], "source": [ "PROJECT_NAME = \"COCO animal detection demo\"\n", - "project = geti.get_project(PROJECT_NAME)" + "project = geti.get_project(project_name=PROJECT_NAME)" ] }, { diff --git a/notebooks/014_asynchronous_inference.ipynb b/notebooks/014_asynchronous_inference.ipynb index 7cc09991..7a520717 100644 --- a/notebooks/014_asynchronous_inference.ipynb +++ b/notebooks/014_asynchronous_inference.ipynb @@ -55,7 +55,7 @@ "geti = Geti(server_config=geti_server_configuration)\n", "\n", "PROJECT_NAME = \"COCO multitask animal demo\"\n", - "project = 
geti.get_project(PROJECT_NAME)" + "project = geti.get_project(project_name=PROJECT_NAME)" ] }, { diff --git a/tests/fixtures/demos.py b/tests/fixtures/demos.py index b16e23e5..c524c075 100644 --- a/tests/fixtures/demos.py +++ b/tests/fixtures/demos.py @@ -62,9 +62,8 @@ def fxt_anomaly_classification_demo_project( ) yield project force_delete_project( - project_name=project_name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) @@ -87,9 +86,8 @@ def fxt_segmentation_demo_project( ) yield project force_delete_project( - project_name=project.name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) @@ -112,9 +110,8 @@ def fxt_detection_to_classification_demo_project( ) yield project force_delete_project( - project_name=project.name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) @@ -137,9 +134,8 @@ def fxt_detection_to_segmentation_demo_project( ) yield project force_delete_project( - project_name=project_name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) @@ -162,9 +158,8 @@ def fxt_classification_demo_project( ) yield project force_delete_project( - project_name=project_name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) @@ -187,9 +182,8 @@ def fxt_detection_demo_project( ) yield project force_delete_project( - project_name=project_name, + project, project_client=fxt_project_client_no_vcr, - project_id=project.id, ) diff --git a/tests/fixtures/projects.py b/tests/fixtures/projects.py index 6b9df649..91e98580 100644 --- a/tests/fixtures/projects.py +++ b/tests/fixtures/projects.py @@ -98,8 +98,8 @@ def fxt_project_finalizer(fxt_project_client: ProjectClient) -> Callable[[str], :var project_name: Name of the project for which to add the finalizer """ - def _project_finalizer(project_name: str, project_id: str) -> None: - force_delete_project(project_name, fxt_project_client, project_id) + def _project_finalizer(project: Project) -> None: + force_delete_project(project, fxt_project_client) return _project_finalizer diff --git a/tests/fixtures/unit_tests/benchmarker.py b/tests/fixtures/unit_tests/benchmarker.py index 86edc42b..69a7fc30 100644 --- a/tests/fixtures/unit_tests/benchmarker.py +++ b/tests/fixtures/unit_tests/benchmarker.py @@ -27,18 +27,17 @@ def fxt_benchmarker( fxt_mocked_geti: Geti, ) -> Benchmarker: _ = mocker.patch( - "geti_sdk.geti.ProjectClient.get_project_by_name", + "geti_sdk.geti.Geti.get_project", return_value=fxt_classification_project, ) _ = mocker.patch("geti_sdk.benchmarking.benchmarker.ModelClient") _ = mocker.patch("geti_sdk.benchmarking.benchmarker.TrainingClient") - project_name = "project name" algorithms_to_benchmark = ("ALGO_1", "ALGO_2") precision_levels = ("PRECISION_1", "PRECISION_2") images = ("path_1", "path_2") yield Benchmarker( geti=fxt_mocked_geti, - project=project_name, + project=mocker.MagicMock(), algorithms=algorithms_to_benchmark, precision_levels=precision_levels, benchmark_images=images, @@ -52,7 +51,7 @@ def fxt_benchmarker_task_chain( fxt_mocked_geti: Geti, ) -> Benchmarker: _ = mocker.patch( - "geti_sdk.geti.ProjectClient.get_project_by_name", + "geti_sdk.geti.Geti.get_project", return_value=fxt_detection_to_classification_project, ) model_client_object_mock = mocker.MagicMock() @@ -64,13 +63,12 @@ def fxt_benchmarker_task_chain( model_client_object_mock.get_all_active_models.return_value = active_models _ = mocker.patch("geti_sdk.benchmarking.benchmarker.TrainingClient") - 
project_name = "project name" precision_levels = ("PRECISION_1", "PRECISION_2") images = ("path_1", "path_2") yield Benchmarker( geti=fxt_mocked_geti, - project=project_name, + project=mocker.MagicMock(), precision_levels=precision_levels, benchmark_images=images, ) diff --git a/tests/helpers/finalizers.py b/tests/helpers/finalizers.py index 2094b4d6..67353acc 100644 --- a/tests/helpers/finalizers.py +++ b/tests/helpers/finalizers.py @@ -13,14 +13,12 @@ # and limitations under the License. import logging import time -from typing import Optional +from geti_sdk.data_models.project import Project from geti_sdk.rest_clients import ProjectClient, TrainingClient -def force_delete_project( - project_name: str, project_client: ProjectClient, project_id: Optional[str] = None -) -> None: +def force_delete_project(project: Project, project_client: ProjectClient) -> None: """ Deletes the project named 'project_name'. If any jobs are running for the project, this finalizer cancels them. @@ -30,17 +28,16 @@ def force_delete_project( :param project_id: Optional ID of the project to delete. This can be useful in case there are multiple projects with the same name in the workspace """ - project = project_client.get_project_by_name(project_name, project_id) try: project_client.delete_project(project=project, requires_confirmation=False) except TypeError: logging.warning( - f"Project {project_name} was not found on the server, it was most " + f"Project {project.name} was not found on the server, it was most " f"likely already deleted." ) except ValueError: logging.error( - f"Unable to delete project '{project_name}' from the server, it " + f"Unable to delete project '{project.name}' from the server, it " f"is most likely locked for deletion due to an operation/training " f"session that is in progress. " f"\n\n Attempting to cancel the job and re-try project deletion." 
diff --git a/tests/helpers/project_helpers.py b/tests/helpers/project_helpers.py index 50f5a3b4..a289a94e 100644 --- a/tests/helpers/project_helpers.py +++ b/tests/helpers/project_helpers.py @@ -94,7 +94,7 @@ def remove_all_test_projects(geti: Geti) -> List[str]: projects_removed: List[str] = [] for project in project_client.get_all_projects(get_project_details=False): if project.name.startswith(PROJECT_PREFIX): - force_delete_project(project.name, project_client, project_id=project.id) + force_delete_project(project, project_client) projects_removed.append(project.name) logging.info(f"{len(projects_removed)} test projects were removed from the server.") return projects_removed diff --git a/tests/helpers/project_service.py b/tests/helpers/project_service.py index 5969eb1f..42e38388 100644 --- a/tests/helpers/project_service.py +++ b/tests/helpers/project_service.py @@ -334,9 +334,7 @@ def delete_project(self): """Deletes the project from the server""" if self._project is not None: with self.vcr_context(f"{self.project.name}_deletion.{CASSETTE_EXTENSION}"): - force_delete_project( - self.project.name, self.project_client, self.project.id - ) + force_delete_project(self.project, self.project_client) self.reset_state() def reset_state(self) -> None: diff --git a/tests/nightly/demos/test_demo_projects.py b/tests/nightly/demos/test_demo_projects.py index 1cbdf7d7..b4851065 100644 --- a/tests/nightly/demos/test_demo_projects.py +++ b/tests/nightly/demos/test_demo_projects.py @@ -160,9 +160,8 @@ def test_ensure_trained_example_project( ) if any_project is not None: force_delete_project( - project_name=non_existing_project_name, + project=any_project, project_client=fxt_project_client_no_vcr, - project_id=any_project.id, ) assert non_existing_project_name not in [ project.name for project in fxt_project_client_no_vcr.get_all_projects() diff --git a/tests/nightly/test_anomaly_classification.py b/tests/nightly/test_anomaly_classification.py index 492d75b2..e6b7e3e4 100644 --- a/tests/nightly/test_anomaly_classification.py +++ b/tests/nightly/test_anomaly_classification.py @@ -23,9 +23,8 @@ def test_project_setup( existing_project = fxt_project_client_no_vcr.get_project_by_name(project_name) if existing_project is not None: force_delete_project( - project_name=project_name, + project=existing_project, project_client=fxt_project_client_no_vcr, - project_id=existing_project.id, ) assert project_name not in [ project.name for project in fxt_project_client_no_vcr.get_all_projects() diff --git a/tests/nightly/test_classification.py b/tests/nightly/test_classification.py index b2688a1a..6c6765c6 100644 --- a/tests/nightly/test_classification.py +++ b/tests/nightly/test_classification.py @@ -89,7 +89,8 @@ def test_export_import_project( # Project is exported assert not os.path.exists(archive_path) fxt_geti_no_vcr.export_project( - project_name=project.name, project_id=project.id, filepath=archive_path + filepath=archive_path, + project=project, ) assert os.path.exists(archive_path) diff --git a/tests/nightly/test_nightly_project.py b/tests/nightly/test_nightly_project.py index ee8022c8..0c1a2fed 100644 --- a/tests/nightly/test_nightly_project.py +++ b/tests/nightly/test_nightly_project.py @@ -127,7 +127,7 @@ def test_upload_and_predict_image( for j in range(n_attempts): try: image, prediction = fxt_geti_no_vcr.upload_and_predict_image( - project_name=project.name, + project=project, image=fxt_image_path, visualise_output=False, delete_after_prediction=False, @@ -160,7 +160,7 @@ def test_deployment( 
deployment_folder = os.path.join(fxt_temp_directory, project.name) deployment = fxt_geti_no_vcr.deploy_project( - project.name, + project, output_folder=deployment_folder, enable_explainable_ai=True, ) @@ -177,7 +177,7 @@ def test_deployment( local_prediction = deployment.infer(image_np) assert isinstance(local_prediction, Prediction) image, online_prediction = fxt_geti_no_vcr.upload_and_predict_image( - project.name, + project, image=image_bgr, delete_after_prediction=True, visualise_output=False, diff --git a/tests/pre-merge/integration/test_geti.py b/tests/pre-merge/integration/test_geti.py index fc7b509f..009716f3 100644 --- a/tests/pre-merge/integration/test_geti.py +++ b/tests/pre-merge/integration/test_geti.py @@ -209,7 +209,7 @@ def test_create_single_task_project_from_dataset( max_threads=1, ) - request.addfinalizer(lambda: fxt_project_finalizer(project_name, project.id)) + request.addfinalizer(lambda: fxt_project_finalizer(project)) @pytest.mark.vcr() @pytest.mark.parametrize( @@ -257,7 +257,7 @@ def test_create_task_chain_project_from_dataset( enable_auto_train=False, max_threads=1, ) - request.addfinalizer(lambda: fxt_project_finalizer(project_name, project.id)) + request.addfinalizer(lambda: fxt_project_finalizer(project)) all_labels = fxt_default_labels + ["block"] for label_name in all_labels: @@ -287,7 +287,7 @@ def test_download_and_upload_project( target_folder = os.path.join(fxt_temp_directory, project.name) fxt_geti.download_project_data( - project.name, + project, target_folder=target_folder, max_threads=1, ) @@ -304,9 +304,7 @@ def test_download_and_upload_project( enable_auto_train=False, max_threads=1, ) - request.addfinalizer( - lambda: fxt_project_finalizer(uploaded_project.name, uploaded_project.id) - ) + request.addfinalizer(lambda: fxt_project_finalizer(uploaded_project)) image_client = ImageClient( session=fxt_geti.session, workspace_id=fxt_geti.workspace_id, @@ -393,7 +391,7 @@ def test_upload_and_predict_image( for j in range(n_attempts): try: image, prediction = fxt_geti.upload_and_predict_image( - project_name=project.name, + project=project, image=fxt_image_path, visualise_output=False, delete_after_prediction=False, @@ -419,7 +417,7 @@ def test_upload_and_predict_video( Verify that the `Geti.upload_and_predict_video` method works as expected """ video, frames, predictions = fxt_geti.upload_and_predict_video( - project_name=fxt_project_service.project.name, + project=fxt_project_service.project, video=fxt_video_path_1_light_bulbs, visualise_output=False, ) @@ -432,15 +430,16 @@ def test_upload_and_predict_video( # Check that invalid project raises a KeyError with pytest.raises(KeyError): + project = fxt_geti.get_project(project_name="invalid_project_name") fxt_geti.upload_and_predict_video( - project_name="invalid_project_name", + project=project, video=fxt_video_path_1_light_bulbs, visualise_output=False, ) # Check that video is not uploaded if it's already in the project video, frames, predictions = fxt_geti.upload_and_predict_video( - project_name=fxt_project_service.project.name, + project=fxt_project_service.project, video=video, visualise_output=False, ) @@ -450,7 +449,7 @@ def test_upload_and_predict_video( new_frames = video.to_frames(frame_stride=50, include_data=True) np_frames = [frame.numpy for frame in new_frames] np_video, frames, predictions = fxt_geti.upload_and_predict_video( - project_name=fxt_project_service.project.name, + project=fxt_project_service.project, video=np_frames, visualise_output=False, delete_after_prediction=True, @@ 
-475,14 +474,14 @@ def test_upload_and_predict_media_folder( image_output_folder = os.path.join(fxt_temp_directory, "inferred_images") video_success = fxt_geti.upload_and_predict_media_folder( - project_name=fxt_project_service.project.name, + project=fxt_project_service.project, media_folder=fxt_video_folder_light_bulbs, output_folder=video_output_folder, delete_after_prediction=True, max_threads=1, ) image_success = fxt_geti.upload_and_predict_media_folder( - project_name=fxt_project_service.project.name, + project=fxt_project_service.project, media_folder=fxt_image_folder_light_bulbs, output_folder=image_output_folder, delete_after_prediction=True, @@ -519,7 +518,7 @@ def test_deployment( for _ in range(n_attempts): try: deployment = fxt_geti.deploy_project( - project.name, + project, output_folder=deployment_folder, enable_explainable_ai=True, ) @@ -538,7 +537,7 @@ def test_deployment( local_prediction = deployment.infer(image_np) assert isinstance(local_prediction, Prediction) image, online_prediction = fxt_geti.upload_and_predict_image( - project.name, + project, image=image_np, delete_after_prediction=True, visualise_output=False, @@ -577,7 +576,7 @@ def test_post_inference_hooks( project = fxt_project_service.project deployment_folder = os.path.join(fxt_temp_directory, project.name) - deployment = fxt_geti.deploy_project(project.name) + deployment = fxt_geti.deploy_project(project) dataset_name = "Test hooks" # Add a GetiDataCollectionHook @@ -670,7 +669,7 @@ def test_download_project_including_models_and_predictions( fxt_temp_directory, project.name + "_all_inclusive" ) fxt_geti.download_project_data( - project_name=project.name, + project=project, target_folder=target_folder, include_predictions=True, include_active_models=True, diff --git a/tests/pre-merge/unit/benchmarking/test_benchmarker.py b/tests/pre-merge/unit/benchmarking/test_benchmarker.py index 14afa952..d150aa20 100644 --- a/tests/pre-merge/unit/benchmarking/test_benchmarker.py +++ b/tests/pre-merge/unit/benchmarking/test_benchmarker.py @@ -32,8 +32,8 @@ def test_initialize( mocker: MockerFixture, ): # Arrange - mock_get_project_by_name = mocker.patch( - "geti_sdk.geti.ProjectClient.get_project_by_name", + mock_get_project = mocker.patch( + "geti_sdk.geti.Geti.get_project", return_value=fxt_classification_project, ) mocked_model_client = mocker.patch( @@ -42,7 +42,7 @@ def test_initialize( mocked_training_client = mocker.patch( "geti_sdk.benchmarking.benchmarker.TrainingClient" ) - project_name = "project name" + project_mock = mocker.MagicMock() algorithms_to_benchmark = ("ALGO_1", "ALGO_2") precision_levels = ("PRECISION_1", "PRECISION_2") images = ("path_1", "path_2") @@ -52,16 +52,14 @@ def test_initialize( # Single task project, benchmarking on images benchmarker = Benchmarker( geti=fxt_mocked_geti, - project=project_name, + project=project_mock, algorithms=algorithms_to_benchmark, precision_levels=precision_levels, benchmark_images=images, ) # Assert - mock_get_project_by_name.assert_called_once_with( - project_name=project_name, project_id=None - ) + mock_get_project.assert_called_once_with(project_id=project_mock.id) mocked_model_client.assert_called_once() mocked_training_client.assert_called_once() assert benchmarker._is_single_task @@ -74,7 +72,7 @@ def test_initialize( with pytest.raises(ValueError): benchmarker = Benchmarker( geti=fxt_mocked_geti, - project=project_name, + project=project_mock, algorithms=algorithms_to_benchmark, precision_levels=precision_levels, benchmark_images=images, @@ -88,8 +86,8 
@@ def test_initialize_task_chain( mocker: MockerFixture, ): # Arrange - mock_get_project_by_name = mocker.patch( - "geti_sdk.geti.ProjectClient.get_project_by_name", + mocker.patch( + "geti_sdk.geti.Geti.get_project", return_value=fxt_detection_to_classification_project, ) fetched_images = (mocker.MagicMock(),) @@ -108,21 +106,18 @@ def test_initialize_task_chain( mocked_training_client = mocker.patch( "geti_sdk.benchmarking.benchmarker.TrainingClient" ) - project_name = "project name" + project_mock = mocker.MagicMock() precision_levels = ["PRECISION_1", "PRECISION_2"] # Act # Multi task project, no media provided benchmarker = Benchmarker( geti=fxt_mocked_geti, - project=project_name, + project=project_mock, precision_levels=precision_levels, ) # Assert - mock_get_project_by_name.assert_called_once_with( - project_name=project_name, project_id=None - ) mock_image_client_get_all.assert_called_once() mocked_model_client.assert_called_once() model_client_object_mock.get_all_active_models.assert_called_once() diff --git a/tests/pre-merge/unit/test_geti_unit.py b/tests/pre-merge/unit/test_geti_unit.py index 45dec52f..faee28fa 100644 --- a/tests/pre-merge/unit/test_geti_unit.py +++ b/tests/pre-merge/unit/test_geti_unit.py @@ -173,7 +173,7 @@ def test_upload_all_projects( for project in fxt_nightly_projects: os.makedirs(os.path.join(target_dir, project.name)) mock_is_project_dir = mocker.patch( - "geti_sdk.geti.ProjectClient.is_project_dir", return_value=True + "geti_sdk.geti.ProjectClient._is_project_dir", return_value=True ) mock_upload_project_data = mocker.patch( "geti_sdk.import_export.import_export_module.GetiIE.upload_project_data"
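
Migration note: below is a minimal usage sketch of the refactored, Project-object-based access pattern introduced by this patch. It is not part of the patch itself. It assumes a configured `geti_server_configuration` object (as used in the notebooks above) and an existing, trained project; the project name, project ID, and image path are illustrative placeholders, and the `ProjectClient` constructor arguments follow the pattern used elsewhere in this diff.

```python
from geti_sdk import Geti
from geti_sdk.rest_clients import ProjectClient

# Assumed to exist: a server configuration object, as in the notebooks above
geti = Geti(server_config=geti_server_configuration)

# Before this patch, most Geti methods accepted `project_name: str` and looked the
# project up internally. Now the Project object is resolved once and passed around.
project = geti.get_project(project_name="COCO animal detection demo")  # raises KeyError if not found
# When several projects share a name, fetch by ID instead (ID is a placeholder):
# project = geti.get_project(project_id="1234567890abcdef")

# Methods that previously took `project_name=` now take the Project object directly
image, prediction = geti.upload_and_predict_image(
    project=project,
    image="path/to/image.jpg",  # illustrative path
    visualise_output=False,
    delete_after_prediction=False,
)
deployment = geti.deploy_project(project, output_folder="deployment")

# ProjectClient.get_project resolves its arguments in priority order:
# project_id, then project, then project_name
project_client = ProjectClient(session=geti.session, workspace_id=geti.workspace_id)
same_project = project_client.get_project(project_name=project.name)
```

Passing the Project object around keeps a single lookup per session, avoids repeated name-based searches, and sidesteps the ambiguity that `get_project_by_name` now reports with a `ValueError` when multiple projects share a name.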