From 6fc93250befdf89da65eb71ee634eaad4601031c Mon Sep 17 00:00:00 2001
From: magueylard
Date: Fri, 18 Oct 2024 13:38:18 +0200
Subject: [PATCH 01/12] datasets deal with datatypes

---
 .../abstract_datasets_serializer.py           |  1 +
 .../filesystem_datasets_serializer.py         |  6 ++-
 .../json_datasets_serializer.py               | 40 ++++++++++---------
 3 files changed, 27 insertions(+), 20 deletions(-)

diff --git a/sostrades_core/datasets/datasets_serializers/abstract_datasets_serializer.py b/sostrades_core/datasets/datasets_serializers/abstract_datasets_serializer.py
index b4bfadee1..dc0bc5640 100644
--- a/sostrades_core/datasets/datasets_serializers/abstract_datasets_serializer.py
+++ b/sostrades_core/datasets/datasets_serializers/abstract_datasets_serializer.py
@@ -25,6 +25,7 @@ class AbstractDatasetsSerializer(abc.ABC):
     Abstract class to inherit in order to build specific datasets connector
     """
     __logger = logging.getLogger(__name__)
+    SOSTRADES_TYPES = {'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe', 'array'}
 
     @abc.abstractmethod
     def convert_from_dataset_data(self, data_name: str, data_value: Any, data_types_dict: dict[str:str]) -> Any:
diff --git a/sostrades_core/datasets/datasets_serializers/filesystem_datasets_serializer.py b/sostrades_core/datasets/datasets_serializers/filesystem_datasets_serializer.py
index 79cfc1014..711f5311f 100644
--- a/sostrades_core/datasets/datasets_serializers/filesystem_datasets_serializer.py
+++ b/sostrades_core/datasets/datasets_serializers/filesystem_datasets_serializer.py
@@ -322,7 +322,11 @@ def __serialize_into_filesystem(self, serialization_function: Callable[[str, Any
 
     def _deserialize_dataframe(self, data_value: str, data_name: str = None) -> pd.DataFrame:
         # NB: dataframe csv deserialization as in webapi
-        return self.__deserialize_from_filesystem(_load_dataframe, data_value)
+        try:
+            return self.__deserialize_from_filesystem(_load_dataframe, data_value)
+        except Exception as error:
+            self.__logger.warning(f"Error while trying to convert data {data_name} with value {data_value} into the type dataframe: {error}")
+            return pd.DataFrame()
 
     def _deserialize_array(self, data_value: str) -> np.ndarray:
         # NB: to be improved with astype(subtype) along subtype management
diff --git a/sostrades_core/datasets/datasets_serializers/json_datasets_serializer.py b/sostrades_core/datasets/datasets_serializers/json_datasets_serializer.py
index bd48a60c0..7df91857a 100644
--- a/sostrades_core/datasets/datasets_serializers/json_datasets_serializer.py
+++ b/sostrades_core/datasets/datasets_serializers/json_datasets_serializer.py
@@ -53,15 +53,16 @@ def convert_from_dataset_data(self, data_name: str, data_value: Any, data_types_
 
         converted_data = ""
         try:
-            if data_type in ['string', 'int', 'float', 'bool', 'list', 'dict']:
-                converted_data = data_value
-            elif data_type == 'dataframe':
-                converted_data = self._deserialize_dataframe(data_value, data_name)
-            elif data_type == 'array':
-                converted_data = self._deserialize_array(data_value)
-            else:
-                converted_data = data_value
-                self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.")
+            if data_type in self.SOSTRADES_TYPES:
+                if data_value is None:
+                    converted_data = data_value
+                elif data_type == 'dataframe':
+                    converted_data = self._deserialize_dataframe(data_value, data_name)
+                elif data_type == 'array':
+                    converted_data = self._deserialize_array(data_value)
+                else:
+                    converted_data = data_value
+                    self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.")
         except Exception as error:
             converted_data = data_value
             self.__logger.warning(f"Error while trying to convert data {data_name} with value {data_value} into the type {data_type}: {error}")
@@ -86,16 +87,17 @@ def convert_to_dataset_data(self, data_name: str, data_value: Any, data_types_di
 
         converted_data = ""
         try:
-            if data_type in ['string', 'int', 'float', 'bool', 'list', 'dict']:
-                converted_data = self._serialize_jsonifiable(data_value, data_name)
-            elif data_type == 'dataframe':
-                # convert dataframe into dict with orient='list' to have {column:values}
-                converted_data = self._serialize_dataframe(data_value, data_name)
-            elif data_type == 'array':
-                converted_data = self._serialize_array(data_value, data_name)
-            else:
-                converted_data = data_value
-                self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.")
+            if data_type in self.SOSTRADES_TYPES:
+                if data_value is None:
+                    converted_data = data_value
+                elif data_type == 'dataframe':
+                    # convert dataframe into dict with orient='list' to have {column:values}
+                    converted_data = self._serialize_dataframe(data_value, data_name)
+                elif data_type == 'array':
+                    converted_data = self._serialize_array(data_value, data_name)
+                else:
+                    converted_data = self._serialize_jsonifiable(data_value, data_name)
+                    self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.")
         except Exception as error:
             converted_data = data_value
             self.__logger.warning(f"Error while trying to convert data {data_name} with value {data_value} into the type {data_type}: {error}")
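
A rough usage sketch of the reworked converter (not part of the patch; the class name JSONDatasetsSerializer is assumed from the module name, and the sample values are made up):

    serializer = JSONDatasetsSerializer()  # assumed class name for json_datasets_serializer.py
    types = {"x": "float", "d": "dataframe", "arr": "array"}

    serializer.convert_from_dataset_data("x", 1.5, types)    # declared type is in SOSTRADES_TYPES -> value passes through
    serializer.convert_from_dataset_data("x", None, types)   # None is now returned as-is, no conversion attempted
    serializer.convert_from_dataset_data("d", {"years": [2023], "x": [1.0]}, types)  # routed to _deserialize_dataframe
    serializer.convert_from_dataset_data("arr", [1.0, 2.0], types)                   # routed to _deserialize_array
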
self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.") except Exception as error: converted_data = data_value self.__logger.warning(f"Error while trying to convert data {data_name} with value {data_value} into the type {data_type}: {error}") @@ -86,16 +87,17 @@ def convert_to_dataset_data(self, data_name: str, data_value: Any, data_types_di converted_data = "" try: - if data_type in ['string', 'int', 'float', 'bool', 'list', 'dict']: - converted_data = self._serialize_jsonifiable(data_value, data_name) - elif data_type == 'dataframe': - # convert dataframe into dict with orient='list' to have {column:values} - converted_data = self._serialize_dataframe(data_value, data_name) - elif data_type == 'array': - converted_data = self._serialize_array(data_value, data_name) - else: - converted_data = data_value - self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.") + if data_type in self.SOSTRADES_TYPES: + if data_value is None: + converted_data = data_value + elif data_type == 'dataframe': + # convert dataframe into dict with orient='list' to have {column:values} + converted_data = self._serialize_dataframe(data_value, data_name) + elif data_type == 'array': + converted_data = self._serialize_array(data_value, data_name) + else: + converted_data = self._serialize_jsonifiable(data_value, data_name) + self.__logger.warning(f"Data type {data_type} for data {data_name} not found in default type list 'string', 'int', 'float', 'bool', 'list', 'dict', 'dataframe, 'array'.") except Exception as error: converted_data = data_value self.__logger.warning(f"Error while trying to convert data {data_name} with value {data_value} into the type {data_type}: {error}") From b577935aa2d2e0f3e2f5bffb1a315418aac484ac Mon Sep 17 00:00:00 2001 From: magueylard Date: Fri, 18 Oct 2024 15:28:46 +0200 Subject: [PATCH 02/12] add warning if no version in dataset info --- sostrades_core/datasets/dataset_info/dataset_info_factory.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sostrades_core/datasets/dataset_info/dataset_info_factory.py b/sostrades_core/datasets/dataset_info/dataset_info_factory.py index f4b5edab1..d4756f37d 100644 --- a/sostrades_core/datasets/dataset_info/dataset_info_factory.py +++ b/sostrades_core/datasets/dataset_info/dataset_info_factory.py @@ -17,6 +17,7 @@ import logging import re from enum import Enum +from warnings import warn from sostrades_core.datasets.dataset_info.dataset_info_v0 import DatasetInfoV0 from sostrades_core.datasets.dataset_info.dataset_info_v1 import DatasetInfoV1 @@ -61,5 +62,7 @@ def get_dataset_info_version(cls, dataset_mapping_key: str) -> DatasetInfoSerial version = DatasetInfoSerializerVersion.V0 if match: version = DatasetInfoSerializerVersion.get_enum_value(match.group(1)) + else: + warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning) # noqa: B028 return version From 7ad4aeba590944baa23d12e643e08f7f368f4b3e Mon Sep 17 00:00:00 2001 From: magueylard Date: Mon, 28 Oct 2024 10:06:10 +0100 Subject: [PATCH 03/12] WIP try to fix data memory leak on gunicorn --- sostrades_core/execution_engine/data_manager.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sostrades_core/execution_engine/data_manager.py b/sostrades_core/execution_engine/data_manager.py index a60d63548..e24bd7a94 
From 7ad4aeba590944baa23d12e643e08f7f368f4b3e Mon Sep 17 00:00:00 2001
From: magueylard
Date: Mon, 28 Oct 2024 10:06:10 +0100
Subject: [PATCH 03/12] WIP try to fix data memory leak on gunicorn

---
 sostrades_core/execution_engine/data_manager.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sostrades_core/execution_engine/data_manager.py b/sostrades_core/execution_engine/data_manager.py
index a60d63548..e24bd7a94 100644
--- a/sostrades_core/execution_engine/data_manager.py
+++ b/sostrades_core/execution_engine/data_manager.py
@@ -14,6 +14,7 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import gc
 import logging
 from copy import copy, deepcopy
 from dataclasses import dataclass
@@ -525,6 +526,9 @@ def apply_parameter_change(self,
                                          date=datetime.now(),
                                          dataset_data_path=dataset_data_path,
                                          variable_key=variable_key))
+        if not dict_are_equal({VALUE: dm_data[VALUE]}, {VALUE: new_value}):
+            del dm_data[VALUE]
+            gc.collect()
         dm_data[VALUE] = new_value
 
     def export_data_in_datasets(self, datasets_mapping: DatasetsMapping) -> None:

From b524958a9bd8a58afcb367368d3e677af6821a92 Mon Sep 17 00:00:00 2001
From: magueylard
Date: Mon, 28 Oct 2024 10:09:52 +0100
Subject: [PATCH 04/12] add warning on dataset version

---
 sostrades_core/datasets/dataset_info/dataset_info_factory.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sostrades_core/datasets/dataset_info/dataset_info_factory.py b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
index d4756f37d..3e99748f6 100644
--- a/sostrades_core/datasets/dataset_info/dataset_info_factory.py
+++ b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
@@ -65,4 +65,8 @@ def get_dataset_info_version(cls, dataset_mapping_key: str) -> DatasetInfoSerial
         else:
             warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning)  # noqa: B028
 
+        else:
+            warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning)
+
+
         return version

From 05595a09d582480691e80e6814041bf22d0b0d89 Mon Sep 17 00:00:00 2001
From: magueylard
Date: Mon, 28 Oct 2024 10:32:59 +0100
Subject: [PATCH 05/12] fix bad merge

---
 sostrades_core/datasets/dataset_info/dataset_info_factory.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/sostrades_core/datasets/dataset_info/dataset_info_factory.py b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
index 3e99748f6..9d13c44ac 100644
--- a/sostrades_core/datasets/dataset_info/dataset_info_factory.py
+++ b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
@@ -65,8 +65,5 @@ def get_dataset_info_version(cls, dataset_mapping_key: str) -> DatasetInfoSerial
         else:
             warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning)  # noqa: B028
 
-        else:
-            warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning)
-
-
+
         return version

From 2d850ba1c0389e6addf17c399c735d4f10f261f4 Mon Sep 17 00:00:00 2001
From: magueylard
Date: Mon, 28 Oct 2024 11:58:53 +0100
Subject: [PATCH 06/12] Remove delete value (degrade perfo)

---
 sostrades_core/execution_engine/data_manager.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/sostrades_core/execution_engine/data_manager.py b/sostrades_core/execution_engine/data_manager.py
index e24bd7a94..8b7b5333f 100644
--- a/sostrades_core/execution_engine/data_manager.py
+++ b/sostrades_core/execution_engine/data_manager.py
@@ -526,9 +526,7 @@ def apply_parameter_change(self,
                                          date=datetime.now(),
                                          dataset_data_path=dataset_data_path,
                                          variable_key=variable_key))
-        if not dict_are_equal({VALUE: dm_data[VALUE]}, {VALUE: new_value}):
-            del dm_data[VALUE]
-            gc.collect()
+
         dm_data[VALUE] = new_value
 
     def export_data_in_datasets(self, datasets_mapping: DatasetsMapping) -> None:
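
For context, a minimal sketch of the guarded assignment tried in PATCH 03 and backed out in PATCH 06 (plain dict and numpy stand-ins; only `dict_are_equal` and `VALUE` appear in the hunk above, so the comparison helper here is illustrative):

    import gc
    import numpy as np

    def set_value_with_guard(dm_data: dict, new_value) -> None:
        # Only drop the old object and force a collection when the value actually changed.
        # The guard itself is a deep, element-wise comparison, so large dataframes or arrays pay a
        # full comparison plus a gc.collect() on every parameter update, which is the cost behind
        # the "degrade perfo" revert.
        if not np.array_equal(dm_data.get("value"), new_value):  # stand-in for dict_are_equal({VALUE: ...}, {VALUE: ...})
            dm_data.pop("value", None)
            gc.collect()
        dm_data["value"] = new_value
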
From f391b830daf14e861363b3b1a59bd331a4c1768e Mon Sep 17 00:00:00 2001
From: magueylard
Date: Tue, 29 Oct 2024 09:58:04 +0100
Subject: [PATCH 07/12] fix ruff errors

---
 sostrades_core/datasets/dataset_info/dataset_info_factory.py | 2 +-
 sostrades_core/execution_engine/data_manager.py              | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/sostrades_core/datasets/dataset_info/dataset_info_factory.py b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
index 9d13c44ac..a9e1a7b46 100644
--- a/sostrades_core/datasets/dataset_info/dataset_info_factory.py
+++ b/sostrades_core/datasets/dataset_info/dataset_info_factory.py
@@ -65,5 +65,5 @@ def get_dataset_info_version(cls, dataset_mapping_key: str) -> DatasetInfoSerial
         else:
             warn("No version in dataset info is tolerated for now but will be deprecated in future versions", UserWarning)  # noqa: B028
 
-        
+
         return version
diff --git a/sostrades_core/execution_engine/data_manager.py b/sostrades_core/execution_engine/data_manager.py
index 8b7b5333f..ee0689c69 100644
--- a/sostrades_core/execution_engine/data_manager.py
+++ b/sostrades_core/execution_engine/data_manager.py
@@ -14,7 +14,6 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
-import gc
 import logging
 from copy import copy, deepcopy
 from dataclasses import dataclass
@@ -526,7 +526,7 @@ def apply_parameter_change(self,
                                          date=datetime.now(),
                                          dataset_data_path=dataset_data_path,
                                          variable_key=variable_key))
-        
+
         dm_data[VALUE] = new_value
 
     def export_data_in_datasets(self, datasets_mapping: DatasetsMapping) -> None:

From 07c3561650a01ab7aa6c40257dfe9964d4db2ea1 Mon Sep 17 00:00:00 2001
From: magueylard
Date: Tue, 29 Oct 2024 11:24:31 +0100
Subject: [PATCH 08/12] add non functionning formula in disc1 markdown

---
 .../test_discs/documentation/disc1.markdown | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/sostrades_core/sos_wrapping/test_discs/documentation/disc1.markdown b/sostrades_core/sos_wrapping/test_discs/documentation/disc1.markdown
index e4de13f88..dc03b3964 100644
--- a/sostrades_core/sos_wrapping/test_discs/documentation/disc1.markdown
+++ b/sostrades_core/sos_wrapping/test_discs/documentation/disc1.markdown
@@ -150,6 +150,21 @@ Where:
 - $r$ [^16]
 - $\Delta T$ is the world
 
+### 3. text in formula
+Therefore the variables $\text{My new variable}_{t}$ and $\text{My other variable}_{t}$ are updated for all years $t \geq t$.
+
+$\text{function fn (called f)} = \text{big F function }\text{\%} \times \text{function G}$
+
+$\text{Delta X}_{t} = \text{result}_{t} - \text{y}_{t} + a^{\text{b}}_{t} + a^{\text{variable c}}_{t}$
+
+$$\text{F1 function (X,\$)} = \frac{\text{ab, M\$}}{\text{number of data}} \times 10^6$$
+$$\text{F2 function (Y, \%)} = \frac{\text{cd}}{\text{number}} \times 100$$
+$$\text{F3 function for another F1 (\%)} = \frac{\text{function G}}{\text{temp test}} \times 100$$
+$$\text{function F4 (Z, \%)} = \frac{\text{yz}}{\text{reset x}} \times 100$$
+$$\text{Total of all functions, M\$} = \text{ruslt of summ}$$
+
+$$\text{Mass Loss (}\text{\%}\text{)} = \max\left(0, 0.1 \times (P - 9.81) \times 100\right)$$
+
 END.
 
 ## Sources
From c513a202bb3002742690420be1f124e6c1f34b01 Mon Sep 17 00:00:00 2001
From: magueylard
Date: Tue, 29 Oct 2024 18:54:51 +0100
Subject: [PATCH 09/12] Fix bug dataset connector mapping +Add test

---
 sostrades_core/datasets/dataset_mapping.py       |  5 +-
 .../test_92_export_mapping_disc1_disc2.json      | 29 ++++++++
 sostrades_core/tests/l0_test_92_datasets.py      | 69 ++++++++++++++++++-
 3 files changed, 101 insertions(+), 2 deletions(-)
 create mode 100644 sostrades_core/tests/data/test_92_export_mapping_disc1_disc2.json

diff --git a/sostrades_core/datasets/dataset_mapping.py b/sostrades_core/datasets/dataset_mapping.py
index 280b9a770..39a7caefe 100644
--- a/sostrades_core/datasets/dataset_mapping.py
+++ b/sostrades_core/datasets/dataset_mapping.py
@@ -247,7 +247,10 @@ def get_datasets_namespace_mapping_for_study(self, study_name: str, namespaces_d
         dataset_info_list = {}
         for namespace in namespaces_dict.keys():
             study_namespace = namespace.replace(self.STUDY_PLACEHOLDER, study_name)
-            dataset_info_list.update({dataset_id:{study_namespace:mapping_data} for dataset_id, mapping_data in self.get_datasets_info_from_namespace(namespace, study_name).items()})
+            for dataset, mapping_data in self.get_datasets_info_from_namespace(namespace, study_name).items():
+                dataset_info_list[dataset] = dataset_info_list.get(dataset, {})
+                dataset_info_list[dataset][study_namespace] = dataset_info_list[dataset].get(study_namespace, {})
+                dataset_info_list[dataset][study_namespace].update(mapping_data)
 
         for dataset, namespaces_mapping_dict in dataset_info_list.items():
             try:
diff --git a/sostrades_core/tests/data/test_92_export_mapping_disc1_disc2.json b/sostrades_core/tests/data/test_92_export_mapping_disc1_disc2.json
new file mode 100644
index 000000000..ecfe5d6ab
--- /dev/null
+++ b/sostrades_core/tests/data/test_92_export_mapping_disc1_disc2.json
@@ -0,0 +1,29 @@
+{
+    "process_module_path": "sostrades_core.sos_processes.test.test_disc1_all_types",
+    "namespace_datasets_mapping": {
+        "v0||x": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|x"
+        ],
+        "v0|.Disc1|a": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|a"
+        ],
+        "v0|.Disc1|b": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|b"
+        ],
+        "v0|.Disc1|indicator": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|indicator"
+        ],
+        "v0||y": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|y"
+        ],
+        "v0|.Disc2|constant": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|constant"
+        ],
+        "v0|.Disc2|power": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|power"
+        ],
+        "v0||z": [
+            "MVP0_local_datasets_connector_export_test|test_dataset_disc1_disc2|z"
+        ]
+    }
+}
diff --git a/sostrades_core/tests/l0_test_92_datasets.py b/sostrades_core/tests/l0_test_92_datasets.py
index 2d74b5037..c167923e2 100644
--- a/sostrades_core/tests/l0_test_92_datasets.py
+++ b/sostrades_core/tests/l0_test_92_datasets.py
@@ -23,6 +23,7 @@
 import sostrades_core.sos_processes.test.sellar.test_sellar_coupling.usecase_dataset_and_dict_sellar_coupling as uc_dataset_dict
 import sostrades_core.sos_processes.test.sellar.test_sellar_coupling.usecase_dataset_sellar_coupling
+from sostrades_core.sos_processes.test.test_disc1_disc2_coupling.usecase_coupling_2_disc_test import Study as StudyDisc1Disc2
 import sostrades_core.sos_processes.test.test_disc1_disc2_dataset.usecase_dataset
 import sostrades_core.sos_processes.test.test_disc1_nested_types.usecase_local_dataset
 from sostrades_core.datasets.dataset_info.dataset_info_v0 import DatasetInfoV0
@@ -276,6 +277,72 @@ def test_07_datasets_local_connector_with_all_non_nested_types(self):
         self.assertEqual(dm.get_value("usecase_dataset.Disc1.b_bool"), False)
         self.assertTrue((dm.get_value("usecase_dataset.Disc1.d") == pd.DataFrame({"years": [2023, 2024], "x": [1.0, 10.0]})).all().all())
 
+    def test_07b_datasets_local_connector_with_several_namespace(self):
+        """
+        Check correctness of loaded values after loading a handcrafted local directories' dataset, testing usage of
+        LocalDatasetsConnector and FileSystemDatasetsSerializer. and more than one namespace
+        """
+        usecase_file_path = sostrades_core.sos_processes.test.test_disc1_disc2_coupling.usecase_coupling_2_disc_test.__file__
+        process_path = os.path.dirname(usecase_file_path)
+        study = StudyDisc1Disc2()
+        study.load_data()
+        study.run()
+        dm = study.execution_engine.dm
+
+        data_types_dict = {'a' :'float',
+                           'x' :'float',
+                           'b' :'float', 
+                           'y' :'float',
+                           'z' :'float',
+                           'constant' :'float',
+                           'power' :'int',
+                           'indicator' :'float'
+                           }
+        
+        # export study in another folder
+        # create connector test for export
+        connector_args = {
+            "root_directory_path": "./sostrades_core/tests/data/local_datasets_db_export_test/",
+            "create_if_not_exists": True
+        }
+
+        export_connector = DatasetsConnectorManager.register_connector(connector_identifier="MVP0_local_datasets_connector_export_test",
+                                                                       connector_type=DatasetConnectorType.get_enum_value("Local"),
+                                                                       **connector_args)
+        test_data_folder = os.path.join(os.path.dirname(__file__), "data")
+        export_mapping_repo_file_path = os.path.join(test_data_folder, "test_92_export_mapping_disc1_disc2.json")
+
+        # test export
+        mapping = DatasetsMapping.from_json_file(export_mapping_repo_file_path)
+        study.export_data_from_dataset_mapping(mapping)
+        exported_data = export_connector.get_values_all(DatasetInfoV0("MVP0_local_datasets_connector_export_test",
+                                                                      "test_dataset_disc1_disc2"),data_types_dict)
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.Disc1.a"), exported_data.get("a"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.x"), exported_data.get("x"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.Disc1.b"), exported_data.get("b"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.Disc1.indicator"), exported_data.get("indicator"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.y"), exported_data.get("y"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.Disc2.constant"), exported_data.get("constant"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.Disc2.power"), exported_data.get("power"))
+        self.assertEqual(dm.get_value("usecase_coupling_2_disc_test.z"), exported_data.get("z"))
+
+        # test import
+        study2 = StudyManager(file_path=usecase_file_path)
+        study2.update_data_from_dataset_mapping(mapping)
+        dm2 = study2.execution_engine.dm
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.Disc1.a"), exported_data.get("a"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.x"), exported_data.get("x"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.Disc1.b"), exported_data.get("b"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.Disc1.indicator"), exported_data.get("indicator"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.y"), exported_data.get("y"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.Disc2.constant"), exported_data.get("constant"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.Disc2.power"), exported_data.get("power"))
+        self.assertEqual(dm2.get_value("usecase_coupling_2_disc_test.z"), exported_data.get("z"))
+
+
+        export_connector.clear(remove_root_directory=True)
+
+
     def test_08_json_to_local_connector_conversion_and_loading(self):
         """
         Use a local connector to copy values from a JSON connector then load them in the study and check correctness,
@@ -968,4 +1035,4 @@ def test_22_compatibility_V0_V1(self):
 if __name__ == "__main__":
     cls = TestDatasets()
     cls.setUp()
-    cls.test_22_compatibility_V0_V1()
+    cls.test_07b_datasets_local_connector_with_several_namespace()
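
An illustrative sketch (plain dicts, not the real DatasetsMapping API) of the namespace-merge bug fixed in dataset_mapping.py above:

    dataset_info_list = {}
    per_namespace = [
        ("usecase.Disc1", {"a": "dataset_X|a", "b": "dataset_X|b"}),
        ("usecase.Disc2", {"constant": "dataset_X|constant"}),
    ]

    # Old behaviour: one dict-comprehension update per namespace, so the second namespace
    # overwrites the whole "dataset_X" entry and the Disc1 mapping is lost.
    for study_namespace, mapping_data in per_namespace:
        dataset_info_list.update({"dataset_X": {study_namespace: mapping_data}})
    # dataset_info_list == {"dataset_X": {"usecase.Disc2": {"constant": "dataset_X|constant"}}}

    # New behaviour: merge per dataset and per namespace, so both namespaces survive,
    # which is what test_07b exercises with Disc1 and Disc2 sharing one dataset.
    dataset_info_list = {}
    for study_namespace, mapping_data in per_namespace:
        dataset_info_list.setdefault("dataset_X", {}).setdefault(study_namespace, {}).update(mapping_data)
    # dataset_info_list == {"dataset_X": {"usecase.Disc1": {...}, "usecase.Disc2": {...}}}
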
From 95e96d8a1ed00ca203949e1813ba03f1fa38918b Mon Sep 17 00:00:00 2001
From: magueylard
Date: Tue, 29 Oct 2024 19:31:27 +0100
Subject: [PATCH 10/12] fix ruff error

---
 sostrades_core/tests/l0_test_92_datasets.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/sostrades_core/tests/l0_test_92_datasets.py b/sostrades_core/tests/l0_test_92_datasets.py
index c167923e2..10fc592b5 100644
--- a/sostrades_core/tests/l0_test_92_datasets.py
+++ b/sostrades_core/tests/l0_test_92_datasets.py
@@ -23,7 +23,6 @@
 import sostrades_core.sos_processes.test.sellar.test_sellar_coupling.usecase_dataset_and_dict_sellar_coupling as uc_dataset_dict
 import sostrades_core.sos_processes.test.sellar.test_sellar_coupling.usecase_dataset_sellar_coupling
-from sostrades_core.sos_processes.test.test_disc1_disc2_coupling.usecase_coupling_2_disc_test import Study as StudyDisc1Disc2
 import sostrades_core.sos_processes.test.test_disc1_disc2_dataset.usecase_dataset
 import sostrades_core.sos_processes.test.test_disc1_nested_types.usecase_local_dataset
 from sostrades_core.datasets.dataset_info.dataset_info_v0 import DatasetInfoV0
@@ -38,6 +37,9 @@
 from sostrades_core.datasets.datasets_connectors.datasets_connector_factory import DatasetConnectorType
 from sostrades_core.datasets.datasets_connectors.datasets_connector_manager import DatasetsConnectorManager
 from sostrades_core.sos_processes.test.test_disc1_all_types.usecase_dataset import Study
+from sostrades_core.sos_processes.test.test_disc1_disc2_coupling.usecase_coupling_2_disc_test import (
+    Study as StudyDisc1Disc2,
+)
 from sostrades_core.study_manager.study_manager import StudyManager
@@ -291,14 +293,14 @@ def test_07b_datasets_local_connector_with_several_namespace(self):
         data_types_dict = {'a' :'float',
                            'x' :'float',
-                           'b' :'float', 
+                           'b' :'float',
                            'y' :'float',
                            'z' :'float',
                            'constant' :'float',
                            'power' :'int',
                            'indicator' :'float'
                            }
-        
+
         # export study in another folder
         # create connector test for export

From b29fdad7b444119d4936d638ac2a68f7475fcba9 Mon Sep 17 00:00:00 2001
From: adiseshu-capgemini
Date: Wed, 30 Oct 2024 11:24:37 +0530
Subject: [PATCH 11/12] indicator_chart_teest

---
 sostrades_core/tests/l0_test_20_charts.py | 38 +++++++++++++++++++++--
 1 file changed, 36 insertions(+), 2 deletions(-)

diff --git a/sostrades_core/tests/l0_test_20_charts.py b/sostrades_core/tests/l0_test_20_charts.py
index 22e20a414..fe4283ac5 100644
--- a/sostrades_core/tests/l0_test_20_charts.py
+++ b/sostrades_core/tests/l0_test_20_charts.py
@@ -1,6 +1,6 @@
 '''
-Copyright 2022 Airbus SAS
-Modifications on 02/01/2024-2024/06/28 Copyright 2024 Capgemini
+Copyright 2024 Capgemini
+
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
@@ -638,3 +638,37 @@ def test_20_create_plotly_native_chart(self):
 
         # plotly_native_chart.to_plotly().show()
         plotly_native_chart.to_plotly()
+
+    def test_21_create_indicator_chart(self):
+
+        import plotly.graph_objects as go
+
+        from sostrades_core.tools.post_processing.indicator_charts.instanciated_indicator_gauge_chart import (
+            InstantiatedIndicatorChart,
+        )
+
+        fig = go.Figure()
+        fig.add_trace(go.Indicator())
+        value = 50.85
+        indicator_chart = InstantiatedIndicatorChart(
+            value=value,
+            mode="gauge+number",
+            title={'text': ' Plotly Indicator chart'},
+            gauge={
+                'axis': {'range': [0, 100]},
+                'steps': [
+                    {'range': [0, 33], 'color': "red"},
+                    {'range': [33, 66], 'color': "orange"},
+                    {'range': [66, 100], 'color': "green"},
+
+                ],
+                'threshold': {
+                    'line': {'color': 'black', 'width': 4},
+                    'thickness': 0.8,
+                    'value': value,
+                },
+                "bar": {"color": "black"}
+            }
+        )
+        # indicator_chart.to_plotly().show()
+        indicator_chart.to_plotly()
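
For reference, a rough plotly-only equivalent of the gauge configured in test_21 (an illustration of the go.Indicator trace, not the actual figure returned by InstantiatedIndicatorChart.to_plotly()):

    import plotly.graph_objects as go

    value = 50.85
    fig = go.Figure(go.Indicator(
        mode="gauge+number",
        value=value,
        title={'text': ' Plotly Indicator chart'},
        gauge={
            'axis': {'range': [0, 100]},
            'steps': [
                {'range': [0, 33], 'color': "red"},
                {'range': [33, 66], 'color': "orange"},
                {'range': [66, 100], 'color': "green"},
            ],
            'threshold': {'line': {'color': 'black', 'width': 4}, 'thickness': 0.8, 'value': value},
            'bar': {'color': "black"},
        },
    ))
    # fig.show()  # uncomment to render the gauge locally
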
From f2577e412626a5b398cf1bd606cfc66894af75fd Mon Sep 17 00:00:00 2001
From: magueylard
Date: Thu, 31 Oct 2024 08:38:46 +0100
Subject: [PATCH 12/12] fix header

---
 sostrades_core/tests/l0_test_20_charts.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sostrades_core/tests/l0_test_20_charts.py b/sostrades_core/tests/l0_test_20_charts.py
index fe4283ac5..a795e4df5 100644
--- a/sostrades_core/tests/l0_test_20_charts.py
+++ b/sostrades_core/tests/l0_test_20_charts.py
@@ -1,5 +1,6 @@
 '''
-Copyright 2024 Capgemini
+Copyright 2022 Airbus SAS
+Modifications on 02/01/2024-2024/06/28 Copyright 2024 Capgemini
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.