diff --git a/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql b/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql index 9203128c0b24..aaa61aa8d720 100644 --- a/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql @@ -1773,3 +1773,27 @@ SET json = JSON_SET( ) ) WHERE serviceType = 'DBTCloud'; + +-- Update serviceType in dashboard_entity table +UPDATE dashboard_entity +SET json = JSON_SET(json, '$.serviceType', 'MicroStrategy') +WHERE JSON_UNQUOTE(JSON_EXTRACT(json, '$.serviceType')) = 'Mstr'; + +-- Update serviceType in dashboard_service_entity table +UPDATE dashboard_service_entity +SET json = JSON_SET(json, '$.serviceType', 'MicroStrategy') +WHERE JSON_UNQUOTE(JSON_EXTRACT(json, '$.serviceType')) = 'Mstr'; + +UPDATE dashboard_service_entity +SET json = JSON_SET(json, '$.connection.config.type', 'MicroStrategy') +WHERE JSON_UNQUOTE(JSON_EXTRACT(json, '$.connection.config.type')) = 'Mstr'; + +-- Update serviceType in dashboard_data_model_entity table +UPDATE dashboard_data_model_entity +SET json = JSON_SET(json, '$.serviceType', 'MicroStrategy') +WHERE JSON_UNQUOTE(JSON_EXTRACT(json, '$.serviceType')) = 'Mstr'; + +-- Update serviceType in chart_entity table +UPDATE chart_entity +SET json = JSON_SET(json, '$.serviceType', 'MicroStrategy') +WHERE JSON_UNQUOTE(JSON_EXTRACT(json, '$.serviceType')) = 'Mstr'; \ No newline at end of file diff --git a/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql b/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql index 38fb01676a24..461d746d5504 100644 --- a/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql @@ -1756,3 +1756,27 @@ and servicetype = 'DBTCloud'; UPDATE pipeline_service_entity SET json = jsonb_set(json, '{connection, config, projectIds}', '[]', true) WHERE servicetype = 'DBTCloud'; + +-- Update serviceType in dashboard_entity table +UPDATE dashboard_entity +SET json = jsonb_set(json, '{serviceType}', '"MicroStrategy"') +WHERE jsonb_extract_path_text(json, 'serviceType') = 'Mstr'; + +-- Update serviceType in dashboard_service_entity table +UPDATE dashboard_service_entity +SET json = jsonb_set(json, '{serviceType}', '"MicroStrategy"') +WHERE jsonb_extract_path_text(json, 'serviceType') = 'Mstr'; + +UPDATE dashboard_service_entity +SET json = jsonb_set(json, '{connection,config,type}', '"MicroStrategy"') +WHERE jsonb_extract_path_text(json, 'connection', 'config', 'type') = 'Mstr'; + +-- Update serviceType in dashboard_data_model_entity table +UPDATE dashboard_data_model_entity +SET json = jsonb_set(json, '{serviceType}', '"MicroStrategy"') +WHERE jsonb_extract_path_text(json, 'serviceType') = 'Mstr'; + +-- Update serviceType in chart_entity table +UPDATE chart_entity +SET json = jsonb_set(json, '{serviceType}', '"MicroStrategy"') +WHERE jsonb_extract_path_text(json, 'serviceType') = 'Mstr'; \ No newline at end of file diff --git a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py index 67baa22335d5..76d8d640e307 100644 --- a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py +++ b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py @@ -104,6 +104,7 @@ def _create_runner(self) -> QueryRunner: QueryRunner( session=self.session, dataset=self.dataset, + 
raw_dataset=self.sampler.raw_dataset, partition_details=self.table_partition_config, profile_sample_query=self.table_sample_query, ) diff --git a/ingestion/src/metadata/data_quality/runner/base_test_suite_source.py b/ingestion/src/metadata/data_quality/runner/base_test_suite_source.py index 9651e0d045f7..bf4897843a9f 100644 --- a/ingestion/src/metadata/data_quality/runner/base_test_suite_source.py +++ b/ingestion/src/metadata/data_quality/runner/base_test_suite_source.py @@ -49,9 +49,10 @@ def __init__( ): self.validator_builder_class = ValidatorBuilder self._interface = None - self._interface_type: str = config.source.type.lower() self.entity = entity self.service_conn_config = self._copy_service_config(config, self.entity.database) # type: ignore + self._interface_type: str = self.service_conn_config.type.value.lower() + self.source_config = TestSuitePipeline.model_validate( config.source.sourceConfig.config ) diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py index 2473436cd2b0..33df13ff2098 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py @@ -40,7 +40,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py index 235ca42985e1..13d860c35f67 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py @@ -38,7 +38,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py index 80aca69912f2..5e45344b3ff4 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py index a4104213470a..b473c4bda918 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py @@ -39,7 +39,7 @@ 
def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py index dd867dab6ec0..512b3bee68ea 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py index 8be659d211e3..7d08f3ab8fdf 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py index ebbd620dd61d..a3b06e648b63 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py @@ -42,7 +42,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py index 16b7f939cb1d..96ba1d14d8d7 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py index 138c5d0c2f8b..a4e6c1ef9a3b 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py +++ 
b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py @@ -37,7 +37,7 @@ class ColumnValuesToBeAtExpectedLocationValidator( def _fetch_data(self, columns: List[str]) -> Iterator: """Fetch data from the runner object""" self.runner = cast(QueryRunner, self.runner) - inspection = inspect(self.runner.table) + inspection = inspect(self.runner.dataset) table_columns: List[Column] = inspection.c if inspection is not None else [] cols = [col for col in table_columns if col.name in columns] for col in cols: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py index af8fcc9b5fec..c1cb8ad1acc9 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py index 4bccac6445a0..f920ae253059 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py index d50e98efa9b2..012059bd72fb 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py index da11812ad8c1..e425cbcb89f9 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py @@ -42,7 +42,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py 
b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py index 89a93f09f90e..daf1afef4bc2 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py @@ -17,7 +17,6 @@ from sqlalchemy import Column, inspect from sqlalchemy.exc import SQLAlchemyError -from sqlalchemy.orm.util import AliasedClass from metadata.data_quality.validations.column.base.columnValuesToBeUnique import ( BaseColumnValuesToBeUniqueValidator, @@ -41,7 +40,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: @@ -53,12 +52,7 @@ def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: """ count = Metrics.COUNT.value(column).fn() unique_count = Metrics.UNIQUE_COUNT.value(column).query( - sample=self.runner._sample # pylint: disable=protected-access - if isinstance( - self.runner._sample, # pylint: disable=protected-access - AliasedClass, - ) - else self.runner.table, + sample=self.runner.dataset, session=self.runner._session, # pylint: disable=protected-access ) # type: ignore diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py index be28e57963b3..0f1c0537303c 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py @@ -43,7 +43,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results( diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py index f5a8c2656dcd..fda43496900b 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py @@ -43,7 +43,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py b/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py index 425f49606527..30894951c3f4 100644 --- a/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py @@ -13,7 +13,7 @@ Validator for table row inserted count to be between test case """ -from sqlalchemy import Column, text +from sqlalchemy import Column, inspect, text from metadata.data_quality.validations.mixins.sqa_validator_mixin import ( SQAValidatorMixin, @@ -52,7 +52,7 @@ def _run_results(self, column_name: str, range_type: str, range_interval: 
int): date_or_datetime_fn = dispatch_to_date_or_datetime( range_interval, text(range_type), - get_partition_col_type(column_name.name, self.runner.table.c), # type: ignore + get_partition_col_type(column_name.name, inspect(self.runner.dataset).c), # type: ignore ) return dict( diff --git a/ingestion/src/metadata/examples/workflows/mstr.yaml b/ingestion/src/metadata/examples/workflows/microstrategy.yaml similarity index 94% rename from ingestion/src/metadata/examples/workflows/mstr.yaml rename to ingestion/src/metadata/examples/workflows/microstrategy.yaml index 64ad7a44c0ee..4989872137ef 100644 --- a/ingestion/src/metadata/examples/workflows/mstr.yaml +++ b/ingestion/src/metadata/examples/workflows/microstrategy.yaml @@ -1,13 +1,14 @@ source: - type: mstr + type: microstrategy serviceName: test serviceConnection: config: - type: Mstr + type: MicroStrategy username: username password: password hostPort: http://hostPort projectName: project + loginMode: "8" sourceConfig: config: type: DashboardMetadata diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/__init__.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/__init__.py similarity index 100% rename from ingestion/src/metadata/ingestion/source/dashboard/mstr/__init__.py rename to ingestion/src/metadata/ingestion/source/dashboard/microstrategy/__init__.py diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/client.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/client.py similarity index 93% rename from ingestion/src/metadata/ingestion/source/dashboard/mstr/client.py rename to ingestion/src/metadata/ingestion/source/dashboard/microstrategy/client.py index 1388364db11c..7c25e8793cdc 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/mstr/client.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/client.py @@ -9,19 +9,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
""" -REST Auth & Client for Mstr +REST Auth & Client for MicroStrategy """ import traceback from typing import List, Optional import requests -from metadata.generated.schema.entity.services.connections.dashboard.mstrConnection import ( - MstrConnection, +from metadata.generated.schema.entity.services.connections.dashboard.microStrategyConnection import ( + MicroStrategyConnection, ) from metadata.ingestion.connections.test_connections import SourceConnectionException from metadata.ingestion.ometa.client import REST, ClientConfig -from metadata.ingestion.source.dashboard.mstr.models import ( +from metadata.ingestion.source.dashboard.microstrategy.models import ( AuthHeaderCookie, MstrDashboard, MstrDashboardDetails, @@ -37,30 +37,29 @@ logger = ingestion_logger() API_VERSION = "MicroStrategyLibrary/api" -LOGIN_MODE_GUEST = 8 APPLICATION_TYPE = 35 -class MSTRClient: +class MicroStrategyClient: """ Client Handling API communication with Metabase """ def _get_base_url(self, path=None): if not path: - return f"{clean_uri(self.config.hostPort)}/{API_VERSION}" - return f"{clean_uri(self.config.hostPort)}/{API_VERSION}/{path}" + return f"{clean_uri(str(self.config.hostPort))}/{API_VERSION}" + return f"{clean_uri(str(self.config.hostPort))}/{API_VERSION}/{path}" def __init__( self, - config: MstrConnection, + config: MicroStrategyConnection, ): self.config = config self.auth_params: AuthHeaderCookie = self._get_auth_header_and_cookies() client_config = ClientConfig( - base_url=clean_uri(config.hostPort), + base_url=clean_uri(str(self.config.hostPort)), api_version=API_VERSION, extra_headers=self.auth_params.auth_header, allow_redirects=True, @@ -81,7 +80,7 @@ def _get_auth_header_and_cookies(self) -> Optional[AuthHeaderCookie]: data = { "username": self.config.username, "password": self.config.password.get_secret_value(), - "loginMode": LOGIN_MODE_GUEST, + "loginMode": self.config.loginMode, "applicationType": APPLICATION_TYPE, } response = requests.post( diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/connection.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/connection.py similarity index 79% rename from ingestion/src/metadata/ingestion/source/dashboard/mstr/connection.py rename to ingestion/src/metadata/ingestion/source/dashboard/microstrategy/connection.py index 156022a8aaca..ce96e54b3cee 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/mstr/connection.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/connection.py @@ -17,31 +17,29 @@ from metadata.generated.schema.entity.automations.workflow import ( Workflow as AutomationWorkflow, ) -from metadata.generated.schema.entity.services.connections.dashboard.mstrConnection import ( - MstrConnection, +from metadata.generated.schema.entity.services.connections.dashboard.microStrategyConnection import ( + MicroStrategyConnection, ) from metadata.generated.schema.entity.services.connections.testConnectionResult import ( TestConnectionResult, ) from metadata.ingestion.connections.test_connections import test_connection_steps from metadata.ingestion.ometa.ometa_api import OpenMetadata -from metadata.ingestion.source.dashboard.mstr.client import MSTRClient -from metadata.utils.constants import THREE_MIN +from metadata.ingestion.source.dashboard.microstrategy.client import MicroStrategyClient -def get_connection(connection: MstrConnection) -> MSTRClient: +def get_connection(connection: MicroStrategyConnection) -> MicroStrategyClient: """ Create connection """ - return 
MSTRClient(connection) + return MicroStrategyClient(connection) def test_connection( metadata: OpenMetadata, - client: MSTRClient, - service_connection: MstrConnection, + client: MicroStrategyClient, + service_connection: MicroStrategyConnection, automation_workflow: Optional[AutomationWorkflow] = None, - timeout_seconds: Optional[int] = THREE_MIN, ) -> TestConnectionResult: """ Test connection. This can be executed either as part @@ -55,5 +53,4 @@ def test_connection( test_fn=test_fn, service_type=service_connection.type.value, automation_workflow=automation_workflow, - timeout_seconds=timeout_seconds, ) diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/metadata.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/metadata.py similarity index 90% rename from ingestion/src/metadata/ingestion/source/dashboard/mstr/metadata.py rename to ingestion/src/metadata/ingestion/source/dashboard/microstrategy/metadata.py index d26ee0ef0e60..1fcd23c1ccab 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/mstr/metadata.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/metadata.py @@ -8,7 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Mstr source module""" +"""MicroStrategy source module""" import traceback from typing import Iterable, List, Optional @@ -16,8 +16,8 @@ from metadata.generated.schema.api.data.createDashboard import CreateDashboardRequest from metadata.generated.schema.api.lineage.addLineage import AddLineageRequest from metadata.generated.schema.entity.data.chart import Chart -from metadata.generated.schema.entity.services.connections.dashboard.mstrConnection import ( - MstrConnection, +from metadata.generated.schema.entity.services.connections.dashboard.microStrategyConnection import ( + MicroStrategyConnection, ) from metadata.generated.schema.entity.services.ingestionPipelines.status import ( StackTraceError, @@ -34,7 +34,7 @@ from metadata.ingestion.api.steps import InvalidSourceException from metadata.ingestion.ometa.ometa_api import OpenMetadata from metadata.ingestion.source.dashboard.dashboard_service import DashboardServiceSource -from metadata.ingestion.source.dashboard.mstr.models import ( +from metadata.ingestion.source.dashboard.microstrategy.models import ( MstrDashboard, MstrDashboardDetails, MstrPage, @@ -47,9 +47,9 @@ logger = ingestion_logger() -class MstrSource(DashboardServiceSource): +class MicrostrategySource(DashboardServiceSource): """ - MSTR Source Class + Microstrategy Source Class """ @classmethod @@ -60,10 +60,10 @@ def create( pipeline_name: Optional[str] = None, ): config = WorkflowSource.model_validate(config_dict) - connection: MstrConnection = config.serviceConnection.root.config - if not isinstance(connection, MstrConnection): + connection: MicroStrategyConnection = config.serviceConnection.root.config + if not isinstance(connection, MicroStrategyConnection): raise InvalidSourceException( - f"Expected MstrConnection, but got {connection}" + f"Expected MicroStrategyConnection, but got {connection}" ) return cls(config, metadata) @@ -75,14 +75,18 @@ def get_dashboards_list(self) -> Optional[List[MstrDashboard]]: if self.client.is_project_name(): project = self.client.get_project_by_name() - dashboards.extend(self.client.get_dashboards_list(project.id, project.name)) - - if not self.client.is_project_name(): - for project in 
self.client.get_projects_list(): + if project: dashboards.extend( self.client.get_dashboards_list(project.id, project.name) ) + if not self.client.is_project_name(): + for project in self.client.get_projects_list(): + if project: + dashboards.extend( + self.client.get_dashboards_list(project.id, project.name) + ) + return dashboards def get_dashboard_name(self, dashboard: MstrDashboard) -> str: @@ -121,7 +125,7 @@ def yield_dashboard( if dashboard_details: try: dashboard_url = ( - f"{clean_uri(self.service_connection.hostPort)}/MicroStrategyLibrary/app/" + f"{clean_uri(str(self.service_connection.hostPort))}/MicroStrategyLibrary/app/" f"{dashboard_details.projectId}/{dashboard_details.id}" ) dashboard_request = CreateDashboardRequest( diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/models.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/models.py similarity index 99% rename from ingestion/src/metadata/ingestion/source/dashboard/mstr/models.py rename to ingestion/src/metadata/ingestion/source/dashboard/microstrategy/models.py index 372dfb92d0a6..0c550cdfea7b 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/mstr/models.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/models.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. """ -MSTR Models +MicroStrategy Models """ from datetime import datetime from typing import Any, List, Optional diff --git a/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/service_spec.py b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/service_spec.py new file mode 100644 index 000000000000..d9b6c3a9547b --- /dev/null +++ b/ingestion/src/metadata/ingestion/source/dashboard/microstrategy/service_spec.py @@ -0,0 +1,6 @@ +from metadata.ingestion.source.dashboard.microstrategy.metadata import ( + MicrostrategySource, +) +from metadata.utils.service_spec.default import DefaultDatabaseSpec + +ServiceSpec = DefaultDatabaseSpec(metadata_source_class=MicrostrategySource) diff --git a/ingestion/src/metadata/ingestion/source/dashboard/mstr/service_spec.py b/ingestion/src/metadata/ingestion/source/dashboard/mstr/service_spec.py deleted file mode 100644 index f2ea7e03df8a..000000000000 --- a/ingestion/src/metadata/ingestion/source/dashboard/mstr/service_spec.py +++ /dev/null @@ -1,4 +0,0 @@ -from metadata.ingestion.source.dashboard.mstr.metadata import MstrSource -from metadata.utils.service_spec import BaseSpec - -ServiceSpec = BaseSpec(metadata_source_class=MstrSource) diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py b/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py index f85810bfd13a..2cd0f225b31d 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py @@ -22,7 +22,7 @@ def _compute_system_metrics( **kwargs, ) -> List[SystemProfile]: return self.system_metrics_computer.get_system_metrics( - table=runner.table, + table=runner.dataset, usage_location=self.service_connection_config.usageLocation, ) diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/constants.py b/ingestion/src/metadata/ingestion/source/database/dbt/constants.py index 83c49c0724a4..834e248d2fa5 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/constants.py +++ 
b/ingestion/src/metadata/ingestion/source/database/dbt/constants.py @@ -82,6 +82,7 @@ DBT_CATALOG_FILE_NAME = "catalog.json" DBT_MANIFEST_FILE_NAME = "manifest.json" DBT_RUN_RESULTS_FILE_NAME = "run_results" +DBT_SOURCES_FILE_NAME = "sources.json" class SkipResourceTypeEnum(Enum): @@ -91,6 +92,7 @@ class SkipResourceTypeEnum(Enum): ANALYSIS = "analysis" TEST = "test" + SOURCE = "source" class CompiledQueriesEnum(Enum): @@ -127,6 +129,7 @@ class DbtTestFailureEnum(Enum): FAILURE = "failure" FAIL = "fail" + ERROR = "error" class DbtCommonEnum(Enum): @@ -137,6 +140,7 @@ class DbtCommonEnum(Enum): OWNER = "owner" NODES = "nodes" SOURCES = "sources" + SOURCES_FILE = "sources_file" SOURCE = "source" RESOURCETYPE = "resource_type" MANIFEST_NODE = "manifest_node" diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py index 216d7c6e9f83..66e25332e1e9 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py @@ -43,6 +43,7 @@ DBT_CATALOG_FILE_NAME, DBT_MANIFEST_FILE_NAME, DBT_RUN_RESULTS_FILE_NAME, + DBT_SOURCES_FILE_NAME, ) from metadata.ingestion.source.database.dbt.models import DbtFiles from metadata.readers.file.config_source_factory import get_reader @@ -85,6 +86,7 @@ def _(config: DbtLocalConfig): config.dbtManifestFilePath, config.dbtCatalogFilePath, config.dbtRunResultsFilePath, + config.dbtSourcesFilePath, ] yield from download_dbt_files( blob_grouped_by_directory=blob_grouped_by_directory, @@ -123,12 +125,22 @@ def _(config: DbtHttpConfig): dbt_catalog = requests.get( # pylint: disable=missing-timeout config.dbtCatalogHttpPath ) + + dbt_sources = None + if config.dbtSourcesHttpPath: + logger.debug( + f"Requesting [dbtSourcesHttpPath] to: {config.dbtSourcesHttpPath}" + ) + dbt_sources = requests.get( # pylint: disable=missing-timeout + config.dbtSourcesHttpPath + ) if not dbt_manifest: raise DBTConfigException("Manifest file not found in file server") yield DbtFiles( dbt_catalog=dbt_catalog.json() if dbt_catalog else None, dbt_manifest=dbt_manifest.json(), dbt_run_results=[dbt_run_results.json()] if dbt_run_results else None, + dbt_sources=dbt_sources.json() if dbt_sources else None, ) except DBTConfigException as exc: raise exc @@ -243,6 +255,7 @@ def get_blobs_grouped_by_dir(blobs: List[str]) -> Dict[str, List[str]]: return blob_grouped_by_directory +# pylint: disable=too-many-locals, too-many-branches def download_dbt_files( blob_grouped_by_directory: Dict, config, client, bucket_name: Optional[str] ) -> Iterable[DbtFiles]: @@ -255,6 +268,7 @@ def download_dbt_files( ) in blob_grouped_by_directory.items(): dbt_catalog = None dbt_manifest = None + dbt_sources = None dbt_run_results = [] kwargs = {} if bucket_name: @@ -285,12 +299,16 @@ def download_dbt_files( logger.warning( f"{DBT_RUN_RESULTS_FILE_NAME} not found in {key}: {exc}" ) + if DBT_SOURCES_FILE_NAME == blob_file_name.lower(): + logger.debug(f"{DBT_SOURCES_FILE_NAME} found in {key}") + dbt_sources = reader.read(path=blob, **kwargs) if not dbt_manifest: raise DBTConfigException(f"Manifest file not found at: {key}") yield DbtFiles( dbt_catalog=json.loads(dbt_catalog) if dbt_catalog else None, dbt_manifest=json.loads(dbt_manifest), dbt_run_results=dbt_run_results if dbt_run_results else None, + dbt_sources=json.loads(dbt_sources) if dbt_sources else None, ) except DBTConfigException as exc: logger.warning(exc) diff --git 
a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py index aa2d65f4e2cf..50a160164a00 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py @@ -15,7 +15,12 @@ from abc import ABC, abstractmethod from typing import Iterable, List -from dbt_artifacts_parser.parser import parse_catalog, parse_manifest, parse_run_results +from dbt_artifacts_parser.parser import ( + parse_catalog, + parse_manifest, + parse_run_results, + parse_sources, +) from pydantic import Field from typing_extensions import Annotated @@ -209,11 +214,13 @@ def get_dbt_objects(self) -> Iterable[DbtObjects]: self.remove_run_result_non_required_keys( run_results=self.context.get().dbt_file.dbt_run_results ) + dbt_objects = DbtObjects( dbt_catalog=parse_catalog(self.context.get().dbt_file.dbt_catalog) if self.context.get().dbt_file.dbt_catalog else None, dbt_manifest=parse_manifest(self.context.get().dbt_file.dbt_manifest), + dbt_sources=parse_sources(self.context.get().dbt_file.dbt_sources) if self.context.get().dbt_file.dbt_sources else None, dbt_run_results=[ parse_run_results(run_result_file) for run_result_file in self.context.get().dbt_file.dbt_run_results diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py index 70bfcabe1b13..1897f4a7f5d3 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py @@ -44,6 +44,20 @@ def create_test_case_parameter_definitions(dbt_test): } ] return test_case_param_definition + if hasattr(dbt_test, "freshness"): + test_case_param_definition = [ + { + "name": "warn_after", + "displayName": "warn_after", + "required": False, + }, + { + "name": "error_after", + "displayName": "error_after", + "required": False, + }, + ] + return test_case_param_definition except Exception as err: # pylint: disable=broad-except logger.debug(traceback.format_exc()) logger.error( @@ -67,6 +81,21 @@ def create_test_case_parameter_values(dbt_test): {"name": manifest_node.test_metadata.name, "value": dbt_test_values} ] return test_case_param_values + if hasattr(manifest_node, "freshness"): + warn_after = manifest_node.freshness.warn_after + error_after = manifest_node.freshness.error_after + + test_case_param_values = [ + { + "name": "error_after", + "value": f"{error_after.count} {error_after.period.value}", + }, + { + "name": "warn_after", + "value": f"{warn_after.count} {warn_after.period.value}", + }, + ] + return test_case_param_values except Exception as err: # pylint: disable=broad-except logger.debug(traceback.format_exc()) logger.error( diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py b/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py index 82bdf2e3085a..5c1b2cf81e31 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py @@ -13,6 +13,7 @@ DBT source methods.
""" import traceback +from copy import deepcopy from datetime import datetime from typing import Any, Iterable, List, Optional, Union @@ -324,7 +325,41 @@ def add_dbt_tests( None, ) - # pylint: disable=too-many-locals, too-many-branches + def _add_dbt_freshness_test_from_sources( + self, key: str, manifest_node, manifest_entities, dbt_objects: DbtObjects + ): + # in dbt manifest sources node name is table/view name (not test name like with test nodes) + # so in order for the test creation to be named precisely I am amending manifest node name within it's deepcopy + manifest_node_new = deepcopy(manifest_node) + manifest_node_new.name = manifest_node_new.name + "_freshness" + + freshness_test_result = next( + (item for item in dbt_objects.dbt_sources.results if item.unique_id == key), + None, + ) + + if freshness_test_result: + self.context.get().dbt_tests[key + "_freshness"] = { + DbtCommonEnum.MANIFEST_NODE.value: manifest_node_new + } + self.context.get().dbt_tests[key + "_freshness"][ + DbtCommonEnum.UPSTREAM.value + ] = self.parse_upstream_nodes(manifest_entities, manifest_node) + self.context.get().dbt_tests[key + "_freshness"][ + DbtCommonEnum.RESULTS.value + ] = freshness_test_result + + def add_dbt_sources( + self, key: str, manifest_node, manifest_entities, dbt_objects: DbtObjects + ) -> None: + """ + Method to append dbt test cases based on sources file for later processing + """ + self._add_dbt_freshness_test_from_sources( + key, manifest_node, manifest_entities, dbt_objects + ) + + # pylint: disable=too-many-locals, too-many-branches, too-many-statements def yield_data_models( self, dbt_objects: DbtObjects ) -> Iterable[Either[DataModelLink]]: @@ -376,6 +411,17 @@ def yield_data_models( ) continue + if ( + dbt_objects.dbt_sources + and resource_type == SkipResourceTypeEnum.SOURCE.value + ): + self.add_dbt_sources( + key, + manifest_node=manifest_node, + manifest_entities=manifest_entities, + dbt_objects=dbt_objects, + ) + # Skip the ephemeral nodes since it is not materialized if check_ephemeral_node(manifest_node): logger.debug(f"Skipping ephemeral DBT node: {key}.") @@ -549,6 +595,29 @@ def parse_upstream_nodes(self, manifest_entities, dbt_node): f"Failed to parse the DBT node {node} to get upstream nodes: {exc}" ) continue + + if dbt_node.resource_type == SkipResourceTypeEnum.SOURCE.value: + parent_fqn = fqn.build( + self.metadata, + entity_type=Table, + service_name="*", + database_name=get_corrected_name(dbt_node.database), + schema_name=get_corrected_name(dbt_node.schema_), + table_name=dbt_node.name, + ) + + # check if the parent table exists in OM before adding it to the upstream list + parent_table_entity: Optional[ + Union[Table, List[Table]] + ] = get_entity_from_es_result( + entity_list=self.metadata.es_search_from_fqn( + entity_type=Table, fqn_search_string=parent_fqn + ), + fetch_multiple_entities=False, + ) + if parent_table_entity: + upstream_nodes.append(parent_fqn) + return upstream_nodes def parse_data_model_columns( diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/models.py b/ingestion/src/metadata/ingestion/source/database/dbt/models.py index 88671141d43a..e505368994af 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/models.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/models.py @@ -20,12 +20,14 @@ class DbtFiles(BaseModel): dbt_catalog: Optional[dict] = None dbt_manifest: dict + dbt_sources: Optional[dict] = None dbt_run_results: Optional[List[dict]] = None class DbtObjects(BaseModel): dbt_catalog: 
Optional[Any] = None dbt_manifest: Any + dbt_sources: Optional[Any] = None dbt_run_results: Optional[List[Any]] = None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py index 4c92aa1c8a0e..55aeca6d1ef3 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py @@ -32,7 +32,7 @@ def _programming_error_static_metric(self, runner, column, exc, session, metrics # pylint: disable=protected-access if exc.orig and "overflow" in exc.orig._message: logger.info( - f"Computing metrics without sum for {runner.table.name}.{column.name}" + f"Computing metrics without sum for {runner.table_name}.{column.name}" ) return self._compute_static_metrics_wo_sum(metrics, runner, session, column) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py index edca4361ef77..cb2f24f50149 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py @@ -77,11 +77,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError: logger.info( - f"Skipping window metrics for {runner.table.name}.{column.name} due to overflow" + f"Skipping window metrics for {runner.table_name}.{column.name} due to overflow" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py index cfe76f34be93..571cd247a9db 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py @@ -156,7 +156,7 @@ def _compute_static_metrics_wo_sum( ) return dict(row) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -194,7 +194,7 @@ def _compute_table_metrics( except Exception as exc: logger.debug(traceback.format_exc()) logger.warning( - f"Error trying to compute profile for {runner.table.name}: {exc}" # type: ignore + f"Error trying to compute profile for {runner.table_name}: {exc}" # type: ignore ) session.rollback() raise RuntimeError(exc) @@ -231,7 +231,7 @@ def _compute_static_metrics( runner, column, exc, session, metrics ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -274,10 +274,10 @@ def _compute_query_metrics( runner._session.get_bind().dialect.name != Dialects.Druid ): - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" 
handle_query_exception(msg, exc, session) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -310,10 +310,10 @@ def _compute_window_metrics( return dict(row) except ProgrammingError as exc: logger.info( - f"Skipping metrics for {runner.table.name}.{column.name} due to {exc}" + f"Skipping metrics for {runner.table_name}.{column.name} due to {exc}" ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -347,7 +347,7 @@ def _compute_custom_metrics( ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{metric.columnName}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{metric.columnName}: {exc}" logger.debug(traceback.format_exc()) logger.warning(msg) if custom_metrics: @@ -371,8 +371,8 @@ def _compute_system_metrics( Returns: dictionnary of results """ - logger.debug(f"Computing system metrics for {runner.table.name}") - return self.system_metrics_computer.get_system_metrics(table=runner.table) + logger.debug(f"Computing system metrics for {runner.table_name}") + return self.system_metrics_computer.get_system_metrics(table=runner.dataset) def _create_thread_safe_runner(self, session, column=None): """Create thread safe runner""" @@ -380,6 +380,7 @@ def _create_thread_safe_runner(self, session, column=None): thread_local.runner = QueryRunner( session=session, dataset=self.sampler.get_dataset(column=column), + raw_dataset=self.sampler.raw_dataset, partition_details=self.sampler.partition_details, profile_sample_query=self.sampler.sample_query, ) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py index 7c032e651b63..26e4b3174566 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py @@ -76,11 +76,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError: logger.info( - f"Skipping window metrics for {runner.table.name}.{column.name} due to overflow" + f"Skipping window metrics for {runner.table_name}.{column.name} due to overflow" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py index 464fbdd88a4e..61df2a3c7a6f 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py @@ -41,7 +41,7 @@ def _programming_error_static_metric(self, runner, column, exc, session, metrics session.bind.dialect.name ): logger.info( - f"Computing metrics without sum for {runner.table.name}.{column.name}" + f"Computing metrics without 
sum for {runner.table_name}.{column.name}" ) return self._compute_static_metrics_wo_sum(metrics, runner, session, column) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py index c949098d974c..c0f570eb0e32 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py @@ -79,8 +79,8 @@ def _compute_static_metrics( list, partition(self.is_statistic_metric, metrics), ) - schema = runner.table.schema - table_name = runner.table.name + schema = runner.schema_name + table_name = runner.table_name logger.debug( "Getting statistics for column: %s.%s.%s", schema, @@ -118,8 +118,8 @@ def _compute_table_metrics( list, partition(self.is_statistic_metric, metrics), ) - schema = runner.table.schema - table_name = runner.table.name + schema = runner.schema_name + table_name = runner.table_name logger.debug("Geting statistics for table: %s.%s", schema, table_name) result.update( super().get_table_statistics(stat_metrics, schema, table_name) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py index b7c387a748f4..a45b1bf512be 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py @@ -76,11 +76,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError as err: logger.info( - f"Skipping window metrics for {runner.table.name}.{column.name} due to {err}" + f"Skipping window metrics for {runner.table_name}.{column.name} due to {err}" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py b/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py index 5f8a69cf2904..6a6a02188228 100644 --- a/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py +++ b/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py @@ -53,7 +53,7 @@ def __init__( self._metrics = metrics self._conn_config = conn_config self._database = self._runner._session.get_bind().url.database - self._table = self._runner.table + self._table = self._runner.dataset self._entity = entity @property diff --git a/ingestion/src/metadata/profiler/processor/runner.py b/ingestion/src/metadata/profiler/processor/runner.py index 3098a26083e1..8f80108de624 100644 --- a/ingestion/src/metadata/profiler/processor/runner.py +++ b/ingestion/src/metadata/profiler/processor/runner.py @@ -44,6 +44,7 @@ def __init__( self, session: Session, dataset: Union[DeclarativeMeta, AliasedClass], + raw_dataset: Table, partition_details: Optional[Dict] = None, profile_sample_query: Optional[str] = None, ): @@ -51,11 +52,12 @@ def __init__( self._dataset = dataset self.partition_details = partition_details self.profile_sample_query = profile_sample_query + self.raw_dataset = raw_dataset @property def table(self) -> Table: """Backward compatibility table attribute access""" - return self._dataset.__table__ + return 
self.raw_dataset @property def _sample(self): @@ -71,6 +73,16 @@ def dataset(self): def dataset(self, dataset): self._dataset = dataset + @property + def table_name(self): + """Table name attribute access""" + return self.raw_dataset.__table__.name + + @property + def schema_name(self): + """Schema name attribute access""" + return self.raw_dataset.__table__.schema + def _build_query(self, *entities, **kwargs) -> Query: """Build query object diff --git a/ingestion/src/metadata/sampler/sqlalchemy/sampler.py b/ingestion/src/metadata/sampler/sqlalchemy/sampler.py index 3cb7fd62944d..0fa6a3d68bdc 100644 --- a/ingestion/src/metadata/sampler/sqlalchemy/sampler.py +++ b/ingestion/src/metadata/sampler/sqlalchemy/sampler.py @@ -16,7 +16,7 @@ from typing import List, Optional, Union, cast from sqlalchemy import Column, inspect, text -from sqlalchemy.orm import DeclarativeMeta, Query, aliased +from sqlalchemy.orm import DeclarativeMeta, Query from sqlalchemy.orm.util import AliasedClass from sqlalchemy.schema import Table from sqlalchemy.sql.sqltypes import Enum @@ -145,13 +145,12 @@ def get_dataset(self, column=None, **__) -> Union[DeclarativeMeta, AliasedClass] and self.sample_config.profile_sample_type == ProfileSampleType.PERCENTAGE ): if self.partition_details: - return self._partitioned_table() + partitioned = self._partitioned_table() + return partitioned.cte(f"{self.raw_dataset.__tablename__}_partitioned") return self.raw_dataset - sampled = self.get_sample_query(column=column) - - return aliased(self.raw_dataset, sampled) + return self.get_sample_query(column=column) def fetch_sample_data(self, columns: Optional[List[Column]] = None) -> TableData: """ @@ -230,7 +229,7 @@ def _rdn_sample_from_user_query(self) -> Query: def _partitioned_table(self) -> Query: """Return the Query object for partitioned tables""" - return aliased(self.raw_dataset, self.get_partitioned_query().subquery()) + return self.get_partitioned_query() def get_partitioned_query(self, query=None) -> Query: """Return the partitioned query""" diff --git a/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py b/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py index 738d4985fe49..880cc6ed94fe 100644 --- a/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py +++ b/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py @@ -94,7 +94,9 @@ def setUpClass(cls) -> None: ) cls.dataset = sampler.get_dataset() - cls.raw_runner = QueryRunner(session=cls.session, dataset=cls.dataset) + cls.raw_runner = QueryRunner( + session=cls.session, dataset=cls.dataset, raw_dataset=sampler.raw_dataset ) cls.timeout_runner: Timer = cls_timeout(1)(Timer()) # Insert 30 rows diff --git a/ingestion/tests/unit/test_dbt.py b/ingestion/tests/unit/test_dbt.py index b04d69c5fc5a..1c2db6edc0a2 100644 --- a/ingestion/tests/unit/test_dbt.py +++ b/ingestion/tests/unit/test_dbt.py @@ -51,6 +51,7 @@ "dbtCatalogFilePath": "sample/dbt_files/catalog.json", "dbtManifestFilePath": "sample/dbt_files/manifest.json", "dbtRunResultsFilePath": "sample/dbt_files/run_results.json", + "dbtSourcesFilePath": "sample/dbt_files/sources.json", }, } }, @@ -682,7 +683,7 @@ def check_yield_datamodel(self, dbt_objects, expected_data_models): self.assertEqual(expected, original) @patch("metadata.ingestion.ometa.mixins.es_mixin.ESMixin.es_search_from_fqn") - def test_updtream_nodes_for_lineage(self, es_search_from_fqn): expected_upstream_nodes = [ "model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", + def test_upstream_nodes_for_lineage(self, es_search_from_fqn): expected_upstream_nodes = [ "model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders",
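For readers following the dbt changes above: the new `dbtSourcesFilePath` option feeds a `sources.json` artifact into `DbtFiles`/`DbtObjects`, freshness results are matched on `unique_id`, and the `warn_after`/`error_after` thresholds become test case parameters. The sketch below is a minimal, self-contained illustration of those data shapes, not the connector's actual models; the field names follow the attribute accesses in this diff, while the concrete values and the `source.jaffle_shop.*` id are made up.

```python
from dataclasses import dataclass


# Illustrative stand-ins for the parsed manifest/sources objects used above.
# Field names mirror the accesses in dbt_utils.py and metadata.py
# (freshness.warn_after.count, .period.value, results matched on unique_id);
# the values themselves are examples only.
@dataclass
class Period:
    value: str


@dataclass
class Threshold:
    count: int
    period: Period


@dataclass
class Freshness:
    warn_after: Threshold
    error_after: Threshold


freshness = Freshness(
    warn_after=Threshold(count=12, period=Period("hour")),
    error_after=Threshold(count=24, period=Period("hour")),
)

# Same mapping create_test_case_parameter_values() applies to a source node
# that carries a freshness config.
test_case_param_values = [
    {
        "name": "error_after",
        "value": f"{freshness.error_after.count} {freshness.error_after.period.value}",
    },
    {
        "name": "warn_after",
        "value": f"{freshness.warn_after.count} {freshness.warn_after.period.value}",
    },
]

# A sources.json result entry is matched on unique_id and registered under a
# key suffixed with "_freshness" (see _add_dbt_freshness_test_from_sources).
source_result = {"unique_id": "source.jaffle_shop.jaffle_shop.orders", "status": "pass"}
test_key = source_result["unique_id"] + "_freshness"

print(test_key)
print(test_case_param_values)
```

Running the sketch prints the `_freshness`-suffixed key and the two parameter values that would be attached to the generated test case.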
diff --git a/ingestion/tests/unit/topology/dashboard/test_microstrategy.py b/ingestion/tests/unit/topology/dashboard/test_microstrategy.py new file mode 100644 index 000000000000..5af80363c50e --- /dev/null +++ b/ingestion/tests/unit/topology/dashboard/test_microstrategy.py @@ -0,0 +1,127 @@ +# Copyright 2021 Collate +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Test Microstrategy using the topology +""" +from datetime import datetime +from types import SimpleNamespace +from unittest import TestCase +from unittest.mock import patch + +from metadata.generated.schema.metadataIngestion.workflow import ( + OpenMetadataWorkflowConfig, +) +from metadata.ingestion.ometa.ometa_api import OpenMetadata +from metadata.ingestion.source.dashboard.microstrategy.metadata import ( + MicrostrategySource, +) +from metadata.ingestion.source.dashboard.microstrategy.models import ( + MstrDashboard, + MstrOwner, + MstrProject, +) + +mock_micro_config = { + "source": { + "type": "microstrategy", + "serviceName": "local_stitch_test", + "serviceConnection": { + "config": { + "type": "MicroStrategy", + "hostPort": "https://demo.microstrategy.com", + "username": "username", + "password": "password", + } + }, + "sourceConfig": {"config": {"type": "DashboardMetadata"}}, + }, + "sink": {"type": "metadata-rest", "config": {}}, + "workflowConfig": { + "loggerLevel": "DEBUG", + "openMetadataServerConfig": { + "hostPort": "http://localhost:8585/api", + "authProvider": "openmetadata", + "securityConfig": { + "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg" + }, + }, + }, +} + +MOCK_PROJECT_LIST = [ + MstrProject( + acg=5, + id="B7CA92F04B9FAE8D941C3E9B7E0CD754", + name="MicroStrategy Tutorial", + status=0, + alias="", + description="fun", + dateCreated=datetime(2015, 6, 30, 21, 55, 35), + dateModified=datetime(2024, 10, 1, 21, 42, 50), + owner=MstrOwner(name="Administrator", id="54F3D26011D2896560009A8E67019608"), + ) +] + +MOCK_DASHBORD_LIST = [ + MstrDashboard( + name="Library of Demos", + id="925FB4A311EA52FF3EA80080EF059105", + type=55, + description="abc", + subtype=14081, + dateCreated="2020-02-19T10:07:01.000+0000", + dateModified="2024-11-06T14:14:42.000+0000", + version="3E367000E84DD4AA9B501EAD892EB2E1", + acg=199, + owner=MstrOwner(name="Administrator", id="54F3D26011D2896560009A8E67019608"), + extType=0, + viewMedia=1879072805, + certifiedInfo={"certified": False}, + templateInfo={"template": False, "lastModifiedBy": {}}, + projectId="EC70648611E7A2F962E90080EFD58751", + 
projectName="MicroStrategy Tutorial", + ) +] + + +class MicroStrategyUnitTest(TestCase): + """ + Implements the necessary methods to extract + MicroStrategy Unit Testtest_dbt + """ + + @patch( + "metadata.ingestion.source.dashboard.microstrategy.metadata.MicrostrategySource.test_connection" + ) + @patch( + "metadata.ingestion.source.dashboard.microstrategy.connection.get_connection" + ) + def __init__(self, methodName, get_connection, test_connection) -> None: + super().__init__(methodName) + test_connection.return_value = False + get_connection.return_value = False + self.config = OpenMetadataWorkflowConfig.model_validate(mock_micro_config) + self.microstrategy = MicrostrategySource.create( + mock_micro_config["source"], + OpenMetadata(self.config.workflowConfig.openMetadataServerConfig), + ) + self.microstrategy.client = SimpleNamespace() + + def test_get_dashboards_list(self): + """ + Get the dashboards + """ + self.microstrategy.client.is_project_name = lambda *_: False + self.microstrategy.client.get_projects_list = lambda *_: MOCK_PROJECT_LIST + self.microstrategy.client.get_dashboards_list = lambda *_: MOCK_DASHBORD_LIST + fetched_dashboards_list = self.microstrategy.get_dashboards_list() + self.assertEqual(list(fetched_dashboards_list), MOCK_DASHBORD_LIST) diff --git a/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/index.md b/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/index.md index dd67e19c9da1..dad15758364b 100644 --- a/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/index.md +++ b/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/index.md @@ -24,6 +24,16 @@ Configure and schedule MicroStrategy metadata and profiler workflows from the Op To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or higher. +When a service user is created, it is already provisioned with the necessary permissions. +However, if the user still cannot access the APIs, the following should be checked as part of the troubleshooting process: +- Required DSS Privileges for MicroStrategy REST/JSON API: +- Web Services API: Essential for REST API usage. +- Login to MicroStrategy: User authentication. +- Use Project Sources: Access to project sources. +- View Metadata: Metadata browsing and viewing. +- Access Administration Objects: Global metadata access (connections, DB instances). +- Browse Repository: Object navigation within projects/folders. + ## Metadata Ingestion {% partial @@ -41,16 +51,18 @@ To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or h #### Connection Details -- **Username**: Username to connect to Mstr, e.g., user@organization.com. This user should have access to relevant dashboards and charts in Mstr to fetch the metadata. +- **Username**: Username to connect to MicroStrategy, e.g., user@organization.com. This user should have access to relevant dashboards and charts in MicroStrategy to fetch the metadata. -- **Password**: Password of the user account to connect with Mstr. +- **Password**: Password of the user account to connect with MicroStrategy. -- **Host Port**: This parameter specifies the host and port of the Mstr instance. This should be specified as a URI string in the format http://hostname:port or https://hostname:port. +- **Host Port**: This parameter specifies the host of the MicroStrategy instance. This should be specified as a URI string in the format http://hostname or https://hostname. -For example, you might set it to https://org.mstr.com:3000. 
+For example, you might set it to https://demo.microstrategy.com. -- **Project Name**: The name of the project within Mstr that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. +- **Project Name**: The name of the project within MicroStrategy that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. +- **Login Mode**: Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. Default will be `Standard (1)`. +If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`. {% /extraContent %} {% partial file="/v1.5/connectors/test-connection.md" /%} diff --git a/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/yaml.md b/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/yaml.md index 539122b62247..bbbf138c0197 100644 --- a/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/yaml.md +++ b/openmetadata-docs/content/v1.5.x/connectors/dashboard/microstrategy/yaml.md @@ -31,7 +31,7 @@ To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or h To run the MicroStrategy ingestion, you will need to install: ```bash -pip3 install "openmetadata-ingestion[mstr]" +pip3 install "openmetadata-ingestion[microstrategy]" ``` ## Metadata Ingestion @@ -59,27 +59,34 @@ This is a sample config for MicroStrategy: {% codeInfo srNumber=1 %} -- **Username**: Username to connect to Mstr, e.g., user@organization.com. This user should have access to relevant dashboards and charts in Mstr to fetch the metadata. +- **Username**: Username to connect to MicroStrategy, e.g., user@organization.com. This user should have access to relevant dashboards and charts in MicroStrategy to fetch the metadata. {% /codeInfo %} {% codeInfo srNumber=2 %} -- **Password**: Password of the user account to connect with Mstr. +- **Password**: Password of the user account to connect with MicroStrategy. {% /codeInfo %} {% codeInfo srNumber=3 %} -- **Host Port**: This parameter specifies the host and port of the Mstr instance. This should be specified as a URI string in the format http://hostname:port or https://hostname:port. +- **Host Port**: This parameter specifies the host of the MicroStrategy instance. This should be specified as a URI string in the format http://hostname or https://hostname. -For example, you might set it to https://org.mstr.com:3000. +For example, you might set it to https://demo.microstrategy.com. {% /codeInfo %} {% codeInfo srNumber=4 %} -- **Project Name**: The name of the project within Mstr that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. +- **Project Name**: The name of the project within MicroStrategy that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. + +{% /codeInfo %} + +{% codeInfo srNumber=5 %} + +- **Login Mode**: Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. Default will be `Standard (1)`. +If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`. {% /codeInfo %} @@ -95,11 +102,11 @@ For example, you might set it to https://org.mstr.com:3000. 
```yaml {% isCodeBlock=true %} source: - type: mstr + type: microstrategy serviceName: local_Mstr serviceConnection: config: - type: Mstr + type: MicroStrategy ``` ```yaml {% srNumber=1 %} username: username @@ -113,6 +120,9 @@ source: ```yaml {% srNumber=4 %} projectName: project ``` +```yaml {% srNumber=5 %} + loginMode: "1" +``` {% partial file="/v1.5/connectors/yaml/dashboard/source-config.md" /%} diff --git a/openmetadata-docs/content/v1.5.x/sdk/python/ingestion/lineage.md b/openmetadata-docs/content/v1.5.x/sdk/python/ingestion/lineage.md index 59b15e969ce1..d4a895bacf99 100644 --- a/openmetadata-docs/content/v1.5.x/sdk/python/ingestion/lineage.md +++ b/openmetadata-docs/content/v1.5.x/sdk/python/ingestion/lineage.md @@ -160,6 +160,7 @@ With everything prepared, we can now create the Lineage between both Entities. A represents the edge between two Entities, typed under `EntitiesEdge`. ```python +from metadata.generated.schema.type.entityReference import EntityReference from metadata.generated.schema.api.lineage.addLineage import AddLineageRequest from metadata.generated.schema.type.entityLineage import EntitiesEdge @@ -346,6 +347,7 @@ To prepare this example, we need to start by creating the Pipeline Entity. Again to prepare the Pipeline Service: ```python +from metadata.generated.schema.type.entityReference import EntityReference from metadata.generated.schema.api.data.createPipeline import CreatePipelineRequest from metadata.generated.schema.api.services.createPipelineService import ( CreatePipelineServiceRequest, diff --git a/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/index.md b/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/index.md index 0700370930c7..197dbcabb870 100644 --- a/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/index.md +++ b/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/index.md @@ -24,6 +24,16 @@ Configure and schedule MicroStrategy metadata and profiler workflows from the Op To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or higher. +When a service user is created, it is already provisioned with the necessary permissions. +However, if the user still cannot access the APIs, the following should be checked as part of the troubleshooting process: +- Required DSS Privileges for MicroStrategy REST/JSON API: +- Web Services API: Essential for REST API usage. +- Login to MicroStrategy: User authentication. +- Use Project Sources: Access to project sources. +- View Metadata: Metadata browsing and viewing. +- Access Administration Objects: Global metadata access (connections, DB instances). +- Browse Repository: Object navigation within projects/folders. + ## Metadata Ingestion {% partial @@ -41,15 +51,18 @@ To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or h #### Connection Details -- **Username**: Username to connect to Mstr, e.g., user@organization.com. This user should have access to relevant dashboards and charts in Mstr to fetch the metadata. +- **Username**: Username to connect to MicroStrategy, e.g., user@organization.com. This user should have access to relevant dashboards and charts in MicroStrategy to fetch the metadata. + +- **Password**: Password of the user account to connect with MicroStrategy. -- **Password**: Password of the user account to connect with Mstr. +- **Host Port**: This parameter specifies the host of the MicroStrategy instance. 
This should be specified as a URI string in the format http://hostname or https://hostname. -- **Host Port**: This parameter specifies the host and port of the Mstr instance. This should be specified as a URI string in the format http://hostname:port or https://hostname:port. +For example, you might set it to https://demo.microstrategy.com. -For example, you might set it to https://org.mstr.com:3000. +- **Project Name**: The name of the project within MicroStrategy that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. -- **Project Name**: The name of the project within Mstr that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. +- **Login Mode**: Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. Default will be `Standard (1)`. +If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`. {% /extraContent %} diff --git a/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/yaml.md b/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/yaml.md index bd159630e6ea..d01482d50299 100644 --- a/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/yaml.md +++ b/openmetadata-docs/content/v1.6.x-SNAPSHOT/connectors/dashboard/microstrategy/yaml.md @@ -31,7 +31,7 @@ To integrate MicroStrategy, ensure you are using OpenMetadata version 1.2.x or h To run the MicroStrategy ingestion, you will need to install: ```bash -pip3 install "openmetadata-ingestion[mstr]" +pip3 install "openmetadata-ingestion[microstrategy]" ``` ## Metadata Ingestion @@ -59,27 +59,34 @@ This is a sample config for MicroStrategy: {% codeInfo srNumber=1 %} -- **Username**: Username to connect to Mstr, e.g., user@organization.com. This user should have access to relevant dashboards and charts in Mstr to fetch the metadata. +- **Username**: Username to connect to MicroStrategy, e.g., user@organization.com. This user should have access to relevant dashboards and charts in MicroStrategy to fetch the metadata. {% /codeInfo %} {% codeInfo srNumber=2 %} -- **Password**: Password of the user account to connect with Mstr. +- **Password**: Password of the user account to connect with MicroStrategy. {% /codeInfo %} {% codeInfo srNumber=3 %} -- **Host Port**: This parameter specifies the host and port of the Mstr instance. This should be specified as a URI string in the format http://hostname:port or https://hostname:port. +- **Host Port**: This parameter specifies the host of the MicroStrategy instance. This should be specified as a URI string in the format http://hostname or https://hostname. -For example, you might set it to https://org.mstr.com:3000. +For example, you might set it to https://demo.microstrategy.com. {% /codeInfo %} {% codeInfo srNumber=4 %} -- **Project Name**: The name of the project within Mstr that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. +- **Project Name**: The name of the project within MicroStrategy that OpenMetadata will connect to, linking to the relevant dashboards and reports for metadata retrieval. + +{% /codeInfo %} + +{% codeInfo srNumber=5 %} + +- **Login Mode**: Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. 
Default will be `Standard (1)`. +If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`. {% /codeInfo %} @@ -95,11 +102,11 @@ For example, you might set it to https://org.mstr.com:3000. ```yaml {% isCodeBlock=true %} source: - type: mstr + type: microstrategy serviceName: local_Mstr serviceConnection: config: - type: Mstr + type: MicroStrategy ``` ```yaml {% srNumber=1 %} username: username @@ -113,6 +120,9 @@ source: ```yaml {% srNumber=4 %} projectName: project ``` +```yaml {% srNumber=5 %} + loginMode: "1" +``` {% partial file="/v1.6/connectors/yaml/dashboard/source-config.md" /%} diff --git a/openmetadata-docs/content/v1.6.x-SNAPSHOT/sdk/python/ingestion/lineage.md b/openmetadata-docs/content/v1.6.x-SNAPSHOT/sdk/python/ingestion/lineage.md index 59b15e969ce1..d4a895bacf99 100644 --- a/openmetadata-docs/content/v1.6.x-SNAPSHOT/sdk/python/ingestion/lineage.md +++ b/openmetadata-docs/content/v1.6.x-SNAPSHOT/sdk/python/ingestion/lineage.md @@ -160,6 +160,7 @@ With everything prepared, we can now create the Lineage between both Entities. A represents the edge between two Entities, typed under `EntitiesEdge`. ```python +from metadata.generated.schema.type.entityReference import EntityReference from metadata.generated.schema.api.lineage.addLineage import AddLineageRequest from metadata.generated.schema.type.entityLineage import EntitiesEdge @@ -346,6 +347,7 @@ To prepare this example, we need to start by creating the Pipeline Entity. Again to prepare the Pipeline Service: ```python +from metadata.generated.schema.type.entityReference import EntityReference from metadata.generated.schema.api.data.createPipeline import CreatePipelineRequest from metadata.generated.schema.api.services.createPipelineService import ( CreatePipelineServiceRequest, diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java index 53259cf8e320..21a88f23d942 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java @@ -1135,7 +1135,6 @@ private App getApplication( // validate Bot if provided validateAndAddBot(app, createAppRequest.getBot()); - return app; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java index ceecb6265c7a..6e7bed98aa5e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java @@ -37,6 +37,7 @@ import org.openmetadata.schema.type.api.BulkOperationResult; import org.openmetadata.schema.type.csv.CsvImportResult; import org.openmetadata.service.Entity; +import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.resources.feeds.MessageParser; import org.openmetadata.service.socket.WebSocketManager; @@ -65,8 +66,10 @@ public static void sendCsvExportCompleteNotification( CSVExportMessage message = new CSVExportMessage(jobId, "COMPLETED", csvData, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - 
WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + } } public static void bulkAssetsOperationCompleteNotification( @@ -75,8 +78,10 @@ public static void bulkAssetsOperationCompleteNotification( new BulkAssetsOperationMessage(jobId, "COMPLETED", result, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + } } public static void bulkAssetsOperationFailedNotification( @@ -84,8 +89,10 @@ public static void bulkAssetsOperationFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + } } private void handleNotifications(ContainerResponseContext responseContext) { @@ -186,14 +193,24 @@ public static void sendCsvExportFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + } } private static UUID getUserIdFromSecurityContext(SecurityContext securityContext) { - String username = securityContext.getUserPrincipal().getName(); - User user = Entity.getCollectionDAO().userDAO().findEntityByName(username); - return user.getId(); + try { + String username = securityContext.getUserPrincipal().getName(); + User user = + Entity.getCollectionDAO() + .userDAO() + .findEntityByName(FullyQualifiedName.quoteName(username)); + return user.getId(); + } catch (EntityNotFoundException e) { + LOG.error("User not found ", e); + } + return null; } public static void sendCsvImportCompleteNotification( @@ -201,8 +218,10 @@ public static void sendCsvImportCompleteNotification( CSVImportMessage message = new CSVImportMessage(jobId, "COMPLETED", result, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + } } public static void sendCsvImportFailedNotification( @@ -210,7 +229,9 @@ public static void sendCsvImportFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + if (userId != null) { + 
WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + } } } diff --git a/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json b/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json index 92687fed3271..8e7c7e4a83d1 100644 --- a/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json +++ b/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json @@ -395,6 +395,18 @@ } ], "supportedActions" : [ + { + "name": "GetTestSuiteStatusUpdates", + "fullyQualifiedName": "eventSubscription.GetTestSuiteStatusUpdates", + "displayName": "Get Test Suite Status Updates", + "description" : "Get Status Updates Test Suite", + "effect" : "include", + "condition": "matchTestResult(${testResultList})", + "arguments": [ + "testResultList" + ], + "inputType": "runtime" + } ] } ] \ No newline at end of file diff --git a/openmetadata-service/src/main/resources/json/data/testConnections/dashboard/mstr.json b/openmetadata-service/src/main/resources/json/data/testConnections/dashboard/microstrategy.json similarity index 85% rename from openmetadata-service/src/main/resources/json/data/testConnections/dashboard/mstr.json rename to openmetadata-service/src/main/resources/json/data/testConnections/dashboard/microstrategy.json index b6e5a2a25e58..203fa19caceb 100644 --- a/openmetadata-service/src/main/resources/json/data/testConnections/dashboard/mstr.json +++ b/openmetadata-service/src/main/resources/json/data/testConnections/dashboard/microstrategy.json @@ -1,6 +1,6 @@ { - "name": "Mstr", - "displayName": "Mstr Test Connection", + "name": "MicroStrategy", + "displayName": "MicroStrategy Test Connection", "description": "This Test Connection validates the access against the server and basic metadata extraction of dashboards and charts.", "steps": [ { diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json b/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json index 9cf8a79cb389..5d1093341f52 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json @@ -9,12 +9,14 @@ "userToken": { "title": "User Token", "description": "User Token", - "type": "string" + "type": "string", + "format": "password" }, "botToken": { "title": "Bot Token", "description": "Bot Token", - "type": "string" + "type": "string", + "format": "password" } }, "additionalProperties": false, diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/microStrategyConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/microStrategyConnection.json new file mode 100644 index 000000000000..dd423785f4af --- /dev/null +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/microStrategyConnection.json @@ -0,0 +1,59 @@ +{ + "$id": "https://open-metadata.org/schema/entity/services/connections/dashboard/microStrategyConnection.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MicroStrategyConnection", + "description": "MicroStrategy Connection Config", + "type": "object", + 
"javaType": "org.openmetadata.schema.services.connections.dashboard.MicroStrategyConnection", + "definitions": { + "microStrategyType": { + "description": "MicroStrategy service type", + "type": "string", + "enum": ["MicroStrategy"], + "default": "MicroStrategy" + } + }, + "properties": { + "type": { + "title": "Service Type", + "description": "Service Type", + "$ref": "#/definitions/microStrategyType", + "default": "MicroStrategy" + }, + "username": { + "title": "Username", + "description": "Username to connect to MicroStrategy. This user should have privileges to read all the metadata in MicroStrategy.", + "type": "string" + }, + "password": { + "title": "Password", + "description": "Password to connect to MicroStrategy.", + "type": "string", + "format": "password" + }, + "hostPort": { + "expose": true, + "title": "Host and Port", + "description": "Host and Port of the MicroStrategy instance.", + "type": "string", + "format": "uri" + }, + "projectName": { + "title": "Project Name", + "description": "MicroStrategy Project Name", + "type": "string" + }, + "loginMode": { + "title": "Login Mode", + "description": "Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. Default will be `Standard (1)`. If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`.", + "type": "string", + "default": "1" + }, + "supportsMetadataExtraction": { + "title": "Supports Metadata Extraction", + "$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction" + } + }, + "additionalProperties": false, + "required": ["hostPort", "username", "password"] +} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/mstrConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/mstrConnection.json deleted file mode 100644 index 66355ee7fa0f..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/mstrConnection.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/services/connections/dashboard/mstrConnection.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "MstrConnection", - "description": "Mstr Connection Config", - "type": "object", - "javaType": "org.openmetadata.schema.services.connections.dashboard.MstrConnection", - "definitions": { - "mstrType": { - "description": "Mstr service type", - "type": "string", - "enum": ["Mstr"], - "default": "Mstr" - } - }, - "properties": { - "type": { - "title": "Service Type", - "description": "Service Type", - "$ref": "#/definitions/mstrType", - "default": "Mstr" - }, - "username": { - "title": "Username", - "description": "Username to connect to MSTR. 
This user should have privileges to read all the metadata in MSTR.", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password to connect to MSTR.", - "type": "string", - "format": "password" - }, - "hostPort": { - "expose": true, - "title": "Host and Port", - "description": "Host and Port of the Metabase instance.", - "type": "string", - "format": "uri" - }, - "projectName": { - "title": "Project Name", - "description": "MSTR Project Name", - "type": "string" - }, - "supportsMetadataExtraction": { - "title": "Supports Metadata Extraction", - "$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction" - } - }, - "additionalProperties": false, - "required": ["hostPort", "username"] -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json index 04986503a090..8bb69c03ffde 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json @@ -28,7 +28,7 @@ "QuickSight", "QlikSense", "Lightdash", - "Mstr", + "MicroStrategy", "QlikCloud", "Sigma" ], @@ -73,7 +73,7 @@ "name": "Lightdash" }, { - "name": "Mstr" + "name": "MicroStrategy" }, { "name": "QlikCloud" @@ -134,7 +134,7 @@ "$ref": "./connections/dashboard/lightdashConnection.json" }, { - "$ref": "./connections/dashboard/mstrConnection.json" + "$ref": "./connections/dashboard/microStrategyConnection.json" }, { "$ref": "./connections/dashboard/qlikCloudConnection.json" diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json index 179573b67ecc..7da25a515369 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json @@ -26,6 +26,11 @@ "title": "DBT Run Results HTTP File Path", "description": "DBT run results http file path to extract the test results information.", "type": "string" + }, + "dbtSourcesHttpPath": { + "title": "DBT Sources HTTP File Path", + "description": "DBT sources http file path to extract freshness test results information.", + "type": "string" } }, "additionalProperties": false, diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json index 171b2a675f8e..94ffc2cf1774 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json @@ -26,6 +26,11 @@ "title": "DBT Run Results File Path", "description": "DBT run results file path to extract the test results information.", "type": "string" + }, + "dbtSourcesFilePath": { + "title": "DBT Sources File Path", + "description": "DBT sources file path to extract the freshness test result.", + "type": "string" } }, "additionalProperties": false, diff --git a/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts b/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts index f8f6515de1c5..b4dccdce8030 100644 --- 
a/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts @@ -62,6 +62,48 @@ export const DATA_STEWARD_RULES: PolicyRulesType[] = [ }, ]; +export const DATA_CONSUMER_RULES: PolicyRulesType[] = [ + { + name: 'DataConsumerPolicy-EditRule', + resources: ['All'], + operations: [ + 'EditDescription', + 'EditGlossaryTerms', + 'EditTags', + 'EditTier', + 'ViewAll', + ], + effect: 'allow', + }, +]; + +export const ORGANIZATION_POLICY_RULES: PolicyRulesType[] = [ + { + name: 'OrganizationPolicy-NoOwner-Rule', + description: + 'Allow any one to set the owner of an entity that has no owner set.', + effect: 'allow', + operations: ['EditOwners'], + resources: ['All'], + condition: 'noOwner()', + }, + { + name: 'OrganizationPolicy-Owner-Rule', + description: 'Allow all the operations on an entity for the owner.', + effect: 'allow', + operations: ['All'], + resources: ['All'], + condition: 'isOwner()', + }, + { + name: 'OrganizationPolicy-ViewAll-Rule', + description: 'Allow all users to discover data assets.', + effect: 'allow', + operations: ['ViewAll'], + resources: ['All'], + }, +]; + export const GLOBAL_SETTING_PERMISSIONS: Record< string, { testid: GlobalSettingOptions; isCustomProperty?: boolean } diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts index 8894062b2f7e..fc6b43f36141 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts @@ -10,24 +10,39 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { test as setup } from '@playwright/test'; +import { Page, test as setup } from '@playwright/test'; import { JWT_EXPIRY_TIME_MAP } from '../constant/login'; import { AdminClass } from '../support/user/AdminClass'; import { getApiContext } from '../utils/common'; import { updateJWTTokenExpiryTime } from '../utils/login'; +import { + updateDefaultDataConsumerPolicy, + updateDefaultOrganizationPolicy, +} from '../utils/permission'; import { removeOrganizationPolicyAndRole } from '../utils/team'; const adminFile = 'playwright/.auth/admin.json'; +const initialSetup = async (page: Page) => { + const { apiContext, afterAction } = await getApiContext(page); + // Update JWT expiry time to 4 hours + await updateJWTTokenExpiryTime(apiContext, JWT_EXPIRY_TIME_MAP['4 hours']); + // Remove organization policy and role + await removeOrganizationPolicyAndRole(apiContext); + // update default Organization policy + await updateDefaultOrganizationPolicy(apiContext); + // update default Data consumer policy + await updateDefaultDataConsumerPolicy(apiContext); + + await afterAction(); +}; + setup('authenticate as admin', async ({ page }) => { const admin = new AdminClass(); // login with admin user await admin.login(page); await page.waitForURL('**/my-data'); - const { apiContext, afterAction } = await getApiContext(page); - await updateJWTTokenExpiryTime(apiContext, JWT_EXPIRY_TIME_MAP['4 hours']); - await removeOrganizationPolicyAndRole(apiContext); - await afterAction(); + await initialSetup(page); await admin.logout(page); await page.waitForURL('**/signin'); await admin.login(page); diff --git a/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts 
b/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts index 08853690bcf0..abf90b38e9d6 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts @@ -27,6 +27,8 @@ export type PolicyRulesType = { resources: string[]; operations: string[]; effect: string; + description?: string; + condition?: string; }; export class PolicyClass { diff --git a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts index 4de563f6313f..540851881eaa 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts @@ -170,7 +170,7 @@ class ServiceBaseClass { // Header available once page loads await page.waitForSelector('[data-testid="data-assets-header"]'); - await page.getByTestId('loader').waitFor({ state: 'detached' }); + await page.getByTestId('loader').first().waitFor({ state: 'detached' }); await page.getByTestId('ingestions').click(); await page .getByLabel('Ingestions') diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts index ef68c74b5a88..a93ba4a4b165 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts @@ -321,7 +321,18 @@ export const getObservabilityCreationDetails = ({ exclude: false, }, ], - actions: [], + actions: [ + { + name: 'Get Test Suite Status Updates', + inputs: [ + { + inputSelector: 'test-result-select', + inputValue: 'Failed', + }, + ], + exclude: false, + }, + ], destinations: [ { mode: 'external', diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts index 26fabc27009a..e37b9ea0d801 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts @@ -10,7 +10,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { expect, Page } from '@playwright/test'; +import { APIRequestContext, expect, Page } from '@playwright/test'; +import { + DATA_CONSUMER_RULES, + ORGANIZATION_POLICY_RULES, +} from '../constant/permission'; export const checkNoPermissionPlaceholder = async ( page: Page, @@ -117,3 +121,45 @@ export const validateViewPermissions = async ( await page.waitForLoadState('domcontentloaded'); await checkNoPermissionPlaceholder(page, /Custom Properties/); }; + +export const updateDefaultDataConsumerPolicy = async ( + apiContext: APIRequestContext +) => { + const dataConsumerRoleResponse = await apiContext + .get('/api/v1/policies/name/DataConsumerPolicy') + .then((response) => response.json()); + + await apiContext.patch(`/api/v1/policies/${dataConsumerRoleResponse.id}`, { + data: [ + { + op: 'replace', + path: '/rules', + value: DATA_CONSUMER_RULES, + }, + ], + headers: { + 'Content-Type': 'application/json-patch+json', + }, + }); +}; + +export const updateDefaultOrganizationPolicy = async ( + apiContext: APIRequestContext +) => { + const orgPolicyResponse = await apiContext + .get('/api/v1/policies/name/OrganizationPolicy') + .then((response) => response.json()); + + await apiContext.patch(`/api/v1/policies/${orgPolicyResponse.id}`, { + data: [ + { + op: 'replace', + path: '/rules', + value: ORGANIZATION_POLICY_RULES, + }, + ], + headers: { + 'Content-Type': 'application/json-patch+json', + }, + }); +}; diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/MicroStrategy.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/MicroStrategy.md new file mode 100644 index 000000000000..f8136e57d864 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/MicroStrategy.md @@ -0,0 +1,32 @@ +# MicroStrategy + +In this section, we provide guides and references to use the MicroStrategy connector. + +## Connection Details + +$$section +### Username $(id="username") + +Username to connect to MicroStrategy, e.g., `user@organization.com`. This user should have access to relevant dashboards and charts in MicroStrategy to fetch the metadata. +$$ + +$$section +### Password $(id="password") + +Password of the user account to connect with MicroStrategy. +$$ + +$$section +### Host Port $(id="hostPort") + +This parameter specifies the host of the MicroStrategy instance. This should be specified as a URI string in the format http://hostname or https://hostname. + +For example, you might set it to https://demo.microstrategy.com. +$$ + +$$section +### Login Mode $(id="loginMode") + +Login Mode for Microstrategy's REST API connection. You can authenticate with one of the following authentication modes: `Standard (1)`, `Anonymous (8)`. Default will be `Anonymous (8)`. +If you're using demo account for Microstrategy, it will be needed to authenticate through loginMode `8`. +$$ diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/Mstr.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/Mstr.md deleted file mode 100644 index 2b04f02f0e77..000000000000 --- a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Dashboard/Mstr.md +++ /dev/null @@ -1,25 +0,0 @@ -# Mstr - -In this section, we provide guides and references to use the Mstr connector. - -## Connection Details - -$$section -### Username $(id="username") - -Username to connect to Mstr, e.g., `user@organization.com`. This user should have access to relevant dashboards and charts in Mstr to fetch the metadata. 
-$$ - -$$section -### Password $(id="password") - -Password of the user account to connect with Mstr. -$$ - -$$section -### Host Port $(id="hostPort") - -This parameter specifies the host and port of the Mstr instance. This should be specified as a URI string in the format `http://hostname:port` or `https://hostname:port`. - -For example, you might set it to `https://org.mstr.com:3000`. -$$ diff --git a/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-microstrategy.svg b/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-microstrategy.svg new file mode 100644 index 000000000000..df5d5555e3ea --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-microstrategy.svg @@ -0,0 +1,11 @@ + + microstrategy-m-icon-circle-svg + + + + + + \ No newline at end of file diff --git a/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-mstr.png b/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-mstr.png deleted file mode 100644 index d453270cf371..000000000000 Binary files a/openmetadata-ui/src/main/resources/ui/src/assets/img/service-icon-mstr.png and /dev/null differ diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts index 792196cb0c30..9038eef2b3c8 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts @@ -13,4 +13,5 @@ export interface DatabaseSchemaTableProps { isDatabaseDeleted?: boolean; + isVersionPage?: boolean; } diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx index 36acb772229d..64148e9108e6 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx @@ -11,45 +11,70 @@ * limitations under the License. 
*/ import { Col, Row, Switch, Typography } from 'antd'; +import { ColumnsType } from 'antd/lib/table'; import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { t } from 'i18next'; import { isEmpty } from 'lodash'; import QueryString from 'qs'; import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useHistory } from 'react-router-dom'; import { + getEntityDetailsPath, INITIAL_PAGING_VALUE, + NO_DATA_PLACEHOLDER, PAGE_SIZE, } from '../../../../constants/constants'; -import { TabSpecificField } from '../../../../enums/entity.enum'; +import { usePermissionProvider } from '../../../../context/PermissionProvider/PermissionProvider'; +import { EntityType, TabSpecificField } from '../../../../enums/entity.enum'; import { SearchIndex } from '../../../../enums/search.enum'; import { DatabaseSchema } from '../../../../generated/entity/data/databaseSchema'; +import { EntityReference } from '../../../../generated/entity/type'; +import { UsageDetails } from '../../../../generated/type/entityUsage'; import { Include } from '../../../../generated/type/include'; import { Paging } from '../../../../generated/type/paging'; import { usePaging } from '../../../../hooks/paging/usePaging'; import useCustomLocation from '../../../../hooks/useCustomLocation/useCustomLocation'; import { useFqn } from '../../../../hooks/useFqn'; -import { getDatabaseSchemas } from '../../../../rest/databaseAPI'; +import { + getDatabaseSchemas, + patchDatabaseSchemaDetails, +} from '../../../../rest/databaseAPI'; import { searchQuery } from '../../../../rest/searchAPI'; -import { schemaTableColumns } from '../../../../utils/Database/Database.util'; +import { getEntityName } from '../../../../utils/EntityUtils'; +import { getUsagePercentile } from '../../../../utils/TableUtils'; import { showErrorToast } from '../../../../utils/ToastUtils'; +import DisplayName from '../../../common/DisplayName/DisplayName'; import ErrorPlaceHolder from '../../../common/ErrorWithPlaceholder/ErrorPlaceHolder'; import NextPrevious from '../../../common/NextPrevious/NextPrevious'; import { PagingHandlerParams } from '../../../common/NextPrevious/NextPrevious.interface'; +import RichTextEditorPreviewer from '../../../common/RichTextEditor/RichTextEditorPreviewer'; import Searchbar from '../../../common/SearchBarComponent/SearchBar.component'; import Table from '../../../common/Table/Table'; +import { EntityName } from '../../../Modals/EntityNameModal/EntityNameModal.interface'; import { DatabaseSchemaTableProps } from './DatabaseSchemaTable.interface'; export const DatabaseSchemaTable = ({ isDatabaseDeleted, + isVersionPage = false, }: Readonly) => { const { fqn: decodedDatabaseFQN } = useFqn(); const history = useHistory(); const location = useCustomLocation(); + const { permissions } = usePermissionProvider(); + const [schemas, setSchemas] = useState([]); const [isLoading, setIsLoading] = useState(true); const [showDeletedSchemas, setShowDeletedSchemas] = useState(false); + const allowEditDisplayNamePermission = useMemo(() => { + return ( + !isVersionPage && + (permissions.databaseSchema.EditAll || + permissions.databaseSchema.EditDisplayName) + ); + }, [permissions, isVersionPage]); + const searchValue = useMemo(() => { const param = location.search; const searchData = QueryString.parse( @@ -160,6 +185,98 @@ export const DatabaseSchemaTable = ({ } }; + const handleDisplayNameUpdate = useCallback( + async (data: EntityName, id?: string) => { + try { + const schemaDetails = schemas.find((schema) 
=> schema.id === id); + if (!schemaDetails) { + return; + } + const updatedData = { + ...schemaDetails, + displayName: data.displayName || undefined, + }; + const jsonPatch = compare(schemaDetails, updatedData); + await patchDatabaseSchemaDetails(schemaDetails.id ?? '', jsonPatch); + setSchemas((prevData) => + prevData.map((schema) => + schema.id === id + ? { ...schema, displayName: data.displayName } + : schema + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [schemas] + ); + + const schemaTableColumns: ColumnsType = useMemo( + () => [ + { + title: t('label.schema-name'), + dataIndex: 'name', + key: 'name', + width: 250, + render: (_, record: DatabaseSchema) => ( + + ), + }, + { + title: t('label.description'), + dataIndex: 'description', + key: 'description', + render: (text: string) => + text?.trim() ? ( + + ) : ( + + {t('label.no-entity', { entity: t('label.description') })} + + ), + }, + { + title: t('label.owner-plural'), + dataIndex: 'owners', + key: 'owners', + width: 120, + render: (owners: EntityReference[]) => + !isEmpty(owners) && owners.length > 0 ? ( + owners.map((owner: EntityReference) => getEntityName(owner)) + ) : ( + + {NO_DATA_PLACEHOLDER} + + ), + }, + { + title: t('label.usage'), + dataIndex: 'usageSummary', + key: 'usageSummary', + width: 120, + render: (text: UsageDetails) => + getUsagePercentile(text?.weeklyStats?.percentileRank ?? 0), + }, + ], + [handleDisplayNameUpdate, allowEditDisplayNamePermission] + ); + useEffect(() => { fetchDatabaseSchema(); }, [decodedDatabaseFQN, pageSize, showDeletedSchemas, isDatabaseDeleted]); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts index 4f57e3699588..2106081055e3 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts @@ -13,7 +13,7 @@ import { Rule } from 'antd/lib/form'; import { Constraint } from '../../../generated/entity/data/table'; -export type EntityName = { name: string; displayName?: string }; +export type EntityName = { name: string; displayName?: string; id?: string }; export type EntityNameWithAdditionFields = EntityName & { constraint: Constraint; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts new file mode 100644 index 000000000000..b7a3f0f8218a --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts @@ -0,0 +1,22 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { EntityName } from '../../Modals/EntityNameModal/EntityNameModal.interface'; + +export interface DisplayNameProps { + id: string; + name?: string; + displayName?: string; + link: string; + onEditDisplayName?: (data: EntityName, id?: string) => Promise; + allowRename?: boolean; +} diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx new file mode 100644 index 000000000000..5f6322bd41b8 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx @@ -0,0 +1,102 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { act, fireEvent, render, screen } from '@testing-library/react'; +import React from 'react'; +import { MemoryRouter } from 'react-router-dom'; +import DisplayName from './DisplayName'; +import { DisplayNameProps } from './DisplayName.interface'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + Link: jest + .fn() + .mockImplementation(({ children, ...props }) => ( + {children} + )), +})); + +jest.mock('../../../constants/constants', () => ({ + DE_ACTIVE_COLOR: '#BFBFBF', + ICON_DIMENSION: { width: 16, height: 16 }, +})); + +jest.mock('../../Modals/EntityNameModal/EntityNameModal.component', () => + jest.fn().mockImplementation(() =>
+      <div>Mocked Modal</div>
) +); + +const mockOnEditDisplayName = jest.fn(); + +const mockProps: DisplayNameProps = { + id: '1', + name: 'Sample Entity', + displayName: 'Sample Display Name', + link: '/entity/1', + allowRename: true, + onEditDisplayName: mockOnEditDisplayName, +}; + +describe('Test DisplayName Component', () => { + it('Should render the component with the display name', async () => { + await act(async () => { + render( + + + + ); + + const displayNameField = await screen.getByTestId('column-display-name'); + + expect(displayNameField).toBeInTheDocument(); + expect(displayNameField).toHaveTextContent('Sample Display Name'); + + const editButton = screen.queryByTestId('edit-displayName-button'); + + expect(editButton).toBeInTheDocument(); + }); + }); + + it('Should render the component with name when display name is empty', async () => { + await act(async () => { + render( + + + + ); + + const nameField = screen.getByTestId('column-name'); + + expect(nameField).toBeInTheDocument(); + expect(nameField).toHaveTextContent('Sample Entity'); + }); + }); + + it('Should open the edit modal on edit button click', async () => { + await act(async () => { + render( + + + + ); + const editButton = screen.getByTestId('edit-displayName-button'); + fireEvent.click(editButton); + + const nameField = await screen.findByTestId('column-name'); + + expect(nameField).toBeInTheDocument(); + + const displayNameField = await screen.findByTestId('column-display-name'); + + expect(displayNameField).toBeInTheDocument(); + }); + }); +}); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx new file mode 100644 index 000000000000..5a8a1f23a2a7 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx @@ -0,0 +1,103 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { Button, Tooltip, Typography } from 'antd'; +import { AxiosError } from 'axios'; +import { isEmpty } from 'lodash'; +import React, { useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Link } from 'react-router-dom'; +import { ReactComponent as IconEdit } from '../../../assets/svg/edit-new.svg'; +import { DE_ACTIVE_COLOR, ICON_DIMENSION } from '../../../constants/constants'; +import { showErrorToast } from '../../../utils/ToastUtils'; +import EntityNameModal from '../../Modals/EntityNameModal/EntityNameModal.component'; +import { EntityName } from '../../Modals/EntityNameModal/EntityNameModal.interface'; +import { DisplayNameProps } from './DisplayName.interface'; + +const DisplayName: React.FC = ({ + id, + name, + displayName, + onEditDisplayName, + link, + allowRename, +}) => { + const { t } = useTranslation(); + + const [isDisplayNameEditing, setIsDisplayNameEditing] = useState(false); + + const handleDisplayNameUpdate = async (data: EntityName) => { + setIsDisplayNameEditing(true); + try { + await onEditDisplayName?.(data, id); + } catch (error) { + showErrorToast(error as AxiosError); + } finally { + setIsDisplayNameEditing(false); + } + }; + + return ( +
+ + {isEmpty(displayName) ? ( + + {name} + + ) : ( + <> + {name} + + + {displayName} + + + + )} + + + {allowRename ? ( + +
+ ); +}; + +export default DisplayName; diff --git a/openmetadata-ui/src/main/resources/ui/src/constants/Services.constant.ts b/openmetadata-ui/src/main/resources/ui/src/constants/Services.constant.ts index 7b415b948dcd..c3ca33b88743 100644 --- a/openmetadata-ui/src/main/resources/ui/src/constants/Services.constant.ts +++ b/openmetadata-ui/src/main/resources/ui/src/constants/Services.constant.ts @@ -52,11 +52,11 @@ import lightDash from '../assets/img/service-icon-lightdash.png'; import looker from '../assets/img/service-icon-looker.png'; import mariadb from '../assets/img/service-icon-mariadb.png'; import metabase from '../assets/img/service-icon-metabase.png'; +import microstrategy from '../assets/img/service-icon-microstrategy.svg'; import mode from '../assets/img/service-icon-mode.png'; import mongodb from '../assets/img/service-icon-mongodb.png'; import msAzure from '../assets/img/service-icon-ms-azure.png'; import mssql from '../assets/img/service-icon-mssql.png'; -import mstr from '../assets/img/service-icon-mstr.png'; import nifi from '../assets/img/service-icon-nifi.png'; import openlineage from '../assets/img/service-icon-openlineage.svg'; import oracle from '../assets/img/service-icon-oracle.png'; @@ -153,7 +153,7 @@ export const REDPANDA = redpanda; export const SUPERSET = superset; export const SYNAPSE = synapse; export const LOOKER = looker; -export const MSTR = mstr; +export const MICROSTRATEGY = microstrategy; export const TABLEAU = tableau; export const REDASH = redash; export const METABASE = metabase; diff --git a/openmetadata-ui/src/main/resources/ui/src/locale/languages/mr-in.json b/openmetadata-ui/src/main/resources/ui/src/locale/languages/mr-in.json index a35924c5c410..5748e124c160 100644 --- a/openmetadata-ui/src/main/resources/ui/src/locale/languages/mr-in.json +++ b/openmetadata-ui/src/main/resources/ui/src/locale/languages/mr-in.json @@ -56,7 +56,6 @@ "aggregate": "एकूण", "airflow-config-plural": "एअरफ्लो संरचना", "alert": "सूचना", - "alert-details": "Alert Details", "alert-lowercase": "सूचना", "alert-lowercase-plural": "सूचना", "alert-plural": "सूचना", diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx index d9ef74254263..cc397a63a3d9 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx @@ -13,23 +13,29 @@ import { Col, Row, Switch, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; +import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { isEmpty, isUndefined } from 'lodash'; -import React, { useMemo } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { Link } from 'react-router-dom'; +import DisplayName from '../../components/common/DisplayName/DisplayName'; import DescriptionV1 from '../../components/common/EntityDescription/DescriptionV1'; import ErrorPlaceHolder from '../../components/common/ErrorWithPlaceholder/ErrorPlaceHolder'; import NextPrevious from '../../components/common/NextPrevious/NextPrevious'; import { NextPreviousProps } from '../../components/common/NextPrevious/NextPrevious.interface'; import RichTextEditorPreviewer from '../../components/common/RichTextEditor/RichTextEditorPreviewer'; import TableAntd from 
'../../components/common/Table/Table'; +import { EntityName } from '../../components/Modals/EntityNameModal/EntityNameModal.interface'; +import { usePermissionProvider } from '../../context/PermissionProvider/PermissionProvider'; import { ERROR_PLACEHOLDER_TYPE } from '../../enums/common.enum'; import { EntityType } from '../../enums/entity.enum'; import { DatabaseSchema } from '../../generated/entity/data/databaseSchema'; import { Table } from '../../generated/entity/data/table'; import { UsePagingInterface } from '../../hooks/paging/usePaging'; +import { patchTableDetails } from '../../rest/tableAPI'; import entityUtilClassBase from '../../utils/EntityUtilClassBase'; import { getEntityName } from '../../utils/EntityUtils'; +import { showErrorToast } from '../../utils/ToastUtils'; interface SchemaTablesTabProps { databaseSchemaDetails: DatabaseSchema; @@ -69,6 +75,48 @@ function SchemaTablesTab({ pagingInfo, }: Readonly) { const { t } = useTranslation(); + const [localTableData, setLocalTableData] = useState([]); + + const { permissions } = usePermissionProvider(); + + const allowEditDisplayNamePermission = useMemo(() => { + return ( + !isVersionView && + (permissions.table.EditAll || permissions.table.EditDisplayName) + ); + }, [permissions, isVersionView]); + + const handleDisplayNameUpdate = useCallback( + async (data: EntityName, id?: string) => { + try { + const tableDetails = localTableData.find((table) => table.id === id); + if (!tableDetails) { + return; + } + const updatedData = { + ...tableDetails, + displayName: data.displayName || undefined, + }; + const jsonPatch = compare(tableDetails, updatedData); + await patchTableDetails(tableDetails.id, jsonPatch); + + setLocalTableData((prevData) => + prevData.map((table) => + table.id === id + ? { ...table, displayName: data.displayName } + : table + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [localTableData] + ); + + useEffect(() => { + setLocalTableData(tableData); + }, [tableData]); const tableColumn: ColumnsType = useMemo( () => [ @@ -79,17 +127,18 @@ function SchemaTablesTab({ width: 500, render: (_, record: Table) => { return ( -
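/*
 * Sketch (not part of the patch): the display-name edit flow added to
 * SchemaTablesTab above boils down to "find the row, build the desired state,
 * let fast-json-patch derive a JSON Patch, send it, then update local state
 * instead of refetching". The Row shape and the patchFn parameter are
 * illustrative stand-ins for the generated Table type and patchTableDetails.
 */
import { compare, Operation } from 'fast-json-patch';

interface Row {
  id: string;
  name: string;
  displayName?: string;
}

export async function updateDisplayName(
  rows: Row[],
  id: string,
  displayName: string | undefined,
  patchFn: (id: string, patch: Operation[]) => Promise<unknown>
): Promise<Row[]> {
  const current = rows.find((row) => row.id === id);
  if (!current) {
    return rows;
  }

  // Derive the JSON Patch from old vs. new state,
  // e.g. [{ op: 'add', path: '/displayName', value: 'Orders' }].
  const updated = { ...current, displayName: displayName || undefined };
  const jsonPatch = compare(current, updated);

  await patchFn(current.id, jsonPatch);

  // Optimistic local update so the table re-renders without a new fetch.
  return rows.map((row) => (row.id === id ? { ...row, displayName } : row));
}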
-
-              {getEntityName(record)}
-
-
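/*
 * Sketch only: the Link markup removed above is replaced by the shared
 * DisplayName component, but its JSX did not survive extraction in this copy
 * of the diff. The prop names below (link, allowRename, onEditDisplayName)
 * are inferred from the surrounding code and are assumptions, not something
 * this patch confirms.
 */
import React from 'react';
import DisplayName from '../../components/common/DisplayName/DisplayName';

interface EntityName {
  name?: string;
  displayName?: string;
}

interface RowLike {
  id: string;
  name: string;
  displayName?: string;
  fullyQualifiedName: string;
}

export const renderNameColumn =
  (
    allowRename: boolean,
    onEditDisplayName: (data: EntityName, id?: string) => Promise<void>,
    getLink: (record: RowLike) => string
  ) =>
  (_: unknown, record: RowLike) =>
    (
      <DisplayName
        allowRename={allowRename}
        displayName={record.displayName}
        id={record.id}
        link={getLink(record)}
        name={record.name}
        onEditDisplayName={onEditDisplayName}
      />
    );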
+ ); }, }, @@ -105,7 +154,7 @@ function SchemaTablesTab({ ), }, ], - [] + [handleDisplayNameUpdate, allowEditDisplayNamePermission] ); return ( @@ -158,7 +207,7 @@ function SchemaTablesTab({ bordered columns={tableColumn} data-testid="databaseSchema-tables" - dataSource={tableData} + dataSource={localTableData} loading={tableDataLoading} locale={{ emptyText: ( diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx index dc7eef110c1f..84eeee904ffe 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx @@ -209,7 +209,7 @@ function DatabaseVersionPage() { /> - + diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx index b24b1f2af7dd..bf9643b01fe1 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx @@ -13,9 +13,11 @@ import { Col, Row, Space, Switch, Table, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; +import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { isUndefined } from 'lodash'; import { EntityTags, ServiceTypes } from 'Models'; -import React, { useCallback, useMemo, useState } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useParams } from 'react-router-dom'; import DescriptionV1 from '../../components/common/EntityDescription/DescriptionV1'; @@ -25,7 +27,9 @@ import NextPrevious from '../../components/common/NextPrevious/NextPrevious'; import { NextPreviousProps } from '../../components/common/NextPrevious/NextPrevious.interface'; import ResizablePanels from '../../components/common/ResizablePanels/ResizablePanels'; import EntityRightPanel from '../../components/Entity/EntityRightPanel/EntityRightPanel'; +import { EntityName } from '../../components/Modals/EntityNameModal/EntityNameModal.interface'; import { COMMON_RESIZABLE_PANEL_CONFIG } from '../../constants/ResizablePanel.constants'; +import { usePermissionProvider } from '../../context/PermissionProvider/PermissionProvider'; import { OperationPermission } from '../../context/PermissionProvider/PermissionProvider.interface'; import { EntityType } from '../../enums/entity.enum'; import { DatabaseService } from '../../generated/entity/services/databaseService'; @@ -33,10 +37,14 @@ import { Paging } from '../../generated/type/paging'; import { UsePagingInterface } from '../../hooks/paging/usePaging'; import { useFqn } from '../../hooks/useFqn'; import { ServicesType } from '../../interface/service.interface'; -import { getServiceMainTabColumns } from '../../utils/ServiceMainTabContentUtils'; +import { + callServicePatchAPI, + getServiceMainTabColumns, +} from '../../utils/ServiceMainTabContentUtils'; import { getEntityTypeFromServiceCategory } from '../../utils/ServiceUtils'; import { getTagsWithoutTier, getTierTags } from '../../utils/TableUtils'; import { createTagObject } from '../../utils/TagsUtils'; +import { showErrorToast } from '../../utils/ToastUtils'; import { ServicePageData } from 
'./ServiceDetailsPage'; interface ServiceMainTabContentProps { @@ -53,6 +61,7 @@ interface ServiceMainTabContentProps { pagingHandler: NextPreviousProps['pagingHandler']; saveUpdatedServiceData: (updatedData: ServicesType) => Promise; pagingInfo: UsePagingInterface; + isVersionPage?: boolean; } function ServiceMainTabContent({ @@ -69,6 +78,7 @@ function ServiceMainTabContent({ serviceDetails, saveUpdatedServiceData, pagingInfo, + isVersionPage = false, }: Readonly) { const { t } = useTranslation(); const { serviceCategory } = useParams<{ @@ -76,7 +86,10 @@ function ServiceMainTabContent({ }>(); const { fqn: serviceFQN } = useFqn(); + const { permissions } = usePermissionProvider(); + const [isEdit, setIsEdit] = useState(false); + const [pageData, setPageData] = useState([]); const tier = getTierTags(serviceDetails?.tags ?? []); const tags = getTagsWithoutTier(serviceDetails?.tags ?? []); @@ -131,9 +144,69 @@ function ServiceMainTabContent({ setIsEdit(false); }; + const handleDisplayNameUpdate = useCallback( + async (entityData: EntityName, id?: string) => { + try { + const pageDataDetails = pageData.find((data) => data.id === id); + if (!pageDataDetails) { + return; + } + const updatedData = { + ...pageDataDetails, + displayName: entityData.displayName || undefined, + }; + const jsonPatch = compare(pageDataDetails, updatedData); + await callServicePatchAPI( + serviceCategory, + pageDataDetails.id, + jsonPatch + ); + setPageData((prevData) => + prevData.map((data) => + data.id === id + ? { ...data, displayName: entityData.displayName } + : data + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [pageData, serviceCategory] + ); + + const editDisplayNamePermission = useMemo(() => { + if (isVersionPage) { + return false; + } + + const servicePermissions = { + databaseServices: permissions.databaseService, + messagingServices: permissions.messagingService, + dashboardServices: permissions.dashboardService, + pipelineServices: permissions.pipelineService, + mlmodelServices: permissions.mlmodelService, + storageServices: permissions.storageService, + searchServices: permissions.searchService, + apiServices: permissions.apiService, + }; + + const currentPermission = + servicePermissions[serviceCategory as keyof typeof servicePermissions]; + + return ( + currentPermission?.EditAll || currentPermission?.EditDisplayName || false + ); + }, [permissions, serviceCategory, isVersionPage]); + const tableColumn: ColumnsType = useMemo( - () => getServiceMainTabColumns(serviceCategory), - [serviceCategory] + () => + getServiceMainTabColumns( + serviceCategory, + editDisplayNamePermission, + handleDisplayNameUpdate + ), + [serviceCategory, handleDisplayNameUpdate, editDisplayNamePermission] ); const entityType = useMemo( @@ -160,6 +233,10 @@ function ServiceMainTabContent({ [servicePermission, serviceDetails] ); + useEffect(() => { + setPageData(data); + }, [data]); + return ( @@ -210,7 +287,7 @@ function ServiceMainTabContent({ bordered columns={tableColumn} data-testid="service-children-table" - dataSource={data} + dataSource={pageData} locale={{ emptyText: , }} diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/DashboardServiceUtils.ts b/openmetadata-ui/src/main/resources/ui/src/utils/DashboardServiceUtils.ts index ab71882577b4..d0ffe2488928 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/DashboardServiceUtils.ts +++ b/openmetadata-ui/src/main/resources/ui/src/utils/DashboardServiceUtils.ts @@ -22,8 +22,8 @@ import domoDashboardConnection from 
'../jsons/connectionSchemas/connections/dash import lightdashConnection from '../jsons/connectionSchemas/connections/dashboard/lightdashConnection.json'; import lookerConnection from '../jsons/connectionSchemas/connections/dashboard/lookerConnection.json'; import metabaseConnection from '../jsons/connectionSchemas/connections/dashboard/metabaseConnection.json'; +import microStrategyConnection from '../jsons/connectionSchemas/connections/dashboard/microStrategyConnection.json'; import modeConnection from '../jsons/connectionSchemas/connections/dashboard/modeConnection.json'; -import mstrConnection from '../jsons/connectionSchemas/connections/dashboard/mstrConnection.json'; import powerBIConnection from '../jsons/connectionSchemas/connections/dashboard/powerBIConnection.json'; import qlikcloudConnection from '../jsons/connectionSchemas/connections/dashboard/qlikCloudConnection.json'; import qliksenseConnection from '../jsons/connectionSchemas/connections/dashboard/qlikSenseConnection.json'; @@ -118,8 +118,8 @@ export const getDashboardConfig = (type: DashboardServiceType) => { break; } - case DashboardServiceType.Mstr: { - schema = mstrConnection; + case DashboardServiceType.MicroStrategy: { + schema = microStrategyConnection; break; } diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx index c629f20711fd..2c58500e1216 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx @@ -17,44 +17,51 @@ import { t } from 'i18next'; import { isUndefined } from 'lodash'; import { ServiceTypes } from 'Models'; import React from 'react'; -import { Link } from 'react-router-dom'; +import DisplayName from '../components/common/DisplayName/DisplayName'; import { OwnerLabel } from '../components/common/OwnerLabel/OwnerLabel.component'; import RichTextEditorPreviewer from '../components/common/RichTextEditor/RichTextEditorPreviewer'; +import { EntityName } from '../components/Modals/EntityNameModal/EntityNameModal.interface'; import TagsViewer from '../components/Tag/TagsViewer/TagsViewer'; import { NO_DATA_PLACEHOLDER } from '../constants/constants'; import { ServiceCategory } from '../enums/service.enum'; import { Database } from '../generated/entity/data/database'; import { Pipeline } from '../generated/entity/data/pipeline'; import { ServicePageData } from '../pages/ServiceDetailsPage/ServiceDetailsPage'; -import { getEntityName } from './EntityUtils'; +import { patchApiCollection } from '../rest/apiCollectionsAPI'; +import { patchDashboardDetails } from '../rest/dashboardAPI'; +import { patchDatabaseDetails } from '../rest/databaseAPI'; +import { patchMlModelDetails } from '../rest/mlModelAPI'; +import { patchPipelineDetails } from '../rest/pipelineAPI'; +import { patchSearchIndexDetails } from '../rest/SearchIndexAPI'; +import { patchContainerDetails } from '../rest/storageAPI'; +import { patchTopicDetails } from '../rest/topicsAPI'; import { getLinkForFqn } from './ServiceUtils'; import { getUsagePercentile } from './TableUtils'; export const getServiceMainTabColumns = ( - serviceCategory: ServiceTypes + serviceCategory: ServiceTypes, + editDisplayNamePermission?: boolean, + handleDisplayNameUpdate?: ( + entityData: EntityName, + id?: string + ) => Promise ): ColumnsType => [ { title: t('label.name'), dataIndex: 'name', key: 'name', width: 280, - render: (_, record: 
ServicePageData) => { - return ( - - - {getEntityName(record)} - - - ); - }, + render: (_, record: ServicePageData) => ( + + ), }, { title: t('label.description'), @@ -123,3 +130,30 @@ export const getServiceMainTabColumns = ( ] : []), ]; + +export const callServicePatchAPI = async ( + serviceCategory: ServiceTypes, + id: string, + jsonPatch: any +) => { + switch (serviceCategory) { + case ServiceCategory.DATABASE_SERVICES: + return await patchDatabaseDetails(id, jsonPatch); + case ServiceCategory.MESSAGING_SERVICES: + return await patchTopicDetails(id, jsonPatch); + case ServiceCategory.DASHBOARD_SERVICES: + return await patchDashboardDetails(id, jsonPatch); + case ServiceCategory.PIPELINE_SERVICES: + return await patchPipelineDetails(id, jsonPatch); + case ServiceCategory.ML_MODEL_SERVICES: + return await patchMlModelDetails(id, jsonPatch); + case ServiceCategory.STORAGE_SERVICES: + return await patchContainerDetails(id, jsonPatch); + case ServiceCategory.SEARCH_SERVICES: + return await patchSearchIndexDetails(id, jsonPatch); + case ServiceCategory.API_SERVICES: + return await patchApiCollection(id, jsonPatch); + default: + return; + } +}; diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceUtilClassBase.ts b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceUtilClassBase.ts index 6bc96a9852a8..e8e8317af669 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceUtilClassBase.ts +++ b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceUtilClassBase.ts @@ -58,6 +58,7 @@ import { LOOKER, MARIADB, METABASE, + MICROSTRATEGY, MLFLOW, ML_MODEL_DEFAULT, MODE, @@ -376,6 +377,7 @@ class ServiceUtilClassBase { case this.DashboardServiceTypeSmallCase.CustomDashboard: return DASHBOARD_DEFAULT; + case this.DashboardServiceTypeSmallCase.Superset: return SUPERSET; @@ -468,6 +470,7 @@ class ServiceUtilClassBase { case this.MlModelServiceTypeSmallCase.Sklearn: return SCIKIT; + case this.MlModelServiceTypeSmallCase.SageMaker: return SAGEMAKER; @@ -504,6 +507,9 @@ class ServiceUtilClassBase { case this.ApiServiceTypeSmallCase.REST: return REST_SERVICE; + case this.DashboardServiceTypeSmallCase.MicroStrategy: + return MICROSTRATEGY; + default: { let logo; if (serviceTypes.messagingServices.includes(type)) {
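/*
 * Hedged usage sketch for the callServicePatchAPI helper defined above: build
 * a JSON Patch for a child entity and let the switch route it to the matching
 * REST module (patchDatabaseDetails, patchTopicDetails, ...). The record shape
 * and file location are illustrative; error handling mirrors the
 * showErrorToast pattern used elsewhere in this change.
 */
import { AxiosError } from 'axios';
import { compare } from 'fast-json-patch';
import { ServiceCategory } from '../enums/service.enum';
import { showErrorToast } from './ToastUtils';
import { callServicePatchAPI } from './ServiceMainTabContentUtils';

export async function renameServiceChild(
  serviceCategory: ServiceCategory,
  record: { id: string; name: string; displayName?: string },
  newDisplayName?: string
): Promise<void> {
  try {
    const updated = { ...record, displayName: newDisplayName || undefined };
    // Only the displayName difference ends up in the patch document.
    const jsonPatch = compare(record, updated);

    // e.g. ServiceCategory.DATABASE_SERVICES routes to patchDatabaseDetails(id, jsonPatch).
    await callServicePatchAPI(serviceCategory, record.id, jsonPatch);
  } catch (error) {
    showErrorToast(error as AxiosError);
  }
}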