diff --git a/.github/workflows/cypress-integration-tests-mysql.yml b/.github/workflows/cypress-integration-tests-mysql.yml index 53628e155634..4932f5a2a8a7 100644 --- a/.github/workflows/cypress-integration-tests-mysql.yml +++ b/.github/workflows/cypress-integration-tests-mysql.yml @@ -15,6 +15,7 @@ name: MySQL Cypress Integration Tests on: + workflow_dispatch: push: branches: - main diff --git a/.github/workflows/cypress-integration-tests-postgresql.yml b/.github/workflows/cypress-integration-tests-postgresql.yml index 47cd833276bb..56ecedafa43c 100644 --- a/.github/workflows/cypress-integration-tests-postgresql.yml +++ b/.github/workflows/cypress-integration-tests-postgresql.yml @@ -15,6 +15,7 @@ name: PostgreSQL Cypress Integration Tests on: + workflow_dispatch: push: branches: - main diff --git a/.github/workflows/docker-openmetadata-db.yml b/.github/workflows/docker-openmetadata-db.yml index f89df6ede88d..2abfedde3967 100644 --- a/.github/workflows/docker-openmetadata-db.yml +++ b/.github/workflows/docker-openmetadata-db.yml @@ -31,7 +31,7 @@ jobs: steps: - name: Check trigger type if: ${{ env.input == '' }} - run: echo "input=1.2.0" >> $GITHUB_ENV + run: echo "input=1.1.5" >> $GITHUB_ENV - name: Check out the Repo uses: actions/checkout@v3 diff --git a/.github/workflows/docker-openmetadata-ingestion-base.yml b/.github/workflows/docker-openmetadata-ingestion-base.yml index 840e3ecdb571..eea6906ee09b 100644 --- a/.github/workflows/docker-openmetadata-ingestion-base.yml +++ b/.github/workflows/docker-openmetadata-ingestion-base.yml @@ -31,7 +31,7 @@ jobs: steps: - name: Check trigger type if: ${{ env.input == '' }} - run: echo "input=1.2.0" >> $GITHUB_ENV + run: echo "input=1.1.5" >> $GITHUB_ENV - name: Check out the Repo uses: actions/checkout@v3 diff --git a/.github/workflows/docker-openmetadata-ingestion.yml b/.github/workflows/docker-openmetadata-ingestion.yml index 9278ab0728fb..a9abe5883ec4 100644 --- a/.github/workflows/docker-openmetadata-ingestion.yml 
+++ b/.github/workflows/docker-openmetadata-ingestion.yml @@ -31,7 +31,7 @@ jobs: steps: - name: Check trigger type if: ${{ env.input == '' }} - run: echo "input=1.2.0" >> $GITHUB_ENV + run: echo "input=1.1.5" >> $GITHUB_ENV - name: Check out the Repo uses: actions/checkout@v3 diff --git a/.github/workflows/docker-openmetadata-postgres.yml b/.github/workflows/docker-openmetadata-postgres.yml index 6505f27c7ff9..32ec33645f41 100644 --- a/.github/workflows/docker-openmetadata-postgres.yml +++ b/.github/workflows/docker-openmetadata-postgres.yml @@ -31,7 +31,7 @@ jobs: steps: - name: Check trigger type if: ${{ env.input == '' }} - run: echo "input=1.2.0" >> $GITHUB_ENV + run: echo "input=1.1.5" >> $GITHUB_ENV - name: Check out the Repo uses: actions/checkout@v3 diff --git a/.github/workflows/docker-openmetadata-server.yml b/.github/workflows/docker-openmetadata-server.yml index 7c89d253176c..34c2e9efa3f3 100644 --- a/.github/workflows/docker-openmetadata-server.yml +++ b/.github/workflows/docker-openmetadata-server.yml @@ -63,7 +63,7 @@ jobs: steps: - name: Check trigger type id: check_trigger - run: echo "DOCKER_RELEASE_TAG=1.2.0" >> $GITHUB_OUTPUT + run: echo "DOCKER_RELEASE_TAG=1.1.5" >> $GITHUB_OUTPUT - name: Download application from Artifiact uses: actions/download-artifact@v2 @@ -128,7 +128,7 @@ jobs: - name: Check trigger type id: check_trigger if: ${{ env.DOCKER_RELEASE_TAG == '' }} - run: echo "DOCKER_RELEASE_TAG=1.2.0" >> $GITHUB_ENV + run: echo "DOCKER_RELEASE_TAG=1.1.5" >> $GITHUB_ENV - name: Check out the Repo uses: actions/checkout@v3 diff --git a/.github/workflows/maven-build.yml b/.github/workflows/maven-build.yml index ba65c1b4abf5..ca5a65651de8 100644 --- a/.github/workflows/maven-build.yml +++ b/.github/workflows/maven-build.yml @@ -12,6 +12,7 @@ name: Maven MySQL Tests CI on: + workflow_dispatch: push: branches: - main @@ -116,7 +117,7 @@ jobs: - name: Build with Maven env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: ${{ github.event_name == 
'push' }} + if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }} run: mvn -Dsonar.login=${{ secrets.SONAR_TOKEN }} clean test - name: Clean Up diff --git a/.github/workflows/openmetadata-airflow-apis.yml b/.github/workflows/openmetadata-airflow-apis.yml index d24101197e0a..13f887d984fc 100644 --- a/.github/workflows/openmetadata-airflow-apis.yml +++ b/.github/workflows/openmetadata-airflow-apis.yml @@ -34,6 +34,6 @@ jobs: run: | make install_dev install_apis cd openmetadata-airflow-apis; \ - python setup.py install sdist bdist_wheel; \ + python setup.py build sdist bdist_wheel; \ twine check dist/*; \ twine upload dist/* --verbose diff --git a/Makefile b/Makefile index 0112b042e713..02f87a85835c 100644 --- a/Makefile +++ b/Makefile @@ -246,4 +246,83 @@ generate-schema-docs: ## Generates markdown files for documenting the JSON Sche @echo "Generating Schema docs" python -m pip install "jsonschema2md" python scripts/generate_docs_schemas.py - \ No newline at end of file + +#Upgrade release automation scripts below +.PHONY: update_all +update_all: ## To update all the release related files run make update_all RELEASE_VERSION=2.2.2 PY_RELEASE_VERSION=2.2.2.2 + @echo "The release version is: $(RELEASE_VERSION)" ; \ + echo "The python metadata release version: $(PY_RELEASE_VERSION)" ; \ + $(MAKE) update_maven ; \ + $(MAKE) update_github_action_paths ; \ + $(MAKE) update_python_release_paths ; \ + $(MAKE) update_dockerfile_version ; \ + $(MAKE) update_ingestion_dockerfile_version ; \ + +#remove comment and use the below section when want to use this sub module "update_all" independently to update github actions +#make update_all RELEASE_VERSION=2.2.2 PY_RELEASE_VERSION=2.2.2.2 + +.PHONY: update_maven +update_maven: ## To update the common and pom.xml maven version + @echo "Updating Maven projects to version $(RELEASE_VERSION)..."; \ + mvn versions:set -DnewVersion=$(RELEASE_VERSION) +#remove comment and use the below section when want to use 
this sub module "update_maven" independently to update github actions +#make update_maven RELEASE_VERSION=2.2.2 + + +.PHONY: update_github_action_paths +update_github_action_paths: ## To update the github action ci docker files + @echo "Updating docker github action release version to $(RELEASE_VERSION)... "; \ + file_paths="docker/docker-compose-quickstart/Dockerfile \ + .github/workflows/docker-openmetadata-db.yml \ + .github/workflows/docker-openmetadata-ingestion-base.yml \ + .github/workflows/docker-openmetadata-ingestion.yml \ + .github/workflows/docker-openmetadata-postgres.yml \ + .github/workflows/docker-openmetadata-server.yml"; \ + for file_path in $$file_paths; do \ + python3 scripts/update_version.py 1 $$file_path -s $(RELEASE_VERSION) ; \ + done; \ + file_paths1="docker/docker-compose-quickstart/Dockerfile"; \ + for file_path in $$file_paths1; do \ + python3 scripts/update_version.py 4 $$file_path -s $(RELEASE_VERSION) ; \ + done + +#remove comment and use the below section when want to use this sub module "update_github_action_paths" independently to update github actions +#make update_github_action_paths RELEASE_VERSION=2.2.2 + +.PHONY: update_python_release_paths +update_python_release_paths: ## To update the setup.py files + file_paths="ingestion/setup.py \ + openmetadata-airflow-apis/setup.py"; \ + echo "Updating Python setup file versions to $(PY_RELEASE_VERSION)... 
"; \ + for file_path in $$file_paths; do \ + python3 scripts/update_version.py 2 $$file_path -s $(PY_RELEASE_VERSION) ; \ + done +# Commented section for independent usage of the module update_python_release_paths independently to update github actions +#make update_python_release_paths PY_RELEASE_VERSION=2.2.2.2 + +.PHONY: update_dockerfile_version +update_dockerfile_version: ## To update the dockerfiles version + @file_paths="docker/docker-compose-ingestion/docker-compose-ingestion-postgres.yml \ + docker/docker-compose-ingestion/docker-compose-ingestion.yml \ + docker/docker-compose-openmetadata/docker-compose-openmetadata.yml \ + docker/docker-compose-quickstart/docker-compose-postgres.yml \ + docker/docker-compose-quickstart/docker-compose.yml"; \ + echo "Updating docker github action release version to $(RELEASE_VERSION)... "; \ + for file_path in $$file_paths; do \ + python3 scripts/update_version.py 3 $$file_path -s $(RELEASE_VERSION) ; \ + done +#remove comment and use the below section when want to use this sub module "update_dockerfile_version" independently to update github actions +#make update_dockerfile_version RELEASE_VERSION=2.2.2 + +.PHONY: update_ingestion_dockerfile_version +update_ingestion_dockerfile_version: ## To update the ingestion dockerfiles version + @file_paths="ingestion/Dockerfile \ + ingestion/operators/docker/Dockerfile"; \ + echo "Updating ingestion dockerfile release version to $(PY_RELEASE_VERSION)... 
"; \ + for file_path in $$file_paths; do \ + python3 scripts/update_version.py 4 $$file_path -s $(PY_RELEASE_VERSION) ; \ + done +#remove comment and use the below section when want to use this sub module "update_ingestion_dockerfile_version" independently to update github actions +#make update_ingestion_dockerfile_version PY_RELEASE_VERSION=2.2.2.2 + +#Upgrade release automation scripts above diff --git a/bootstrap/sql/migrations/native/1.1.2/mysql/schemaChanges.sql b/bootstrap/sql/migrations/native/1.1.2/mysql/schemaChanges.sql index 6b09a3a84afb..3c69c9fd152c 100644 --- a/bootstrap/sql/migrations/native/1.1.2/mysql/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.1.2/mysql/schemaChanges.sql @@ -20,4 +20,4 @@ SET json = JSON_INSERT( '$.connection.config.authType.password', JSON_EXTRACT(json, '$.connection.config.password')) where serviceType = 'Trino' - AND JSON_EXTRACT(json, '$.connection.config.password') IS NOT NULL; \ No newline at end of file + AND JSON_EXTRACT(json, '$.connection.config.password') IS NOT NULL; diff --git a/bootstrap/sql/migrations/native/1.1.3/postgres/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.1.3/postgres/postDataMigrationSQLScript.sql deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bootstrap/sql/migrations/native/1.1.3/postgres/schemaChanges.sql b/bootstrap/sql/migrations/native/1.1.3/postgres/schemaChanges.sql deleted file mode 100644 index e6de4bf12ec9..000000000000 --- a/bootstrap/sql/migrations/native/1.1.3/postgres/schemaChanges.sql +++ /dev/null @@ -1,6 +0,0 @@ -ALTER TABLE entity_extension_time_series ALTER COLUMN entityFQNHash TYPE VARCHAR(768), ALTER COLUMN jsonSchema TYPE VARCHAR(50) , ALTER COLUMN extension TYPE VARCHAR(100) , - ADD CONSTRAINT entity_extension_time_series_constraint UNIQUE (entityFQNHash, extension, timestamp); -ALTER TABLE field_relationship ALTER COLUMN fromFQNHash TYPE VARCHAR(768), ALTER COLUMN toFQNHash TYPE VARCHAR(768); -ALTER TABLE thread_entity 
ALTER COLUMN entityLink TYPE VARCHAR(3072); -ALTER TABLE tag_usage ALTER COLUMN tagFQNHash TYPE VARCHAR(768), ALTER COLUMN targetFQNHash TYPE VARCHAR(768); -ALTER TABLE test_suite ALTER COLUMN fqnHash TYPE VARCHAR(768); diff --git a/bootstrap/sql/migrations/native/1.1.5/mysql/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.1.5/mysql/postDataMigrationSQLScript.sql new file mode 100644 index 000000000000..2ce1654285b5 --- /dev/null +++ b/bootstrap/sql/migrations/native/1.1.5/mysql/postDataMigrationSQLScript.sql @@ -0,0 +1,50 @@ +START TRANSACTION; +-- We'll rank all the runs (timestamps) for every day, and delete all the data but the most recent one. +DELETE FROM report_data_time_series WHERE JSON_EXTRACT(json, '$.id') IN ( + select ids FROM ( + SELECT + (json ->> '$.id') AS ids, + DENSE_RANK() OVER(PARTITION BY `date` ORDER BY `timestamp` DESC) as denseRank + FROM ( + SELECT + * + FROM report_data_time_series rdts + WHERE json ->> '$.reportDataType' = 'WebAnalyticEntityViewReportData' + ) duplicates + ORDER BY `date` DESC, `timestamp` DESC + ) as dense_ranked + WHERE denseRank != 1 +); + +DELETE FROM report_data_time_series WHERE JSON_EXTRACT(json, '$.id') IN ( + select ids FROM ( + SELECT + (json ->> '$.id') AS ids, + DENSE_RANK() OVER(PARTITION BY `date` ORDER BY `timestamp` DESC) as denseRank + FROM ( + SELECT + * + FROM report_data_time_series rdts + WHERE json ->> '$.reportDataType' = 'EntityReportData' + ) duplicates + ORDER BY `date` DESC, `timestamp` DESC + ) as dense_ranked + WHERE denseRank != 1 +); + +DELETE FROM report_data_time_series WHERE JSON_EXTRACT(json, '$.id') IN ( + select ids FROM ( + SELECT + (json ->> '$.id') AS ids, + DENSE_RANK() OVER(PARTITION BY `date` ORDER BY `timestamp` DESC) as denseRank + FROM ( + SELECT + * + FROM report_data_time_series rdts + WHERE json ->> '$.reportDataType' = 'WebAnalyticUserActivityReportData' + ) duplicates + ORDER BY `date` DESC, `timestamp` DESC + ) as dense_ranked + WHERE denseRank != 1 
+); +COMMIT; \ No newline at end of file diff --git a/bootstrap/sql/migrations/native/1.1.3/mysql/schemaChanges.sql b/bootstrap/sql/migrations/native/1.1.5/mysql/schemaChanges.sql similarity index 55% rename from bootstrap/sql/migrations/native/1.1.3/mysql/schemaChanges.sql rename to bootstrap/sql/migrations/native/1.1.5/mysql/schemaChanges.sql index d52b8f8dd0e1..837bedf99cb6 100644 --- a/bootstrap/sql/migrations/native/1.1.3/mysql/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.1.5/mysql/schemaChanges.sql @@ -1,9 +1,83 @@ +-- Update table and column profile timestamps to be in milliseconds +UPDATE entity_extension_time_series + SET json = JSON_INSERT( + JSON_REMOVE(json, '$.timestamp'), + '$.timestamp', + JSON_EXTRACT(json, '$.timestamp') * 1000 + ) +WHERE + extension in ('table.tableProfile', 'table.columnProfile', 'testCase.testCaseResult'); +; + +START TRANSACTION; +-- Create report data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS report_data_time_series ( + entityFQNHash VARCHAR(768) CHARACTER SET ascii COLLATE ascii_bin NOT NULL, + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSON NOT NULL, + timestamp BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.timestamp') NOT NULL, + date DATE GENERATED ALWAYS AS (FROM_UNIXTIME((json ->> '$.timestamp') DIV 1000)) NOT NULL, + INDEX report_data_time_series_point_ts (timestamp), + INDEX report_data_time_series_date (date) +); + +INSERT INTO report_data_time_series (entityFQNHash,extension,jsonSchema,json) +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series WHERE extension = 'reportData.reportDataResult'; + +DELETE FROM entity_extension_time_series +WHERE extension = 'reportData.reportDataResult'; +COMMIT; + +START TRANSACTION; +-- Create profiler data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS profiler_data_time_series ( + entityFQNHash 
VARCHAR(768) CHARACTER SET ascii COLLATE ascii_bin NOT NULL, + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSON NOT NULL, + operation VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.operation') NULL, + timestamp BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.timestamp') NOT NULL, + UNIQUE profiler_data_time_series_unique_hash_extension_ts (entityFQNHash, extension, operation, timestamp), + INDEX profiler_data_time_series_combined_id_ts (extension, timestamp) +); + +INSERT INTO profiler_data_time_series (entityFQNHash,extension,jsonSchema,json) +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series +WHERE extension IN ('table.columnProfile', 'table.tableProfile', 'table.systemProfile'); + +DELETE FROM entity_extension_time_series +WHERE extension IN ('table.columnProfile', 'table.tableProfile', 'table.systemProfile'); +COMMIT; + +START TRANSACTION; +-- Create data quality data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS data_quality_data_time_series ( + entityFQNHash VARCHAR(768) CHARACTER SET ascii COLLATE ascii_bin NOT NULL, + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSON NOT NULL, + timestamp BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.timestamp') NOT NULL, + UNIQUE data_quality_data_time_series_unique_hash_extension_ts (entityFQNHash, extension, timestamp), + INDEX data_quality_data_time_series_combined_id_ts (extension, timestamp) +); + +INSERT INTO data_quality_data_time_series (entityFQNHash,extension,jsonSchema,json) +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series +WHERE extension = 'testCase.testCaseResult'; + +DELETE FROM entity_extension_time_series +WHERE extension = 'testCase.testCaseResult'; +COMMIT; + ALTER TABLE automations_workflow MODIFY COLUMN nameHash VARCHAR(256) COLLATE ascii_bin,MODIFY COLUMN workflowType VARCHAR(256) COLLATE ascii_bin, MODIFY COLUMN 
status VARCHAR(256) COLLATE ascii_bin; -ALTER TABLE entity_extension MODIFY COLUMN extension VARCHAR(256) COLLATE ascii_bin; ALTER TABLE entity_extension_time_series MODIFY COLUMN entityFQNHash VARCHAR(768) COLLATE ascii_bin, MODIFY COLUMN jsonSchema VARCHAR(50) COLLATE ascii_bin, MODIFY COLUMN extension VARCHAR(100) COLLATE ascii_bin, ADD CONSTRAINT entity_extension_time_series_constraint UNIQUE (entityFQNHash, extension, timestamp); ALTER TABLE field_relationship MODIFY COLUMN fromFQNHash VARCHAR(768) COLLATE ascii_bin, MODIFY COLUMN toFQNHash VARCHAR(768) COLLATE ascii_bin; -ALTER TABLE thread_entity MODIFY COLUMN entityLink VARCHAR(3072) GENERATED ALWAYS AS (json ->> '$.about') NOT NULL, MODIFY COLUMN createdBy VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.createdBy') STORED NOT NULL COLLATE ascii_bin; +ALTER TABLE thread_entity MODIFY COLUMN entityLink VARCHAR(3072) GENERATED ALWAYS AS (json ->> '$.about') NOT NULL; ALTER TABLE event_subscription_entity MODIFY COLUMN nameHash VARCHAR(256) COLLATE ascii_bin; ALTER TABLE ingestion_pipeline_entity MODIFY COLUMN fqnHash VARCHAR(768) COLLATE ascii_bin; ALTER TABLE bot_entity MODIFY COLUMN nameHash VARCHAR(256) COLLATE ascii_bin; diff --git a/bootstrap/sql/migrations/native/1.1.5/postgres/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.1.5/postgres/postDataMigrationSQLScript.sql new file mode 100644 index 000000000000..948f4fb754c2 --- /dev/null +++ b/bootstrap/sql/migrations/native/1.1.5/postgres/postDataMigrationSQLScript.sql @@ -0,0 +1,53 @@ +BEGIN; +-- We'll rank all the runs (timestamps) for every day, and delete all the data but the most recent one. 
+DELETE FROM report_data_time_series WHERE (json ->> 'id') IN ( + select ids FROM ( + SELECT + (json ->> 'id') AS ids, + DENSE_RANK() OVER(PARTITION BY date ORDER BY timestamp DESC) as denseRank + FROM ( + SELECT + *, + DATE(TO_TIMESTAMP((json ->> 'timestamp')::bigint/1000)) as date + FROM report_data_time_series rdts + WHERE json ->> 'reportDataType' = 'WebAnalyticEntityViewReportData' + ) duplicates + ORDER BY date DESC, timestamp DESC + ) as dense_ranked + WHERE denseRank != 1 +); + +DELETE FROM report_data_time_series WHERE (json ->> 'id') IN ( + select ids FROM ( + SELECT + (json ->> 'id') AS ids, + DENSE_RANK() OVER(PARTITION BY date ORDER BY timestamp DESC) as denseRank + FROM ( + SELECT + *, + DATE(TO_TIMESTAMP((json ->> 'timestamp')::bigint/1000)) as date + FROM report_data_time_series rdts + WHERE json ->> 'reportDataType' = 'EntityReportData' + ) duplicates + ORDER BY date DESC, timestamp DESC + ) as dense_ranked + WHERE denseRank != 1 +); + +DELETE FROM report_data_time_series WHERE (json ->> 'id') IN ( + select ids FROM ( + SELECT + (json ->> 'id') AS ids, + DENSE_RANK() OVER(PARTITION BY date ORDER BY timestamp DESC) as denseRank + FROM ( + SELECT + *, + DATE(TO_TIMESTAMP((json ->> 'timestamp')::bigint/1000)) as date + FROM report_data_time_series rdts + WHERE json ->> 'reportDataType' = 'WebAnalyticUserActivityReportData' + ) duplicates + ORDER BY date DESC, timestamp DESC + ) as dense_ranked + WHERE denseRank != 1 +); +COMMIT; \ No newline at end of file diff --git a/bootstrap/sql/migrations/native/1.1.5/postgres/schemaChanges.sql b/bootstrap/sql/migrations/native/1.1.5/postgres/schemaChanges.sql new file mode 100644 index 000000000000..553447353a97 --- /dev/null +++ b/bootstrap/sql/migrations/native/1.1.5/postgres/schemaChanges.sql @@ -0,0 +1,83 @@ +-- Update table and column profile timestamps to be in milliseconds +UPDATE entity_extension_time_series +SET json = jsonb_set( + json, + '{timestamp}', + to_jsonb(cast(json#>'{timestamp}' as int8) 
*1000) +) +WHERE + extension in ('table.tableProfile', 'table.columnProfile', 'testCase.testCaseResult'); +; + +BEGIN; +-- Run the following SQL to update the schema in a transaction +-- Create report data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS report_data_time_series ( + entityFQNHash VARCHAR(768), + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSONB NOT NULL, + timestamp BIGINT CHECK (timestamp > 0) GENERATED ALWAYS AS ((json ->> 'timestamp')::bigint) STORED NOT NULL +); +CREATE INDEX IF NOT EXISTS report_data_time_series_point_ts ON report_data_time_series (timestamp); + +INSERT INTO report_data_time_series (entityFQNHash,extension,jsonSchema,json) + +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series WHERE extension = 'reportData.reportDataResult'; + +DELETE FROM entity_extension_time_series +WHERE extension = 'reportData.reportDataResult'; +COMMIT; + +BEGIN; +-- Create profiler data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS profiler_data_time_series ( + entityFQNHash VARCHAR(768), + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSON NOT NULL, + operation VARCHAR(256) GENERATED ALWAYS AS ((json ->> 'operation')::text) STORED NULL, + timestamp BIGINT CHECK (timestamp > 0) GENERATED ALWAYS AS ((json ->> 'timestamp')::bigint) STORED NOT NULL, + CONSTRAINT profiler_data_time_series_unique_hash_extension_ts UNIQUE(entityFQNHash, extension, operation, timestamp) +); + +CREATE INDEX IF NOT EXISTS profiler_data_time_series_combined_id_ts ON profiler_data_time_series (extension, timestamp); + +INSERT INTO profiler_data_time_series (entityFQNHash,extension,jsonSchema,json) +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series +WHERE extension IN ('table.columnProfile', 'table.tableProfile', 'table.systemProfile'); + +DELETE FROM 
entity_extension_time_series +WHERE extension IN ('table.columnProfile', 'table.tableProfile', 'table.systemProfile'); +COMMIT; + +BEGIN; +-- Create profiler data time series table and move data from entity_extension_time_series +CREATE TABLE IF NOT EXISTS data_quality_data_time_series ( + entityFQNHash VARCHAR(768), + extension VARCHAR(256) NOT NULL, + jsonSchema VARCHAR(256) NOT NULL, + json JSON NOT NULL, + timestamp BIGINT CHECK (timestamp > 0) GENERATED ALWAYS AS ((json ->> 'timestamp')::bigint) STORED NOT NULL, + CONSTRAINT data_quality_data_time_series_unique_hash_extension_ts UNIQUE(entityFQNHash, extension, timestamp) +); + +CREATE INDEX IF NOT EXISTS data_quality_data_time_series_combined_id_ts ON data_quality_data_time_series (extension, timestamp); + +INSERT INTO data_quality_data_time_series (entityFQNHash,extension,jsonSchema,json) +SELECT entityFQNHash, extension, jsonSchema, json +FROM entity_extension_time_series +WHERE extension = 'testCase.testCaseResult'; + +DELETE FROM entity_extension_time_series +WHERE extension = 'testCase.testCaseResult'; +COMMIT; + +ALTER TABLE entity_extension_time_series ALTER COLUMN entityFQNHash TYPE VARCHAR(768), ALTER COLUMN jsonSchema TYPE VARCHAR(50) , ALTER COLUMN extension TYPE VARCHAR(100) , + ADD CONSTRAINT entity_extension_time_series_constraint UNIQUE (entityFQNHash, extension, timestamp); +ALTER TABLE field_relationship ALTER COLUMN fromFQNHash TYPE VARCHAR(768), ALTER COLUMN toFQNHash TYPE VARCHAR(768); +ALTER TABLE thread_entity ALTER COLUMN entityLink TYPE VARCHAR(3072); +ALTER TABLE tag_usage ALTER COLUMN tagFQNHash TYPE VARCHAR(768), ALTER COLUMN targetFQNHash TYPE VARCHAR(768); +ALTER TABLE test_suite ALTER COLUMN fqnHash TYPE VARCHAR(768); diff --git a/bootstrap/sql/migrations/native/1.2.0/mysql/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.2.0/mysql/postDataMigrationSQLScript.sql deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git 
a/bootstrap/sql/migrations/native/1.2.0/mysql/schemaChanges.sql b/bootstrap/sql/migrations/native/1.2.0/mysql/schemaChanges.sql deleted file mode 100644 index 05f678e0a590..000000000000 --- a/bootstrap/sql/migrations/native/1.2.0/mysql/schemaChanges.sql +++ /dev/null @@ -1,54 +0,0 @@ --- column deleted not needed for entities that don't support soft delete -ALTER TABLE query_entity DROP COLUMN deleted; -ALTER TABLE event_subscription_entity DROP COLUMN deleted; - --- create domain entity table -CREATE TABLE IF NOT EXISTS domain_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL, - fqnHash VARCHAR(256) NOT NULL COLLATE ascii_bin, - json JSON NOT NULL, - updatedAt BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.updatedAt') NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.updatedBy') NOT NULL, - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); - --- create data product entity table -CREATE TABLE IF NOT EXISTS data_product_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL, - fqnHash VARCHAR(256) NOT NULL COLLATE ascii_bin, - json JSON NOT NULL, - updatedAt BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.updatedAt') NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.updatedBy') NOT NULL, - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); - --- create search service entity -CREATE TABLE IF NOT EXISTS search_service_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, - nameHash VARCHAR(256) NOT NULL COLLATE ascii_bin, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL, - serviceType VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.serviceType') NOT NULL, - json JSON NOT NULL, - updatedAt BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.updatedAt') NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json 
->> '$.updatedBy') NOT NULL, - deleted BOOLEAN GENERATED ALWAYS AS (json -> '$.deleted'), - PRIMARY KEY (id), - UNIQUE (nameHash) - ); - --- create search index entity -CREATE TABLE IF NOT EXISTS search_index_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL, - fqnHash VARCHAR(256) NOT NULL COLLATE ascii_bin, - json JSON NOT NULL, - updatedAt BIGINT UNSIGNED GENERATED ALWAYS AS (json ->> '$.updatedAt') NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.updatedBy') NOT NULL, - deleted BOOLEAN GENERATED ALWAYS AS (json -> '$.deleted'), - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); diff --git a/bootstrap/sql/migrations/native/1.2.0/postgres/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.2.0/postgres/postDataMigrationSQLScript.sql deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/bootstrap/sql/migrations/native/1.2.0/postgres/schemaChanges.sql b/bootstrap/sql/migrations/native/1.2.0/postgres/schemaChanges.sql deleted file mode 100644 index 127237d506e6..000000000000 --- a/bootstrap/sql/migrations/native/1.2.0/postgres/schemaChanges.sql +++ /dev/null @@ -1,54 +0,0 @@ --- column deleted not needed for entities that don't support soft delete -ALTER TABLE query_entity DROP COLUMN deleted; -ALTER TABLE event_subscription_entity DROP COLUMN deleted; - --- create domain entity table -CREATE TABLE IF NOT EXISTS domain_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL, - fqnHash VARCHAR(256) NOT NULL, - json JSONB NOT NULL, - updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL, - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); - --- create data product entity table -CREATE TABLE IF NOT EXISTS data_product_entity 
( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL, - fqnHash VARCHAR(256) NOT NULL, - json JSONB NOT NULL, - updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL, - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); - --- create search service entity -CREATE TABLE IF NOT EXISTS search_service_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL, - nameHash VARCHAR(256) NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL, - serviceType VARCHAR(256) GENERATED ALWAYS AS (json ->> 'serviceType') STORED NOT NULL, - json JSONB NOT NULL, - updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL, - deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED, - PRIMARY KEY (id), - UNIQUE (nameHash) - ); - --- create search index entity -CREATE TABLE IF NOT EXISTS search_index_entity ( - id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED NOT NULL, - name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL, - fqnHash VARCHAR(256) NOT NULL, - json JSONB NOT NULL, - updatedAt BIGINT GENERATED ALWAYS AS ((json ->> 'updatedAt')::bigint) STORED NOT NULL, - updatedBy VARCHAR(256) GENERATED ALWAYS AS (json ->> 'updatedBy') STORED NOT NULL, - deleted BOOLEAN GENERATED ALWAYS AS ((json ->> 'deleted')::boolean) STORED, - PRIMARY KEY (id), - UNIQUE (fqnHash) - ); diff --git a/common/pom.xml b/common/pom.xml index 7531a6c26042..2fb2c2fe3114 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -18,7 +18,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 diff --git a/docker/development/docker-compose-postgres.yml b/docker/development/docker-compose-postgres.yml index 
b22e42fc41e2..78716934cea9 100644 --- a/docker/development/docker-compose-postgres.yml +++ b/docker/development/docker-compose-postgres.yml @@ -70,7 +70,6 @@ services: SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # Migration MIGRATION_LIMIT_PARAM: ${MIGRATION_LIMIT_PARAM:-1200} @@ -229,7 +228,6 @@ services: SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # OpenMetadata Server Authentication Configuration AUTHORIZER_CLASS_NAME: ${AUTHORIZER_CLASS_NAME:-org.openmetadata.service.security.DefaultAuthorizer} AUTHORIZER_REQUEST_FILTER: ${AUTHORIZER_REQUEST_FILTER:-org.openmetadata.service.security.JwtFilter} diff --git a/docker/development/docker-compose.yml b/docker/development/docker-compose.yml index 77b8ba7b75f7..34c87bd66e45 100644 --- a/docker/development/docker-compose.yml +++ b/docker/development/docker-compose.yml @@ -69,7 +69,6 @@ services: SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # Migration MIGRATION_LIMIT_PARAM: ${MIGRATION_LIMIT_PARAM:-1200} @@ -225,7 +224,6 @@ services: dockerfile: docker/development/Dockerfile container_name: openmetadata_server environment: - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} diff --git a/docker/docker-compose-ingestion/docker-compose-ingestion.yml b/docker/docker-compose-ingestion/docker-compose-ingestion.yml index dc5d7cf3f123..769c2f21af4f 100644 --- a/docker/docker-compose-ingestion/docker-compose-ingestion.yml +++ b/docker/docker-compose-ingestion/docker-compose-ingestion.yml @@ -18,7 +18,7 @@ volumes: services: ingestion: 
container_name: openmetadata_ingestion - image: docker.getcollate.io/openmetadata/ingestion:1.2.0 + image: docker.getcollate.io/openmetadata/ingestion:1.1.5 environment: AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session" AIRFLOW__CORE__EXECUTOR: LocalExecutor diff --git a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml index a830c73cb994..25ace6583898 100644 --- a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml +++ b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml @@ -14,13 +14,12 @@ services: execute-migrate-all: container_name: execute_migrate_all command: "./bootstrap/bootstrap_storage.sh migrate-all" - image: docker.getcollate.io/openmetadata/server:1.2.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # Migration MIGRATION_LIMIT_PARAM: ${MIGRATION_LIMIT_PARAM:-1200} @@ -168,13 +167,12 @@ services: openmetadata-server: container_name: openmetadata_server restart: always - image: docker.getcollate.io/openmetadata/server:1.2.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # OpenMetadata Server Authentication Configuration AUTHORIZER_CLASS_NAME: ${AUTHORIZER_CLASS_NAME:-org.openmetadata.service.security.DefaultAuthorizer} diff --git a/docker/docker-compose-openmetadata/env-mysql b/docker/docker-compose-openmetadata/env-mysql index 3cb8aa814bf4..fbd5ff88d80b 100644 --- 
a/docker/docker-compose-openmetadata/env-mysql +++ b/docker/docker-compose-openmetadata/env-mysql @@ -2,7 +2,6 @@ OPENMETADATA_CLUSTER_NAME="openmetadata" SERVER_PORT="8585" SERVER_ADMIN_PORT="8586" LOG_LEVEL="INFO" -OPENMETADATA_DEBUG="false" # Migration MIGRATION_LIMIT_PARAM = 1200 diff --git a/docker/docker-compose-openmetadata/env-postgres b/docker/docker-compose-openmetadata/env-postgres index d9004ac4bbc5..a450f775dd59 100644 --- a/docker/docker-compose-openmetadata/env-postgres +++ b/docker/docker-compose-openmetadata/env-postgres @@ -2,7 +2,6 @@ OPENMETADATA_CLUSTER_NAME="openmetadata" SERVER_PORT="8585" SERVER_ADMIN_PORT="8586" LOG_LEVEL="INFO" -OPENMETADATA_DEBUG="false" # Migration MIGRATION_LIMIT_PARAM = 1200 diff --git a/docker/docker-compose-quickstart/Dockerfile b/docker/docker-compose-quickstart/Dockerfile index e35fb3932fb3..b0c6b5eb845b 100644 --- a/docker/docker-compose-quickstart/Dockerfile +++ b/docker/docker-compose-quickstart/Dockerfile @@ -11,7 +11,9 @@ # Build stage FROM alpine:3.15 AS build -ENV RELEASE_URL="https://github.com/open-metadata/OpenMetadata/releases/download/1.2.0-release/openmetadata-1.2.0.tar.gz" +ARG RI_VERSION="1.1.5" +ENV RELEASE_URL="https://github.com/open-metadata/OpenMetadata/releases/download/${RI_VERSION}-release/openmetadata-${RI_VERSION}.tar.gz" + RUN mkdir -p /opt/openmetadata && \ wget ${RELEASE_URL} && \ tar zxvf openmetadata-*.tar.gz -C /opt/openmetadata --strip-components 1 && \ @@ -24,7 +26,7 @@ ARG COMMIT_ID LABEL maintainer="OpenMetadata" LABEL org.open-metadata.image.authors="support@openmetadata.org" LABEL org.open-metadata.vendor="OpenMetadata" -LABEL org.open-metadata.release-version="1.2.0" +LABEL org.open-metadata.release-version="1.1.5" LABEL org.open-metadata.description="OpenMetadata is an open source platform for metadata management and discovery." 
LABEL org.open-metadata.url="https://open-metadata.org/" LABEL org.open-metadata.vcs-url="https://github.com/open-metadata/OpenMetadata" diff --git a/docker/docker-compose-quickstart/docker-compose-postgres.yml b/docker/docker-compose-quickstart/docker-compose-postgres.yml index 2d5cf718d1ab..e6518bb14431 100644 --- a/docker/docker-compose-quickstart/docker-compose-postgres.yml +++ b/docker/docker-compose-quickstart/docker-compose-postgres.yml @@ -18,7 +18,7 @@ volumes: services: postgresql: container_name: openmetadata_postgresql - image: docker.getcollate.io/openmetadata/postgresql:1.2.0 + image: docker.getcollate.io/openmetadata/postgresql:1.1.5 restart: always command: "--work_mem=10MB" environment: @@ -55,14 +55,13 @@ services: execute-migrate-all: container_name: execute_migrate_all - image: docker.getcollate.io/openmetadata/server:1.1.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 command: "./bootstrap/bootstrap_storage.sh migrate-all" environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # Migration MIGRATION_LIMIT_PARAM: ${MIGRATION_LIMIT_PARAM:-1200} @@ -214,13 +213,12 @@ services: openmetadata-server: container_name: openmetadata_server restart: always - image: docker.getcollate.io/openmetadata/server:1.1.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # OpenMetadata Server Authentication Configuration AUTHORIZER_CLASS_NAME: ${AUTHORIZER_CLASS_NAME:-org.openmetadata.service.security.DefaultAuthorizer} @@ -373,7 +371,7 @@ services: ingestion: container_name: openmetadata_ingestion - image: 
docker.getcollate.io/openmetadata/ingestion:1.1.0 + image: docker.getcollate.io/openmetadata/ingestion:1.1.5 depends_on: elasticsearch: condition: service_started diff --git a/docker/docker-compose-quickstart/docker-compose.yml b/docker/docker-compose-quickstart/docker-compose.yml index 2a8167645722..a0c8a733aa83 100644 --- a/docker/docker-compose-quickstart/docker-compose.yml +++ b/docker/docker-compose-quickstart/docker-compose.yml @@ -18,7 +18,7 @@ volumes: services: mysql: container_name: openmetadata_mysql - image: docker.getcollate.io/openmetadata/db:1.2.0 + image: docker.getcollate.io/openmetadata/db:1.1.5 command: "--sort_buffer_size=10M" restart: always environment: @@ -53,14 +53,13 @@ services: execute-migrate-all: container_name: execute_migrate_all - image: docker.getcollate.io/openmetadata/server:1.1.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 command: "./bootstrap/bootstrap_storage.sh migrate-all" environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # Migration MIGRATION_LIMIT_PARAM: ${MIGRATION_LIMIT_PARAM:-1200} @@ -213,13 +212,12 @@ services: openmetadata-server: container_name: openmetadata_server restart: always - image: docker.getcollate.io/openmetadata/server:1.1.0 + image: docker.getcollate.io/openmetadata/server:1.1.5 environment: OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata} SERVER_PORT: ${SERVER_PORT:-8585} SERVER_ADMIN_PORT: ${SERVER_ADMIN_PORT:-8586} LOG_LEVEL: ${LOG_LEVEL:-INFO} - OPENMETADATA_DEBUG: ${OPENMETADATA_DEBUG:-false} # OpenMetadata Server Authentication Configuration AUTHORIZER_CLASS_NAME: ${AUTHORIZER_CLASS_NAME:-org.openmetadata.service.security.DefaultAuthorizer} @@ -373,7 +371,7 @@ services: ingestion: container_name: openmetadata_ingestion - image: 
docker.getcollate.io/openmetadata/ingestion:1.1.0 + image: docker.getcollate.io/openmetadata/ingestion:1.1.5 depends_on: elasticsearch: condition: service_started diff --git a/docker/postgresql/Dockerfile_postgres b/docker/postgresql/Dockerfile_postgres index e264e58b19bc..ddd375fcd81e 100644 --- a/docker/postgresql/Dockerfile_postgres +++ b/docker/postgresql/Dockerfile_postgres @@ -1,4 +1,4 @@ -FROM postgres:15 +FROM postgres:14 WORKDIR /docker-entrypoint-initdb.d COPY docker/postgresql/postgres-script.sql . RUN chmod -R 775 /docker-entrypoint-initdb.d \ No newline at end of file diff --git a/docker/run_local_docker.sh b/docker/run_local_docker.sh index d6299d41dfbb..b0236001eeb2 100755 --- a/docker/run_local_docker.sh +++ b/docker/run_local_docker.sh @@ -114,6 +114,7 @@ done until curl -s -f --header 'Authorization: Basic YWRtaW46YWRtaW4=' "http://localhost:8080/api/v1/dags/sample_data"; do printf 'Checking if Sample Data DAG is reachable...\n' + curl --header 'Authorization: Basic YWRtaW46YWRtaW4=' "http://localhost:8080/api/v1/dags/sample_data" sleep 5 done diff --git a/ingestion/Dockerfile b/ingestion/Dockerfile index bb0fad549b14..0fe31ae4dddc 100644 --- a/ingestion/Dockerfile +++ b/ingestion/Dockerfile @@ -1,4 +1,4 @@ -FROM apache/airflow:2.5.3-python3.9 +FROM apache/airflow:2.6.3-python3.9 USER root RUN curl -sS https://packages.microsoft.com/keys/microsoft.asc | apt-key add - RUN curl -sS https://packages.microsoft.com/config/debian/11/prod.list > /etc/apt/sources.list.d/mssql-release.list @@ -80,10 +80,10 @@ ARG INGESTION_DEPENDENCY="all" ENV PIP_NO_CACHE_DIR=1 # Make pip silent ENV PIP_QUIET=1 - +ARG RI_VERSION="1.1.5.0" RUN pip install --upgrade pip -RUN pip install "openmetadata-managed-apis~=1.1.0.4" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.9.txt" -RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=1.1.0.4" +RUN pip install "openmetadata-managed-apis~=${RI_VERSION}" --constraint 
"https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.9.txt" +RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=${RI_VERSION}" # Temporary workaround for https://github.com/open-metadata/OpenMetadata/issues/9593 RUN echo "Image built for $(uname -m)" diff --git a/ingestion/Dockerfile.ci b/ingestion/Dockerfile.ci index df3244588257..6931b899f210 100644 --- a/ingestion/Dockerfile.ci +++ b/ingestion/Dockerfile.ci @@ -1,4 +1,4 @@ -FROM apache/airflow:2.5.3-python3.9 +FROM apache/airflow:2.6.3-python3.9 USER root RUN curl -sS https://packages.microsoft.com/keys/microsoft.asc | apt-key add - RUN curl -sS https://packages.microsoft.com/config/debian/11/prod.list > /etc/apt/sources.list.d/mssql-release.list diff --git a/ingestion/examples/sample_data/searchIndexes/searchIndexes.json b/ingestion/examples/sample_data/searchIndexes/searchIndexes.json deleted file mode 100644 index 9c496d38ed6f..000000000000 --- a/ingestion/examples/sample_data/searchIndexes/searchIndexes.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "searchIndexes": [ - { - "id": "e093dd27-390e-4360-8efd-e4d63ec167a9", - "name": "table_search_index", - "displayName": "TableSearchIndex", - "fullyQualifiedName": "elasticsearch_sample.table_search_index", - "description": "Table Search Index", - "version": 0.1, - "updatedAt": 1638354087591, - "serviceType": "ElasticSearch", - "fields": [ - { - "name": "name", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Table Entity Name.", - "tags": [] - }, - { - "name": "displayName", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Table Entity DisplayName.", - "tags": [] - }, - { - "name": "description", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Table Entity Description.", - "tags": [] - }, - { - "name": "columns", - "dataType": "NESTED", - "dataTypeDisplay": "nested", - "description": "Table Columns.", - "tags": [], - "children": [ - { - "name": "name", - 
"dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Column Name.", - "tags": [] - }, - { - "name": "displayName", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Column DisplayName.", - "tags": [] - }, - { - "name": "description", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Column Description.", - "tags": [] - } - ] - }, - { - "name": "databaseSchema", - "dataType": "TEXT", - "dataTypeDisplay": "text", - "description": "Database Schema that this table belongs to.", - "tags": [] - } - ], - "tags": [], - "followers": [] - } - ] -} \ No newline at end of file diff --git a/ingestion/examples/sample_data/searchIndexes/service.json b/ingestion/examples/sample_data/searchIndexes/service.json deleted file mode 100644 index 82e9b21cc80b..000000000000 --- a/ingestion/examples/sample_data/searchIndexes/service.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "elasticsearch", - "serviceName": "elasticsearch_sample", - "serviceConnection": { - "config": { - "type": "ElasticSearch", - "hostPort": "localhost:9200" - } - }, - "sourceConfig": { - } -} \ No newline at end of file diff --git a/ingestion/operators/docker/Dockerfile b/ingestion/operators/docker/Dockerfile index 1bdc65807024..d99268712e65 100644 --- a/ingestion/operators/docker/Dockerfile +++ b/ingestion/operators/docker/Dockerfile @@ -86,9 +86,10 @@ ENV PIP_QUIET=1 RUN pip install --upgrade pip ARG INGESTION_DEPENDENCY="all" +ARG RI_VERSION="1.1.5.0" RUN pip install --upgrade pip -RUN pip install "openmetadata-ingestion[airflow]~=1.1.0.4" -RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=1.1.0.4" +RUN pip install "openmetadata-ingestion[airflow]~=${RI_VERSION}" +RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=${RI_VERSION}" # Temporary workaround for https://github.com/open-metadata/OpenMetadata/issues/9593 RUN echo "Image built for $(uname -m)" diff --git a/ingestion/setup.py b/ingestion/setup.py index 
c3bd2947e714..0352dc16197a 100644 --- a/ingestion/setup.py +++ b/ingestion/setup.py @@ -93,7 +93,7 @@ def get_long_description(): "google-auth>=1.33.0", VERSIONS["grpc-tools"], # Used in sample data "idna<3,>=2.5", - "importlib-metadata~=4.13.0", # From airflow constraints + "importlib-metadata>=4.13.0", # From airflow constraints "Jinja2>=2.11.3", "jsonpatch==1.32", "jsonschema", @@ -108,7 +108,7 @@ def get_long_description(): "requests-aws4auth~=1.1", # Only depends on requests as external package. Leaving as base. "setuptools~=66.0.0", "sqlalchemy>=1.4.0,<2", - "openmetadata-sqllineage>=1.0.4", + "collate-sqllineage>=1.0.4", "tabulate==0.9.0", "typing-compat~=0.1.0", # compatibility requirements for 3.7 "typing_extensions<=4.5.0", # We need to have this fixed due to a yanked release 4.6.0 @@ -266,7 +266,7 @@ def get_long_description(): build_options = {"includes": ["_cffi_backend"]} setup( name="openmetadata-ingestion", - version="1.2.0.0.dev0", + version="1.1.5.0", url="https://open-metadata.org/", author="OpenMetadata Committers", license="Apache License 2.0", diff --git a/ingestion/src/metadata/cli/db_dump.py b/ingestion/src/metadata/cli/db_dump.py index ec39207cee61..674ff93a0e0f 100644 --- a/ingestion/src/metadata/cli/db_dump.py +++ b/ingestion/src/metadata/cli/db_dump.py @@ -31,6 +31,7 @@ "field_relationship", "tag_usage", "openmetadata_settings", + "profiler_data_time_series" } CUSTOM_TABLES = {"entity_extension_time_series": {"exclude_columns": ["timestamp"]}} diff --git a/ingestion/src/metadata/data_insight/api/workflow.py b/ingestion/src/metadata/data_insight/api/workflow.py index 6e5a3db33b97..0c4e66160d38 100644 --- a/ingestion/src/metadata/data_insight/api/workflow.py +++ b/ingestion/src/metadata/data_insight/api/workflow.py @@ -27,6 +27,7 @@ from pydantic import ValidationError from metadata.config.common import WorkflowExecutionError +from metadata.data_insight.helper.data_insight_es_index import DataInsightEsIndex from 
metadata.data_insight.processor.data_processor import DataProcessor from metadata.data_insight.processor.entity_report_data_processor import ( EntityReportDataProcessor, @@ -58,7 +59,10 @@ from metadata.timer.workflow_reporter import get_ingestion_status_timer from metadata.utils.importer import get_sink from metadata.utils.logger import data_insight_logger, set_loggers_level -from metadata.utils.time_utils import get_beginning_of_day_timestamp_mill +from metadata.utils.time_utils import ( + get_beginning_of_day_timestamp_mill, + get_end_of_day_timestamp_mill, +) from metadata.utils.workflow_output_handler import print_data_insight_status from metadata.workflow.workflow_status_mixin import WorkflowStatusMixin @@ -78,6 +82,7 @@ class DataInsightWorkflow(WorkflowStatusMixin): def __init__(self, config: OpenMetadataWorkflowConfig) -> None: self.config = config self._timer: Optional[RepeatedTimer] = None + self.date = datetime.utcnow().strftime("%Y-%m-%d") set_loggers_level(config.workflowConfig.loggerLevel.value) @@ -165,10 +170,56 @@ def _get_kpis(self) -> list[Kpi]: return [kpi for kpi in kpis.entities if self._is_kpi_active(kpi)] + def _check_and_handle_existing_es_data(self, index: str) -> None: + """Handles scenarios where data has already been ingested for the execution data. + If we find some data for the execution date we should deleted those documents before + re indexing new documents. + + !IMPORTANT! This should be deprecared and the logic should be handle in the event + publisher side once we have the event publisher handling DI indexing. 
+ """ + gte = get_beginning_of_day_timestamp_mill() + lte = get_end_of_day_timestamp_mill() + query = { + "size": 1000, + "query": { + "range": { + "timestamp": { + "gte": gte, + "lte": lte, + } + } + }, + } + data = self.es_sink.read_records(index, query) + try: + hit_total = data["hits"]["total"]["value"] + documents = data["hits"]["hits"] + except KeyError as exc: + logger.error(exc) + else: + if hit_total > 0: + body = [ + {"delete": {"_index": document["_index"], "_id": document["_id"]}} + for document in documents + ] + try: + self.es_sink.bulk_operation(body) + except Exception as exc: + logger.debug(traceback.format_exc()) + logger.error(f"Could not delete existing data - {exc}") + raise RuntimeError + return None + return None + def _execute_data_processor(self): """Data processor method to refine raw data into report data and ingest it in ES""" for report_data_type in ReportDataType: logger.info(f"Processing data for report type {report_data_type}") + # we delete the report data for the current date to avoid duplicates + # entries in the database. 
+ self.metadata.delete_report_data(report_data_type, self.date) + has_checked_and_handled_existing_es_data = False try: self.source = DataProcessor.create( _data_processor_type=report_data_type.value, metadata=self.metadata @@ -177,6 +228,11 @@ def _execute_data_processor(self): if hasattr(self, "sink"): self.sink.write_record(record) if hasattr(self, "es_sink"): + if not has_checked_and_handled_existing_es_data: + self._check_and_handle_existing_es_data( + DataInsightEsIndex[record.data.__class__.__name__].value + ) + has_checked_and_handled_existing_es_data = True self.es_sink.write_record(record) else: logger.warning( diff --git a/ingestion/src/metadata/data_quality/interface/pandas/pandas_test_suite_interface.py b/ingestion/src/metadata/data_quality/interface/pandas/pandas_test_suite_interface.py index 10d6c7d12c89..e6d1a199c361 100644 --- a/ingestion/src/metadata/data_quality/interface/pandas/pandas_test_suite_interface.py +++ b/ingestion/src/metadata/data_quality/interface/pandas/pandas_test_suite_interface.py @@ -93,7 +93,7 @@ def run_test_case( test_handler = TestHandler( self.dfs, test_case=test_case, - execution_date=datetime.now(tz=timezone.utc).timestamp(), + execution_date=int(datetime.now(tz=timezone.utc).timestamp() * 1000), ) return Validator(validator_obj=test_handler).validate() diff --git a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py index 8da479a2dab8..9413625cfaa0 100644 --- a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py +++ b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py @@ -165,7 +165,7 @@ def run_test_case( test_handler = TestHandler( self.runner, test_case=test_case, - execution_date=datetime.now(tz=timezone.utc).timestamp(), + execution_date=int(datetime.now(tz=timezone.utc).timestamp() * 1000), ) return 
Validator(validator_obj=test_handler).validate() diff --git a/ingestion/src/metadata/ingestion/api/parser.py b/ingestion/src/metadata/ingestion/api/parser.py index dfbc302c7f80..3743a331257c 100644 --- a/ingestion/src/metadata/ingestion/api/parser.py +++ b/ingestion/src/metadata/ingestion/api/parser.py @@ -42,10 +42,6 @@ PipelineConnection, PipelineServiceType, ) -from metadata.generated.schema.entity.services.searchService import ( - SearchConnection, - SearchServiceType, -) from metadata.generated.schema.entity.services.storageService import ( StorageConnection, StorageServiceType, @@ -78,10 +74,6 @@ PipelineMetadataConfigType, PipelineServiceMetadataPipeline, ) -from metadata.generated.schema.metadataIngestion.searchServiceMetadataPipeline import ( - SearchMetadataConfigType, - SearchServiceMetadataPipeline, -) from metadata.generated.schema.metadataIngestion.storageServiceMetadataPipeline import ( StorageMetadataConfigType, StorageServiceMetadataPipeline, @@ -110,7 +102,6 @@ **{service: PipelineConnection for service in PipelineServiceType.__members__}, **{service: MlModelConnection for service in MlModelServiceType.__members__}, **{service: StorageConnection for service in StorageServiceType.__members__}, - **{service: SearchConnection for service in SearchServiceType.__members__}, } SOURCE_CONFIG_CLASS_MAP = { @@ -122,7 +113,6 @@ MlModelMetadataConfigType.MlModelMetadata.value: MlModelServiceMetadataPipeline, DatabaseMetadataConfigType.DatabaseMetadata.value: DatabaseServiceMetadataPipeline, StorageMetadataConfigType.StorageMetadata.value: StorageServiceMetadataPipeline, - SearchMetadataConfigType.SearchMetadata.value: SearchServiceMetadataPipeline, } diff --git a/ingestion/src/metadata/ingestion/models/search_index_data.py b/ingestion/src/metadata/ingestion/models/search_index_data.py deleted file mode 100644 index 8d8ae80a3a4c..000000000000 --- a/ingestion/src/metadata/ingestion/models/search_index_data.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2021 
Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Model required to ingest search index sample data -""" - -from pydantic import BaseModel - -from metadata.generated.schema.entity.data.searchIndex import ( - SearchIndex, - SearchIndexSampleData, -) - - -class OMetaIndexSampleData(BaseModel): - entity: SearchIndex - data: SearchIndexSampleData diff --git a/ingestion/src/metadata/ingestion/ometa/mixins/data_insight_mixin.py b/ingestion/src/metadata/ingestion/ometa/mixins/data_insight_mixin.py index 9046ec580cda..09a52f46a913 100644 --- a/ingestion/src/metadata/ingestion/ometa/mixins/data_insight_mixin.py +++ b/ingestion/src/metadata/ingestion/ometa/mixins/data_insight_mixin.py @@ -19,7 +19,7 @@ from typing import List, Optional from metadata.generated.schema.analytics.basic import WebAnalyticEventType -from metadata.generated.schema.analytics.reportData import ReportData +from metadata.generated.schema.analytics.reportData import ReportData, ReportDataType from metadata.generated.schema.analytics.webAnalyticEventData import ( WebAnalyticEventData, ) @@ -174,3 +174,14 @@ def delete_web_analytic_event_before_ts_exclusive( """ event_type_value = event_type.value self.client.delete(f"/analytics/web/events/{event_type_value}/{tmsp}/collect") + + def delete_report_data(self, report_data_type: ReportDataType, date: str) -> None: + """Delete report data at a specific date for a specific report data type + + Args: + report_data_type (ReportDataType): report date type to delete 
+ date (str): date for which to delete the report data + """ + self.client.delete( + f"/analytics/dataInsights/data/{report_data_type.value}/{date}" + ) diff --git a/ingestion/src/metadata/ingestion/ometa/mixins/query_mixin.py b/ingestion/src/metadata/ingestion/ometa/mixins/query_mixin.py index 72806148ec2f..5e7291e7eccb 100644 --- a/ingestion/src/metadata/ingestion/ometa/mixins/query_mixin.py +++ b/ingestion/src/metadata/ingestion/ometa/mixins/query_mixin.py @@ -13,7 +13,8 @@ To be used by OpenMetadata class """ - +import hashlib +import json from typing import List, Optional, Union from metadata.generated.schema.api.data.createQuery import CreateQueryRequest @@ -22,6 +23,7 @@ from metadata.generated.schema.entity.data.table import Table from metadata.generated.schema.type.basic import Uuid from metadata.generated.schema.type.entityReference import EntityReference +from metadata.ingestion.ometa.client import REST from metadata.ingestion.ometa.utils import model_str @@ -32,6 +34,21 @@ class OMetaQueryMixin: To be inherited by OpenMetadata """ + client: REST + + def _get_query_hash(self, query: str) -> str: + result = hashlib.md5(query.encode()) + return str(result.hexdigest()) + + def _get_or_create_query(self, query: CreateQueryRequest) -> Optional[Query]: + query_hash = self._get_query_hash(query=query.query.__root__) + query_entity = self.get_by_name(entity=Query, fqn=query_hash) + if query_entity is None: + resp = self.client.put(self.get_suffix(Query), data=query.json()) + if resp and resp.get("id"): + query_entity = Query(**resp) + return query_entity + def ingest_entity_queries_data( self, entity: Union[Table, Dashboard], queries: List[CreateQueryRequest] ) -> None: @@ -42,16 +59,35 @@ def ingest_entity_queries_data( :param queries: CreateQueryRequest to add """ for create_query in queries: - query = self.client.put(self.get_suffix(Query), data=create_query.json()) - if query and query.get("id"): + query = self._get_or_create_query(create_query) + if query: 
+ # Add Query Usage table_ref = EntityReference(id=entity.id.__root__, type="table") # convert object to json array string table_ref_json = "[" + table_ref.json() + "]" self.client.put( - f"{self.get_suffix(Query)}/{query.get('id')}/usage", + f"{self.get_suffix(Query)}/{model_str(query.id)}/usage", data=table_ref_json, ) + # Add Query Users + user_fqn_list = create_query.users + if user_fqn_list: + self.client.put( + f"{self.get_suffix(Query)}/{model_str(query.id)}/users", + data=json.dumps( + [model_str(user_fqn) for user_fqn in user_fqn_list] + ), + ) + + # Add Query used by + user_list = create_query.usedBy + if user_list: + self.client.put( + f"{self.get_suffix(Query)}/{model_str(query.id)}/usedBy", + data=json.dumps(user_list), + ) + def get_entity_queries( self, entity_id: Union[Uuid, str], fields: Optional[List[str]] = None ) -> Optional[List[Query]]: diff --git a/ingestion/src/metadata/ingestion/ometa/mixins/search_index_mixin.py b/ingestion/src/metadata/ingestion/ometa/mixins/search_index_mixin.py deleted file mode 100644 index 590b74ca509a..000000000000 --- a/ingestion/src/metadata/ingestion/ometa/mixins/search_index_mixin.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Mixin class containing Search Index specific methods - -To be used by OpenMetadata class -""" -import traceback -from typing import Optional - -from metadata.generated.schema.entity.data.searchIndex import ( - SearchIndex, - SearchIndexSampleData, -) -from metadata.ingestion.ometa.client import REST -from metadata.utils.logger import ometa_logger - -logger = ometa_logger() - - -class OMetaSearchIndexMixin: - """ - OpenMetadata API methods related to search index. - - To be inherited by OpenMetadata - """ - - client: REST - - def ingest_search_index_sample_data( - self, search_index: SearchIndex, sample_data: SearchIndexSampleData - ) -> Optional[SearchIndexSampleData]: - """ - PUT sample data for a search index - - :param search_index: SearchIndex Entity to update - :param sample_data: Data to add - """ - resp = None - try: - resp = self.client.put( - f"{self.get_suffix(SearchIndex)}/{search_index.id.__root__}/sampleData", - data=sample_data.json(), - ) - except Exception as exc: - logger.debug(traceback.format_exc()) - logger.warning( - f"Error trying to PUT sample data for {search_index.fullyQualifiedName.__root__}: {exc}" - ) - - if resp: - try: - return SearchIndexSampleData(**resp["sampleData"]) - except UnicodeError as err: - logger.debug(traceback.format_exc()) - logger.warning( - "Unicode Error parsing the sample data response " - f"from {search_index.fullyQualifiedName.__root__}: {err}" - ) - except Exception as exc: - logger.debug(traceback.format_exc()) - logger.warning( - "Error trying to parse sample data results" - f"from {search_index.fullyQualifiedName.__root__}: {exc}" - ) - - return None diff --git a/ingestion/src/metadata/ingestion/ometa/mixins/table_mixin.py b/ingestion/src/metadata/ingestion/ometa/mixins/table_mixin.py index f098bcf01e8a..511ceb30deb6 100644 --- a/ingestion/src/metadata/ingestion/ometa/mixins/table_mixin.py +++ b/ingestion/src/metadata/ingestion/ometa/mixins/table_mixin.py @@ -254,11 +254,6 @@ def get_profile_data( 
url_after = f"&after={after}" if after else "" profile_type_url = profile_type.__name__[0].lower() + profile_type.__name__[1:] - # system profile uses milliseconds - if profile_type is not SystemProfile: - start_ts = start_ts // 1000 - end_ts = end_ts // 1000 - resp = self.client.get( f"{self.get_suffix(Table)}/{fqn}/{profile_type_url}?limit={limit}{url_after}", data={"startTs": start_ts, "endTs": end_ts}, diff --git a/ingestion/src/metadata/ingestion/ometa/ometa_api.py b/ingestion/src/metadata/ingestion/ometa/ometa_api.py index d614629fce24..e58e2344be25 100644 --- a/ingestion/src/metadata/ingestion/ometa/ometa_api.py +++ b/ingestion/src/metadata/ingestion/ometa/ometa_api.py @@ -42,7 +42,6 @@ from metadata.ingestion.ometa.mixins.pipeline_mixin import OMetaPipelineMixin from metadata.ingestion.ometa.mixins.query_mixin import OMetaQueryMixin from metadata.ingestion.ometa.mixins.role_policy_mixin import OMetaRolePolicyMixin -from metadata.ingestion.ometa.mixins.search_index_mixin import OMetaSearchIndexMixin from metadata.ingestion.ometa.mixins.server_mixin import OMetaServerMixin from metadata.ingestion.ometa.mixins.service_mixin import OMetaServiceMixin from metadata.ingestion.ometa.mixins.table_mixin import OMetaTableMixin @@ -120,7 +119,6 @@ class OpenMetadata( OMetaUserMixin, OMetaQueryMixin, OMetaRolePolicyMixin, - OMetaSearchIndexMixin, Generic[T, C], ): """ @@ -246,7 +244,6 @@ def get_entity_from_create(self, create: Type[C]) -> Type[T]: .replace("testsuite", "testSuite") .replace("testdefinition", "testDefinition") .replace("testcase", "testCase") - .replace("searchindex", "searchIndex") ) class_path = ".".join( diff --git a/ingestion/src/metadata/ingestion/ometa/routes.py b/ingestion/src/metadata/ingestion/ometa/routes.py index b57b83214914..232c2562d138 100644 --- a/ingestion/src/metadata/ingestion/ometa/routes.py +++ b/ingestion/src/metadata/ingestion/ometa/routes.py @@ -38,9 +38,6 @@ from metadata.generated.schema.api.data.createMlModel import 
CreateMlModelRequest from metadata.generated.schema.api.data.createPipeline import CreatePipelineRequest from metadata.generated.schema.api.data.createQuery import CreateQueryRequest -from metadata.generated.schema.api.data.createSearchIndex import ( - CreateSearchIndexRequest, -) from metadata.generated.schema.api.data.createTable import CreateTableRequest from metadata.generated.schema.api.data.createTopic import CreateTopicRequest from metadata.generated.schema.api.lineage.addLineage import AddLineageRequest @@ -63,9 +60,6 @@ from metadata.generated.schema.api.services.createPipelineService import ( CreatePipelineServiceRequest, ) -from metadata.generated.schema.api.services.createSearchService import ( - CreateSearchServiceRequest, -) from metadata.generated.schema.api.services.createStorageService import ( CreateStorageServiceRequest, ) @@ -97,7 +91,6 @@ from metadata.generated.schema.entity.data.pipeline import Pipeline from metadata.generated.schema.entity.data.query import Query from metadata.generated.schema.entity.data.report import Report -from metadata.generated.schema.entity.data.searchIndex import SearchIndex from metadata.generated.schema.entity.data.table import Table from metadata.generated.schema.entity.data.topic import Topic from metadata.generated.schema.entity.policies.policy import Policy @@ -113,7 +106,6 @@ from metadata.generated.schema.entity.services.metadataService import MetadataService from metadata.generated.schema.entity.services.mlmodelService import MlModelService from metadata.generated.schema.entity.services.pipelineService import PipelineService -from metadata.generated.schema.entity.services.searchService import SearchService from metadata.generated.schema.entity.services.storageService import StorageService from metadata.generated.schema.entity.teams.role import Role from metadata.generated.schema.entity.teams.team import Team @@ -148,8 +140,6 @@ CreateQueryRequest.__name__: "/queries", Container.__name__: "/containers", 
CreateContainerRequest.__name__: "/containers", - SearchIndex.__name__: "/searchIndexes", - CreateSearchIndexRequest.__name__: "/searchIndexes", # Classifications Tag.__name__: "/tags", CreateTagRequest.__name__: "/tags", @@ -189,8 +179,6 @@ CreateMlModelServiceRequest.__name__: "/services/mlmodelServices", MetadataService.__name__: "/services/metadataServices", CreateMetadataServiceRequest.__name__: "/services/metadataServices", - SearchService.__name__: "/services/searchServices", - CreateSearchServiceRequest.__name__: "/services/searchServices", IngestionPipeline.__name__: "/services/ingestionPipelines", TestConnectionDefinition.__name__: "/services/testConnectionDefinitions", # Data Quality diff --git a/ingestion/src/metadata/ingestion/ometa/utils.py b/ingestion/src/metadata/ingestion/ometa/utils.py index 43a8ee6085db..c5b1db7f52ae 100644 --- a/ingestion/src/metadata/ingestion/ometa/utils.py +++ b/ingestion/src/metadata/ingestion/ometa/utils.py @@ -31,7 +31,6 @@ def format_name(name: str) -> str: return re.sub(r"[" + subs + "]", "_", name) -# pylint: disable=too-many-return-statements def get_entity_type( entity: Union[Type[T], str], ) -> str: @@ -55,8 +54,6 @@ def get_entity_type( return class_name.replace("testsuite", "testSuite") if "databaseschema" in class_name: return class_name.replace("databaseschema", "databaseSchema") - if "searchindex" in class_name: - return class_name.replace("searchindex", "searchIndex") return class_name diff --git a/ingestion/src/metadata/ingestion/processor/query_parser.py b/ingestion/src/metadata/ingestion/processor/query_parser.py index ca6cb6576824..1c9d427f1eb3 100644 --- a/ingestion/src/metadata/ingestion/processor/query_parser.py +++ b/ingestion/src/metadata/ingestion/processor/query_parser.py @@ -20,6 +20,7 @@ from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import ( OpenMetadataConnection, ) +from metadata.generated.schema.type.basic import DateTime from 
metadata.generated.schema.type.queryParserData import ParsedData, QueryParserData from metadata.generated.schema.type.tableQuery import TableQueries, TableQuery from metadata.ingestion.api.processor import Processor @@ -40,11 +41,10 @@ def parse_sql_statement(record: TableQuery, dialect: Dialect) -> Optional[Parsed :return: QueryParserData """ - start_date = record.analysisDate - if isinstance(record.analysisDate, str): - start_date = datetime.datetime.strptime( - str(record.analysisDate), "%Y-%m-%d %H:%M:%S" - ).date() + start_time = record.analysisDate + if isinstance(start_time, DateTime): + start_date = start_time.__root__.date() + start_time = datetime.datetime.strptime(str(start_date.isoformat()), "%Y-%m-%d") lineage_parser = LineageParser(record.query, dialect=dialect) @@ -58,7 +58,7 @@ def parse_sql_statement(record: TableQuery, dialect: Dialect) -> Optional[Parsed databaseSchema=record.databaseSchema, sql=record.query, userName=record.userName, - date=int(start_date.__root__.timestamp()), + date=int(start_time.timestamp()), serviceName=record.serviceName, duration=record.duration, ) diff --git a/ingestion/src/metadata/ingestion/sink/metadata_rest.py b/ingestion/src/metadata/ingestion/sink/metadata_rest.py index 962cd46654ab..10c047be6dcb 100644 --- a/ingestion/src/metadata/ingestion/sink/metadata_rest.py +++ b/ingestion/src/metadata/ingestion/sink/metadata_rest.py @@ -41,7 +41,6 @@ from metadata.ingestion.models.ometa_topic_data import OMetaTopicSampleData from metadata.ingestion.models.pipeline_status import OMetaPipelineStatus from metadata.ingestion.models.profile_data import OMetaTableProfileSampleData -from metadata.ingestion.models.search_index_data import OMetaIndexSampleData from metadata.ingestion.models.tests_data import ( OMetaLogicalTestSuiteSample, OMetaTestCaseResultsSample, @@ -110,9 +109,6 @@ def __init__( OMetaTestCaseResultsSample, self.write_test_case_results_sample ) self.write_record.register(OMetaTopicSampleData, 
self.write_topic_sample_data) - self.write_record.register( - OMetaIndexSampleData, self.write_search_index_sample_data - ) @classmethod def create(cls, config_dict: dict, metadata_config: OpenMetadataConnection): @@ -471,27 +467,5 @@ def write_topic_sample_data(self, record: OMetaTopicSampleData): f"Unexpected error while ingesting sample data for topic [{record.topic.name.__root__}]: {exc}" ) - def write_search_index_sample_data(self, record: OMetaIndexSampleData): - """ - Ingest Search Index Sample Data - """ - try: - if record.data.messages: - self.metadata.ingest_search_index_sample_data( - record.entity, - record.data, - ) - logger.debug( - f"Successfully ingested sample data for {record.entity.name.__root__}" - ) - self.status.records_written( - f"SearchIndexSampleData: {record.entity.name.__root__}" - ) - except Exception as exc: - logger.debug(traceback.format_exc()) - logger.error( - f"Unexpected error while ingesting sample data for search index [{record.entity.name.__root__}]: {exc}" - ) - def close(self): pass diff --git a/ingestion/src/metadata/ingestion/source/dashboard/tableau/models.py b/ingestion/src/metadata/ingestion/source/dashboard/tableau/models.py index 32f514d964ab..caf0a74d3ab1 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/tableau/models.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/tableau/models.py @@ -96,7 +96,6 @@ class DatasourceField(BaseModel): id: str name: Optional[str] upstreamColumns: Optional[List[Union[UpstreamColumn, None]]] - fullyQualifiedName: Optional[str] description: Optional[str] diff --git a/ingestion/src/metadata/ingestion/source/dashboard/tableau/queries.py b/ingestion/src/metadata/ingestion/source/dashboard/tableau/queries.py index 573f57ad14ea..89dac28cd276 100644 --- a/ingestion/src/metadata/ingestion/source/dashboard/tableau/queries.py +++ b/ingestion/src/metadata/ingestion/source/dashboard/tableau/queries.py @@ -27,7 +27,6 @@ name remoteType }} - fullyQualifiedName description }} 
workbook {{ diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py b/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py index e12af299ef47..33c24e36ecfb 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py @@ -98,6 +98,8 @@ def test_connection( Test connection. This can be executed either as part of a metadata workflow or during an Automation Workflow """ + _, project_ids = auth.default() + project_ids = project_ids if isinstance(project_ids, list) else [project_ids] def get_tags(taxonomies): for taxonomy in taxonomies: @@ -107,42 +109,37 @@ def get_tags(taxonomies): return policy_tags def test_tags(): - list_project_ids = auth.default() - project_id = list_project_ids[1] - - if isinstance(project_id, str): - taxonomies = PolicyTagManagerClient().list_taxonomies( - parent=f"projects/{project_id}/locations/{service_connection.taxonomyLocation}" - ) - return get_tags(taxonomies) - - if isinstance(project_id, list): + for project in project_ids: taxonomies = PolicyTagManagerClient().list_taxonomies( - parent=f"projects/{project_id[0]}/locations/{service_connection.taxonomyLocation}" + parent=f"projects/{project}/locations/{service_connection.taxonomyLocation}" ) - return get_tags(taxonomies) - return None - test_fn = { - "CheckAccess": partial(test_connection_engine_step, engine), - "GetSchemas": partial(execute_inspector_func, engine, "get_schema_names"), - "GetTables": partial(execute_inspector_func, engine, "get_table_names"), - "GetViews": partial(execute_inspector_func, engine, "get_view_names"), - "GetTags": test_tags, - "GetQueries": partial( - test_query, - engine=engine, - statement=BIGQUERY_TEST_STATEMENT.format( - region=service_connection.usageLocation + def test_connection_inner(engine): + test_fn = { + "CheckAccess": partial(test_connection_engine_step, engine), + "GetSchemas": 
partial(execute_inspector_func, engine, "get_schema_names"), + "GetTables": partial(execute_inspector_func, engine, "get_table_names"), + "GetViews": partial(execute_inspector_func, engine, "get_view_names"), + "GetTags": test_tags, + "GetQueries": partial( + test_query, + engine=engine, + statement=BIGQUERY_TEST_STATEMENT.format( + region=service_connection.usageLocation + ), ), - ), - } - - test_connection_steps( - metadata=metadata, - test_fn=test_fn, - service_type=service_connection.type.value, - automation_workflow=automation_workflow, - ) + } + + test_connection_steps( + metadata=metadata, + test_fn=test_fn, + service_type=service_connection.type.value, + automation_workflow=automation_workflow, + ) + + for project in project_ids: + if project in str(engine.url): + continue + test_connection_inner(engine) diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py b/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py index a9bba7575192..bfda62c173f4 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py @@ -44,12 +44,12 @@ ) from metadata.generated.schema.security.credentials.gcpValues import ( GcpCredentialsValues, - MultipleProjectId, SingleProjectId, ) from metadata.generated.schema.type.tagLabel import TagLabel from metadata.ingestion.api.source import InvalidSourceException from metadata.ingestion.models.ometa_classification import OMetaTagAndClassification +from metadata.ingestion.source.connections import get_connection from metadata.ingestion.source.database.bigquery.queries import ( BIGQUERY_SCHEMA_DESCRIPTION, BIGQUERY_TABLE_AND_TYPE, @@ -200,9 +200,9 @@ def create(cls, config_dict, metadata_config: OpenMetadataConnection): return cls(config, metadata_config) @staticmethod - def set_project_id(): + def set_project_id() -> List[str]: _, project_ids = auth.default() - return project_ids + return project_ids 
if isinstance(project_ids, list) else [project_ids] def query_table_names_and_types( self, schema_name: str @@ -222,7 +222,9 @@ def query_table_names_and_types( type_=_bigquery_table_types.get(table_type, TableType.Regular), ) for table_name, table_type in self.engine.execute( - BIGQUERY_TABLE_AND_TYPE.format(schema_name) + BIGQUERY_TABLE_AND_TYPE.format( + project_id=self.client.project, schema_name=schema_name + ) ) or [] ] @@ -319,12 +321,41 @@ def yield_database_schema( ) yield database_schema_request_obj + def get_table_obj(self, table_name: str): + schema_name = self.context.database_schema.name.__root__ + database = self.context.database.name.__root__ + bq_table_fqn = fqn._build(database, schema_name, table_name) + return self.client.get_table(bq_table_fqn) + + def yield_table_tag_details(self, table_name_and_type: Tuple[str, str]): + table_name, _ = table_name_and_type + table_obj = self.get_table_obj(table_name=table_name) + if table_obj.labels: + for key, value in table_obj.labels.items(): + yield from get_ometa_tag_and_classification( + tags=[value], + classification_name=key, + tag_description="Bigquery Table Label", + classification_desciption="", + ) + def get_tag_labels(self, table_name: str) -> Optional[List[TagLabel]]: """ This will only get executed if the tags context is properly informed """ - return [] + table_tag_labels = super().get_tag_labels(table_name) or [] + table_obj = self.get_table_obj(table_name=table_name) + if table_obj.labels: + for key, _ in table_obj.labels.items(): + tag_label = get_tag_label( + metadata=self.metadata, + tag_name=key, + classification_name=key, + ) + if tag_label: + table_tag_labels.append(tag_label) + return table_tag_labels def get_column_tag_labels( self, table_name: str, column: dict @@ -366,45 +397,31 @@ def set_inspector(self, database_name: str): ) self.client = get_bigquery_client(project_id=database_name, **kwargs) + self.engine = get_connection(self.service_connection) self.inspector = 
inspect(self.engine) def get_database_names(self) -> Iterable[str]: - if hasattr( - self.service_connection.credentials.gcpConfig, "projectId" - ) and isinstance( - self.service_connection.credentials.gcpConfig.projectId, MultipleProjectId - ): - for project_id in self.project_ids: - database_name = project_id - database_fqn = fqn.build( - self.metadata, - entity_type=Database, - service_name=self.context.database_service.name.__root__, - database_name=database_name, - ) - if filter_by_database( - self.source_config.databaseFilterPattern, - database_fqn - if self.source_config.useFqnForFiltering - else database_name, - ): - self.status.filter(database_fqn, "Database Filtered out") - continue - + for project_id in self.project_ids: + database_fqn = fqn.build( + self.metadata, + entity_type=Database, + service_name=self.context.database_service.name.__root__, + database_name=project_id, + ) + if filter_by_database( + self.source_config.databaseFilterPattern, + database_fqn if self.source_config.useFqnForFiltering else project_id, + ): + self.status.filter(database_fqn, "Database Filtered out") + else: try: - self.set_inspector(database_name=database_name) - self.project_id = ( # pylint: disable=attribute-defined-outside-init - database_name - ) - yield database_name + self.set_inspector(database_name=project_id) + yield project_id except Exception as exc: logger.debug(traceback.format_exc()) logger.error( - f"Error trying to connect to database {database_name}: {exc}" + f"Error trying to connect to database {project_id}: {exc}" ) - else: - self.set_inspector(database_name=self.project_ids) - yield self.project_ids def get_view_definition( self, table_type: str, table_name: str, schema_name: str, inspector: Inspector @@ -412,7 +429,9 @@ def get_view_definition( if table_type == TableType.View: try: view_definition = inspector.get_view_definition( - f"{self.context.database.name.__root__}.{schema_name}.{table_name}" + fqn._build( + self.context.database.name.__root__, 
schema_name, table_name + ) ) view_definition = ( "" if view_definition is None else str(view_definition) @@ -430,7 +449,7 @@ def get_table_partition_details( check if the table is partitioned table and return the partition details """ database = self.context.database.name.__root__ - table = self.client.get_table(f"{database}.{schema_name}.{table_name}") + table = self.client.get_table(fqn._build(database, schema_name, table_name)) if table.time_partitioning is not None: if table.time_partitioning.field: table_partition = TablePartition( diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py b/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py index 6b33dc0e1467..c0186255e69e 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/queries.py @@ -45,7 +45,7 @@ BIGQUERY_SCHEMA_DESCRIPTION = textwrap.dedent( """ SELECT option_value as schema_description FROM - {project_id}.region-{region}.INFORMATION_SCHEMA.SCHEMATA_OPTIONS + `{project_id}`.`region-{region}`.INFORMATION_SCHEMA.SCHEMATA_OPTIONS where schema_name = '{schema_name}' and option_name = 'description' and option_value is not null """ @@ -53,6 +53,6 @@ BIGQUERY_TABLE_AND_TYPE = textwrap.dedent( """ - select table_name, table_type from {}.INFORMATION_SCHEMA.TABLES where table_type != 'VIEW' + select table_name, table_type from `{project_id}`.{schema_name}.INFORMATION_SCHEMA.TABLES where table_type != 'VIEW' """ ) diff --git a/ingestion/src/metadata/ingestion/source/database/common_db_source.py b/ingestion/src/metadata/ingestion/source/database/common_db_source.py index dea5e52e93ac..2d76cc19cadd 100644 --- a/ingestion/src/metadata/ingestion/source/database/common_db_source.py +++ b/ingestion/src/metadata/ingestion/source/database/common_db_source.py @@ -523,9 +523,6 @@ def standardize_table_name(self, schema_name: str, table: str) -> str: """ return table - def 
yield_table_tag(self) -> Iterable[OMetaTagAndClassification]: - pass - def get_source_url( self, database_name: Optional[str] = None, diff --git a/ingestion/src/metadata/ingestion/source/database/database_service.py b/ingestion/src/metadata/ingestion/source/database/database_service.py index 736de690516f..6719c4326529 100644 --- a/ingestion/src/metadata/ingestion/source/database/database_service.py +++ b/ingestion/src/metadata/ingestion/source/database/database_service.py @@ -112,7 +112,7 @@ class DatabaseServiceTopology(ServiceTopology): NodeStage( type_=OMetaTagAndClassification, context="tags", - processor="yield_tag_details", + processor="yield_database_schema_tag_details", ack_sink=False, nullable=True, cache_all=True, @@ -130,6 +130,14 @@ class DatabaseServiceTopology(ServiceTopology): table = TopologyNode( producer="get_tables_name_and_type", stages=[ + NodeStage( + type_=OMetaTagAndClassification, + context="tags", + processor="yield_table_tag_details", + ack_sink=False, + nullable=True, + cache_all=True, + ), NodeStage( type_=Table, context="table", @@ -218,7 +226,23 @@ def yield_tag(self, schema_name: str) -> Iterable[OMetaTagAndClassification]: From topology. To be run for each schema """ - def yield_tag_details( + def yield_table_tags( + self, table_name_and_type: Tuple[str, TableType] + ) -> Iterable[CreateTableRequest]: + """ + From topology. To be run for each table + """ + + def yield_table_tag_details( + self, table_name_and_type: str + ) -> Iterable[OMetaTagAndClassification]: + """ + From topology. 
To be run for each table + """ + if self.source_config.includeTags: + yield from self.yield_table_tags(table_name_and_type) or [] + + def yield_database_schema_tag_details( self, schema_name: str ) -> Iterable[OMetaTagAndClassification]: """ @@ -267,7 +291,7 @@ def get_tag_by_fqn(self, entity_fqn: str) -> Optional[List[TagLabel]]: tag_labels = [] for tag_and_category in self.context.tags or []: - if tag_and_category.fqn.__root__ == entity_fqn: + if tag_and_category.fqn and tag_and_category.fqn.__root__ == entity_fqn: tag_label = get_tag_label( metadata=self.metadata, tag_name=tag_and_category.tag_request.name.__root__, diff --git a/ingestion/src/metadata/ingestion/source/database/sample_data.py b/ingestion/src/metadata/ingestion/source/database/sample_data.py index 479289e9c156..a68aeb878ee3 100644 --- a/ingestion/src/metadata/ingestion/source/database/sample_data.py +++ b/ingestion/src/metadata/ingestion/source/database/sample_data.py @@ -34,9 +34,6 @@ ) from metadata.generated.schema.api.data.createMlModel import CreateMlModelRequest from metadata.generated.schema.api.data.createPipeline import CreatePipelineRequest -from metadata.generated.schema.api.data.createSearchIndex import ( - CreateSearchIndexRequest, -) from metadata.generated.schema.api.data.createTable import CreateTableRequest from metadata.generated.schema.api.data.createTableProfile import ( CreateTableProfileRequest, @@ -80,7 +77,6 @@ from metadata.generated.schema.entity.services.messagingService import MessagingService from metadata.generated.schema.entity.services.mlmodelService import MlModelService from metadata.generated.schema.entity.services.pipelineService import PipelineService -from metadata.generated.schema.entity.services.searchService import SearchService from metadata.generated.schema.entity.services.storageService import StorageService from metadata.generated.schema.entity.teams.team import Team from metadata.generated.schema.entity.teams.user import User @@ -462,34 +458,6 @@ def 
__init__(self, config: WorkflowSource, metadata_config: OpenMetadataConnecti ) ) - self.storage_service_json = json.load( - open( # pylint: disable=consider-using-with - sample_data_folder + "/storage/service.json", - "r", - encoding=UTF_8, - ) - ) - - self.search_service_json = json.load( - open( # pylint: disable=consider-using-with - sample_data_folder + "/searchIndexes/service.json", - "r", - encoding=UTF_8, - ) - ) - self.search_service = self.metadata.get_service_or_create( - entity=SearchService, - config=WorkflowSource(**self.search_service_json), - ) - - self.search_indexes = json.load( - open( # pylint: disable=consider-using-with - sample_data_folder + "/searchIndexes/searchIndexes.json", - "r", - encoding=UTF_8, - ) - ) - @classmethod def create(cls, config_dict, metadata_config: OpenMetadataConnection): """Create class instance""" @@ -519,7 +487,6 @@ def next_record(self) -> Iterable[Entity]: yield from self.ingest_pipeline_status() yield from self.ingest_mlmodels() yield from self.ingest_containers() - yield from self.ingest_search_indexes() yield from self.ingest_profiles() yield from self.ingest_test_suite() yield from self.ingest_test_case() @@ -740,30 +707,6 @@ def ingest_topics(self) -> Iterable[CreateTopicRequest]: sample_data=TopicSampleData(messages=topic["sampleData"]), ) - def ingest_search_indexes(self) -> Iterable[CreateSearchIndexRequest]: - """ - Ingest Sample SearchIndexes - """ - for search_index in self.search_indexes["searchIndexes"]: - search_index["service"] = EntityReference( - id=self.search_service.id, type="searchService" - ) - create_search_index = CreateSearchIndexRequest( - name=search_index["name"], - description=search_index["description"], - displayName=search_index["displayName"], - tags=search_index["tags"], - fields=search_index["fields"], - service=self.search_service.fullyQualifiedName, - ) - - self.status.scanned( - f"SearchIndex Scanned: {create_search_index.name.__root__}" - ) - yield create_search_index - - # 
TODO: Add search index sample data - def ingest_looker(self) -> Iterable[Entity]: """ Looker sample data @@ -1178,15 +1121,22 @@ def ingest_profiles(self) -> Iterable[OMetaTableProfileSampleData]: rowCount=profile["rowCount"], createDateTime=profile.get("createDateTime"), sizeInByte=profile.get("sizeInByte"), - timestamp=( - datetime.now(tz=timezone.utc) - timedelta(days=days) - ).timestamp(), + timestamp=int( + ( + datetime.now(tz=timezone.utc) - timedelta(days=days) + ).timestamp() + * 1000 + ), ), columnProfile=[ ColumnProfile( - timestamp=( - datetime.now(tz=timezone.utc) - timedelta(days=days) - ).timestamp(), + timestamp=int( + ( + datetime.now(tz=timezone.utc) + - timedelta(days=days) + ).timestamp() + * 1000 + ), **col_profile, ) for col_profile in profile["columnProfile"] @@ -1273,9 +1223,10 @@ def ingest_test_case_results(self) -> Iterable[OMetaTestCaseResultsSample]: for days, result in enumerate(test_case_results["results"]): yield OMetaTestCaseResultsSample( test_case_results=TestCaseResult( - timestamp=( - datetime.now() - timedelta(days=days) - ).timestamp(), + timestamp=int( + (datetime.now() - timedelta(days=days)).timestamp() + * 1000 + ), testCaseStatus=result["testCaseStatus"], result=result["result"], testResultValue=[ diff --git a/ingestion/src/metadata/ingestion/source/search/elasticsearch/connection.py b/ingestion/src/metadata/ingestion/source/search/elasticsearch/connection.py deleted file mode 100644 index dabc89f17a44..000000000000 --- a/ingestion/src/metadata/ingestion/source/search/elasticsearch/connection.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Source connection handler -""" -from typing import Optional - -from elasticsearch import Elasticsearch - -from metadata.generated.schema.entity.automations.workflow import ( - Workflow as AutomationWorkflow, -) -from metadata.generated.schema.entity.services.connections.search.elasticSearchConnection import ( - ApiAuthentication, - BasicAuthentication, - ElasticsearchConnection, -) -from metadata.ingestion.connections.builders import init_empty_connection_arguments -from metadata.ingestion.connections.test_connections import test_connection_steps -from metadata.ingestion.ometa.ometa_api import OpenMetadata - - -def get_connection(connection: ElasticsearchConnection) -> Elasticsearch: - """ - Create connection - """ - basic_auth = None - api_key = None - if isinstance(connection.authType, BasicAuthentication): - basic_auth = ( - connection.authType.username, - connection.authType.password.get_secret_value(), - ) - - if isinstance(connection.authType, ApiAuthentication): - api_key = ( - connection.authType.apiKeyId, - connection.authType.apiKey.get_secret_value(), - ) - - if not connection.connectionArguments: - connection.connectionArguments = init_empty_connection_arguments() - - return Elasticsearch( - [connection.hostPort], - basic_auth=basic_auth, - api_key=api_key, - scheme=connection.scheme.value, - **connection.connectionArguments.__root__ - ) - - -def test_connection( - metadata: OpenMetadata, - client: Elasticsearch, - service_connection: ElasticsearchConnection, - automation_workflow: Optional[AutomationWorkflow] = None, -) -> None: - """ - Test 
connection. This can be executed either as part - of a metadata workflow or during an Automation Workflow - """ - - test_fn = { - "CheckAccess": client.info, - "GetSearchIndexes": client.indices.get_alias, - } - - test_connection_steps( - metadata=metadata, - test_fn=test_fn, - service_type=service_connection.type.value, - automation_workflow=automation_workflow, - ) diff --git a/ingestion/src/metadata/ingestion/source/search/elasticsearch/metadata.py b/ingestion/src/metadata/ingestion/source/search/elasticsearch/metadata.py deleted file mode 100644 index dd58649aff1c..000000000000 --- a/ingestion/src/metadata/ingestion/source/search/elasticsearch/metadata.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Elasticsearch source to extract metadata -""" -from typing import Any, Iterable, Optional - -from elasticsearch import Elasticsearch - -from metadata.generated.schema.api.data.createSearchIndex import ( - CreateSearchIndexRequest, -) -from metadata.generated.schema.entity.data.searchIndex import SearchIndexSampleData -from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import ( - OpenMetadataConnection, -) -from metadata.generated.schema.entity.services.connections.search.elasticSearchConnection import ( - ElasticsearchConnection, -) -from metadata.generated.schema.metadataIngestion.workflow import ( - Source as WorkflowSource, -) -from metadata.ingestion.api.source import InvalidSourceException, Source -from metadata.ingestion.models.search_index_data import OMetaIndexSampleData -from metadata.ingestion.source.search.elasticsearch.parser import parse_es_index_mapping -from metadata.ingestion.source.search.search_service import SearchServiceSource -from metadata.utils.logger import ingestion_logger - -logger = ingestion_logger() - - -WILDCARD_SEARCH = "*" - - -class ElasticsearchSource(SearchServiceSource): - """ - Implements the necessary methods ot extract - Search Index metadata from Elastic Search - """ - - def __init__(self, config: Source, metadata_config: OpenMetadataConnection): - super().__init__(config, metadata_config) - self.client: Elasticsearch = self.connection - - @classmethod - def create(cls, config_dict, metadata_config: OpenMetadataConnection): - config: WorkflowSource = WorkflowSource.parse_obj(config_dict) - connection: ElasticsearchConnection = config.serviceConnection.__root__.config - if not isinstance(connection, ElasticsearchConnection): - raise InvalidSourceException( - f"Expected ElasticsearchConnection, but got {connection}" - ) - return cls(config, metadata_config) - - def get_search_index_list(self) -> Iterable[dict]: - """ - Get List of all search index - """ - index_list = 
self.client.indices.get_alias() or {} - for index in index_list.keys(): - yield self.client.indices.get(index) - - def get_search_index_name(self, search_index_details: dict) -> Optional[str]: - """ - Get Search Index Name - """ - if search_index_details and len(search_index_details) == 1: - return list(search_index_details.keys())[0] - - return None - - def yield_search_index( - self, search_index_details: Any - ) -> Iterable[CreateSearchIndexRequest]: - """ - Method to Get Search Index Entity - """ - index_name = self.get_search_index_name(search_index_details) - if index_name: - yield CreateSearchIndexRequest( - name=index_name, - displayName=index_name, - searchIndexSettings=search_index_details.get(index_name, {}).get( - "settings", {} - ), - service=self.context.search_service.fullyQualifiedName.__root__, - fields=parse_es_index_mapping( - search_index_details.get(index_name, {}).get("mappings") - ), - ) - - def yield_search_index_sample_data( - self, search_index_details: Any - ) -> Iterable[OMetaIndexSampleData]: - """ - Method to Get Sample Data of Search Index Entity - """ - if self.source_config.includeSampleData and self.context.search_index: - - sample_data = self.client.search( - index=self.context.search_index.name.__root__, - q=WILDCARD_SEARCH, - size=self.source_config.sampleSize, - request_timeout=self.service_connection.connectionTimeoutSecs, - ) - - yield OMetaIndexSampleData( - entity=self.context.search_index, - data=SearchIndexSampleData( - messages=[ - str(message) - for message in sample_data.get("hits", {}).get("hits", []) - ] - ), - ) diff --git a/ingestion/src/metadata/ingestion/source/search/elasticsearch/parser.py b/ingestion/src/metadata/ingestion/source/search/elasticsearch/parser.py deleted file mode 100644 index 06b7aa7ce875..000000000000 --- a/ingestion/src/metadata/ingestion/source/search/elasticsearch/parser.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Utils module to parse the jsonschema -""" - -import traceback -from typing import List, Optional - -from metadata.generated.schema.entity.data.searchIndex import DataType, SearchIndexField -from metadata.utils.logger import ingestion_logger - -logger = ingestion_logger() - - -# If any type of ES field is not recognized mark it as unknown -# pylint: disable=no-member,unused-argument,protected-access -@classmethod -def _missing_(cls, value): - return cls.UNKNOWN - - -DataType._missing_ = _missing_ - - -def parse_es_index_mapping(mapping: dict) -> Optional[List[SearchIndexField]]: - """ - Recursively convert the parsed schema into required models - """ - field_models = [] - try: - properties = mapping.get("properties", {}) - for key, value in properties.items(): - data_type = ( - DataType(value.get("type").upper()) - if value.get("type") - else DataType.OBJECT - ) - field_models.append( - SearchIndexField( - name=key, - dataType=data_type, - description=value.get("description"), - children=parse_es_index_mapping(value) - if value.get("properties") - else None, - ) - ) - except Exception as exc: # pylint: disable=broad-except - logger.debug(traceback.format_exc()) - logger.warning(f"Unable to parse the index properties: {exc}") - - return field_models diff --git a/ingestion/src/metadata/ingestion/source/search/search_service.py b/ingestion/src/metadata/ingestion/source/search/search_service.py deleted file mode 100644 index 5e59426479ad..000000000000 --- 
a/ingestion/src/metadata/ingestion/source/search/search_service.py +++ /dev/null @@ -1,226 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Base class for ingesting search index services -""" -from abc import ABC, abstractmethod -from typing import Any, Iterable, List, Optional, Set - -from metadata.generated.schema.api.data.createSearchIndex import ( - CreateSearchIndexRequest, -) -from metadata.generated.schema.entity.data.searchIndex import ( - SearchIndex, - SearchIndexSampleData, -) -from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import ( - OpenMetadataConnection, -) -from metadata.generated.schema.entity.services.searchService import ( - SearchConnection, - SearchService, -) -from metadata.generated.schema.metadataIngestion.searchServiceMetadataPipeline import ( - SearchServiceMetadataPipeline, -) -from metadata.generated.schema.metadataIngestion.workflow import ( - Source as WorkflowSource, -) -from metadata.ingestion.api.source import Source -from metadata.ingestion.api.topology_runner import TopologyRunnerMixin -from metadata.ingestion.models.delete_entity import ( - DeleteEntity, - delete_entity_from_source, -) -from metadata.ingestion.models.search_index_data import OMetaIndexSampleData -from metadata.ingestion.models.topology import ( - NodeStage, - ServiceTopology, - TopologyNode, - create_source_context, -) -from metadata.ingestion.ometa.ometa_api import OpenMetadata -from 
metadata.ingestion.source.connections import get_connection, get_test_connection_fn -from metadata.utils import fqn -from metadata.utils.filters import filter_by_search_index -from metadata.utils.logger import ingestion_logger - -logger = ingestion_logger() - - -class SearchServiceTopology(ServiceTopology): - """ - Defines the hierarchy in Search Services. - - We could have a topology validator. We can only consume - data that has been produced by any parent node. - """ - - root = TopologyNode( - producer="get_services", - stages=[ - NodeStage( - type_=SearchService, - context="search_service", - processor="yield_create_request_search_service", - overwrite=False, - must_return=True, - ), - ], - children=["search_index"], - post_process=["mark_search_indexes_as_deleted"], - ) - search_index = TopologyNode( - producer="get_search_index", - stages=[ - NodeStage( - type_=SearchIndex, - context="search_index", - processor="yield_search_index", - consumer=["search_service"], - ), - NodeStage( - type_=OMetaIndexSampleData, - context="search_index_sample_data", - processor="yield_search_index_sample_data", - consumer=["search_service"], - ack_sink=False, - nullable=True, - ), - ], - ) - - -class SearchServiceSource(TopologyRunnerMixin, Source, ABC): - """ - Base class for Search Services. - It implements the topology and context. 
- """ - - source_config: SearchServiceMetadataPipeline - config: WorkflowSource - # Big union of types we want to fetch dynamically - service_connection: SearchConnection.__fields__["config"].type_ - - topology = SearchServiceTopology() - context = create_source_context(topology) - index_source_state: Set = set() - - def __init__( - self, - config: WorkflowSource, - metadata_config: OpenMetadataConnection, - ): - super().__init__() - self.config = config - self.metadata_config = metadata_config - self.metadata = OpenMetadata(metadata_config) - self.source_config: SearchServiceMetadataPipeline = ( - self.config.sourceConfig.config - ) - self.service_connection = self.config.serviceConnection.__root__.config - self.connection = get_connection(self.service_connection) - - # Flag the connection for the test connection - self.connection_obj = self.connection - self.test_connection() - - @abstractmethod - def yield_search_index( - self, search_index_details: Any - ) -> Iterable[CreateSearchIndexRequest]: - """ - Method to Get Search Index Entity - """ - - def yield_search_index_sample_data( - self, search_index_details: Any - ) -> Iterable[SearchIndexSampleData]: - """ - Method to Get Sample Data of Search Index Entity - """ - - @abstractmethod - def get_search_index_list(self) -> Optional[List[Any]]: - """ - Get List of all search index - """ - - @abstractmethod - def get_search_index_name(self, search_index_details: Any) -> str: - """ - Get Search Index Name - """ - - def get_search_index(self) -> Any: - for index_details in self.get_search_index_list(): - search_index_name = self.get_search_index_name(index_details) - if filter_by_search_index( - self.source_config.searchIndexFilterPattern, - search_index_name, - ): - self.status.filter( - search_index_name, - "Search Index Filtered Out", - ) - continue - yield index_details - - def yield_create_request_search_service(self, config: WorkflowSource): - yield self.metadata.get_create_service_from_source( - 
entity=SearchService, config=config - ) - - def get_services(self) -> Iterable[WorkflowSource]: - yield self.config - - def prepare(self): - """ - Nothing to prepare by default - """ - - def test_connection(self) -> None: - test_connection_fn = get_test_connection_fn(self.service_connection) - test_connection_fn(self.metadata, self.connection_obj, self.service_connection) - - def mark_search_indexes_as_deleted(self) -> Iterable[DeleteEntity]: - """ - Method to mark the search index as deleted - """ - if self.source_config.markDeletedSearchIndexes: - yield from delete_entity_from_source( - metadata=self.metadata, - entity_type=SearchIndex, - entity_source_state=self.index_source_state, - mark_deleted_entity=self.source_config.markDeletedSearchIndexes, - params={ - "service": self.context.search_service.fullyQualifiedName.__root__ - }, - ) - - def register_record(self, search_index_request: CreateSearchIndexRequest) -> None: - """ - Mark the search index record as scanned and update the index_source_state - """ - index_fqn = fqn.build( - self.metadata, - entity_type=SearchIndex, - service_name=search_index_request.service.__root__, - search_index_name=search_index_request.name.__root__, - ) - - self.index_source_state.add(index_fqn) - self.status.scanned(search_index_request.name.__root__) - - def close(self): - """ - Nothing to close by default - """ diff --git a/ingestion/src/metadata/ingestion/stage/table_usage.py b/ingestion/src/metadata/ingestion/stage/table_usage.py index cc58a659b5fd..1c8b64e2cd08 100644 --- a/ingestion/src/metadata/ingestion/stage/table_usage.py +++ b/ingestion/src/metadata/ingestion/stage/table_usage.py @@ -18,6 +18,7 @@ import shutil import traceback from pathlib import Path +from typing import List, Tuple from metadata.config.common import ConfigModel from metadata.generated.schema.api.data.createQuery import CreateQueryRequest @@ -25,7 +26,7 @@ OpenMetadataConnection, ) from metadata.generated.schema.entity.teams.user import User -from 
metadata.generated.schema.type.queryParserData import QueryParserData +from metadata.generated.schema.type.queryParserData import ParsedData, QueryParserData from metadata.generated.schema.type.tableUsageCount import TableUsageCount from metadata.ingestion.api.stage import Stage from metadata.ingestion.ometa.ometa_api import OpenMetadata @@ -80,20 +81,22 @@ def init_location(self) -> None: logger.info(f"Creating the directory to store staging data in {location}") location.mkdir(parents=True, exist_ok=True) - def _get_user_entity(self, username: str): + def _get_user_entity(self, username: str) -> Tuple[List[str], List[str]]: if username: user = self.metadata.get_by_name(entity=User, fqn=username) if user: - return [user.fullyQualifiedName.__root__] - return [] + return [user.fullyQualifiedName.__root__], [] + return [], [username] def _add_sql_query(self, record, table): + users, used_by = self._get_user_entity(record.userName) if self.table_queries.get((table, record.date)): self.table_queries[(table, record.date)].append( CreateQueryRequest( query=record.sql, - users=self._get_user_entity(record.userName), + users=users, queryDate=record.date, + usedBy=used_by, duration=record.duration, ) ) @@ -101,13 +104,45 @@ def _add_sql_query(self, record, table): self.table_queries[(table, record.date)] = [ CreateQueryRequest( query=record.sql, - users=self._get_user_entity(record.userName), + users=users, queryDate=record.date, + usedBy=used_by, duration=record.duration, ) ] + def _handle_table_usage(self, parsed_data: ParsedData, table: str) -> None: + table_joins = parsed_data.joins.get(table) + try: + self._add_sql_query(record=parsed_data, table=table) + table_usage_count = self.table_usage.get((table, parsed_data.date)) + if table_usage_count is not None: + table_usage_count.count = table_usage_count.count + 1 + if table_joins: + table_usage_count.joins.extend(table_joins) + else: + joins = [] + if table_joins: + joins.extend(table_joins) + + table_usage_count = 
TableUsageCount( + table=table, + databaseName=parsed_data.databaseName, + date=parsed_data.date, + joins=joins, + serviceName=parsed_data.serviceName, + sqlQueries=[], + databaseSchema=parsed_data.databaseSchema, + ) + + except Exception as exc: + logger.debug(traceback.format_exc()) + logger.warning(f"Error in staging record: {exc}") + self.table_usage[(table, parsed_data.date)] = table_usage_count + logger.info(f"Successfully record staged for {table}") + def stage_record(self, record: QueryParserData) -> None: + """ Process the parsed data and store it in a file """ @@ -119,34 +154,7 @@ def stage_record(self, record: QueryParserData) -> None: if parsed_data is None: continue for table in parsed_data.tables: - table_joins = parsed_data.joins.get(table) - try: - self._add_sql_query(record=parsed_data, table=table) - table_usage_count = self.table_usage.get((table, parsed_data.date)) - if table_usage_count is not None: - table_usage_count.count = table_usage_count.count + 1 - if table_joins: - table_usage_count.joins.extend(table_joins) - else: - joins = [] - if table_joins: - joins.extend(table_joins) - - table_usage_count = TableUsageCount( - table=table, - databaseName=parsed_data.databaseName, - date=parsed_data.date, - joins=joins, - serviceName=parsed_data.serviceName, - sqlQueries=[], - databaseSchema=parsed_data.databaseSchema, - ) - - except Exception as exc: - logger.debug(traceback.format_exc()) - logger.warning(f"Error in staging record: {exc}") - self.table_usage[(table, parsed_data.date)] = table_usage_count - logger.info(f"Successfully record staged for {table}") + self._handle_table_usage(parsed_data=parsed_data, table=table) self.dump_data_to_file() def dump_data_to_file(self): diff --git a/ingestion/src/metadata/profiler/interface/pandas/profiler_interface.py b/ingestion/src/metadata/profiler/interface/pandas/profiler_interface.py index dbbecb9a292a..f29fe0c4c2b8 100644 --- a/ingestion/src/metadata/profiler/interface/pandas/profiler_interface.py 
+++ b/ingestion/src/metadata/profiler/interface/pandas/profiler_interface.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# pylint: disable=arguments-differ """ Interfaces with database for all database engine @@ -32,7 +33,6 @@ from metadata.profiler.processor.sampler.sampler_factory import sampler_factory_ from metadata.readers.dataframe.models import DatalakeTableSchemaWrapper from metadata.utils.datalake.datalake_utils import fetch_dataframe -from metadata.utils.dispatch import valuedispatch from metadata.utils.logger import profiler_interface_registry_logger from metadata.utils.sqa_like_column import SQALikeColumn, Type @@ -109,20 +109,10 @@ def _get_sampler(self): profile_sample_query=self.profile_query, ) - @valuedispatch - def _get_metrics(self, *args, **kwargs): - """Generic getter method for metrics. To be used with - specific dispatch methods - """ - logger.warning("Could not get metric. 
No function registered.") - - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Table.value) - def _( + def _compute_table_metrics( self, - metric_type: str, metrics: List[Metrics], - dfs, + runner: List, *args, **kwargs, ): @@ -138,7 +128,7 @@ def _( try: row_dict = {} - df_list = [df.where(pd.notnull(df), None) for df in dfs] + df_list = [df.where(pd.notnull(df), None) for df in runner] for metric in metrics: row_dict[metric.name()] = metric().df_fn(df_list) return row_dict @@ -147,13 +137,10 @@ def _( logger.warning(f"Error trying to compute profile for {exc}") raise RuntimeError(exc) - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Static.value) - def _( + def _compute_static_metrics( self, - metric_type: str, metrics: List[Metrics], - dfs, + runner: List, column, *args, **kwargs, @@ -172,7 +159,7 @@ def _( try: row_dict = {} for metric in metrics: - metric_resp = metric(column).df_fn(dfs) + metric_resp = metric(column).df_fn(runner) row_dict[metric.name()] = ( None if pd.isnull(metric_resp) else metric_resp ) @@ -183,13 +170,10 @@ def _( ) raise RuntimeError(exc) - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Query.value) - def _( + def _compute_query_metrics( self, - metric_type: str, - metrics: Metrics, - dfs, + metric: Metrics, + runner: List, column, *args, **kwargs, @@ -204,18 +188,15 @@ def _( dictionnary of results """ col_metric = None - col_metric = metrics(column).df_fn(dfs) + col_metric = metric(column).df_fn(runner) if not col_metric: return None - return {metrics.name(): col_metric} + return {metric.name(): col_metric} - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Window.value) - def _( + def _compute_window_metrics( self, - metric_type: str, - metrics: Metrics, - dfs, + metrics: List[Metrics], + runner: List, column, *args, **kwargs, @@ -224,19 +205,21 @@ def _( Given a list of metrics, compute the given results and returns the values """ + try: 
metric_values = {} for metric in metrics: - metric_values[metric.name()] = metric(column).df_fn(dfs) + metric_values[metric.name()] = metric(column).df_fn(runner) return metric_values if metric_values else None except Exception as exc: logger.debug(traceback.format_exc()) logger.warning(f"Unexpected exception computing metrics: {exc}") return None - @_get_metrics.register(MetricTypes.System.value) - def _( + def _compute_system_metrics( self, + metrics: Metrics, + runner: List, *args, **kwargs, ): @@ -260,11 +243,9 @@ def compute_metrics( try: row = None if self.dfs: - row = self._get_metrics( - metric_type.value, + row = self._get_metric_fn[metric_type.value]( metrics, dfs, - session=self.client, column=column, ) except Exception as exc: @@ -354,7 +335,9 @@ def get_all_metrics( profile_results["columns"][column].update( { "name": column, - "timestamp": datetime.now(tz=timezone.utc).timestamp(), + "timestamp": int( + datetime.now(tz=timezone.utc).timestamp() * 1000 + ), **profile, } ) diff --git a/ingestion/src/metadata/profiler/interface/profiler_interface.py b/ingestion/src/metadata/profiler/interface/profiler_interface.py index 5befd078bccf..85bb77d138e4 100644 --- a/ingestion/src/metadata/profiler/interface/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/profiler_interface.py @@ -15,7 +15,7 @@ """ from abc import ABC, abstractmethod -from typing import Dict, Optional, Union +from typing import Dict, List, Optional, Union from sqlalchemy import Column from typing_extensions import Self @@ -36,7 +36,9 @@ from metadata.ingestion.ometa.ometa_api import OpenMetadata from metadata.ingestion.source.connections import get_connection from metadata.profiler.api.models import ProfileSampleConfig, TableConfig +from metadata.profiler.metrics.core import MetricTypes from metadata.profiler.metrics.registry import Metrics +from metadata.profiler.processor.runner import QueryRunner from metadata.utils.partition import get_partition_details @@ -78,6 +80,14 
@@ def __init__( ) self.timeout_seconds = timeout_seconds + self._get_metric_fn = { + MetricTypes.Table.value: self._compute_table_metrics, + MetricTypes.Static.value: self._compute_static_metrics, + MetricTypes.Query.value: self._compute_query_metrics, + MetricTypes.Window.value: self._compute_window_metrics, + MetricTypes.System.value: self._compute_system_metrics, + } + @abstractmethod def _get_sampler(self): """Get the sampler""" @@ -222,7 +232,57 @@ def table(self): raise NotImplementedError @abstractmethod - def _get_metrics(self, *args, **kwargs): + def _compute_table_metrics( + self, + metrics: List[Metrics], + runner, + *args, + **kwargs, + ): + """Get metrics""" + raise NotImplementedError + + @abstractmethod + def _compute_static_metrics( + self, + metrics: List[Metrics], + runner, + *args, + **kwargs, + ): + """Get metrics""" + raise NotImplementedError + + @abstractmethod + def _compute_query_metrics( + self, + metric: Metrics, + runner, + *args, + **kwargs, + ): + """Get metrics""" + raise NotImplementedError + + @abstractmethod + def _compute_window_metrics( + self, + metrics: List[Metrics], + runner: QueryRunner, + *args, + **kwargs, + ): + """Get metrics""" + raise NotImplementedError + + @abstractmethod + def _compute_system_metrics( + self, + metrics: Metrics, + runner, + *args, + **kwargs, + ): """Get metrics""" raise NotImplementedError diff --git a/ingestion/src/metadata/profiler/interface/profiler_interface_factory.py b/ingestion/src/metadata/profiler/interface/profiler_interface_factory.py index 684e6be3643c..15d7aff293cf 100644 --- a/ingestion/src/metadata/profiler/interface/profiler_interface_factory.py +++ b/ingestion/src/metadata/profiler/interface/profiler_interface_factory.py @@ -21,6 +21,9 @@ from metadata.generated.schema.entity.services.connections.database.datalakeConnection import ( DatalakeConnection, ) +from metadata.generated.schema.entity.services.connections.database.singleStoreConnection import ( + SingleStoreConnection, +) 
from metadata.generated.schema.entity.services.databaseService import DatabaseConnection from metadata.profiler.interface.pandas.profiler_interface import ( PandasProfilerInterface, @@ -32,6 +35,9 @@ from metadata.profiler.interface.sqlalchemy.profiler_interface import ( SQAProfilerInterface, ) +from metadata.profiler.interface.sqlalchemy.single_store.profiler_interface import ( + SingleStoreProfilerInterface, +) class ProfilerInterfaceFactory: @@ -58,6 +64,9 @@ def create(self, interface_type: str, *args, **kwargs): profiler_interface_factory.register( BigQueryConnection.__name__, BigQueryProfilerInterface ) +profiler_interface_factory.register( + SingleStoreConnection.__name__, SingleStoreProfilerInterface +) profiler_interface_factory.register( DatalakeConnection.__name__, PandasProfilerInterface ) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py index 99f26c96ea58..09a6c814fcbe 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# pylint: disable=arguments-differ """ Interfaces with database for all database engine @@ -40,7 +41,6 @@ from metadata.profiler.processor.runner import QueryRunner from metadata.profiler.processor.sampler.sampler_factory import sampler_factory_ from metadata.utils.custom_thread_pool import CustomThreadPoolExecutor -from metadata.utils.dispatch import valuedispatch from metadata.utils.logger import profiler_interface_registry_logger logger = profiler_interface_registry_logger() @@ -153,18 +153,8 @@ def _compute_static_metrics_wo_sum( handle_query_exception(msg, exc, session) return None - @valuedispatch - def _get_metrics(self, *args, **kwargs): - """Generic getter method for metrics. To be used with - specific dispatch methods - """ - logger.warning("Could not get metric. No function registered.") - - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Table.value) - def _( + def _compute_table_metrics( self, - metric_type: str, metrics: List[Metrics], runner: QueryRunner, session, @@ -180,7 +170,6 @@ def _( dictionnary of results """ # pylint: disable=protected-access - try: dialect = runner._session.get_bind().dialect.name row = table_metric_construct_factory.construct( @@ -201,15 +190,12 @@ def _( session.rollback() raise RuntimeError(exc) - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Static.value) - def _( + def _compute_static_metrics( self, - metric_type: str, metrics: List[Metrics], runner: QueryRunner, + column, session, - column: Column, *args, **kwargs, ): @@ -247,16 +233,15 @@ def _( handle_query_exception(msg, exc, session) return None - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Query.value) - def _( + def _compute_query_metrics( self, - metric_type: str, metric: Metrics, runner: QueryRunner, + column, session, - column: Column, sample, + *args, + **kwargs, ): """Given a list of metrics, compute the given results and returns the values @@ -267,6 +252,7 @@ def _( Returns: 
dictionnary of results """ + try: col_metric = metric(column) metric_query = col_metric.query(sample=sample, session=session) @@ -284,15 +270,12 @@ def _( handle_query_exception(msg, exc, session) return None - # pylint: disable=unused-argument - @_get_metrics.register(MetricTypes.Window.value) - def _( + def _compute_window_metrics( self, - metric_type: str, metrics: List[Metrics], runner: QueryRunner, + column, session, - column: Column, *args, **kwargs, ): @@ -305,6 +288,7 @@ def _( Returns: dictionnary of results """ + if not metrics: return None try: @@ -327,11 +311,9 @@ def _( return dict(row) return None - @_get_metrics.register(MetricTypes.System.value) - def _( + def _compute_system_metrics( self, - metric_type: str, - metric: Metrics, + metrics: Metrics, runner: QueryRunner, session, *args, @@ -348,7 +330,7 @@ def _( dictionnary of results """ try: - rows = metric().sql(session, conn_config=self.service_connection_config) + rows = metrics().sql(session, conn_config=self.service_connection_config) return rows except Exception as exc: msg = f"Error trying to compute profile for {runner.table.__tablename__}: {exc}" @@ -412,8 +394,7 @@ def compute_metrics_in_thread( ) try: - row = self._get_metrics( - metric_type.value, + row = self._get_metric_fn[metric_type.value]( metrics, runner=runner, session=session, @@ -471,7 +452,9 @@ def get_all_metrics( profile_results["columns"][column].update( { "name": column, - "timestamp": datetime.now(tz=timezone.utc).timestamp(), + "timestamp": int( + datetime.now(tz=timezone.utc).timestamp() * 1000 + ), **profile, } ) diff --git a/bootstrap/sql/migrations/native/1.1.3/mysql/postDataMigrationSQLScript.sql b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/__init__.py similarity index 100% rename from bootstrap/sql/migrations/native/1.1.3/mysql/postDataMigrationSQLScript.sql rename to ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/__init__.py diff --git 
a/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py new file mode 100644 index 000000000000..6d87883a2e34 --- /dev/null +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py @@ -0,0 +1,86 @@ +# Copyright 2021 Collate +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Interfaces with database for all database engine +supporting sqlalchemy abstraction layer +""" + +from typing import List + +from sqlalchemy.exc import ProgrammingError + +from metadata.profiler.interface.sqlalchemy.profiler_interface import ( + SQAProfilerInterface, + handle_query_exception, +) +from metadata.profiler.metrics.registry import Metrics +from metadata.profiler.processor.runner import QueryRunner +from metadata.profiler.source.single_store.metrics.window.first_quartile import ( + SingleStoreFirstQuartile, +) +from metadata.profiler.source.single_store.metrics.window.median import ( + SingleStoreMedian, +) +from metadata.profiler.source.single_store.metrics.window.third_quartile import ( + SingleStoreThirdQuartile, +) +from metadata.utils.logger import profiler_interface_registry_logger + +logger = profiler_interface_registry_logger() + + +class SingleStoreProfilerInterface(SQAProfilerInterface): + """ + Interface to interact with registry supporting + sqlalchemy. 
+ """ + + def _compute_window_metrics( + self, + metrics: List[Metrics], + runner: QueryRunner, + *args, + **kwargs, + ): + """Given a list of metrics, compute the given results + and returns the values + + Args: + column: the column to compute the metrics against + metrics: list of metrics to compute + Returns: + dictionnary of results + """ + session = kwargs.get("session") + column = kwargs.get("column") + + if not metrics: + return None + try: + # we patch the metrics at runtime to use the SingleStore specific functions + # as we can't compile the query based on the dialect as it return `mysql` + metrics = [SingleStoreFirstQuartile, SingleStoreMedian, SingleStoreThirdQuartile] # type: ignore + row = runner.select_first_from_sample( + *[metric(column).fn() for metric in metrics], + ) + except ProgrammingError: + logger.info( + f"Skipping window metrics for {runner.table.__tablename__}.{column.name} due to overflow" + ) + return None + + except Exception as exc: + msg = f"Error trying to compute profile for {runner.table.__tablename__}.{column.name}: {exc}" + handle_query_exception(msg, exc, session) + if row: + return dict(row) + return None diff --git a/ingestion/src/metadata/profiler/metrics/system/queries/redshift.py b/ingestion/src/metadata/profiler/metrics/system/queries/redshift.py index c6a3d61fa4db..1e76ab80b98d 100644 --- a/ingestion/src/metadata/profiler/metrics/system/queries/redshift.py +++ b/ingestion/src/metadata/profiler/metrics/system/queries/redshift.py @@ -35,19 +35,17 @@ sti."database", sti."schema", sti."table", - sq.text, DATE_TRUNC('second', data.starttime) AS starttime FROM data INNER JOIN pg_catalog.svv_table_info sti ON data.tbl = sti.table_id - INNER JOIN pg_catalog.stl_querytext sq ON data.query = sq.query where sti."database" = '{database}' AND sti."schema" = '{schema}' AND "rows" != 0 AND DATE(data.starttime) >= CURRENT_DATE - 1 - GROUP BY 2,3,4,5,6 - ORDER BY 6 DESC + GROUP BY 2,3,4,5 + ORDER BY 5 DESC """ @@ -73,7 +71,7 @@ def 
get_query_results( database_name=row.database, schema_name=row.schema, table_name=row.table, - query_text=row.text, + query_text=None, query_type=operation, timestamp=row.starttime, rows=row.rows, diff --git a/ingestion/src/metadata/profiler/metrics/system/system.py b/ingestion/src/metadata/profiler/metrics/system/system.py index 5f3b7608c319..b57a4ae38571 100644 --- a/ingestion/src/metadata/profiler/metrics/system/system.py +++ b/ingestion/src/metadata/profiler/metrics/system/system.py @@ -113,10 +113,6 @@ def _( dataset_id = table.__table_args__["schema"] # type: ignore metric_results: List[Dict] = [] - # QueryResult = namedtuple( - # "QueryResult", - # "query_type,timestamp,destination_table,dml_statistics", - # ) jobs = get_value_from_cache( SYSTEM_QUERY_RESULT_CACHE, f"{Dialects.BigQuery}.{project_id}.{dataset_id}.jobs" diff --git a/ingestion/src/metadata/profiler/metrics/window/first_quartile.py b/ingestion/src/metadata/profiler/metrics/window/first_quartile.py index c508f276ec0a..ecfce32fff17 100644 --- a/ingestion/src/metadata/profiler/metrics/window/first_quartile.py +++ b/ingestion/src/metadata/profiler/metrics/window/first_quartile.py @@ -19,15 +19,15 @@ from sqlalchemy import column from metadata.profiler.metrics.core import StaticMetric, _label +from metadata.profiler.metrics.window.percentille_mixin import PercentilMixin from metadata.profiler.orm.functions.length import LenFn -from metadata.profiler.orm.functions.median import MedianFn from metadata.profiler.orm.registry import is_concatenable, is_quantifiable from metadata.utils.logger import profiler_logger logger = profiler_logger() -class FirstQuartile(StaticMetric): +class FirstQuartile(StaticMetric, PercentilMixin): """ First Quartile Metric @@ -53,14 +53,14 @@ def fn(self): """sqlalchemy function""" if is_quantifiable(self.col.type): # col fullname is only needed for MySQL and SQLite - return MedianFn( + return self._compute_sqa_fn( column(self.col.name, self.col.type), self.col.table.fullname 
if self.col.table is not None else None, 0.25, ) if is_concatenable(self.col.type): - return MedianFn( + return self._compute_sqa_fn( LenFn(column(self.col.name, self.col.type)), self.col.table.fullname if self.col.table is not None else None, 0.25, diff --git a/ingestion/src/metadata/profiler/metrics/window/median.py b/ingestion/src/metadata/profiler/metrics/window/median.py index 1a4c623e581b..e8d4f42e946e 100644 --- a/ingestion/src/metadata/profiler/metrics/window/median.py +++ b/ingestion/src/metadata/profiler/metrics/window/median.py @@ -19,15 +19,15 @@ from sqlalchemy import column from metadata.profiler.metrics.core import StaticMetric, _label +from metadata.profiler.metrics.window.percentille_mixin import PercentilMixin from metadata.profiler.orm.functions.length import LenFn -from metadata.profiler.orm.functions.median import MedianFn from metadata.profiler.orm.registry import is_concatenable, is_quantifiable from metadata.utils.logger import profiler_logger logger = profiler_logger() -class Median(StaticMetric): +class Median(StaticMetric, PercentilMixin): """ Median Metric @@ -53,14 +53,14 @@ def fn(self): """sqlalchemy function""" if is_quantifiable(self.col.type): # col fullname is only needed for MySQL and SQLite - return MedianFn( + return self._compute_sqa_fn( column(self.col.name, self.col.type), self.col.table.fullname if self.col.table is not None else None, 0.5, ) if is_concatenable(self.col.type): - return MedianFn( + return self._compute_sqa_fn( LenFn(column(self.col.name, self.col.type)), self.col.table.fullname if self.col.table is not None else None, 0.5, diff --git a/ingestion/src/metadata/profiler/metrics/window/percentille_mixin.py b/ingestion/src/metadata/profiler/metrics/window/percentille_mixin.py new file mode 100644 index 000000000000..38a98ccc8ce7 --- /dev/null +++ b/ingestion/src/metadata/profiler/metrics/window/percentille_mixin.py @@ -0,0 +1,9 @@ +"""function calls shared accross all percentile metrics""" + +from 
metadata.profiler.orm.functions.median import MedianFn + + +class PercentilMixin: + def _compute_sqa_fn(self, column, table, percentile): + """Generic method to compute the quartile using sqlalchemy""" + return MedianFn(column, table, percentile) diff --git a/ingestion/src/metadata/profiler/metrics/window/third_quartile.py b/ingestion/src/metadata/profiler/metrics/window/third_quartile.py index d571739419de..1a7b06094e9a 100644 --- a/ingestion/src/metadata/profiler/metrics/window/third_quartile.py +++ b/ingestion/src/metadata/profiler/metrics/window/third_quartile.py @@ -19,15 +19,15 @@ from sqlalchemy import column from metadata.profiler.metrics.core import StaticMetric, _label +from metadata.profiler.metrics.window.percentille_mixin import PercentilMixin from metadata.profiler.orm.functions.length import LenFn -from metadata.profiler.orm.functions.median import MedianFn from metadata.profiler.orm.registry import is_concatenable, is_quantifiable from metadata.utils.logger import profiler_logger logger = profiler_logger() -class ThirdQuartile(StaticMetric): +class ThirdQuartile(StaticMetric, PercentilMixin): """ Third Quartile Metric @@ -53,14 +53,14 @@ def fn(self): """sqlalchemy function""" if is_quantifiable(self.col.type): # col fullname is only needed for MySQL and SQLite - return MedianFn( + return self._compute_sqa_fn( column(self.col.name, self.col.type), self.col.table.fullname if self.col.table is not None else None, 0.75, ) if is_concatenable(self.col.type): - return MedianFn( + return self._compute_sqa_fn( LenFn(column(self.col.name, self.col.type)), self.col.table.fullname if self.col.table is not None else None, 0.75, diff --git a/ingestion/src/metadata/profiler/processor/core.py b/ingestion/src/metadata/profiler/processor/core.py index 342295ae14d3..8a8354c02429 100644 --- a/ingestion/src/metadata/profiler/processor/core.py +++ b/ingestion/src/metadata/profiler/processor/core.py @@ -92,7 +92,7 @@ def __init__( self.include_columns = include_columns 
self.exclude_columns = exclude_columns self._metrics = metrics - self._profile_date = datetime.now(tz=timezone.utc).timestamp() + self._profile_date = int(datetime.now(tz=timezone.utc).timestamp() * 1000) self.profile_sample_config = self.profiler_interface.profile_sample_config self.validate_composed_metric() diff --git a/ingestion/src/metadata/profiler/source/single_store/functions/median.py b/ingestion/src/metadata/profiler/source/single_store/functions/median.py new file mode 100644 index 000000000000..cd509d782e5d --- /dev/null +++ b/ingestion/src/metadata/profiler/source/single_store/functions/median.py @@ -0,0 +1,17 @@ +"""Median function for single store""" + +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.sql.functions import FunctionElement + +from metadata.profiler.metrics.core import CACHE + + +class SingleStoreMedianFn(FunctionElement): + inherit_cache = CACHE + + +@compiles(SingleStoreMedianFn) +def _(elements, compiler, **kwargs): # pylint: disable=unused-argument + col = compiler.process(elements.clauses.clauses[0]) + percentile = elements.clauses.clauses[2].value + return f"approx_percentile({col}, {percentile:.2f})" diff --git a/ingestion/src/metadata/profiler/source/single_store/metrics/window/first_quartile.py b/ingestion/src/metadata/profiler/source/single_store/metrics/window/first_quartile.py new file mode 100644 index 000000000000..3bfd15f1bcbf --- /dev/null +++ b/ingestion/src/metadata/profiler/source/single_store/metrics/window/first_quartile.py @@ -0,0 +1,10 @@ +"""Override first quartile metric definition for SingleStore""" + +from metadata.profiler.metrics.window.first_quartile import FirstQuartile +from metadata.profiler.source.single_store.functions.median import SingleStoreMedianFn + + +class SingleStoreFirstQuartile(FirstQuartile): + def _compute_sqa_fn(self, column, table, percentile): + """Generic method to compute the quartile using sqlalchemy""" + return SingleStoreMedianFn(column, table, percentile) diff --git 
a/ingestion/src/metadata/profiler/source/single_store/metrics/window/median.py b/ingestion/src/metadata/profiler/source/single_store/metrics/window/median.py new file mode 100644 index 000000000000..843fb971b5f6 --- /dev/null +++ b/ingestion/src/metadata/profiler/source/single_store/metrics/window/median.py @@ -0,0 +1,10 @@ +"""Override first quartile metric definition for SingleStore""" + +from metadata.profiler.metrics.window.median import Median +from metadata.profiler.source.single_store.functions.median import SingleStoreMedianFn + + +class SingleStoreMedian(Median): + def _compute_sqa_fn(self, column, table, percentile): + """Generic method to compute the quartile using sqlalchemy""" + return SingleStoreMedianFn(column, table, percentile) diff --git a/ingestion/src/metadata/profiler/source/single_store/metrics/window/third_quartile.py b/ingestion/src/metadata/profiler/source/single_store/metrics/window/third_quartile.py new file mode 100644 index 000000000000..c8c8ef53274d --- /dev/null +++ b/ingestion/src/metadata/profiler/source/single_store/metrics/window/third_quartile.py @@ -0,0 +1,10 @@ +"""Override first quartile metric definition for SingleStore""" + +from metadata.profiler.metrics.window.third_quartile import ThirdQuartile +from metadata.profiler.source.single_store.functions.median import SingleStoreMedianFn + + +class SingleStoreThirdQuartile(ThirdQuartile): + def _compute_sqa_fn(self, column, table, percentile): + """Generic method to compute the quartile using sqlalchemy""" + return SingleStoreMedianFn(column, table, percentile) diff --git a/ingestion/src/metadata/utils/fqn.py b/ingestion/src/metadata/utils/fqn.py index 9e92393b0c91..347d78b34322 100644 --- a/ingestion/src/metadata/utils/fqn.py +++ b/ingestion/src/metadata/utils/fqn.py @@ -33,7 +33,6 @@ from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema from metadata.generated.schema.entity.data.mlmodel import MlModel from 
metadata.generated.schema.entity.data.pipeline import Pipeline -from metadata.generated.schema.entity.data.searchIndex import SearchIndex from metadata.generated.schema.entity.data.table import Column, DataModel, Table from metadata.generated.schema.entity.data.topic import Topic from metadata.generated.schema.entity.teams.team import Team @@ -265,20 +264,6 @@ def _( return _build(service_name, topic_name) -@fqn_build_registry.add(SearchIndex) -def _( - _: OpenMetadata, # ES Index not necessary for Search Index FQN building - *, - service_name: str, - search_index_name: str, -) -> str: - if not service_name or not search_index_name: - raise FQNBuildingException( - f"Args should be informed, but got service=`{service_name}`, search_index=`{search_index_name}``" - ) - return _build(service_name, search_index_name) - - @fqn_build_registry.add(Tag) def _( _: OpenMetadata, # ES Index not necessary for Tag FQN building diff --git a/ingestion/tests/integration/data_insight/test_data_insight_workflow.py b/ingestion/tests/integration/data_insight/test_data_insight_workflow.py index 448589f66940..9373d845ae00 100644 --- a/ingestion/tests/integration/data_insight/test_data_insight_workflow.py +++ b/ingestion/tests/integration/data_insight/test_data_insight_workflow.py @@ -359,6 +359,32 @@ def test_write_kpi_result(self): assert kpi_result + def test_multiple_execution(self) -> None: + """test multiple execution of the workflow is not yielding duplicate entries""" + data = {} + + workflow: DataInsightWorkflow = DataInsightWorkflow.create(data_insight_config) + workflow.execute() + workflow.stop() + sleep(2) # we'll wait for 2 seconds + new_workflow: DataInsightWorkflow = DataInsightWorkflow.create( + data_insight_config + ) + new_workflow.execute() + new_workflow.stop() + + for report_data_type in ReportDataType: + data[report_data_type] = self.metadata.get_data_insight_report_data( + self.start_ts, + self.end_ts, + report_data_type.value, + ) + + for _, values in 
data.items(): + timestamp = [value.get("timestamp") for value in values.get("data")] + # we'll check we only have 1 execution timestamp + assert len(set(timestamp)) == 1 + @classmethod def tearDownClass(cls) -> None: kpis: list[Kpi] = cls.metadata.list_entities( diff --git a/ingestion/tests/integration/orm_profiler/test_orm_profiler_e2e.py b/ingestion/tests/integration/orm_profiler/test_orm_profiler_e2e.py index 5fedf2a96a6e..fea999391411 100644 --- a/ingestion/tests/integration/orm_profiler/test_orm_profiler_e2e.py +++ b/ingestion/tests/integration/orm_profiler/test_orm_profiler_e2e.py @@ -212,9 +212,6 @@ def test_ingestion(self): ) assert table_entity.fullyQualifiedName.__root__ == "test_sqlite.main.main.users" - @pytest.mark.skip( - "need to reactivate once https://github.com/open-metadata/OpenMetadata/issues/8930 is handled. Skipping to prevent Cypress failure" - ) def test_profiler_workflow(self): """ Prepare and execute the profiler workflow @@ -539,8 +536,7 @@ def test_workflow_values_partition(self): ).profile assert profile.rowCount == 4.0 - # uncomment when reactivate once https://github.com/open-metadata/OpenMetadata/issues/8930 is fixed - # assert profile.profileSample is None + assert profile.profileSample is None workflow_config["processor"] = { "type": "orm-profiler", diff --git a/ingestion/tests/unit/topology/search/test_elasticsearch.py b/ingestion/tests/unit/topology/search/test_elasticsearch.py deleted file mode 100644 index 8b0896dbfd9e..000000000000 --- a/ingestion/tests/unit/topology/search/test_elasticsearch.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Test ES using the topology -""" - -from unittest import TestCase -from unittest.mock import patch - -from metadata.generated.schema.api.data.createSearchIndex import ( - CreateSearchIndexRequest, -) -from metadata.generated.schema.entity.data.searchIndex import DataType, SearchIndexField -from metadata.generated.schema.entity.services.searchService import ( - SearchConnection, - SearchService, - SearchServiceType, -) -from metadata.generated.schema.metadataIngestion.workflow import ( - OpenMetadataWorkflowConfig, -) -from metadata.ingestion.source.search.elasticsearch.metadata import ElasticsearchSource - -mock_es_config = { - "source": { - "type": "elasticsearch", - "serviceName": "local_elasticsearch", - "serviceConnection": { - "config": { - "type": "ElasticSearch", - "authType": { - "username": "username", - "password": "password", - }, - "hostPort": "localhost:9200", - } - }, - "sourceConfig": {"config": {"type": "SearchMetadata"}}, - }, - "sink": {"type": "metadata-rest", "config": {}}, - "workflowConfig": { - "openMetadataServerConfig": { - "hostPort": "http://localhost:8585/api", - "authProvider": "openmetadata", - "securityConfig": { - "jwtToken": 
"eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg" - }, - } - }, -} - -MOCK_SETTINGS = { - "index": { - "routing": {"allocation": {"include": {"_tier_preference": "data_content"}}}, - "number_of_shards": "1", - "provided_name": "test_case_search_index", - "creation_date": "1692181190239", - "analysis": { - "filter": {"om_stemmer": {"name": "english", "type": "stemmer"}}, - "normalizer": { - "lowercase_normalizer": { - "filter": ["lowercase"], - "type": "custom", - "char_filter": [], - } - }, - "analyzer": { - "om_ngram": { - "filter": ["lowercase"], - "min_gram": "1", - "max_gram": "2", - "tokenizer": "ngram", - }, - "om_analyzer": { - "filter": ["lowercase", "om_stemmer"], - "tokenizer": "letter", - }, - }, - }, - "number_of_replicas": "1", - "uuid": "8HAGhnVkSy-X__XwWFdJqg", - "version": {"created": "7160399"}, - } -} - -MOCK_DETAILS = { - "test_case_search_index": { - "aliases": {}, - "mappings": { - "properties": { - "href": {"type": "text"}, - "name": { - "type": "text", - "fields": { - "keyword": {"type": "keyword", "ignore_above": 256}, - "ngram": {"type": "text", "analyzer": "om_ngram"}, - }, - "analyzer": "om_analyzer", - }, - "owner": { - "properties": { - "deleted": {"type": "text"}, - "description": {"type": "text"}, - "displayName": { - "type": "text", - "fields": { - "keyword": {"type": "keyword", "ignore_above": 256} - }, - }, - "fullyQualifiedName": {"type": "text"}, - "href": {"type": "text"}, - "id": {"type": "text"}, - "name": { 
- "type": "keyword", - "normalizer": "lowercase_normalizer", - "fields": { - "keyword": {"type": "keyword", "ignore_above": 256} - }, - }, - "type": {"type": "keyword"}, - } - }, - } - }, - "settings": MOCK_SETTINGS, - } -} - -MOCK_SEARCH_SERVICE = SearchService( - id="85811038-099a-11ed-861d-0242ac120002", - name="es_source", - fullyQualifiedName="es_source", - connection=SearchConnection(), - serviceType=SearchServiceType.ElasticSearch, -) - -EXPECTED_RESULT = CreateSearchIndexRequest( - name="test_case_search_index", - displayName="test_case_search_index", - searchIndexSettings=MOCK_SETTINGS, - service="es_source", - fields=[ - SearchIndexField( - name="href", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="name", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="owner", - dataType=DataType.OBJECT, - children=[ - SearchIndexField( - name="deleted", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="description", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="displayName", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="fullyQualifiedName", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="href", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="id", - dataType=DataType.TEXT, - ), - SearchIndexField( - name="name", - dataType=DataType.KEYWORD, - ), - SearchIndexField( - name="type", - dataType=DataType.KEYWORD, - ), - ], - ), - ], -) - - -class ElasticSearchUnitTest(TestCase): - @patch( - "metadata.ingestion.source.search.search_service.SearchServiceSource.test_connection" - ) - def __init__(self, methodName, test_connection) -> None: - super().__init__(methodName) - test_connection.return_value = False - self.config = OpenMetadataWorkflowConfig.parse_obj(mock_es_config) - self.es_source = ElasticsearchSource.create( - mock_es_config["source"], - self.config.workflowConfig.openMetadataServerConfig, - ) - self.es_source.context.__dict__["search_service"] = MOCK_SEARCH_SERVICE - - def 
test_partition_parse_columns(self): - actual_index = self.es_source.yield_search_index(MOCK_DETAILS) - self.assertEqual(list(actual_index), [EXPECTED_RESULT]) diff --git a/openmetadata-airflow-apis/setup.py b/openmetadata-airflow-apis/setup.py index 656cee745926..b6f715971cff 100644 --- a/openmetadata-airflow-apis/setup.py +++ b/openmetadata-airflow-apis/setup.py @@ -69,7 +69,7 @@ def get_long_description(): packages=find_packages(include=[f"{PLUGIN_NAME}.*", PLUGIN_NAME]), include_package_data=True, package_data={PLUGIN_NAME: get_package_data()}, - version="1.2.0.0.dev0", + version="1.1.5.0", url="https://open-metadata.org/", author="OpenMetadata Committers", license="Apache License 2.0", diff --git a/openmetadata-clients/openmetadata-java-client/pom.xml b/openmetadata-clients/openmetadata-java-client/pom.xml index 071c045f8b23..6a43e8f617ae 100644 --- a/openmetadata-clients/openmetadata-java-client/pom.xml +++ b/openmetadata-clients/openmetadata-java-client/pom.xml @@ -5,7 +5,7 @@ openmetadata-clients org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 diff --git a/openmetadata-clients/pom.xml b/openmetadata-clients/pom.xml index 9cdfc403afdf..dc812a02c3ea 100644 --- a/openmetadata-clients/pom.xml +++ b/openmetadata-clients/pom.xml @@ -5,7 +5,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 diff --git a/openmetadata-dist/pom.xml b/openmetadata-dist/pom.xml index 1790aa5d8674..e3e53c5adb8f 100644 --- a/openmetadata-dist/pom.xml +++ b/openmetadata-dist/pom.xml @@ -20,7 +20,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 openmetadata-dist diff --git a/openmetadata-service/pom.xml b/openmetadata-service/pom.xml index 7703b087b177..9f8dad39eb17 100644 --- a/openmetadata-service/pom.xml +++ b/openmetadata-service/pom.xml @@ -5,7 +5,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 openmetadata-service diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/Entity.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/Entity.java index 3f973f607d2f..82902048722c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/Entity.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/Entity.java @@ -72,13 +72,7 @@ public final class Entity { public static final String FIELD_DISPLAY_NAME = "displayName"; public static final String FIELD_EXTENSION = "extension"; public static final String FIELD_USAGE_SUMMARY = "usageSummary"; - public static final String FIELD_CHILDREN = "children"; - public static final String FIELD_PARENT = "parent"; public static final String FIELD_REVIEWERS = "reviewers"; - public static final String FIELD_EXPERTS = "experts"; - public static final String FIELD_DOMAIN = "domain"; - public static final String FIELD_DATA_PRODUCTS = "dataProducts"; - public static final String FIELD_ASSETS = "assets"; // // Service entities @@ -90,7 +84,6 @@ public final class Entity { public static final String STORAGE_SERVICE = "storageService"; public static final String MLMODEL_SERVICE = "mlmodelService"; public static final String METADATA_SERVICE = "metadataService"; - public static final String SEARCH_SERVICE = "searchService"; // // Data asset entities // @@ -104,7 +97,6 @@ public final class Entity { public static final String CHART = "chart"; public static final String REPORT = "report"; public static final String TOPIC = "topic"; - public static final String SEARCH_INDEX = "searchIndex"; public static final String MLMODEL = "mlmodel"; public static final String CONTAINER = "container"; public static final String QUERY = "query"; @@ -141,12 +133,6 @@ public final class Entity { // public static final String INGESTION_PIPELINE = "ingestionPipeline"; - // - // Domain related entities - // - public static final String DOMAIN = "domain"; - public static final String DATA_PRODUCT = "dataProduct"; - // // Other entities public static final String EVENT_SUBSCRIPTION = "eventsubscription"; 
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/OpenMetadataApplication.java b/openmetadata-service/src/main/java/org/openmetadata/service/OpenMetadataApplication.java index 46c87036a614..2db69b59ea0b 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/OpenMetadataApplication.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/OpenMetadataApplication.java @@ -464,11 +464,12 @@ private void initializeWebsockets(OpenMetadataApplicationConfig catalogConfig, E WebSocketUpgradeFilter.configure(environment.getApplicationContext()); NativeWebSocketServletContainerInitializer.configure( environment.getApplicationContext(), - (context, container) -> - container.addMapping( - new ServletPathSpec(pathSpec), - (servletUpgradeRequest, servletUpgradeResponse) -> - new JettyWebSocketHandler(WebSocketManager.getInstance().getEngineIoServer()))); + (context, container) -> { + container.addMapping( + new ServletPathSpec(pathSpec), + (servletUpgradeRequest, servletUpgradeResponse) -> + new JettyWebSocketHandler(WebSocketManager.getInstance().getEngineIoServer())); + }); } catch (ServletException ex) { LOG.error("Websocket Upgrade Filter error : " + ex.getMessage()); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/events/subscription/AlertsRuleEvaluator.java b/openmetadata-service/src/main/java/org/openmetadata/service/events/subscription/AlertsRuleEvaluator.java index d50ba7c0aedc..5960a4d1919f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/events/subscription/AlertsRuleEvaluator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/events/subscription/AlertsRuleEvaluator.java @@ -10,6 +10,7 @@ import static org.openmetadata.service.Entity.USER; import java.io.IOException; +import java.util.List; import java.util.Set; import java.util.UUID; import lombok.extern.slf4j.Slf4j; @@ -19,6 +20,7 @@ import 
org.openmetadata.schema.entity.services.ingestionPipelines.PipelineStatusType; import org.openmetadata.schema.entity.teams.Team; import org.openmetadata.schema.entity.teams.User; +import org.openmetadata.schema.tests.TestCase; import org.openmetadata.schema.tests.type.TestCaseResult; import org.openmetadata.schema.tests.type.TestCaseStatus; import org.openmetadata.schema.type.ChangeEvent; @@ -27,6 +29,7 @@ import org.openmetadata.schema.type.Include; import org.openmetadata.service.Entity; import org.openmetadata.service.formatter.util.FormatterUtil; +import org.openmetadata.service.resources.feeds.MessageParser; import org.openmetadata.service.util.JsonUtils; @Slf4j @@ -100,6 +103,10 @@ public boolean matchAnyEntityFqn(String... entityNames) throws IOException { } EntityInterface entity = getEntity(changeEvent); for (String name : entityNames) { + if (changeEvent.getEntityType().equals(TEST_CASE) + && (MessageParser.EntityLink.parse(((TestCase) entity).getEntityLink()).getEntityFQN().equals(name))) { + return true; + } if (entity.getFullyQualifiedName().equals(name)) { return true; } @@ -159,7 +166,14 @@ public boolean matchTestResult(String... 
testResults) { // in case the entity is not test case return since the filter doesn't apply return true; } - for (FieldChange fieldChange : changeEvent.getChangeDescription().getFieldsUpdated()) { + + // we need to handle both fields updated and fields added + List fieldChanges = changeEvent.getChangeDescription().getFieldsUpdated(); + if (!changeEvent.getChangeDescription().getFieldsAdded().isEmpty()) { + fieldChanges.addAll(changeEvent.getChangeDescription().getFieldsAdded()); + } + + for (FieldChange fieldChange : fieldChanges) { if (fieldChange.getName().equals("testCaseResult") && fieldChange.getNewValue() != null) { TestCaseResult testCaseResult = (TestCaseResult) fieldChange.getNewValue(); TestCaseStatus status = testCaseResult.getTestCaseStatus(); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java index 38d10c811926..9bc123e4ba4c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java @@ -13,8 +13,6 @@ package org.openmetadata.service.jdbi3; -import static org.openmetadata.schema.type.Include.ALL; - import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.openmetadata.schema.entity.data.Chart; @@ -61,12 +59,6 @@ public void storeRelationships(Chart chart) { addRelationship(service.getId(), chart.getId(), service.getType(), Entity.CHART, Relationship.CONTAINS); } - @Override - public Chart setInheritedFields(Chart chart, Fields fields) { - DashboardService dashboardService = Entity.getEntity(chart.getService(), "domain", ALL); - return inheritDomain(chart, fields, dashboardService); - } - @Override public Chart setFields(Chart chart, Fields fields) { return chart.withService(getContainer(chart.getId())); diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java index 18bb0badcd82..00d6a3fd71e8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java @@ -79,11 +79,8 @@ import org.openmetadata.schema.entity.data.Pipeline; import org.openmetadata.schema.entity.data.Query; import org.openmetadata.schema.entity.data.Report; -import org.openmetadata.schema.entity.data.SearchIndex; import org.openmetadata.schema.entity.data.Table; import org.openmetadata.schema.entity.data.Topic; -import org.openmetadata.schema.entity.domains.DataProduct; -import org.openmetadata.schema.entity.domains.Domain; import org.openmetadata.schema.entity.events.EventSubscription; import org.openmetadata.schema.entity.policies.Policy; import org.openmetadata.schema.entity.services.DashboardService; @@ -92,7 +89,6 @@ import org.openmetadata.schema.entity.services.MetadataService; import org.openmetadata.schema.entity.services.MlModelService; import org.openmetadata.schema.entity.services.PipelineService; -import org.openmetadata.schema.entity.services.SearchService; import org.openmetadata.schema.entity.services.StorageService; import org.openmetadata.schema.entity.services.connections.TestConnectionDefinition; import org.openmetadata.schema.entity.services.ingestionPipelines.IngestionPipeline; @@ -104,6 +100,7 @@ import org.openmetadata.schema.tests.TestCase; import org.openmetadata.schema.tests.TestDefinition; import org.openmetadata.schema.tests.TestSuite; +import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.TaskStatus; @@ -145,6 +142,15 @@ public interface CollectionDAO { @CreateSqlObject EntityExtensionTimeSeriesDAO 
entityExtensionTimeSeriesDao(); + @CreateSqlObject + ReportDataTimeSeriesDAO reportDataTimeSeriesDao(); + + @CreateSqlObject + ProfilerDataTimeSeriesDAO profilerDataTimeSeriesDao(); + + @CreateSqlObject + DataQualityDataTimeSeriesDAO dataQualityDataTimeSeriesDao(); + @CreateSqlObject RoleDAO roleDAO(); @@ -190,9 +196,6 @@ public interface CollectionDAO { @CreateSqlObject MlModelDAO mlModelDAO(); - @CreateSqlObject - SearchIndexDAO searchIndexDAO(); - @CreateSqlObject GlossaryDAO glossaryDAO(); @@ -202,12 +205,6 @@ public interface CollectionDAO { @CreateSqlObject BotDAO botDAO(); - @CreateSqlObject - DomainDAO domainDAO(); - - @CreateSqlObject - DataProductDAO dataProductDAO(); - @CreateSqlObject EventSubscriptionDAO eventSubscriptionDAO(); @@ -238,9 +235,6 @@ public interface CollectionDAO { @CreateSqlObject StorageServiceDAO storageServiceDAO(); - @CreateSqlObject - SearchServiceDAO searchServiceDAO(); - @CreateSqlObject ContainerDAO containerDAO(); @@ -452,6 +446,7 @@ default List listBefore(ListFilter filter, int limit, String before) { } String sqlCondition = String.format("%s AND er.toId is NULL", condition); + return listBefore(getTableName(), getNameColumn(), sqlCondition, limit, before); } @@ -538,40 +533,6 @@ int listCount( @Define("sqlCondition") String mysqlCond); } - interface SearchServiceDAO extends EntityDAO { - @Override - default String getTableName() { - return "search_service_entity"; - } - - @Override - default Class getEntityClass() { - return SearchService.class; - } - - @Override - default String getNameHashColumn() { - return "nameHash"; - } - } - - interface SearchIndexDAO extends EntityDAO { - @Override - default String getTableName() { - return "search_index_entity"; - } - - @Override - default Class getEntityClass() { - return SearchIndex.class; - } - - @Override - default String getNameHashColumn() { - return "fqnHash"; - } - } - interface EntityExtensionDAO { @ConnectionAwareSqlUpdate( value = @@ -1381,50 +1342,6 @@ default String 
getNameHashColumn() { } } - interface DomainDAO extends EntityDAO { - @Override - default String getTableName() { - return "domain_entity"; - } - - @Override - default Class getEntityClass() { - return Domain.class; - } - - @Override - default String getNameHashColumn() { - return "fqnHash"; - } - - @Override - default boolean supportsSoftDelete() { - return false; - } - } - - interface DataProductDAO extends EntityDAO { - @Override - default String getTableName() { - return "data_product_entity"; - } - - @Override - default Class getEntityClass() { - return DataProduct.class; - } - - @Override - default String getNameHashColumn() { - return "fqnHash"; - } - - @Override - default boolean supportsSoftDelete() { - return false; - } - } - interface EventSubscriptionDAO extends EntityDAO { @Override default String getTableName() { @@ -2797,12 +2714,16 @@ List listAfter( @Bind("after") String after, @Bind("relation") int relation); - @ConnectionAwareSqlQuery(value = "SELECT count(*) FROM user_entity WHERE email = :email", connectionType = MYSQL) - @ConnectionAwareSqlQuery(value = "SELECT count(*) FROM user_entity WHERE email = :email", connectionType = POSTGRES) + @SqlQuery("SELECT COUNT(*) FROM user_entity WHERE LOWER(email) = LOWER(:email)") int checkEmailExists(@Bind("email") String email); - @SqlQuery("SELECT json FROM user_entity WHERE email = :email") + @SqlQuery("SELECT json FROM user_entity WHERE LOWER(email) = LOWER(:email)") String findUserByEmail(@Bind("email") String email); + + @Override + default User findEntityByName(String fqn, Include include) { + return EntityDAO.super.findEntityByName(fqn.toLowerCase(), include); + } } interface ChangeEventDAO { @@ -3120,281 +3041,44 @@ default String getNameHashColumn() { } } - interface EntityExtensionTimeSeriesDAO { - enum OrderBy { - ASC, - DESC - } - - @ConnectionAwareSqlUpdate( - value = - "INSERT INTO entity_extension_time_series(entityFQNHash, extension, jsonSchema, json) " - + "VALUES (:entityFQNHash, 
:extension, :jsonSchema, :json)", - connectionType = MYSQL) - @ConnectionAwareSqlUpdate( - value = - "INSERT INTO entity_extension_time_series(entityFQNHash, extension, jsonSchema, json) " - + "VALUES (:entityFQNHash, :extension, :jsonSchema, (:json :: jsonb))", - connectionType = POSTGRES) - void insert( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("jsonSchema") String jsonSchema, - @Bind("json") String json); - - @ConnectionAwareSqlUpdate( - value = - "UPDATE entity_extension_time_series set json = :json where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", - connectionType = MYSQL) - @ConnectionAwareSqlUpdate( - value = - "UPDATE entity_extension_time_series set json = (:json :: jsonb) where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", - connectionType = POSTGRES) - void update( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("json") String json, - @Bind("timestamp") Long timestamp); - - @ConnectionAwareSqlUpdate( - value = - "UPDATE entity_extension_time_series set json = :json where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp and json -> '$.operation' = :operation", - connectionType = MYSQL) - @ConnectionAwareSqlUpdate( - value = - "UPDATE entity_extension_time_series set json = (:json :: jsonb) where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp and json #>>'{operation}' = :operation", - connectionType = POSTGRES) - void updateExtensionByOperation( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("json") String json, - @Bind("timestamp") Long timestamp, - @Bind("operation") String operation); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension") - String getExtension(@BindFQN("entityFQNHash") String 
entityId, @Bind("extension") String extension); - - @SqlQuery("SELECT count(*) FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash") - int listCount(@BindFQN("entityFQNHash") String entityFQNHash); - - @SqlQuery("SELECT COUNT(DISTINCT entityFQN) FROM entity_extension_time_series") - @Deprecated - int listDistinctCount(); - - @ConnectionAwareSqlQuery( - value = - "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash) " - + "SELECT row_num, json FROM data WHERE row_num > :after LIMIT :limit", - connectionType = MYSQL) - @ConnectionAwareSqlQuery( - value = - "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash) " - + "SELECT row_num, json FROM data WHERE row_num > (:after :: integer) LIMIT :limit", - connectionType = POSTGRES) - @RegisterRowMapper(ReportDataMapper.class) - List getAfterExtension( - @BindFQN("entityFQNHash") String entityFQNHash, @Bind("limit") int limit, @Bind("after") String after); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") - String getExtensionAtTimestamp( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("timestamp") long timestamp); - - @ConnectionAwareSqlQuery( - value = - "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json -> '$.operation' = :operation", - connectionType = MYSQL) - @ConnectionAwareSqlQuery( - value = - "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json #>>'{operation}' = :operation", - connectionType = POSTGRES) - String 
getExtensionAtTimestampWithOperation( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("timestamp") long timestamp, - @Bind("operation") String operation); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension " - + "ORDER BY timestamp DESC LIMIT 1") - String getLatestExtension(@BindFQN("entityFQNHash") String entityFQNHash, @Bind("extension") String extension); - - @SqlQuery( - "SELECT ranked.json FROM (SELECT json, ROW_NUMBER() OVER(PARTITION BY entityFQNHash ORDER BY timestamp DESC) AS row_num " - + "FROM entity_extension_time_series WHERE entityFQNHash IN () AND extension = :extension) ranked WHERE ranked.row_num = 1") - List getLatestExtensionByFQNs( - @BindList("entityFQNHashes") List entityFQNHashes, @Bind("extension") String extension); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE extension = :extension " - + "ORDER BY timestamp DESC LIMIT 1") - String getLatestByExtension(@Bind("extension") String extension); - - @SqlQuery("SELECT json FROM entity_extension_time_series WHERE extension = :extension " + "ORDER BY timestamp DESC") - List getAllByExtension(@Bind("extension") String extension); - - @RegisterRowMapper(ExtensionMapper.class) - @SqlQuery( - "SELECT extension, json FROM entity_extension WHERE id = :id AND extension " - + "LIKE CONCAT (:extensionPrefix, '.%') " - + "ORDER BY extension") - List getExtensions(@Bind("id") String id, @Bind("extensionPrefix") String extensionPrefix); - - @SqlUpdate("DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash") - void deleteAll(@Bind("entityFQNHash") String entityFQNHash); - - @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension") - void delete(@BindFQN("entityFQNHash") String entityFQNHash, @Bind("extension") String extension); - - // This just saves the limit number of 
records, and remove all other with given extension - @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE extension = :extension AND entityFQNHash NOT IN(SELECT entityFQNHash FROM (select * from entity_extension_time_series WHERE extension = :extension ORDER BY timestamp DESC LIMIT :records) AS subquery)") - void deleteLastRecords(@Bind("extension") String extension, @Bind("records") int noOfRecord); - - @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") - void deleteAtTimestamp( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("timestamp") Long timestamp); - - @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp < :timestamp") - void deleteBeforeTimestamp( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("timestamp") Long timestamp); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series where entityFQNHash = :entityFQNHash and extension = :extension " - + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp DESC") - List listBetweenTimestamps( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("startTs") Long startTs, - @Bind("endTs") long endTs); - - @SqlQuery( - "SELECT json FROM entity_extension_time_series where entityFQNHash = :entityFQNHash and extension = :extension " - + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp ") - List listBetweenTimestampsByOrder( - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("startTs") Long startTs, - @Bind("endTs") long endTs, - @Define("orderBy") OrderBy orderBy); - - default void updateExtensionByKey(String key, String value, String entityFQN, String extension, String json) { - String mysqlCond = 
String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); - String psqlCond = String.format("AND json->>'%s' = :value", key); - updateExtensionByKeyInternal(value, entityFQN, extension, json, mysqlCond, psqlCond); + interface EntityExtensionTimeSeriesDAO extends EntityTimeSeriesDAO { + @Override + default String getTimeSeriesTableName() { + return "entity_extension_time_series"; } + } - default String getExtensionByKey(String key, String value, String entityFQN, String extension) { - String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); - String psqlCond = String.format("AND json->>'%s' = :value", key); - return getExtensionByKeyInternal(value, entityFQN, extension, mysqlCond, psqlCond); + interface ReportDataTimeSeriesDAO extends EntityTimeSeriesDAO { + @Override + default String getTimeSeriesTableName() { + return "report_data_time_series"; } - default String getLatestExtensionByKey(String key, String value, String entityFQN, String extension) { - String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); - String psqlCond = String.format("AND json->>'%s' = :value", key); - return getLatestExtensionByKeyInternal(value, entityFQN, extension, mysqlCond, psqlCond); - } + @SqlQuery("SELECT json FROM report_data_time_series WHERE entityFQNHash = :reportDataType and date = :date") + List listReportDataAtDate(@BindFQN("reportDataType") String reportDataType, @Bind("date") String date); - /* - * Support updating data filtering by top-level keys in the JSON - */ @ConnectionAwareSqlUpdate( - value = - "UPDATE entity_extension_time_series SET json = :json " - + "WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + "", + value = "DELETE FROM report_data_time_series WHERE entityFQNHash = :reportDataType and date = :date", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "UPDATE entity_extension_time_series SET json = (:json :: jsonb) " - + 
"WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + "", + "DELETE FROM report_data_time_series WHERE entityFQNHash = :reportDataType and DATE(TO_TIMESTAMP((json ->> 'timestamp')::bigint/1000)) = DATE(:date)", connectionType = POSTGRES) - void updateExtensionByKeyInternal( - @Bind("value") String value, - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Bind("json") String json, - @Define("mysqlCond") String mysqlCond, - @Define("psqlCond") String psqlCond); - - /* - * Support selecting data filtering by top-level keys in the JSON - */ - @ConnectionAwareSqlQuery( - value = - "SELECT json from entity_extension_time_series " - + "WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + "", - connectionType = MYSQL) - @ConnectionAwareSqlQuery( - value = - "SELECT json from entity_extension_time_series " - + "WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + "", - connectionType = POSTGRES) - String getExtensionByKeyInternal( - @Bind("value") String value, - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Define("mysqlCond") String mysqlCond, - @Define("psqlCond") String psqlCond); - - @ConnectionAwareSqlQuery( - value = - "SELECT json from entity_extension_time_series " - + "WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + " " - + "ORDER BY timestamp DESC LIMIT 1", - connectionType = MYSQL) - @ConnectionAwareSqlQuery( - value = - "SELECT json from entity_extension_time_series " - + "WHERE entityFQNHash = :entityFQNHash " - + "AND extension = :extension " - + " " - + "ORDER BY timestamp DESC LIMIT 1", - connectionType = POSTGRES) - String getLatestExtensionByKeyInternal( - @Bind("value") String value, - @BindFQN("entityFQNHash") String entityFQNHash, - @Bind("extension") String extension, - @Define("mysqlCond") String mysqlCond, - @Define("psqlCond") String psqlCond); + void 
deleteReportDataTypeAtDate(@BindFQN("reportDataType") String reportDataType, @Bind("date") String date); + } - class ReportDataMapper implements RowMapper { - @Override - public ReportDataRow map(ResultSet rs, StatementContext ctx) throws SQLException { - String rowNumber = rs.getString("row_num"); - String json = rs.getString("json"); - ReportData reportData; - reportData = JsonUtils.readValue(json, ReportData.class); - return new ReportDataRow(rowNumber, reportData); - } + interface ProfilerDataTimeSeriesDAO extends EntityTimeSeriesDAO { + @Override + default String getTimeSeriesTableName() { + return "profiler_data_time_series"; } + } - @SqlQuery( - "SELECT DISTINCT entityFQN FROM entity_extension_time_series WHERE entityFQNHash = '' or entityFQNHash is null LIMIT :limit") - @Deprecated - List migrationListDistinctWithOffset(@Bind("limit") int limit); + interface DataQualityDataTimeSeriesDAO extends EntityTimeSeriesDAO { + @Override + default String getTimeSeriesTableName() { + return "data_quality_data_time_series"; + } } class EntitiesCountRowMapper implements RowMapper { @@ -3438,7 +3122,6 @@ interface SystemDAO { + "(SELECT COUNT(*) FROM pipeline_entity ) as pipelineCount, " + "(SELECT COUNT(*) FROM ml_model_entity ) as mlmodelCount, " + "(SELECT COUNT(*) FROM storage_container_entity ) as storageContainerCount, " - + "(SELECT COUNT(*) FROM search_index_entity ) as searchIndexCount, " + "(SELECT COUNT(*) FROM glossary_entity ) as glossaryCount, " + "(SELECT COUNT(*) FROM glossary_term_entity ) as glossaryTermCount, " + "(SELECT (SELECT COUNT(*) FROM metadata_service_entity ) + " @@ -3447,7 +3130,6 @@ interface SystemDAO { + "(SELECT COUNT(*) FROM dashboard_service_entity )+ " + "(SELECT COUNT(*) FROM pipeline_service_entity )+ " + "(SELECT COUNT(*) FROM mlmodel_service_entity )+ " - + "(SELECT COUNT(*) FROM search_service_entity )+ " + "(SELECT COUNT(*) FROM storage_service_entity )) as servicesCount, " + "(SELECT COUNT(*) FROM user_entity AND 
(JSON_EXTRACT(json, '$.isBot') IS NULL OR JSON_EXTRACT(json, '$.isBot') = FALSE)) as userCount, " + "(SELECT COUNT(*) FROM team_entity ) as teamCount, " @@ -3461,7 +3143,6 @@ interface SystemDAO { + "(SELECT COUNT(*) FROM pipeline_entity ) as pipelineCount, " + "(SELECT COUNT(*) FROM ml_model_entity ) as mlmodelCount, " + "(SELECT COUNT(*) FROM storage_container_entity ) as storageContainerCount, " - + "(SELECT COUNT(*) FROM search_index_entity ) as searchIndexCount, " + "(SELECT COUNT(*) FROM glossary_entity ) as glossaryCount, " + "(SELECT COUNT(*) FROM glossary_term_entity ) as glossaryTermCount, " + "(SELECT (SELECT COUNT(*) FROM metadata_service_entity ) + " @@ -3470,7 +3151,6 @@ interface SystemDAO { + "(SELECT COUNT(*) FROM dashboard_service_entity )+ " + "(SELECT COUNT(*) FROM pipeline_service_entity )+ " + "(SELECT COUNT(*) FROM mlmodel_service_entity )+ " - + "(SELECT COUNT(*) FROM search_service_entity )+ " + "(SELECT COUNT(*) FROM storage_service_entity )) as servicesCount, " + "(SELECT COUNT(*) FROM user_entity AND (json#>'{isBot}' IS NULL OR ((json#>'{isBot}')::boolean) = FALSE)) as userCount, " + "(SELECT COUNT(*) FROM team_entity ) as teamCount, " @@ -3485,8 +3165,7 @@ interface SystemDAO { + "(SELECT COUNT(*) FROM dashboard_service_entity ) as dashboardServiceCount, " + "(SELECT COUNT(*) FROM pipeline_service_entity ) as pipelineServiceCount, " + "(SELECT COUNT(*) FROM mlmodel_service_entity ) as mlModelServiceCount, " - + "(SELECT COUNT(*) FROM storage_service_entity ) as storageServiceCount, " - + "(SELECT COUNT(*) FROM search_service_entity ) as searchServiceCount") + + "(SELECT COUNT(*) FROM storage_service_entity ) as storageServiceCount") @RegisterRowMapper(ServicesCountRowMapper.class) ServicesCount getAggregatedServicesCount(@Define("cond") String cond) throws StatementException; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java index 6c768285c992..c28ff23d6698 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java @@ -4,7 +4,6 @@ import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.CONTAINER; -import static org.openmetadata.service.Entity.FIELD_PARENT; import static org.openmetadata.service.Entity.FIELD_TAGS; import static org.openmetadata.service.Entity.STORAGE_SERVICE; @@ -48,7 +47,7 @@ public ContainerRepository(CollectionDAO dao) { @Override public Container setFields(Container container, EntityUtil.Fields fields) { setDefaultFields(container); - container.setParent(fields.contains(FIELD_PARENT) ? getParent(container) : container.getParent()); + container.setParent(fields.contains("parent") ? getParent(container) : container.getParent()); if (container.getDataModel() != null) { populateDataModelColumnTags(fields.contains(FIELD_TAGS), container.getDataModel().getColumns()); } @@ -57,7 +56,8 @@ public Container setFields(Container container, EntityUtil.Fields fields) { @Override public Container clearFields(Container container, EntityUtil.Fields fields) { - container.setParent(fields.contains(FIELD_PARENT) ? container.getParent() : null); + container.setChildren(fields.contains("children") ? getChildren(container) : null); + container.setParent(fields.contains("parent") ? container.getParent() : null); return container.withDataModel(fields.contains("dataModel") ? 
container.getDataModel() : null); } @@ -121,7 +121,9 @@ public void prepare(Container container) { public void storeEntity(Container container, boolean update) { EntityReference storageService = container.getService(); EntityReference parent = container.getParent(); - container.withService(null).withParent(null); + List children = container.getChildren(); + + container.withService(null).withParent(null).withChildren(null); // Don't store datamodel column tags as JSON but build it on the fly based on relationships List columnWithTags = Lists.newArrayList(); @@ -134,7 +136,7 @@ public void storeEntity(Container container, boolean update) { store(container, update); // Restore the relationships - container.withService(storageService).withParent(parent); + container.withService(storageService).withParent(parent).withChildren(children); if (container.getDataModel() != null) { container.getDataModel().setColumns(columnWithTags); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java index 26e5b4c53a99..93c508bdecd4 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java @@ -14,7 +14,6 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.FIELD_TAGS; import java.util.List; @@ -123,12 +122,6 @@ public void storeRelationships(DashboardDataModel dashboardDataModel) { Relationship.CONTAINS); } - @Override - public DashboardDataModel setInheritedFields(DashboardDataModel dataModel, Fields fields) { - DashboardService dashboardService = Entity.getEntity(dataModel.getService(), "domain", ALL); - return 
inheritDomain(dataModel, fields, dashboardService); - } - @Override public DashboardDataModel setFields(DashboardDataModel dashboardDataModel, Fields fields) { getColumnTags(fields.contains(FIELD_TAGS), dashboardDataModel.getColumns()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardRepository.java index eb21e0214042..be7db38d399e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardRepository.java @@ -14,7 +14,6 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.FIELD_DESCRIPTION; import static org.openmetadata.service.Entity.FIELD_TAGS; @@ -164,12 +163,6 @@ public void storeRelationships(Dashboard dashboard) { } } - @Override - public Dashboard setInheritedFields(Dashboard dashboard, Fields fields) { - DashboardService dashboardService = Entity.getEntity(dashboard.getService(), "domain", ALL); - return inheritDomain(dashboard, fields, dashboardService); - } - @Override public EntityUpdater getUpdater(Dashboard original, Dashboard updated, Operation operation) { return new DashboardUpdater(original, updated, operation); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DataProductRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DataProductRepository.java deleted file mode 100644 index 12f2855ed763..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DataProductRepository.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.openmetadata.service.jdbi3; - -import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; -import static org.openmetadata.service.Entity.DATA_PRODUCT; -import static org.openmetadata.service.Entity.FIELD_ASSETS; -import static org.openmetadata.service.util.EntityUtil.entityReferenceMatch; - -import java.util.ArrayList; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.openmetadata.schema.entity.domains.DataProduct; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.schema.type.Relationship; -import org.openmetadata.service.Entity; -import org.openmetadata.service.resources.domains.DataProductResource; -import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.EntityUtil.Fields; -import org.openmetadata.service.util.FullyQualifiedName; - -@Slf4j -public class DataProductRepository extends EntityRepository { - private static final String UPDATE_FIELDS = "experts,assets"; // Domain field can't be updated - - public DataProductRepository(CollectionDAO dao) { - super( - DataProductResource.COLLECTION_PATH, - Entity.DATA_PRODUCT, - DataProduct.class, - dao.dataProductDAO(), - dao, - UPDATE_FIELDS, - UPDATE_FIELDS); - } - - @Override - public DataProduct setFields(DataProduct entity, Fields fields) { - return entity.withAssets(fields.contains(FIELD_ASSETS) ? getAssets(entity) : null); - } - - @Override - public DataProduct clearFields(DataProduct entity, Fields fields) { - return entity.withAssets(fields.contains(FIELD_ASSETS) ? 
entity.getAssets() : null); - } - - private List getAssets(DataProduct entity) { - return findTo(entity.getId(), Entity.DATA_PRODUCT, Relationship.HAS, null); - } - - @Override - public void prepare(DataProduct entity) { - // Parent, Experts, Owner, Assets are already validated - } - - @Override - public void storeEntity(DataProduct entity, boolean update) { - store(entity, update); - } - - @Override - public void storeRelationships(DataProduct entity) { - addRelationship( - entity.getDomain().getId(), entity.getId(), Entity.DOMAIN, Entity.DATA_PRODUCT, Relationship.CONTAINS); - for (EntityReference expert : listOrEmpty(entity.getExperts())) { - addRelationship(entity.getId(), expert.getId(), Entity.DATA_PRODUCT, Entity.USER, Relationship.EXPERT); - } - for (EntityReference asset : listOrEmpty(entity.getAssets())) { - addRelationship(entity.getId(), asset.getId(), Entity.DATA_PRODUCT, asset.getType(), Relationship.HAS); - } - } - - @Override - public EntityUpdater getUpdater(DataProduct original, DataProduct updated, Operation operation) { - return new DataProductUpdater(original, updated, operation); - } - - @Override - public void restorePatchAttributes(DataProduct original, DataProduct updated) { - updated.withDomain(original.getDomain()); // Domain can't be changed - } - - @Override - public void setFullyQualifiedName(DataProduct entity) { - EntityReference domain = entity.getDomain(); - entity.setFullyQualifiedName(FullyQualifiedName.add(domain.getFullyQualifiedName(), entity.getName())); - } - - public class DataProductUpdater extends EntityUpdater { - public DataProductUpdater(DataProduct original, DataProduct updated, Operation operation) { - super(original, updated, operation); - } - - @Override - public void entitySpecificUpdate() { - updateAssets(); - } - - private void updateAssets() { - List origToRefs = listOrEmpty(original.getAssets()); - List updatedToRefs = listOrEmpty(updated.getAssets()); - List added = new ArrayList<>(); - List deleted = new 
ArrayList<>(); - - if (!recordListChange(FIELD_ASSETS, origToRefs, updatedToRefs, added, deleted, entityReferenceMatch)) { - return; // No changes between original and updated. - } - // Remove assets that were deleted - for (EntityReference asset : deleted) { - deleteRelationship(original.getId(), DATA_PRODUCT, asset.getId(), asset.getType(), Relationship.HAS); - } - // Add new assets - for (EntityReference asset : added) { - addRelationship(original.getId(), asset.getId(), DATA_PRODUCT, asset.getType(), Relationship.HAS, false); - } - updatedToRefs.sort(EntityUtil.compareEntityReference); - origToRefs.sort(EntityUtil.compareEntityReference); - } - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java index ce8278691a67..cdc78c37c98b 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java @@ -13,9 +13,6 @@ package org.openmetadata.service.jdbi3; -import static org.openmetadata.schema.type.Include.ALL; -import static org.openmetadata.service.Entity.DATABASE_SERVICE; - import java.util.List; import org.openmetadata.schema.entity.data.Database; import org.openmetadata.schema.entity.services.DatabaseService; @@ -58,12 +55,6 @@ public void storeRelationships(Database database) { addRelationship(service.getId(), database.getId(), service.getType(), Entity.DATABASE, Relationship.CONTAINS); } - @Override - public Database setInheritedFields(Database database, Fields fields) { - DatabaseService service = Entity.getEntity(DATABASE_SERVICE, database.getService().getId(), "domain", ALL); - return inheritDomain(database, fields, service); - } - private List getSchemas(Database database) { return database == null ? 
null diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java index 78bb0849ef48..b30f1821ded3 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java @@ -14,9 +14,11 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.schema.type.Include.ALL; +import static org.openmetadata.service.Entity.FIELD_OWNER; import java.util.Collections; import java.util.List; +import java.util.UUID; import org.openmetadata.schema.entity.data.Database; import org.openmetadata.schema.entity.data.DatabaseSchema; import org.openmetadata.schema.type.EntityReference; @@ -95,11 +97,19 @@ private void setDefaultFields(DatabaseSchema schema) { @Override public DatabaseSchema setInheritedFields(DatabaseSchema schema, Fields fields) { - Database database = Entity.getEntity(Entity.DATABASE, schema.getDatabase().getId(), "owner,domain", ALL); - inheritOwner(schema, fields, database); - inheritDomain(schema, fields, database); - schema.withRetentionPeriod( - schema.getRetentionPeriod() == null ? database.getRetentionPeriod() : schema.getRetentionPeriod()); + Database database = null; + UUID databaseId = schema.getDatabase().getId(); + // If schema does not have owner, then inherit parent database owner + if (fields.contains(FIELD_OWNER) && schema.getOwner() == null) { + database = Entity.getEntity(Entity.DATABASE, databaseId, "owner", ALL); + schema.withOwner(database.getOwner()); + } + + // If schema does not have its own retention period, then inherit parent database retention period + if (schema.getRetentionPeriod() == null) { + database = database == null ? 
Entity.getEntity(Entity.DATABASE, databaseId, "", ALL) : database; + schema.withRetentionPeriod(database.getRetentionPeriod()); + } return schema; } @@ -125,6 +135,9 @@ private void populateDatabase(DatabaseSchema schema) { .withDatabase(database.getEntityReference()) .withService(database.getService()) .withServiceType(database.getServiceType()); + + // Carry forward ownership from database, if necessary + schema.withOwner(schema.getOwner() == null ? database.getOwner() : schema.getOwner()); } public class DatabaseSchemaUpdater extends EntityUpdater { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DomainRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DomainRepository.java deleted file mode 100644 index fa454abb8776..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DomainRepository.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.openmetadata.service.jdbi3; - -import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; -import static org.openmetadata.service.Entity.DOMAIN; -import static org.openmetadata.service.Entity.FIELD_EXPERTS; - -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.openmetadata.schema.entity.domains.Domain; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.schema.type.Relationship; -import org.openmetadata.service.Entity; -import org.openmetadata.service.resources.domains.DomainResource; -import org.openmetadata.service.util.EntityUtil.Fields; -import org.openmetadata.service.util.FullyQualifiedName; - -@Slf4j -public class DomainRepository extends EntityRepository { - private static final String UPDATE_FIELDS = "parent,children,experts"; - - public DomainRepository(CollectionDAO dao) { - super(DomainResource.COLLECTION_PATH, DOMAIN, Domain.class, dao.domainDAO(), dao, UPDATE_FIELDS, UPDATE_FIELDS); - } - - @Override - public Domain setFields(Domain entity, Fields fields) { - return entity.withParent(fields.contains("parent") ? getParent(entity) : entity.getParent()); - } - - @Override - public Domain clearFields(Domain entity, Fields fields) { - entity.withParent(fields.contains("parent") ? 
entity.getParent() : null); - return entity; - } - - @Override - public void prepare(Domain entity) { - // Parent, Experts, Owner are already validated - } - - @Override - public void storeEntity(Domain entity, boolean update) { - EntityReference parent = entity.getParent(); - List children = entity.getChildren(); - entity.withParent(null); - store(entity, update); - entity.withParent(parent); - } - - @Override - public void storeRelationships(Domain entity) { - if (entity.getParent() != null) { - addRelationship(entity.getParent().getId(), entity.getId(), DOMAIN, DOMAIN, Relationship.CONTAINS); - } - for (EntityReference expert : listOrEmpty(entity.getExperts())) { - addRelationship(entity.getId(), expert.getId(), DOMAIN, Entity.USER, Relationship.EXPERT); - } - } - - @Override - public Domain setInheritedFields(Domain domain, Fields fields) { - // If subdomain does not have owner and experts, then inherit it from parent domain - EntityReference parentRef = domain.getParent() != null ? 
domain.getParent() : getParent(domain); - if (parentRef != null) { - Domain parent = Entity.getEntity(DOMAIN, parentRef.getId(), "owner,experts", ALL); - inheritOwner(domain, fields, parent); - inheritExperts(domain, fields, parent); - } - return domain; - } - - @Override - public EntityUpdater getUpdater(Domain original, Domain updated, Operation operation) { - return new DomainUpdater(original, updated, operation); - } - - @Override - public void restorePatchAttributes(Domain original, Domain updated) { - updated.withParent(original.getParent()); // Parent can't be changed - updated.withChildren(original.getChildren()); // Children can't be changed - } - - @Override - public void setFullyQualifiedName(Domain entity) { - // Validate parent - if (entity.getParent() == null) { // Top level domain - entity.setFullyQualifiedName(FullyQualifiedName.build(entity.getName())); - } else { // Sub domain - EntityReference parent = entity.getParent(); - entity.setFullyQualifiedName(FullyQualifiedName.add(parent.getFullyQualifiedName(), entity.getName())); - } - } - - public class DomainUpdater extends EntityUpdater { - public DomainUpdater(Domain original, Domain updated, Operation operation) { - super(original, updated, operation); - } - - @Override - public void entitySpecificUpdate() { - updateExperts(); - } - - private void updateExperts() { - List origExperts = listOrEmpty(original.getExperts()); - List updatedExperts = listOrEmpty(updated.getExperts()); - updateToRelationships( - FIELD_EXPERTS, - DOMAIN, - original.getId(), - Relationship.EXPERT, - Entity.USER, - origExperts, - updatedExperts, - false); - } - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java index 3448db276022..f42ba85c9db0 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java +++ 
b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java @@ -51,7 +51,7 @@ default String getNameColumn() { default String getNameHashColumn() { return "nameHash"; - } + }; default boolean supportsSoftDelete() { return true; @@ -64,7 +64,7 @@ default boolean supportsSoftDelete() { @ConnectionAwareSqlUpdate( value = "INSERT INTO (, json) VALUES (:nameHashColumnValue, :json :: jsonb)", connectionType = POSTGRES) - int insert( + void insert( @Define("table") String table, @Define("nameHashColumn") String nameHashColumn, @BindFQN("nameHashColumnValue") String nameHashColumnValue, @@ -288,7 +288,10 @@ default String getCondition(Include include) { if (include == null || include == Include.NON_DELETED) { return "AND deleted = FALSE"; } - return include == Include.DELETED ? " AND deleted = TRUE" : ""; + if (include == Include.DELETED) { + return " AND deleted = TRUE"; + } + return ""; } default T findEntityById(UUID id, Include include) { @@ -315,7 +318,11 @@ default T findEntityByName(String fqn, String nameHashColumn, Include include) { default T jsonToEntity(String json, String identity) { Class clz = getEntityClass(); - T entity = json != null ? 
JsonUtils.readValue(json, clz) : null; + T entity = null; + if (json != null) { + + entity = JsonUtils.readValue(json, clz); + } if (entity == null) { String entityType = Entity.getEntityTypeFromClass(clz); throw EntityNotFoundException.byMessage(CatalogExceptionMessage.entityNotFound(entityType, identity)); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java index 2f23e85dad4c..edd9e44332dd 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java @@ -20,19 +20,12 @@ import static org.openmetadata.schema.type.Include.NON_DELETED; import static org.openmetadata.schema.utils.EntityInterfaceUtil.quoteName; import static org.openmetadata.service.Entity.ADMIN_USER_NAME; -import static org.openmetadata.service.Entity.DATA_PRODUCT; -import static org.openmetadata.service.Entity.DOMAIN; -import static org.openmetadata.service.Entity.FIELD_CHILDREN; -import static org.openmetadata.service.Entity.FIELD_DATA_PRODUCTS; import static org.openmetadata.service.Entity.FIELD_DELETED; import static org.openmetadata.service.Entity.FIELD_DESCRIPTION; import static org.openmetadata.service.Entity.FIELD_DISPLAY_NAME; -import static org.openmetadata.service.Entity.FIELD_DOMAIN; -import static org.openmetadata.service.Entity.FIELD_EXPERTS; import static org.openmetadata.service.Entity.FIELD_EXTENSION; import static org.openmetadata.service.Entity.FIELD_FOLLOWERS; import static org.openmetadata.service.Entity.FIELD_OWNER; -import static org.openmetadata.service.Entity.FIELD_REVIEWERS; import static org.openmetadata.service.Entity.FIELD_TAGS; import static org.openmetadata.service.Entity.FIELD_VOTES; import static org.openmetadata.service.Entity.USER; @@ -192,8 +185,6 @@ public abstract class EntityRepository { 
protected final boolean supportsFollower; protected final boolean supportsExtension; protected final boolean supportsVotes; - @Getter protected final boolean supportsDomain; - protected final boolean supportsDataProducts; protected boolean quoteFqn = false; // Entity fqns not hierarchical such user, teams, services need to be quoted /** Fields that can be updated during PATCH operation */ @@ -241,12 +232,6 @@ public abstract class EntityRepository { this.putFields.addField(allowedFields, FIELD_EXTENSION); } this.supportsVotes = allowedFields.contains(FIELD_VOTES); - this.supportsDomain = allowedFields.contains(FIELD_DOMAIN); - if (supportsDomain) { - this.patchFields.addField(allowedFields, FIELD_DOMAIN); - this.putFields.addField(allowedFields, FIELD_DOMAIN); - } - this.supportsDataProducts = allowedFields.contains(FIELD_DATA_PRODUCTS); } /** @@ -672,8 +657,6 @@ public void prepareInternal(T entity) { public void storeRelationshipsInternal(T entity) { storeOwner(entity, entity.getOwner()); applyTags(entity); - storeDomain(entity, entity.getDomain()); - storeDataProducts(entity, entity.getDataProducts()); storeRelationships(entity); } @@ -681,12 +664,9 @@ public T setFieldsInternal(T entity, Fields fields) { entity.setOwner(fields.contains(FIELD_OWNER) ? getOwner(entity) : entity.getOwner()); entity.setTags(fields.contains(FIELD_TAGS) ? getTags(entity) : entity.getTags()); entity.setExtension(fields.contains(FIELD_EXTENSION) ? getExtension(entity) : entity.getExtension()); - entity.setDomain(fields.contains(FIELD_DOMAIN) ? getDomain(entity) : entity.getDomain()); - entity.setDataProducts(fields.contains(FIELD_DATA_PRODUCTS) ? getDataProducts(entity) : entity.getDataProducts()); entity.setFollowers(fields.contains(FIELD_FOLLOWERS) ? getFollowers(entity) : entity.getFollowers()); - entity.setChildren(fields.contains(FIELD_CHILDREN) ? getChildren(entity) : entity.getChildren()); - entity.setExperts(fields.contains(FIELD_EXPERTS) ? 
getExperts(entity) : entity.getExperts()); - entity.setReviewers(fields.contains(FIELD_REVIEWERS) ? getReviewers(entity) : entity.getReviewers()); + entity.setChildren(fields.contains("children") ? getChildren(entity) : entity.getChildren()); + entity.setReviewers(fields.contains("reviewers") ? getReviewers(entity) : entity.getReviewers()); setFields(entity, fields); return entity; } @@ -695,12 +675,9 @@ public T clearFieldsInternal(T entity, Fields fields) { entity.setOwner(fields.contains(FIELD_OWNER) ? entity.getOwner() : null); entity.setTags(fields.contains(FIELD_TAGS) ? entity.getTags() : null); entity.setExtension(fields.contains(FIELD_EXTENSION) ? entity.getExtension() : null); - entity.setDomain(fields.contains(FIELD_DOMAIN) ? entity.getDomain() : null); - entity.setDataProducts(fields.contains(FIELD_DATA_PRODUCTS) ? entity.getDataProducts() : null); entity.setFollowers(fields.contains(FIELD_FOLLOWERS) ? entity.getFollowers() : null); - entity.setChildren(fields.contains(FIELD_CHILDREN) ? entity.getChildren() : null); - entity.setExperts(fields.contains(FIELD_EXPERTS) ? entity.getExperts() : null); - entity.setReviewers(fields.contains(FIELD_REVIEWERS) ? entity.getReviewers() : null); + entity.setChildren(fields.contains("children") ? entity.getChildren() : null); + entity.setReviewers(fields.contains("reviewers") ? 
entity.getReviewers() : null); clearFields(entity, fields); return entity; } @@ -1035,18 +1012,10 @@ protected void store(T entity, boolean update) { entity.withHref(null); EntityReference owner = entity.getOwner(); entity.setOwner(null); - List children = entity.getChildren(); - entity.setChildren(null); List tags = entity.getTags(); entity.setTags(null); - EntityReference domain = entity.getDomain(); - entity.setDomain(null); - List dataProducts = entity.getDataProducts(); - entity.setDataProducts(null); List followers = entity.getFollowers(); entity.setFollowers(null); - List experts = entity.getExperts(); - entity.setExperts(null); if (update) { dao.update(entity.getId(), entity.getFullyQualifiedName(), JsonUtils.pojoToJson(entity)); @@ -1059,45 +1028,18 @@ protected void store(T entity, boolean update) { // Restore the relationships entity.setOwner(owner); - entity.setChildren(children); entity.setTags(tags); - entity.setDomain(domain); - entity.setDataProducts(dataProducts); entity.setFollowers(followers); - entity.setExperts(experts); } protected void storeTimeSeries(String fqn, String extension, String jsonSchema, String entityJson, Long timestamp) { daoCollection.entityExtensionTimeSeriesDao().insert(fqn, extension, jsonSchema, entityJson); } - protected void storeTimeSeriesWithOperation( - String fqn, - String extension, - String jsonSchema, - String entityJson, - Long timestamp, - String operation, - boolean update) { - if (update) { - daoCollection - .entityExtensionTimeSeriesDao() - .updateExtensionByOperation(fqn, extension, entityJson, timestamp, operation); - } else { - daoCollection.entityExtensionTimeSeriesDao().insert(fqn, extension, jsonSchema, entityJson); - } - } - public String getExtensionAtTimestamp(String fqn, String extension, Long timestamp) { return daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqn, extension, timestamp); } - public String getExtensionAtTimestampWithOperation(String fqn, String extension, Long 
timestamp, String operation) { - return daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestampWithOperation(fqn, extension, timestamp, operation); - } - public String getLatestExtensionFromTimeseries(String fqn, String extension) { return daoCollection.entityExtensionTimeSeriesDao().getLatestExtension(fqn, extension); } @@ -1512,14 +1454,6 @@ public EntityReference getOwner(T entity) { return !supportsOwner ? null : getFromEntityRef(entity.getId(), Relationship.OWNS, null, false); } - public EntityReference getDomain(T entity) { - return getFromEntityRef(entity.getId(), Relationship.HAS, DOMAIN, false); - } - - private List getDataProducts(T entity) { - return !supportsDataProducts ? null : findFrom(entity.getId(), entityType, Relationship.HAS, DATA_PRODUCT); - } - protected EntityReference getParent(T entity) { return getFromEntityRef(entity.getId(), Relationship.CONTAINS, entityType, false); } @@ -1532,42 +1466,10 @@ protected List getReviewers(T entity) { return findFrom(entity.getId(), entityType, Relationship.REVIEWS, Entity.USER); } - protected List getExperts(T entity) { - return findTo(entity.getId(), entityType, Relationship.EXPERT, Entity.USER); - } - public EntityReference getOwner(EntityReference ref) { return !supportsOwner ? 
null : Entity.getEntityReferenceById(ref.getType(), ref.getId(), ALL); } - public T inheritDomain(T entity, Fields fields, EntityInterface parent) { - if (fields.contains(FIELD_DOMAIN) && entity.getDomain() == null) { - entity.setDomain(parent.getDomain()); - } - return entity; - } - - public T inheritOwner(T entity, Fields fields, EntityInterface parent) { - if (fields.contains(FIELD_OWNER) && entity.getOwner() == null) { - entity.setOwner(parent.getOwner()); - } - return entity; - } - - public T inheritExperts(T entity, Fields fields, EntityInterface parent) { - if (fields.contains(FIELD_EXPERTS) && nullOrEmpty(entity.getExperts())) { - entity.setExperts(parent.getExperts()); - } - return entity; - } - - public T inheritReviewers(T entity, Fields fields, EntityInterface parent) { - if (fields.contains(FIELD_REVIEWERS) && nullOrEmpty(entity.getReviewers())) { - entity.setReviewers(parent.getReviewers()); - } - return entity; - } - protected void populateOwner(EntityReference owner) { if (owner == null) { return; @@ -1589,25 +1491,6 @@ protected void storeOwner(T entity, EntityReference owner) { } } - protected void storeDomain(T entity, EntityReference domain) { - if (supportsDomain && domain != null) { - // Add relationship domain --- has ---> entity - LOG.info("Adding domain {} for entity {}:{}", domain.getFullyQualifiedName(), entityType, entity.getId()); - addRelationship(domain.getId(), entity.getId(), Entity.DOMAIN, entityType, Relationship.HAS); - } - } - - protected void storeDataProducts(T entity, List dataProducts) { - if (supportsDataProducts && !nullOrEmpty(dataProducts)) { - for (EntityReference dataProduct : dataProducts) { - // Add relationship dataProduct --- has ---> entity - LOG.info( - "Adding dataProduct {} for entity {}:{}", dataProduct.getFullyQualifiedName(), entityType, entity.getId()); - addRelationship(dataProduct.getId(), entity.getId(), Entity.DATA_PRODUCT, entityType, Relationship.HAS); - } - } - } - /** Remove owner relationship for a 
given entity */ private void removeOwner(T entity, EntityReference owner) { if (EntityUtil.getId(owner) != null) { @@ -1623,7 +1506,7 @@ public void updateOwner(T ownedEntity, EntityReference originalOwner, EntityRefe } public final Fields getFields(String fields) { - if ("*".equals(fields)) { + if (fields != null && fields.equals("*")) { return new Fields(allowedFields, String.join(",", allowedFields)); } return new Fields(allowedFields, fields); @@ -1688,13 +1571,6 @@ public EntityReference validateOwner(EntityReference owner) { return Entity.getEntityReferenceById(owner.getType(), owner.getId(), ALL); } - public EntityReference validateDomain(String domainFqn) { - if (!supportsDomain || domainFqn == null) { - return null; - } - return Entity.getEntityReferenceByName(Entity.DOMAIN, domainFqn, NON_DELETED); - } - /** Override this method to support downloading CSV functionality */ public String exportToCsv(String name, String user) throws IOException { throw new IllegalArgumentException(csvNotSupported(entityType)); @@ -1769,9 +1645,6 @@ public final void update() { updateOwner(); updateExtension(); updateTags(updated.getFullyQualifiedName(), FIELD_TAGS, original.getTags(), updated.getTags()); - updateDomain(); - updateDataProducts(); - updateExperts(); entitySpecificUpdate(); } @@ -1903,63 +1776,6 @@ private void updateExtension() { storeExtension(updated); } - private void updateDomain() { - if (original.getDomain() == updated.getDomain()) { - return; - } - - EntityReference origDomain = original.getDomain(); - EntityReference updatedDomain = updated.getDomain(); - if ((operation.isPatch() || updatedDomain != null) - && recordChange(FIELD_DOMAIN, origDomain, updatedDomain, true, entityReferenceMatch)) { - if (origDomain != null) { - LOG.info( - "Removing domain {} for entity {}", origDomain.getFullyQualifiedName(), original.getFullyQualifiedName()); - deleteRelationship(origDomain.getId(), Entity.DOMAIN, original.getId(), entityType, Relationship.HAS); - } - if 
(updatedDomain != null) { - // Add relationship owner --- owns ---> ownedEntity - LOG.info( - "Adding domain {} for entity {}", - updatedDomain.getFullyQualifiedName(), - original.getFullyQualifiedName()); - addRelationship(updatedDomain.getId(), original.getId(), Entity.DOMAIN, entityType, Relationship.HAS); - } - } else { - updated.setDomain(original.getDomain()); - } - } - - private void updateDataProducts() { - if (!supportsDataProducts) { - return; - } - List origDataProducts = listOrEmpty(original.getDataProducts()); - List updatedDataProducts = listOrEmpty(updated.getDataProducts()); - updateFromRelationships( - FIELD_DATA_PRODUCTS, - DATA_PRODUCT, - origDataProducts, - updatedDataProducts, - Relationship.HAS, - entityType, - original.getId()); - } - - private void updateExperts() { - List origExperts = listOrEmpty(original.getExperts()); - List updatedExperts = listOrEmpty(updated.getExperts()); - updateToRelationships( - FIELD_EXPERTS, - Entity.DATA_PRODUCT, - original.getId(), - Relationship.EXPERT, - Entity.USER, - origExperts, - updatedExperts, - false); - } - public final boolean updateVersion(Double oldVersion) { Double newVersion = oldVersion; if (majorVersionChange) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityTimeSeriesDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityTimeSeriesDAO.java new file mode 100644 index 000000000000..0ae1150be040 --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityTimeSeriesDAO.java @@ -0,0 +1,419 @@ +package org.openmetadata.service.jdbi3; + +import static org.openmetadata.service.jdbi3.locator.ConnectionType.MYSQL; +import static org.openmetadata.service.jdbi3.locator.ConnectionType.POSTGRES; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.List; +import org.jdbi.v3.core.mapper.RowMapper; +import org.jdbi.v3.core.statement.StatementContext; +import 
org.jdbi.v3.sqlobject.config.RegisterRowMapper; +import org.jdbi.v3.sqlobject.customizer.Bind; +import org.jdbi.v3.sqlobject.customizer.BindList; +import org.jdbi.v3.sqlobject.customizer.Define; +import org.jdbi.v3.sqlobject.statement.SqlQuery; +import org.jdbi.v3.sqlobject.statement.SqlUpdate; +import org.openmetadata.schema.analytics.ReportData; +import org.openmetadata.service.jdbi3.locator.ConnectionAwareSqlQuery; +import org.openmetadata.service.jdbi3.locator.ConnectionAwareSqlUpdate; +import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.jdbi.BindFQN; + +public interface EntityTimeSeriesDAO { + String getTimeSeriesTableName(); + + enum OrderBy { + ASC, + DESC + } + + class ReportDataMapper implements RowMapper { + @Override + public CollectionDAO.ReportDataRow map(ResultSet rs, StatementContext ctx) throws SQLException { + String rowNumber = rs.getString("row_num"); + String json = rs.getString("json"); + ReportData reportData; + reportData = JsonUtils.readValue(json, ReportData.class); + return new CollectionDAO.ReportDataRow(rowNumber, reportData); + } + } + + @ConnectionAwareSqlUpdate( + value = + "INSERT INTO
(entityFQNHash, extension, jsonSchema, json) " + + "VALUES (:entityFQNHash, :extension, :jsonSchema, :json)", + connectionType = MYSQL) + @ConnectionAwareSqlUpdate( + value = + "INSERT INTO
(entityFQNHash, extension, jsonSchema, json) " + + "VALUES (:entityFQNHash, :extension, :jsonSchema, (:json :: jsonb))", + connectionType = POSTGRES) + void insert( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("jsonSchema") String jsonSchema, + @Bind("json") String json); + + default void insert(String entityFQNHash, String extension, String jsonSchema, String json) { + insert(getTimeSeriesTableName(), entityFQNHash, extension, jsonSchema, json); + } + + @ConnectionAwareSqlUpdate( + value = + "UPDATE
set json = :json where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", + connectionType = MYSQL) + @ConnectionAwareSqlUpdate( + value = + "UPDATE
set json = (:json :: jsonb) where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", + connectionType = POSTGRES) + void update( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("json") String json, + @Bind("timestamp") Long timestamp); + + default void update(String entityFQNHash, String extension, String json, Long timestamp) { + update(getTimeSeriesTableName(), entityFQNHash, extension, json, timestamp); + } + + @ConnectionAwareSqlUpdate( + value = + "UPDATE
set json = :json where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp and json -> '$.operation' = :operation", + connectionType = MYSQL) + @ConnectionAwareSqlUpdate( + value = + "UPDATE
set json = (:json :: jsonb) where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp and json #>>'{operation}' = :operation", + connectionType = POSTGRES) + void updateExtensionByOperation( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("json") String json, + @Bind("timestamp") Long timestamp, + @Bind("operation") String operation); + + default void updateExtensionByOperation( + String entityFQNHash, String extension, String json, Long timestamp, String operation) { + updateExtensionByOperation(getTimeSeriesTableName(), entityFQNHash, extension, json, timestamp, operation); + } + + @SqlQuery("SELECT json FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension") + String getExtension( + @Define("table") String table, @BindFQN("entityFQNHash") String entityId, @Bind("extension") String extension); + + default String getExtension(String entityId, String extension) { + return getExtension(getTimeSeriesTableName(), entityId, extension); + } + + @SqlQuery("SELECT count(*) FROM
WHERE entityFQNHash = :entityFQNHash") + int listCount(@Define("table") String table, @BindFQN("entityFQNHash") String entityFQNHash); + + default int listCount(String entityFQNHash) { + return listCount(getTimeSeriesTableName(), entityFQNHash); + } + + /** @deprecated */ + @SqlQuery("SELECT COUNT(DISTINCT entityFQN) FROM
") + @Deprecated(since = "1.1.1") + int listDistinctCount(@Define("table") String table); + + default int listDistinctCount() { + return listDistinctCount(getTimeSeriesTableName()); + } + + @ConnectionAwareSqlQuery( + value = + "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " + + "FROM
WHERE entityFQNHash = :entityFQNHash) " + + "SELECT row_num, json FROM data WHERE row_num > :after LIMIT :limit", + connectionType = MYSQL) + @ConnectionAwareSqlQuery( + value = + "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " + + "FROM
WHERE entityFQNHash = :entityFQNHash) " + + "SELECT row_num, json FROM data WHERE row_num > (:after :: integer) LIMIT :limit", + connectionType = POSTGRES) + @RegisterRowMapper(ReportDataMapper.class) + List getAfterExtension( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("limit") int limit, + @Bind("after") String after); + + default List getAfterExtension(String entityFQNHash, int limit, String after) { + return getAfterExtension(getTimeSeriesTableName(), entityFQNHash, limit, after); + } + + @SqlQuery( + "SELECT json FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") + String getExtensionAtTimestamp( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") long timestamp); + + default String getExtensionAtTimestamp(String entityFQNHash, String extension, long timestamp) { + return getExtensionAtTimestamp(getTimeSeriesTableName(), entityFQNHash, extension, timestamp); + } + + @ConnectionAwareSqlQuery( + value = + "SELECT json FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json -> '$.operation' = :operation", + connectionType = MYSQL) + @ConnectionAwareSqlQuery( + value = + "SELECT json FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json #>>'{operation}' = :operation", + connectionType = POSTGRES) + String getExtensionAtTimestampWithOperation( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") long timestamp, + @Bind("operation") String operation); + + default String getExtensionAtTimestampWithOperation( + String entityFQNHash, String extension, long timestamp, String operation) { + return getExtensionAtTimestampWithOperation( + getTimeSeriesTableName(), entityFQNHash, extension, timestamp, operation); + } + + @SqlQuery( + "SELECT json FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension " + + "ORDER BY timestamp DESC LIMIT 1") + String getLatestExtension( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension); + + default String getLatestExtension(String entityFQNHash, String extension) { + return getLatestExtension(getTimeSeriesTableName(), entityFQNHash, extension); + } + + @SqlQuery( + "SELECT ranked.json FROM (SELECT json, ROW_NUMBER() OVER(PARTITION BY entityFQNHash ORDER BY timestamp DESC) AS row_num " + + "FROM
WHERE entityFQNHash IN () AND extension = :extension) ranked WHERE ranked.row_num = 1") + List getLatestExtensionByFQNs( + @Define("table") String table, + @BindList("entityFQNHashes") List entityFQNHashes, + @Bind("extension") String extension); + + default List getLatestExtensionByFQNs(List entityFQNHashes, String extension) { + return getLatestExtensionByFQNs(getTimeSeriesTableName(), entityFQNHashes, extension); + } + + @SqlQuery("SELECT json FROM
WHERE extension = :extension " + "ORDER BY timestamp DESC LIMIT 1") + String getLatestByExtension(@Define("table") String table, @Bind("extension") String extension); + + default String getLatestByExtension(String extension) { + return getLatestByExtension(getTimeSeriesTableName(), extension); + } + + @SqlQuery("SELECT json FROM
WHERE extension = :extension " + "ORDER BY timestamp DESC") + List getAllByExtension(@Define("table") String table, @Bind("extension") String extension); + + default List getAllByExtension(String extension) { + return getAllByExtension(getTimeSeriesTableName(), extension); + } + + @SqlUpdate("DELETE FROM
WHERE entityFQNHash = :entityFQNHash") + void deleteAll(@Define("table") String table, @Bind("entityFQNHash") String entityFQNHash); + + default void deleteAll(String entityFQNHash) { + deleteAll(getTimeSeriesTableName(), entityFQNHash); + } + + @SqlUpdate("DELETE FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension") + void delete( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension); + + default void delete(String entityFQNHash, String extension) { + delete(getTimeSeriesTableName(), entityFQNHash, extension); + } + + // This just saves the limit number of records, and remove all other with given extension + @SqlUpdate( + "DELETE FROM
WHERE extension = :extension AND entityFQNHash NOT IN(SELECT entityFQNHash FROM (select * from
WHERE extension = :extension ORDER BY timestamp DESC LIMIT :records) AS subquery)") + void deleteLastRecords( + @Define("table") String table, @Bind("extension") String extension, @Bind("records") int noOfRecord); + + default void deleteLastRecords(String extension, int noOfRecord) { + deleteLastRecords(getTimeSeriesTableName(), extension, noOfRecord); + } + + @SqlUpdate( + "DELETE FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") + void deleteAtTimestamp( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") Long timestamp); + + default void deleteAtTimestamp(String entityFQNHash, String extension, Long timestamp) { + deleteAtTimestamp(getTimeSeriesTableName(), entityFQNHash, extension, timestamp); + } + + @SqlUpdate( + "DELETE FROM
WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp < :timestamp") + void deleteBeforeTimestamp( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") Long timestamp); + + default void deleteBeforeTimestamp(String entityFQNHash, String extension, Long timestamp) { + deleteBeforeTimestamp(getTimeSeriesTableName(), entityFQNHash, extension, timestamp); + } + + @SqlQuery( + "SELECT json FROM
where entityFQNHash = :entityFQNHash and extension = :extension " + + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp DESC") + List listBetweenTimestamps( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("startTs") Long startTs, + @Bind("endTs") long endTs); + + default List listBetweenTimestamps(String entityFQNHash, String extension, Long startTs, long endTs) { + return listBetweenTimestamps(getTimeSeriesTableName(), entityFQNHash, extension, startTs, endTs); + } + + @SqlQuery( + "SELECT json FROM
where entityFQNHash = :entityFQNHash and extension = :extension " + + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp ") + List listBetweenTimestampsByOrder( + @Define("table") String table, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("startTs") Long startTs, + @Bind("endTs") long endTs, + @Define("orderBy") CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy orderBy); + + default List listBetweenTimestampsByOrder( + String entityFQNHash, + String extension, + Long startTs, + long endTs, + CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy orderBy) { + return listBetweenTimestampsByOrder(getTimeSeriesTableName(), entityFQNHash, extension, startTs, endTs, orderBy); + } + + @ConnectionAwareSqlUpdate( + value = + "UPDATE
SET json = :json " + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + "", + connectionType = MYSQL) + @ConnectionAwareSqlUpdate( + value = + "UPDATE
SET json = (:json :: jsonb) " + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + "", + connectionType = POSTGRES) + void updateExtensionByKeyInternal( + @Define("table") String table, + @Bind("value") String value, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("json") String json, + @Define("mysqlCond") String mysqlCond, + @Define("psqlCond") String psqlCond); + + default void updateExtensionByKey(String key, String value, String entityFQN, String extension, String json) { + String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); + String psqlCond = String.format("AND json->>'%s' = :value", key); + updateExtensionByKeyInternal(getTimeSeriesTableName(), value, entityFQN, extension, json, mysqlCond, psqlCond); + } + + /* + * Support selecting data filtering by top-level keys in the JSON + */ + @ConnectionAwareSqlQuery( + value = + "SELECT json from
" + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + "", + connectionType = MYSQL) + @ConnectionAwareSqlQuery( + value = + "SELECT json from
" + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + "", + connectionType = POSTGRES) + String getExtensionByKeyInternal( + @Define("table") String table, + @Bind("value") String value, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Define("mysqlCond") String mysqlCond, + @Define("psqlCond") String psqlCond); + + default String getExtensionByKey(String key, String value, String entityFQN, String extension) { + String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); + String psqlCond = String.format("AND json->>'%s' = :value", key); + return getExtensionByKeyInternal(getTimeSeriesTableName(), value, entityFQN, extension, mysqlCond, psqlCond); + } + + @ConnectionAwareSqlQuery( + value = + "SELECT json from
" + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + " " + + "ORDER BY timestamp DESC LIMIT 1", + connectionType = MYSQL) + @ConnectionAwareSqlQuery( + value = + "SELECT json from
" + + "WHERE entityFQNHash = :entityFQNHash " + + "AND extension = :extension " + + " " + + "ORDER BY timestamp DESC LIMIT 1", + connectionType = POSTGRES) + String getLatestExtensionByKeyInternal( + @Define("table") String table, + @Bind("value") String value, + @BindFQN("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Define("mysqlCond") String mysqlCond, + @Define("psqlCond") String psqlCond); + + default String getLatestExtensionByKey(String key, String value, String entityFQN, String extension) { + String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); + String psqlCond = String.format("AND json->>'%s' = :value", key); + return getLatestExtensionByKeyInternal(getTimeSeriesTableName(), value, entityFQN, extension, mysqlCond, psqlCond); + } + + default void storeTimeSeriesWithOperation( + String fqn, + String extension, + String jsonSchema, + String entityJson, + Long timestamp, + String operation, + boolean update) { + if (update) { + updateExtensionByOperation(fqn, extension, entityJson, timestamp, operation); + } else { + insert(fqn, extension, jsonSchema, entityJson); + } + } + + /** @deprecated */ + @SqlQuery("SELECT DISTINCT entityFQN FROM
WHERE entityFQNHash = '' or entityFQNHash is null LIMIT :limit") + @Deprecated(since = "1.1.1") + List migrationListDistinctWithOffset(@Define("table") String table, @Bind("limit") int limit); + + default List migrationListDistinctWithOffset(int limit) { + return migrationListDistinctWithOffset(getTimeSeriesTableName(), limit); + } +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java index 32e345f57d58..c98e4a3a032a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java @@ -17,7 +17,9 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; +import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import static org.openmetadata.schema.type.Include.ALL; +import static org.openmetadata.service.Entity.FIELD_OWNER; import static org.openmetadata.service.Entity.FIELD_REVIEWERS; import static org.openmetadata.service.Entity.GLOSSARY; import static org.openmetadata.service.Entity.GLOSSARY_TERM; @@ -33,7 +35,6 @@ import java.util.UUID; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.ImmutablePair; -import org.openmetadata.schema.EntityInterface; import org.openmetadata.schema.api.data.TermReference; import org.openmetadata.schema.entity.data.Glossary; import org.openmetadata.schema.entity.data.GlossaryTerm; @@ -82,15 +83,31 @@ public GlossaryTerm clearFields(GlossaryTerm entity, Fields fields) { @Override public GlossaryTerm setInheritedFields(GlossaryTerm glossaryTerm, Fields fields) { - EntityInterface parent; - if (glossaryTerm.getParent() != null) { - parent = get(null, glossaryTerm.getParent().getId(), getFields("owner,reviewers,domain")); - } else { - parent = 
Entity.getEntity(glossaryTerm.getGlossary(), "owner,reviewers,domain", ALL); + Glossary glossary = null; + GlossaryTerm parentTerm = null; + if (fields.contains(FIELD_OWNER) && glossaryTerm.getOwner() == null) { + if (glossaryTerm.getParent() != null) { + parentTerm = get(null, glossaryTerm.getParent().getId(), getFields("owner,reviewers")); + glossaryTerm.setOwner(parentTerm.getOwner()); + } else { + glossary = Entity.getEntity(glossaryTerm.getGlossary(), "owner,reviewers", ALL); + glossaryTerm.setOwner(glossary.getOwner()); + } + } + + if (fields.contains(FIELD_REVIEWERS) && nullOrEmpty(glossaryTerm.getReviewers())) { + if (glossaryTerm.getParent() != null) { + if (parentTerm == null) { + parentTerm = get(null, glossaryTerm.getParent().getId(), getFields(FIELD_REVIEWERS)); + } + glossaryTerm.setReviewers(parentTerm.getReviewers()); + } else { + if (glossary == null) { + glossary = Entity.getEntity(glossaryTerm.getGlossary(), FIELD_REVIEWERS, ALL); + } + glossaryTerm.setReviewers(glossary.getReviewers()); + } } - inheritOwner(glossaryTerm, fields, parent); - inheritDomain(glossaryTerm, fields, parent); - inheritReviewers(glossaryTerm, fields, parent); return glossaryTerm; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java index 7ba804ca3f64..af773ef08202 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java @@ -5,6 +5,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import lombok.Getter; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; @@ -55,7 +56,8 @@ public String getCondition(String tableName) { condition = addCondition(condition, getWebhookCondition(tableName)); condition = addCondition(condition, 
getWebhookTypeCondition(tableName)); condition = addCondition(condition, getTestCaseCondition()); - condition = addCondition(condition, getTestSuiteCondition()); + condition = addCondition(condition, getTestSuiteTypeCondition()); + condition = addCondition(condition, getTestSuiteFQNCondition()); return condition.isEmpty() ? "WHERE TRUE" : "WHERE " + condition; } @@ -85,6 +87,13 @@ public String getServiceCondition(String tableName) { return service == null ? "" : getFqnPrefixCondition(tableName, EntityInterfaceUtil.quoteName(service)); } + public String getTestSuiteFQNCondition() { + String testSuiteName = queryParams.get("testSuite"); + return testSuiteName == null + ? "" + : String.format("fqnHash LIKE '%s%s%%'", FullyQualifiedName.buildHash(testSuiteName), Entity.SEPARATOR); + } + public String getParentCondition(String tableName) { String parentFqn = queryParams.get("parent"); return parentFqn == null ? "" : getFqnPrefixCondition(tableName, parentFqn); @@ -158,7 +167,7 @@ private String getTestCaseCondition() { return addCondition(condition1, condition2); } - private String getTestSuiteCondition() { + private String getTestSuiteTypeCondition() { String testSuiteType = getQueryParam("testSuiteType"); if (testSuiteType == null) { @@ -197,17 +206,22 @@ private String getWebhookTypePrefixCondition(String tableName, String typePrefix private String getPipelineTypePrefixCondition(String tableName, String pipelineType) { pipelineType = escape(pipelineType); + String inCondition = getInConditionFromString(pipelineType); if (DatasourceConfig.getInstance().isMySQL()) { return tableName == null ? 
String.format( - "JSON_UNQUOTE(JSON_EXTRACT(ingestion_pipeline_entity.json, '$.pipelineType')) = '%s'", pipelineType) + "JSON_UNQUOTE(JSON_EXTRACT(ingestion_pipeline_entity.json, '$.pipelineType')) IN (%s)", inCondition) : String.format( - "%s.JSON_UNQUOTE(JSON_EXTRACT(ingestion_pipeline_entity.json, '$.pipelineType')) = '%s%%'", - tableName, pipelineType); + "%s.JSON_UNQUOTE(JSON_EXTRACT(ingestion_pipeline_entity.json, '$.pipelineType')) IN (%s)", + tableName, inCondition); } return tableName == null - ? String.format("ingestion_pipeline_entity.json->>'pipelineType' = '%s'", pipelineType) - : String.format("%s.json->>'pipelineType' = '%s%%'", tableName, pipelineType); + ? String.format("ingestion_pipeline_entity.json->>'pipelineType' IN (%s)", inCondition) + : String.format("%s.json->>'pipelineType' IN (%s)", tableName, inCondition); + } + + private String getInConditionFromString(String condition) { + return Arrays.stream(condition.split(",")).map(s -> String.format("'%s'", s)).collect(Collectors.joining(",")); } private String getCategoryPrefixCondition(String tableName, String category) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java index eef51bc43da5..db2051425b78 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java @@ -32,6 +32,14 @@ public interface MigrationDAO { connectionType = POSTGRES) String getVersionMigrationChecksum(@Bind("version") String version) throws StatementException; + @ConnectionAwareSqlQuery( + value = "SELECT sqlStatement FROM SERVER_MIGRATION_SQL_LOGS where version = :version and checksum = :checksum", + connectionType = MYSQL) + @ConnectionAwareSqlQuery( + value = "SELECT sqlStatement FROM SERVER_MIGRATION_SQL_LOGS where version = :version and checksum = :checksum", + 
connectionType = POSTGRES) + String getSqlQuery(@Bind("version") String version, @Bind("checksum") String checksum) throws StatementException; + @ConnectionAwareSqlUpdate( value = "INSERT INTO SERVER_CHANGE_LOG (version, migrationFileName, checksum, installed_on)" diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java index 3276aeae954c..2087cd1dbb0f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java @@ -15,10 +15,8 @@ import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.DASHBOARD; import static org.openmetadata.service.Entity.MLMODEL; -import static org.openmetadata.service.Entity.MLMODEL_SERVICE; import static org.openmetadata.service.util.EntityUtil.entityReferenceMatch; import static org.openmetadata.service.util.EntityUtil.mlFeatureMatch; import static org.openmetadata.service.util.EntityUtil.mlHyperParameterMatch; @@ -181,13 +179,6 @@ public void storeRelationships(MlModel mlModel) { setMlFeatureSourcesLineage(mlModel); } - @Override - public MlModel setInheritedFields(MlModel mlModel, Fields fields) { - // If mlModel does not have domain, then inherit it from parent MLModel service - MlModelService service = Entity.getEntity(MLMODEL_SERVICE, mlModel.getService().getId(), "domain", ALL); - return inheritDomain(mlModel, fields, service); - } - /** * If we have the properties MLFeatures -> MlFeatureSources and the feature sources have properly informed the Data * Source EntityRef, then we will automatically build the lineage between tables and ML Model. 
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java index 76b2dad68c88..cb3d80516b70 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java @@ -15,9 +15,7 @@ import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.FIELD_TAGS; -import static org.openmetadata.service.Entity.PIPELINE_SERVICE; import static org.openmetadata.service.util.EntityUtil.taskMatch; import java.util.ArrayList; @@ -207,13 +205,6 @@ public void storeRelationships(Pipeline pipeline) { addRelationship(service.getId(), pipeline.getId(), service.getType(), Entity.PIPELINE, Relationship.CONTAINS); } - @Override - public Pipeline setInheritedFields(Pipeline pipeline, Fields fields) { - // If pipeline does not have domain, then inherit it from parent Pipeline service - PipelineService service = Entity.getEntity(PIPELINE_SERVICE, pipeline.getService().getId(), "domain", ALL); - return inheritDomain(pipeline, fields, service); - } - @Override public void applyTags(Pipeline pipeline) { // Add table level tags by adding tag to table relationship diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java index 69cda89e47de..6e7dd4accac0 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java @@ -4,14 +4,12 @@ import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import 
static org.openmetadata.service.Entity.USER; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.UUID; +import java.util.*; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import lombok.SneakyThrows; import org.openmetadata.schema.entity.data.Query; +import org.openmetadata.schema.entity.teams.User; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.ChangeEvent; import org.openmetadata.schema.type.EntityReference; @@ -22,10 +20,15 @@ import org.openmetadata.service.Entity; import org.openmetadata.service.resources.query.QueryResource; import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.RestUtil; public class QueryRepository extends EntityRepository { private static final String QUERY_USED_IN_FIELD = "queryUsedIn"; + + private static final String QUERY_USERS_FIELD = "users"; + + private static final String QUERY_USED_BY_FIELD = "usedBy"; private static final String QUERY_PATCH_FIELDS = "users,query,queryUsedIn"; private static final String QUERY_UPDATE_FIELDS = "users,votes,queryUsedIn"; @@ -116,6 +119,36 @@ private void storeQueryUsedIn( } } + public RestUtil.PutResponse AddQueryUser( + UriInfo uriInfo, String updatedBy, UUID queryId, List userFqnList) { + Query query = Entity.getEntity(Entity.QUERY, queryId, QUERY_USERS_FIELD, Include.NON_DELETED); + List oldValue = query.getUsers(); + + for (String userFqn : userFqnList) { + User user = Entity.getEntityByName(USER, userFqn, "", Include.NON_DELETED); + EntityReference entityRef = user.getEntityReference(); + addRelationship(entityRef.getId(), queryId, entityRef.getType(), Entity.QUERY, Relationship.USES); + } + // Populate Fields + setFieldsInternal(query, new EntityUtil.Fields(allowedFields, QUERY_USERS_FIELD)); + Entity.withHref(uriInfo, query.getUsers()); + ChangeEvent changeEvent = + getQueryChangeEvent(updatedBy, 
QUERY_USERS_FIELD, oldValue, query.getUsers(), withHref(uriInfo, query)); + return new RestUtil.PutResponse<>(Response.Status.CREATED, changeEvent, RestUtil.ENTITY_FIELDS_CHANGED); + } + + public RestUtil.PutResponse AddQueryUsedBy( + UriInfo uriInfo, String updatedBy, UUID queryId, List userList) { + Query query = Entity.getEntity(Entity.QUERY, queryId, QUERY_UPDATE_FIELDS, Include.NON_DELETED); + Query oldQuery = JsonUtils.readValue(JsonUtils.pojoToJson(query), Query.class); + query.getUsedBy().addAll(userList); + ChangeEvent changeEvent = + getQueryChangeEvent( + updatedBy, QUERY_USERS_FIELD, oldQuery.getUsedBy(), query.getUsers(), withHref(uriInfo, query)); + update(uriInfo, oldQuery, query); + return new RestUtil.PutResponse<>(Response.Status.CREATED, changeEvent, RestUtil.ENTITY_FIELDS_CHANGED); + } + public RestUtil.PutResponse addQueryUsage( UriInfo uriInfo, String updatedBy, UUID queryId, List entityIds) { Query query = Entity.getEntity(Entity.QUERY, queryId, QUERY_USED_IN_FIELD, Include.NON_DELETED); @@ -187,6 +220,7 @@ public void entitySpecificUpdate() { deleted, EntityUtil.entityReferenceMatch); // Store Query Used in Relation + recordChange("usedBy", original.getUsedBy(), updated.getUsedBy(), true); storeQueryUsedIn(updated.getId(), added, deleted); String originalChecksum = EntityUtil.hash(original.getQuery()); String updatedChecksum = EntityUtil.hash(updated.getQuery()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java index f91b2df5366f..9d2e1bce23f5 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java @@ -1,5 +1,6 @@ package org.openmetadata.service.jdbi3; +import java.io.IOException; import java.util.List; import java.util.UUID; import 
javax.ws.rs.core.Response; @@ -22,7 +23,7 @@ public ReportDataRepository(CollectionDAO dao) { public Response addReportData(ReportData reportData) { reportData.setId(UUID.randomUUID()); daoCollection - .entityExtensionTimeSeriesDao() + .reportDataTimeSeriesDao() .insert( reportData.getReportDataType().value(), REPORT_DATA_EXTENSION, @@ -37,10 +38,15 @@ public ResultList getReportData(ReportDataType reportDataType, Long reportData = JsonUtils.readObjects( daoCollection - .entityExtensionTimeSeriesDao() + .reportDataTimeSeriesDao() .listBetweenTimestamps(reportDataType.value(), REPORT_DATA_EXTENSION, startTs, endTs), ReportData.class); return new ResultList<>(reportData, String.valueOf(startTs), String.valueOf(endTs), reportData.size()); } + + public void deleteReportDataAtDate(ReportDataType reportDataType, String date) throws IOException { + // We'll check if we have data to delete before we delete it + daoCollection.reportDataTimeSeriesDao().deleteReportDataTypeAtDate(reportDataType.value(), date); + } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchIndexRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchIndexRepository.java deleted file mode 100644 index 4377f5af66a1..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchIndexRepository.java +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.openmetadata.service.jdbi3; - -import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; -import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.schema.type.Include.ALL; -import static org.openmetadata.service.Entity.FIELD_DESCRIPTION; -import static org.openmetadata.service.Entity.FIELD_DISPLAY_NAME; -import static org.openmetadata.service.Entity.FIELD_FOLLOWERS; -import static org.openmetadata.service.Entity.FIELD_TAGS; -import static org.openmetadata.service.Entity.SEARCH_SERVICE; -import static org.openmetadata.service.util.EntityUtil.getSearchIndexField; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.function.BiPredicate; -import java.util.function.Function; -import java.util.stream.Collectors; -import javax.json.JsonPatch; -import org.jdbi.v3.sqlobject.transaction.Transaction; -import org.openmetadata.schema.EntityInterface; -import org.openmetadata.schema.entity.data.SearchIndex; -import org.openmetadata.schema.entity.services.SearchService; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.schema.type.Relationship; -import org.openmetadata.schema.type.SearchIndexField; -import org.openmetadata.schema.type.TagLabel; -import org.openmetadata.schema.type.TaskDetails; -import org.openmetadata.schema.type.searchindex.SearchIndexSampleData; -import org.openmetadata.service.Entity; -import org.openmetadata.service.exception.CatalogExceptionMessage; -import org.openmetadata.service.resources.feeds.MessageParser; -import org.openmetadata.service.resources.searchindex.SearchIndexResource; -import org.openmetadata.service.security.mask.PIIMasker; -import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.EntityUtil.Fields; -import org.openmetadata.service.util.FullyQualifiedName; -import 
org.openmetadata.service.util.JsonUtils; - -public class SearchIndexRepository extends EntityRepository { - @Override - public void setFullyQualifiedName(SearchIndex searchIndex) { - searchIndex.setFullyQualifiedName( - FullyQualifiedName.add(searchIndex.getService().getFullyQualifiedName(), searchIndex.getName())); - if (searchIndex.getFields() != null) { - setFieldFQN(searchIndex.getFullyQualifiedName(), searchIndex.getFields()); - } - } - - public SearchIndexRepository(CollectionDAO dao) { - super( - SearchIndexResource.COLLECTION_PATH, Entity.SEARCH_INDEX, SearchIndex.class, dao.searchIndexDAO(), dao, "", ""); - } - - @Override - public void prepare(SearchIndex searchIndex) { - SearchService searchService = Entity.getEntity(searchIndex.getService(), "", ALL); - searchIndex.setService(searchService.getEntityReference()); - searchIndex.setServiceType(searchService.getServiceType()); - // Validate field tags - if (searchIndex.getFields() != null) { - addDerivedFieldTags(searchIndex.getFields()); - validateSchemaFieldTags(searchIndex.getFields()); - } - } - - @Override - public void storeEntity(SearchIndex searchIndex, boolean update) { - // Relationships and fields such as service are derived and not stored as part of json - EntityReference service = searchIndex.getService(); - searchIndex.withService(null); - - // Don't store fields tags as JSON but build it on the fly based on relationships - List fieldsWithTags = null; - if (searchIndex.getFields() != null) { - fieldsWithTags = searchIndex.getFields(); - searchIndex.setFields(cloneWithoutTags(fieldsWithTags)); - searchIndex.getFields().forEach(field -> field.setTags(null)); - } - - store(searchIndex, update); - - // Restore the relationships - if (fieldsWithTags != null) { - searchIndex.setFields(fieldsWithTags); - } - searchIndex.withService(service); - } - - @Override - public void storeRelationships(SearchIndex searchIndex) { - setService(searchIndex, searchIndex.getService()); - } - - @Override - public 
SearchIndex setInheritedFields(SearchIndex searchIndex, Fields fields) { - // If searchIndex does not have domain, then inherit it from parent messaging service - SearchService service = Entity.getEntity(SEARCH_SERVICE, searchIndex.getService().getId(), "domain", ALL); - return inheritDomain(searchIndex, fields, service); - } - - @Override - public SearchIndex setFields(SearchIndex searchIndex, Fields fields) { - searchIndex.setService(getContainer(searchIndex.getId())); - searchIndex.setFollowers(fields.contains(FIELD_FOLLOWERS) ? getFollowers(searchIndex) : null); - if (searchIndex.getFields() != null) { - getFieldTags(fields.contains(FIELD_TAGS), searchIndex.getFields()); - } - return searchIndex; - } - - @Override - public SearchIndex clearFields(SearchIndex searchIndex, Fields fields) { - return searchIndex; - } - - @Override - public SearchIndexUpdater getUpdater(SearchIndex original, SearchIndex updated, Operation operation) { - return new SearchIndexUpdater(original, updated, operation); - } - - public void setService(SearchIndex searchIndex, EntityReference service) { - if (service != null && searchIndex != null) { - addRelationship( - service.getId(), searchIndex.getId(), service.getType(), Entity.SEARCH_INDEX, Relationship.CONTAINS); - searchIndex.setService(service); - } - } - - public SearchIndex getSampleData(UUID searchIndexId, boolean authorizePII) { - // Validate the request content - SearchIndex searchIndex = dao.findEntityById(searchIndexId); - - SearchIndexSampleData sampleData = - JsonUtils.readValue( - daoCollection.entityExtensionDAO().getExtension(searchIndex.getId().toString(), "searchIndex.sampleData"), - SearchIndexSampleData.class); - searchIndex.setSampleData(sampleData); - setFieldsInternal(searchIndex, Fields.EMPTY_FIELDS); - - // Set the fields tags. 
Will be used to mask the sample data - if (!authorizePII) { - getFieldTags(true, searchIndex.getFields()); - searchIndex.setTags(getTags(searchIndex.getFullyQualifiedName())); - return PIIMasker.getSampleData(searchIndex); - } - - return searchIndex; - } - - @Transaction - public SearchIndex addSampleData(UUID searchIndexId, SearchIndexSampleData sampleData) { - // Validate the request content - SearchIndex searchIndex = daoCollection.searchIndexDAO().findEntityById(searchIndexId); - - daoCollection - .entityExtensionDAO() - .insert( - searchIndexId.toString(), - "searchIndex.sampleData", - "searchIndexSampleData", - JsonUtils.pojoToJson(sampleData)); - setFieldsInternal(searchIndex, Fields.EMPTY_FIELDS); - return searchIndex.withSampleData(sampleData); - } - - private void setFieldFQN(String parentFQN, List fields) { - fields.forEach( - c -> { - String fieldFqn = FullyQualifiedName.add(parentFQN, c.getName()); - c.setFullyQualifiedName(fieldFqn); - if (c.getChildren() != null) { - setFieldFQN(fieldFqn, c.getChildren()); - } - }); - } - - private void getFieldTags(boolean setTags, List fields) { - for (SearchIndexField f : listOrEmpty(fields)) { - f.setTags(setTags ? 
getTags(f.getFullyQualifiedName()) : null); - getFieldTags(setTags, f.getChildren()); - } - } - - private void addDerivedFieldTags(List fields) { - if (nullOrEmpty(fields)) { - return; - } - - for (SearchIndexField field : fields) { - field.setTags(addDerivedTags(field.getTags())); - if (field.getChildren() != null) { - addDerivedFieldTags(field.getChildren()); - } - } - } - - List cloneWithoutTags(List fields) { - if (nullOrEmpty(fields)) { - return fields; - } - List copy = new ArrayList<>(); - fields.forEach(f -> copy.add(cloneWithoutTags(f))); - return copy; - } - - private SearchIndexField cloneWithoutTags(SearchIndexField field) { - List children = cloneWithoutTags(field.getChildren()); - return new SearchIndexField() - .withDescription(field.getDescription()) - .withName(field.getName()) - .withDisplayName(field.getDisplayName()) - .withFullyQualifiedName(field.getFullyQualifiedName()) - .withDataType(field.getDataType()) - .withDataTypeDisplay(field.getDataTypeDisplay()) - .withChildren(children); - } - - private void validateSchemaFieldTags(List fields) { - // Add field level tags by adding tag to field relationship - for (SearchIndexField field : fields) { - checkMutuallyExclusive(field.getTags()); - if (field.getChildren() != null) { - validateSchemaFieldTags(field.getChildren()); - } - } - } - - private void applyTags(List fields) { - // Add field level tags by adding tag to field relationship - for (SearchIndexField field : fields) { - applyTags(field.getTags(), field.getFullyQualifiedName()); - if (field.getChildren() != null) { - applyTags(field.getChildren()); - } - } - } - - @Override - public void applyTags(SearchIndex searchIndex) { - // Add table level tags by adding tag to table relationship - super.applyTags(searchIndex); - if (searchIndex.getFields() != null) { - applyTags(searchIndex.getFields()); - } - } - - @Override - public List getAllTags(EntityInterface entity) { - List allTags = new ArrayList<>(); - SearchIndex searchIndex = 
(SearchIndex) entity; - EntityUtil.mergeTags(allTags, searchIndex.getTags()); - List schemaFields = searchIndex.getFields() != null ? searchIndex.getFields() : null; - for (SearchIndexField schemaField : listOrEmpty(schemaFields)) { - EntityUtil.mergeTags(allTags, schemaField.getTags()); - } - return allTags; - } - - @Override - public void update(TaskDetails task, MessageParser.EntityLink entityLink, String newValue, String user) { - if (entityLink.getFieldName().equals("fields")) { - String schemaName = entityLink.getArrayFieldName(); - String childrenSchemaName = ""; - if (entityLink.getArrayFieldName().contains(".")) { - String fieldNameWithoutQuotes = - entityLink.getArrayFieldName().substring(1, entityLink.getArrayFieldName().length() - 1); - schemaName = fieldNameWithoutQuotes.substring(0, fieldNameWithoutQuotes.indexOf(".")); - childrenSchemaName = fieldNameWithoutQuotes.substring(fieldNameWithoutQuotes.lastIndexOf(".") + 1); - } - SearchIndex searchIndex = getByName(null, entityLink.getEntityFQN(), getFields("tags"), ALL, false); - SearchIndexField schemaField = null; - for (SearchIndexField field : searchIndex.getFields()) { - if (field.getName().equals(schemaName)) { - schemaField = field; - break; - } - } - if (!"".equals(childrenSchemaName) && schemaField != null) { - schemaField = getChildrenSchemaField(schemaField.getChildren(), childrenSchemaName); - } - if (schemaField == null) { - throw new IllegalArgumentException( - CatalogExceptionMessage.invalidFieldName("schema", entityLink.getArrayFieldName())); - } - - String origJson = JsonUtils.pojoToJson(searchIndex); - if (EntityUtil.isDescriptionTask(task.getType())) { - schemaField.setDescription(newValue); - } else if (EntityUtil.isTagTask(task.getType())) { - List tags = JsonUtils.readObjects(newValue, TagLabel.class); - schemaField.setTags(tags); - } - String updatedEntityJson = JsonUtils.pojoToJson(searchIndex); - JsonPatch patch = JsonUtils.getJsonPatch(origJson, updatedEntityJson); - patch(null, 
searchIndex.getId(), user, patch); - return; - } - super.update(task, entityLink, newValue, user); - } - - private static SearchIndexField getChildrenSchemaField(List fields, String childrenSchemaName) { - SearchIndexField childrenSchemaField = null; - for (SearchIndexField field : fields) { - if (field.getName().equals(childrenSchemaName)) { - childrenSchemaField = field; - break; - } - } - if (childrenSchemaField == null) { - for (SearchIndexField field : fields) { - if (field.getChildren() != null) { - childrenSchemaField = getChildrenSchemaField(field.getChildren(), childrenSchemaName); - if (childrenSchemaField != null) { - break; - } - } - } - } - return childrenSchemaField; - } - - public static Set getAllFieldTags(SearchIndexField field) { - Set tags = new HashSet<>(); - if (!listOrEmpty(field.getTags()).isEmpty()) { - tags.addAll(field.getTags()); - } - for (SearchIndexField c : listOrEmpty(field.getChildren())) { - tags.addAll(getAllFieldTags(c)); - } - return tags; - } - - public class SearchIndexUpdater extends EntityUpdater { - public static final String FIELD_DATA_TYPE_DISPLAY = "dataTypeDisplay"; - - public SearchIndexUpdater(SearchIndex original, SearchIndex updated, Operation operation) { - super(original, updated, operation); - } - - @Override - public void entitySpecificUpdate() { - if (updated.getFields() != null) { - updateSearchIndexFields( - "fields", - original.getFields() == null ? 
null : original.getFields(), - updated.getFields(), - EntityUtil.searchIndexFieldMatch); - } - recordChange("searchIndexSettings", original.getSearchIndexSettings(), updated.getSearchIndexSettings()); - } - - private void updateSearchIndexFields( - String fieldName, - List origFields, - List updatedFields, - BiPredicate fieldMatch) { - List deletedFields = new ArrayList<>(); - List addedFields = new ArrayList<>(); - recordListChange(fieldName, origFields, updatedFields, addedFields, deletedFields, fieldMatch); - // carry forward tags and description if deletedFields matches added field - Map addedFieldMap = - addedFields.stream().collect(Collectors.toMap(SearchIndexField::getName, Function.identity())); - - for (SearchIndexField deleted : deletedFields) { - if (addedFieldMap.containsKey(deleted.getName())) { - SearchIndexField addedField = addedFieldMap.get(deleted.getName()); - if (nullOrEmpty(addedField.getDescription()) && nullOrEmpty(deleted.getDescription())) { - addedField.setDescription(deleted.getDescription()); - } - if (nullOrEmpty(addedField.getTags()) && nullOrEmpty(deleted.getTags())) { - addedField.setTags(deleted.getTags()); - } - } - } - - // Delete tags related to deleted fields - deletedFields.forEach(deleted -> daoCollection.tagUsageDAO().deleteTagsByTarget(deleted.getFullyQualifiedName())); - - // Add tags related to newly added fields - for (SearchIndexField added : addedFields) { - applyTags(added.getTags(), added.getFullyQualifiedName()); - } - - // Carry forward the user generated metadata from existing fields to new fields - for (SearchIndexField updated : updatedFields) { - // Find stored field matching name, data type and ordinal position - SearchIndexField stored = origFields.stream().filter(c -> fieldMatch.test(c, updated)).findAny().orElse(null); - if (stored == null) { // New field added - continue; - } - updateFieldDescription(stored, updated); - updateFieldDataTypeDisplay(stored, updated); - updateFieldDisplayName(stored, updated); 
- updateTags( - stored.getFullyQualifiedName(), - EntityUtil.getFieldName(fieldName, updated.getName(), FIELD_TAGS), - stored.getTags(), - updated.getTags()); - - if (updated.getChildren() != null && stored.getChildren() != null) { - String childrenFieldName = EntityUtil.getFieldName(fieldName, updated.getName()); - updateSearchIndexFields(childrenFieldName, stored.getChildren(), updated.getChildren(), fieldMatch); - } - } - majorVersionChange = majorVersionChange || !deletedFields.isEmpty(); - } - - private void updateFieldDescription(SearchIndexField origField, SearchIndexField updatedField) { - if (operation.isPut() && !nullOrEmpty(origField.getDescription()) && updatedByBot()) { - // Revert the non-empty field description if being updated by a bot - updatedField.setDescription(origField.getDescription()); - return; - } - String field = getSearchIndexField(original, origField, FIELD_DESCRIPTION); - recordChange(field, origField.getDescription(), updatedField.getDescription()); - } - - private void updateFieldDisplayName(SearchIndexField origField, SearchIndexField updatedField) { - if (operation.isPut() && !nullOrEmpty(origField.getDescription()) && updatedByBot()) { - // Revert the non-empty field description if being updated by a bot - updatedField.setDisplayName(origField.getDisplayName()); - return; - } - String field = getSearchIndexField(original, origField, FIELD_DISPLAY_NAME); - recordChange(field, origField.getDisplayName(), updatedField.getDisplayName()); - } - - private void updateFieldDataTypeDisplay(SearchIndexField origField, SearchIndexField updatedField) { - if (operation.isPut() && !nullOrEmpty(origField.getDataTypeDisplay()) && updatedByBot()) { - // Revert the non-empty field dataTypeDisplay if being updated by a bot - updatedField.setDataTypeDisplay(origField.getDataTypeDisplay()); - return; - } - String field = getSearchIndexField(original, origField, FIELD_DATA_TYPE_DISPLAY); - recordChange(field, origField.getDataTypeDisplay(), 
updatedField.getDataTypeDisplay()); - } - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchServiceRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchServiceRepository.java deleted file mode 100644 index 2cbcba49127c..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SearchServiceRepository.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.openmetadata.service.jdbi3; - -import org.openmetadata.schema.entity.services.SearchService; -import org.openmetadata.schema.entity.services.ServiceType; -import org.openmetadata.schema.type.SearchConnection; -import org.openmetadata.service.Entity; -import org.openmetadata.service.resources.services.storage.StorageServiceResource; - -public class SearchServiceRepository extends ServiceEntityRepository { - public SearchServiceRepository(CollectionDAO dao) { - super( - StorageServiceResource.COLLECTION_PATH, - Entity.SEARCH_SERVICE, - dao, - dao.searchServiceDAO(), - SearchConnection.class, - ServiceType.SEARCH); - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java index 6131a28ad773..0cb688ac4f11 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java @@ -49,9 +49,9 @@ protected ServiceEntityRepository( CollectionDAO dao, EntityDAO entityDAO, Class serviceConnectionClass, - String updateFields, + String updatedFields, ServiceType serviceType) { - super(collectionPath, service, entityDAO.getEntityClass(), entityDAO, dao, "", updateFields); + super(collectionPath, service, entityDAO.getEntityClass(), entityDAO, dao, "", updatedFields); this.serviceConnectionClass = serviceConnectionClass; this.serviceType = 
serviceType; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java index 14d26d2306e4..34a749076cfb 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java @@ -145,12 +145,16 @@ public Table clearFields(Table table, Fields fields) { @Override public Table setInheritedFields(Table table, Fields fields) { - DatabaseSchema schema = Entity.getEntity(DATABASE_SCHEMA, table.getDatabaseSchema().getId(), "owner,domain", ALL); - inheritOwner(table, fields, schema); - inheritDomain(table, fields, schema); + setInheritedProperties(table, table.getDatabaseSchema().getId()); + return table; + } + + public void setInheritedProperties(Table table, UUID schemaId) { // If table does not have retention period, then inherit it from parent databaseSchema - return table.withRetentionPeriod( - table.getRetentionPeriod() == null ? 
schema.getRetentionPeriod() : table.getRetentionPeriod()); + if (table.getRetentionPeriod() == null) { + DatabaseSchema schema = Entity.getEntity(DATABASE_SCHEMA, schemaId, "", ALL); + table.withRetentionPeriod(schema.getRetentionPeriod()); + } } private void setDefaultFields(Table table) { @@ -353,12 +357,13 @@ private Column getColumnNameForProfiler(List columnList, ColumnProfile c public Table addTableProfileData(UUID tableId, CreateTableProfile createTableProfile) { // Validate the request content Table table = dao.findEntityById(tableId); - storeTimeSeries( - table.getFullyQualifiedName(), - TABLE_PROFILE_EXTENSION, - "tableProfile", - JsonUtils.pojoToJson(createTableProfile.getTableProfile()), - createTableProfile.getTableProfile().getTimestamp()); + daoCollection + .profilerDataTimeSeriesDao() + .insert( + table.getFullyQualifiedName(), + TABLE_PROFILE_EXTENSION, + "tableProfile", + JsonUtils.pojoToJson(createTableProfile.getTableProfile())); for (ColumnProfile columnProfile : createTableProfile.getColumnProfile()) { // Validate all the columns @@ -366,31 +371,38 @@ public Table addTableProfileData(UUID tableId, CreateTableProfile createTablePro if (column == null) { throw new IllegalArgumentException("Invalid column name " + columnProfile.getName()); } - storeTimeSeries( - column.getFullyQualifiedName(), - TABLE_COLUMN_PROFILE_EXTENSION, - "columnProfile", - JsonUtils.pojoToJson(columnProfile), - columnProfile.getTimestamp()); + daoCollection + .profilerDataTimeSeriesDao() + .insert( + column.getFullyQualifiedName(), + TABLE_COLUMN_PROFILE_EXTENSION, + "columnProfile", + JsonUtils.pojoToJson(columnProfile)); } List systemProfiles = createTableProfile.getSystemProfile(); if (systemProfiles != null && !systemProfiles.isEmpty()) { for (SystemProfile systemProfile : createTableProfile.getSystemProfile()) { + // system metrics timestamp is the one of the operation. 
We'll need to + // update the entry if it already exists in the database String storedSystemProfile = - getExtensionAtTimestampWithOperation( + daoCollection + .profilerDataTimeSeriesDao() + .getExtensionAtTimestampWithOperation( + table.getFullyQualifiedName(), + SYSTEM_PROFILE_EXTENSION, + systemProfile.getTimestamp(), + systemProfile.getOperation().value()); + daoCollection + .profilerDataTimeSeriesDao() + .storeTimeSeriesWithOperation( table.getFullyQualifiedName(), SYSTEM_PROFILE_EXTENSION, + "systemProfile", + JsonUtils.pojoToJson(systemProfile), systemProfile.getTimestamp(), - systemProfile.getOperation().value()); - storeTimeSeriesWithOperation( - table.getFullyQualifiedName(), - SYSTEM_PROFILE_EXTENSION, - "systemProfile", - JsonUtils.pojoToJson(systemProfile), - systemProfile.getTimestamp(), - systemProfile.getOperation().value(), - storedSystemProfile != null); + systemProfile.getOperation().value(), + storedSystemProfile != null); } } @@ -416,11 +428,13 @@ public void deleteTableProfile(String fqn, String entityType, Long timestamp) { throw new IllegalArgumentException("entityType must be table, column or system"); } - Object storedTableProfile = JsonUtils.readValue(getExtensionAtTimestamp(fqn, extension, timestamp), classMapper); + Object storedTableProfile = + JsonUtils.readValue( + daoCollection.profilerDataTimeSeriesDao().getExtensionAtTimestamp(fqn, extension, timestamp), classMapper); if (storedTableProfile == null) { throw new EntityNotFoundException(String.format("Failed to find table profile for %s at %s", fqn, timestamp)); } - deleteExtensionAtTimestamp(fqn, extension, timestamp); + daoCollection.profilerDataTimeSeriesDao().deleteAtTimestamp(fqn, extension, timestamp); } @Transaction @@ -428,7 +442,11 @@ public ResultList getTableProfiles(String fqn, Long startTs, Long List tableProfiles; tableProfiles = JsonUtils.readObjects( - getResultsFromAndToTimestamps(fqn, TABLE_PROFILE_EXTENSION, startTs, endTs), TableProfile.class); + daoCollection + 
.profilerDataTimeSeriesDao() + .listBetweenTimestampsByOrder( + fqn, TABLE_PROFILE_EXTENSION, startTs, endTs, EntityTimeSeriesDAO.OrderBy.DESC), + TableProfile.class); return new ResultList<>(tableProfiles, startTs.toString(), endTs.toString(), tableProfiles.size()); } @@ -437,7 +455,11 @@ public ResultList getColumnProfiles(String fqn, Long startTs, Lon List columnProfiles; columnProfiles = JsonUtils.readObjects( - getResultsFromAndToTimestamps(fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs), ColumnProfile.class); + daoCollection + .profilerDataTimeSeriesDao() + .listBetweenTimestampsByOrder( + fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs, EntityTimeSeriesDAO.OrderBy.DESC), + ColumnProfile.class); return new ResultList<>(columnProfiles, startTs.toString(), endTs.toString(), columnProfiles.size()); } @@ -446,7 +468,11 @@ public ResultList getSystemProfiles(String fqn, Long startTs, Lon List systemProfiles; systemProfiles = JsonUtils.readObjects( - getResultsFromAndToTimestamps(fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs), SystemProfile.class); + daoCollection + .profilerDataTimeSeriesDao() + .listBetweenTimestampsByOrder( + fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs, EntityTimeSeriesDAO.OrderBy.DESC), + SystemProfile.class); return new ResultList<>(systemProfiles, startTs.toString(), endTs.toString(), systemProfiles.size()); } @@ -454,7 +480,9 @@ private void setColumnProfile(List columnList) { for (Column column : columnList) { ColumnProfile columnProfile = JsonUtils.readValue( - getLatestExtensionFromTimeseries(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION), + daoCollection + .profilerDataTimeSeriesDao() + .getLatestExtension(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION), ColumnProfile.class); column.setProfile(columnProfile); if (column.getChildren() != null) { @@ -468,7 +496,9 @@ public Table getLatestTableProfile(String fqn, boolean authorizePII) { Table table = dao.findEntityByName(fqn, ALL); TableProfile 
tableProfile = JsonUtils.readValue( - getLatestExtensionFromTimeseries(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION), + daoCollection + .profilerDataTimeSeriesDao() + .getLatestExtension(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION), TableProfile.class); table.setProfile(tableProfile); setColumnProfile(table.getColumns()); @@ -602,13 +632,16 @@ private void addDerivedColumnTags(List columns) { @Override public void prepare(Table table) { - DatabaseSchema schema = Entity.getEntity(table.getDatabaseSchema(), "", ALL); + DatabaseSchema schema = Entity.getEntity(table.getDatabaseSchema(), "owner", ALL); table .withDatabaseSchema(schema.getEntityReference()) .withDatabase(schema.getDatabase()) .withService(schema.getService()) .withServiceType(schema.getServiceType()); + // Carry forward ownership from database schema + table.setOwner(table.getOwner() == null ? schema.getOwner() : table.getOwner()); + // Validate column tags addDerivedColumnTags(table.getColumns()); validateColumnTags(table.getColumns()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java index f49cfa944341..13e8795037b6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java @@ -31,7 +31,6 @@ import org.openmetadata.service.Entity; import org.openmetadata.service.exception.CatalogExceptionMessage; import org.openmetadata.service.jdbi3.CollectionDAO.EntityRelationshipRecord; -import org.openmetadata.service.jdbi3.EntityRepository.EntityUpdater; import org.openmetadata.service.resources.tags.TagResource; import org.openmetadata.service.util.EntityUtil.Fields; import org.openmetadata.service.util.FullyQualifiedName; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java index 3482a57e5083..bae7ed4519a8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java @@ -23,9 +23,7 @@ import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.DIVISION; import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.GROUP; import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.ORGANIZATION; -import static org.openmetadata.schema.type.Include.ALL; import static org.openmetadata.service.Entity.ADMIN_USER_NAME; -import static org.openmetadata.service.Entity.FIELD_DOMAIN; import static org.openmetadata.service.Entity.ORGANIZATION_NAME; import static org.openmetadata.service.Entity.POLICY; import static org.openmetadata.service.Entity.ROLE; @@ -136,15 +134,25 @@ public void storeEntity(Team team, boolean update) { List users = team.getUsers(); List defaultRoles = team.getDefaultRoles(); List parents = team.getParents(); + List children = team.getChildren(); List policies = team.getPolicies(); // Don't store users, defaultRoles, href as JSON. 
Build it on the fly based on relationships - team.withUsers(null).withDefaultRoles(null).withParents(null).withPolicies(null).withInheritedRoles(null); + team.withUsers(null) + .withDefaultRoles(null) + .withParents(null) + .withChildren(null) + .withPolicies(null) + .withInheritedRoles(null); store(team, update); // Restore the relationships - team.withUsers(users).withDefaultRoles(defaultRoles).withParents(parents).withPolicies(policies); + team.withUsers(users) + .withDefaultRoles(defaultRoles) + .withParents(parents) + .withChildren(children) + .withPolicies(policies); } @Override @@ -169,20 +177,6 @@ public void storeRelationships(Team team) { } } - @Override - public Team setInheritedFields(Team team, Fields fields) { - // If user does not have domain, then inherit it from parent Team - // TODO have default team when a user belongs to multiple teams - if (fields.contains(FIELD_DOMAIN) && team.getDomain() == null) { - List parents = !fields.contains(PARENTS_FIELD) ? getParents(team) : team.getParents(); - if (!nullOrEmpty(parents)) { - Team parent = Entity.getEntity(TEAM, parents.get(0).getId(), "domain", ALL); - team.withDomain(parent.getDomain()); - } - } - return team; - } - @Override public TeamUpdater getUpdater(Team original, Team updated, Operation operation) { return new TeamUpdater(original, updated, operation); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java index f52c4dec8e4c..bf1e72d7f206 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java @@ -81,7 +81,7 @@ public RestUtil.PatchResponse patchTestCaseResults( TestCaseResult original = JsonUtils.readValue( daoCollection - .entityExtensionTimeSeriesDao() + .dataQualityDataTimeSeriesDao() 
.getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), TestCaseResult.class); @@ -91,7 +91,7 @@ public RestUtil.PatchResponse patchTestCaseResults( updated.getTestCaseFailureStatus().setUpdatedBy(user); updated.getTestCaseFailureStatus().setUpdatedAt(System.currentTimeMillis()); daoCollection - .entityExtensionTimeSeriesDao() + .dataQualityDataTimeSeriesDao() .update(fqn, TESTCASE_RESULT_EXTENSION, JsonUtils.pojoToJson(updated), timestamp); change = ENTITY_UPDATED; } @@ -122,27 +122,27 @@ public void prepare(TestCase test) { validateTestParameters(test.getParameterValues(), testDefinition.getParameterDefinition()); } - private EntityReference getTestSuite(TestCase test) { + private EntityReference getTestSuite(TestCase test) throws EntityNotFoundException { // `testSuite` field returns the executable `testSuite` linked to that testCase List records = findFromRecords(test.getId(), entityType, Relationship.CONTAINS, TEST_SUITE); - ensureSingleRelationship(entityType, test.getId(), records, Relationship.CONTAINS.value(), true); for (CollectionDAO.EntityRelationshipRecord testSuiteId : records) { TestSuite testSuite = Entity.getEntity(TEST_SUITE, testSuiteId.getId(), "", Include.ALL); if (Boolean.TRUE.equals(testSuite.getExecutable())) { return testSuite.getEntityReference(); } } - return null; + throw new EntityNotFoundException( + String.format("Error occurred when retrieving executable test suite for testCase %s. 
", test.getName()) + + "No executable test suite was found."); } private List getTestSuites(TestCase test) { // `testSuites` field returns all the `testSuite` (executable and logical) linked to that testCase List records = findFromRecords(test.getId(), entityType, Relationship.CONTAINS, TEST_SUITE); - ensureSingleRelationship(entityType, test.getId(), records, Relationship.CONTAINS.value(), true); return records.stream() - .map(testSuiteId -> Entity.getEntity(TEST_SUITE, testSuiteId.getId(), "", Include.ALL)) + .map(testSuiteId -> Entity.getEntity(TEST_SUITE, testSuiteId.getId(), "", Include.ALL, false)) .collect(Collectors.toList()); } @@ -202,12 +202,13 @@ public RestUtil.PutResponse addTestCaseResult( // Validate the request content TestCase testCase = dao.findEntityByName(fqn); - storeTimeSeries( - testCase.getFullyQualifiedName(), - TESTCASE_RESULT_EXTENSION, - TEST_CASE_RESULT_FIELD, - JsonUtils.pojoToJson(testCaseResult), - testCaseResult.getTimestamp()); + daoCollection + .dataQualityDataTimeSeriesDao() + .insert( + testCase.getFullyQualifiedName(), + TESTCASE_RESULT_EXTENSION, + TEST_CASE_RESULT_FIELD, + JsonUtils.pojoToJson(testCaseResult)); setFieldsInternal(testCase, new EntityUtil.Fields(allowedFields, TEST_SUITE_FIELD)); setTestSuiteSummary(testCase, testCaseResult.getTimestamp(), testCaseResult.getTestCaseStatus()); @@ -223,10 +224,14 @@ public RestUtil.PutResponse deleteTestCaseResult(String updatedBy, String fqn // Validate the request content TestCase testCase = dao.findEntityByName(fqn); TestCaseResult storedTestCaseResult = - JsonUtils.readValue(getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), TestCaseResult.class); + JsonUtils.readValue( + daoCollection + .dataQualityDataTimeSeriesDao() + .getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), + TestCaseResult.class); if (storedTestCaseResult != null) { - deleteExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp); + 
daoCollection.dataQualityDataTimeSeriesDao().deleteAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp); testCase.setTestCaseResult(storedTestCaseResult); ChangeDescription change = deleteTestCaseChangeDescription(testCase.getVersion(), storedTestCaseResult); ChangeEvent changeEvent = getChangeEvent(updatedBy, testCase, change, entityType, testCase.getVersion()); @@ -236,27 +241,38 @@ public RestUtil.PutResponse deleteTestCaseResult(String updatedBy, String fqn String.format("Failed to find testCase result for %s at %s", testCase.getName(), timestamp)); } + private ResultSummary getResultSummary(TestCase testCase, Long timestamp, TestCaseStatus testCaseStatus) { + return new ResultSummary() + .withTestCaseName(testCase.getFullyQualifiedName()) + .withStatus(testCaseStatus) + .withTimestamp(timestamp); + } + private void setTestSuiteSummary(TestCase testCase, Long timestamp, TestCaseStatus testCaseStatus) { - ResultSummary resultSummary = - new ResultSummary() - .withTestCaseName(testCase.getFullyQualifiedName()) - .withStatus(testCaseStatus) - .withTimestamp(timestamp); - EntityReference ref = testCase.getTestSuite(); - TestSuite testSuite = Entity.getEntity(ref.getType(), ref.getId(), "", Include.ALL, false); - List resultSummaries = listOrEmpty(testSuite.getTestCaseResultSummary()); - if (resultSummaries.isEmpty()) { - resultSummaries.add(resultSummary); - } else { - // We'll remove the existing summary for this test case and add the new one - resultSummaries.removeIf(summary -> summary.getTestCaseName().equals(resultSummary.getTestCaseName())); - resultSummaries.add(resultSummary); - } + ResultSummary resultSummary = getResultSummary(testCase, timestamp, testCaseStatus); + + // list all executable and logical test suite linked to the test case + List testSuites = getTestSuites(testCase); + + // update the summary for each test suite + for (TestSuite testSuite : testSuites) { + testSuite.setSummary(null); // we don't want to store the summary in the database + 
List resultSummaries = listOrEmpty(testSuite.getTestCaseResultSummary()); + if (resultSummaries.isEmpty()) { + resultSummaries.add(resultSummary); + } else { + // We'll remove the existing summary for this test case and add the new one + resultSummaries.removeIf(summary -> summary.getTestCaseName().equals(resultSummary.getTestCaseName())); + resultSummaries.add(resultSummary); + } - testSuite.setTestCaseResultSummary(resultSummaries); - daoCollection - .testSuiteDAO() - .update(testSuite.getId(), testSuite.getFullyQualifiedName(), JsonUtils.pojoToJson(testSuite)); + // set test case result summary for the test suite + // and update it in the database + testSuite.setTestCaseResultSummary(resultSummaries); + daoCollection + .testSuiteDAO() + .update(testSuite.getId(), testSuite.getFullyQualifiedName(), JsonUtils.pojoToJson(testSuite)); + } } private ChangeDescription addTestCaseChangeDescription(Double version, Object newValue) { @@ -290,7 +306,9 @@ private ChangeEvent getChangeEvent( private TestCaseResult getTestCaseResult(TestCase testCase) { return JsonUtils.readValue( - getLatestExtensionFromTimeseries(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION), + daoCollection + .dataQualityDataTimeSeriesDao() + .getLatestExtension(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION), TestCaseResult.class); } @@ -298,7 +316,11 @@ public ResultList getTestCaseResults(String fqn, Long startTs, L List testCaseResults; testCaseResults = JsonUtils.readObjects( - getResultsFromAndToTimestamps(fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs), TestCaseResult.class); + daoCollection + .dataQualityDataTimeSeriesDao() + .listBetweenTimestampsByOrder( + fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs, EntityTimeSeriesDAO.OrderBy.DESC), + TestCaseResult.class); return new ResultList<>(testCaseResults, String.valueOf(startTs), String.valueOf(endTs), testCaseResults.size()); } @@ -319,8 +341,21 @@ public void isTestSuiteExecutable(String testSuiteFqn) { public 
RestUtil.PutResponse addTestCasesToLogicalTestSuite(TestSuite testSuite, List testCaseIds) { bulkAddToRelationship(testSuite.getId(), testCaseIds, TEST_SUITE, TEST_CASE, Relationship.CONTAINS); List testCasesEntityReferences = new ArrayList<>(); + List resultSummaries = listOrEmpty(testSuite.getTestCaseResultSummary()); for (UUID testCaseId : testCaseIds) { TestCase testCase = Entity.getEntity(Entity.TEST_CASE, testCaseId, "", Include.ALL); + // Get the latest result to set the testSuite summary field + String result = + daoCollection + .dataQualityDataTimeSeriesDao() + .getLatestExtension(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION); + if (result != null) { + TestCaseResult testCaseResult = JsonUtils.readValue(result, TestCaseResult.class); + ResultSummary resultSummary = + getResultSummary(testCase, testCaseResult.getTimestamp(), testCaseResult.getTestCaseStatus()); + resultSummaries.removeIf(summary -> summary.getTestCaseName().equals(resultSummary.getTestCaseName())); + resultSummaries.add(resultSummary); + } testCasesEntityReferences.add( new EntityReference() .withId(testCase.getId()) @@ -331,6 +366,14 @@ public RestUtil.PutResponse addTestCasesToLogicalTestSuite(TestSuite .withHref(testCase.getHref()) .withDeleted(testCase.getDeleted())); } + // set test case result summary for logical test suite + // and update it in the database + testSuite.setTestCaseResultSummary(resultSummaries); + testSuite.setSummary(null); // we don't want to store the summary in the database + daoCollection + .testSuiteDAO() + .update(testSuite.getId(), testSuite.getFullyQualifiedName(), JsonUtils.pojoToJson(testSuite)); + testSuite.setTests(testCasesEntityReferences); return new RestUtil.PutResponse<>(Response.Status.OK, testSuite, LOGICAL_TEST_CASES_ADDED); } @@ -338,16 +381,42 @@ public RestUtil.PutResponse addTestCasesToLogicalTestSuite(TestSuite public RestUtil.DeleteResponse deleteTestCaseFromLogicalTestSuite(UUID testSuiteId, UUID testCaseId) { TestCase 
testCase = Entity.getEntity(Entity.TEST_CASE, testCaseId, null, null); deleteRelationship(testSuiteId, TEST_SUITE, testCaseId, TEST_CASE, Relationship.CONTAINS); + // remove test case from logical test suite summary and update test suite + removeTestCaseFromTestSuiteResultSummary(testSuiteId, testCase.getFullyQualifiedName()); EntityReference entityReference = Entity.getEntityReferenceById(TEST_SUITE, testSuiteId, Include.ALL); testCase.setTestSuite(entityReference); return new RestUtil.DeleteResponse<>(testCase, RestUtil.ENTITY_DELETED); } + /** Remove test case from test suite summary and update test suite */ + private void removeTestCaseFromTestSuiteResultSummary(UUID testSuiteId, String testCaseFqn) { + TestSuite testSuite = Entity.getEntity(TEST_SUITE, testSuiteId, "*", Include.ALL, false); + testSuite.setSummary(null); // we don't want to store the summary in the database + List resultSummaries = testSuite.getTestCaseResultSummary(); + resultSummaries.removeIf(summary -> summary.getTestCaseName().equals(testCaseFqn)); + testSuite.setTestCaseResultSummary(resultSummaries); + daoCollection + .testSuiteDAO() + .update(testSuite.getId(), testSuite.getFullyQualifiedName(), JsonUtils.pojoToJson(testSuite)); + } + @Override public EntityUpdater getUpdater(TestCase original, TestCase updated, Operation operation) { return new TestUpdater(original, updated, operation); } + @Override + protected void preDelete(TestCase entity) { + // delete test case from test suite summary when test case is deleted + // from an executable test suite + List testSuites = getTestSuites(entity); + if (!testSuites.isEmpty()) { + for (TestSuite testSuite : testSuites) { + removeTestCaseFromTestSuiteResultSummary(testSuite.getId(), entity.getFullyQualifiedName()); + } + } + } + public class TestUpdater extends EntityUpdater { public TestUpdater(TestCase original, TestCase updated, Operation operation) { super(original, updated, operation); diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestSuiteRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestSuiteRepository.java index fcc8f0222a24..aff85c0ee07f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestSuiteRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestSuiteRepository.java @@ -58,6 +58,7 @@ public TestSuite clearFields(TestSuite entity, EntityUtil.Fields fields) { } private TestSummary buildTestSummary(HashMap testCaseSummary, int total) { + return new TestSummary() .withAborted(testCaseSummary.getOrDefault(TestCaseStatus.Aborted.toString(), 0)) .withFailed(testCaseSummary.getOrDefault(TestCaseStatus.Failed.toString(), 0)) @@ -115,13 +116,10 @@ public TestSummary getTestSummary(UUID testSuiteId) { List testSuites = listAll(EntityUtil.Fields.EMPTY_FIELDS, filter); testSummary = getTestCasesExecutionSummary(testSuites); } else { - TestSuite testSuite = find(testSuiteId, Include.ALL); - if (!Boolean.TRUE.equals(testSuite.getExecutable())) { - throw new IllegalArgumentException("Test Suite is not executable. 
Please provide an executable test suite."); - } + // don't want to get it from the cache as test results summary may be stale + TestSuite testSuite = Entity.getEntity(TEST_SUITE, testSuiteId, "", Include.ALL, false); testSummary = getTestCasesExecutionSummary(testSuite); } - return testSummary; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java index 1bf3bda1d6bf..2a62e8260543 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java @@ -19,7 +19,6 @@ import static org.openmetadata.service.Entity.FIELD_DESCRIPTION; import static org.openmetadata.service.Entity.FIELD_DISPLAY_NAME; import static org.openmetadata.service.Entity.FIELD_TAGS; -import static org.openmetadata.service.Entity.MESSAGING_SERVICE; import static org.openmetadata.service.util.EntityUtil.getSchemaField; import java.util.ArrayList; @@ -106,13 +105,6 @@ public void storeRelationships(Topic topic) { setService(topic, topic.getService()); } - @Override - public Topic setInheritedFields(Topic topic, Fields fields) { - // If topic does not have domain, then inherit it from parent messaging service - MessagingService service = Entity.getEntity(MESSAGING_SERVICE, topic.getService().getId(), "domain", ALL); - return inheritDomain(topic, fields, service); - } - @Override public Topic setFields(Topic topic, Fields fields) { topic.setService(getContainer(topic.getId())); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java index 43a2799e8f9b..36c8648635cb 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java +++ 
b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java @@ -18,7 +18,7 @@ import static org.openmetadata.csv.CsvUtil.addEntityReferences; import static org.openmetadata.csv.CsvUtil.addField; import static org.openmetadata.schema.type.Include.ALL; -import static org.openmetadata.service.Entity.FIELD_DOMAIN; +import static org.openmetadata.schema.utils.EntityInterfaceUtil.quoteName; import static org.openmetadata.service.Entity.ROLE; import static org.openmetadata.service.Entity.TEAM; import static org.openmetadata.service.Entity.USER; @@ -78,6 +78,13 @@ public UserRepository(CollectionDAO dao) { this.quoteFqn = true; } + // with the introduction of fqnhash we added case sensitivity to all of the entities + // however usernames , emails cannot be case sensitive + @Override + public void setFullyQualifiedName(User user) { + user.setFullyQualifiedName(quoteName(user.getName().toLowerCase())); + } + public final Fields getFieldsWithUserAuth(String fields) { Set tempFields = getAllowedFieldsCopy(); if (fields != null && fields.equals("*")) { @@ -151,20 +158,6 @@ public void storeRelationships(User user) { user.setInheritedRoles(getInheritedRoles(user)); } - @Override - public User setInheritedFields(User user, Fields fields) { - // If user does not have domain, then inherit it from parent Team - // TODO have default team when a user belongs to multiple teams - if (fields.contains(FIELD_DOMAIN) && user.getDomain() == null) { - List teams = !fields.contains("teams") ? 
getTeams(user) : user.getTeams(); - if (!nullOrEmpty(teams)) { - Team team = Entity.getEntity(TEAM, teams.get(0).getId(), "domain", ALL); - user.withDomain(team.getDomain()); - } - } - return user; - } - @Override public UserUpdater getUpdater(User original, User updated, Operation operation) { return new UserUpdater(original, updated, operation); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java index 4b1a6450f4ea..94e5d7302fc8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java @@ -1,6 +1,7 @@ package org.openmetadata.service.migration.mysql.v112; -import static org.openmetadata.service.migration.postgres.v112.Migration.unquoteTestSuiteMigration; +import static org.openmetadata.service.migration.utils.V112.MigrationUtil.fixExecutableTestSuiteFQN; +import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail; import lombok.SneakyThrows; import org.jdbi.v3.core.Handle; @@ -10,7 +11,6 @@ public class Migration extends MigrationProcessImpl { private CollectionDAO collectionDAO; - private Handle handle; public Migration(MigrationFile migrationFile) { super(migrationFile); @@ -19,7 +19,6 @@ public Migration(MigrationFile migrationFile) { @Override public void initialize(Handle handle) { super.initialize(handle); - this.handle = handle; this.collectionDAO = handle.attach(CollectionDAO.class); } @@ -27,6 +26,8 @@ public void initialize(Handle handle) { @SneakyThrows public void runDataMigration() { // Run Data Migration to Remove the quoted Fqn` - unquoteTestSuiteMigration(collectionDAO); + fixExecutableTestSuiteFQN(collectionDAO); + // Run UserName Migration to make lowercase + lowerCaseUserNameAndEmail(collectionDAO); } 
} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java new file mode 100644 index 000000000000..eab684f2b844 --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java @@ -0,0 +1,31 @@ +package org.openmetadata.service.migration.mysql.v114; + +import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail; +import static org.openmetadata.service.migration.utils.V114.MigrationUtil.fixTestSuites; + +import lombok.SneakyThrows; +import org.jdbi.v3.core.Handle; +import org.openmetadata.service.jdbi3.CollectionDAO; +import org.openmetadata.service.migration.api.MigrationProcessImpl; +import org.openmetadata.service.migration.utils.MigrationFile; + +public class Migration extends MigrationProcessImpl { + private CollectionDAO collectionDAO; + + public Migration(MigrationFile migrationFile) { + super(migrationFile); + } + + @Override + public void initialize(Handle handle) { + super.initialize(handle); + this.collectionDAO = handle.attach(CollectionDAO.class); + } + + @Override + @SneakyThrows + public void runDataMigration() { + fixTestSuites(collectionDAO); + lowerCaseUserNameAndEmail(collectionDAO); + } +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java index c30c89930daa..478ff8b6d307 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java @@ -1,18 +1,14 @@ package org.openmetadata.service.migration.postgres.v112; -import java.util.List; -import java.util.Set; +import static 
org.openmetadata.service.migration.utils.V112.MigrationUtil.fixExecutableTestSuiteFQN; +import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail; + import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.jdbi.v3.core.Handle; -import org.openmetadata.schema.tests.TestSuite; -import org.openmetadata.schema.type.Include; import org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.jdbi3.ListFilter; -import org.openmetadata.service.jdbi3.TestSuiteRepository; import org.openmetadata.service.migration.api.MigrationProcessImpl; import org.openmetadata.service.migration.utils.MigrationFile; -import org.openmetadata.service.util.EntityUtil; @Slf4j public class Migration extends MigrationProcessImpl { @@ -34,25 +30,8 @@ public void initialize(Handle handle) { @SneakyThrows public void runDataMigration() { // Run Data Migration to Remove the quoted Fqn` - unquoteTestSuiteMigration(collectionDAO); - } - - public static void unquoteTestSuiteMigration(CollectionDAO collectionDAO) { - TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); - List testSuites = - testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL)); - for (TestSuite suite : testSuites) { - if (Boolean.TRUE.equals(suite.getExecutable())) { - String fqn = suite.getFullyQualifiedName(); - String updatedFqn = fqn; - if (fqn.startsWith("\"") && fqn.endsWith("\"")) { - updatedFqn = fqn.substring(1, fqn.length() - 1); - } - // update the name and fqn - suite.setName(updatedFqn); - suite.setFullyQualifiedName(updatedFqn); - collectionDAO.testSuiteDAO().update(suite); - } - } + fixExecutableTestSuiteFQN(collectionDAO); + // Run UserName Migration to make lowercase + lowerCaseUserNameAndEmail(collectionDAO); } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java new file mode 100644 index 000000000000..3f0cd4b69466 --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java @@ -0,0 +1,35 @@ +package org.openmetadata.service.migration.postgres.v114; + +import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail; +import static org.openmetadata.service.migration.utils.V114.MigrationUtil.fixTestSuites; + +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.jdbi.v3.core.Handle; +import org.openmetadata.service.jdbi3.CollectionDAO; +import org.openmetadata.service.migration.api.MigrationProcessImpl; +import org.openmetadata.service.migration.utils.MigrationFile; + +@Slf4j +public class Migration extends MigrationProcessImpl { + private CollectionDAO collectionDAO; + private Handle handle; + + public Migration(MigrationFile migrationFile) { + super(migrationFile); + } + + @Override + public void initialize(Handle handle) { + super.initialize(handle); + this.handle = handle; + this.collectionDAO = handle.attach(CollectionDAO.class); + } + + @Override + @SneakyThrows + public void runDataMigration() { + fixTestSuites(collectionDAO); + lowerCaseUserNameAndEmail(collectionDAO); + } +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java new file mode 100644 index 000000000000..03901456efcc --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java @@ -0,0 +1,52 @@ +package org.openmetadata.service.migration.utils.V112; + +import java.util.List; +import java.util.Set; +import lombok.extern.slf4j.Slf4j; +import org.openmetadata.schema.entity.teams.User; +import 
org.openmetadata.schema.tests.TestSuite; +import org.openmetadata.schema.type.Include; +import org.openmetadata.schema.utils.EntityInterfaceUtil; +import org.openmetadata.service.jdbi3.CollectionDAO; +import org.openmetadata.service.jdbi3.ListFilter; +import org.openmetadata.service.jdbi3.TestSuiteRepository; +import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.JsonUtils; + +@Slf4j +public class MigrationUtil { + private MigrationUtil() {} + + public static void fixExecutableTestSuiteFQN(CollectionDAO collectionDAO) { + TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); + List testSuites = + testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL)); + for (TestSuite suite : testSuites) { + if (Boolean.TRUE.equals(suite.getExecutable()) && suite.getExecutableEntityReference() != null) { + String tableFQN = suite.getExecutableEntityReference().getFullyQualifiedName(); + String suiteFQN = tableFQN + ".testSuite"; + suite.setName(suiteFQN); + suite.setFullyQualifiedName(suiteFQN); + collectionDAO.testSuiteDAO().update(suite); + } + } + } + + public static void lowerCaseUserNameAndEmail(CollectionDAO daoCollection) { + LOG.debug("Starting Migration UserName and Email to Lowercase"); + int total = daoCollection.userDAO().listTotalCount(); + int offset = 0; + int limit = 200; + while (offset < total) { + List userEntities = daoCollection.userDAO().listAfterWithOffset(limit, offset); + for (String json : userEntities) { + User userEntity = JsonUtils.readValue(json, User.class); + userEntity.setFullyQualifiedName( + EntityInterfaceUtil.quoteName(userEntity.getFullyQualifiedName().toLowerCase())); + daoCollection.userDAO().update(userEntity); + } + offset = offset + limit; + } + LOG.debug("Completed Migrating UserName and Email to Lowercase"); + } +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java new file mode 100644 index 000000000000..8292f4a317fa --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java @@ -0,0 +1,110 @@ +package org.openmetadata.service.migration.utils.V114; + +import static org.openmetadata.service.Entity.*; +import static org.openmetadata.service.migration.utils.v110.MigrationUtil.groupTestCasesByTable; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.openmetadata.schema.tests.TestCase; +import org.openmetadata.schema.tests.TestSuite; +import org.openmetadata.schema.type.Include; +import org.openmetadata.schema.type.Relationship; +import org.openmetadata.service.exception.EntityNotFoundException; +import org.openmetadata.service.jdbi3.CollectionDAO; +import org.openmetadata.service.jdbi3.ListFilter; +import org.openmetadata.service.jdbi3.TestSuiteRepository; +import org.openmetadata.service.util.EntityUtil; + +public class MigrationUtil { + private MigrationUtil() { + /* Cannot create object util class*/ + } + + /** + * Step 1: re-run the fix for FQN to catch any issues from previous release where we were quoting the FQN Step 2: + * Group all the testCases with the table. We will create a Map with Table FQN as the key and all the test cases + * belonging to that Table Step 3: Iterate through the Map keySet, which is table names. For each table name we create + * a executable test suite FQN Step 4: Fetch executable testSuite using step 3 FQN Step 5: Iterate through the test + * case list associated with the current table FQN in the loop Step 6: for each test case fetch TestSuite + * relationships Step 7: Iterate through the testSuite relation to check if the executableTestSuite FQN matches. 
If it + * matches there exists a relation from testCase to a executable Test suite Step 8: If we can't find a match, create a + * relationship. + * + * @param collectionDAO + */ + public static void fixTestSuites(CollectionDAO collectionDAO) { + // Fix any FQN issues for executable TestSuite + TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); + List testSuites = + testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL)); + for (TestSuite suite : testSuites) { + if (suite.getExecutableEntityReference() != null + && (!suite.getExecutable() || !suite.getFullyQualifiedName().contains("testSuite"))) { + String tableFQN = suite.getExecutableEntityReference().getFullyQualifiedName(); + String suiteFQN = tableFQN + ".testSuite"; + suite.setName(suiteFQN); + suite.setFullyQualifiedName(suiteFQN); + suite.setExecutable(true); + collectionDAO.testSuiteDAO().update(suite); + } + } + // Let's iterate through the test cases and make sure there exists a relationship between testcases and its native + // TestSuite + Map> testCasesGroupByTable = groupTestCasesByTable(collectionDAO); + for (String tableFQN : testCasesGroupByTable.keySet()) { + List testCases = testCasesGroupByTable.get(tableFQN); + String executableTestSuiteFQN = tableFQN + ".testSuite"; + TestSuite executableTestSuite = + testSuiteRepository.getDao().findEntityByName(executableTestSuiteFQN, "fqnHash", Include.ALL); + for (TestCase testCase : testCases) { + // we are setting mustHaveRelationship to "false" to not throw any error. 
+ List existingRelations = + testSuiteRepository.findFromRecords(testCase.getId(), TEST_CASE, Relationship.CONTAINS, TEST_SUITE); + boolean relationWithExecutableTestSuiteExists = false; + if (existingRelations != null) { + for (CollectionDAO.EntityRelationshipRecord existingTestSuiteRel : existingRelations) { + try { + TestSuite existingTestSuite = testSuiteRepository.getDao().findEntityById(existingTestSuiteRel.getId()); + if (existingTestSuite.getExecutable() + && existingTestSuite.getFullyQualifiedName().equals(executableTestSuiteFQN)) { + // There is a native test suite associated with this testCase. + relationWithExecutableTestSuiteExists = true; + } + } catch (EntityNotFoundException ex) { + // if testsuite cannot be retrieved but the relation exists, then this is orphaned relation, we will + // delete the relation + testSuiteRepository.deleteRelationship( + existingTestSuiteRel.getId(), TEST_SUITE, testCase.getId(), TEST_CASE, Relationship.CONTAINS); + } + } + } + // if we can't find any executable testSuite relationship add one + if (!relationWithExecutableTestSuiteExists) { + testSuiteRepository.addRelationship( + executableTestSuite.getId(), testCase.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS); + } + } + + // check from table -> nativeTestSuite there should only one relation + List testSuiteRels = + testSuiteRepository.findToRecords( + executableTestSuite.getExecutableEntityReference().getId(), TABLE, Relationship.CONTAINS, TEST_SUITE); + for (CollectionDAO.EntityRelationshipRecord testSuiteRel : testSuiteRels) { + try { + TestSuite existingTestSuite = testSuiteRepository.getDao().findEntityById(testSuiteRel.getId()); + } catch (EntityNotFoundException ex) { + // if testsuite cannot be retrieved but the relation exists, then this is orphaned relation, we will + // delete the relation + testSuiteRepository.deleteRelationship( + executableTestSuite.getExecutableEntityReference().getId(), + TABLE, + testSuiteRel.getId(), + TEST_SUITE, + 
Relationship.CONTAINS); + } + } + } + } +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java index 46a1485a2e93..1bcc4130f021 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java @@ -4,11 +4,9 @@ import static org.openmetadata.service.Entity.INGESTION_PIPELINE; import static org.openmetadata.service.Entity.TEST_CASE; import static org.openmetadata.service.Entity.TEST_SUITE; +import static org.openmetadata.service.util.EntityUtil.hash; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.UUID; +import java.util.*; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; @@ -61,7 +59,6 @@ import org.openmetadata.schema.type.Relationship; import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; -import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.jdbi3.EntityDAO; import org.openmetadata.service.jdbi3.IngestionPipelineRepository; @@ -72,7 +69,6 @@ import org.openmetadata.service.jdbi3.TestSuiteRepository; import org.openmetadata.service.resources.databases.DatasourceConfig; import org.openmetadata.service.resources.feeds.MessageParser; -import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.EntityUtil.Fields; import org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; @@ -157,55 +153,60 @@ public static void readAndProcessEntity( } while (true) { // Read from Database - List jsons = dao.migrationListAfterWithOffset(limitParam, nameHashColumn); 
- LOG.debug("[{}]Read a Batch of Size: {}", dao.getTableName(), jsons.size()); - if (jsons.isEmpty()) { - break; - } - // Process Update - for (String json : jsons) { - // Update the Statements to Database - T entity = JsonUtils.readValue(json, clazz); - try { - String hash; - if (entity.getFullyQualifiedName() != null) { - hash = - withName - ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getFullyQualifiedName())) - : FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); - } else { - LOG.info( - "Failed in creating FQN Hash for Entity Name : {}, since the FQN is null. Auto Correcting.", - entity.getName()); - hash = - withName - ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getName())) - : FullyQualifiedName.buildHash(entity.getName()); - entity.setFullyQualifiedName(entity.getName()); - dao.update(entity.getId(), entity.getName(), JsonUtils.pojoToJson(entity)); - } - int result = - handle - .createUpdate(updateSql) - .bind("nameHashColumnValue", hash) - .bind("id", entity.getId().toString()) - .execute(); - if (result <= 0) { - LOG.error("No Rows Affected for Updating Hash with Entity Name : {}", entity.getFullyQualifiedName()); + try { + List jsons = dao.migrationListAfterWithOffset(limitParam, nameHashColumn); + LOG.debug("[{}]Read a Batch of Size: {}", dao.getTableName(), jsons.size()); + if (jsons.isEmpty()) { + break; + } + // Process Update + for (String json : jsons) { + // Update the Statements to Database + T entity = JsonUtils.readValue(json, clazz); + try { + String hash; + if (entity.getFullyQualifiedName() != null) { + hash = + withName + ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getFullyQualifiedName())) + : FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); + } else { + LOG.info( + "Failed in creating FQN Hash for Entity Name : {}, since the FQN is null. Auto Correcting.", + entity.getName()); + hash = + withName + ? 
FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getName())) + : FullyQualifiedName.buildHash(entity.getName()); + entity.setFullyQualifiedName(entity.getName()); + dao.update(entity.getId(), entity.getName(), JsonUtils.pojoToJson(entity)); + } + int result = + handle + .createUpdate(updateSql) + .bind("nameHashColumnValue", hash) + .bind("id", entity.getId().toString()) + .execute(); + if (result <= 0) { + LOG.error("No Rows Affected for Updating Hash with Entity Name : {}", entity.getFullyQualifiedName()); + } + } catch (Exception ex) { + LOG.error("Failed in creating FQN Hash for Entity Name : {}", entity.getFullyQualifiedName(), ex); } - } catch (Exception ex) { - LOG.error("Failed in creating FQN Hash for Entity Name : {}", entity.getFullyQualifiedName(), ex); } + } catch (Exception ex) { + LOG.warn("Failed to list the entities, they might already migrated ", ex); + break; } + LOG.debug("End Migration for table : {}", dao.getTableName()); } - LOG.debug("End Migration for table : {}", dao.getTableName()); } public static MigrationDAO.ServerMigrationSQLTable buildServerMigrationTable(String version, String statement) { MigrationDAO.ServerMigrationSQLTable result = new MigrationDAO.ServerMigrationSQLTable(); result.setVersion(String.valueOf(version)); result.setSqlStatement(statement); - result.setCheckSum(EntityUtil.hash(statement)); + result.setCheckSum(hash(statement)); return result; } @@ -406,11 +407,13 @@ public static void performSqlExecutionAndUpdate( if (!nullOrEmpty(queryList)) { for (String sql : queryList) { try { - handle.execute(sql); - migrationDAO.upsertServerMigrationSQL(version, sql, EntityUtil.hash(sql)); + String previouslyRanSql = migrationDAO.getSqlQuery(hash(sql), version); + if ((previouslyRanSql == null || previouslyRanSql.isEmpty())) { + handle.execute(sql); + migrationDAO.upsertServerMigrationSQL(version, sql, hash(sql)); + } } catch (Exception e) { LOG.error(String.format("Failed to run sql %s due to %s", sql, e)); - 
throw e; } } } @@ -450,118 +453,101 @@ public static TestSuite copy(TestSuite entity, CreateEntity request, String upda return entity; } + /** + * Test Suites Migration in 1.0.x -> 1.1.4 1. This is the first time users are migrating from User created TestSuite + * to System created native TestSuite Per Table 2. Our Goal with this migration is to list all the test cases and + * create .testSuite with executable set to true and associate all of the respective test cases with new native test + * suite. + * + * @param collectionDAO + */ @SneakyThrows public static void testSuitesMigration(CollectionDAO collectionDAO) { - IngestionPipelineRepository ingestionPipelineRepository = new IngestionPipelineRepository(collectionDAO); - TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); - TestCaseRepository testCaseRepository = new TestCaseRepository(collectionDAO); - List testCases = testCaseRepository.listAll(new Fields(Set.of("id")), new ListFilter(Include.ALL)); - - for (TestCase test : testCases) { + // Update existing test suites as logical test suites and delete any ingestion pipeline associated with the existing + // test suite + migrateExistingTestSuitesToLogical(collectionDAO); - // Create New Executable Test Suites - MessageParser.EntityLink entityLink = MessageParser.EntityLink.parse(test.getEntityLink()); - // Create new Logical Test Suite - String testSuiteFqn = entityLink.getEntityFQN() + ".testSuite"; - TestSuite stored; - try { - // If entity is found by Hash it is already migrated - testSuiteRepository - .getDao() - .findEntityByName(EntityInterfaceUtil.quoteName(testSuiteFqn), "nameHash", Include.ALL); - } catch (EntityNotFoundException entityNotFoundException) { + // create native test suites + TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); + Map> testCasesByTable = groupTestCasesByTable(collectionDAO); + for (String tableFQN : testCasesByTable.keySet()) { + String nativeTestSuiteFqn = tableFQN 
+ ".testSuite"; + List testCases = testCasesByTable.get(tableFQN); + if (testCases != null && !testCases.isEmpty()) { + MessageParser.EntityLink entityLink = + MessageParser.EntityLink.parse(testCases.stream().findFirst().get().getEntityLink()); + TestSuite newExecutableTestSuite = + getTestSuite( + collectionDAO, + new CreateTestSuite() + .withName(FullyQualifiedName.buildHash(nativeTestSuiteFqn)) + .withDisplayName(nativeTestSuiteFqn) + .withExecutableEntityReference(entityLink.getEntityFQN()), + "ingestion-bot") + .withExecutable(true) + .withFullyQualifiedName(nativeTestSuiteFqn); + testSuiteRepository.prepareInternal(newExecutableTestSuite); try { - // Check if the test Suite Exists, this brings the data on nameHash basis - stored = - testSuiteRepository - .getDao() - .findEntityByName(EntityInterfaceUtil.quoteName(testSuiteFqn), "nameHash", Include.ALL); + testSuiteRepository + .getDao() + .insert("nameHash", newExecutableTestSuite, newExecutableTestSuite.getFullyQualifiedName()); + } catch (Exception ex) { + LOG.warn("TestSuite %s exists".format(nativeTestSuiteFqn)); + } + // add relationship between executable TestSuite with Table + testSuiteRepository.addRelationship( + newExecutableTestSuite.getExecutableEntityReference().getId(), + newExecutableTestSuite.getId(), + Entity.TABLE, + TEST_SUITE, + Relationship.CONTAINS); + + // add relationship between all the testCases that are created against a table with native test suite. 
+ for (TestCase testCase : testCases) { testSuiteRepository.addRelationship( - stored.getId(), test.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS); - stored.setExecutable(true); - stored.setName(FullyQualifiedName.buildHash(testSuiteFqn)); - // the update() method here internally calls FullyQualifiedName.buildHash so not adding it - stored.setFullyQualifiedName(EntityInterfaceUtil.quoteName(FullyQualifiedName.buildHash(testSuiteFqn))); - stored.setDisplayName(testSuiteFqn); - testSuiteRepository.getDao().update(stored); - } catch (EntityNotFoundException ex) { - try { - TestSuite newExecutableTestSuite = - getTestSuite( - collectionDAO, - new CreateTestSuite() - .withName(FullyQualifiedName.buildHash(testSuiteFqn)) - .withDisplayName(testSuiteFqn) - .withExecutableEntityReference(entityLink.getEntityFQN()), - "ingestion-bot") - .withExecutable(false); - // Create - testSuiteRepository.prepareInternal(newExecutableTestSuite); - testSuiteRepository - .getDao() - .insert("nameHash", newExecutableTestSuite, newExecutableTestSuite.getFullyQualifiedName()); - // Here we aer manually adding executable relationship since the table Repository is not registered and - // result - // into null for entity type table - testSuiteRepository.addRelationship( - newExecutableTestSuite.getExecutableEntityReference().getId(), - newExecutableTestSuite.getId(), - Entity.TABLE, - TEST_SUITE, - Relationship.CONTAINS); - - // add relationship from testSuite to TestCases - testSuiteRepository.addRelationship( - newExecutableTestSuite.getId(), test.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS); - - // Not a good approach but executable cannot be set true before - TestSuite temp = - testSuiteRepository - .getDao() - .findEntityByName( - EntityInterfaceUtil.quoteName(FullyQualifiedName.buildHash(testSuiteFqn)), - "nameHash", - Include.ALL); - temp.setExecutable(true); - testSuiteRepository.getDao().update("nameHash", temp); - } catch (Exception exIgnore) { - LOG.warn("Ignoring 
error since already added: {}", ex.getMessage()); - } + newExecutableTestSuite.getId(), testCase.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS); } } } + } - // Update Test Suites + private static void migrateExistingTestSuitesToLogical(CollectionDAO collectionDAO) { + IngestionPipelineRepository ingestionPipelineRepository = new IngestionPipelineRepository(collectionDAO); + TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO); ListFilter filter = new ListFilter(Include.ALL); - filter.addQueryParam("testSuiteType", "logical"); List testSuites = testSuiteRepository.listAll(new Fields(Set.of("id")), filter); - - for (TestSuite testSuiteRecord : testSuites) { - TestSuite temp = testSuiteRepository.getDao().findEntityById(testSuiteRecord.getId(), Include.ALL); - if (Boolean.FALSE.equals(temp.getExecutable())) { - temp.setExecutable(false); - testSuiteRepository.getDao().update(temp); + for (TestSuite testSuite : testSuites) { + testSuite.setExecutable(false); + List ingestionPipelineRecords = + collectionDAO + .relationshipDAO() + .findTo(testSuite.getId().toString(), TEST_SUITE, Relationship.CONTAINS.ordinal(), INGESTION_PIPELINE); + for (CollectionDAO.EntityRelationshipRecord ingestionRecord : ingestionPipelineRecords) { + // remove relationship + collectionDAO.relationshipDAO().deleteAll(ingestionRecord.getId().toString(), INGESTION_PIPELINE); + // Cannot use Delete directly it uses other repos internally + ingestionPipelineRepository.getDao().delete(ingestionRecord.getId().toString()); } + } + } - // get Ingestion Pipelines - try { - List ingestionPipelineRecords = - collectionDAO - .relationshipDAO() - .findTo( - testSuiteRecord.getId().toString(), - TEST_SUITE, - Relationship.CONTAINS.ordinal(), - INGESTION_PIPELINE); - for (CollectionDAO.EntityRelationshipRecord ingestionRecord : ingestionPipelineRecords) { - // remove relationship - collectionDAO.relationshipDAO().deleteAll(ingestionRecord.getId().toString(), 
INGESTION_PIPELINE); - // Cannot use Delete directly it uses other repos internally - ingestionPipelineRepository.getDao().delete(ingestionRecord.getId().toString()); - } - } catch (EntityNotFoundException ex) { - // Already Removed + public static Map> groupTestCasesByTable(CollectionDAO collectionDAO) { + Map> testCasesByTable = new HashMap<>(); + TestCaseRepository testCaseRepository = new TestCaseRepository(collectionDAO); + List testCases = testCaseRepository.listAll(new Fields(Set.of("id")), new ListFilter(Include.ALL)); + for (TestCase testCase : testCases) { + // Create New Executable Test Suites + MessageParser.EntityLink entityLink = MessageParser.EntityLink.parse(testCase.getEntityLink()); + // Create new Logical Test Suite + ArrayList testCasesGroup = new ArrayList<>(); + if (testCasesByTable.containsKey(entityLink.getEntityFQN())) { + testCasesGroup = testCasesByTable.get(entityLink.getEntityFQN()); + testCasesGroup.add(testCase); + } else { + testCasesGroup.add(testCase); } + testCasesByTable.put(entityLink.getEntityFQN(), testCasesGroup); } + return testCasesByTable; } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/CollectionRegistry.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/CollectionRegistry.java index e51c9c0d19bd..f69d2df833cb 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/CollectionRegistry.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/CollectionRegistry.java @@ -238,7 +238,7 @@ private static Object createResource( AuthenticatorHandler authHandler) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { - Object resource = null; + Object resource; Class clz = Class.forName(resourceClass); // Create the resource identified by resourceClass @@ -252,8 +252,6 @@ private static Object createResource( } catch (NoSuchMethodException ex) 
{ resource = Class.forName(resourceClass).getConstructor().newInstance(); } - } catch (Exception ex) { - LOG.warn("Exception encountered", ex); } // Call initialize method, if it exists diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/EntityResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/EntityResource.java index 0d3c328178d2..7bd500e9920a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/EntityResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/EntityResource.java @@ -82,16 +82,7 @@ public final Fields getFields(String fields) { return repository.getFields(fields); } - protected T addHref(UriInfo uriInfo, T entity) { - Entity.withHref(uriInfo, entity.getOwner()); - Entity.withHref(uriInfo, entity.getFollowers()); - Entity.withHref(uriInfo, entity.getExperts()); - Entity.withHref(uriInfo, entity.getReviewers()); - Entity.withHref(uriInfo, entity.getChildren()); - Entity.withHref(uriInfo, entity.getDomain()); - Entity.withHref(uriInfo, entity.getDataProducts()); - return entity; - } + public abstract T addHref(UriInfo uriInfo, T entity); protected List getEntitySpecificOperations() { return null; @@ -284,14 +275,11 @@ protected CsvImportResult importCsvInternal(SecurityContext securityContext, Str public T copy(T entity, CreateEntity request, String updatedBy) { EntityReference owner = repository.validateOwner(request.getOwner()); - EntityReference domain = repository.validateDomain(request.getDomain()); entity.setId(UUID.randomUUID()); entity.setName(request.getName()); entity.setDisplayName(request.getDisplayName()); entity.setDescription(request.getDescription()); entity.setOwner(owner); - entity.setDomain(domain); - entity.setDataProducts(getEntityReferences(Entity.DATA_PRODUCT, request.getDataProducts())); entity.setExtension(request.getExtension()); entity.setUpdatedBy(updatedBy); 
entity.setUpdatedAt(System.currentTimeMillis()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/ReportDataResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/ReportDataResource.java index 087cdf84770c..5d09b599c5e6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/ReportDataResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/ReportDataResource.java @@ -10,9 +10,11 @@ import java.io.IOException; import javax.validation.Valid; import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; +import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; @@ -45,12 +47,12 @@ @Collection(name = "analytics") public class ReportDataResource { public static final String COLLECTION_PATH = "v1/analytics/dataInsights/data"; - @Getter protected final ReportDataRepository dao; + @Getter protected final ReportDataRepository repository; protected final Authorizer authorizer; - public ReportDataResource(CollectionDAO dao, Authorizer authorizer) { + public ReportDataResource(CollectionDAO repository, Authorizer authorizer) { this.authorizer = authorizer; - this.dao = new ReportDataRepository(dao); + this.repository = new ReportDataRepository(repository); } public static class ReportDataResultList extends ResultList { @@ -89,12 +91,11 @@ public ResultList list( schema = @Schema(type = "number")) @NonNull @QueryParam("endTs") - Long endTs) - throws IOException { + Long endTs) { OperationContext operationContext = new OperationContext(Entity.DATA_INSIGHT_CHART, MetadataOperation.VIEW_ALL); ResourceContextInterface resourceContext = ReportDataContext.builder().build(); authorizer.authorize(securityContext, operationContext, resourceContext); - return dao.getReportData(reportDataType, 
startTs, endTs); + return repository.getReportData(reportDataType, startTs, endTs); } @POST @@ -109,11 +110,41 @@ public ResultList list( content = @Content(mediaType = "application/json", schema = @Schema(implementation = ReportData.class))) }) public Response addReportData( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid ReportData reportData) - throws IOException { + @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid ReportData reportData) { OperationContext operationContext = new OperationContext(Entity.DATA_INSIGHT_CHART, MetadataOperation.CREATE); ResourceContextInterface resourceContext = ReportDataContext.builder().build(); authorizer.authorize(securityContext, operationContext, resourceContext); - return dao.addReportData(reportData); + return repository.addReportData(reportData); + } + + @DELETE + @Path("/{reportDataType}/{date}") + @Operation( + operationId = "deleteReportData", + summary = "Delete report data for a given report data type ando date", + description = "Delete report data for a given report data type and date.", + responses = { + @ApiResponse( + responseCode = "200", + description = "Successfully deleted report data.", + content = @Content(mediaType = "application/json", schema = @Schema(implementation = ReportData.class))) + }) + public Response deleteReportData( + @Context UriInfo uriInfo, + @Context SecurityContext securityContext, + @Parameter(description = "report data type", schema = @Schema(implementation = ReportDataType.class)) + @NonNull + @PathParam("reportDataType") + ReportDataType reportDataType, + @Parameter(description = "date in format YYYY-MM-DD", schema = @Schema(type = "String")) + @NonNull + @PathParam("date") + String date) + throws IOException { + OperationContext operationContext = new OperationContext(Entity.DATA_INSIGHT_CHART, MetadataOperation.DELETE); + ResourceContextInterface resourceContext = ReportDataContext.builder().build(); + 
authorizer.authorize(securityContext, operationContext, resourceContext); + repository.deleteReportDataAtDate(reportDataType, date); + return Response.ok().build(); } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/WebAnalyticEventResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/WebAnalyticEventResource.java index 5355e3ad884d..da811b83f637 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/WebAnalyticEventResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/analytics/WebAnalyticEventResource.java @@ -43,6 +43,7 @@ import org.openmetadata.schema.type.EntityHistory; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.MetadataOperation; +import org.openmetadata.service.Entity; import org.openmetadata.service.OpenMetadataApplicationConfig; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.jdbi3.ListFilter; @@ -51,6 +52,7 @@ import org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Slf4j @@ -64,6 +66,13 @@ public class WebAnalyticEventResource extends EntityResource { public static final String COLLECTION_PATH = "v1/charts/"; - static final String FIELDS = "owner,followers,tags,domain,dataProducts"; + static final String FIELDS = "owner,followers,tags"; @Override public Chart addHref(UriInfo uriInfo, Chart chart) { - super.addHref(uriInfo, chart); + chart.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, chart.getId())); + Entity.withHref(uriInfo, chart.getOwner()); Entity.withHref(uriInfo, chart.getService()); + Entity.withHref(uriInfo, chart.getFollowers()); return chart; } diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dashboards/DashboardResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dashboards/DashboardResource.java index 559bb3648930..b2663c70a6c9 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dashboards/DashboardResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dashboards/DashboardResource.java @@ -72,14 +72,14 @@ @Collection(name = "dashboards") public class DashboardResource extends EntityResource { public static final String COLLECTION_PATH = "v1/dashboards/"; - protected static final String FIELDS = - "owner,charts,followers,tags,usageSummary,extension,dataModels," + "domain,dataProducts"; + protected static final String FIELDS = "owner,charts,followers,tags,usageSummary,extension,dataModels"; @Override public Dashboard addHref(UriInfo uriInfo, Dashboard dashboard) { - super.addHref(uriInfo, dashboard); + Entity.withHref(uriInfo, dashboard.getOwner()); Entity.withHref(uriInfo, dashboard.getService()); Entity.withHref(uriInfo, dashboard.getCharts()); + Entity.withHref(uriInfo, dashboard.getFollowers()); Entity.withHref(uriInfo, dashboard.getDataModels()); return dashboard; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dataInsight/DataInsightChartResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dataInsight/DataInsightChartResource.java index b3cc6ff6f0b6..59e64ca9d018 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dataInsight/DataInsightChartResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dataInsight/DataInsightChartResource.java @@ -56,6 +56,7 @@ import org.openmetadata.service.search.SearchClient; import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; +import 
org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Slf4j @@ -71,6 +72,13 @@ public class DataInsightChartResource extends EntityResource { public static final String COLLECTION_PATH = "v1/databases/"; - static final String FIELDS = "owner,databaseSchemas,usageSummary,location,tags,extension,domain"; + static final String FIELDS = "owner,databaseSchemas,usageSummary,location,tags,extension"; @Override public Database addHref(UriInfo uriInfo, Database db) { - super.addHref(uriInfo, db); Entity.withHref(uriInfo, db.getDatabaseSchemas()); Entity.withHref(uriInfo, db.getLocation()); + Entity.withHref(uriInfo, db.getOwner()); Entity.withHref(uriInfo, db.getService()); return db; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/DatabaseSchemaResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/DatabaseSchemaResource.java index 778d24c69c2a..292389eb9635 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/DatabaseSchemaResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/DatabaseSchemaResource.java @@ -70,12 +70,12 @@ @Collection(name = "databaseSchemas") public class DatabaseSchemaResource extends EntityResource { public static final String COLLECTION_PATH = "v1/databaseSchemas/"; - static final String FIELDS = "owner,tables,usageSummary,tags,extension,domain"; + static final String FIELDS = "owner,tables,usageSummary,tags,extension"; @Override public DatabaseSchema addHref(UriInfo uriInfo, DatabaseSchema schema) { - super.addHref(uriInfo, schema); Entity.withHref(uriInfo, schema.getTables()); + Entity.withHref(uriInfo, schema.getOwner()); Entity.withHref(uriInfo, schema.getService()); Entity.withHref(uriInfo, schema.getDatabase()); return schema; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/TableResource.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/TableResource.java index 09f7be9a45fc..e26fd9018123 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/TableResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/databases/TableResource.java @@ -85,14 +85,15 @@ public class TableResource extends EntityResource { public static final String COLLECTION_PATH = "v1/tables/"; static final String FIELDS = "tableConstraints,tablePartition,usageSummary,owner,customMetrics," - + "tags,followers,joins,viewDefinition,dataModel,extension,testSuite,domain,dataProducts"; + + "tags,followers,joins,viewDefinition,dataModel,extension,testSuite"; @Override public Table addHref(UriInfo uriInfo, Table table) { - super.addHref(uriInfo, table); Entity.withHref(uriInfo, table.getDatabaseSchema()); Entity.withHref(uriInfo, table.getDatabase()); Entity.withHref(uriInfo, table.getService()); + Entity.withHref(uriInfo, table.getOwner()); + Entity.withHref(uriInfo, table.getFollowers()); return table; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/datamodels/DashboardDataModelResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/datamodels/DashboardDataModelResource.java index ed2c34c33c75..e1639e224c35 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/datamodels/DashboardDataModelResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/datamodels/DashboardDataModelResource.java @@ -57,6 +57,7 @@ import org.openmetadata.service.resources.databases.DatabaseUtil; import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/dashboard/datamodels") @@ -68,12 +69,14 @@ @Collection(name = "datamodels") public 
class DashboardDataModelResource extends EntityResource { public static final String COLLECTION_PATH = "/v1/dashboard/datamodels"; - protected static final String FIELDS = "owner,tags,followers,domain"; + protected static final String FIELDS = "owner,tags,followers"; @Override public DashboardDataModel addHref(UriInfo uriInfo, DashboardDataModel dashboardDataModel) { - super.addHref(uriInfo, dashboardDataModel); + dashboardDataModel.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, dashboardDataModel.getId())); + Entity.withHref(uriInfo, dashboardDataModel.getOwner()); Entity.withHref(uriInfo, dashboardDataModel.getService()); + Entity.withHref(uriInfo, dashboardDataModel.getFollowers()); return dashboardDataModel; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DataProductResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DataProductResource.java deleted file mode 100644 index eb2bd7e96c10..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DataProductResource.java +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.openmetadata.service.resources.domains; - -import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; - -import io.swagger.v3.oas.annotations.ExternalDocumentation; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.media.ExampleObject; -import io.swagger.v3.oas.annotations.media.Schema; -import io.swagger.v3.oas.annotations.parameters.RequestBody; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import javax.json.JsonPatch; -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.PATCH; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.core.UriInfo; -import lombok.extern.slf4j.Slf4j; -import org.openmetadata.schema.api.domains.CreateDataProduct; -import org.openmetadata.schema.entity.domains.DataProduct; -import org.openmetadata.schema.type.EntityHistory; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.schema.type.Include; -import org.openmetadata.service.Entity; -import org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.jdbi3.DataProductRepository; -import org.openmetadata.service.jdbi3.ListFilter; -import org.openmetadata.service.resources.Collection; -import org.openmetadata.service.resources.EntityResource; -import 
org.openmetadata.service.security.Authorizer; -import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.ResultList; - -@Slf4j -@Path("/v1/dataProducts") -@Tag( - name = "Domains", - description = - "A `Data Product` or `Data as a Product` is a logical unit that contains all components to process and store " - + "domain data for analytical or data-intensive use cases made available to data consumers.") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -@Collection(name = "dataProducts", order = 4) // initialize after user resource -public class DataProductResource extends EntityResource { - public static final String COLLECTION_PATH = "/v1/dataProducts/"; - static final String FIELDS = "domain,owner,experts,assets"; - - public DataProductResource(CollectionDAO dao, Authorizer authorizer) { - super(DataProduct.class, new DataProductRepository(dao), authorizer); - } - - @Override - public DataProduct addHref(UriInfo uriInfo, DataProduct dataProduct) { - super.addHref(uriInfo, dataProduct); - Entity.withHref(uriInfo, dataProduct.getAssets()); - return dataProduct; - } - - public static class DataProductList extends ResultList { - @SuppressWarnings("unused") - public DataProductList() { - /* Required for serde */ - } - } - - @GET - @Operation( - operationId = "listDataProducts", - summary = "List dataProducts", - description = "Get a list of DataProducts.", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of DataProducts", - content = - @Content(mediaType = "application/json", schema = @Schema(implementation = DataProductList.class))) - }) - public ResultList list( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Filter data products by domain name", - schema 
= @Schema(type = "string", example = "marketing")) - @QueryParam("domain") - String domain, - @DefaultValue("10") @Min(0) @Max(1000000) @QueryParam("limit") int limitParam, - @Parameter(description = "Returns list of DataProduct before this cursor", schema = @Schema(type = "string")) - @QueryParam("before") - String before, - @Parameter(description = "Returns list of DataProduct after this cursor", schema = @Schema(type = "string")) - @QueryParam("after") - String after) { - ListFilter filter = new ListFilter(null).addQueryParam("domain", domain); - return listInternal(uriInfo, securityContext, fieldsParam, filter, limitParam, before, after); - } - - @GET - @Path("/{id}") - @Operation( - operationId = "getDataProductByID", - summary = "Get a dataProduct by Id", - description = "Get a dataProduct by `Id`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The dataProduct", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DataProduct.class))), - @ApiResponse(responseCode = "404", description = "DataProduct for instance {id} is not found") - }) - public DataProduct get( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter(description = "Id of the dataProduct", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return getInternal(uriInfo, securityContext, id, fieldsParam, null); - } - - @GET - @Path("/name/{name}") - @Operation( - operationId = "getDataProductByFQN", - summary = "Get a dataProduct by name", - description = "Get a dataProduct by `name`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "dataProduct", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DataProduct.class))), - @ApiResponse(responseCode = "404", 
description = "DataProduct for instance {name} is not found") - }) - public DataProduct getByName( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Name of the dataProduct", schema = @Schema(type = "string")) @PathParam("name") - String name, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam) { - return getByNameInternal(uriInfo, securityContext, name, fieldsParam, null); - } - - @GET - @Path("/{id}/versions") - @Operation( - operationId = "listAllDataProductVersion", - summary = "List dataProduct versions", - description = "Get a list of all the versions of a dataProduct identified by `Id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of dataProduct versions", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = EntityHistory.class))) - }) - public EntityHistory listVersions( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the dataProduct", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return super.listVersionsInternal(securityContext, id); - } - - @GET - @Path("/{id}/versions/{version}") - @Operation( - operationId = "listSpecificDataProductVersion", - summary = "Get a version of the dataProduct", - description = "Get a version of the dataProduct by given `Id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "dataProduct", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DataProduct.class))), - @ApiResponse( - responseCode = "404", - description = "DataProduct for instance {id} and version {version} is " + "not found") - }) - public DataProduct getVersion( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the dataProduct", schema = 
@Schema(type = "UUID")) @PathParam("id") UUID id, - @Parameter( - description = "DataProduct version number in the form `major`.`minor`", - schema = @Schema(type = "string", example = "0.1 or 1.1")) - @PathParam("version") - String version) { - return super.getVersionInternal(securityContext, id, version); - } - - @POST - @Operation( - operationId = "createDataProduct", - summary = "Create a dataProduct", - description = "Create a new dataProduct.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The dataProduct ", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DataProduct.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response create( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateDataProduct create) { - DataProduct dataProduct = getDataProduct(create, securityContext.getUserPrincipal().getName()); - return create(uriInfo, securityContext, dataProduct); - } - - @PUT - @Operation( - operationId = "createOrUpdateDataProduct", - summary = "Create or update a dataProduct", - description = - "Create a dataProduct. if it does not exist. 
If a dataProduct already exists, update the " + "dataProduct.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The dataProduct", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DataProduct.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response createOrUpdate( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateDataProduct create) { - DataProduct dataProduct = getDataProduct(create, securityContext.getUserPrincipal().getName()); - return createOrUpdate(uriInfo, securityContext, dataProduct); - } - - @PATCH - @Path("/{id}") - @Operation( - operationId = "patchDataProduct", - summary = "Update a dataProduct", - description = "Update an existing dataProduct using JsonPatch.", - externalDocs = @ExternalDocumentation(description = "JsonPatch RFC", url = "https://tools.ietf.org/html/rfc6902")) - @Consumes(MediaType.APPLICATION_JSON_PATCH_JSON) - public Response patch( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the dataProduct", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @RequestBody( - description = "JsonPatch with array of operations", - content = - @Content( - mediaType = MediaType.APPLICATION_JSON_PATCH_JSON, - examples = { - @ExampleObject("[" + "{op:remove, path:/a}," + "{op:add, path: /b, value: val}" + "]") - })) - JsonPatch patch) { - return patchInternal(uriInfo, securityContext, id, patch); - } - - @DELETE - @Path("/{id}") - @Operation( - operationId = "deleteDataProduct", - summary = "Delete a dataProduct by Id", - description = "Delete a dataProduct by `Id`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "DataProduct for instance {id} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - 
@Parameter(description = "Id of the dataProduct", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return delete(uriInfo, securityContext, id, true, true); - } - - @DELETE - @Path("/name/{name}") - @Operation( - operationId = "deleteDataProductByFQN", - summary = "Delete a dataProduct by name", - description = "Delete a dataProduct by `name`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "DataProduct for instance {name} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Name of the dataProduct", schema = @Schema(type = "string")) @PathParam("name") - String name) { - return deleteByName(uriInfo, securityContext, name, true, true); - } - - private DataProduct getDataProduct(CreateDataProduct create, String user) { - List experts = create.getExperts(); - DataProduct dataProduct = - copy(new DataProduct(), create, user) - .withFullyQualifiedName(create.getName()) - .withExperts(EntityUtil.populateEntityReferences(getEntityReferences(Entity.USER, experts))); - dataProduct.withAssets(new ArrayList<>()); - for (EntityReference asset : listOrEmpty(create.getAssets())) { - asset = Entity.getEntityReference(asset, Include.NON_DELETED); - dataProduct.getAssets().add(asset); - } - return dataProduct; - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DomainResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DomainResource.java deleted file mode 100644 index d8960a21768d..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/domains/DomainResource.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.openmetadata.service.resources.domains; - -import io.swagger.v3.oas.annotations.ExternalDocumentation; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.media.ExampleObject; -import io.swagger.v3.oas.annotations.media.Schema; -import io.swagger.v3.oas.annotations.parameters.RequestBody; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import java.util.List; -import java.util.UUID; -import javax.json.JsonPatch; -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.PATCH; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.core.UriInfo; -import lombok.extern.slf4j.Slf4j; -import org.openmetadata.schema.api.domains.CreateDomain; -import org.openmetadata.schema.entity.domains.Domain; -import org.openmetadata.schema.type.EntityHistory; -import org.openmetadata.schema.type.Include; -import org.openmetadata.service.Entity; -import 
org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.jdbi3.DomainRepository; -import org.openmetadata.service.jdbi3.ListFilter; -import org.openmetadata.service.resources.Collection; -import org.openmetadata.service.resources.EntityResource; -import org.openmetadata.service.security.Authorizer; -import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.ResultList; - -@Slf4j -@Path("/v1/domains") -@Tag( - name = "Domains", - description = - "A `Domain` is a bounded context that is aligned with a Business Unit or a function within an organization.") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -@Collection(name = "domains", order = 4) // initialize after user resource -public class DomainResource extends EntityResource { - public static final String COLLECTION_PATH = "/v1/domains/"; - static final String FIELDS = "children,owner,parent,experts"; - - public DomainResource(CollectionDAO dao, Authorizer authorizer) { - super(Domain.class, new DomainRepository(dao), authorizer); - } - - @Override - public Domain addHref(UriInfo uriInfo, Domain domain) { - super.addHref(uriInfo, domain); - Entity.withHref(uriInfo, domain.getParent()); - return domain; - } - - public static class DomainList extends ResultList { - @SuppressWarnings("unused") - public DomainList() { - /* Required for serde */ - } - } - - @GET - @Operation( - operationId = "listDomains", - summary = "List domains", - description = "Get a list of Domains.", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of Domains", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = DomainList.class))) - }) - public ResultList list( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String 
fieldsParam, - @DefaultValue("10") @Min(0) @Max(1000000) @QueryParam("limit") int limitParam, - @Parameter(description = "Returns list of Domain before this cursor", schema = @Schema(type = "string")) - @QueryParam("before") - String before, - @Parameter(description = "Returns list of Domain after this cursor", schema = @Schema(type = "string")) - @QueryParam("after") - String after) { - return listInternal(uriInfo, securityContext, fieldsParam, new ListFilter(null), limitParam, before, after); - } - - @GET - @Path("/{id}") - @Operation( - operationId = "getDomainByID", - summary = "Get a domain by Id", - description = "Get a domain by `Id`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The domain", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = Domain.class))), - @ApiResponse(responseCode = "404", description = "Domain for instance {id} is not found") - }) - public Domain get( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter(description = "Id of the domain", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return getInternal(uriInfo, securityContext, id, fieldsParam, null); - } - - @GET - @Path("/name/{name}") - @Operation( - operationId = "getDomainByFQN", - summary = "Get a domain by name", - description = "Get a domain by `name`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "domain", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = Domain.class))), - @ApiResponse(responseCode = "404", description = "Domain for instance {name} is not found") - }) - public Domain getByName( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Name of the domain", schema = 
@Schema(type = "string")) @PathParam("name") String name, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam) { - return getByNameInternal(uriInfo, securityContext, name, fieldsParam, null); - } - - @GET - @Path("/{id}/versions") - @Operation( - operationId = "listAllDomainVersion", - summary = "List domain versions", - description = "Get a list of all the versions of a domain identified by `Id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of domain versions", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = EntityHistory.class))) - }) - public EntityHistory listVersions( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the domain", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return super.listVersionsInternal(securityContext, id); - } - - @GET - @Path("/{id}/versions/{version}") - @Operation( - operationId = "listSpecificDomainVersion", - summary = "Get a version of the domain", - description = "Get a version of the domain by given `Id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "domain", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = Domain.class))), - @ApiResponse( - responseCode = "404", - description = "Domain for instance {id} and version {version} is " + "not found") - }) - public Domain getVersion( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the domain", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Parameter( - description = "Domain version number in the form `major`.`minor`", - schema = @Schema(type = "string", example = "0.1 or 1.1")) - @PathParam("version") - String version) { - return super.getVersionInternal(securityContext, id, version); 
- } - - @POST - @Operation( - operationId = "createDomain", - summary = "Create a domain", - description = "Create a new domain.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The domain ", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = Domain.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response create( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateDomain create) { - Domain domain = getDomain(create, securityContext.getUserPrincipal().getName()); - return create(uriInfo, securityContext, domain); - } - - @PUT - @Operation( - operationId = "createOrUpdateDomain", - summary = "Create or update a domain", - description = "Create a domain. if it does not exist. If a domain already exists, update the domain.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The domain", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = Domain.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response createOrUpdate( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateDomain create) { - Domain domain = getDomain(create, securityContext.getUserPrincipal().getName()); - return createOrUpdate(uriInfo, securityContext, domain); - } - - @PATCH - @Path("/{id}") - @Operation( - operationId = "patchDomain", - summary = "Update a domain", - description = "Update an existing domain using JsonPatch.", - externalDocs = @ExternalDocumentation(description = "JsonPatch RFC", url = "https://tools.ietf.org/html/rfc6902")) - @Consumes(MediaType.APPLICATION_JSON_PATCH_JSON) - public Response patch( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the domain", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @RequestBody( - description = "JsonPatch with array 
of operations", - content = - @Content( - mediaType = MediaType.APPLICATION_JSON_PATCH_JSON, - examples = { - @ExampleObject("[" + "{op:remove, path:/a}," + "{op:add, path: /b, value: val}" + "]") - })) - JsonPatch patch) { - return patchInternal(uriInfo, securityContext, id, patch); - } - - @DELETE - @Path("/{id}") - @Operation( - operationId = "deleteDomain", - summary = "Delete a domain by Id", - description = "Delete a domain by `Id`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "Domain for instance {id} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the domain", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return delete(uriInfo, securityContext, id, true, true); - } - - @DELETE - @Path("/name/{name}") - @Operation( - operationId = "deleteDomainByFQN", - summary = "Delete a domain by name", - description = "Delete a domain by `name`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "Domain for instance {name} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Name of the domain", schema = @Schema(type = "string")) @PathParam("name") - String name) { - return deleteByName(uriInfo, securityContext, name, true, true); - } - - private Domain getDomain(CreateDomain create, String user) { - List experts = create.getExperts(); - return copy(new Domain(), create, user) - .withDomainType(create.getDomainType()) - .withFullyQualifiedName(create.getName()) - .withParent( - Entity.getEntityReference(getEntityReference(Entity.DOMAIN, create.getParent()), Include.NON_DELETED)) - .withExperts(EntityUtil.populateEntityReferences(getEntityReferences(Entity.USER, experts))); - } -} diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java index 76e244f3b2d2..f8362db2e111 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java @@ -78,7 +78,8 @@ public class TestCaseResource extends EntityResource { public static final String COLLECTION_PATH = "v1/glossaries/"; - static final String FIELDS = "owner,tags,reviewers,usageCount,termCount,domain"; + static final String FIELDS = "owner,tags,reviewers,usageCount,termCount"; + + @Override + public Glossary addHref(UriInfo uriInfo, Glossary glossary) { + glossary.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, glossary.getId())); + Entity.withHref(uriInfo, glossary.getOwner()); + Entity.withHref(uriInfo, glossary.getReviewers()); + return glossary; + } public GlossaryResource(CollectionDAO dao, Authorizer authorizer) { super(Glossary.class, new GlossaryRepository(dao), authorizer); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/glossary/GlossaryTermResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/glossary/GlossaryTermResource.java index a4a0d6a4f2b4..78371aced6ef 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/glossary/GlossaryTermResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/glossary/GlossaryTermResource.java @@ -71,14 +71,17 @@ @Collection(name = "glossaryTerms", order = 7) // Initialized after Glossary, Classification, and Tags public class GlossaryTermResource extends EntityResource { public static final String COLLECTION_PATH = "v1/glossaryTerms/"; - static final String FIELDS = "children,relatedTerms,reviewers,owner,tags,usageCount,domain"; + 
static final String FIELDS = "children,relatedTerms,reviewers,owner,tags,usageCount"; @Override public GlossaryTerm addHref(UriInfo uriInfo, GlossaryTerm term) { - super.addHref(uriInfo, term); + term.withHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, term.getId())); Entity.withHref(uriInfo, term.getGlossary()); Entity.withHref(uriInfo, term.getParent()); + Entity.withHref(uriInfo, term.getChildren()); Entity.withHref(uriInfo, term.getRelatedTerms()); + Entity.withHref(uriInfo, term.getReviewers()); + Entity.withHref(uriInfo, term.getOwner()); return term; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/kpi/KpiResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/kpi/KpiResource.java index 363fc3f1293c..d5dba666e4ae 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/kpi/KpiResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/kpi/KpiResource.java @@ -49,6 +49,7 @@ import org.openmetadata.service.resources.Collection; import org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Slf4j @@ -64,7 +65,8 @@ public class KpiResource extends EntityResource { @Override public Kpi addHref(UriInfo uriInfo, Kpi kpi) { - super.addHref(uriInfo, kpi); + kpi.withHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, kpi.getId())); + Entity.withHref(uriInfo, kpi.getOwner()); Entity.withHref(uriInfo, kpi.getDataInsightChart()); return kpi; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/metrics/MetricsResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/metrics/MetricsResource.java index 6d68f2f05d20..5c6cd6a1cdb7 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/metrics/MetricsResource.java +++ 
b/openmetadata-service/src/main/java/org/openmetadata/service/resources/metrics/MetricsResource.java @@ -63,7 +63,7 @@ @Collection(name = "metrics") public class MetricsResource extends EntityResource { public static final String COLLECTION_PATH = "/v1/metrics/"; - static final String FIELDS = "owner,usageSummary,domain"; + static final String FIELDS = "owner,usageSummary"; public MetricsResource(CollectionDAO dao, Authorizer authorizer) { super(Metrics.class, new MetricsRepository(dao), authorizer); @@ -75,6 +75,11 @@ protected List getEntitySpecificOperations() { return listOf(MetadataOperation.VIEW_USAGE, MetadataOperation.EDIT_USAGE); } + @Override + public Metrics addHref(UriInfo uriInfo, Metrics entity) { + return entity; + } + public static class MetricsList extends ResultList { /* Required for serde */ } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/mlmodels/MlModelResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/mlmodels/MlModelResource.java index 00c30c8dd7de..6354146a9587 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/mlmodels/MlModelResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/mlmodels/MlModelResource.java @@ -60,6 +60,7 @@ import org.openmetadata.service.resources.Collection; import org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/mlmodels") @@ -71,13 +72,15 @@ @Collection(name = "mlmodels") public class MlModelResource extends EntityResource { public static final String COLLECTION_PATH = "v1/mlmodels/"; - static final String FIELDS = "owner,dashboard,followers,tags,usageSummary,extension,domain"; + static final String FIELDS = "owner,dashboard,followers,tags,usageSummary,extension"; @Override public MlModel addHref(UriInfo uriInfo, MlModel 
mlmodel) { - super.addHref(uriInfo, mlmodel); + mlmodel.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, mlmodel.getId())); + Entity.withHref(uriInfo, mlmodel.getOwner()); Entity.withHref(uriInfo, mlmodel.getDashboard()); Entity.withHref(uriInfo, mlmodel.getService()); + Entity.withHref(uriInfo, mlmodel.getFollowers()); return mlmodel; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/pipelines/PipelineResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/pipelines/PipelineResource.java index 927a4b816a41..01b45edba63c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/pipelines/PipelineResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/pipelines/PipelineResource.java @@ -64,6 +64,7 @@ import org.openmetadata.service.resources.dqtests.TestCaseResource; import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/pipelines") @@ -76,12 +77,14 @@ @Collection(name = "pipelines") public class PipelineResource extends EntityResource { public static final String COLLECTION_PATH = "v1/pipelines/"; - static final String FIELDS = "owner,tasks,pipelineStatus,followers,tags,extension,scheduleInterval,domain"; + static final String FIELDS = "owner,tasks,pipelineStatus,followers,tags,extension,scheduleInterval"; @Override public Pipeline addHref(UriInfo uriInfo, Pipeline pipeline) { - super.addHref(uriInfo, pipeline); + pipeline.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, pipeline.getId())); + Entity.withHref(uriInfo, pipeline.getOwner()); Entity.withHref(uriInfo, pipeline.getService()); + Entity.withHref(uriInfo, pipeline.getFollowers()); return pipeline; } diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/resources/policies/PolicyResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/policies/PolicyResource.java index 488abe4c61b7..a4b3d56b956b 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/policies/PolicyResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/policies/PolicyResource.java @@ -84,7 +84,7 @@ public class PolicyResource extends EntityResource { @Override public Policy addHref(UriInfo uriInfo, Policy policy) { - super.addHref(uriInfo, policy); + Entity.withHref(uriInfo, policy.getOwner()); Entity.withHref(uriInfo, policy.getTeams()); Entity.withHref(uriInfo, policy.getRoles()); return policy; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/query/QueryResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/query/QueryResource.java index c164d407f3d8..0b87e356c614 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/query/QueryResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/query/QueryResource.java @@ -78,7 +78,8 @@ protected List getEntitySpecificOperations() { @Override public Query addHref(UriInfo uriInfo, Query entity) { - super.addHref(uriInfo, entity); + Entity.withHref(uriInfo, entity.getOwner()); + Entity.withHref(uriInfo, entity.getFollowers()); Entity.withHref(uriInfo, entity.getUsers()); Entity.withHref(uriInfo, entity.getQueryUsedIn()); return entity; @@ -383,6 +384,51 @@ public Response addQueryUsage( return repository.addQueryUsage(uriInfo, securityContext.getUserPrincipal().getName(), id, entityIds).toResponse(); } + @PUT + @Path("/{id}/users") + @Operation( + operationId = "addQueryUsers", + summary = "Add query users", + description = "Add query users", + responses = { + @ApiResponse( + responseCode = "200", + description = "OK", + content = 
@Content(mediaType = "application/json", schema = @Schema(implementation = Query.class))) + }) + public Response addQueryUsers( + @Context UriInfo uriInfo, + @Context SecurityContext securityContext, + @Parameter(description = "Id of the query", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, + @Valid List userFqnList) { + OperationContext operationContext = new OperationContext(entityType, MetadataOperation.EDIT_ALL); + authorizer.authorize(securityContext, operationContext, getResourceContextById(id)); + return repository.AddQueryUser(uriInfo, securityContext.getUserPrincipal().getName(), id, userFqnList).toResponse(); + } + + @PUT + @Path("/{id}/usedBy") + @Operation( + operationId = "addQueryUsedBy", + summary = "Populate Used By Field", + description = "Add query users", + responses = { + @ApiResponse( + responseCode = "200", + description = "OK", + content = @Content(mediaType = "application/json", schema = @Schema(implementation = Query.class))) + }) + public Response addQueryUsedBy( + @Context UriInfo uriInfo, + @Context SecurityContext securityContext, + @Parameter(description = "Id of the query", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, + @Valid List usedByList) { + OperationContext operationContext = new OperationContext(entityType, MetadataOperation.EDIT_ALL); + authorizer.authorize(securityContext, operationContext, getResourceContextById(id)); + return repository.AddQueryUsedBy(uriInfo, securityContext.getUserPrincipal().getName(), id, usedByList) + .toResponse(); + } + @DELETE @Path("/{id}/usage") @Operation( diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/reports/ReportResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/reports/ReportResource.java index 3d6d3c339012..409522ce8b09 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/reports/ReportResource.java +++ 
b/openmetadata-service/src/main/java/org/openmetadata/service/resources/reports/ReportResource.java @@ -74,6 +74,11 @@ protected List getEntitySpecificOperations() { return listOf(MetadataOperation.VIEW_USAGE, MetadataOperation.EDIT_USAGE); } + @Override + public Report addHref(UriInfo uriInfo, Report entity) { + return entity; + } + public static class ReportList extends ResultList { /* Required for serde */ } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/searchindex/SearchIndexResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/searchindex/SearchIndexResource.java deleted file mode 100644 index 45ab3fcb2e20..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/searchindex/SearchIndexResource.java +++ /dev/null @@ -1,478 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.openmetadata.service.resources.searchindex; - -import static org.openmetadata.common.utils.CommonUtil.listOf; - -import io.swagger.v3.oas.annotations.ExternalDocumentation; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.media.ExampleObject; -import io.swagger.v3.oas.annotations.media.Schema; -import io.swagger.v3.oas.annotations.parameters.RequestBody; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import java.util.List; -import java.util.UUID; -import javax.json.JsonPatch; -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.PATCH; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.core.UriInfo; -import org.openmetadata.schema.api.data.CreateSearchIndex; -import org.openmetadata.schema.api.data.RestoreEntity; -import org.openmetadata.schema.entity.data.SearchIndex; -import org.openmetadata.schema.type.ChangeEvent; -import org.openmetadata.schema.type.EntityHistory; -import org.openmetadata.schema.type.Include; -import org.openmetadata.schema.type.MetadataOperation; -import org.openmetadata.schema.type.searchindex.SearchIndexSampleData; -import org.openmetadata.service.Entity; -import org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.jdbi3.ListFilter; -import org.openmetadata.service.jdbi3.SearchIndexRepository; -import 
org.openmetadata.service.resources.Collection; -import org.openmetadata.service.resources.EntityResource; -import org.openmetadata.service.security.Authorizer; -import org.openmetadata.service.security.policyevaluator.OperationContext; -import org.openmetadata.service.security.policyevaluator.ResourceContext; -import org.openmetadata.service.util.ResultList; - -@Path("/v1/searchIndexes") -@Tag( - name = "SearchIndex", - description = "A `SearchIndex` is a index mapping for indexing documents in a `Search Service`.") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -@Collection(name = "searchIndexes") -public class SearchIndexResource extends EntityResource { - public static final String COLLECTION_PATH = "v1/searchIndexes/"; - static final String FIELDS = "owner,followers,tags,extension,domain,dataProducts"; - - @Override - public SearchIndex addHref(UriInfo uriInfo, SearchIndex searchIndex) { - super.addHref(uriInfo, searchIndex); - Entity.withHref(uriInfo, searchIndex.getService()); - return searchIndex; - } - - public SearchIndexResource(CollectionDAO dao, Authorizer authorizer) { - super(SearchIndex.class, new SearchIndexRepository(dao), authorizer); - } - - @Override - protected List getEntitySpecificOperations() { - addViewOperation("sampleData", MetadataOperation.VIEW_SAMPLE_DATA); - return listOf(MetadataOperation.VIEW_SAMPLE_DATA, MetadataOperation.EDIT_SAMPLE_DATA); - } - - public static class SearchIndexList extends ResultList { - /* Required for serde */ - } - - @GET - @Operation( - operationId = "listSearchIndexes", - summary = "List searchIndexes", - description = - "Get a list of SearchIndexes, optionally filtered by `service` it belongs to. Use `fields` " - + "parameter to get only necessary fields. 
Use cursor-based pagination to limit the number " - + "entries in the list using `limit` and `before` or `after` query params.", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of SearchIndexes", - content = - @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndexList.class))) - }) - public ResultList list( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Filter SearchIndexes by service name", - schema = @Schema(type = "string", example = "ElasticSearchWestCoast")) - @QueryParam("service") - String serviceParam, - @Parameter(description = "Limit the number SearchIndexes returned. (1 to 1000000, default = " + "10)") - @DefaultValue("10") - @QueryParam("limit") - @Min(0) - @Max(1000000) - int limitParam, - @Parameter(description = "Returns list of SearchIndexes before this cursor", schema = @Schema(type = "string")) - @QueryParam("before") - String before, - @Parameter(description = "Returns list of SearchIndexes after this cursor", schema = @Schema(type = "string")) - @QueryParam("after") - String after, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - ListFilter filter = new ListFilter(include).addQueryParam("service", serviceParam); - return super.listInternal(uriInfo, securityContext, fieldsParam, filter, limitParam, before, after); - } - - @GET - @Path("/{id}/versions") - @Operation( - operationId = "listAllSearchIndexVersion", - summary = "List SearchIndex versions", - description = "Get a list of all the versions of a SearchIndex identified by `id`", - responses = { - @ApiResponse( - responseCode = "200", - 
description = "List of SearchIndex versions", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = EntityHistory.class))) - }) - public EntityHistory listVersions( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return super.listVersionsInternal(securityContext, id); - } - - @GET - @Path("/{id}") - @Operation( - summary = "Get a SearchIndex by id", - description = "Get a SearchIndex by `id`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))), - @ApiResponse(responseCode = "404", description = "SearchIndex for instance {id} is not found") - }) - public SearchIndex get( - @Context UriInfo uriInfo, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - return getInternal(uriInfo, securityContext, id, fieldsParam, include); - } - - @GET - @Path("/name/{fqn}") - @Operation( - operationId = "getSearchIndexByFQN", - summary = "Get a SearchIndex by fully qualified name", - description = "Get a SearchIndex by fully qualified name.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))), - @ApiResponse(responseCode = "404", description = 
"SearchIndex for instance {fqn} is not found") - }) - public SearchIndex getByName( - @Context UriInfo uriInfo, - @Parameter(description = "Fully qualified name of the SearchIndex", schema = @Schema(type = "string")) - @PathParam("fqn") - String fqn, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - return getByNameInternal(uriInfo, securityContext, fqn, fieldsParam, include); - } - - @GET - @Path("/{id}/versions/{version}") - @Operation( - operationId = "getSpecificSearchIndexVersion", - summary = "Get a version of the SearchIndex", - description = "Get a version of the SearchIndex by given `id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))), - @ApiResponse( - responseCode = "404", - description = "SearchIndex for instance {id} and version {version} is " + "not found") - }) - public SearchIndex getVersion( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Parameter( - description = "SearchIndex version number in the form `major`.`minor`", - schema = @Schema(type = "string", example = "0.1 or 1.1")) - @PathParam("version") - String version) { - return super.getVersionInternal(securityContext, id, version); - } - - @POST - @Operation( - operationId = "createSearchIndex", - summary = "Create a SearchIndex", - description = "Create a SearchIndex under an existing `service`.", - responses = { - @ApiResponse( - 
responseCode = "200", - description = "The SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response create( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateSearchIndex create) { - SearchIndex searchIndex = getSearchIndex(create, securityContext.getUserPrincipal().getName()); - return create(uriInfo, securityContext, searchIndex); - } - - @PATCH - @Path("/{id}") - @Operation( - operationId = "patchSearchIndex", - summary = "Update a SearchIndex", - description = "Update an existing SearchIndex using JsonPatch.", - externalDocs = @ExternalDocumentation(description = "JsonPatch RFC", url = "https://tools.ietf.org/html/rfc6902")) - @Consumes(MediaType.APPLICATION_JSON_PATCH_JSON) - public Response updateDescription( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @RequestBody( - description = "JsonPatch with array of operations", - content = - @Content( - mediaType = MediaType.APPLICATION_JSON_PATCH_JSON, - examples = { - @ExampleObject("[" + "{op:remove, path:/a}," + "{op:add, path: /b, value: val}" + "]") - })) - JsonPatch patch) { - return patchInternal(uriInfo, securityContext, id, patch); - } - - @PUT - @Operation( - operationId = "createOrUpdateSearchIndex", - summary = "Update SearchIndex", - description = "Create a SearchIndex, it it does not exist or update an existing SearchIndex.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The updated SearchIndex ", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))) - }) - public Response createOrUpdate( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateSearchIndex create) { - 
SearchIndex searchIndex = getSearchIndex(create, securityContext.getUserPrincipal().getName()); - return createOrUpdate(uriInfo, securityContext, searchIndex); - } - - @PUT - @Path("/{id}/sampleData") - @Operation( - operationId = "addSampleData", - summary = "Add sample data", - description = "Add sample data to the searchIndex.", - responses = { - @ApiResponse( - responseCode = "200", - description = "The SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))), - }) - public SearchIndex addSampleData( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Valid SearchIndexSampleData sampleData) { - OperationContext operationContext = new OperationContext(entityType, MetadataOperation.EDIT_SAMPLE_DATA); - authorizer.authorize(securityContext, operationContext, getResourceContextById(id)); - SearchIndex searchIndex = repository.addSampleData(id, sampleData); - return addHref(uriInfo, searchIndex); - } - - @GET - @Path("/{id}/sampleData") - @Operation( - operationId = "getSampleData", - summary = "Get sample data", - description = "Get sample data from the SearchIndex.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Successfully obtained the SampleData for SearchIndex", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))) - }) - public SearchIndex getSampleData( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - OperationContext operationContext = new OperationContext(entityType, MetadataOperation.VIEW_SAMPLE_DATA); - ResourceContext resourceContext = getResourceContextById(id); - authorizer.authorize(securityContext, operationContext, resourceContext); - 
boolean authorizePII = authorizer.authorizePII(securityContext, resourceContext.getOwner()); - - SearchIndex searchIndex = repository.getSampleData(id, authorizePII); - return addHref(uriInfo, searchIndex); - } - - @PUT - @Path("/{id}/followers") - @Operation( - operationId = "addFollower", - summary = "Add a follower", - description = "Add a user identified by `userId` as followed of this SearchIndex", - responses = { - @ApiResponse( - responseCode = "200", - description = "OK", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = ChangeEvent.class))), - @ApiResponse(responseCode = "404", description = "SearchIndex for instance {id} is not found") - }) - public Response addFollower( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Parameter(description = "Id of the user to be added as follower", schema = @Schema(type = "UUID")) UUID userId) { - return repository.addFollower(securityContext.getUserPrincipal().getName(), id, userId).toResponse(); - } - - @DELETE - @Path("/{id}/followers/{userId}") - @Operation( - summary = "Remove a follower", - description = "Remove the user identified `userId` as a follower of the SearchIndex.", - responses = { - @ApiResponse( - responseCode = "200", - description = "OK", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = ChangeEvent.class))) - }) - public Response deleteFollower( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Parameter(description = "Id of the user being removed as follower", schema = @Schema(type = "string")) - @PathParam("userId") - String userId) { - return repository - .deleteFollower(securityContext.getUserPrincipal().getName(), id, UUID.fromString(userId)) - 
.toResponse(); - } - - @DELETE - @Path("/{id}") - @Operation( - operationId = "deleteSearchIndex", - summary = "Delete a SearchIndex by id", - description = "Delete a SearchIndex by `id`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "SearchIndex for instance {id} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Hard delete the entity. (Default = `false`)") - @QueryParam("hardDelete") - @DefaultValue("false") - boolean hardDelete, - @Parameter(description = "Id of the SearchIndex", schema = @Schema(type = "UUID")) @PathParam("id") UUID id) { - return delete(uriInfo, securityContext, id, false, hardDelete); - } - - @DELETE - @Path("/name/{fqn}") - @Operation( - operationId = "deleteSearchIndexByFQN", - summary = "Delete a SearchIndex by fully qualified name", - description = "Delete a SearchIndex by `fullyQualifiedName`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "SearchIndex for instance {fqn} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Hard delete the entity. (Default = `false`)") - @QueryParam("hardDelete") - @DefaultValue("false") - boolean hardDelete, - @Parameter(description = "Fully qualified name of the SearchIndex", schema = @Schema(type = "string")) - @PathParam("fqn") - String fqn) { - return deleteByName(uriInfo, securityContext, fqn, false, hardDelete); - } - - @PUT - @Path("/restore") - @Operation( - operationId = "restore", - summary = "Restore a soft deleted SearchIndex", - description = "Restore a soft deleted SearchIndex.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Successfully restored the SearchIndex. 
", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchIndex.class))) - }) - public Response restoreSearchIndex( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid RestoreEntity restore) { - return restoreEntity(uriInfo, securityContext, restore.getId()); - } - - private SearchIndex getSearchIndex(CreateSearchIndex create, String user) { - return copy(new SearchIndex(), create, user) - .withService(getEntityReference(Entity.SEARCH_SERVICE, create.getService())) - .withFields(create.getFields()) - .withSearchIndexSettings(create.getSearchIndexSettings()) - .withTags(create.getTags()); - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/connections/TestConnectionDefinitionResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/connections/TestConnectionDefinitionResource.java index c64f1fb2311b..283d7ab9586a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/connections/TestConnectionDefinitionResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/connections/TestConnectionDefinitionResource.java @@ -29,6 +29,7 @@ import lombok.extern.slf4j.Slf4j; import org.openmetadata.schema.entity.services.connections.TestConnectionDefinition; import org.openmetadata.schema.type.Include; +import org.openmetadata.service.Entity; import org.openmetadata.service.OpenMetadataApplicationConfig; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.jdbi3.ListFilter; @@ -36,6 +37,7 @@ import org.openmetadata.service.resources.Collection; import org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Slf4j @@ -51,6 +53,13 @@ public class TestConnectionDefinitionResource public 
static final String COLLECTION_PATH = "/v1/services/testConnectionDefinitions"; static final String FIELDS = "owner"; + @Override + public TestConnectionDefinition addHref(UriInfo uriInfo, TestConnectionDefinition testConnectionDefinition) { + testConnectionDefinition.withHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, testConnectionDefinition.getId())); + Entity.withHref(uriInfo, testConnectionDefinition.getOwner()); + return testConnectionDefinition; + } + public TestConnectionDefinitionResource(CollectionDAO dao, Authorizer authorizer) { super(TestConnectionDefinition.class, new TestConnectionDefinitionRepository(dao), authorizer); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/dashboard/DashboardServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/dashboard/DashboardServiceResource.java index 38c77c5c4089..36adccff905f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/dashboard/DashboardServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/dashboard/DashboardServiceResource.java @@ -13,6 +13,8 @@ package org.openmetadata.service.resources.services.dashboard; +import static org.openmetadata.service.Entity.FIELD_OWNER; + import io.swagger.v3.oas.annotations.ExternalDocumentation; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; @@ -55,6 +57,7 @@ import org.openmetadata.schema.type.EntityHistory; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.MetadataOperation; +import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.jdbi3.DashboardServiceRepository; import org.openmetadata.service.jdbi3.ListFilter; @@ -63,6 +66,7 @@ import org.openmetadata.service.security.Authorizer; import 
org.openmetadata.service.security.policyevaluator.OperationContext; import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/services/dashboardServices") @@ -73,9 +77,17 @@ public class DashboardServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/dashboardServices"; - static final String FIELDS = "owner,domain"; + static final String FIELDS = FIELD_OWNER; + + @Override + public DashboardService addHref(UriInfo uriInfo, DashboardService service) { + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); + Entity.withHref(uriInfo, service.getOwner()); + return service; + } public DashboardServiceResource(CollectionDAO dao, Authorizer authorizer) { + super(DashboardService.class, new DashboardServiceRepository(dao), authorizer, ServiceType.DASHBOARD); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java index 421d91f62072..038cbe462df8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/database/DatabaseServiceResource.java @@ -81,11 +81,12 @@ public class DatabaseServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/databaseServices/"; - static final String FIELDS = "pipelines,owner,tags,domain"; + static final String FIELDS = "pipelines,owner,tags"; @Override public DatabaseService addHref(UriInfo uriInfo, DatabaseService service) { - super.addHref(uriInfo, service); + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); + Entity.withHref(uriInfo, 
service.getOwner()); Entity.withHref(uriInfo, service.getPipelines()); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java index 2cdbf55b86bb..577ffd417621 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java @@ -112,7 +112,7 @@ public class IngestionPipelineResource extends EntityResource list( @QueryParam("fields") String fieldsParam, @Parameter( - description = "Filter airflow pipelines by service fully qualified name", + description = "Filter Ingestion Pipelines by service fully qualified name", schema = @Schema(type = "string", example = "snowflakeWestCoast")) @QueryParam("service") String serviceParam, @Parameter( - description = "Filter airflow pipelines by pipeline Type", + description = "Filter Ingestion Pipelines by test suite fully qualified name", + schema = @Schema(type = "string", example = "service.db.schema.name.testSuite")) + @QueryParam("testSuite") + String testSuiteParam, + @Parameter( + description = "Filter Ingestion Pipelines by pipeline Type", schema = @Schema(type = "string", example = "elasticSearchReindex")) @QueryParam("pipelineType") String pipelineType, @Parameter( - description = "Filter airflow pipelines by service Type", + description = "Filter Ingestion Pipelines by service Type", schema = @Schema(type = "string", example = "messagingService")) @QueryParam("serviceType") String serviceType, @@ -243,7 +248,8 @@ public ResultList list( new ListFilter(include) .addQueryParam("service", serviceParam) .addQueryParam("pipelineType", pipelineType) - .addQueryParam("serviceType", 
serviceType); + .addQueryParam("serviceType", serviceType) + .addQueryParam("testSuite", testSuiteParam); ResultList ingestionPipelines = super.listInternal(uriInfo, securityContext, fieldsParam, filter, limitParam, before, after); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/messaging/MessagingServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/messaging/MessagingServiceResource.java index 5b4bc2a355be..65979d6d1d06 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/messaging/MessagingServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/messaging/MessagingServiceResource.java @@ -13,6 +13,8 @@ package org.openmetadata.service.resources.services.messaging; +import static org.openmetadata.service.Entity.FIELD_OWNER; + import io.swagger.v3.oas.annotations.ExternalDocumentation; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; @@ -55,6 +57,7 @@ import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.MessagingConnection; import org.openmetadata.schema.type.MetadataOperation; +import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.jdbi3.ListFilter; import org.openmetadata.service.jdbi3.MessagingServiceRepository; @@ -63,6 +66,7 @@ import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/services/messagingServices") @@ -73,7 +77,14 @@ public class MessagingServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/messagingServices/"; - public static final String FIELDS = 
"owner,domain"; + public static final String FIELDS = FIELD_OWNER; + + @Override + public MessagingService addHref(UriInfo uriInfo, MessagingService service) { + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); + Entity.withHref(uriInfo, service.getOwner()); + return service; + } public MessagingServiceResource(CollectionDAO dao, Authorizer authorizer) { super(MessagingService.class, new MessagingServiceRepository(dao), authorizer, ServiceType.MESSAGING); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/metadata/MetadataServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/metadata/MetadataServiceResource.java index a007220d07db..b8a265049b09 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/metadata/MetadataServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/metadata/MetadataServiceResource.java @@ -116,8 +116,9 @@ private void registerMetadataServices(OpenMetadataApplicationConfig config) thro @Override public MetadataService addHref(UriInfo uriInfo, MetadataService service) { - super.addHref(uriInfo, service); + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); Entity.withHref(uriInfo, service.getOwner()); + Entity.withHref(uriInfo, service.getPipelines()); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/mlmodel/MlModelServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/mlmodel/MlModelServiceResource.java index b96775389136..1c3b3bc649e3 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/mlmodel/MlModelServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/mlmodel/MlModelServiceResource.java @@ -66,6 +66,7 @@ import 
org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/services/mlmodelServices") @@ -76,11 +77,12 @@ public class MlModelServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/mlmodelServices/"; - public static final String FIELDS = "pipelines,owner,tags,domain"; + public static final String FIELDS = "pipelines,owner,tags"; @Override public MlModelService addHref(UriInfo uriInfo, MlModelService service) { - super.addHref(uriInfo, service); + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); + Entity.withHref(uriInfo, service.getOwner()); Entity.withHref(uriInfo, service.getPipelines()); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/pipeline/PipelineServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/pipeline/PipelineServiceResource.java index 3e9c78a8ff4a..1bebcde7e7c1 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/pipeline/PipelineServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/pipeline/PipelineServiceResource.java @@ -64,6 +64,7 @@ import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.OperationContext; import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/services/pipelineServices") @@ -74,11 +75,12 @@ public class PipelineServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/pipelineServices/"; - static final String FIELDS = 
"pipelines,owner,domain"; + static final String FIELDS = "pipelines,owner"; @Override public PipelineService addHref(UriInfo uriInfo, PipelineService service) { - super.addHref(uriInfo, service); + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); + Entity.withHref(uriInfo, service.getOwner()); Entity.withHref(uriInfo, service.getPipelines()); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/searchIndexes/SearchServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/searchIndexes/SearchServiceResource.java deleted file mode 100644 index f9e591ade0a8..000000000000 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/searchIndexes/SearchServiceResource.java +++ /dev/null @@ -1,435 +0,0 @@ -package org.openmetadata.service.resources.services.searchIndexes; - -import io.swagger.v3.oas.annotations.ExternalDocumentation; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.media.ExampleObject; -import io.swagger.v3.oas.annotations.media.Schema; -import io.swagger.v3.oas.annotations.parameters.RequestBody; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; -import javax.json.JsonPatch; -import javax.validation.Valid; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.PATCH; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import 
javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.core.UriInfo; -import lombok.extern.slf4j.Slf4j; -import org.openmetadata.schema.api.data.RestoreEntity; -import org.openmetadata.schema.api.services.CreateSearchService; -import org.openmetadata.schema.entity.services.SearchService; -import org.openmetadata.schema.entity.services.ServiceType; -import org.openmetadata.schema.entity.services.connections.TestConnectionResult; -import org.openmetadata.schema.type.EntityHistory; -import org.openmetadata.schema.type.Include; -import org.openmetadata.schema.type.MetadataOperation; -import org.openmetadata.schema.type.SearchConnection; -import org.openmetadata.schema.utils.EntityInterfaceUtil; -import org.openmetadata.service.Entity; -import org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.jdbi3.ListFilter; -import org.openmetadata.service.jdbi3.SearchServiceRepository; -import org.openmetadata.service.resources.Collection; -import org.openmetadata.service.resources.services.ServiceEntityResource; -import org.openmetadata.service.security.Authorizer; -import org.openmetadata.service.security.policyevaluator.OperationContext; -import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.JsonUtils; -import org.openmetadata.service.util.RestUtil; -import org.openmetadata.service.util.ResultList; - -@Slf4j -@Path("/v1/services/searchServices") -@Tag( - name = "Search Services", - description = "APIs related `Search Service` entities, such as ElasticSearch, OpenSearch.") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -@Collection(name = "searchServices") -public class SearchServiceResource - extends ServiceEntityResource { - public static final String COLLECTION_PATH = "v1/services/searchServices/"; - static final String FIELDS = "pipelines,owner,tags,domain"; - - @Override - public 
SearchService addHref(UriInfo uriInfo, SearchService service) { - super.addHref(uriInfo, service); - Entity.withHref(uriInfo, service.getPipelines()); - return service; - } - - public SearchServiceResource(CollectionDAO dao, Authorizer authorizer) { - super(SearchService.class, new SearchServiceRepository(dao), authorizer, ServiceType.SEARCH); - } - - @Override - protected List getEntitySpecificOperations() { - addViewOperation("pipelines", MetadataOperation.VIEW_BASIC); - return null; - } - - public static class SearchServiceList extends ResultList { - /* Required for serde */ - } - - @GET - @Operation( - operationId = "listSearchServices", - summary = "List search services", - description = "Get a list of search services.", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of search service instances", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = SearchServiceResource.SearchServiceList.class))) - }) - public ResultList list( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @DefaultValue("10") @Min(0) @Max(1000000) @QueryParam("limit") int limitParam, - @Parameter(description = "Returns list of search services before this cursor", schema = @Schema(type = "string")) - @QueryParam("before") - String before, - @Parameter(description = "Returns list of search services after this cursor", schema = @Schema(type = "string")) - @QueryParam("after") - String after, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - RestUtil.validateCursors(before, after); - EntityUtil.Fields fields = getFields(fieldsParam); - ResultList searchServices; - - 
ListFilter filter = new ListFilter(include); - if (before != null) { - searchServices = repository.listBefore(uriInfo, fields, filter, limitParam, before); - } else { - searchServices = repository.listAfter(uriInfo, fields, filter, limitParam, after); - } - return addHref(uriInfo, decryptOrNullify(securityContext, searchServices)); - } - - @GET - @Path("/{id}") - @Operation( - operationId = "getSearchServiceByID", - summary = "Get an search service", - description = "Get an search service by `id`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "search service instance", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))), - @ApiResponse(responseCode = "404", description = "search service for instance {id} is not found") - }) - public SearchService get( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @PathParam("id") UUID id, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - SearchService searchService = getInternal(uriInfo, securityContext, id, fieldsParam, include); - return decryptOrNullify(securityContext, searchService); - } - - @GET - @Path("/name/{name}") - @Operation( - operationId = "getSearchServiceByFQN", - summary = "Get search service by name", - description = "Get a search service by the service `name`.", - responses = { - @ApiResponse( - responseCode = "200", - description = "search service instance", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))), - @ApiResponse(responseCode = "404", description = "search service for instance {id} is not found") - 
}) - public SearchService getByName( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @PathParam("name") String name, - @Parameter( - description = "Fields requested in the returned resource", - schema = @Schema(type = "string", example = FIELDS)) - @QueryParam("fields") - String fieldsParam, - @Parameter( - description = "Include all, deleted, or non-deleted entities.", - schema = @Schema(implementation = Include.class)) - @QueryParam("include") - @DefaultValue("non-deleted") - Include include) { - SearchService searchService = - getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include); - return decryptOrNullify(securityContext, searchService); - } - - @PUT - @Path("/{id}/testConnectionResult") - @Operation( - operationId = "addTestConnectionResult", - summary = "Add test connection result", - description = "Add test connection result to the service.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Successfully updated the service", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))) - }) - public SearchService addTestConnectionResult( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Id of the service", schema = @Schema(type = "UUID")) @PathParam("id") UUID id, - @Valid TestConnectionResult testConnectionResult) { - OperationContext operationContext = new OperationContext(entityType, MetadataOperation.CREATE); - authorizer.authorize(securityContext, operationContext, getResourceContextById(id)); - SearchService service = repository.addTestConnectionResult(id, testConnectionResult); - return decryptOrNullify(securityContext, service); - } - - @GET - @Path("/{id}/versions") - @Operation( - operationId = "listAllSearchServiceVersion", - summary = "List search service versions", - description = "Get a list of all the versions of an search service identified by 
`id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "List of search service versions", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = EntityHistory.class))) - }) - public EntityHistory listVersions( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "search service Id", schema = @Schema(type = "string")) @PathParam("id") UUID id) { - EntityHistory entityHistory = super.listVersionsInternal(securityContext, id); - - List versions = - entityHistory.getVersions().stream() - .map( - json -> { - try { - SearchService searchService = JsonUtils.readValue((String) json, SearchService.class); - return JsonUtils.pojoToJson(decryptOrNullify(securityContext, searchService)); - } catch (Exception e) { - return json; - } - }) - .collect(Collectors.toList()); - entityHistory.setVersions(versions); - return entityHistory; - } - - @GET - @Path("/{id}/versions/{version}") - @Operation( - operationId = "getSpecificSearchServiceVersion", - summary = "Get a version of the search service", - description = "Get a version of the search service by given `id`", - responses = { - @ApiResponse( - responseCode = "200", - description = "search service", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))), - @ApiResponse( - responseCode = "404", - description = "Object store service for instance {id} and version " + "{version} is not found") - }) - public SearchService getVersion( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "search service Id", schema = @Schema(type = "string")) @PathParam("id") UUID id, - @Parameter( - description = "search service version number in the form `major`" + ".`minor`", - schema = @Schema(type = "string", example = "0.1 or 1.1")) - @PathParam("version") - String version) { - SearchService searchService = 
super.getVersionInternal(securityContext, id, version); - return decryptOrNullify(securityContext, searchService); - } - - @POST - @Operation( - operationId = "createSearchService", - summary = "Create search service", - description = "Create a new search service.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Search service instance", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response create( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateSearchService create) { - SearchService service = getService(create, securityContext.getUserPrincipal().getName()); - Response response = create(uriInfo, securityContext, service); - decryptOrNullify(securityContext, (SearchService) response.getEntity()); - return response; - } - - @PUT - @Operation( - operationId = "createOrUpdateSearchService", - summary = "Update search service", - description = "Update an existing or create a new search service.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Object store service instance", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))), - @ApiResponse(responseCode = "400", description = "Bad request") - }) - public Response createOrUpdate( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid CreateSearchService update) { - SearchService service = getService(update, securityContext.getUserPrincipal().getName()); - Response response = createOrUpdate(uriInfo, securityContext, unmask(service)); - decryptOrNullify(securityContext, (SearchService) response.getEntity()); - return response; - } - - @PATCH - @Path("/{id}") - @Operation( - operationId = "patchSearchService", - summary = "Update an search service", - description = "Update an existing search service using 
JsonPatch.", - externalDocs = @ExternalDocumentation(description = "JsonPatch RFC", url = "https://tools.ietf.org/html/rfc6902")) - @Consumes(MediaType.APPLICATION_JSON_PATCH_JSON) - public Response patch( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @PathParam("id") UUID id, - @RequestBody( - description = "JsonPatch with array of operations", - content = - @Content( - mediaType = MediaType.APPLICATION_JSON_PATCH_JSON, - examples = { - @ExampleObject("[" + "{op:remove, path:/a}," + "{op:add, path: /b, value: val}" + "]") - })) - JsonPatch patch) { - return patchInternal(uriInfo, securityContext, id, patch); - } - - @DELETE - @Path("/{id}") - @Operation( - operationId = "deleteSearchService", - summary = "Delete an search service", - description = "Delete an search services. If containers belong the service, it can't be " + "deleted.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "SearchService service for instance {id} " + "is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Recursively delete this entity and it's children. (Default `false`)") - @DefaultValue("false") - @QueryParam("recursive") - boolean recursive, - @Parameter(description = "Hard delete the entity. 
(Default = `false`)") - @QueryParam("hardDelete") - @DefaultValue("false") - boolean hardDelete, - @Parameter(description = "Id of the search service", schema = @Schema(type = "string")) @PathParam("id") - UUID id) { - return delete(uriInfo, securityContext, id, recursive, hardDelete); - } - - @DELETE - @Path("/name/{fqn}") - @Operation( - operationId = "deleteSearchServiceByFQN", - summary = "Delete an SearchService by fully qualified name", - description = "Delete an SearchService by `fullyQualifiedName`.", - responses = { - @ApiResponse(responseCode = "200", description = "OK"), - @ApiResponse(responseCode = "404", description = "SearchService for instance {fqn} is not found") - }) - public Response delete( - @Context UriInfo uriInfo, - @Context SecurityContext securityContext, - @Parameter(description = "Hard delete the entity. (Default = `false`)") - @QueryParam("hardDelete") - @DefaultValue("false") - boolean hardDelete, - @Parameter(description = "Name of the SearchService", schema = @Schema(type = "string")) @PathParam("fqn") - String fqn) { - return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(fqn), false, hardDelete); - } - - @PUT - @Path("/restore") - @Operation( - operationId = "restore", - summary = "Restore a soft deleted SearchService.", - description = "Restore a soft deleted SearchService.", - responses = { - @ApiResponse( - responseCode = "200", - description = "Successfully restored the SearchService.", - content = @Content(mediaType = "application/json", schema = @Schema(implementation = SearchService.class))) - }) - public Response restoreSearchService( - @Context UriInfo uriInfo, @Context SecurityContext securityContext, @Valid RestoreEntity restore) { - return restoreEntity(uriInfo, securityContext, restore.getId()); - } - - private SearchService getService(CreateSearchService create, String user) { - return copy(new SearchService(), create, user) - .withServiceType(create.getServiceType()) - 
.withConnection(create.getConnection()); - } - - @Override - protected SearchService nullifyConnection(SearchService service) { - return service.withConnection(null); - } - - @Override - protected String extractServiceType(SearchService service) { - return service.getServiceType().value(); - } -} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/storage/StorageServiceResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/storage/StorageServiceResource.java index 458f2496eeca..41d232d22a5a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/storage/StorageServiceResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/storage/StorageServiceResource.java @@ -67,12 +67,13 @@ public class StorageServiceResource extends ServiceEntityResource { public static final String COLLECTION_PATH = "v1/services/storageServices/"; - static final String FIELDS = "pipelines,owner,tags,domain"; + static final String FIELDS = "pipelines,owner,tags"; @Override public StorageService addHref(UriInfo uriInfo, StorageService service) { - super.addHref(uriInfo, service); + service.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, service.getId())); Entity.withHref(uriInfo, service.getOwner()); + Entity.withHref(uriInfo, service.getPipelines()); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/storages/ContainerResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/storages/ContainerResource.java index eb9df698fda2..e13c290d85bb 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/storages/ContainerResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/storages/ContainerResource.java @@ -45,6 +45,7 @@ import org.openmetadata.service.resources.Collection; import 
org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; +import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.ResultList; @Path("/v1/containers") @@ -59,13 +60,16 @@ @Collection(name = "containers") public class ContainerResource extends EntityResource { public static final String COLLECTION_PATH = "v1/containers/"; - static final String FIELDS = "parent,children,dataModel,owner,tags,followers,extension,domain"; + static final String FIELDS = "parent,children,dataModel,owner,tags,followers,extension"; @Override public Container addHref(UriInfo uriInfo, Container container) { - super.addHref(uriInfo, container); + container.setHref(RestUtil.getHref(uriInfo, COLLECTION_PATH, container.getId())); + Entity.withHref(uriInfo, container.getOwner()); Entity.withHref(uriInfo, container.getService()); Entity.withHref(uriInfo, container.getParent()); + Entity.withHref(uriInfo, container.getChildren()); + Entity.withHref(uriInfo, container.getFollowers()); return container; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java index f684daa27e6f..7848e8b414e6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java @@ -366,6 +366,11 @@ public Response restore( return restoreEntity(uriInfo, securityContext, restore.getId()); } + @Override + public Classification addHref(UriInfo uriInfo, Classification category) { + return category; + } + public static Classification getClassification(CreateClassification create, SecurityContext securityContext) { return getClassification(create, securityContext.getUserPrincipal().getName()); } diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagResource.java index 610232d70f01..6f5f4a028704 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagResource.java @@ -487,9 +487,9 @@ public Response restore( @Override public Tag addHref(UriInfo uriInfo, Tag tag) { - super.addHref(uriInfo, tag); Entity.withHref(uriInfo, tag.getClassification()); Entity.withHref(uriInfo, tag.getParent()); + Entity.withHref(uriInfo, tag.getChildren()); return tag; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/RoleResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/RoleResource.java index 33bc9907e17e..c6fa10c94269 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/RoleResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/RoleResource.java @@ -83,7 +83,6 @@ public class RoleResource extends EntityResource { @Override public Role addHref(UriInfo uriInfo, Role role) { - super.addHref(uriInfo, role); Entity.withHref(uriInfo, role.getPolicies()); Entity.withHref(uriInfo, role.getTeams()); Entity.withHref(uriInfo, role.getUsers()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java index 9b845d8edbfb..f857cb260c20 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java @@ -85,15 +85,16 @@ public class TeamResource extends EntityResource { public static final String 
COLLECTION_PATH = "/v1/teams/"; static final String FIELDS = - "owner,profile,users,owns,defaultRoles,parents,children,policies,userCount,childrenCount,domain"; + "owner,profile,users,owns,defaultRoles,parents,children,policies,userCount,childrenCount"; @Override public Team addHref(UriInfo uriInfo, Team team) { - super.addHref(uriInfo, team); + Entity.withHref(uriInfo, team.getOwner()); Entity.withHref(uriInfo, team.getUsers()); Entity.withHref(uriInfo, team.getDefaultRoles()); Entity.withHref(uriInfo, team.getOwns()); Entity.withHref(uriInfo, team.getParents()); + Entity.withHref(uriInfo, team.getChildren()); Entity.withHref(uriInfo, team.getPolicies()); return team; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java index 2eea4619ef68..a240c23e0488 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java @@ -159,11 +159,10 @@ public class UserResource extends EntityResource { private boolean isEmailServiceEnabled; private AuthenticationConfiguration authenticationConfiguration; private final AuthenticatorHandler authHandler; - static final String FIELDS = "profile,roles,teams,follows,owns,domain"; + static final String FIELDS = "profile,roles,teams,follows,owns"; @Override public User addHref(UriInfo uriInfo, User user) { - super.addHref(uriInfo, user); Entity.withHref(uriInfo, user.getTeams()); Entity.withHref(uriInfo, user.getRoles()); Entity.withHref(uriInfo, user.getInheritedRoles()); @@ -948,8 +947,8 @@ public Response changeUserPassword( @Path("/checkEmailInUse") @Operation( operationId = "checkEmailInUse", - summary = "Check if a mail is already in use", - description = "Check if a mail is already in use", + summary = "Check if a email is already in use", + 
description = "Check if a email is already in use", responses = { @ApiResponse( responseCode = "200", diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/topics/TopicResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/topics/TopicResource.java index dfdc0111dc2a..53e440a04194 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/topics/TopicResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/topics/TopicResource.java @@ -76,12 +76,13 @@ @Collection(name = "topics") public class TopicResource extends EntityResource { public static final String COLLECTION_PATH = "v1/topics/"; - static final String FIELDS = "owner,followers,tags,extension,domain,dataProducts"; + static final String FIELDS = "owner,followers,tags,extension"; @Override public Topic addHref(UriInfo uriInfo, Topic topic) { - super.addHref(uriInfo, topic); + Entity.withHref(uriInfo, topic.getOwner()); Entity.withHref(uriInfo, topic.getService()); + Entity.withHref(uriInfo, topic.getFollowers()); return topic; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticSearch/ElasticSearchClientImpl.java b/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticSearch/ElasticSearchClientImpl.java index d556e7168ee7..425f11513ef0 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticSearch/ElasticSearchClientImpl.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticSearch/ElasticSearchClientImpl.java @@ -1691,7 +1691,7 @@ private static AggregationBuilder buildQueryAggregation( DateHistogramAggregationBuilder dateHistogramAggregationBuilder = AggregationBuilders.dateHistogram(DataInsightChartRepository.TIMESTAMP) .field(DataInsightChartRepository.TIMESTAMP) - .calendarInterval(DateHistogramInterval.minutes(1)); + .calendarInterval(DateHistogramInterval.DAY); 
TermsAggregationBuilder termsAggregationBuilder; SumAggregationBuilder sumAggregationBuilder; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/JwtFilter.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/JwtFilter.java index e1bb3c288d01..27606503c19b 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/security/JwtFilter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/JwtFilter.java @@ -120,7 +120,7 @@ public void filter(ContainerRequestContext requestContext) { LOG.debug("Token from header:{}", tokenFromHeader); // the case where OMD generated the Token for the Client - if (AuthProvider.BASIC.equals(providerType) || AuthProvider.SAML.toString().equals(providerType)) { + if (AuthProvider.BASIC.equals(providerType) || AuthProvider.SAML.equals(providerType)) { validateTokenIsNotUsedAfterLogout(tokenFromHeader); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LoginAttemptCache.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LoginAttemptCache.java index f5e3b88e42b5..f6cf7b1280c6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LoginAttemptCache.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LoginAttemptCache.java @@ -6,6 +6,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import lombok.NonNull; +import org.jetbrains.annotations.NotNull; import org.openmetadata.schema.api.configuration.LoginConfiguration; import org.openmetadata.service.OpenMetadataApplicationConfig; @@ -26,7 +27,7 @@ public LoginAttemptCache(OpenMetadataApplicationConfig config) { .expireAfterWrite(accessBlockTime, TimeUnit.SECONDS) .build( new CacheLoader<>() { - public Integer load(@NonNull String key) { + public @NotNull Integer load(@NonNull String username) { return 0; } }); @@ -40,38 +41,38 @@ 
public LoginAttemptCache(int maxAttempt, int blockTimeInSec) { .expireAfterWrite(blockTimeInSec, TimeUnit.SECONDS) .build( new CacheLoader<>() { - public Integer load(@NonNull String key) { + public @NotNull Integer load(@NonNull String username) { return 0; } }); } - public void recordSuccessfulLogin(String key) { - attemptsCache.invalidate(key); + public void recordSuccessfulLogin(String username) { + attemptsCache.invalidate(username.toLowerCase()); } - public void recordFailedLogin(String key) { + public void recordFailedLogin(String username) { int attempts; try { - attempts = attemptsCache.get(key); + attempts = attemptsCache.get(username.toLowerCase()); } catch (ExecutionException e) { attempts = 0; } attempts++; - attemptsCache.put(key, attempts); + attemptsCache.put(username, attempts); } - public boolean isLoginBlocked(String key) { + public boolean isLoginBlocked(String username) { try { - return attemptsCache.get(key) >= maxAttempt; + return attemptsCache.get(username.toLowerCase()) >= maxAttempt; } catch (ExecutionException e) { return false; } } - public int getUserFailedLoginCount(String key) { + public int getUserFailedLoginCount(String username) { try { - return attemptsCache.get(key); + return attemptsCache.get(username.toLowerCase()); } catch (ExecutionException e) { return -1; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/mask/PIIMasker.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/mask/PIIMasker.java index b2a9c9a70049..59f15d7e34be 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/security/mask/PIIMasker.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/mask/PIIMasker.java @@ -13,7 +13,6 @@ import java.util.stream.IntStream; import javax.ws.rs.core.SecurityContext; import org.openmetadata.schema.entity.data.Query; -import org.openmetadata.schema.entity.data.SearchIndex; import org.openmetadata.schema.entity.data.Table; 
import org.openmetadata.schema.entity.data.Topic; import org.openmetadata.schema.tests.TestCase; @@ -22,7 +21,6 @@ import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.TableData; import org.openmetadata.schema.type.TagLabel; -import org.openmetadata.schema.type.searchindex.SearchIndexSampleData; import org.openmetadata.schema.type.topic.TopicSampleData; import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.ColumnUtil; @@ -101,22 +99,6 @@ public static Topic getSampleData(Topic topic) { return topic; } - public static SearchIndex getSampleData(SearchIndex searchIndex) { - SearchIndexSampleData sampleData = searchIndex.getSampleData(); - - // If we don't have sample data, there's nothing to do - if (sampleData == null) { - return searchIndex; - } - - if (hasPiiSensitiveTag(searchIndex)) { - sampleData.setMessages(List.of(MASKED_VALUE)); - searchIndex.setSampleData(sampleData); - } - - return searchIndex; - } - public static Table getTableProfile(Table table) { for (Column column : table.getColumns()) { if (hasPiiSensitiveTag(column)) { @@ -208,10 +190,6 @@ private static boolean hasPiiSensitiveTag(Table table) { return table.getTags().stream().map(TagLabel::getTagFQN).anyMatch(SENSITIVE_PII_TAG::equals); } - private static boolean hasPiiSensitiveTag(SearchIndex searchIndex) { - return searchIndex.getTags().stream().map(TagLabel::getTagFQN).anyMatch(SENSITIVE_PII_TAG::equals); - } - /* Check if the Topic is flagged as PII or any of its fields */ diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/ResourceContext.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/ResourceContext.java index d3d63920b934..904624ef9d83 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/ResourceContext.java +++ 
b/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/ResourceContext.java @@ -83,9 +83,6 @@ private EntityInterface resolveEntity() { if (entityRepository.isSupportsTags()) { fields = EntityUtil.addField(fields, Entity.FIELD_TAGS); } - if (entityRepository.isSupportsDomain()) { - fields = EntityUtil.addField(fields, Entity.FIELD_DOMAIN); - } Fields fieldList = entityRepository.getFields(fields); try { if (id != null) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java index 933c48285153..ddfc35a26420 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java @@ -41,7 +41,6 @@ import org.openmetadata.schema.api.data.TermReference; import org.openmetadata.schema.entity.classification.Tag; import org.openmetadata.schema.entity.data.GlossaryTerm; -import org.openmetadata.schema.entity.data.SearchIndex; import org.openmetadata.schema.entity.data.Table; import org.openmetadata.schema.entity.data.Topic; import org.openmetadata.schema.entity.policies.accessControl.Rule; @@ -127,10 +126,6 @@ public final class EntityUtil { (field1, field2) -> field1.getName().equalsIgnoreCase(field2.getName()) && field1.getDataType() == field2.getDataType(); - public static final BiPredicate searchIndexFieldMatch = - (field1, field2) -> - field1.getName().equalsIgnoreCase(field2.getName()) && field1.getDataType() == field2.getDataType(); - private EntityUtil() {} /** Validate that JSON payload can be turned into POJO object */ @@ -347,15 +342,6 @@ public static String getSchemaField(Topic topic, Field field, String fieldName) ? 
FullyQualifiedName.build("schemaFields", localFieldName) : FullyQualifiedName.build("schemaFields", localFieldName, fieldName); } - /** Return searchIndex field name of format "fields".fieldName.fieldName */ - public static String getSearchIndexField(SearchIndex searchIndex, SearchIndexField field, String fieldName) { - // Remove topic FQN from schemaField FQN to get the local name - String localFieldName = - EntityUtil.getLocalColumnName(searchIndex.getFullyQualifiedName(), field.getFullyQualifiedName()); - return fieldName == null - ? FullyQualifiedName.build("fields", localFieldName) - : FullyQualifiedName.build("fields", localFieldName, fieldName); - } /** Return rule field name of format "rules".ruleName.ruleFieldName */ public static String getRuleField(Rule rule, String ruleField) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java index 1777518f50a2..8a000ab3a379 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java @@ -42,7 +42,7 @@ public PaginatedDataInsightSource(CollectionDAO dao, String entityType, int batc this.dao = dao; this.entityType = entityType; this.batchSize = batchSize; - stats.setTotalRecords(dao.entityExtensionTimeSeriesDao().listCount(entityType)); + stats.setTotalRecords(dao.reportDataTimeSeriesDao().listCount(entityType)); } @Override @@ -93,9 +93,9 @@ private ResultList read(String afterCursor) throws SourceException { public ResultList getReportDataPagination(String entityFQN, int limit, String after) { // workaround. 
Should be fixed in https://github.com/open-metadata/OpenMetadata/issues/12298 String upperCaseFQN = StringUtils.capitalize(entityFQN); - int reportDataCount = dao.entityExtensionTimeSeriesDao().listCount(upperCaseFQN); + int reportDataCount = dao.reportDataTimeSeriesDao().listCount(upperCaseFQN); List reportDataList = - dao.entityExtensionTimeSeriesDao() + dao.reportDataTimeSeriesDao() .getAfterExtension(upperCaseFQN, limit + 1, after == null ? "0" : RestUtil.decodeCursor(after)); return getAfterExtensionList(reportDataList, after, limit, reportDataCount); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/EnumBackwardCompatibilityTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/EnumBackwardCompatibilityTest.java index a5124bc73738..eb79ba7c685b 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/EnumBackwardCompatibilityTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/EnumBackwardCompatibilityTest.java @@ -34,9 +34,7 @@ class EnumBackwardCompatibilityTest { /** */ @Test void testRelationshipEnumBackwardCompatible() { - assertEquals(20, Relationship.values().length); - assertEquals(19, Relationship.EXPERT.ordinal()); - assertEquals(18, Relationship.VOTED.ordinal()); + assertEquals(19, Relationship.values().length); assertEquals(17, Relationship.REACTED_TO.ordinal()); assertEquals(16, Relationship.REVIEWS.ordinal()); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/EntityResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/EntityResourceTest.java index 7d35b7af719e..aa1f87ca9651 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/EntityResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/EntityResourceTest.java @@ -26,13 +26,11 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static 
org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.openmetadata.common.utils.CommonUtil.listOf; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import static org.openmetadata.csv.EntityCsvTest.assertSummary; import static org.openmetadata.schema.type.MetadataOperation.EDIT_ALL; import static org.openmetadata.schema.type.MetadataOperation.EDIT_TESTS; -import static org.openmetadata.schema.type.TaskType.RequestDescription; import static org.openmetadata.service.Entity.ADMIN_USER_NAME; import static org.openmetadata.service.Entity.FIELD_DELETED; import static org.openmetadata.service.Entity.FIELD_EXTENSION; @@ -69,6 +67,7 @@ import static org.openmetadata.service.util.TestUtils.validateEntityReference; import static org.openmetadata.service.util.TestUtils.validateEntityReferences; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -130,7 +129,6 @@ import org.openmetadata.schema.EntityInterface; import org.openmetadata.schema.api.data.RestoreEntity; import org.openmetadata.schema.api.data.TermReference; -import org.openmetadata.schema.api.feed.CreateThread; import org.openmetadata.schema.api.teams.CreateTeam; import org.openmetadata.schema.api.teams.CreateTeam.TeamType; import org.openmetadata.schema.api.tests.CreateTestSuite; @@ -144,9 +142,6 @@ import org.openmetadata.schema.entity.data.Glossary; import org.openmetadata.schema.entity.data.GlossaryTerm; import org.openmetadata.schema.entity.data.Table; -import org.openmetadata.schema.entity.domains.DataProduct; -import org.openmetadata.schema.entity.domains.Domain; -import org.openmetadata.schema.entity.feed.Thread; import org.openmetadata.schema.entity.policies.Policy; import 
org.openmetadata.schema.entity.policies.accessControl.Rule; import org.openmetadata.schema.entity.services.connections.TestConnectionResult; @@ -159,7 +154,6 @@ import org.openmetadata.schema.entity.type.CustomProperty; import org.openmetadata.schema.tests.TestDefinition; import org.openmetadata.schema.tests.TestSuite; -import org.openmetadata.schema.type.AnnouncementDetails; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.ChangeEvent; import org.openmetadata.schema.type.Column; @@ -178,14 +172,11 @@ import org.openmetadata.service.exception.CatalogExceptionMessage; import org.openmetadata.service.resources.bots.BotResourceTest; import org.openmetadata.service.resources.databases.TableResourceTest; -import org.openmetadata.service.resources.domains.DataProductResourceTest; -import org.openmetadata.service.resources.domains.DomainResourceTest; import org.openmetadata.service.resources.dqtests.TestCaseResourceTest; import org.openmetadata.service.resources.dqtests.TestDefinitionResourceTest; import org.openmetadata.service.resources.dqtests.TestSuiteResourceTest; import org.openmetadata.service.resources.events.EventResource.EventList; import org.openmetadata.service.resources.events.EventSubscriptionResourceTest; -import org.openmetadata.service.resources.feeds.FeedResourceTest; import org.openmetadata.service.resources.glossary.GlossaryResourceTest; import org.openmetadata.service.resources.kpi.KpiResourceTest; import org.openmetadata.service.resources.metadata.TypeResourceTest; @@ -197,7 +188,6 @@ import org.openmetadata.service.resources.services.MetadataServiceResourceTest; import org.openmetadata.service.resources.services.MlModelServiceResourceTest; import org.openmetadata.service.resources.services.PipelineServiceResourceTest; -import org.openmetadata.service.resources.services.SearchServiceResourceTest; import org.openmetadata.service.resources.services.StorageServiceResourceTest; import 
org.openmetadata.service.resources.tags.TagResourceTest; import org.openmetadata.service.resources.teams.RoleResourceTest; @@ -227,9 +217,9 @@ public abstract class EntityResourceTest allowedFields = Entity.getEntityFields(entityClass); - this.supportsEmptyDescription = !EntityUtil.isDescriptionRequired(entityClass); this.supportsFollowers = allowedFields.contains(FIELD_FOLLOWERS); this.supportsOwner = allowedFields.contains(FIELD_OWNER); this.supportsTags = allowedFields.contains(FIELD_TAGS); this.supportsSoftDelete = allowedFields.contains(FIELD_DELETED); this.supportsCustomExtension = allowedFields.contains(FIELD_EXTENSION); + this.supportsEmptyDescription = !EntityUtil.isDescriptionRequired(entityClass); this.systemEntityName = systemEntityName; } @@ -406,8 +389,6 @@ public void setup(TestInfo test) throws URISyntaxException, IOException { new RoleResourceTest().setupRoles(test); new TeamResourceTest().setupTeams(test); new UserResourceTest().setupUsers(test); - new DomainResourceTest().setupDomains(test); - new DataProductResourceTest().setupDataProducts(test); new TagResourceTest().setupTags(); new GlossaryResourceTest().setupGlossaries(); @@ -418,7 +399,6 @@ public void setup(TestInfo test) throws URISyntaxException, IOException { new DashboardServiceResourceTest().setupDashboardServices(test); new MlModelServiceResourceTest().setupMlModelServices(test); new StorageServiceResourceTest().setupStorageService(test); - new SearchServiceResourceTest().setupSearchService(test); new MetadataServiceResourceTest().setupMetadataServices(); new TableResourceTest().setupDatabaseSchemas(test); new TestSuiteResourceTest().setupTestSuites(test); @@ -474,14 +454,14 @@ public final K createRequest(String name, String description, String displayName public abstract K createRequest(String name); // Get container entity used in createRequest that has CONTAINS relationship to the entity created with this - // request has. For table, it is database. 
For database, it is databaseService. See Relationship.CONTAINS for + // request has . For table, it is database. For database, it is databaseService. See Relationship.CONTAINS for // details. public EntityReference getContainer() { return null; } // Get container entity based on create request that has CONTAINS relationship to the entity created with this - // request has. For table, it is database. For database, it is databaseService. See Relationship.CONTAINS for + // request has . For table, it is database. For database, it is databaseService. See Relationship.CONTAINS for // details. public EntityReference getContainer(T e) { return null; @@ -600,7 +580,7 @@ void get_entityListWithPagination_200(TestInfo test) throws IOException { int totalRecords = allEntities.getData().size(); printEntities(allEntities); - // List entity with "limit" set from 1 to maxEntities size with random jumps (to reduce the test time) + // List entity with "limit" set from 1 to maxTables size with random jumps (to reduce the test time) // Each time compare the returned list with allTables list to make sure right results are returned for (int limit = 1; limit < maxEntities; limit += random.nextInt(5) + 1) { String after = null; @@ -611,13 +591,7 @@ void get_entityListWithPagination_200(TestInfo test) throws IOException { ResultList backwardPage; boolean foundDeleted = false; do { // For each limit (or page size) - forward scroll till the end - LOG.debug( - "Limit {} forward pageCount {} indexInAllTables {} totalRecords {} afterCursor {}", - limit, - pageCount, - indexInAllTables, - totalRecords, - after); + LOG.debug("Limit {} forward scrollCount {} afterCursor {}", limit, pageCount, after); forwardPage = listEntities(queryParams, limit, null, after, ADMIN_AUTH_HEADERS); foundDeleted = forwardPage.getData().stream().anyMatch(matchDeleted) || foundDeleted; after = forwardPage.getPaging().getAfter(); @@ -649,13 +623,7 @@ void get_entityListWithPagination_200(TestInfo test) throws 
IOException { indexInAllTables = totalRecords - limit - forwardPage.getData().size(); foundDeleted = false; do { - LOG.debug( - "Limit {} backward pageCount {} indexInAllTables {} totalRecords {} afterCursor {}", - limit, - pageCount, - indexInAllTables, - totalRecords, - after); + LOG.debug("Limit {} backward scrollCount {} beforeCursor {}", limit, pageCount, before); forwardPage = listEntities(queryParams, limit, before, null, ADMIN_AUTH_HEADERS); foundDeleted = forwardPage.getData().stream().anyMatch(matchDeleted) || foundDeleted; printEntities(forwardPage); @@ -1049,7 +1017,7 @@ void put_entityCreate_as_owner_200(TestInfo test) throws IOException { K request = createRequest(getEntityName(test), "", null, USER1_REF); T entity = createAndCheckEntity(request, ADMIN_AUTH_HEADERS); - // Update the entity as USER1 + // Update the entity as USER_OWNER1 request.withDescription("newDescription"); ChangeDescription change = getChangeDescription(entity.getVersion()); fieldUpdated(change, "description", "", "newDescription"); @@ -1771,7 +1739,7 @@ protected void updateDescriptionAndCheckInSearch(TestInfo test) throws IOExcepti @Test protected void deleteTagAndCheckRelationshipsInSearch(TestInfo test) - throws HttpResponseException, InterruptedException { + throws HttpResponseException, JsonProcessingException, InterruptedException { if (supportsTags && supportsSearchIndex && RUN_ELASTIC_SEARCH_TESTCASES) { // create an entity T entity = createEntity(createRequest(test), ADMIN_AUTH_HEADERS); @@ -1846,48 +1814,6 @@ private static SearchResponse getResponseFormSearch(String indexName) throws Htt return response; } - @Test - @Execution(ExecutionMode.CONCURRENT) - void test_cleanupConversations(TestInfo test) throws HttpResponseException { - if (!Entity.supportsFeed(entityType)) { - return; - } - K request = createRequest(getEntityName(test), "", "", null); - T entity = createEntity(request, ADMIN_AUTH_HEADERS); - - // Add a conversation thread for the entity - FeedResourceTest 
feedTest = new FeedResourceTest(); - String about = String.format("<#E::%s::%s>", entityType, entity.getFullyQualifiedName()); - CreateThread createThread = new CreateThread().withFrom(USER1.getName()).withMessage("message").withAbout(about); - Thread thread = feedTest.createAndCheck(createThread, ADMIN_AUTH_HEADERS); - - // Add task thread for the entity from user1 to user2 - Thread taskThread = - feedTest.createTaskThread( - USER1.getName(), - about, - USER2.getEntityReference(), - "old", - "new", - RequestDescription, - authHeaders(USER1.getName())); - - // Add announcement thread for the entity from user1 to user2 - AnnouncementDetails announcementDetails = feedTest.getAnnouncementDetails("Announcement", 10, 11); - Thread announcementThread = - feedTest.createAnnouncement( - USER1.getName(), about, "message", announcementDetails, authHeaders(USER1.getName())); - - // When the entity is deleted, all the threads also should be deleted - deleteEntity(entity.getId(), true, true, ADMIN_AUTH_HEADERS); - for (UUID id : listOf(thread.getId(), taskThread.getId(), announcementThread.getId())) { - assertResponseContains( - () -> feedTest.getThread(id, ADMIN_AUTH_HEADERS), - NOT_FOUND, - CatalogExceptionMessage.entityNotFound("Thread", id)); - } - } - /////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Common entity functionality for tests /////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -2737,56 +2663,4 @@ protected CsvDocumentation getCsvDocumentation() throws HttpResponseException { WebTarget target = getCollection().path("/documentation/csv"); return TestUtils.get(target, CsvDocumentation.class, ADMIN_AUTH_HEADERS); } - - public T assertOwnerInheritance(K createRequest, EntityReference expectedOwner) throws HttpResponseException { - // Create entity with no owner and ensure it inherits owner from the parent - 
createRequest.withOwner(null); - T entity = createEntity(createRequest, ADMIN_AUTH_HEADERS); - assertReference(expectedOwner, entity.getOwner()); // Inherited owner - entity = getEntity(entity.getId(), "owner", ADMIN_AUTH_HEADERS); - assertReference(expectedOwner, entity.getOwner()); // Inherited owner - entity = getEntityByName(entity.getFullyQualifiedName(), "owner", ADMIN_AUTH_HEADERS); - assertReference(expectedOwner, entity.getOwner()); // Inherited owner - return entity; - } - - public void assertOwnershipInheritanceOverride(T entity, K updateRequest, EntityReference newOwner) - throws HttpResponseException { - // When an entity has ownership set, it does not inherit owner from the parent - String json = JsonUtils.pojoToJson(entity); - entity.setOwner(newOwner); - entity = patchEntity(entity.getId(), json, entity, ADMIN_AUTH_HEADERS); - assertReference(newOwner, entity.getOwner()); - entity = updateEntity(updateRequest.withOwner(null), OK, ADMIN_AUTH_HEADERS); // Simulate ingestion update - assertReference(newOwner, entity.getOwner()); // Owner remains the same - entity = getEntity(entity.getId(), "owner", ADMIN_AUTH_HEADERS); - assertReference(newOwner, entity.getOwner()); // Owner remains the same - entity = getEntityByName(entity.getFullyQualifiedName(), "owner", ADMIN_AUTH_HEADERS); - assertReference(newOwner, entity.getOwner()); // Owner remains the same - } - - public T assertDomainInheritance(K createRequest, EntityReference expectedDomain) throws HttpResponseException { - T entity = createEntity(createRequest.withDomain(null), ADMIN_AUTH_HEADERS); - assertReference(expectedDomain, entity.getDomain()); // Inherited owner - entity = getEntity(entity.getId(), "domain", ADMIN_AUTH_HEADERS); - assertReference(expectedDomain, entity.getDomain()); // Inherited owner - entity = getEntityByName(entity.getFullyQualifiedName(), "domain", ADMIN_AUTH_HEADERS); - assertReference(expectedDomain, entity.getDomain()); // Inherited owner - return entity; - } - - public 
void assertDomainInheritanceOverride(T entity, K updateRequest, EntityReference newDomain) - throws HttpResponseException { - // When an entity has domain set, it does not inherit domain from the parent - String json = JsonUtils.pojoToJson(entity); - entity.setDomain(newDomain); - entity = patchEntity(entity.getId(), json, entity, ADMIN_AUTH_HEADERS); - assertReference(newDomain, entity.getDomain()); - entity = updateEntity(updateRequest.withDomain(null), OK, ADMIN_AUTH_HEADERS); // Simulate ingestion update - assertReference(newDomain, entity.getDomain()); // Domain remains the same - entity = getEntity(entity.getId(), "domain", ADMIN_AUTH_HEADERS); - assertReference(newDomain, entity.getDomain()); // Domain remains the same - entity = getEntityByName(entity.getFullyQualifiedName(), "domain", ADMIN_AUTH_HEADERS); - assertReference(newDomain, entity.getDomain()); // Domain remains the same - } } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/analytics/ReportDataResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/analytics/ReportDataResourceTest.java index 1739fb7e5efe..f06c4cdd62c8 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/analytics/ReportDataResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/analytics/ReportDataResourceTest.java @@ -1,6 +1,8 @@ package org.openmetadata.service.resources.analytics; import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.openmetadata.service.exception.CatalogExceptionMessage.permissionNotAllowed; import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; import static org.openmetadata.service.util.TestUtils.INGESTION_BOT_AUTH_HEADERS; @@ -8,8 +10,11 @@ import static 
org.openmetadata.service.util.TestUtils.TEST_USER_NAME; import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Map; +import java.util.UUID; import javax.ws.rs.client.WebTarget; import org.apache.http.client.HttpResponseException; import org.junit.jupiter.api.Test; @@ -17,13 +22,14 @@ import org.junit.jupiter.api.parallel.ExecutionMode; import org.openmetadata.schema.analytics.EntityReportData; import org.openmetadata.schema.analytics.ReportData; +import org.openmetadata.schema.analytics.WebAnalyticUserActivityReportData; import org.openmetadata.schema.type.MetadataOperation; import org.openmetadata.service.OpenMetadataApplicationTest; import org.openmetadata.service.resources.analytics.ReportDataResource.ReportDataResultList; import org.openmetadata.service.util.ResultList; import org.openmetadata.service.util.TestUtils; -public class ReportDataResourceTest extends OpenMetadataApplicationTest { +class ReportDataResourceTest extends OpenMetadataApplicationTest { private final String collectionName = "analytics/dataInsights/data"; @@ -47,6 +53,8 @@ void report_data_admin_200() throws HttpResponseException, ParseException { ResultList reportDataList = getReportData("2022-10-10", "2022-10-12", ReportData.ReportDataType.ENTITY_REPORT_DATA, ADMIN_AUTH_HEADERS); + + assertNotEquals(0, reportDataList.getData().size()); } @Test @@ -92,6 +100,70 @@ void report_data_bot_200() throws HttpResponseException, ParseException { ResultList reportDataList = getReportData( "2022-10-10", "2022-10-12", ReportData.ReportDataType.ENTITY_REPORT_DATA, INGESTION_BOT_AUTH_HEADERS); + + assertNotEquals(0, reportDataList.getData().size()); + } + + @Test + void delete_endpoint_200() throws HttpResponseException, ParseException { + List createReportDataList = new ArrayList<>(); + + // create some entity report data + EntityReportData entityReportData = + new EntityReportData() + .withEntityType("table") + 
.withEntityTier("Tier.Tier1") + .withCompletedDescriptions(1) + .withEntityCount(11); + ReportData reportData1 = + new ReportData() + .withTimestamp(new Date(122, 9, 15, 10, 10, 10).getTime()) + .withReportDataType(ReportData.ReportDataType.ENTITY_REPORT_DATA) + .withData(entityReportData); + + // create some web analytic user activity report data + WebAnalyticUserActivityReportData webAnalyticUserActivityReportData = + new WebAnalyticUserActivityReportData() + .withUserId(UUID.randomUUID()) + .withUserName("testUser") + .withLastSession(TestUtils.dateToTimestamp("2022-10-13")); + ReportData reportData2 = + new ReportData() + .withTimestamp(new Date(122, 9, 15, 10, 10, 10).getTime()) + .withReportDataType(ReportData.ReportDataType.WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA) + .withData(webAnalyticUserActivityReportData); + + createReportDataList.add(reportData1); + createReportDataList.add(reportData2); + + for (ReportData reportData : createReportDataList) { + postReportData(reportData, INGESTION_BOT_AUTH_HEADERS); + } + + // check we have our data + ResultList entityReportDataList = + getReportData("2022-10-15", "2022-10-16", ReportData.ReportDataType.ENTITY_REPORT_DATA, ADMIN_AUTH_HEADERS); + ResultList webAnalyticsReportDataList = + getReportData( + "2022-10-15", + "2022-10-16", + ReportData.ReportDataType.WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA, + ADMIN_AUTH_HEADERS); + assertNotEquals(0, entityReportDataList.getData().size()); + assertNotEquals(0, webAnalyticsReportDataList.getData().size()); + + // delete the entity report data and check that it has been deleted + deleteReportData(ReportData.ReportDataType.ENTITY_REPORT_DATA.value(), "2022-10-15", ADMIN_AUTH_HEADERS); + entityReportDataList = + getReportData("2022-10-15", "2022-10-16", ReportData.ReportDataType.ENTITY_REPORT_DATA, ADMIN_AUTH_HEADERS); + assertEquals(0, entityReportDataList.getData().size()); + webAnalyticsReportDataList = + getReportData( + "2022-10-15", + "2022-10-16", + 
ReportData.ReportDataType.WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA, + ADMIN_AUTH_HEADERS); + assertNotEquals(0, webAnalyticsReportDataList.getData().size()); } public void postReportData(ReportData reportData, Map authHeader) throws HttpResponseException { @@ -108,4 +180,11 @@ public ResultList getReportData( target = target.queryParam("reportDataType", reportDataType); return TestUtils.get(target, ReportDataResultList.class, authHeader); } + + private void deleteReportData(String reportDataType, String date, Map authHeader) + throws HttpResponseException { + String path = String.format("/%s/%s", reportDataType, date); + WebTarget target = getResource(collectionName).path(path); + TestUtils.delete(target, authHeader); + } } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/bots/BotResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/bots/BotResourceTest.java index 9de137a1e7c2..676fbdbb219c 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/bots/BotResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/bots/BotResourceTest.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.Objects; import org.apache.http.client.HttpResponseException; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; @@ -25,6 +26,7 @@ import org.openmetadata.service.resources.bots.BotResource.BotList; import org.openmetadata.service.resources.teams.UserResourceTest; import org.openmetadata.service.util.ResultList; +import org.openmetadata.service.util.TestUtils; public class BotResourceTest extends EntityResourceTest { public static User botUser; @@ -113,7 +115,14 @@ public CreateBot createRequest(String name) { @Override public void validateCreatedEntity(Bot entity, CreateBot request, Map authHeaders) { - assertReference(request.getBotUser(), 
entity.getBotUser()); + if (request.getBotUser() != null) { + assertNotNull(entity.getBotUser()); + TestUtils.validateEntityReference(entity.getBotUser()); + Assertions.assertEquals( + request.getBotUser().toLowerCase(), entity.getBotUser().getFullyQualifiedName().toLowerCase()); + } else { + Assertions.assertNull(entity.getBotUser()); + } } @Override diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/charts/ChartResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/charts/ChartResourceTest.java index 5b8b19b6241e..79d4a1b07e5b 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/charts/ChartResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/charts/ChartResourceTest.java @@ -34,16 +34,13 @@ import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.openmetadata.schema.api.data.CreateChart; -import org.openmetadata.schema.api.services.CreateDashboardService; import org.openmetadata.schema.entity.data.Chart; -import org.openmetadata.schema.entity.services.DashboardService; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.ChartType; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.EntityResourceTest; import org.openmetadata.service.resources.charts.ChartResource.ChartList; -import org.openmetadata.service.resources.services.DashboardServiceResourceTest; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; import org.openmetadata.service.util.TestUtils.UpdateType; @@ -164,18 +161,6 @@ void patch_chart_200(TestInfo test) throws IOException { patchEntityAndCheck(chart, originalJson, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // 
When domain is not set for a dashboard service, carry it forward from the chart - DashboardServiceResourceTest serviceTest = new DashboardServiceResourceTest(); - CreateDashboardService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - DashboardService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a chart without domain and ensure it inherits domain from the parent - CreateChart create = createRequest("chart").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - @Override @Execution(ExecutionMode.CONCURRENT) public Chart validateGetWithDifferentFields(Chart chart, boolean byName) throws HttpResponseException { diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dashboards/DashboardResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dashboards/DashboardResourceTest.java index 4ca426bd6d0e..a7216d2ecb83 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dashboards/DashboardResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dashboards/DashboardResourceTest.java @@ -40,15 +40,12 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.openmetadata.schema.api.data.CreateDashboard; -import org.openmetadata.schema.api.services.CreateDashboardService; import org.openmetadata.schema.entity.data.Dashboard; -import org.openmetadata.schema.entity.services.DashboardService; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.EntityResourceTest; import org.openmetadata.service.resources.dashboards.DashboardResource.DashboardList; -import org.openmetadata.service.resources.services.DashboardServiceResourceTest; import 
org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; @@ -144,18 +141,6 @@ void put_AddRemoveDashboardChartsUpdate_200(TestInfo test) throws IOException { updateAndCheckEntity(request.withCharts(CHART_REFERENCES), OK, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a Dashboard service, carry it forward from the dashbaord - DashboardServiceResourceTest serviceTest = new DashboardServiceResourceTest(); - CreateDashboardService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - DashboardService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a dashboard without domain and ensure it inherits domain from the parent - CreateDashboard create = createRequest("dashboard").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - @Override public Dashboard validateGetWithDifferentFields(Dashboard dashboard, boolean byName) throws HttpResponseException { String fields = ""; diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java index 86890bc052ea..0a08061f9040 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/TableResourceTest.java @@ -1710,49 +1710,16 @@ void test_ownershipInheritance(TestInfo test) throws HttpResponseException { DatabaseResourceTest dbTest = new DatabaseResourceTest(); Database db = dbTest.createEntity(dbTest.createRequest(test).withOwner(USER1_REF), ADMIN_AUTH_HEADERS); - // Ensure databaseSchema owner is inherited from 
database DatabaseSchemaResourceTest schemaTest = new DatabaseSchemaResourceTest(); - CreateDatabaseSchema createSchema = schemaTest.createRequest(test).withDatabase(db.getFullyQualifiedName()); - DatabaseSchema schema = schemaTest.assertOwnerInheritance(createSchema, USER1_REF); - - // Ensure table owner is inherited from databaseSchema - CreateTable createTable = createRequest(test).withDatabaseSchema(schema.getFullyQualifiedName()); - Table table = assertOwnerInheritance(createTable, USER1_REF); - - // Change the ownership of table and ensure further ingestion updates don't overwrite the ownership - assertOwnershipInheritanceOverride(table, createTable.withOwner(null), USER2_REF); - - // Change the ownership of schema and ensure further ingestion updates don't overwrite the ownership - schemaTest.assertOwnershipInheritanceOverride(schema, createSchema.withOwner(null), USER2_REF); - } - - @Test - void test_domainInheritance(TestInfo test) throws HttpResponseException { - // Domain is inherited from databaseService > database > databaseSchema > table - DatabaseServiceResourceTest dbServiceTest = new DatabaseServiceResourceTest(); - DatabaseService dbService = - dbServiceTest.createEntity( - dbServiceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()), ADMIN_AUTH_HEADERS); - - // Ensure database domain is inherited from database service - DatabaseResourceTest dbTest = new DatabaseResourceTest(); - CreateDatabase createDb = dbTest.createRequest(test).withService(dbService.getFullyQualifiedName()); - Database db = dbTest.assertDomainInheritance(createDb, DOMAIN.getEntityReference()); - - // Ensure databaseSchema domain is inherited from database - DatabaseSchemaResourceTest schemaTest = new DatabaseSchemaResourceTest(); - CreateDatabaseSchema createSchema = schemaTest.createRequest(test).withDatabase(db.getFullyQualifiedName()); - DatabaseSchema schema = schemaTest.assertDomainInheritance(createSchema, DOMAIN.getEntityReference()); - - // Ensure table 
domain is inherited from databaseSchema - CreateTable createTable = createRequest(test).withDatabaseSchema(schema.getFullyQualifiedName()); - Table table = assertDomainInheritance(createTable, DOMAIN.getEntityReference()); - - // Change the domain of table and ensure further ingestion updates don't overwrite the domain - assertDomainInheritanceOverride(table, createTable.withDomain(null), SUB_DOMAIN.getEntityReference()); + CreateDatabaseSchema createSchema = + schemaTest.createRequest(test).withDatabase(db.getFullyQualifiedName()).withOwner(null); + DatabaseSchema schema = schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS); + assertEquals(USER1_REF, schema.getOwner()); // Ensure databaseSchema owner is inherited from database - // Change the ownership of schema and ensure further ingestion updates don't overwrite the ownership - schemaTest.assertDomainInheritanceOverride(schema, createSchema.withDomain(null), SUB_DOMAIN.getEntityReference()); + Table table = + createEntity( + createRequest(test).withOwner(null).withDatabaseSchema(schema.getFullyQualifiedName()), ADMIN_AUTH_HEADERS); + assertEquals(USER1_REF, table.getOwner()); // Ensure table owner is inherited from databaseSchema } @Test diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DataProductResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DataProductResourceTest.java deleted file mode 100644 index 08493635f4cb..000000000000 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DataProductResourceTest.java +++ /dev/null @@ -1,195 +0,0 @@ -package org.openmetadata.service.resources.domains; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.openmetadata.common.utils.CommonUtil.listOf; -import static org.openmetadata.service.Entity.FIELD_ASSETS; -import static org.openmetadata.service.util.EntityUtil.fieldAdded; -import static 
org.openmetadata.service.util.EntityUtil.fieldDeleted; -import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; -import static org.openmetadata.service.util.TestUtils.assertEntityReferenceNames; -import static org.openmetadata.service.util.TestUtils.assertListNull; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import javax.ws.rs.core.Response.Status; -import org.apache.http.client.HttpResponseException; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; -import org.openmetadata.schema.EntityInterface; -import org.openmetadata.schema.api.domains.CreateDataProduct; -import org.openmetadata.schema.entity.data.Topic; -import org.openmetadata.schema.entity.domains.DataProduct; -import org.openmetadata.schema.type.ChangeDescription; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.service.Entity; -import org.openmetadata.service.resources.EntityResourceTest; -import org.openmetadata.service.resources.databases.TableResourceTest; -import org.openmetadata.service.resources.domains.DataProductResource.DataProductList; -import org.openmetadata.service.resources.topics.TopicResourceTest; -import org.openmetadata.service.util.JsonUtils; -import org.openmetadata.service.util.TestUtils; -import org.openmetadata.service.util.TestUtils.UpdateType; - -public class DataProductResourceTest extends EntityResourceTest { - public DataProductResourceTest() { - super(Entity.DATA_PRODUCT, DataProduct.class, DataProductList.class, "dataProducts", DataProductResource.FIELDS); - } - - public void setupDataProducts(TestInfo test) throws HttpResponseException { - DOMAIN_DATA_PRODUCT = createEntity(createRequest(getEntityName(test)), ADMIN_AUTH_HEADERS); - SUB_DOMAIN_DATA_PRODUCT = - createEntity( - createRequest(getEntityName(test, 1)).withDomain(SUB_DOMAIN.getFullyQualifiedName()), ADMIN_AUTH_HEADERS); - } - - @Test - void testDataProductAssets(TestInfo test) throws IOException { - // 
Create Data product with Table1 as the asset - CreateDataProduct create = createRequest(getEntityName(test)).withAssets(List.of(TEST_TABLE1.getEntityReference())); - DataProduct product = createAndCheckEntity(create, ADMIN_AUTH_HEADERS); - entityInDataProduct(TEST_TABLE1, product, true); // Table1 is part of data product - - TopicResourceTest topicTest = new TopicResourceTest(); - Topic topic = topicTest.createEntity(topicTest.createRequest(getEntityName(test)), ADMIN_AUTH_HEADERS); - - // Add asset topic with PUT - create.withAssets(List.of(TEST_TABLE1.getEntityReference(), topic.getEntityReference())); - ChangeDescription change = getChangeDescription(product.getVersion()); - fieldAdded(change, FIELD_ASSETS, listOf(topic.getEntityReference())); - product = updateAndCheckEntity(create, Status.OK, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - entityInDataProduct(topic, product, true); // topic is part of data product - - // Remove asset topic with PUT - create.withAssets(List.of(TEST_TABLE1.getEntityReference())); - change = getChangeDescription(product.getVersion()); - fieldDeleted(change, FIELD_ASSETS, listOf(topic.getEntityReference())); - product = updateAndCheckEntity(create, Status.OK, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - entityInDataProduct(topic, product, false); // topic is not part of data product - - // Add topic asset with PATCH - String json = JsonUtils.pojoToJson(product); - product.withAssets(List.of(TEST_TABLE1.getEntityReference(), topic.getEntityReference())); - change = getChangeDescription(product.getVersion()); - fieldAdded(change, FIELD_ASSETS, listOf(topic.getEntityReference())); - product = patchEntityAndCheck(product, json, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - entityInDataProduct(topic, product, true); // topic is part of data product - - // Remove asset topic with PATCH - json = JsonUtils.pojoToJson(product); - product.withAssets(List.of(TEST_TABLE1.getEntityReference())); - change = 
getChangeDescription(product.getVersion()); - fieldDeleted(change, FIELD_ASSETS, listOf(topic.getEntityReference())); - patchEntityAndCheck(product, json, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - entityInDataProduct(topic, product, false); // topic is not part of data product - } - - @Test - void testDataProductExperts(TestInfo test) throws IOException { - CreateDataProduct create = createRequest(getEntityName(test)).withExperts(listOf(USER1.getFullyQualifiedName())); - DataProduct product = createAndCheckEntity(create, ADMIN_AUTH_HEADERS); - - // Add User2 as expert using PUT - create.withExperts(List.of(USER1.getFullyQualifiedName(), USER2.getFullyQualifiedName())); - ChangeDescription change = getChangeDescription(product.getVersion()); - fieldAdded(change, "experts", listOf(USER2.getEntityReference())); - product = updateAndCheckEntity(create, Status.OK, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - - // Remove User2 as expert using PUT - create.withExperts(List.of(USER1.getFullyQualifiedName())); - change = getChangeDescription(product.getVersion()); - fieldDeleted(change, "experts", listOf(USER2.getEntityReference())); - product = updateAndCheckEntity(create, Status.OK, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - - // Add User2 as expert using PATCH - String json = JsonUtils.pojoToJson(product); - product.withExperts(List.of(USER1.getEntityReference(), USER2.getEntityReference())); - change = getChangeDescription(product.getVersion()); - fieldAdded(change, "experts", listOf(USER2.getEntityReference())); - product = patchEntityAndCheck(product, json, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - - // Remove User2 as expert using PATCH - json = JsonUtils.pojoToJson(product); - product.withExperts(List.of(USER1.getEntityReference())); - change = getChangeDescription(product.getVersion()); - fieldDeleted(change, "experts", listOf(USER2.getEntityReference())); - patchEntityAndCheck(product, json, ADMIN_AUTH_HEADERS, 
UpdateType.MINOR_UPDATE, change); - } - - private void entityInDataProduct(EntityInterface entity, EntityInterface product, boolean inDataProduct) - throws HttpResponseException { - // Only table or topic is expected to assets currently in the tests - EntityResourceTest test = - entity.getEntityReference().getType().equals(Entity.TABLE) ? new TableResourceTest() : new TopicResourceTest(); - entity = test.getEntity(entity.getId(), "dataProducts", ADMIN_AUTH_HEADERS); - TestUtils.existsInEntityReferenceList(entity.getDataProducts(), product.getId(), inDataProduct); - } - - @Override - public CreateDataProduct createRequest(String name) { - return new CreateDataProduct() - .withName(name) - .withDescription(name) - .withDomain(DOMAIN.getFullyQualifiedName()) - .withExperts(listOf(USER1.getFullyQualifiedName())) - .withAssets(TEST_TABLE1 != null ? listOf(TEST_TABLE1.getEntityReference()) : null); - } - - @Override - public void validateCreatedEntity( - DataProduct createdEntity, CreateDataProduct request, Map authHeaders) { - // Entity specific validation - assertEquals(request.getDomain(), createdEntity.getDomain().getFullyQualifiedName()); - assertEntityReferenceNames(request.getExperts(), createdEntity.getExperts()); - assertEntityReferences(request.getAssets(), createdEntity.getAssets()); - } - - @Override - public void compareEntities(DataProduct expected, DataProduct updated, Map authHeaders) { - // Entity specific validation - assertReference(expected.getDomain(), updated.getDomain()); - assertEntityReferences(expected.getExperts(), updated.getExperts()); - assertEntityReferences(expected.getAssets(), updated.getAssets()); - } - - @Override - public DataProduct validateGetWithDifferentFields(DataProduct dataProduct, boolean byName) - throws HttpResponseException { - DataProduct getDataProduct = - byName - ? 
getEntityByName(dataProduct.getFullyQualifiedName(), null, ADMIN_AUTH_HEADERS) - : getEntity(dataProduct.getId(), null, ADMIN_AUTH_HEADERS); - assertListNull(getDataProduct.getOwner(), getDataProduct.getExperts()); - String fields = "owner,domain,experts,assets"; - getDataProduct = - byName - ? getEntityByName(getDataProduct.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getEntity(getDataProduct.getId(), fields, ADMIN_AUTH_HEADERS); - // Fields requested are received - assertReference(dataProduct.getDomain(), getDataProduct.getDomain()); - assertEntityReferences(dataProduct.getExperts(), getDataProduct.getExperts()); - assertEntityReferences(dataProduct.getAssets(), getDataProduct.getAssets()); - - // Checks for other owner, tags, and followers is done in the base class - return getDataProduct; - } - - @Override - public void assertFieldChange(String fieldName, Object expected, Object actual) throws IOException { - if (expected == actual) { - return; - } - if (fieldName.startsWith("domain")) { - EntityReference expectedRef = (EntityReference) expected; - EntityReference actualRef = JsonUtils.readValue(actual.toString(), EntityReference.class); - assertEquals(expectedRef.getId(), actualRef.getId()); - } else if (fieldName.startsWith("experts") || fieldName.startsWith("assets")) { - @SuppressWarnings("unchecked") - List expectedRefs = (List) expected; - List actualRefs = JsonUtils.readObjects(actual.toString(), EntityReference.class); - assertEntityReferences(expectedRefs, actualRefs); - } else { - assertCommonFieldChange(fieldName, expected, actual); - } - } -} diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DomainResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DomainResourceTest.java deleted file mode 100644 index 88c9dc13d3f9..000000000000 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/domains/DomainResourceTest.java +++ /dev/null @@ 
-1,101 +0,0 @@ -package org.openmetadata.service.resources.domains; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.openmetadata.common.utils.CommonUtil.listOf; -import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; -import static org.openmetadata.service.util.TestUtils.assertEntityReferenceNames; -import static org.openmetadata.service.util.TestUtils.assertListNotNull; -import static org.openmetadata.service.util.TestUtils.assertListNull; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import org.apache.http.client.HttpResponseException; -import org.junit.jupiter.api.TestInfo; -import org.openmetadata.schema.api.domains.CreateDomain; -import org.openmetadata.schema.api.domains.CreateDomain.DomainType; -import org.openmetadata.schema.entity.domains.Domain; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.service.Entity; -import org.openmetadata.service.resources.EntityResourceTest; -import org.openmetadata.service.resources.domains.DomainResource.DomainList; -import org.openmetadata.service.util.JsonUtils; - -public class DomainResourceTest extends EntityResourceTest { - public DomainResourceTest() { - super(Entity.DOMAIN, Domain.class, DomainList.class, "domains", DomainResource.FIELDS); - } - - public void setupDomains(TestInfo test) throws IOException { - DOMAIN = createEntity(createRequest(test), ADMIN_AUTH_HEADERS); - SUB_DOMAIN = - createEntity(createRequest("sub-domain").withParent(DOMAIN.getFullyQualifiedName()), ADMIN_AUTH_HEADERS); - } - - @Override - public CreateDomain createRequest(String name) { - return new CreateDomain() - .withName(name) - .withDomainType(DomainType.AGGREGATE) - .withDescription("name") - .withExperts(listOf(USER1.getFullyQualifiedName())); - } - - @Override - public void validateCreatedEntity(Domain createdEntity, CreateDomain request, Map authHeaders) { - // Entity specific validation - 
assertEquals(request.getDomainType(), createdEntity.getDomainType()); - assertReference(request.getParent(), createdEntity.getParent()); - assertEntityReferenceNames(request.getExperts(), createdEntity.getExperts()); - } - - @Override - public void compareEntities(Domain expected, Domain updated, Map authHeaders) { - // Entity specific validation - assertEquals(expected.getDomainType(), updated.getDomainType()); - assertReference(expected.getParent(), updated.getParent()); - assertEntityReferences(expected.getExperts(), updated.getExperts()); - } - - @Override - public Domain validateGetWithDifferentFields(Domain domain, boolean byName) throws HttpResponseException { - Domain getDomain = - byName - ? getEntityByName(domain.getFullyQualifiedName(), null, ADMIN_AUTH_HEADERS) - : getEntity(domain.getId(), null, ADMIN_AUTH_HEADERS); - assertListNotNull(getDomain.getDomainType()); - assertListNull(getDomain.getParent(), getDomain.getChildren(), getDomain.getOwner(), getDomain.getExperts()); - String fields = "children,owner,parent,experts"; - getDomain = - byName - ? 
getEntityByName(getDomain.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getEntity(getDomain.getId(), fields, ADMIN_AUTH_HEADERS); - assertListNotNull(getDomain.getDomainType()); - // Fields requested are received - assertEquals(domain.getParent(), getDomain.getParent()); - assertEntityReferences(domain.getChildren(), getDomain.getChildren()); - assertEntityReferences(domain.getExperts(), getDomain.getExperts()); - - // Checks for other owner, tags, and followers is done in the base class - return getDomain; - } - - @Override - public void assertFieldChange(String fieldName, Object expected, Object actual) throws IOException { - if (expected == actual) { - return; - } - if (fieldName.startsWith("parent")) { - EntityReference expectedRef = (EntityReference) expected; - EntityReference actualRef = JsonUtils.readValue(actual.toString(), EntityReference.class); - assertEquals(expectedRef.getId(), actualRef.getId()); - } else if (fieldName.startsWith("experts")) { - @SuppressWarnings("unchecked") - List expectedRefs = (List) expected; - List actualRefs = JsonUtils.readObjects(actual.toString(), EntityReference.class); - assertEntityReferences(expectedRefs, actualRefs); - } else { - assertCommonFieldChange(fieldName, expected, actual); - } - } -} diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java index eaff5a5b046d..05fe6389af9c 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java @@ -253,7 +253,7 @@ void put_testCaseResults_200(TestInfo test) throws IOException, ParseException { ADMIN_AUTH_HEADERS); verifyTestCaseResults(testCaseResults, List.of(testCaseResult), 1); - // Add new date for TableCaseResult + // Add new data 
for TableCaseResult TestCaseResult newTestCaseResult = new TestCaseResult() .withResult("tested") @@ -300,7 +300,7 @@ void put_testCaseResults_200(TestInfo test) throws IOException, ParseException { ADMIN_AUTH_HEADERS); verifyTestCaseResults(testCaseResults, testCaseResultList, 12); - // create another table and add profiles + // create another table and add test results TestCase testCase1 = createAndCheckEntity(createRequest(test, 1), ADMIN_AUTH_HEADERS); List testCase1ResultList = new ArrayList<>(); dateStr = "2021-10-"; @@ -342,6 +342,106 @@ void put_testCaseResults_200(TestInfo test) throws IOException, ParseException { () -> getTestSummary(ADMIN_AUTH_HEADERS, randomUUID), NOT_FOUND, "testSuite instance for " + randomUUID + " not found"); + + // Test that we can get the test summary for a logical test suite and that + // adding a logical test suite does not change the total number of tests + TestSuiteResourceTest testSuiteResourceTest = new TestSuiteResourceTest(); + CreateTestSuite createLogicalTestSuite = testSuiteResourceTest.createRequest(test); + TestSuite logicalTestSuite = testSuiteResourceTest.createEntity(createLogicalTestSuite, ADMIN_AUTH_HEADERS); + List testCaseIds = new ArrayList<>(); + testCaseIds.add(testCase1.getId()); + testSuiteResourceTest.addTestCasesToLogicalTestSuite(logicalTestSuite, testCaseIds); + + testSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals(1, testSummary.getTotal()); + assertEquals(1, testSummary.getFailed()); + + // add a new test case to the logical test suite to validate if the + // summary is updated correctly + testCaseIds.removeAll(testCaseIds); + testCaseIds.add(testCase.getId()); + testSuiteResourceTest.addTestCasesToLogicalTestSuite(logicalTestSuite, testCaseIds); + + testSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals(2, testSummary.getTotal()); + + // remove test case from logical test suite and validate + // the 
summary is updated as expected + deleteLogicalTestCase(logicalTestSuite, testCase.getId()); + + testSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals(1, testSummary.getTotal()); + } + + @Test + void test_resultSummaryCascadeToAllSuites(TestInfo test) throws IOException, ParseException { + TestCase testCase = createAndCheckEntity(createRequest(test, 1), ADMIN_AUTH_HEADERS); + TestCase testCase1 = createAndCheckEntity(createRequest(test, 2), ADMIN_AUTH_HEADERS); + + TestCaseResult testCaseResult; + + String dateStr = "2021-10-"; + for (int i = 11; i <= 15; i++) { + testCaseResult = + new TestCaseResult() + .withResult("result") + .withTestCaseStatus(TestCaseStatus.Failed) + .withTimestamp(TestUtils.dateToTimestamp(dateStr + i)); + putTestCaseResult(testCase.getFullyQualifiedName(), testCaseResult, ADMIN_AUTH_HEADERS); + } + + for (int i = 11; i <= 20; i++) { + testCaseResult = + new TestCaseResult() + .withResult("result") + .withTestCaseStatus(TestCaseStatus.Success) + .withTimestamp(TestUtils.dateToTimestamp(dateStr + i)); + putTestCaseResult(testCase1.getFullyQualifiedName(), testCaseResult, ADMIN_AUTH_HEADERS); + } + + TestSuiteResourceTest testSuiteResourceTest = new TestSuiteResourceTest(); + CreateTestSuite createLogicalTestSuite = testSuiteResourceTest.createRequest(test); + TestSuite logicalTestSuite = testSuiteResourceTest.createEntity(createLogicalTestSuite, ADMIN_AUTH_HEADERS); + List testCaseIds = new ArrayList<>(); + testCaseIds.add(testCase1.getId()); + testSuiteResourceTest.addTestCasesToLogicalTestSuite(logicalTestSuite, testCaseIds); + + // test we get the right summary for the executable test suite + TestSummary executableTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, testCase.getTestSuite().getId().toString()); + assertEquals(2, executableTestSummary.getTotal()); + + // test we get the right summary for the logical test suite + TestSummary logicalTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, 
logicalTestSuite.getId().toString()); + assertEquals(1, logicalTestSummary.getTotal()); + testCaseIds.removeAll(testCaseIds); + testCaseIds.add(testCase.getId()); + testSuiteResourceTest.addTestCasesToLogicalTestSuite(logicalTestSuite, testCaseIds); + logicalTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals( + 2, + logicalTestSummary + .getTotal()); // we added a new test case to the logical test suite check if the summary is updated + + deleteEntity(testCase1.getId(), ADMIN_AUTH_HEADERS); + + executableTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, testCase.getTestSuite().getId().toString()); + assertEquals( + 1, + executableTestSummary + .getTotal()); // we deleted a test case from the executable test suite check if the summary is updated + + logicalTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals( + 1, + logicalTestSummary + .getTotal()); // check the deletion of the test case from the executable test suite cascaded to the logical + // test suite + deleteLogicalTestCase(logicalTestSuite, testCase.getId()); + logicalTestSummary = getTestSummary(ADMIN_AUTH_HEADERS, logicalTestSuite.getId().toString()); + assertEquals( + null, + logicalTestSummary + .getTotal()); // check the deletion of the test case from the logical test suite is reflected in the summary } @Test diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/EventSubscriptionResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/EventSubscriptionResourceTest.java index 39813d90af4e..0eabb8b0d3ba 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/EventSubscriptionResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/EventSubscriptionResourceTest.java @@ -58,6 +58,7 @@ public EventSubscriptionResourceTest() { 
EventSubscriptionResource.FIELDS); supportedNameCharacters = supportedNameCharacters.replace(" ", ""); // Space not supported supportsFieldsQueryParam = false; + supportsEmptyDescription = true; } @Test diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/WebhookCallbackResource.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/WebhookCallbackResource.java index 87720638ff71..9daaf07e3aef 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/WebhookCallbackResource.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/WebhookCallbackResource.java @@ -130,7 +130,7 @@ public Response receiveEntityEvents( } else { list.addAll(events.getData()); } - LOG.debug("callback /{}/{} received event. Current count {}", eventType, entityType, list.size()); + LOG.info("callback /{}/{} received event. Current count {}", eventType, entityType, list.size()); return Response.ok().build(); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java index 7599bd67b354..1197d7f45fdd 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java @@ -89,11 +89,12 @@ public GlossaryResourceTest() { } public void setupGlossaries() throws IOException { - CreateGlossary createGlossary = createRequest("g1", "", "", null); - GLOSSARY1 = createEntity(createGlossary, ADMIN_AUTH_HEADERS); + GlossaryResourceTest glossaryResourceTest = new GlossaryResourceTest(); + CreateGlossary createGlossary = glossaryResourceTest.createRequest("g1", "", "", null); + GLOSSARY1 = glossaryResourceTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS); 
- createGlossary = createRequest("g2", "", "", null); - GLOSSARY2 = createEntity(createGlossary, ADMIN_AUTH_HEADERS); + createGlossary = glossaryResourceTest.createRequest("g2", "", "", null); + GLOSSARY2 = glossaryResourceTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS); GlossaryTermResourceTest glossaryTermResourceTest = new GlossaryTermResourceTest(); CreateGlossaryTerm createGlossaryTerm = @@ -538,4 +539,11 @@ private void assertTagLabelsChanged(Table table, String previousTermFqn, String assertTagPrefixAbsent(table.getColumns().get(0).getTags(), previousTermFqn); } } + + private static String quoteName(String name) { + if (name != null && !name.contains("\"")) { + return name.contains(".") ? "\\\"" + name + "\\\"" : name; + } + return name; + } } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java index 727442697807..1fdd603f904e 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java @@ -171,36 +171,28 @@ void test_inheritGlossaryReviewerAndOwner(TestInfo test) throws IOException { // Glossary glossary = createGlossary(test, listOf(USER1_REF), USER2_REF); - // Create term t1 in the glossary without reviewers and owner + // Create terms t1 and a term t12 under t1 in the glossary without reviewers and owner CreateGlossaryTerm create = - new CreateGlossaryTerm().withName("t1").withGlossary(glossary.getFullyQualifiedName()).withDescription("desc"); - GlossaryTerm t1 = assertOwnerInheritance(create, USER2_REF); + new CreateGlossaryTerm().withGlossary(glossary.getFullyQualifiedName()).withDescription("description"); + GlossaryTerm t1 = createEntity(create.withName("t1"), ADMIN_AUTH_HEADERS); + 
assertEntityReferences(glossary.getReviewers(), t1.getReviewers()); // Reviewers are inherited + assertReference(glossary.getOwner(), t1.getOwner()); // Owner is inherited t1 = getEntity(t1.getId(), "reviewers,owner", ADMIN_AUTH_HEADERS); assertEntityReferences(glossary.getReviewers(), t1.getReviewers()); // Reviewers are inherited - - // Create term t12 under t1 without reviewers and owner - create = - create.withName("t12").withGlossary(glossary.getFullyQualifiedName()).withParent(t1.getFullyQualifiedName()); - GlossaryTerm t12 = assertOwnerInheritance(create, USER2_REF); + assertReference(glossary.getOwner(), t1.getOwner()); // Owner is inherited + + GlossaryTerm t12 = + createEntity( + create + .withName("t12") + .withGlossary(glossary.getFullyQualifiedName()) + .withParent(t1.getFullyQualifiedName()), + ADMIN_AUTH_HEADERS); + assertEntityReferences(glossary.getReviewers(), t12.getReviewers()); // Reviewers are inherited + assertReference(glossary.getOwner(), t12.getOwner()); // Owner is inherited t12 = getEntity(t12.getId(), "reviewers,owner", ADMIN_AUTH_HEADERS); assertEntityReferences(glossary.getReviewers(), t12.getReviewers()); // Reviewers are inherited - } - - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a glossary term, carry it forward from the glossary - CreateGlossary createGlossary = glossaryResourceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - Glossary glossary = glossaryResourceTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS); - - // Create term t1 in the glossary without domain - CreateGlossaryTerm create = - new CreateGlossaryTerm().withName("t1").withGlossary(glossary.getFullyQualifiedName()).withDescription("desc"); - GlossaryTerm t1 = assertDomainInheritance(create, DOMAIN.getEntityReference()); - - // Create terms t12 under t1 without reviewers and owner - create = - 
create.withName("t12").withGlossary(glossary.getFullyQualifiedName()).withParent(t1.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); + assertReference(glossary.getOwner(), t12.getOwner()); // Owner is inherited } @Test diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/mlmodels/MlModelResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/mlmodels/MlModelResourceTest.java index 148f0b234d09..c266df6231dc 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/mlmodels/MlModelResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/mlmodels/MlModelResourceTest.java @@ -47,11 +47,9 @@ import org.junit.jupiter.api.TestMethodOrder; import org.openmetadata.schema.api.data.CreateMlModel; import org.openmetadata.schema.api.data.CreateTable; -import org.openmetadata.schema.api.services.CreateMlModelService; import org.openmetadata.schema.entity.data.Dashboard; import org.openmetadata.schema.entity.data.MlModel; import org.openmetadata.schema.entity.data.Table; -import org.openmetadata.schema.entity.services.MlModelService; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.FeatureSourceDataType; @@ -66,7 +64,6 @@ import org.openmetadata.service.resources.dashboards.DashboardResourceTest; import org.openmetadata.service.resources.databases.TableResourceTest; import org.openmetadata.service.resources.mlmodels.MlModelResource.MlModelList; -import org.openmetadata.service.resources.services.MlModelServiceResourceTest; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.TestUtils; @@ -386,18 +383,6 @@ void test_mutuallyExclusiveTags(TestInfo testInfo) { CatalogExceptionMessage.mutuallyExclusiveLabels(TIER2_TAG_LABEL, TIER1_TAG_LABEL)); } - @Test - void test_inheritDomain(TestInfo test) 
throws IOException { - // When domain is not set for an ML Model, carry it forward from the ML Model Service - MlModelServiceResourceTest serviceTest = new MlModelServiceResourceTest(); - CreateMlModelService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - MlModelService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a ML Model without domain and ensure it inherits domain from the parent - CreateMlModel create = createRequest("model").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - @Override public MlModel validateGetWithDifferentFields(MlModel model, boolean byName) throws HttpResponseException { // .../models?fields=owner diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java index 73c826231190..3f87e7da99f2 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java @@ -49,10 +49,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.openmetadata.schema.api.data.CreatePipeline; -import org.openmetadata.schema.api.services.CreatePipelineService; import org.openmetadata.schema.entity.data.Pipeline; import org.openmetadata.schema.entity.data.PipelineStatus; -import org.openmetadata.schema.entity.services.PipelineService; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.FieldChange; @@ -63,7 +61,6 @@ import org.openmetadata.service.Entity; import org.openmetadata.service.resources.EntityResourceTest; import 
org.openmetadata.service.resources.pipelines.PipelineResource.PipelineList; -import org.openmetadata.service.resources.services.PipelineServiceResourceTest; import org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; @@ -535,18 +532,6 @@ void put_AddRemovePipelineTasksUpdate_200(TestInfo test) throws IOException { assertEquals(2, pipeline.getTasks().size()); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a pipeline, carry it forward from the pipeline service - PipelineServiceResourceTest serviceTest = new PipelineServiceResourceTest(); - CreatePipelineService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - PipelineService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a pipeline without domain and ensure it inherits domain from the parent - CreatePipeline create = createRequest("pipeline").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - @Override public Pipeline validateGetWithDifferentFields(Pipeline pipeline, boolean byName) throws HttpResponseException { String fields = ""; diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java deleted file mode 100644 index b90b274ce73d..000000000000 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java +++ /dev/null @@ -1,426 +0,0 @@ -/* - * Copyright 2021 Collate - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.openmetadata.service.resources.searchindex; - -import static java.util.Collections.singletonList; -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.OK; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.openmetadata.common.utils.CommonUtil.listOf; -import static org.openmetadata.service.Entity.FIELD_OWNER; -import static org.openmetadata.service.util.EntityUtil.fieldAdded; -import static org.openmetadata.service.util.EntityUtil.fieldUpdated; -import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; -import static org.openmetadata.service.util.TestUtils.assertListNotNull; -import static org.openmetadata.service.util.TestUtils.assertListNull; -import static org.openmetadata.service.util.TestUtils.assertResponse; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response.Status; -import lombok.extern.slf4j.Slf4j; -import org.apache.http.client.HttpResponseException; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; -import org.openmetadata.schema.api.data.CreateSearchIndex; -import org.openmetadata.schema.api.services.CreateSearchService; -import 
org.openmetadata.schema.entity.data.SearchIndex; -import org.openmetadata.schema.entity.services.SearchService; -import org.openmetadata.schema.type.ChangeDescription; -import org.openmetadata.schema.type.EntityReference; -import org.openmetadata.schema.type.SearchIndexDataType; -import org.openmetadata.schema.type.SearchIndexField; -import org.openmetadata.schema.type.TagLabel; -import org.openmetadata.schema.type.searchindex.SearchIndexSampleData; -import org.openmetadata.service.Entity; -import org.openmetadata.service.exception.CatalogExceptionMessage; -import org.openmetadata.service.resources.EntityResourceTest; -import org.openmetadata.service.resources.services.SearchServiceResourceTest; -import org.openmetadata.service.util.JsonUtils; -import org.openmetadata.service.util.ResultList; -import org.openmetadata.service.util.TestUtils; -import org.openmetadata.service.util.TestUtils.UpdateType; - -@Slf4j -public class SearchIndexResourceTest extends EntityResourceTest { - public static final List SEARCH_INDEX_FIELDS = - Arrays.asList( - getField("id", SearchIndexDataType.KEYWORD, null), - getField("name", SearchIndexDataType.KEYWORD, null), - getField("address", SearchIndexDataType.TEXT, null)); - - public SearchIndexResourceTest() { - super( - Entity.SEARCH_INDEX, - SearchIndex.class, - SearchIndexResource.SearchIndexList.class, - "searchIndexes", - SearchIndexResource.FIELDS); - supportsSearchIndex = true; - } - - @Test - void post_searchIndexWithoutRequiredFields_4xx(TestInfo test) { - // Service is required field - assertResponse( - () -> createEntity(createRequest(test).withService(null), ADMIN_AUTH_HEADERS), - BAD_REQUEST, - "[service must not be null]"); - - // Partitions is required field - assertResponse( - () -> createEntity(createRequest(test).withFields(null), ADMIN_AUTH_HEADERS), - BAD_REQUEST, - "[fields must not be null]"); - } - - @Test - void post_searchIndexWithDifferentService_200_ok(TestInfo test) throws IOException { - String[] 
differentServices = { - ELASTICSEARCH_SEARCH_SERVICE_REFERENCE.getName(), OPENSEARCH_SEARCH_SERVICE_REFERENCE.getName() - }; - - // Create searchIndex for each service and test APIs - for (String service : differentServices) { - createAndCheckEntity(createRequest(test).withService(service), ADMIN_AUTH_HEADERS); - - // List searchIndexes by filtering on service name and ensure right searchIndexes in the response - Map queryParams = new HashMap<>(); - queryParams.put("service", service); - - ResultList list = listEntities(queryParams, ADMIN_AUTH_HEADERS); - for (SearchIndex searchIndex : list.getData()) { - assertEquals(service, searchIndex.getService().getName()); - } - } - } - - @Test - void put_searchIndexAttributes_200_ok(TestInfo test) throws IOException { - ArrayList fields = - new ArrayList<>( - Arrays.asList( - new SearchIndexField().withName("name").withDataType(SearchIndexDataType.TEXT), - new SearchIndexField().withName("displayName").withDataType(SearchIndexDataType.KEYWORD))); - List searchIndexFields = - Arrays.asList( - new SearchIndexField() - .withName("tableSearchIndex") - .withDataType(SearchIndexDataType.NESTED) - .withChildren(fields)); - CreateSearchIndex createSearchIndex = createRequest(test).withOwner(USER1_REF).withFields(searchIndexFields); - - SearchIndex searchIndex = createEntity(createSearchIndex, ADMIN_AUTH_HEADERS); - ChangeDescription change = getChangeDescription(searchIndex.getVersion()); - - // Patch and update the searchIndex - fields.add(new SearchIndexField().withName("updatedBy").withDataType(SearchIndexDataType.KEYWORD)); - List updatedSearchIndexFields = - List.of( - new SearchIndexField() - .withName("tableSearchIndex") - .withChildren(fields) - .withDataType(SearchIndexDataType.NESTED)); - createSearchIndex.withOwner(TEAM11_REF).withDescription("searchIndex").withFields(updatedSearchIndexFields); - SearchIndexField addedField = fields.get(2); - addedField.setFullyQualifiedName( - 
searchIndex.getFields().get(0).getFullyQualifiedName() + "." + addedField.getName()); - fieldUpdated(change, FIELD_OWNER, USER1_REF, TEAM11_REF); - fieldUpdated(change, "description", "", "searchIndex"); - fieldAdded(change, "fields.tableSearchIndex", JsonUtils.pojoToJson(List.of(addedField))); - updateAndCheckEntity(createSearchIndex, Status.OK, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - } - - @Test - void put_searchIndexFields_200_ok(TestInfo test) throws IOException { - List fields = - Arrays.asList( - getField("id", SearchIndexDataType.KEYWORD, null), - getField("first_name", SearchIndexDataType.KEYWORD, null), - getField("last_name", SearchIndexDataType.TEXT, null), - getField("email", SearchIndexDataType.KEYWORD, null), - getField("address_line_1", SearchIndexDataType.ARRAY, null), - getField("address_line_2", SearchIndexDataType.TEXT, null), - getField("post_code", SearchIndexDataType.TEXT, null), - getField("county", SearchIndexDataType.TEXT, PERSONAL_DATA_TAG_LABEL)); - - CreateSearchIndex createSearchIndex = createRequest(test).withOwner(USER1_REF).withFields(fields); - - // update the searchIndex - SearchIndex searchIndex = createEntity(createSearchIndex, ADMIN_AUTH_HEADERS); - searchIndex = getEntity(searchIndex.getId(), ADMIN_AUTH_HEADERS); - assertFields(fields, searchIndex.getFields()); - } - - @Test - void patch_searchIndexAttributes_200_ok(TestInfo test) throws IOException { - List fields = - Arrays.asList( - getField("id", SearchIndexDataType.KEYWORD, null), - getField("first_name", SearchIndexDataType.KEYWORD, null), - getField("last_name", SearchIndexDataType.TEXT, null), - getField("email", SearchIndexDataType.KEYWORD, null), - getField("address_line_1", SearchIndexDataType.ARRAY, null), - getField("address_line_2", SearchIndexDataType.TEXT, null), - getField("post_code", SearchIndexDataType.TEXT, null), - getField("county", SearchIndexDataType.TEXT, PERSONAL_DATA_TAG_LABEL)); - CreateSearchIndex createSearchIndex = 
createRequest(test).withOwner(USER1_REF).withFields(fields); - - SearchIndex searchIndex = createEntity(createSearchIndex, ADMIN_AUTH_HEADERS); - String origJson = JsonUtils.pojoToJson(searchIndex); - - List updatedFields = - Arrays.asList( - getField("id", SearchIndexDataType.KEYWORD, null), - getField("first_name", SearchIndexDataType.KEYWORD, null), - getField("last_name", SearchIndexDataType.TEXT, null), - getField("email", SearchIndexDataType.KEYWORD, null), - getField("address_line_1", SearchIndexDataType.ARRAY, null), - getField("address_line_2", SearchIndexDataType.TEXT, null), - getField("post_code", SearchIndexDataType.TEXT, null), - getField("county", SearchIndexDataType.TEXT, PERSONAL_DATA_TAG_LABEL), - getField("phone", SearchIndexDataType.TEXT, PERSONAL_DATA_TAG_LABEL)); - - searchIndex.withOwner(TEAM11_REF).withFields(updatedFields); - - SearchIndexField addedField = updatedFields.get(updatedFields.size() - 1); - addedField.setFullyQualifiedName(searchIndex.getFullyQualifiedName() + "." 
+ addedField.getName()); - - ChangeDescription change = getChangeDescription(searchIndex.getVersion()); - fieldUpdated(change, FIELD_OWNER, USER1_REF, TEAM11_REF); - fieldAdded(change, "fields", JsonUtils.pojoToJson(List.of(addedField))); - patchEntityAndCheck(searchIndex, origJson, ADMIN_AUTH_HEADERS, UpdateType.MINOR_UPDATE, change); - } - - @Test - void test_mutuallyExclusiveTags(TestInfo testInfo) { - // Apply mutually exclusive tags to a table - List fields = - Arrays.asList( - getField("id", SearchIndexDataType.KEYWORD, null), - getField("first_name", SearchIndexDataType.KEYWORD, null), - getField("last_name", SearchIndexDataType.TEXT, null), - getField("email", SearchIndexDataType.KEYWORD, null)); - - CreateSearchIndex create = - createRequest(testInfo) - .withTags(List.of(TIER1_TAG_LABEL, TIER2_TAG_LABEL)) - .withOwner(USER1_REF) - .withFields(fields); - - // Apply mutually exclusive tags to a searchIndex - assertResponse( - () -> createEntity(create, ADMIN_AUTH_HEADERS), - BAD_REQUEST, - CatalogExceptionMessage.mutuallyExclusiveLabels(TIER2_TAG_LABEL, TIER1_TAG_LABEL)); - - // Apply mutually exclusive tags to a searchIndex field - CreateSearchIndex create1 = createRequest(testInfo, 1).withOwner(USER1_REF); - SearchIndexField field = - getField("first_name", SearchIndexDataType.TEXT, null).withTags(listOf(TIER1_TAG_LABEL, TIER2_TAG_LABEL)); - create1.withFields(List.of(field)); - assertResponse( - () -> createEntity(create1, ADMIN_AUTH_HEADERS), - BAD_REQUEST, - CatalogExceptionMessage.mutuallyExclusiveLabels(TIER2_TAG_LABEL, TIER1_TAG_LABEL)); - - // Apply mutually exclusive tags to a searchIndexes's nested field - CreateSearchIndex create2 = createRequest(testInfo, 1).withOwner(USER1_REF); - SearchIndexField nestedField = - getField("testNested", SearchIndexDataType.TEXT, null).withTags(listOf(TIER1_TAG_LABEL, TIER2_TAG_LABEL)); - SearchIndexField field1 = getField("test", SearchIndexDataType.NESTED, null).withChildren(List.of(nestedField)); - 
create2.withFields(List.of(field1)); - assertResponse( - () -> createEntity(create2, ADMIN_AUTH_HEADERS), - BAD_REQUEST, - CatalogExceptionMessage.mutuallyExclusiveLabels(TIER2_TAG_LABEL, TIER1_TAG_LABEL)); - } - - @Test - void put_searchIndexSampleData_200(TestInfo test) throws IOException { - List fields = - Arrays.asList( - getField("email", SearchIndexDataType.KEYWORD, null), - getField("firstName", SearchIndexDataType.KEYWORD, null), - getField("lastName", SearchIndexDataType.TEXT, null)); - SearchIndex searchIndex = createAndCheckEntity(createRequest(test).withFields(fields), ADMIN_AUTH_HEADERS); - List messages = - Arrays.asList( - "{\"email\": \"email1@email.com\", \"firstName\": \"Bob\", \"lastName\": \"Jones\"}", - "{\"email\": \"email2@email.com\", \"firstName\": \"Test\", \"lastName\": \"Jones\"}", - "{\"email\": \"email3@email.com\", \"firstName\": \"Bob\", \"lastName\": \"Jones\"}"); - SearchIndexSampleData searchIndexSampleData = new SearchIndexSampleData().withMessages(messages); - SearchIndex searchIndex1 = putSampleData(searchIndex.getId(), searchIndexSampleData, ADMIN_AUTH_HEADERS); - assertEquals(searchIndexSampleData, searchIndex1.getSampleData()); - - SearchIndex searchIndex2 = getSampleData(searchIndex.getId(), ADMIN_AUTH_HEADERS); - assertEquals(searchIndex2.getSampleData(), searchIndex1.getSampleData()); - messages = - Arrays.asList( - "{\"email\": \"email1@email.com\", \"firstName\": \"Bob\", \"lastName\": \"Jones\"}", - "{\"email\": \"email2@email.com\", \"firstName\": \"Test\", \"lastName\": \"Jones\"}"); - searchIndexSampleData.withMessages(messages); - SearchIndex putResponse = putSampleData(searchIndex2.getId(), searchIndexSampleData, ADMIN_AUTH_HEADERS); - assertEquals(searchIndexSampleData, putResponse.getSampleData()); - searchIndex2 = getSampleData(searchIndex.getId(), ADMIN_AUTH_HEADERS); - assertEquals(searchIndexSampleData, searchIndex2.getSampleData()); - } - - @Test - void test_inheritDomain(TestInfo test) throws IOException 
{ - // When domain is not set for a searchIndex, carry it forward from the search service - SearchServiceResourceTest serviceTest = new SearchServiceResourceTest(); - CreateSearchService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - SearchService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a searchIndex without domain and ensure it inherits domain from the parent - CreateSearchIndex create = createRequest("user").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - - @Override - public SearchIndex validateGetWithDifferentFields(SearchIndex searchIndex, boolean byName) - throws HttpResponseException { - // .../searchIndex?fields=owner - String fields = ""; - searchIndex = - byName - ? getSearchIndexByName(searchIndex.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getSearchIndex(searchIndex.getId(), fields, ADMIN_AUTH_HEADERS); - assertListNull(searchIndex.getOwner(), searchIndex.getFollowers(), searchIndex.getFollowers()); - - fields = "owner, followers, tags"; - searchIndex = - byName - ? getSearchIndexByName(searchIndex.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getSearchIndex(searchIndex.getId(), fields, ADMIN_AUTH_HEADERS); - assertListNotNull(searchIndex.getService(), searchIndex.getServiceType()); - // Checks for other owner, tags, and followers is done in the base class - return searchIndex; - } - - public SearchIndex getSearchIndex(UUID id, String fields, Map authHeaders) - throws HttpResponseException { - WebTarget target = getResource(id); - target = fields != null ? target.queryParam("fields", fields) : target; - return TestUtils.get(target, SearchIndex.class, authHeaders); - } - - public SearchIndex getSearchIndexByName(String fqn, String fields, Map authHeaders) - throws HttpResponseException { - WebTarget target = getResourceByName(fqn); - target = fields != null ? 
target.queryParam("fields", fields) : target; - return TestUtils.get(target, SearchIndex.class, authHeaders); - } - - @Override - public CreateSearchIndex createRequest(String name) { - return new CreateSearchIndex() - .withName(name) - .withService(getContainer().getFullyQualifiedName()) - .withFields(SEARCH_INDEX_FIELDS); - } - - @Override - public EntityReference getContainer() { - return ELASTICSEARCH_SEARCH_SERVICE_REFERENCE; - } - - @Override - public EntityReference getContainer(SearchIndex entity) { - return entity.getService(); - } - - @Override - public void validateCreatedEntity( - SearchIndex searchIndex, CreateSearchIndex createRequest, Map authHeaders) - throws HttpResponseException { - assertReference(createRequest.getService(), searchIndex.getService()); - // TODO add other fields - TestUtils.validateTags(createRequest.getTags(), searchIndex.getTags()); - } - - @Override - public void compareEntities(SearchIndex expected, SearchIndex updated, Map authHeaders) - throws HttpResponseException { - assertReference(expected.getService(), expected.getService()); - // TODO add other fields - TestUtils.validateTags(expected.getTags(), updated.getTags()); - } - - @Override - public void assertFieldChange(String fieldName, Object expected, Object actual) throws IOException { - if (expected == actual) { - return; - } - assertCommonFieldChange(fieldName, expected, actual); - } - - public SearchIndex putSampleData(UUID searchIndexId, SearchIndexSampleData data, Map authHeaders) - throws HttpResponseException { - WebTarget target = getResource(searchIndexId).path("/sampleData"); - return TestUtils.put(target, data, SearchIndex.class, OK, authHeaders); - } - - public SearchIndex getSampleData(UUID searchIndexId, Map authHeaders) throws HttpResponseException { - WebTarget target = getResource(searchIndexId).path("/sampleData"); - return TestUtils.get(target, SearchIndex.class, authHeaders); - } - - private static SearchIndexField getField(String name, 
SearchIndexDataType fieldDataType, TagLabel tag) { - List tags = tag == null ? new ArrayList<>() : singletonList(tag); - return new SearchIndexField().withName(name).withDataType(fieldDataType).withDescription(name).withTags(tags); - } - - private static void assertFields(List expectedFields, List actualFields) - throws HttpResponseException { - if (expectedFields == actualFields) { - return; - } - // Sort columns by name - assertEquals(expectedFields.size(), actualFields.size()); - - // Make a copy before sorting in case the lists are immutable - List expected = new ArrayList<>(expectedFields); - List actual = new ArrayList<>(actualFields); - expected.sort(Comparator.comparing(SearchIndexField::getName)); - actual.sort(Comparator.comparing(SearchIndexField::getName)); - for (int i = 0; i < expected.size(); i++) { - assertField(expected.get(i), actual.get(i)); - } - } - - private static void assertField(SearchIndexField expectedField, SearchIndexField actualField) - throws HttpResponseException { - assertNotNull(actualField.getFullyQualifiedName()); - assertTrue( - expectedField.getName().equals(actualField.getName()) - || expectedField.getName().equals(actualField.getDisplayName())); - assertEquals(expectedField.getDescription(), actualField.getDescription()); - assertEquals(expectedField.getDataType(), actualField.getDataType()); - TestUtils.validateTags(expectedField.getTags(), actualField.getTags()); - - // Check the nested columns - assertFields(expectedField.getChildren(), actualField.getChildren()); - } -} diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/DashboardServiceResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/DashboardServiceResourceTest.java index a8ae321c7f2b..83e109a037af 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/DashboardServiceResourceTest.java +++ 
b/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/DashboardServiceResourceTest.java @@ -279,7 +279,7 @@ public void setupDashboardServices(TestInfo test) throws HttpResponseException, .withHostPort(new URI("http://localhost:8080")) .withPassword("test") .withUsername("admin")); - createDashboardService.withConnection(dashboardConnection).withDomain(DOMAIN.getFullyQualifiedName()); + createDashboardService.withConnection(dashboardConnection); DashboardService dashboardService = new DashboardServiceResourceTest().createEntity(createDashboardService, ADMIN_AUTH_HEADERS); METABASE_REFERENCE = dashboardService.getEntityReference(); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/PipelineServiceResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/PipelineServiceResourceTest.java index 5785a7dea9cd..97d1af6cbd2a 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/PipelineServiceResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/PipelineServiceResourceTest.java @@ -54,6 +54,7 @@ import org.openmetadata.schema.services.connections.database.RedshiftConnection; import org.openmetadata.schema.services.connections.pipeline.AirflowConnection; import org.openmetadata.schema.type.ChangeDescription; +import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.PipelineConnection; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.EntityResourceTest; @@ -166,6 +167,7 @@ void put_addIngestion_as_admin_2xx(TestInfo test) throws IOException { // Create Pipeline Service CreatePipelineService create = createRequest(test); PipelineService service = createAndCheckEntity(create, ADMIN_AUTH_HEADERS); + EntityReference serviceRef = service.getEntityReference(); // Add an IngestionPipeline to the service 
IngestionPipelineResourceTest ingestionPipelineResourceTest = new IngestionPipelineResourceTest(); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/SearchServiceResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/SearchServiceResourceTest.java deleted file mode 100644 index fed491009f88..000000000000 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/services/SearchServiceResourceTest.java +++ /dev/null @@ -1,208 +0,0 @@ -package org.openmetadata.service.resources.services; - -import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import static javax.ws.rs.core.Response.Status.OK; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.openmetadata.service.util.EntityUtil.fieldAdded; -import static org.openmetadata.service.util.EntityUtil.fieldUpdated; -import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; -import static org.openmetadata.service.util.TestUtils.assertResponse; - -import java.io.IOException; -import java.util.Map; -import java.util.UUID; -import javax.ws.rs.client.WebTarget; -import org.apache.http.client.HttpResponseException; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; -import org.openmetadata.schema.api.services.CreateSearchService; -import org.openmetadata.schema.entity.services.SearchService; -import org.openmetadata.schema.entity.services.connections.TestConnectionResult; -import org.openmetadata.schema.entity.services.connections.TestConnectionResultStatus; -import org.openmetadata.schema.services.connections.search.ElasticSearchConnection; -import org.openmetadata.schema.type.ChangeDescription; -import org.openmetadata.schema.type.SearchConnection; -import 
org.openmetadata.service.Entity; -import org.openmetadata.service.resources.EntityResourceTest; -import org.openmetadata.service.resources.services.searchIndexes.SearchServiceResource; -import org.openmetadata.service.util.JsonUtils; -import org.openmetadata.service.util.TestUtils; - -public class SearchServiceResourceTest extends EntityResourceTest { - public SearchServiceResourceTest() { - super( - Entity.SEARCH_SERVICE, - SearchService.class, - SearchServiceResource.SearchServiceList.class, - "services/searchServices", - "owner"); - this.supportsPatch = false; - } - - public void setupSearchService(TestInfo test) throws HttpResponseException { - SearchServiceResourceTest esSearchServiceResourceTest = new SearchServiceResourceTest(); - CreateSearchService createSearchService = - esSearchServiceResourceTest - .createRequest(test, 1) - .withName("elasticSearch") - .withServiceType(CreateSearchService.SearchServiceType.ElasticSearch) - .withConnection(TestUtils.ELASTIC_SEARCH_CONNECTION); - - SearchService esSearchService = - new SearchServiceResourceTest().createEntity(createSearchService, ADMIN_AUTH_HEADERS); - ELASTICSEARCH_SEARCH_SERVICE_REFERENCE = esSearchService.getEntityReference(); - SearchServiceResourceTest osSearchServiceResourceTest = new SearchServiceResourceTest(); - createSearchService = - osSearchServiceResourceTest - .createRequest(test, 1) - .withName("opensearch") - .withServiceType(CreateSearchService.SearchServiceType.OpenSearch) - .withConnection(TestUtils.OPEN_SEARCH_CONNECTION); - SearchService osSearchService = - new SearchServiceResourceTest().createEntity(createSearchService, ADMIN_AUTH_HEADERS); - OPENSEARCH_SEARCH_SERVICE_REFERENCE = osSearchService.getEntityReference(); - } - - @Test - void post_withoutRequiredFields_400_badRequest(TestInfo test) { - // Create StorageService with mandatory serviceType field empty - assertResponse( - () -> createEntity(createRequest(test).withServiceType(null), ADMIN_AUTH_HEADERS), - BAD_REQUEST, - 
"[serviceType must not be null]"); - - // Create StorageService with mandatory connection field empty - assertResponse( - () -> createEntity(createRequest(test).withConnection(null), ADMIN_AUTH_HEADERS), - BAD_REQUEST, - "[connection must not be null]"); - } - - @Test - void post_validService_as_admin_200_ok(TestInfo test) throws IOException { - // Create Storage service with different optional fields - Map authHeaders = ADMIN_AUTH_HEADERS; - createAndCheckEntity(createRequest(test, 1).withDescription(null), authHeaders); - createAndCheckEntity(createRequest(test, 2).withDescription("description"), authHeaders); - createAndCheckEntity(createRequest(test, 3).withConnection(TestUtils.ELASTIC_SEARCH_CONNECTION), authHeaders); - } - - @Test - void put_updateService_as_admin_2xx(TestInfo test) throws IOException { - SearchConnection connection1 = - new SearchConnection().withConfig(new ElasticSearchConnection().withHostPort("http://localhost:9300")); - SearchService service = - createAndCheckEntity(createRequest(test).withDescription(null).withConnection(connection1), ADMIN_AUTH_HEADERS); - - ElasticSearchConnection credentials2 = new ElasticSearchConnection().withHostPort("https://localhost:9400"); - SearchConnection connection2 = new SearchConnection().withConfig(credentials2); - - // Update SearchService description and connection - - CreateSearchService update = createRequest(test).withDescription("description1").withConnection(connection2); - - ChangeDescription change = getChangeDescription(service.getVersion()); - fieldAdded(change, "description", "description1"); - fieldUpdated(change, "connection", connection1, connection2); - updateAndCheckEntity(update, OK, ADMIN_AUTH_HEADERS, TestUtils.UpdateType.MINOR_UPDATE, change); - } - - @Test - void put_testConnectionResult_200(TestInfo test) throws IOException { - SearchService service = createAndCheckEntity(createRequest(test), ADMIN_AUTH_HEADERS); - // By default, we have no result logged in - 
assertNull(service.getTestConnectionResult()); - SearchService updatedService = putTestConnectionResult(service.getId(), TEST_CONNECTION_RESULT, ADMIN_AUTH_HEADERS); - // Validate that the data got properly stored - assertNotNull(updatedService.getTestConnectionResult()); - assertEquals(TestConnectionResultStatus.SUCCESSFUL, updatedService.getTestConnectionResult().getStatus()); - assertEquals(updatedService.getConnection(), service.getConnection()); - // Check that the stored data is also correct - SearchService stored = getEntity(service.getId(), ADMIN_AUTH_HEADERS); - assertNotNull(stored.getTestConnectionResult()); - assertEquals(TestConnectionResultStatus.SUCCESSFUL, stored.getTestConnectionResult().getStatus()); - assertEquals(stored.getConnection(), service.getConnection()); - } - - public SearchService putTestConnectionResult( - UUID serviceId, TestConnectionResult testConnectionResult, Map authHeaders) - throws HttpResponseException { - WebTarget target = getResource(serviceId).path("/testConnectionResult"); - return TestUtils.put(target, testConnectionResult, SearchService.class, OK, authHeaders); - } - - @Override - public CreateSearchService createRequest(String name) { - return new CreateSearchService() - .withName(name) - .withServiceType(CreateSearchService.SearchServiceType.ElasticSearch) - .withConnection( - new SearchConnection().withConfig(new ElasticSearchConnection().withHostPort("http://localhost:9200"))); - } - - @Override - public void validateCreatedEntity( - SearchService service, CreateSearchService createRequest, Map authHeaders) - throws HttpResponseException { - assertEquals(createRequest.getName(), service.getName()); - SearchConnection expectedConnection = createRequest.getConnection(); - SearchConnection actualConnection = service.getConnection(); - validateConnection(expectedConnection, actualConnection, service.getServiceType()); - } - - @Override - public void compareEntities(SearchService expected, SearchService updated, Map 
authHeaders) - throws HttpResponseException { - // PATCH operation is not supported by this entity - - } - - @Override - public SearchService validateGetWithDifferentFields(SearchService service, boolean byName) - throws HttpResponseException { - String fields = ""; - service = - byName - ? getEntityByName(service.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getEntity(service.getId(), fields, ADMIN_AUTH_HEADERS); - TestUtils.assertListNull(service.getOwner()); - - fields = "owner,tags"; - service = - byName - ? getEntityByName(service.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS) - : getEntity(service.getId(), fields, ADMIN_AUTH_HEADERS); - // Checks for other owner, tags, and followers is done in the base class - return service; - } - - @Override - public void assertFieldChange(String fieldName, Object expected, Object actual) throws IOException { - if (fieldName.equals("connection")) { - assertTrue(((String) actual).contains("-encrypted-value")); - } else { - super.assertCommonFieldChange(fieldName, expected, actual); - } - } - - private void validateConnection( - SearchConnection expectedConnection, - SearchConnection actualConnection, - CreateSearchService.SearchServiceType serviceType) { - if (expectedConnection != null && actualConnection != null) { - if (serviceType == CreateSearchService.SearchServiceType.ElasticSearch) { - ElasticSearchConnection expectedESConnection = (ElasticSearchConnection) expectedConnection.getConfig(); - ElasticSearchConnection actualESConnection; - if (actualConnection.getConfig() instanceof ElasticSearchConnection) { - actualESConnection = (ElasticSearchConnection) actualConnection.getConfig(); - } else { - actualESConnection = JsonUtils.convertValue(actualConnection.getConfig(), ElasticSearchConnection.class); - } - assertEquals(expectedESConnection.getHostPort(), actualESConnection.getHostPort()); - } - } - } -} diff --git 
a/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java index dcc8ef7d66ec..6b2b337f7b07 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java @@ -23,7 +23,13 @@ import org.junit.jupiter.api.TestInfo; import org.junit.jupiter.api.TestMethodOrder; import org.openmetadata.api.configuration.LogoConfiguration; -import org.openmetadata.schema.api.data.*; +import org.openmetadata.schema.api.data.CreateContainer; +import org.openmetadata.schema.api.data.CreateDashboard; +import org.openmetadata.schema.api.data.CreateGlossary; +import org.openmetadata.schema.api.data.CreateGlossaryTerm; +import org.openmetadata.schema.api.data.CreatePipeline; +import org.openmetadata.schema.api.data.CreateTable; +import org.openmetadata.schema.api.data.CreateTopic; import org.openmetadata.schema.api.services.CreateDashboardService; import org.openmetadata.schema.api.services.CreateDatabaseService; import org.openmetadata.schema.api.services.CreateMessagingService; @@ -51,7 +57,6 @@ import org.openmetadata.service.resources.glossary.GlossaryResourceTest; import org.openmetadata.service.resources.glossary.GlossaryTermResourceTest; import org.openmetadata.service.resources.pipelines.PipelineResourceTest; -import org.openmetadata.service.resources.searchindex.SearchIndexResourceTest; import org.openmetadata.service.resources.services.DashboardServiceResourceTest; import org.openmetadata.service.resources.services.DatabaseServiceResourceTest; import org.openmetadata.service.resources.services.MessagingServiceResourceTest; @@ -136,10 +141,6 @@ void entitiesCount(TestInfo test) throws HttpResponseException { CreateContainer createContainer = containerResourceTest.createRequest(test); 
containerResourceTest.createEntity(createContainer, ADMIN_AUTH_HEADERS); - SearchIndexResourceTest SearchIndexResourceTest = new SearchIndexResourceTest(); - CreateSearchIndex createSearchIndex = SearchIndexResourceTest.createRequest(test); - SearchIndexResourceTest.createEntity(createSearchIndex, ADMIN_AUTH_HEADERS); - GlossaryResourceTest glossaryResourceTest = new GlossaryResourceTest(); CreateGlossary createGlossary = glossaryResourceTest.createRequest(test); glossaryResourceTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/TeamResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/TeamResourceTest.java index e182785494ad..8f3d9582a9bb 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/TeamResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/TeamResourceTest.java @@ -811,17 +811,6 @@ void testTeamImportExport() throws IOException { assertTrue(result.getImportResultsCsv().contains(error)); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a user term, carry it forward from the parent team - CreateTeam createTeam = createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()).withTeamType(DEPARTMENT); - Team team = createEntity(createTeam, ADMIN_AUTH_HEADERS); - - // Create a children team without domain and ensure it inherits domain from the parent - createTeam = createRequest("team1").withParents(listOf(team.getId())); - assertDomainInheritance(createTeam, DOMAIN.getEntityReference()); - } - private static void validateTeam( Team team, String expectedDescription, diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/UserResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/UserResourceTest.java index 
ebd44871e93e..6806aab7a1c4 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/UserResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/UserResourceTest.java @@ -90,7 +90,6 @@ import org.openmetadata.csv.EntityCsv; import org.openmetadata.csv.EntityCsvTest; import org.openmetadata.schema.api.CreateBot; -import org.openmetadata.schema.api.teams.CreateTeam; import org.openmetadata.schema.api.teams.CreateUser; import org.openmetadata.schema.auth.CreatePersonalToken; import org.openmetadata.schema.auth.GenerateTokenRequest; @@ -818,10 +817,11 @@ void put_generateToken_bot_user_200_ok() throws HttpResponseException { @Test void post_createUser_BasicAuth_AdminCreate_login_200_ok(TestInfo test) throws HttpResponseException { // Create a user with Auth and Try Logging in + String name = "testBasicAuth"; User user = createEntity( createRequest(test) - .withName("testBasicAuth") + .withName(name) .withDisplayName("Test") .withEmail("testBasicAuth@email.com") .withIsBot(false) @@ -833,6 +833,8 @@ void post_createUser_BasicAuth_AdminCreate_login_200_ok(TestInfo test) throws Ht // jwtAuth Response should be null always user = getEntity(user.getId(), ADMIN_AUTH_HEADERS); assertNull(user.getAuthenticationMechanism()); + assertEquals(name, user.getName()); + assertEquals(name.toLowerCase(), user.getFullyQualifiedName()); // Login With Correct Password LoginRequest loginRequest = @@ -875,11 +877,12 @@ void post_createUser_BasicAuth_AdminCreate_login_200_ok(TestInfo test) throws Ht @Test void post_createUser_BasicAuth_SignUp_200_ok() throws HttpResponseException { // Create a user with Auth and Try Logging in + String name = "testBasicAuth123"; RegistrationRequest newRegistrationRequest = new RegistrationRequest() .withFirstName("Test") .withLastName("Test") - .withEmail("testBasicAuth123@email.com") + .withEmail(String.format("%s@email.com", name)) .withPassword("Test@1234"); 
TestUtils.post(getResource("users/signup"), newRegistrationRequest, String.class, ADMIN_AUTH_HEADERS); @@ -887,6 +890,8 @@ void post_createUser_BasicAuth_SignUp_200_ok() throws HttpResponseException { // jwtAuth Response should be null always User user = getEntityByName("testBasicAuth123", null, ADMIN_AUTH_HEADERS); assertNull(user.getAuthenticationMechanism()); + assertEquals(name, user.getName()); + assertEquals(name.toLowerCase(), user.getFullyQualifiedName()); // Login With Correct Password LoginRequest loginRequest = @@ -1043,6 +1048,25 @@ private void validateJwtBasicAuth(JwtResponse jwtResponse, String username) { assertEquals(false, jwt.getClaims().get("isBot").asBoolean()); } + @Test + void test_userNameIgnoreCase(TestInfo test) throws IOException { + // Create user with different optional fields + CreateUser create = createRequest(test, 1).withName("UserEmailTest").withEmail("UserEmailTest@domainx.com"); + User created = createEntity(create, ADMIN_AUTH_HEADERS); + + // Creating another user with different case should fail + create.withName("Useremailtest").withEmail("Useremailtest@Domainx.com"); + assertResponse(() -> createEntity(create, ADMIN_AUTH_HEADERS), CONFLICT, "Entity already exists"); + + // get user with username in different case + User user = getEntityByName("UsERemailTEST", ADMIN_AUTH_HEADERS); + compareEntities(user, created, ADMIN_AUTH_HEADERS); + user.setName("UsERemailTEST"); + user.setFullyQualifiedName("UsERemailTEST"); + // delete user with different + deleteByNameAndCheckEntity(user, false, false, ADMIN_AUTH_HEADERS); + } + @Test void testInheritedRole() throws HttpResponseException { // USER1 inherits DATA_CONSUMER_ROLE from Organization @@ -1112,18 +1136,6 @@ void patch_ProfileWithSubscription(TestInfo test) throws IOException, URISyntaxE patchEntityAndCheck(user, json, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a user term, 
carry it forward from the parent team - TeamResourceTest teamResourceTest = new TeamResourceTest(); - CreateTeam createTeam = teamResourceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - Team team = teamResourceTest.createEntity(createTeam, ADMIN_AUTH_HEADERS); - - // Create a user without domain and ensure it inherits domain from the parent - CreateUser create = createRequest(test).withTeams(listOf(team.getId())); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - private DecodedJWT decodedJWT(String token) { DecodedJWT jwt; try { @@ -1168,9 +1180,10 @@ public User validateGetWithDifferentFields(User user, boolean byName) throws Htt @Override public CreateUser createRequest(String name) { // user part of the email should be less than 64 in length - String emailUser = nullOrEmpty(name) ? UUID.randomUUID().toString() : name; + String entityName = name != null ? name.toLowerCase() : null; + String emailUser = nullOrEmpty(entityName) ? UUID.randomUUID().toString().toLowerCase() : entityName; emailUser = emailUser.length() > 64 ? 
emailUser.substring(0, 64) : emailUser; - return new CreateUser().withName(name).withEmail(emailUser + "@open-metadata.org").withProfile(PROFILE); + return new CreateUser().withName(entityName).withEmail(emailUser + "@open-metadata.org").withProfile(PROFILE); } @Override @@ -1257,7 +1270,7 @@ public void assertFieldChange(String fieldName, Object expected, Object actual) @Override public String getAllowedFields() { Set allowedFields = Entity.getEntityFields(entityClass); - of(USER_PROTECTED_FIELDS.split(",")).forEach(allowedFields::remove); + allowedFields.removeAll(of(USER_PROTECTED_FIELDS.split(","))); return String.join(",", allowedFields); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/topics/TopicResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/topics/TopicResourceTest.java index afd745cd40b1..695c5e6b4ea9 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/topics/TopicResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/topics/TopicResourceTest.java @@ -44,9 +44,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.openmetadata.schema.api.data.CreateTopic; -import org.openmetadata.schema.api.services.CreateMessagingService; import org.openmetadata.schema.entity.data.Topic; -import org.openmetadata.schema.entity.services.MessagingService; import org.openmetadata.schema.type.ChangeDescription; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.Field; @@ -59,7 +57,6 @@ import org.openmetadata.service.Entity; import org.openmetadata.service.exception.CatalogExceptionMessage; import org.openmetadata.service.resources.EntityResourceTest; -import org.openmetadata.service.resources.services.MessagingServiceResourceTest; import org.openmetadata.service.resources.topics.TopicResource.TopicList; import org.openmetadata.service.util.JsonUtils; import 
org.openmetadata.service.util.ResultList; @@ -328,18 +325,6 @@ void put_topicSampleData_200(TestInfo test) throws IOException { assertEquals(topicSampleData, topic.getSampleData()); } - @Test - void test_inheritDomain(TestInfo test) throws IOException { - // When domain is not set for a topic, carry it forward from the messaging service - MessagingServiceResourceTest serviceTest = new MessagingServiceResourceTest(); - CreateMessagingService createService = serviceTest.createRequest(test).withDomain(DOMAIN.getFullyQualifiedName()); - MessagingService service = serviceTest.createEntity(createService, ADMIN_AUTH_HEADERS); - - // Create a topic without domain and ensure it inherits domain from the parent - CreateTopic create = createRequest("chart").withService(service.getFullyQualifiedName()); - assertDomainInheritance(create, DOMAIN.getEntityReference()); - } - @Override public Topic validateGetWithDifferentFields(Topic topic, boolean byName) throws HttpResponseException { // .../topics?fields=owner diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/util/TestUtils.java b/openmetadata-service/src/test/java/org/openmetadata/service/util/TestUtils.java index 1197c0eadbbb..09d4f6c8d62e 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/util/TestUtils.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/util/TestUtils.java @@ -69,15 +69,12 @@ import org.openmetadata.schema.services.connections.mlmodel.MlflowConnection; import org.openmetadata.schema.services.connections.pipeline.AirflowConnection; import org.openmetadata.schema.services.connections.pipeline.GluePipelineConnection; -import org.openmetadata.schema.services.connections.search.ElasticSearchConnection; -import org.openmetadata.schema.services.connections.search.OpenSearchConnection; import org.openmetadata.schema.services.connections.storage.S3Connection; import org.openmetadata.schema.type.DashboardConnection; import 
org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.MessagingConnection; import org.openmetadata.schema.type.MlModelConnection; import org.openmetadata.schema.type.PipelineConnection; -import org.openmetadata.schema.type.SearchConnection; import org.openmetadata.schema.type.StorageConnection; import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.TagLabel.TagSource; @@ -114,10 +111,6 @@ public final class TestUtils { public static final MlModelConnection MLFLOW_CONNECTION; public static final StorageConnection S3_STORAGE_CONNECTION; - - public static final SearchConnection ELASTIC_SEARCH_CONNECTION; - public static final SearchConnection OPEN_SEARCH_CONNECTION; - public static MetadataConnection AMUNDSEN_CONNECTION; public static MetadataConnection ATLAS_CONNECTION; @@ -225,13 +218,6 @@ public enum UpdateType { S3_STORAGE_CONNECTION = new StorageConnection().withConfig(new S3Connection().withAwsConfig(AWS_CREDENTIALS)); } - static { - ELASTIC_SEARCH_CONNECTION = - new SearchConnection().withConfig(new ElasticSearchConnection().withHostPort("http://localhost:9200")); - OPEN_SEARCH_CONNECTION = - new SearchConnection().withConfig(new OpenSearchConnection().withHostPort("http://localhost:9200")); - } - static { try { PIPELINE_URL = new URI("http://localhost:8080"); diff --git a/openmetadata-spec/pom.xml b/openmetadata-spec/pom.xml index eddd4afdbb90..b4a82d018260 100644 --- a/openmetadata-spec/pom.xml +++ b/openmetadata-spec/pom.xml @@ -5,7 +5,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 diff --git a/openmetadata-spec/src/main/java/org/openmetadata/schema/CreateEntity.java b/openmetadata-spec/src/main/java/org/openmetadata/schema/CreateEntity.java index 7a5275a859dd..3f09c3418423 100644 --- a/openmetadata-spec/src/main/java/org/openmetadata/schema/CreateEntity.java +++ b/openmetadata-spec/src/main/java/org/openmetadata/schema/CreateEntity.java @@ -13,10 +13,8 @@ package org.openmetadata.schema; 
-import java.util.List; import org.openmetadata.schema.type.EntityReference; -@SuppressWarnings("unchecked") public interface CreateEntity { String getName(); @@ -32,14 +30,6 @@ default Object getExtension() { return null; } - default String getDomain() { - return null; - } - - default List getDataProducts() { - return null; - } - K withName(String name); K withDisplayName(String displayName); @@ -53,8 +43,4 @@ default K withOwner(EntityReference owner) { default K withExtension(Object extension) { return (K) this; } - - default K withDomain(String domain) { - return (K) this; - } } diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createChart.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createChart.json index 91bd8146e5c9..f648dba6fbfe 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createChart.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createChart.json @@ -41,17 +41,6 @@ "service": { "description": "Link to the chart service where this chart is hosted in", "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "domain" : { - "description": "Fully qualified name of the domain the Chart belongs to.", - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "dataProducts" : { - "description": "List of fully qualified names of data products this entity is part of.", - "type": "array", - "items" : { - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - } } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createContainer.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createContainer.json index f6e7ec1a3b64..f2d2d29d2eb2 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createContainer.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createContainer.json @@ -75,10 +75,6 @@ "sourceUrl": { "description": 
"Source URL of container.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Fully qualified name of the domain the Container belongs to.", - "type": "string" } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboard.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboard.json index 617c59eb54fa..c247388003d3 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboard.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboard.json @@ -65,17 +65,6 @@ "extension": { "description": "Entity extension data with custom attributes added to the entity.", "$ref": "../../type/basic.json#/definitions/entityExtension" - }, - "domain" : { - "description": "Fully qualified name of the domain the Dashboard belongs to.", - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "dataProducts" : { - "description": "List of fully qualified names of data products this entity is part of.", - "type": "array", - "items" : { - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - } } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboardDataModel.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboardDataModel.json index 84020b0e0a9e..5ba60068a32a 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboardDataModel.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createDashboardDataModel.json @@ -58,10 +58,6 @@ "project": { "description": "Name of the project / workspace / collection in which the dataModel is contained", "type": "string" - }, - "domain" : { - "description": "Fully qualified name of the domain the Dashboard Data Model belongs to.", - "type": "string" } }, "required": ["name", "service", "dataModelType", 
"columns"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabase.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabase.json index 7fd5acb96134..328273fcb9f0 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabase.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabase.json @@ -52,10 +52,6 @@ "sourceUrl": { "description": "Source URL of database.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Fully qualified name of the domain the Database belongs to.", - "type": "string" } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabaseSchema.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabaseSchema.json index 551f622a2960..b7ee5ea3722c 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabaseSchema.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createDatabaseSchema.json @@ -48,10 +48,6 @@ "sourceUrl": { "description": "Source URL of database schema.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Fully qualified name of the domain the Database Schema belongs to.", - "type": "string" } }, "required": [ diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossary.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossary.json index 7fcf26f9a24b..ac2f8236248d 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossary.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossary.json @@ -46,10 +46,6 @@ "description" : "Glossary terms that are direct children in this glossary are mutually exclusive. When mutually exclusive is `true` only one term can be used to label an entity. 
When mutually exclusive is `false`, multiple terms from this group can be used to label an entity.", "type" : "boolean", "default" : "false" - }, - "domain" : { - "description": "Fully qualified name of the domain the Glossary belongs to.", - "type": "string" } }, "required": ["name", "description"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createMlModel.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createMlModel.json index 3123bc35e421..052f8a4b0751 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createMlModel.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createMlModel.json @@ -79,10 +79,6 @@ "sourceUrl": { "description": "Source URL of mlModel.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Fully qualified name of the domain the MLModel belongs to.", - "type": "string" } }, "required": ["name", "algorithm", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createPipeline.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createPipeline.json index 48114b539bfb..0535d8ee837b 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createPipeline.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createPipeline.json @@ -68,10 +68,6 @@ "description": "Scheduler Interval for the pipeline in cron format.", "type": "string", "default": null - }, - "domain" : { - "description": "Fully qualified name of the domain the Pipeline belongs to.", - "type": "string" } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createQuery.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createQuery.json index 7df8173dcfeb..d1696fc54416 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createQuery.json +++ 
b/openmetadata-spec/src/main/resources/json/schema/api/data/createQuery.json @@ -49,6 +49,14 @@ "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" } }, + "usedBy" : { + "description": "List of users who ran the query but do not exist in OpenMetadata.", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, "queryDate": { "description": "Date on which the query ran.", "$ref": "../../type/basic.json#/definitions/timestamp" diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createSearchIndex.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createSearchIndex.json deleted file mode 100644 index e70b2b63252e..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createSearchIndex.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/api/data/createSearchIndex.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "CreateSearchIndexRequest", - "description": "Create a SearchIndex entity request", - "type": "object", - "javaType": "org.openmetadata.schema.api.data.CreateSearchIndex", - "javaInterfaces": ["org.openmetadata.schema.CreateEntity"], - - "properties": { - "name": { - "description": "Name that identifies this SearchIndex instance uniquely.", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "displayName": { - "description": "Display Name that identifies this SearchIndex.", - "type": "string" - }, - "description": { - "description": "Description of the SearchIndex instance. 
What it has and how to use it.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "service": { - "description": "Fully qualified name of the search service where this searchIndex is hosted in", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "fields": { - "description": "Fields in this SearchIndex.", - "type": "array", - "items": { - "$ref": "../../entity/data/searchIndex.json#/definitions/searchIndexField" - }, - "default": null - }, - "searchIndexSettings": { - "description": "Contains key/value pair of searchIndex settings.", - "$ref": "../../entity/data/searchIndex.json#/definitions/searchIndexSettings" - }, - "owner": { - "description": "Owner of this SearchIndex", - "$ref": "../../type/entityReference.json" - }, - "tags": { - "description": "Tags for this SearchIndex", - "type": "array", - "items": { - "$ref": "../../type/tagLabel.json" - }, - "default": null - }, - "extension": { - "description": "Entity extension data with custom attributes added to the entity.", - "$ref": "../../type/basic.json#/definitions/entityExtension" - }, - "domain" : { - "description": "Fully qualified name of the domain the SearchIndex belongs to.", - "type": "string", - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "dataProducts" : { - "description": "List of fully qualified names of data products this entity is part of.", - "type": "array", - "items" : { - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - } - } - }, - "required": ["name", "service", "fields"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createTable.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createTable.json index 4397ce776879..1c0bd7d05e03 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createTable.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createTable.json @@ -78,17 +78,6 @@ 
"description": "Source URL of table.", "$ref": "../../type/basic.json#/definitions/sourceUrl" }, - "domain" : { - "description": "Fully qualified name of the domain the Table belongs to.", - "type": "string" - }, - "dataProducts" : { - "description": "List of fully qualified names of data products this entity is part of.", - "type": "array", - "items" : { - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - } - }, "fileFormat": { "description": "File format in case of file/datalake tables.", "$ref": "../../entity/data/table.json#/definitions/fileFormat" diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createTopic.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createTopic.json index 888f5697a0f5..6fce1ac2bd1a 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/data/createTopic.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createTopic.json @@ -83,18 +83,6 @@ "sourceUrl": { "description": "Source URL of topic.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Fully qualified name of the domain the Topic belongs to.", - "type": "string", - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "dataProducts" : { - "description": "List of fully qualified names of data products this entity is part of.", - "type": "array", - "items" : { - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName" - } } }, "required": ["name", "service", "partitions"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/domains/createDataProduct.json b/openmetadata-spec/src/main/resources/json/schema/api/domains/createDataProduct.json deleted file mode 100644 index 7b5fa786784e..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/api/domains/createDataProduct.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/domains/createDataProduct.json", - 
"$schema": "http://json-schema.org/draft-07/schema#", - "title": "createDataProduct", - "description": "Create DataProduct API request", - "type": "object", - "javaType": "org.openmetadata.schema.api.domains.CreateDataProduct", - "javaInterfaces": ["org.openmetadata.schema.CreateEntity"], - "properties": { - "name": { - "description": "A unique name of the DataProduct", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "FullyQualifiedName of the Domain.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Name used for display purposes. Example 'Customer Churn', 'Sentiment Analysis', etc.", - "type": "string" - }, - "description": { - "description": "Description of the DataProduct.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "owner": { - "description": "Owner of this DataProduct.", - "$ref": "../../type/entityReference.json", - "default": null - }, - "domain": { - "description": "Fully qualified name of the Domain the DataProduct belongs to.", - "$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName", - "default": null - }, - "experts": { - "description": "List of user/login names of users who are experts in this DataProduct.", - "type" : "array", - "items": { - "type" : "string" - }, - "default": null - }, - "assets": { - "description": "Data assets collection that is part of this data product.", - "$ref" : "../../type/entityReferenceList.json", - "default": null - } - }, - "required": ["id", "name", "description", "domain"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/api/domains/createDomain.json b/openmetadata-spec/src/main/resources/json/schema/api/domains/createDomain.json deleted file mode 100644 index a1078df276a1..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/api/domains/createDomain.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "$id": 
"https://open-metadata.org/schema/entity/domains/createDomain.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "createDomain", - "description": "Create Domain API request", - "type": "object", - "javaType": "org.openmetadata.schema.api.domains.CreateDomain", - "javaInterfaces": ["org.openmetadata.schema.CreateEntity"], - "properties": { - "domainType": { - "description": "Domain type", - "$ref": "../../entity/domains/domain.json#/definitions/domainType" - }, - "name": { - "description": "A unique name of the Domain", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "FullyQualifiedName same as `name`.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Name used for display purposes. Example 'Marketing', 'Payments', etc.", - "type": "string" - }, - "description": { - "description": "Description of the Domain.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "parent" : { - "description" : "Fully qualified name of parent domain.", - "type" : "string" - }, - "owner": { - "description": "Owner of this Domain.", - "$ref": "../../type/entityReference.json", - "default": null - }, - "experts": { - "description": "List of user/login names of users who are experts in this Domain.", - "type" : "array", - "items": { - "type" : "string" - }, - "default": null - } - }, - "required": ["id", "name", "description", "domainType"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createDashboardService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createDashboardService.json index c9da120c794d..287167e760e3 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createDashboardService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createDashboardService.json @@ -36,10 +36,6 @@ "owner": { 
"description": "Owner of this dashboard service.", "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the Dashboard Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createDatabaseService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createDatabaseService.json index 11d1254081b9..780919998f7e 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createDatabaseService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createDatabaseService.json @@ -37,10 +37,6 @@ "owner": { "description": "Owner of this database service.", "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the Database Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createMessagingService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createMessagingService.json index b64c8b10f579..25b1ce6a2127 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createMessagingService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createMessagingService.json @@ -37,10 +37,6 @@ "owner": { "description": "Owner of this messaging service.", "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the Messaging Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createMlModelService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createMlModelService.json index f1963f98bced..92b7ed404945 100644 --- 
a/openmetadata-spec/src/main/resources/json/schema/api/services/createMlModelService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createMlModelService.json @@ -37,10 +37,6 @@ "owner": { "description": "Owner of this mlModel service.", "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the MLModel Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createPipelineService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createPipelineService.json index 050a3988d0e3..bfb30bebec1d 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createPipelineService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createPipelineService.json @@ -42,10 +42,6 @@ "description": "Scheduler Interval for the pipeline in cron format.", "type": "string", "default": null - }, - "domain" : { - "description": "Fully qualified name of the domain the Pipeline Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createSearchService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createSearchService.json deleted file mode 100644 index da5c731374de..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createSearchService.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/api/services/createSearchService.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "CreateSearchServiceRequest", - "description": "Create Search Service entity request", - "type": "object", - "javaType": "org.openmetadata.schema.api.services.CreateSearchService", - "javaInterfaces": ["org.openmetadata.schema.CreateEntity"], - - 
"properties": { - "name": { - "description": "Name that identifies the this entity instance uniquely", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "displayName": { - "description": "Display Name that identifies this search service. It could be title or label from the source services.", - "type": "string" - }, - "description": { - "description": "Description of search service entity.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "serviceType": { - "$ref": "../../entity/services/searchService.json#/definitions/searchServiceType" - }, - "connection": { - "$ref": "../../entity/services/searchService.json#/definitions/searchConnection" - }, - "tags": { - "description": "Tags for this Search Service.", - "type": "array", - "items": { - "$ref": "../../type/tagLabel.json" - }, - "default": null - }, - "owner": { - "description": "Owner of this search service.", - "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the Search Service belongs to.", - "type": "string" - } - }, - "required": ["name", "serviceType", "connection"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/api/services/createStorageService.json b/openmetadata-spec/src/main/resources/json/schema/api/services/createStorageService.json index b9b15fb8994e..0d1cab472c15 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/services/createStorageService.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/services/createStorageService.json @@ -37,10 +37,6 @@ "owner": { "description": "Owner of this object store service.", "$ref": "../../type/entityReference.json" - }, - "domain" : { - "description": "Fully qualified name of the domain the Storage Service belongs to.", - "type": "string" } }, "required": ["name", "serviceType", "connection"], diff --git a/openmetadata-spec/src/main/resources/json/schema/api/teams/createTeam.json 
b/openmetadata-spec/src/main/resources/json/schema/api/teams/createTeam.json index 955eebfa4296..98744decc30b 100644 --- a/openmetadata-spec/src/main/resources/json/schema/api/teams/createTeam.json +++ b/openmetadata-spec/src/main/resources/json/schema/api/teams/createTeam.json @@ -80,10 +80,6 @@ "$ref": "../../type/basic.json#/definitions/uuid" }, "default": null - }, - "domain" : { - "description": "Fully qualified name of the domain the Team belongs to.", - "type": "string" } }, "required": ["name", "teamType"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/chart.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/chart.json index 739d34e493eb..d5f22d47b568 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/chart.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/chart.json @@ -138,14 +138,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Chart belongs to. The Chart inherits domain from the dashboard service it belongs to.", - "$ref": "../../type/entityReference.json" - }, - "dataProducts" : { - "description": "List of data products this entity is part of.", - "$ref" : "../../type/entityReferenceList.json" } }, "required": ["id", "name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/container.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/container.json index 2f130648664b..60e6a0a1bd15 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/container.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/container.json @@ -171,10 +171,6 @@ "sourceUrl": { "description": "Source URL of container.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Domain the Container belongs to. 
When not set, the Container inherits the domain from the storage service it belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": [ diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboard.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboard.json index d8e1faef0f27..985c12e12658 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboard.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboard.json @@ -126,14 +126,6 @@ "extension": { "description": "Entity extension data with custom attributes added to the entity.", "$ref": "../../type/basic.json#/definitions/entityExtension" - }, - "domain" : { - "description": "Domain the Dashboard belongs to. When not set, the Dashboard inherits the domain from the dashboard service it belongs to.", - "$ref": "../../type/entityReference.json" - }, - "dataProducts" : { - "description": "List of data products this entity is part of.", - "$ref" : "../../type/entityReferenceList.json" } }, "required": ["id", "name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboardDataModel.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboardDataModel.json index 1210a77d986b..53c5b7f31bdc 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboardDataModel.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/dashboardDataModel.json @@ -137,10 +137,6 @@ "project": { "description": "Name of the project / workspace / collection in which the dataModel is contained", "type": "string" - }, - "domain" : { - "description": "Domain the Dashboard Data Model belongs to. 
When not set, the Dashboard model inherits the domain from the dashboard service it belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": [ diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/database.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/database.json index d1f8474cf505..aa25cb303a14 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/database.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/database.json @@ -111,10 +111,6 @@ "sourceUrl": { "description": "Source URL of database.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Domain the Database belongs to. When not set, the Database inherits the domain from the database service it belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/databaseSchema.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/databaseSchema.json index bc231e1c85ef..92d50a931083 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/databaseSchema.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/databaseSchema.json @@ -106,10 +106,6 @@ "sourceUrl": { "description": "Source URL of database schema.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Domain the Database Schema belongs to. 
When not set, the Schema inherits the domain from the database it belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["name", "database", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/glossary.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/glossary.json index 05d94253b314..fc9f95beb4f3 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/glossary.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/glossary.json @@ -91,10 +91,6 @@ "description" : "Glossary terms that are direct children in this glossary are mutually exclusive. When mutually exclusive is `true` only one term can be used to label an entity. When mutually exclusive is `false`, multiple terms from this group can be used to label an entity.", "type" : "boolean", "default" : "false" - }, - "domain" : { - "description": "Domain the Glossary belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "description"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/glossaryTerm.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/glossaryTerm.json index c503130b869a..57d6622ff7a8 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/glossaryTerm.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/glossaryTerm.json @@ -139,10 +139,6 @@ "description" : "Glossary terms that are children of this term are mutually exclusive. When mutually exclusive is `true` only one term can be used to label an entity from this group. When mutually exclusive is `false`, multiple terms from this group can be used to label an entity.", "type" : "boolean", "default" : "false" - }, - "domain" : { - "description": "Domain the Glossary Term belongs to. 
When not set, the Glossary TErm inherits the domain from the Glossary it belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "description", "glossary"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/metrics.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/metrics.json index b4daba9af04f..bf4dc98ee8e2 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/metrics.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/metrics.json @@ -72,10 +72,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Metrics belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/mlmodel.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/mlmodel.json index 7acecd52bcbd..79daa3cde6bf 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/mlmodel.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/mlmodel.json @@ -268,12 +268,7 @@ "sourceUrl": { "description": "Source URL of mlModel.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Domain the MLModel belongs to. 
When not set, the MLModel inherits the domain from the ML Model Service it belongs to.", - "$ref": "../../type/entityReference.json" } - }, "required": ["id", "name", "algorithm", "service"], "additionalProperties": false diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/pipeline.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/pipeline.json index ad7085081c8f..28757bb0895c 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/pipeline.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/pipeline.json @@ -254,12 +254,7 @@ "description": "Scheduler Interval for the pipeline in cron format.", "type": "string", "default": null - }, - "domain" : { - "description": "Domain the Pipeline belongs to. When not set, the pipeline inherits the domain from the Pipeline service it belongs to.", - "$ref": "../../type/entityReference.json" } - }, "required": ["id", "name", "service"], "additionalProperties": false diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/query.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/query.json index d67ad669178d..f4f1011dbaf8 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/query.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/query.json @@ -83,6 +83,14 @@ "description": "Date on which the query ran.", "$ref": "../../type/basic.json#/definitions/timestamp" }, + "usedBy" : { + "description": "List of users who ran the query but do not exist in OpenMetadata.", + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, "tags": { "description": "Tags for this SQL query.", "type": "array", diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/searchIndex.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/searchIndex.json deleted file mode 100644 index 9da2bdd2a750..000000000000 --- 
a/openmetadata-spec/src/main/resources/json/schema/entity/data/searchIndex.json +++ /dev/null @@ -1,233 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/data/SearchIndex.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "SearchIndex", - "$comment": "@om-entity-type", - "description": "A `SearchIndex` is a index mapping definition in ElasticSearch or OpenSearch", - "type": "object", - "javaType": "org.openmetadata.schema.entity.data.SearchIndex", - "javaInterfaces": ["org.openmetadata.schema.EntityInterface"], - "definitions": { - "searchIndexSettings": { - "javaType": "org.openmetadata.schema.type.searchindex.SearchIndexSettings", - "description": "Contains key/value pair of SearchIndex Settings.", - "type": "object", - "additionalProperties": { - ".{1,}": { "type": "string" } - } - }, - "searchIndexSampleData": { - "type": "object", - "javaType": "org.openmetadata.schema.type.searchindex.SearchIndexSampleData", - "description": "This schema defines the type to capture sample data for a SearchIndex.", - "properties": { - "messages": { - "description": "List of local sample messages for a SearchIndex.", - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "dataType": { - "javaType": "org.openmetadata.schema.type.SearchIndexDataType", - "description": "This enum defines the type of data stored in a searchIndex.", - "type": "string", - "enum": [ - "NUMBER", - "TEXT", - "BINARY", - "TIMESTAMP", - "TIMESTAMPZ", - "TIME", - "DATE", - "DATETIME", - "KEYWORD", - "ARRAY", - "OBJECT", - "FLATTENED", - "NESTED", - "JOIN", - "RANGE", - "IP", - "VERSION", - "MURMUR3", - "AGGREGATE_METRIC_DOUBLE", - "HISTOGRAM", - "ANNOTATED-TEXT", - "COMPLETION", - "SEARCH_AS_YOU_TYPE", - "DENSE_VECTOR", - "RANK_FEATURE", - "RANK_FEATURES", - "GEO_POINT", - "GEO_SHAPE", - "POINT", - "SHAPE", - "PERCOLATOR", - "UNKNOWN" - ] - }, - "searchIndexFieldName": { - "description": "Local name (not fully qualified name) 
of the field. ", - "type": "string", - "minLength": 1, - "maxLength": 256, - "pattern": "^((?!::).)*$" - }, - "searchIndexField": { - "type": "object", - "javaType": "org.openmetadata.schema.type.SearchIndexField", - "description": "This schema defines the type for a field in a searchIndex.", - "properties": { - "name": { - "$ref": "#/definitions/searchIndexFieldName" - }, - "displayName": { - "description": "Display Name that identifies this searchIndexField name.", - "type": "string" - }, - "dataType": { - "description": "Data type of the searchIndex (int, date etc.).", - "$ref": "#/definitions/dataType" - }, - "dataTypeDisplay": { - "description": "Display name used for dataType. ", - "type": "string" - }, - "description": { - "description": "Description of the field.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "fullyQualifiedName": { - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "tags": { - "description": "Tags associated with the column.", - "type": "array", - "items": { - "$ref": "../../type/tagLabel.json" - }, - "default": null - }, - "children": { - "description": "Child columns if dataType has properties.", - "type": "array", - "items": { - "$ref": "#/definitions/searchIndexField" - }, - "default": null - } - }, - "required": [ - "name", - "dataType" - ], - "additionalProperties": false - } - }, - "properties": { - "id": { - "description": "Unique identifier that identifies this SearchIndex instance.", - "$ref": "../../type/basic.json#/definitions/uuid" - }, - "name": { - "description": "Name that identifies the SearchIndex.", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "Name that uniquely identifies a SearchIndex in the format 'searchServiceName.searchIndexName'.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Display Name that identifies this SearchIndex. 
It could be title or label from the source services.", - "type": "string" - }, - "description": { - "description": "Description of the SearchIndex instance.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "version": { - "description": "Metadata version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/entityVersion" - }, - "updatedAt": { - "description": "Last update time corresponding to the new version of the entity in Unix epoch time milliseconds.", - "$ref": "../../type/basic.json#/definitions/timestamp" - }, - "updatedBy": { - "description": "User who made the update.", - "type": "string" - }, - "service": { - "description": "Link to the search cluster/service where this SearchIndex is hosted in.", - "$ref": "../../type/entityReference.json" - }, - "serviceType": { - "description": "Service type where this SearchIndex is hosted in.", - "$ref": "../services/searchService.json#/definitions/searchServiceType" - }, - "fields": { - "description": "Fields in this SearchIndex.", - "type": "array", - "items": { - "$ref": "#/definitions/searchIndexField" - }, - "default": null - }, - "searchIndexSettings": { - "description": "Contains key/value pair of searchIndex settings.", - "$ref": "#/definitions/searchIndexSettings" - }, - "sampleData": { - "description": "Sample data for a searchIndex.", - "$ref": "#/definitions/searchIndexSampleData", - "default": null - }, - "owner": { - "description": "Owner of this searchIndex.", - "$ref": "../../type/entityReference.json" - }, - "followers": { - "description": "Followers of this searchIndex.", - "$ref": "../../type/entityReferenceList.json" - }, - "tags": { - "description": "Tags for this searchIndex.", - "type": "array", - "items": { - "$ref": "../../type/tagLabel.json" - }, - "default": null - }, - "href": { - "description": "Link to the resource corresponding to this entity.", - "$ref": "../../type/basic.json#/definitions/href" - }, - "changeDescription": { - "description": "Change 
that lead to this version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/changeDescription" - }, - "deleted": { - "description": "When `true` indicates the entity has been soft deleted.", - "type": "boolean", - "default": false - }, - "extension": { - "description": "Entity extension data with custom attributes added to the entity.", - "$ref": "../../type/basic.json#/definitions/entityExtension" - }, - "domain" : { - "description": "Domain the SearchIndex belongs to. When not set, the SearchIndex inherits the domain from the messaging service it belongs to.", - "$ref": "../../type/entityReference.json" - }, - "dataProducts" : { - "description": "List of data products this entity is part of.", - "$ref" : "../../type/entityReferenceList.json" - } - }, - "required": ["id", "name", "service", "fields"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/table.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/table.json index de54f04346da..faee71a4aa41 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/table.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/table.json @@ -1021,14 +1021,6 @@ "description": "Source URL of table.", "$ref": "../../type/basic.json#/definitions/sourceUrl" }, - "domain" : { - "description": "Domain the table belongs to. 
When not set, the table inherits the domain from the database schema it belongs to.", - "$ref": "../../type/entityReference.json" - }, - "dataProducts" : { - "description": "List of data products this entity is part of.", - "$ref" : "../../type/entityReferenceList.json" - }, "fileFormat": { "description": "File format in case of file/datalake tables.", "$ref" : "#/definitions/fileFormat" diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/data/topic.json b/openmetadata-spec/src/main/resources/json/schema/entity/data/topic.json index 94ab86cb7a6d..14da8b0fb89a 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/data/topic.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/data/topic.json @@ -157,14 +157,6 @@ "sourceUrl": { "description": "Source URL of topic.", "$ref": "../../type/basic.json#/definitions/sourceUrl" - }, - "domain" : { - "description": "Domain the Topic belongs to. When not set, the Topic inherits the domain from the messaging service it belongs to.", - "$ref": "../../type/entityReference.json" - }, - "dataProducts" : { - "description": "List of data products this entity is part of.", - "$ref" : "../../type/entityReferenceList.json" } }, "required": ["id", "name", "partitions", "service"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/domains/dataProduct.json b/openmetadata-spec/src/main/resources/json/schema/entity/domains/dataProduct.json deleted file mode 100644 index 4e48db9df2f1..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/domains/dataProduct.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/domains/dataProduct.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "DataProduct", - "description": "A `Data Product` or `Data as a Product` is a logical unit that contains all components to process and store domain data for analytical or data-intensive use cases made available to data 
consumers.", - "type": "object", - "javaType": "org.openmetadata.schema.entity.domains.DataProduct", - "javaInterfaces": ["org.openmetadata.schema.EntityInterface"], - "properties": { - "id": { - "description": "Unique ID of the Data Product", - "$ref": "../../type/basic.json#/definitions/uuid" - }, - "name": { - "description": "A unique name of the Data Product", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "FullyQualifiedName is `domain.dataProductName` or `sub-domain.dataProductName`.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Name used for display purposes. Example 'Marketing', 'Payments', etc.", - "type": "string" - }, - "description": { - "description": "Description of the Data Product.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "version": { - "description": "Metadata version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/entityVersion" - }, - "updatedAt": { - "description": "Last update time corresponding to the new version of the entity in Unix epoch time milliseconds.", - "$ref": "../../type/basic.json#/definitions/timestamp" - }, - "updatedBy": { - "description": "User who made the update.", - "type": "string" - }, - "href": { - "description": "Link to the resource corresponding to this entity.", - "$ref": "../../type/basic.json#/definitions/href" - }, - "owner": { - "description": "Owner of this Data Product.", - "$ref": "../../type/entityReference.json" - }, - "experts": { - "description": "List of users who are experts for this Data Product.", - "$ref": "../../type/entityReferenceList.json", - "default" : null - }, - "domain": { - "description": "Domain or sub-domain to which this Data Product belongs to.", - "$ref": "../../type/entityReference.json" - }, - "assets": { - "description": "Data assets collection that is part of this data product.", - "$ref" : 
"../../type/entityReferenceList.json" - }, - "changeDescription": { - "description": "Change that lead to this version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/changeDescription" - } - }, - "required": ["id", "name", "description", "domain"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/domains/domain.json b/openmetadata-spec/src/main/resources/json/schema/entity/domains/domain.json deleted file mode 100644 index 775cc3bfccdc..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/domains/domain.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/domains/domain.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Domain", - "description": "A `Domain` is a bounded context that is aligned with a Business Unit or a function within an organization.", - "type": "object", - "javaType": "org.openmetadata.schema.entity.domains.Domain", - "javaInterfaces": ["org.openmetadata.schema.EntityInterface"], - "definitions": { - "domainType" : { - "description" : "Type of a domain", - "type" : "string", - "enum": [ - "Source-aligned", - "Consumer-aligned", - "Aggregate" - ] - } - }, - "properties": { - "id": { - "description": "Unique ID of the Domain", - "$ref": "../../type/basic.json#/definitions/uuid" - }, - "domainType": { - "description": "Domain type", - "$ref": "#/definitions/domainType" - }, - "name": { - "description": "A unique name of the Domain", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "FullyQualifiedName same as `name`.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Name used for display purposes. 
Example 'Marketing', 'Payments', etc.", - "type": "string" - }, - "description": { - "description": "Description of the Domain.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "version": { - "description": "Metadata version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/entityVersion" - }, - "updatedAt": { - "description": "Last update time corresponding to the new version of the entity in Unix epoch time milliseconds.", - "$ref": "../../type/basic.json#/definitions/timestamp" - }, - "updatedBy": { - "description": "User who made the update.", - "type": "string" - }, - "href": { - "description": "Link to the resource corresponding to this entity.", - "$ref": "../../type/basic.json#/definitions/href" - }, - "parent" : { - "description" : "Parent domains. When 'null' or not set, indicates that this is the top level domain.", - "$ref" : "../../type/entityReference.json" - }, - "children" : { - "description" : "Children domains or sub-domains.", - "$ref": "../../type/entityReferenceList.json" - }, - "owner": { - "description": "Owner of this Domain.", - "$ref": "../../type/entityReference.json" - }, - "experts": { - "description": "List of users who are experts in this Domain.", - "$ref": "../../type/entityReferenceList.json", - "default" : null - }, - "changeDescription": { - "description": "Change that lead to this version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/changeDescription" - } - }, - "required": ["id", "name", "description", "domainType"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/hiveConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/hiveConnection.json index 678d535e4abe..937f1e4d9563 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/hiveConnection.json +++ 
b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/hiveConnection.json @@ -79,15 +79,15 @@ "title": "Hive Metastore Connection Details", "description": "Hive Metastore Connection Details", "oneOf": [ - { - "title": "None", - "type": "object" - }, { "$ref": "./postgresConnection.json" }, { "$ref": "./mysqlConnection.json" + }, + { + "title": "None", + "type": "object" } ] }, diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/customSearchConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/customSearchConnection.json deleted file mode 100644 index 20d6208f3f06..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/customSearchConnection.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/services/connections/search/customSearchConnection.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "CustomSearchConnection", - "description": "Custom Search Service connection to build a source that is not supported by OpenMetadata yet.", - "type": "object", - "javaType": "org.openmetadata.schema.services.connections.search.CustomSearchConnection", - "definitions": { - "customSearchType": { - "title": "Service Type", - "description": "Custom search service type", - "type": "string", - "enum": ["CustomSearch"], - "default": "CustomSearch" - } - }, - "properties": { - "type": { - "title": "Service Type", - "description": "Custom search service type", - "$ref": "#/definitions/customSearchType", - "default": "CustomSearch" - }, - "sourcePythonClass": { - "title": "Source Python Class Name", - "description": "Source Python Class Name to instantiated by the ingestion workflow", - "type": "string" - }, - "connectionOptions": { - "title": "Connection Options", - "$ref": "../connectionBasicType.json#/definitions/connectionOptions" - } - }, - 
"additionalProperties": false, - "required": ["type"] -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/openSearchConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/openSearchConnection.json deleted file mode 100644 index b2679e3b051a..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/search/openSearchConnection.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/services/connections/search/openSearchConnection.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "OpenSearch Connection", - "description": "OpenSearch Connection.", - "type": "object", - "javaType": "org.openmetadata.schema.services.connections.search.OpenSearchConnection", - "definitions": { - "openSearchType": { - "description": "OpenSearch service type", - "type": "string", - "enum": ["OpenSearch"], - "default": "OpenSearch" - } - }, - "properties": { - "type": { - "title": "Service Type", - "description": "Service Type", - "$ref": "#/definitions/openSearchType", - "default": "OpenSearch" - }, - "hostPort": { - "title": "Host and Port", - "description": "Host and port of the OpenSearch service.", - "type": "string" - }, - "scheme": { - "description": "Http/Https connection scheme", - "type": "string" - }, - "username": { - "description": "OpenSearch Username for Login", - "type": "string" - }, - "password": { - "description": "OpenSearch Password for Login", - "type": "string" - }, - "truststorePath": { - "description": "Truststore Path", - "type": "string" - }, - "truststorePassword": { - "description": "Truststore Password", - "type": "string" - }, - "connectionTimeoutSecs": { - "description": "Connection Timeout in Seconds", - "type": "integer", - "default": 5 - }, - "socketTimeoutSecs": { - "description": "Socket Timeout in Seconds", - "type": "integer", - "default": 60 - }, - 
"keepAliveTimeoutSecs": { - "description": "Keep Alive Timeout in Seconds", - "type": "integer" - }, - "connectionOptions": { - "title": "Connection Options", - "$ref": "../connectionBasicType.json#/definitions/connectionOptions" - }, - "connectionArguments": { - "title": "Connection Arguments", - "$ref": "../connectionBasicType.json#/definitions/connectionArguments" - }, - "supportsMetadataExtraction": { - "title": "Supports Metadata Extraction", - "$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction" - } - }, - "additionalProperties": false, - "required": [ - "hostPort" - ] -} \ No newline at end of file diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/serviceConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/serviceConnection.json index ae2c5d26bfb3..570b901ac0b1 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/serviceConnection.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/serviceConnection.json @@ -28,9 +28,6 @@ }, { "$ref": "../storageService.json#/definitions/storageConnection" - }, - { - "$ref": "../searchService.json#/definitions/searchConnection" } ] } diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json index dd6d85c41c80..46d0309b1686 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json @@ -185,10 +185,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Dashboard service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "serviceType"], diff 
--git a/openmetadata-spec/src/main/resources/json/schema/entity/services/databaseService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/databaseService.json index 3dd12c51975c..9bba62dc0c08 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/databaseService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/databaseService.json @@ -332,10 +332,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Database service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "serviceType"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/messagingService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/messagingService.json index d4558b0525d3..1e55346097cb 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/messagingService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/messagingService.json @@ -139,10 +139,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Messaging service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "serviceType"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/mlmodelService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/mlmodelService.json index 72efa49bc89c..79b774ff9063 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/mlmodelService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/mlmodelService.json @@ -131,10 +131,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - 
"description": "Domain the MLModel service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "serviceType"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/pipelineService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/pipelineService.json index 1aee045549c1..83cc7a126810 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/pipelineService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/pipelineService.json @@ -180,10 +180,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Pipeline service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": [ diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/searchService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/searchService.json deleted file mode 100644 index 040209c6e73d..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/searchService.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/entity/services/searchService.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Search Service", - "description": "This schema defines the Search Service entity, such as ElasticSearch, OpenSearch.", - "type": "object", - "javaType": "org.openmetadata.schema.entity.services.SearchService", - "javaInterfaces": [ - "org.openmetadata.schema.EntityInterface", - "org.openmetadata.schema.ServiceEntityInterface" - ], - "definitions": { - "searchServiceType": { - "description": "Type of search service such as ElasticSearch or OpenSearch.", - "javaInterfaces": [ - "org.openmetadata.schema.EnumInterface" - ], - "type": "string", - "enum": [ - "ElasticSearch", - "OpenSearch", - "CustomSearch" - ], - "javaEnums": [ - { - "name": 
"ElasticSearch" - }, - { - "name": "OpenSearch" - }, - { - "name": "CustomSearch" - } - ] - }, - "searchConnection": { - "type": "object", - "javaType": "org.openmetadata.schema.type.SearchConnection", - "description": "search Connection.", - "javaInterfaces": [ - "org.openmetadata.schema.ServiceConnectionEntityInterface" - ], - "properties": { - "config": { - "mask": true, - "oneOf": [ - { - "$ref": "connections/search/elasticSearchConnection.json" - }, - { - "$ref": "connections/search/openSearchConnection.json" - }, - { - "$ref": "connections/search/customSearchConnection.json" - } - ] - } - }, - "additionalProperties": false - } - }, - "properties": { - "id": { - "description": "Unique identifier of this search service instance.", - "$ref": "../../type/basic.json#/definitions/uuid" - }, - "name": { - "description": "Name that identifies this search service.", - "$ref": "../../type/basic.json#/definitions/entityName" - }, - "fullyQualifiedName": { - "description": "FullyQualifiedName same as `name`.", - "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName" - }, - "displayName": { - "description": "Display Name that identifies this search service.", - "type": "string" - }, - "serviceType": { - "description": "Type of search service such as S3, GCS, AZURE...", - "$ref": "#/definitions/searchServiceType" - }, - "description": { - "description": "Description of a search service instance.", - "$ref": "../../type/basic.json#/definitions/markdown" - }, - "connection": { - "$ref": "#/definitions/searchConnection" - }, - "pipelines": { - "description": "References to pipelines deployed for this search service to extract metadata etc..", - "$ref": "../../type/entityReferenceList.json" - }, - "testConnectionResult": { - "description": "Last test connection results for this service", - "$ref": "connections/testConnectionResult.json" - }, - "tags": { - "description": "Tags for this search Service.", - "type": "array", - "items": { - "$ref": 
"../../type/tagLabel.json" - }, - "default": null - }, - "version": { - "description": "Metadata version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/entityVersion" - }, - "updatedAt": { - "description": "Last update time corresponding to the new version of the entity in Unix epoch time milliseconds.", - "$ref": "../../type/basic.json#/definitions/timestamp" - }, - "updatedBy": { - "description": "User who made the update.", - "type": "string" - }, - "href": { - "description": "Link to the resource corresponding to this search service.", - "$ref": "../../type/basic.json#/definitions/href" - }, - "owner": { - "description": "Owner of this search service.", - "$ref": "../../type/entityReference.json" - }, - "changeDescription": { - "description": "Change that lead to this version of the entity.", - "$ref": "../../type/entityHistory.json#/definitions/changeDescription" - }, - "deleted": { - "description": "When `true` indicates the entity has been soft deleted.", - "type": "boolean", - "default": false - }, - "domain" : { - "description": "Domain the search service belongs to.", - "$ref": "../../type/entityReference.json" - } - }, - "required": ["id", "name", "serviceType"], - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/serviceType.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/serviceType.json index 14b6faa5af78..05110e189cf4 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/serviceType.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/serviceType.json @@ -12,8 +12,7 @@ "Metadata", "MlModel", "Pipeline", - "Storage", - "Search" + "Storage" ], "additionalProperties": false } diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/storageService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/storageService.json index a9f3129dd5ed..ef5657747857 100644 --- 
a/openmetadata-spec/src/main/resources/json/schema/entity/services/storageService.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/storageService.json @@ -131,10 +131,6 @@ "description": "When `true` indicates the entity has been soft deleted.", "type": "boolean", "default": false - }, - "domain" : { - "description": "Domain the Storage service belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name", "serviceType"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/teams/team.json b/openmetadata-spec/src/main/resources/json/schema/entity/teams/team.json index 522ec4dd499d..fc9df0309c0a 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/teams/team.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/teams/team.json @@ -123,10 +123,6 @@ "policies": { "description": "Policies that is attached to this team.", "$ref": "../../type/entityReferenceList.json" - }, - "domain" : { - "description": "Domain the Team belongs to.", - "$ref": "../../type/entityReference.json" } }, "required": ["id", "name"], diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/teams/teamHierarchy.json b/openmetadata-spec/src/main/resources/json/schema/entity/teams/teamHierarchy.json index ac87b5d2f1a7..1a9b9919d6d5 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/teams/teamHierarchy.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/teams/teamHierarchy.json @@ -20,7 +20,7 @@ }, "teamType": { "description": "Team type", - "$ref": "team.json#/definitions/teamType" + "$ref": "../../entity/teams/team.json#/definitions/teamType" }, "name": { "description": "A unique name of the team typically the team ID from an identity provider. 
Example - group Id from LDAP.", diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/teams/user.json b/openmetadata-spec/src/main/resources/json/schema/entity/teams/user.json index d7083aa9fd73..2337ba068b2e 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/teams/user.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/teams/user.json @@ -132,10 +132,6 @@ "isEmailVerified": { "description": "If the User has verified the mail", "type": "boolean" - }, - "domain" : { - "description": "Domain the User belongs to. This is inherited by the team the user belongs to.", - "$ref": "../../type/entityReference.json" } }, "additionalProperties": false, diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/searchServiceMetadataPipeline.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/searchServiceMetadataPipeline.json deleted file mode 100644 index f38e934c55ca..000000000000 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/searchServiceMetadataPipeline.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$id": "https://open-metadata.org/schema/metadataIngestion/searchServiceMetadataPipeline.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "SearchServiceMetadataPipeline", - "description": "SearchService Metadata Pipeline Configuration.", - "type": "object", - "definitions": { - "searchMetadataConfigType": { - "description": "Search Source Config Metadata Pipeline type", - "type": "string", - "enum": ["SearchMetadata"], - "default": "SearchMetadata" - } - }, - "properties": { - "type": { - "description": "Pipeline type", - "$ref": "#/definitions/searchMetadataConfigType", - "default": "SearchMetadata" - }, - "searchIndexFilterPattern": { - "description": "Regex to only fetch search indexes that matches the pattern.", - "$ref": "../type/filterPattern.json#/definitions/filterPattern" - }, - "markDeletedSearchIndexes": { - "description": "Optional 
configuration to soft delete search indexes in OpenMetadata if the source search indexes are deleted. Also, if the search index is deleted, all the associated entities like lineage, etc., with that search index will be deleted", - "type": "boolean", - "default": true - }, - "includeSampleData": { - "description": "Optional configuration to turn off fetching sample data for search index.", - "type": "boolean", - "default": true - }, - "sampleSize": { - "description": "No. of rows of sample data we want to ingest.", - "default": 10, - "type": "integer" - } - }, - "additionalProperties": false -} diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json index 9c6b9bb9725e..db0152b8eaf8 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/workflow.json @@ -39,9 +39,6 @@ { "$ref": "storageServiceMetadataPipeline.json" }, - { - "$ref": "searchServiceMetadataPipeline.json" - }, { "$ref": "testSuitePipeline.json" }, diff --git a/openmetadata-spec/src/main/resources/json/schema/type/entityRelationship.json b/openmetadata-spec/src/main/resources/json/schema/type/entityRelationship.json index 5246efc5a525..8b30f9f85bc7 100644 --- a/openmetadata-spec/src/main/resources/json/schema/type/entityRelationship.json +++ b/openmetadata-spec/src/main/resources/json/schema/type/entityRelationship.json @@ -30,8 +30,7 @@ "relatedTo", "reviews", "reactedTo", - "voted", - "expert" + "voted" ], "javaEnums": [ { "name": "CONTAINS" }, diff --git a/openmetadata-ui/pom.xml b/openmetadata-ui/pom.xml index 0c7322c823d3..d22121118203 100644 --- a/openmetadata-ui/pom.xml +++ b/openmetadata-ui/pom.xml @@ -5,7 +5,7 @@ platform org.open-metadata - 1.2.0-SNAPSHOT + 1.1.5 4.0.0 diff --git a/openmetadata-ui/src/main/resources/ui/cypress/common/common.js 
b/openmetadata-ui/src/main/resources/ui/cypress/common/common.js index 5f6e7096b392..0cee3f28b357 100644 --- a/openmetadata-ui/src/main/resources/ui/cypress/common/common.js +++ b/openmetadata-ui/src/main/resources/ui/cypress/common/common.js @@ -1090,6 +1090,105 @@ export const updateDescriptionForIngestedTables = ( .should('contain', description); }; +export const editIngestionService = (serviceName, serviceType) => { + interceptURL( + 'GET', + `/api/v1/services/ingestionPipelines?fields=*&service=*`, + 'ingestionPipelines' + ); + interceptURL('GET', `/api/v1/*?service=*&fields=*`, 'serviceDetails'); + interceptURL( + 'GET', + `/api/v1/system/config/pipeline-service-client`, + 'pipelineServiceClient' + ); + interceptURL( + 'GET', + `/api/v1/services/ingestionPipelines/*/pipelineStatus?*`, + 'pipelineStatus' + ); + + // edit ingestion flow + cy.get('[data-testid="appbar-item-settings"]').should('be.visible').click(); + + // Services page + cy.get('.ant-menu-title-content') + .contains(serviceType) + .should('be.visible') + .click(); + + // click on created service + cy.get(`[data-testid="service-name-${serviceName}"]`) + .should('exist') + .should('be.visible') + .click(); + + verifyResponseStatusCode('@serviceDetails', 200); + verifyResponseStatusCode('@ingestionPipelines', 200); + verifyResponseStatusCode('@pipelineServiceClient', 200); + cy.get('[data-testid="ingestions"]').should('be.visible').click(); + verifyResponseStatusCode('@pipelineStatus', 200); + + interceptURL( + 'GET', + '/api/v1/services/ingestionPipelines/status', + 'getIngestionPipelineStatus' + ); + + // click on edit ingestion + cy.get( + `[data-row-key*="${replaceAllSpacialCharWith_( + serviceName + )}_metadata"] [data-testid="edit"]` + ) + .should('be.visible') + .click(); + + cy.get('[data-testid="submit-btn"]').should('exist').click(); + verifyResponseStatusCode('@getIngestionPipelineStatus', 200); + + interceptURL( + 'PUT', + '/api/v1/services/ingestionPipelines', + 
'updateIngestionPipelines' + ); + interceptURL( + 'POST', + '/api/v1/services/ingestionPipelines/deploy/*', + 'deployPipeline' + ); + interceptURL( + 'GET', + '/api/v1/services/ingestionPipelines/status', + 'getIngestionPipelineStatus' + ); + // Schedule & Deploy + cy.get('[data-testid="cron-type"]').should('be.visible').click(); + cy.get('.ant-select-item-option-content').contains('Day').click(); + cy.get('[data-testid="deploy-button"]').should('be.visible').click(); + + verifyResponseStatusCode('@updateIngestionPipelines', 200); + verifyResponseStatusCode('@deployPipeline', 200, { + responseTimeout: 50000, + }); + verifyResponseStatusCode('@getIngestionPipelineStatus', 200); + // check success + cy.get('[data-testid="success-line"]', { timeout: 15000 }).should( + 'be.visible' + ); + + interceptURL( + 'GET', + '/api/v1/services/ingestionPipelines?*', + 'ingestionPipelines' + ); + interceptURL('GET', '/api/v1/services/*/name/*', 'serviceDetails'); + + cy.get('[data-testid="view-service-button"]').should('be.visible').click(); + verifyResponseStatusCode('@serviceDetails', 200); + verifyResponseStatusCode('@ingestionPipelines', 200); +}; + export const addOwner = (ownerName, entity, isGlossaryPage) => { cy.get('[data-testid="edit-owner"]').click(); diff --git a/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/mysql.spec.js b/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/mysql.spec.js index a3c5d509c4ff..1273f488191e 100644 --- a/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/mysql.spec.js +++ b/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/mysql.spec.js @@ -13,6 +13,7 @@ import { deleteCreatedService, + editIngestionService, goToAddNewServicePage, mySqlConnectionInput, testServiceCreationAndIngestion, @@ -63,6 +64,10 @@ describe('MySQL Ingestion', () => { ); }); + it('edit and deploy metadata ingestion', () => { + editIngestionService(serviceName, SERVICE_TYPE.Database, 'database'); + }); + 
it('delete created service', () => { deleteCreatedService( SERVICE_TYPE.Database, diff --git a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Service.spec.js b/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Service.spec.js index 9604dea6f62a..05bce93458dd 100644 --- a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Service.spec.js +++ b/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Service.spec.js @@ -32,7 +32,7 @@ describe('Services page should work properly', () => { ); interceptURL( 'GET', - `/api/v1/services/ingestionPipelines?fields=*&service=${service.name}`, + `/api/v1/services/ingestionPipelines?fields=*&service=${service.name}*`, 'ingestionPipelines' ); cy.login(); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/AddIngestion/AddIngestion.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/AddIngestion/AddIngestion.component.tsx index 93fa242b7468..eeb35adb34f0 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/AddIngestion/AddIngestion.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/AddIngestion/AddIngestion.component.tsx @@ -14,7 +14,7 @@ import { Typography } from 'antd'; import IngestionWorkflowForm from 'components/IngestionWorkflowForm/IngestionWorkflowForm'; import { LOADING_STATE } from 'enums/common.enum'; -import { isUndefined, omit, trim } from 'lodash'; +import { isEmpty, isUndefined, omit, trim } from 'lodash'; import React, { useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { STEPS_FOR_ADD_INGESTION } from '../../constants/Ingestions.constant'; @@ -128,7 +128,9 @@ const AddIngestion = ({ const ingestionDetails: CreateIngestionPipeline = { airflowConfig: { - scheduleInterval, + scheduleInterval: isEmpty(scheduleInterval) + ? undefined + : scheduleInterval, startDate: date, }, loggerLevel: enableDebugLog ? 
LogLevels.Debug : LogLevels.Info, @@ -174,7 +176,9 @@ const AddIngestion = ({ ...data, airflowConfig: { ...data.airflowConfig, - scheduleInterval, + scheduleInterval: isEmpty(scheduleInterval) + ? undefined + : scheduleInterval, }, loggerLevel: workflowData?.enableDebugLog ? LogLevels.Debug diff --git a/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.test.tsx index 9b4e97c82518..ae1a07f184f4 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.test.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.test.tsx @@ -64,7 +64,9 @@ jest.mock('components/AppBar/Appbar', () => jest.fn().mockReturnValue(

Appbar

) ); -jest.mock('pages/signup', () => jest.fn().mockReturnValue(

SignUpPage

)); +jest.mock('pages/SignUp/SignUpPage', () => + jest.fn().mockReturnValue(

SignUpPage

) +); jest.mock('components/router/AuthenticatedAppRouter', () => jest.fn().mockReturnValue(

AuthenticatedAppRouter

) diff --git a/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.tsx b/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.tsx index e76ae9d13f8e..3a2cb8e0b3f6 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/AppContainer/AppContainer.tsx @@ -19,7 +19,7 @@ import LeftSidebar from 'components/MyData/LeftSidebar/LeftSidebar.component'; import AuthenticatedAppRouter from 'components/router/AuthenticatedAppRouter'; import { ROUTES } from 'constants/constants'; import { isEmpty } from 'lodash'; -import SignupPage from 'pages/signup'; +import SignUpPage from 'pages/SignUp/SignUpPage'; import React from 'react'; import { Redirect, Route, Switch } from 'react-router-dom'; import './app-container.less'; @@ -27,7 +27,7 @@ import './app-container.less'; const AppContainer = () => { return ( - + {!isEmpty(AppState.userDetails) && } diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Chart/DataDistributionHistogram.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Chart/DataDistributionHistogram.component.tsx index b1b361fb7355..31198dfad80e 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Chart/DataDistributionHistogram.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/Chart/DataDistributionHistogram.component.tsx @@ -30,7 +30,7 @@ import { YAxis, } from 'recharts'; import { axisTickFormatter, tooltipFormatter } from 'utils/ChartUtils'; -import { getFormattedDateFromSeconds } from 'utils/TimeUtils'; +import { getFormattedDateFromMilliSeconds } from 'utils/TimeUtils'; import { DataDistributionHistogramProps } from './Chart.interface'; const DataDistributionHistogram = ({ @@ -70,7 +70,7 @@ const DataDistributionHistogram = ({ frequency, })); - const graphDate = getFormattedDateFromSeconds( + const graphDate = getFormattedDateFromMilliSeconds( 
columnProfile?.timestamp || 0, 'dd/MMM' ); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/DatePickerMenu/DatePickerMenu.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/DatePickerMenu/DatePickerMenu.component.tsx index 8b5e5baaf565..ca93c1358da2 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/DatePickerMenu/DatePickerMenu.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/DatePickerMenu/DatePickerMenu.component.tsx @@ -25,8 +25,8 @@ import React, { useState } from 'react'; import { useTranslation } from 'react-i18next'; import { getDaysCount, getTimestampLabel } from 'utils/DatePickerMenuUtils'; import { - getCurrentDateTimeStamp, - getPastDatesTimeStampFromCurrentDate, + getCurrentDateTimeMillis, + getPastDaysDateTimeMillis, } from 'utils/TimeUtils'; import { ReactComponent as DropdownIcon } from '../../assets/svg/DropDown.svg'; import './DatePickerMenu.style.less'; @@ -57,9 +57,9 @@ function DatePickerMenu({ dateStrings ) => { if (values) { - const startTs = values[0]?.set({ h: 0, m: 0 }).utc().unix() ?? 0; + const startTs = (values[0]?.set({ h: 0, m: 0 }).utc().unix() ?? 0) * 1000; - const endTs = values[1]?.set({ h: 23, m: 59 }).utc().unix() ?? 0; + const endTs = (values[1]?.set({ h: 23, m: 59 }).utc().unix() ?? 
0) * 1000; const daysCount = getDaysCount(dateStrings[0], dateStrings[1]); @@ -87,9 +87,9 @@ function DatePickerMenu({ const selectedNumberOfDays = filterRange.days; const keyString = key as keyof typeof PROFILER_FILTER_RANGE; - const startTs = getPastDatesTimeStampFromCurrentDate(selectedNumberOfDays); + const startTs = getPastDaysDateTimeMillis(selectedNumberOfDays); - const endTs = getCurrentDateTimeStamp(); + const endTs = getCurrentDateTimeMillis(); setSelectedTimeRange(PROFILER_FILTER_RANGE[keyString].title); setSelectedTimeRangeKey(keyString); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Modals/WhatsNewModal/whatsNewData.ts b/openmetadata-ui/src/main/resources/ui/src/components/Modals/WhatsNewModal/whatsNewData.ts index 5f386a1a9c4d..548c0a37319b 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Modals/WhatsNewModal/whatsNewData.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Modals/WhatsNewModal/whatsNewData.ts @@ -18,9 +18,9 @@ import sqlLineageImg from '../../../assets/img/ImprovedSQLLineage.png'; import ingestionFramework from '../../../assets/img/IngestionFramework.png'; import tagCategoryImg from '../../../assets/img/TagCategory.png'; -export const LATEST_VERSION_ID = 17; +export const LATEST_VERSION_ID = 18; -export const COOKIE_VERSION = 'VERSION_1_1_1'; // To be changed with each release. +export const COOKIE_VERSION = 'VERSION_1_1_2'; // To be changed with each release. // for youtube video make isImage = false and path = {video embed id} // embed:- youtube video => share => click on embed and take {url with id} from it @@ -964,4 +964,31 @@ export const WHATS_NEW = [ `, }, }, + { + id: 18, + version: 'v1.1.2', + description: 'Released on 22nd Aug 2023.', + features: [], + changeLogs: { + 'UI Improvements': `- Added Russian language support. +- Supports Delete functionality for sample data. +- Improved Schema page UX. +- Table mentions now show Service, Schema and Database information. 
+- Fixed the version history list. +`, + 'Data Quality': `- Added support for Postgres version 11.19. +- Fixed MariaDB time column issues.`, + Ingestion: `- Improved performance when ingesting table constraints.`, + Connectors: `- Added JWT authentication support for Trino +- Fixed Snowflake connection test. +- Fixed SageMaker ingestion. +- Added external table support for BigQuery.`, + Backend: `- Improved Glossary import validations. +- Fixed Test Suite migrations and naming. +- Fixed Classification migration. +- Deprecated Flyway and using native migrations. +- Improved Test Suite UI performance. + `, + }, + }, ]; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/DataQualityTab.tsx b/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/DataQualityTab.tsx index 8372c0104206..2af5074a2a74 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/DataQualityTab.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/DataQualityTab.tsx @@ -32,11 +32,12 @@ import { TestCaseStatusModal } from 'components/DataQuality/TestCaseStatusModal/ import ConfirmationModal from 'components/Modals/ConfirmationModal/ConfirmationModal'; import { usePermissionProvider } from 'components/PermissionProvider/PermissionProvider'; import { ResourceEntity } from 'components/PermissionProvider/PermissionProvider.interface'; +import { compare } from 'fast-json-patch'; import { TestCaseStatus } from 'generated/configuration/testResultNotificationConfiguration'; import { Operation } from 'generated/entity/policies/policy'; import { isUndefined, sortBy } from 'lodash'; import QueryString from 'qs'; -import { putTestCaseResult, removeTestCaseFromTestSuite } from 'rest/testAPI'; +import { patchTestCaseResult, removeTestCaseFromTestSuite } from 'rest/testAPI'; import { checkPermission } from 'utils/PermissionsUtils'; import { getEncodedFqn, replacePlus } 
from 'utils/StringsUtils'; import { showErrorToast } from 'utils/ToastUtils'; @@ -52,10 +53,7 @@ import { getEntityFqnFromEntityLink, getTableExpandableConfig, } from '../../../utils/TableUtils'; -import { - getFormattedDateFromMilliSeconds, - getFormattedDateFromSeconds, -} from '../../../utils/TimeUtils'; +import { getFormattedDateFromMilliSeconds } from '../../../utils/TimeUtils'; import DeleteWidgetModal from '../../common/DeleteWidget/DeleteWidgetModal'; import Loader from '../../Loader/Loader'; import { @@ -119,14 +117,19 @@ const DataQualityTab: React.FC = ({ }; const handleStatusSubmit = async (data: TestCaseFailureStatus) => { - if (selectedTestCase?.data) { + if (selectedTestCase?.data?.testCaseResult) { + const timestamp = selectedTestCase.data?.testCaseResult.timestamp ?? 0; const updatedResult: TestCaseResult = { ...selectedTestCase.data?.testCaseResult, testCaseFailureStatus: data, }; const testCaseFqn = selectedTestCase.data?.fullyQualifiedName ?? ''; + const patch = compare( + selectedTestCase.data.testCaseResult, + updatedResult + ); try { - await putTestCaseResult(testCaseFqn, updatedResult); + await patchTestCaseResult({ testCaseFqn, patch, timestamp }); onTestCaseResultUpdate?.({ ...selectedTestCase.data, @@ -239,7 +242,7 @@ const DataQualityTab: React.FC = ({ width: 150, render: (result: TestCaseResult) => result?.timestamp - ? getFormattedDateFromSeconds( + ? 
getFormattedDateFromMilliSeconds( result.timestamp, 'MMM dd, yyyy HH:mm' ) diff --git a/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/TestSummary.tsx b/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/TestSummary.tsx index ef04232e746d..87536ec28238 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/TestSummary.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/ProfilerDashboard/component/TestSummary.tsx @@ -52,7 +52,7 @@ import { } from '../../../generated/tests/testCase'; import { axisTickFormatter } from '../../../utils/ChartUtils'; import { getEncodedFqn } from '../../../utils/StringsUtils'; -import { getFormattedDateFromSeconds } from '../../../utils/TimeUtils'; +import { getFormattedDateFromMilliSeconds } from '../../../utils/TimeUtils'; import { showErrorToast } from '../../../utils/ToastUtils'; import ErrorPlaceHolder from '../../common/error-with-placeholder/ErrorPlaceHolder'; import Loader from '../../Loader/Loader'; @@ -103,7 +103,7 @@ const TestSummary: React.FC = ({ }, {}); chartData.push({ - name: getFormattedDateFromSeconds(result.timestamp as number), + name: getFormattedDateFromMilliSeconds(result.timestamp as number), status: result.testCaseStatus ?? 
'', ...values, }); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/SettingsIngestion/SettingsIngestion.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/SettingsIngestion/SettingsIngestion.component.tsx index 5a446dfa6041..67550ca56550 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/SettingsIngestion/SettingsIngestion.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/SettingsIngestion/SettingsIngestion.component.tsx @@ -81,12 +81,12 @@ function SettingsIngestion({ const getAllIngestionWorkflows = async (paging?: string) => { setIsLoading(true); try { - const res = await getIngestionPipelines( - ['pipelineStatuses'], - serviceFQN, + const res = await getIngestionPipelines({ + arrQueryFields: ['pipelineStatuses'], + serviceFilter: serviceFQN, paging, - pipelineType - ); + pipelineType: [pipelineType], + }); if (res.data) { const pipelinesList = res.data.filter( diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/SingleColumnProfile.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/SingleColumnProfile.tsx index 50491c8f77a6..b17a76ac2911 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/SingleColumnProfile.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/SingleColumnProfile.tsx @@ -31,7 +31,7 @@ import React, { FC, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { getColumnProfilerList } from 'rest/tableAPI'; import { getEncodedFqn } from 'utils/StringsUtils'; -import { getFormattedDateFromSeconds } from 'utils/TimeUtils'; +import { getFormattedDateFromMilliSeconds } from 'utils/TimeUtils'; import { showErrorToast } from 'utils/ToastUtils'; interface SingleColumnProfileProps { @@ -99,7 +99,7 @@ const SingleColumnProfile: FC = ({ const sumMetricData: MetricChartType['data'] = []; const 
quartileMetricData: MetricChartType['data'] = []; updateProfilerData.forEach((col) => { - const x = getFormattedDateFromSeconds(col.timestamp); + const x = getFormattedDateFromMilliSeconds(col.timestamp); countMetricData.push({ name: x, diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.test.tsx index 3f2ea806c6c6..ec285a4d3f12 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.test.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.test.tsx @@ -23,7 +23,7 @@ const mockTimeValue = { endMilli: 1670667984000, startMilli: 1670408784000, }; -const mockDateRangeObject = { startTs: 1670408784, endTs: 1670667984 }; +const mockDateRangeObject = { startTs: 1670408784000, endTs: 1670667984000 }; jest.mock('react-router-dom', () => ({ useParams: jest.fn().mockImplementation(() => ({ datasetFQN: mockFQN })), @@ -87,8 +87,8 @@ describe('TableProfilerChart component test', () => { endTs: mockTimeValue.endMilli, }); expect(mockGetTableProfilesList.mock.calls[0][1]).toEqual({ - startTs: mockTimeValue.startSec, - endTs: mockTimeValue.endSec, + startTs: mockTimeValue.startMilli, + endTs: mockTimeValue.endMilli, }); }); @@ -110,8 +110,8 @@ describe('TableProfilerChart component test', () => { endTs: mockTimeValue.endMilli, }); expect(mockGetTableProfilesList.mock.calls[0][1]).toEqual({ - startTs: startTime.inSec, - endTs: mockTimeValue.endSec, + startTs: startTime.inMilli, + endTs: mockTimeValue.endMilli, }); }); }); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.tsx index 7bcfa97365b3..fb795907cb67 100644 --- 
a/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TableProfiler/Component/TableProfilerChart.tsx @@ -87,8 +87,8 @@ const TableProfilerChart = ({ setIsLoading(true); await fetchTableProfiler(fqn, dateRangeObj); await fetchSystemProfiler(fqn, { - startTs: dateRangeObj.startTs * 1000, - endTs: dateRangeObj.endTs * 1000, + startTs: dateRangeObj.startTs, + endTs: dateRangeObj.endTs, }); setIsLoading(false); }; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/QueryCard.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/QueryCard.tsx index f8fb9aedf3eb..3c41408d2642 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/QueryCard.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/QueryCard.tsx @@ -14,7 +14,7 @@ import { Button, Card, Col, Row, Space, Typography } from 'antd'; import { DefaultOptionType } from 'antd/lib/select'; import classNames from 'classnames'; -import { getTableTabPath, getUserPath, PIPE_SYMBOL } from 'constants/constants'; +import { getTableTabPath, PIPE_SYMBOL } from 'constants/constants'; import { QUERY_DATE_FORMAT, QUERY_LINE_HEIGHT } from 'constants/Query.constant'; import { EntityType } from 'enums/entity.enum'; import { useClipboard } from 'hooks/useClipBoard'; @@ -23,7 +23,7 @@ import { Duration } from 'luxon'; import Qs from 'qs'; import React, { FC, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { Link, useHistory, useLocation, useParams } from 'react-router-dom'; +import { useHistory, useLocation, useParams } from 'react-router-dom'; import { parseSearchParams } from 'utils/Query/QueryUtils'; import { getQueryPath } from 'utils/RouterUtils'; import { getFormattedDateFromSeconds } from 'utils/TimeUtils'; @@ -177,17 +177,8 @@ const QueryCard: FC = ({ {duration && ( <> {duration} - 
{PIPE_SYMBOL} )} - {query.updatedBy && ( - - {`${t('label.by-lowercase')} `} - - {query.updatedBy} - - - )} } onClick={handleCardClick}> diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/TableQueryRightPanel/TableQueryRightPanel.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/TableQueryRightPanel/TableQueryRightPanel.component.tsx index a5fa67d35c10..8b1f968c715e 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/TableQueryRightPanel/TableQueryRightPanel.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TableQueries/TableQueryRightPanel/TableQueryRightPanel.component.tsx @@ -11,7 +11,9 @@ * limitations under the License. */ +import Icon from '@ant-design/icons'; import { Button, Col, Drawer, Row, Space, Typography } from 'antd'; +import { ReactComponent as IconUser } from 'assets/svg/user.svg'; import Description from 'components/common/description/Description'; import ProfilePicture from 'components/common/ProfilePicture/ProfilePicture'; import { UserTeamSelectableList } from 'components/common/UserTeamSelectableList/UserTeamSelectableList.component'; @@ -175,7 +177,7 @@ const TableQueryRightPanel = ({ - {t('label.used-by')} + {t('label.user-plural')} {query.users && query.users.length ? ( @@ -203,6 +205,31 @@ const TableQueryRightPanel = ({ )} +
+ + + {t('label.used-by')} + + {query.usedBy && query.usedBy.length ? ( + + {query.usedBy.map((user) => ( + + + {user} + + ))} + + ) : ( + + {t('label.no-entity', { + entity: t('label.used-by'), + })} + + )} + + )} diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.interface.ts index 7585db4f9497..4cd70ec21d35 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.interface.ts @@ -12,7 +12,7 @@ */ export interface TeamsSelectableProps { showTeamsAlert?: boolean; - onSelectionChange: (teams: string[]) => void; + onSelectionChange?: (teams: string[]) => void; filterJoinable?: boolean; placeholder?: string; selectedTeams?: string[]; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.tsx index 0c39789688b8..9eac0514dd5c 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TeamsSelectable/TeamsSelectable.tsx @@ -36,7 +36,7 @@ const TeamsSelectable = ({ const [teams, setTeams] = useState>([]); const onChange = (newValue: string[]) => { - onSelectionChange(newValue); + onSelectionChange && onSelectionChange(newValue); setValue(newValue); }; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.component.tsx index 0fb8703392e0..10396e785a6f 100644 --- 
a/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.component.tsx @@ -27,6 +27,7 @@ import { ResourceEntity } from 'components/PermissionProvider/PermissionProvider import cronstrue from 'cronstrue'; import { ERROR_PLACEHOLDER_TYPE } from 'enums/common.enum'; import { EntityType } from 'enums/entity.enum'; +import { PipelineType } from 'generated/api/services/ingestionPipelines/createIngestionPipeline'; import { Table as TableType } from 'generated/entity/data/table'; import { Operation } from 'generated/entity/policies/policy'; import { IngestionPipeline } from 'generated/entity/services/ingestionPipelines/ingestionPipeline'; @@ -127,14 +128,14 @@ const TestSuitePipelineTab = ({ testSuite }: Props) => { }); }; - const getAllIngestionWorkflows = async (paging?: string) => { + const getAllIngestionWorkflows = async () => { try { setIsLoading(true); - const response = await getIngestionPipelines( - ['owner', 'pipelineStatuses'], - testSuiteFQN, - paging - ); + const response = await getIngestionPipelines({ + arrQueryFields: ['owner', 'pipelineStatuses'], + testSuite: testSuiteFQN, + pipelineType: [PipelineType.TestSuite], + }); setTestSuitePipelines(response.data); } catch (error) { showErrorToast(error as AxiosError); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.test.tsx new file mode 100644 index 000000000000..891204b7f1cc --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.test.tsx @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Collate. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { render } from '@testing-library/react'; +import { Table } from 'generated/entity/data/table'; +import React from 'react'; +import { act } from 'react-test-renderer'; +import { getIngestionPipelines } from 'rest/ingestionPipelineAPI'; +import TestSuitePipelineTab from './TestSuitePipelineTab.component'; + +const mockTestSuite = { + id: '6a048962-cd78-4d51-9517-62838720ef97', + name: 'mySQL.openmetadata_db.openmetadata_db.web_analytic_event.testSuite', + fullyQualifiedName: + 'mySQL.openmetadata_db.openmetadata_db.web_analytic_event.testSuite', + tests: [], + pipelines: [ + { + id: 'd16c64b6-fb36-4e20-8700-d6f1e2754ef5', + type: 'ingestionPipeline', + name: 'web_analytic_event_TestSuite', + fullyQualifiedName: + 'mySQL.openmetadata_db.openmetadata_db.web_analytic_event.testSuite.web_analytic_event_TestSuite', + deleted: false, + }, + ], + serviceType: 'TestSuite', + version: 0.1, + updatedAt: 1692766701920, + updatedBy: 'admin', + deleted: false, + executable: true, + executableEntityReference: { + id: 'e926d275-441e-49ee-a073-ad509f625a14', + type: 'table', + name: 'web_analytic_event', + fullyQualifiedName: + 'mySQL.openmetadata_db.openmetadata_db.web_analytic_event', + }, + summary: { + success: 0, + failed: 1, + aborted: 0, + total: 1, + }, + testCaseResultSummary: [], +} as unknown as Table['testSuite']; + +jest.mock('rest/ingestionPipelineAPI', () => { + return { + getIngestionPipelines: jest + .fn() + 
.mockImplementation(() => Promise.resolve()), + }; +}); + +describe('TestSuite Pipeline component', () => { + it('getIngestionPipelines API should call on page load', async () => { + const mockGetIngestionPipelines = getIngestionPipelines as jest.Mock; + await act(async () => { + render(); + }); + + expect(mockGetIngestionPipelines).toHaveBeenCalledWith({ + arrQueryFields: ['owner', 'pipelineStatuses'], + pipelineType: ['TestSuite'], + testSuite: mockTestSuite?.fullyQualifiedName, + }); + }); +}); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/authentication/auth-provider/AuthProvider.tsx b/openmetadata-ui/src/main/resources/ui/src/components/authentication/auth-provider/AuthProvider.tsx index d4869f4e014b..6ba7318627f5 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/authentication/auth-provider/AuthProvider.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/authentication/auth-provider/AuthProvider.tsx @@ -30,6 +30,7 @@ import React, { useCallback, useContext, useEffect, + useMemo, useRef, useState, } from 'react'; @@ -123,6 +124,14 @@ export const AuthProvider = ({ let silentSignInRetries = 0; const handleUserCreated = (isUser: boolean) => setIsUserCreated(isUser); + const userConfig = useMemo( + () => + getUserManagerConfig({ + ...(authConfig as Record), + }), + [authConfig] + ); + const onLoginHandler = () => { setLoading(true); authenticatorRef.current?.invokeLogin(); @@ -558,9 +567,7 @@ export const AuthProvider = ({ ), - })} + userConfig={userConfig} onLoginFailure={handleFailedLogin} onLoginSuccess={handleSuccessfulLogin} onLogoutSuccess={handleSuccessfulLogout}> diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.test.tsx index 62592badda07..553af0fdfaca 100644 --- 
a/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.test.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.test.tsx @@ -11,9 +11,9 @@ * limitations under the License. */ -import { findByTestId, render } from '@testing-library/react'; +import { act, findByTestId, render, screen } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; import React from 'react'; -import { act } from 'react-test-renderer'; import EntitySummaryDetails from './EntitySummaryDetails'; const mockData = { @@ -69,4 +69,34 @@ describe('EntitySummaryDetails Component', () => { expect(EntitySummary).toBeInTheDocument(); }); }); + + it('Edit team type should render the appropriate component', async () => { + render( + + ); + + const editTeamTypeBtn = screen.getByTestId('edit-TeamType-icon'); + + await act(async () => { + userEvent.click(editTeamTypeBtn); + }); + + // should show the team type select box and action buttons + expect(screen.getByTestId('team-type-select')).toBeInTheDocument(); + + const cancelBtn = screen.getByTestId('cancel-btn'); + const saveBtn = screen.getByTestId('save-btn'); + + expect(cancelBtn).toBeInTheDocument(); + expect(saveBtn).toBeInTheDocument(); + + // should hide the team type select box and action buttons after save + await act(async () => { + userEvent.click(saveBtn); + }); + + expect(screen.queryByTestId('team-type-select')).toBeNull(); + expect(screen.queryByTestId('cancel-btn')).toBeNull(); + expect(screen.queryByTestId('save-btn')).toBeNull(); + }); }); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.tsx index 4025e10678fb..35f0d0438e14 100644 --- 
a/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/EntitySummaryDetails/EntitySummaryDetails.tsx @@ -83,6 +83,11 @@ const EntitySummaryDetails = ({ setShowTypeSelector(value); }, []); + const handleUpdateTeamType = (type: TeamType) => { + updateTeamType?.(type); + handleShowTypeSelector(false); + }; + const ownerDropdown = allowTeamOwner ? ( ) : ( <> diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/TeamTypeSelect/TeamTypeSelect.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/TeamTypeSelect/TeamTypeSelect.component.tsx index f0d3f7749cbe..b36d8fde8766 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/common/TeamTypeSelect/TeamTypeSelect.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/TeamTypeSelect/TeamTypeSelect.component.tsx @@ -42,7 +42,11 @@ function TeamTypeSelect({ const options = useMemo(() => getTeamTypeOptions(showGroupOption), []); return ( - + + + + + + + + + + + + + + + + + + + + + + ); +}; + +export default SignUp; diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/signup/account-activation-confirmation.component.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/SignUp/account-activation-confirmation.component.tsx similarity index 100% rename from openmetadata-ui/src/main/resources/ui/src/pages/signup/account-activation-confirmation.component.tsx rename to openmetadata-ui/src/main/resources/ui/src/pages/SignUp/account-activation-confirmation.component.tsx diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/signup/basic-signup.style.less b/openmetadata-ui/src/main/resources/ui/src/pages/SignUp/basic-signup.style.less similarity index 100% rename from openmetadata-ui/src/main/resources/ui/src/pages/signup/basic-signup.style.less rename to 
openmetadata-ui/src/main/resources/ui/src/pages/SignUp/basic-signup.style.less diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/signup/mocks/signup.mock.ts b/openmetadata-ui/src/main/resources/ui/src/pages/SignUp/mocks/signup.mock.ts similarity index 87% rename from openmetadata-ui/src/main/resources/ui/src/pages/signup/mocks/signup.mock.ts rename to openmetadata-ui/src/main/resources/ui/src/pages/SignUp/mocks/signup.mock.ts index 66a770b85847..fa462d7f8317 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/signup/mocks/signup.mock.ts +++ b/openmetadata-ui/src/main/resources/ui/src/pages/SignUp/mocks/signup.mock.ts @@ -11,6 +11,19 @@ * limitations under the License. */ +export const mockFormData = { + name: 'Sample Name', + email: 'sample123@sample.com', + picture: 'Profile Picture', + userName: 'sample123', +}; + +export const mockChangedFormData = { + fullName: 'f_name m_name l_name', + userName: 'mockUserName', + email: 'test@gmail.com', +}; + export const mockCreateUser = { data: { id: '911d4be4-6ebf-48a0-9016-43a2cf716428', diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/TestCaseDetailsPage/TestCaseDetailsPage.component.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/TestCaseDetailsPage/TestCaseDetailsPage.component.tsx index 04119a150ac2..0f7700618f61 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/TestCaseDetailsPage/TestCaseDetailsPage.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/TestCaseDetailsPage/TestCaseDetailsPage.component.tsx @@ -26,7 +26,7 @@ import { getTestCaseByFqn } from 'rest/testAPI'; import { getEntityName } from 'utils/EntityUtils'; import { getDataQualityPagePath } from 'utils/RouterUtils'; import { getEncodedFqn } from 'utils/StringsUtils'; -import { getFormattedDateFromSeconds } from 'utils/TimeUtils'; +import { getFormattedDateFromMilliSeconds } from 'utils/TimeUtils'; import { showErrorToast } from 'utils/ToastUtils'; import './TestCaseDetailsPage.style.less'; 
@@ -122,7 +122,7 @@ function TestCaseDetailsPage() { {testCaseData.testCaseResult?.timestamp - ? getFormattedDateFromSeconds( + ? getFormattedDateFromMilliSeconds( testCaseData.testCaseResult?.timestamp ) : '--'} diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.test.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.test.tsx deleted file mode 100644 index 4c03cbbfb080..000000000000 --- a/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.test.tsx +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Copyright 2022 Collate. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { act, fireEvent, render } from '@testing-library/react'; -import userEvent from '@testing-library/user-event'; -import React from 'react'; -import { createUser } from 'rest/userAPI'; -import SignUp from '.'; -import AppState from '../../AppState'; -import { getImages } from '../../utils/CommonUtils'; -import { mockCreateUser } from './mocks/signup.mock'; - -let letExpectedUserName = { name: 'sample123', email: 'sample123@sample.com' }; - -const mockChangeHandler = jest.fn(); -const mockSubmitHandler = jest.fn(); -const mockShowErrorToast = jest.fn(); - -jest.mock('react-router-dom', () => ({ - useHistory: jest.fn().mockReturnValue({ - push: jest.fn(), - }), -})); - -jest.mock('components/authentication/auth-provider/AuthProvider', () => ({ - useAuthContext: jest.fn(() => ({ - setIsSigningIn: jest.fn(), - })), -})); - -jest.mock('components/TeamsSelectable/TeamsSelectable', () => { - return jest.fn().mockImplementation(() =>
TeamSelectable
); -}); - -jest.mock('rest/userAPI', () => ({ - createUser: jest - .fn() - .mockImplementation(() => Promise.resolve(mockCreateUser)), -})); - -jest.mock('../../utils/ToastUtils', () => ({ - showErrorToast: jest.fn().mockImplementation(() => mockShowErrorToast), -})); - -jest.mock('../../AppState', () => ({ - ...jest.requireActual('../../AppState'), - newUser: { - name: 'Sample Name', - email: 'sample123@sample.com', - picture: 'Profile Picture', - }, - updateUserDetails: jest.fn(), - updateUserPermissions: jest.fn(), -})); - -jest.mock('../../utils/CommonUtils', () => ({ - getImages: jest - .fn() - .mockResolvedValue( - 'https://lh3.googleusercontent.com/a/ALm5wu0HwEPhAbyRha16cUHrEum-zxTDzj6KZiqYsT5Y=s96-c' - ), - Transi18next: jest.fn().mockReturnValue('text'), -})); - -jest.mock('utils/AuthProvider.util', () => ({ - getNameFromUserData: jest.fn().mockImplementation(() => letExpectedUserName), -})); - -describe('SignUp page', () => { - it('Component should render properly', async () => { - (createUser as jest.Mock).mockImplementationOnce(() => - Promise.resolve({ data: {} }) - ); - - const { getByTestId, queryByTestId } = render(); - - const logo = getByTestId('om-logo'); - const heading = getByTestId('om-heading'); - const form = getByTestId('create-user-form'); - const fullNameLabel = getByTestId('full-name-label'); - const fullNameInput = getByTestId('full-name-input'); - const usernameLabel = getByTestId('username-label'); - const usernameInput = getByTestId('username-input'); - const emailLabel = getByTestId('email-label'); - const emailInput = getByTestId('email-input'); - const selectTeamLabel = getByTestId('select-team-label'); - const createButton = getByTestId('create-button'); - const loadingContent = await queryByTestId('loading-content'); - - expect(logo).toBeInTheDocument(); - expect(heading).toBeInTheDocument(); - expect(form).toBeInTheDocument(); - expect(fullNameLabel).toBeInTheDocument(); - expect(fullNameInput).toBeInTheDocument(); - 
expect(usernameLabel).toBeInTheDocument(); - expect(usernameInput).toBeInTheDocument(); - expect(emailLabel).toBeInTheDocument(); - expect(emailInput).toBeInTheDocument(); - expect(selectTeamLabel).toBeInTheDocument(); - expect(createButton).toBeInTheDocument(); - expect(loadingContent).toBeNull(); - - await act(async () => { - form.onsubmit = mockSubmitHandler; - - fireEvent.submit(form); - - expect(mockSubmitHandler).toHaveBeenCalledTimes(1); - }); - }); - - it('Handlers in forms for change and submit should work properly', async () => { - (createUser as jest.Mock).mockImplementationOnce(() => - Promise.resolve(undefined) - ); - - const { getByTestId } = render(); - - const form = getByTestId('create-user-form'); - const fullNameInput = getByTestId('full-name-input'); - const usernameInput = getByTestId('username-input'); - const emailInput = getByTestId('email-input'); - - expect(form).toBeInTheDocument(); - expect(fullNameInput).toHaveValue('Sample Name'); - expect(usernameInput).toHaveValue('sample123'); - expect(emailInput).toHaveValue('sample123@sample.com'); - - fullNameInput.onchange = mockChangeHandler; - usernameInput.onchange = mockChangeHandler; - emailInput.onchange = mockChangeHandler; - - await act(async () => { - fireEvent.change(fullNameInput, { - target: { name: 'displayName', value: 'Fname Mname Lname' }, - }); - - fireEvent.change(usernameInput, { - target: { name: 'displayName', value: 'mockUserName' }, - }); - fireEvent.change(emailInput, { - target: { name: 'displayName', value: 'sample@sample.com' }, - }); - - expect(mockChangeHandler).toHaveBeenCalledTimes(3); - - form.onsubmit = mockSubmitHandler; - - fireEvent.submit(form); - - expect(mockSubmitHandler).toHaveBeenCalledTimes(1); - }); - }); - - it('Error should be thrown if createUser API fails', async () => { - const { getByTestId } = render(); - - const form = getByTestId('create-user-form'); - const fullNameInput = getByTestId('full-name-input'); - const usernameInput = 
getByTestId('username-input'); - const emailInput = getByTestId('email-input'); - - expect(form).toBeInTheDocument(); - expect(fullNameInput).toHaveValue('Sample Name'); - expect(usernameInput).toHaveValue('sample123'); - expect(emailInput).toHaveValue('sample123@sample.com'); - - fullNameInput.onchange = mockChangeHandler; - usernameInput.onchange = mockChangeHandler; - emailInput.onchange = mockChangeHandler; - - await act(async () => { - fireEvent.change(fullNameInput, { - target: { name: 'displayName', value: 'Fname Mname Lname' }, - }); - - fireEvent.change(usernameInput, { - target: { name: 'displayName', value: 'mockUserName' }, - }); - fireEvent.change(emailInput, { - target: { name: 'displayName', value: '' }, - }); - }); - - expect(mockChangeHandler).toHaveBeenCalledTimes(3); - - form.onsubmit = mockSubmitHandler; - - await act(async () => { - (createUser as jest.Mock).mockImplementationOnce(() => - Promise.reject({ - response: { data: { message: 'error' } }, - }) - ); - fireEvent.submit(form); - }); - - expect(createUser as jest.Mock).toHaveBeenCalledTimes(1); - }); - - it('Handlers in form should work if data is empty', async () => { - (getImages as jest.Mock).mockImplementationOnce(() => Promise.reject('')); - letExpectedUserName = { name: '', email: '' }; - - AppState.newUser = { - name: '', - email: '', - picture: '', - }; - - const { getByTestId } = render(); - - const form = getByTestId('create-user-form'); - const fullNameInput = getByTestId('full-name-input'); - const usernameInput = getByTestId('username-input'); - const emailInput = getByTestId('email-input'); - - expect(form).toBeInTheDocument(); - expect(fullNameInput).toHaveValue(''); - expect(usernameInput).toHaveValue(''); - expect(emailInput).toHaveValue(''); - - fullNameInput.onchange = mockChangeHandler; - usernameInput.onchange = mockChangeHandler; - emailInput.onchange = mockChangeHandler; - - expect(mockChangeHandler).not.toHaveBeenCalled(); - - form.onsubmit = mockSubmitHandler; - - 
await act(async () => { - fireEvent.submit(form); - }); - - expect(createUser as jest.Mock).toHaveBeenCalledTimes(0); - }); - - it('Create Button Should Work Properly and call the form handler', async () => { - (createUser as jest.Mock).mockImplementationOnce(() => - Promise.resolve(undefined) - ); - - const { getByTestId } = render(); - - const form = getByTestId('create-user-form'); - const fullNameInput = getByTestId('full-name-input'); - const usernameInput = getByTestId('username-input'); - const emailInput = getByTestId('email-input'); - - expect(form).toBeInTheDocument(); - - fullNameInput.onchange = mockChangeHandler; - usernameInput.onchange = mockChangeHandler; - emailInput.onchange = mockChangeHandler; - - await act(async () => { - fireEvent.change(fullNameInput, { - target: { name: 'displayName', value: 'Fname Mname Lname' }, - }); - - fireEvent.change(usernameInput, { - target: { name: 'displayName', value: 'mockUserName' }, - }); - fireEvent.change(emailInput, { - target: { name: 'displayName', value: 'sample@sample.com' }, - }); - - expect(mockChangeHandler).toHaveBeenCalledTimes(3); - - form.onsubmit = mockSubmitHandler; - - const createButton = getByTestId('create-button'); - - userEvent.click(createButton); - - expect(mockSubmitHandler).toHaveBeenCalledTimes(1); - }); - }); -}); diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.tsx deleted file mode 100644 index 6e938982c893..000000000000 --- a/openmetadata-ui/src/main/resources/ui/src/pages/signup/index.tsx +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright 2022 Collate. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { Button } from 'antd'; -import { AxiosError } from 'axios'; -import { useAuthContext } from 'components/authentication/auth-provider/AuthProvider'; -import { UserProfile } from 'components/authentication/auth-provider/AuthProvider.interface'; -import TeamsSelectable from 'components/TeamsSelectable/TeamsSelectable'; -import { CookieStorage } from 'cookie-storage'; -import React, { useState } from 'react'; -import { useTranslation } from 'react-i18next'; -import { useHistory } from 'react-router-dom'; -import { createUser } from 'rest/userAPI'; -import { getNameFromUserData } from 'utils/AuthProvider.util'; -import appState from '../../AppState'; -import { ReactComponent as OMDLogo } from '../../assets/svg/logo-monogram.svg'; -import { ELLIPSES, REDIRECT_PATHNAME, ROUTES } from '../../constants/constants'; -import { CreateUser } from '../../generated/api/teams/createUser'; -import { User } from '../../generated/entity/teams/user'; -import { getImages, Transi18next } from '../../utils/CommonUtils'; -import { showErrorToast } from '../../utils/ToastUtils'; - -const cookieStorage = new CookieStorage(); - -const SignUp = () => { - const { t } = useTranslation(); - const history = useHistory(); - const { - setIsSigningIn, - jwtPrincipalClaims = [], - authorizerConfig, - } = useAuthContext(); - - const [selectedTeams, setSelectedTeams] = useState>([]); - const [loading, setLoading] = useState(false); - const [details, setDetails] = useState({ - displayName: appState.newUser.name || '', - ...getNameFromUserData( - appState.newUser as UserProfile, - 
jwtPrincipalClaims, - authorizerConfig?.principalDomain - ), - }); - - const createNewUser = (details: User | CreateUser) => { - setLoading(true); - createUser(details as CreateUser) - .then((res) => { - if (res) { - appState.updateUserDetails(res); - cookieStorage.removeItem(REDIRECT_PATHNAME); - setIsSigningIn(false); - history.push(ROUTES.HOME); - } else { - setLoading(false); - } - }) - .catch((err: AxiosError) => { - showErrorToast( - err, - t('server.create-entity-error', { - entity: t('label.user'), - }) - ); - }) - .finally(() => { - setLoading(false); - }); - }; - - const onChangeHandler = (e: React.ChangeEvent) => { - e.persist(); - setDetails((prevState) => { - return { - ...prevState, - [e.target.name]: e.target.value, - }; - }); - }; - - const onSubmitHandler = (e: React.FormEvent) => { - e.preventDefault(); - if (details.name && details.displayName) { - createNewUser({ - ...details, - teams: selectedTeams as Array, - profile: { - images: getImages(appState.newUser.picture ?? ''), - }, - }); - } - }; - - return loading ? ( -

- {t('label.creating-account')} - {ELLIPSES} -

- ) : ( - // TODO: replace this with form -
-
-
- -
-
-

- } - values={{ - entity: t('label.open-metadata'), - }} - /> -

-
-
-
-
- - -
-
- - -
-
- - -
-
- - -
-
- -
- -
-
-
- ); -}; - -export default SignUp; diff --git a/openmetadata-ui/src/main/resources/ui/src/rest/ingestionPipelineAPI.ts b/openmetadata-ui/src/main/resources/ui/src/rest/ingestionPipelineAPI.ts index 9bd1fef98bdf..c151771c59d0 100644 --- a/openmetadata-ui/src/main/resources/ui/src/rest/ingestionPipelineAPI.ts +++ b/openmetadata-ui/src/main/resources/ui/src/rest/ingestionPipelineAPI.ts @@ -78,15 +78,19 @@ export const getIngestionPipelineByName = async ( return response.data; }; -export const getIngestionPipelines = async ( - arrQueryFields: Array, - serviceFilter?: string, - paging?: string, - pipelineType?: PipelineType -) => { +export const getIngestionPipelines = async (data: { + arrQueryFields: Array; + serviceFilter?: string; + paging?: string; + pipelineType?: PipelineType[]; + testSuite?: string; +}) => { + const { arrQueryFields, serviceFilter, paging, pipelineType, testSuite } = + data; const queryParamString = QueryString.stringify({ service: serviceFilter, - pipelineType, + testSuite, + pipelineType: pipelineType?.length ? 
pipelineType.join(',') : undefined, }); const url = `${getURLWithQueryFields( diff --git a/openmetadata-ui/src/main/resources/ui/src/rest/testAPI.ts b/openmetadata-ui/src/main/resources/ui/src/rest/testAPI.ts index 4b8282ce0148..8c36e1aa5389 100644 --- a/openmetadata-ui/src/main/resources/ui/src/rest/testAPI.ts +++ b/openmetadata-ui/src/main/resources/ui/src/rest/testAPI.ts @@ -273,14 +273,23 @@ export const restoreTestSuite = async (id: string) => { // Test Result -export const putTestCaseResult = async ( - testCaseFqn: string, - data: TestCaseResult -) => { - const response = await APIClient.put< - TestCaseResult, - AxiosResponse - >(`${testCaseUrl}/${testCaseFqn}/testCaseResult`, data); +export const patchTestCaseResult = async ({ + testCaseFqn, + timestamp, + patch, +}: { + testCaseFqn: string; + timestamp: number; + patch: Operation[]; +}) => { + const configOptions = { + headers: { 'Content-type': 'application/json-patch+json' }, + }; + const response = await APIClient.patch>( + `${testCaseUrl}/${testCaseFqn}/testCaseResult/${timestamp}`, + patch, + configOptions + ); return response.data; }; diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/TableProfilerUtils.ts b/openmetadata-ui/src/main/resources/ui/src/utils/TableProfilerUtils.ts index 50d586db097c..cc03e69df8b4 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/TableProfilerUtils.ts +++ b/openmetadata-ui/src/main/resources/ui/src/utils/TableProfilerUtils.ts @@ -15,10 +15,7 @@ import { MetricChartType } from 'components/ProfilerDashboard/profilerDashboard. 
import { findLast, sortBy } from 'lodash'; import { SystemProfile } from '../generated/api/data/createTableProfile'; import { TableProfile } from '../generated/entity/data/table'; -import { - getFormattedDateFromMilliSeconds, - getFormattedDateFromSeconds, -} from './TimeUtils'; +import { getFormattedDateFromMilliSeconds } from './TimeUtils'; export const calculateRowCountMetrics = ( profiler: TableProfile[], @@ -28,7 +25,7 @@ export const calculateRowCountMetrics = ( const rowCountMetricData: MetricChartType['data'] = []; updateProfilerData.forEach((data) => { - const timestamp = getFormattedDateFromSeconds(data.timestamp); + const timestamp = getFormattedDateFromMilliSeconds(data.timestamp); rowCountMetricData.push({ name: timestamp, diff --git a/pom.xml b/pom.xml index adcda9896d5d..16fdd141b21d 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,7 @@ based on Open Metadata Standards/APIs, supporting connectors to a wide range of data services, OpenMetadata enables end-to-end metadata management, giving you the freedom to unlock the value of your data assets. - 1.2.0-SNAPSHOT + 1.1.5 https://github.com/open-metadata/OpenMetadata openmetadata-spec diff --git a/scripts/update_version.py b/scripts/update_version.py new file mode 100644 index 000000000000..c8bf177bebe4 --- /dev/null +++ b/scripts/update_version.py @@ -0,0 +1,118 @@ +import sys +import argparse +import fileinput +import os +import re +import logging + +# Configure the logger +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +logger = logging.getLogger() + +# Function to update the Github workflow with search pattern as "input=" or "DOCKER_RELEASE_TAG=" +def update_github_action(file_path, release_version): + logger.info(f"Updating Github workflow's Docker version in {file_path} to version {release_version}\n") + try: + with open(file_path, 'r') as file: + content = file.read() + + # Update the input pattern + input_pattern = r'input=\d+(\.\d+)*(\.\d+)?' 
+ input_replacement = f'input={release_version}' + updated_content = re.sub(input_pattern, input_replacement, content) + + # Update the DOCKER_RELEASE_TAG pattern + docker_release_tag_pattern = r'DOCKER_RELEASE_TAG=\d+(\.\d+)*(\.\d+)?' + docker_release_tag_replacement = f'DOCKER_RELEASE_TAG={release_version}' + updated_content = re.sub(docker_release_tag_pattern, docker_release_tag_replacement, updated_content) + + with open(file_path, 'w') as file: + file.write(updated_content) + + logger.info(f"Patterns updated to {release_version} in {file_path}") + except Exception as e: + logger.error(f"An error occurred: {e}") + +# Function to update the Python files in ingestion with search pattern as "version=" +def update_python_files(file_path, release_version): + # Logic for updating Python files + logger.info(f"Updating version numbers in {file_path} to {release_version}\n") + try: + with open(file_path, 'r') as file: + content = file.read() + + pattern = r'version\s*=\s*"([^"]+)"' + updated_content = re.sub(pattern, f'version="{release_version}"', content) + + with open(file_path, 'w') as file: + file.write(updated_content) + + logger.info(f"Version numbers updated to {release_version} in {file_path}") + except Exception as e: + logger.error(f"An error occurred: {e}") + +# Function to update the image version in Docker compose files with search pattern where image, docker, getcollate, and openmetadata are used. 
+def update_dockerfile_version(file_path, release_version): + # Logic for updating Docker compose version + try: + with open(file_path, 'r') as file: + content = file.read() + + # Update image versions using regular expression + updated_content = re.sub( + r'(image: docker\.getcollate\.io/openmetadata/.*?):[\d.]+', + rf'\1:{release_version}', + content + ) + + with open(file_path, 'w') as file: + file.write(updated_content) + + logger.info(f"Updated image versions in {file_path}") + except Exception as e: + logger.error(f"An error occurred while updating {file_path}: {e}") + +# Function to update the DOCKERFILE used to create the images, search pattern used as "RI_VERSION" +def update_ingestion_version(file_path, release_version): + logger.info(f"Updating ingestion version in {file_path} to version {release_version}\n") + try: + with open(file_path, 'r') as file: + content = file.read() + + pattern = r'RI_VERSION="[\d\.]+"' + replacement = f'RI_VERSION="{release_version}"' + updated_content = re.sub(pattern, replacement, content) + + with open(file_path, 'w') as file: + file.write(updated_content) + + logger.info(f"RI_VERSION updated to {release_version} in {file_path}") + except Exception as e: + logger.error(f"An error occurred: {e}") + +def main(): + parser = argparse.ArgumentParser(description="Update version information in files.") + parser.add_argument("action_type", type=int, choices=range(1, 5), help="Type of action to perform") + parser.add_argument("file_path", type=str, help="Path to the file to update") + parser.add_argument("-s", dest="release_version", required=True, help="Release version to set") + + args = parser.parse_args() + + action_type = args.action_type + file_path = args.file_path + release_version = args.release_version + + if action_type == 1: + update_github_action(file_path, release_version) + elif action_type == 2: + update_python_files(file_path, release_version) + elif action_type == 3: + update_dockerfile_version(file_path, 
release_version) + elif action_type == 4: + update_ingestion_version(file_path, release_version) + else: + logger.error("Invalid action type") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file