From 9d37ff0e34bb45092838f9a617b8a5c83b6692be Mon Sep 17 00:00:00 2001 From: Pranita Fulsundar Date: Thu, 28 Nov 2024 19:49:58 +0530 Subject: [PATCH 1/7] Feat: edit display name (#18720) * feat: edit display name * refactor: logic and style changes * fix: minor changes * style: update style for edit button * style: remove inline styles and update hover effect * refactor: remove edit permission for version page and fix handleEditDisplayName function * refactor: updated displayName * fix: data-testid for the Link --------- Co-authored-by: Shailesh Parmar --- .../DatabaseSchemaTable.interface.ts | 1 + .../DatabaseSchemaTable.tsx | 123 +++++++++++++++++- .../EntityNameModal.interface.ts | 2 +- .../DisplayName/DisplayName.interface.ts | 22 ++++ .../common/DisplayName/DisplayName.test.tsx | 102 +++++++++++++++ .../common/DisplayName/DisplayName.tsx | 103 +++++++++++++++ .../DatabaseSchemaPage/SchemaTablesTab.tsx | 79 ++++++++--- .../DatabaseVersionPage.tsx | 2 +- .../ServiceMainTabContent.tsx | 87 ++++++++++++- .../src/utils/ServiceMainTabContentUtils.tsx | 74 ++++++++--- 10 files changed, 550 insertions(+), 45 deletions(-) create mode 100644 openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts create mode 100644 openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx create mode 100644 openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts index 792196cb0c30..9038eef2b3c8 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.interface.ts @@ -13,4 +13,5 @@ export interface DatabaseSchemaTableProps { isDatabaseDeleted?: boolean; + isVersionPage?: boolean; } diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx index 36acb772229d..64148e9108e6 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/DatabaseSchema/DatabaseSchemaTable/DatabaseSchemaTable.tsx @@ -11,45 +11,70 @@ * limitations under the License. 
*/ import { Col, Row, Switch, Typography } from 'antd'; +import { ColumnsType } from 'antd/lib/table'; import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { t } from 'i18next'; import { isEmpty } from 'lodash'; import QueryString from 'qs'; import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useHistory } from 'react-router-dom'; import { + getEntityDetailsPath, INITIAL_PAGING_VALUE, + NO_DATA_PLACEHOLDER, PAGE_SIZE, } from '../../../../constants/constants'; -import { TabSpecificField } from '../../../../enums/entity.enum'; +import { usePermissionProvider } from '../../../../context/PermissionProvider/PermissionProvider'; +import { EntityType, TabSpecificField } from '../../../../enums/entity.enum'; import { SearchIndex } from '../../../../enums/search.enum'; import { DatabaseSchema } from '../../../../generated/entity/data/databaseSchema'; +import { EntityReference } from '../../../../generated/entity/type'; +import { UsageDetails } from '../../../../generated/type/entityUsage'; import { Include } from '../../../../generated/type/include'; import { Paging } from '../../../../generated/type/paging'; import { usePaging } from '../../../../hooks/paging/usePaging'; import useCustomLocation from '../../../../hooks/useCustomLocation/useCustomLocation'; import { useFqn } from '../../../../hooks/useFqn'; -import { getDatabaseSchemas } from '../../../../rest/databaseAPI'; +import { + getDatabaseSchemas, + patchDatabaseSchemaDetails, +} from '../../../../rest/databaseAPI'; import { searchQuery } from '../../../../rest/searchAPI'; -import { schemaTableColumns } from '../../../../utils/Database/Database.util'; +import { getEntityName } from '../../../../utils/EntityUtils'; +import { getUsagePercentile } from '../../../../utils/TableUtils'; import { showErrorToast } from '../../../../utils/ToastUtils'; +import DisplayName from '../../../common/DisplayName/DisplayName'; import ErrorPlaceHolder from '../../../common/ErrorWithPlaceholder/ErrorPlaceHolder'; import NextPrevious from '../../../common/NextPrevious/NextPrevious'; import { PagingHandlerParams } from '../../../common/NextPrevious/NextPrevious.interface'; +import RichTextEditorPreviewer from '../../../common/RichTextEditor/RichTextEditorPreviewer'; import Searchbar from '../../../common/SearchBarComponent/SearchBar.component'; import Table from '../../../common/Table/Table'; +import { EntityName } from '../../../Modals/EntityNameModal/EntityNameModal.interface'; import { DatabaseSchemaTableProps } from './DatabaseSchemaTable.interface'; export const DatabaseSchemaTable = ({ isDatabaseDeleted, + isVersionPage = false, }: Readonly) => { const { fqn: decodedDatabaseFQN } = useFqn(); const history = useHistory(); const location = useCustomLocation(); + const { permissions } = usePermissionProvider(); + const [schemas, setSchemas] = useState([]); const [isLoading, setIsLoading] = useState(true); const [showDeletedSchemas, setShowDeletedSchemas] = useState(false); + const allowEditDisplayNamePermission = useMemo(() => { + return ( + !isVersionPage && + (permissions.databaseSchema.EditAll || + permissions.databaseSchema.EditDisplayName) + ); + }, [permissions, isVersionPage]); + const searchValue = useMemo(() => { const param = location.search; const searchData = QueryString.parse( @@ -160,6 +185,98 @@ export const DatabaseSchemaTable = ({ } }; + const handleDisplayNameUpdate = useCallback( + async (data: EntityName, id?: string) => { + try { + const schemaDetails = schemas.find((schema) 
=> schema.id === id); + if (!schemaDetails) { + return; + } + const updatedData = { + ...schemaDetails, + displayName: data.displayName || undefined, + }; + const jsonPatch = compare(schemaDetails, updatedData); + await patchDatabaseSchemaDetails(schemaDetails.id ?? '', jsonPatch); + setSchemas((prevData) => + prevData.map((schema) => + schema.id === id + ? { ...schema, displayName: data.displayName } + : schema + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [schemas] + ); + + const schemaTableColumns: ColumnsType = useMemo( + () => [ + { + title: t('label.schema-name'), + dataIndex: 'name', + key: 'name', + width: 250, + render: (_, record: DatabaseSchema) => ( + + ), + }, + { + title: t('label.description'), + dataIndex: 'description', + key: 'description', + render: (text: string) => + text?.trim() ? ( + + ) : ( + + {t('label.no-entity', { entity: t('label.description') })} + + ), + }, + { + title: t('label.owner-plural'), + dataIndex: 'owners', + key: 'owners', + width: 120, + render: (owners: EntityReference[]) => + !isEmpty(owners) && owners.length > 0 ? ( + owners.map((owner: EntityReference) => getEntityName(owner)) + ) : ( + + {NO_DATA_PLACEHOLDER} + + ), + }, + { + title: t('label.usage'), + dataIndex: 'usageSummary', + key: 'usageSummary', + width: 120, + render: (text: UsageDetails) => + getUsagePercentile(text?.weeklyStats?.percentileRank ?? 0), + }, + ], + [handleDisplayNameUpdate, allowEditDisplayNamePermission] + ); + useEffect(() => { fetchDatabaseSchema(); }, [decodedDatabaseFQN, pageSize, showDeletedSchemas, isDatabaseDeleted]); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts index 4f57e3699588..2106081055e3 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Modals/EntityNameModal/EntityNameModal.interface.ts @@ -13,7 +13,7 @@ import { Rule } from 'antd/lib/form'; import { Constraint } from '../../../generated/entity/data/table'; -export type EntityName = { name: string; displayName?: string }; +export type EntityName = { name: string; displayName?: string; id?: string }; export type EntityNameWithAdditionFields = EntityName & { constraint: Constraint; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts new file mode 100644 index 000000000000..b7a3f0f8218a --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.interface.ts @@ -0,0 +1,22 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { EntityName } from '../../Modals/EntityNameModal/EntityNameModal.interface'; + +export interface DisplayNameProps { + id: string; + name?: string; + displayName?: string; + link: string; + onEditDisplayName?: (data: EntityName, id?: string) => Promise; + allowRename?: boolean; +} diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx new file mode 100644 index 000000000000..5f6322bd41b8 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.test.tsx @@ -0,0 +1,102 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { act, fireEvent, render, screen } from '@testing-library/react'; +import React from 'react'; +import { MemoryRouter } from 'react-router-dom'; +import DisplayName from './DisplayName'; +import { DisplayNameProps } from './DisplayName.interface'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + Link: jest + .fn() + .mockImplementation(({ children, ...props }) => ( + {children} + )), +})); + +jest.mock('../../../constants/constants', () => ({ + DE_ACTIVE_COLOR: '#BFBFBF', + ICON_DIMENSION: { width: 16, height: 16 }, +})); + +jest.mock('../../Modals/EntityNameModal/EntityNameModal.component', () => + jest.fn().mockImplementation(() =>
    Mocked Modal
) +); + +const mockOnEditDisplayName = jest.fn(); + +const mockProps: DisplayNameProps = { + id: '1', + name: 'Sample Entity', + displayName: 'Sample Display Name', + link: '/entity/1', + allowRename: true, + onEditDisplayName: mockOnEditDisplayName, +}; + +describe('Test DisplayName Component', () => { + it('Should render the component with the display name', async () => { + await act(async () => { + render( + + + + ); + + const displayNameField = await screen.getByTestId('column-display-name'); + + expect(displayNameField).toBeInTheDocument(); + expect(displayNameField).toHaveTextContent('Sample Display Name'); + + const editButton = screen.queryByTestId('edit-displayName-button'); + + expect(editButton).toBeInTheDocument(); + }); + }); + + it('Should render the component with name when display name is empty', async () => { + await act(async () => { + render( + + + + ); + + const nameField = screen.getByTestId('column-name'); + + expect(nameField).toBeInTheDocument(); + expect(nameField).toHaveTextContent('Sample Entity'); + }); + }); + + it('Should open the edit modal on edit button click', async () => { + await act(async () => { + render( + + + + ); + const editButton = screen.getByTestId('edit-displayName-button'); + fireEvent.click(editButton); + + const nameField = await screen.findByTestId('column-name'); + + expect(nameField).toBeInTheDocument(); + + const displayNameField = await screen.findByTestId('column-display-name'); + + expect(displayNameField).toBeInTheDocument(); + }); + }); +}); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx new file mode 100644 index 000000000000..5a8a1f23a2a7 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DisplayName/DisplayName.tsx @@ -0,0 +1,103 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { Button, Tooltip, Typography } from 'antd'; +import { AxiosError } from 'axios'; +import { isEmpty } from 'lodash'; +import React, { useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Link } from 'react-router-dom'; +import { ReactComponent as IconEdit } from '../../../assets/svg/edit-new.svg'; +import { DE_ACTIVE_COLOR, ICON_DIMENSION } from '../../../constants/constants'; +import { showErrorToast } from '../../../utils/ToastUtils'; +import EntityNameModal from '../../Modals/EntityNameModal/EntityNameModal.component'; +import { EntityName } from '../../Modals/EntityNameModal/EntityNameModal.interface'; +import { DisplayNameProps } from './DisplayName.interface'; + +const DisplayName: React.FC = ({ + id, + name, + displayName, + onEditDisplayName, + link, + allowRename, +}) => { + const { t } = useTranslation(); + + const [isDisplayNameEditing, setIsDisplayNameEditing] = useState(false); + + const handleDisplayNameUpdate = async (data: EntityName) => { + setIsDisplayNameEditing(true); + try { + await onEditDisplayName?.(data, id); + } catch (error) { + showErrorToast(error as AxiosError); + } finally { + setIsDisplayNameEditing(false); + } + }; + + return ( +
+ + {isEmpty(displayName) ? ( + + {name} + + ) : ( + <> + {name} + + + {displayName} + + + + )} + + + {allowRename ? ( + +
+ ); +}; + +export default DisplayName; diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx index d9ef74254263..cc397a63a3d9 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseSchemaPage/SchemaTablesTab.tsx @@ -13,23 +13,29 @@ import { Col, Row, Switch, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; +import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { isEmpty, isUndefined } from 'lodash'; -import React, { useMemo } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; -import { Link } from 'react-router-dom'; +import DisplayName from '../../components/common/DisplayName/DisplayName'; import DescriptionV1 from '../../components/common/EntityDescription/DescriptionV1'; import ErrorPlaceHolder from '../../components/common/ErrorWithPlaceholder/ErrorPlaceHolder'; import NextPrevious from '../../components/common/NextPrevious/NextPrevious'; import { NextPreviousProps } from '../../components/common/NextPrevious/NextPrevious.interface'; import RichTextEditorPreviewer from '../../components/common/RichTextEditor/RichTextEditorPreviewer'; import TableAntd from '../../components/common/Table/Table'; +import { EntityName } from '../../components/Modals/EntityNameModal/EntityNameModal.interface'; +import { usePermissionProvider } from '../../context/PermissionProvider/PermissionProvider'; import { ERROR_PLACEHOLDER_TYPE } from '../../enums/common.enum'; import { EntityType } from '../../enums/entity.enum'; import { DatabaseSchema } from '../../generated/entity/data/databaseSchema'; import { Table } from '../../generated/entity/data/table'; import { UsePagingInterface } from '../../hooks/paging/usePaging'; +import { patchTableDetails } from '../../rest/tableAPI'; import entityUtilClassBase from '../../utils/EntityUtilClassBase'; import { getEntityName } from '../../utils/EntityUtils'; +import { showErrorToast } from '../../utils/ToastUtils'; interface SchemaTablesTabProps { databaseSchemaDetails: DatabaseSchema; @@ -69,6 +75,48 @@ function SchemaTablesTab({ pagingInfo, }: Readonly) { const { t } = useTranslation(); + const [localTableData, setLocalTableData] = useState([]); + + const { permissions } = usePermissionProvider(); + + const allowEditDisplayNamePermission = useMemo(() => { + return ( + !isVersionView && + (permissions.table.EditAll || permissions.table.EditDisplayName) + ); + }, [permissions, isVersionView]); + + const handleDisplayNameUpdate = useCallback( + async (data: EntityName, id?: string) => { + try { + const tableDetails = localTableData.find((table) => table.id === id); + if (!tableDetails) { + return; + } + const updatedData = { + ...tableDetails, + displayName: data.displayName || undefined, + }; + const jsonPatch = compare(tableDetails, updatedData); + await patchTableDetails(tableDetails.id, jsonPatch); + + setLocalTableData((prevData) => + prevData.map((table) => + table.id === id + ? 
{ ...table, displayName: data.displayName } + : table + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [localTableData] + ); + + useEffect(() => { + setLocalTableData(tableData); + }, [tableData]); const tableColumn: ColumnsType = useMemo( () => [ @@ -79,17 +127,18 @@ function SchemaTablesTab({ width: 500, render: (_, record: Table) => { return ( -
-              {getEntityName(record)}
+ ); }, }, @@ -105,7 +154,7 @@ function SchemaTablesTab({ ), }, ], - [] + [handleDisplayNameUpdate, allowEditDisplayNamePermission] ); return ( @@ -158,7 +207,7 @@ function SchemaTablesTab({ bordered columns={tableColumn} data-testid="databaseSchema-tables" - dataSource={tableData} + dataSource={localTableData} loading={tableDataLoading} locale={{ emptyText: ( diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx index dc7eef110c1f..84eeee904ffe 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/DatabaseVersionPage/DatabaseVersionPage.tsx @@ -209,7 +209,7 @@ function DatabaseVersionPage() { /> - + diff --git a/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx b/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx index b24b1f2af7dd..bf9643b01fe1 100644 --- a/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/pages/ServiceDetailsPage/ServiceMainTabContent.tsx @@ -13,9 +13,11 @@ import { Col, Row, Space, Switch, Table, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; +import { AxiosError } from 'axios'; +import { compare } from 'fast-json-patch'; import { isUndefined } from 'lodash'; import { EntityTags, ServiceTypes } from 'Models'; -import React, { useCallback, useMemo, useState } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useParams } from 'react-router-dom'; import DescriptionV1 from '../../components/common/EntityDescription/DescriptionV1'; @@ -25,7 +27,9 @@ import NextPrevious from '../../components/common/NextPrevious/NextPrevious'; import { NextPreviousProps } from '../../components/common/NextPrevious/NextPrevious.interface'; import ResizablePanels from '../../components/common/ResizablePanels/ResizablePanels'; import EntityRightPanel from '../../components/Entity/EntityRightPanel/EntityRightPanel'; +import { EntityName } from '../../components/Modals/EntityNameModal/EntityNameModal.interface'; import { COMMON_RESIZABLE_PANEL_CONFIG } from '../../constants/ResizablePanel.constants'; +import { usePermissionProvider } from '../../context/PermissionProvider/PermissionProvider'; import { OperationPermission } from '../../context/PermissionProvider/PermissionProvider.interface'; import { EntityType } from '../../enums/entity.enum'; import { DatabaseService } from '../../generated/entity/services/databaseService'; @@ -33,10 +37,14 @@ import { Paging } from '../../generated/type/paging'; import { UsePagingInterface } from '../../hooks/paging/usePaging'; import { useFqn } from '../../hooks/useFqn'; import { ServicesType } from '../../interface/service.interface'; -import { getServiceMainTabColumns } from '../../utils/ServiceMainTabContentUtils'; +import { + callServicePatchAPI, + getServiceMainTabColumns, +} from '../../utils/ServiceMainTabContentUtils'; import { getEntityTypeFromServiceCategory } from '../../utils/ServiceUtils'; import { getTagsWithoutTier, getTierTags } from '../../utils/TableUtils'; import { createTagObject } from '../../utils/TagsUtils'; +import { showErrorToast } from '../../utils/ToastUtils'; import { ServicePageData } from 
'./ServiceDetailsPage'; interface ServiceMainTabContentProps { @@ -53,6 +61,7 @@ interface ServiceMainTabContentProps { pagingHandler: NextPreviousProps['pagingHandler']; saveUpdatedServiceData: (updatedData: ServicesType) => Promise; pagingInfo: UsePagingInterface; + isVersionPage?: boolean; } function ServiceMainTabContent({ @@ -69,6 +78,7 @@ function ServiceMainTabContent({ serviceDetails, saveUpdatedServiceData, pagingInfo, + isVersionPage = false, }: Readonly) { const { t } = useTranslation(); const { serviceCategory } = useParams<{ @@ -76,7 +86,10 @@ function ServiceMainTabContent({ }>(); const { fqn: serviceFQN } = useFqn(); + const { permissions } = usePermissionProvider(); + const [isEdit, setIsEdit] = useState(false); + const [pageData, setPageData] = useState([]); const tier = getTierTags(serviceDetails?.tags ?? []); const tags = getTagsWithoutTier(serviceDetails?.tags ?? []); @@ -131,9 +144,69 @@ function ServiceMainTabContent({ setIsEdit(false); }; + const handleDisplayNameUpdate = useCallback( + async (entityData: EntityName, id?: string) => { + try { + const pageDataDetails = pageData.find((data) => data.id === id); + if (!pageDataDetails) { + return; + } + const updatedData = { + ...pageDataDetails, + displayName: entityData.displayName || undefined, + }; + const jsonPatch = compare(pageDataDetails, updatedData); + await callServicePatchAPI( + serviceCategory, + pageDataDetails.id, + jsonPatch + ); + setPageData((prevData) => + prevData.map((data) => + data.id === id + ? { ...data, displayName: entityData.displayName } + : data + ) + ); + } catch (error) { + showErrorToast(error as AxiosError); + } + }, + [pageData, serviceCategory] + ); + + const editDisplayNamePermission = useMemo(() => { + if (isVersionPage) { + return false; + } + + const servicePermissions = { + databaseServices: permissions.databaseService, + messagingServices: permissions.messagingService, + dashboardServices: permissions.dashboardService, + pipelineServices: permissions.pipelineService, + mlmodelServices: permissions.mlmodelService, + storageServices: permissions.storageService, + searchServices: permissions.searchService, + apiServices: permissions.apiService, + }; + + const currentPermission = + servicePermissions[serviceCategory as keyof typeof servicePermissions]; + + return ( + currentPermission?.EditAll || currentPermission?.EditDisplayName || false + ); + }, [permissions, serviceCategory, isVersionPage]); + const tableColumn: ColumnsType = useMemo( - () => getServiceMainTabColumns(serviceCategory), - [serviceCategory] + () => + getServiceMainTabColumns( + serviceCategory, + editDisplayNamePermission, + handleDisplayNameUpdate + ), + [serviceCategory, handleDisplayNameUpdate, editDisplayNamePermission] ); const entityType = useMemo( @@ -160,6 +233,10 @@ function ServiceMainTabContent({ [servicePermission, serviceDetails] ); + useEffect(() => { + setPageData(data); + }, [data]); + return ( @@ -210,7 +287,7 @@ function ServiceMainTabContent({ bordered columns={tableColumn} data-testid="service-children-table" - dataSource={data} + dataSource={pageData} locale={{ emptyText: , }} diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx index c629f20711fd..2c58500e1216 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/utils/ServiceMainTabContentUtils.tsx @@ -17,44 +17,51 @@ import { t } from 
'i18next'; import { isUndefined } from 'lodash'; import { ServiceTypes } from 'Models'; import React from 'react'; -import { Link } from 'react-router-dom'; +import DisplayName from '../components/common/DisplayName/DisplayName'; import { OwnerLabel } from '../components/common/OwnerLabel/OwnerLabel.component'; import RichTextEditorPreviewer from '../components/common/RichTextEditor/RichTextEditorPreviewer'; +import { EntityName } from '../components/Modals/EntityNameModal/EntityNameModal.interface'; import TagsViewer from '../components/Tag/TagsViewer/TagsViewer'; import { NO_DATA_PLACEHOLDER } from '../constants/constants'; import { ServiceCategory } from '../enums/service.enum'; import { Database } from '../generated/entity/data/database'; import { Pipeline } from '../generated/entity/data/pipeline'; import { ServicePageData } from '../pages/ServiceDetailsPage/ServiceDetailsPage'; -import { getEntityName } from './EntityUtils'; +import { patchApiCollection } from '../rest/apiCollectionsAPI'; +import { patchDashboardDetails } from '../rest/dashboardAPI'; +import { patchDatabaseDetails } from '../rest/databaseAPI'; +import { patchMlModelDetails } from '../rest/mlModelAPI'; +import { patchPipelineDetails } from '../rest/pipelineAPI'; +import { patchSearchIndexDetails } from '../rest/SearchIndexAPI'; +import { patchContainerDetails } from '../rest/storageAPI'; +import { patchTopicDetails } from '../rest/topicsAPI'; import { getLinkForFqn } from './ServiceUtils'; import { getUsagePercentile } from './TableUtils'; export const getServiceMainTabColumns = ( - serviceCategory: ServiceTypes + serviceCategory: ServiceTypes, + editDisplayNamePermission?: boolean, + handleDisplayNameUpdate?: ( + entityData: EntityName, + id?: string + ) => Promise ): ColumnsType => [ { title: t('label.name'), dataIndex: 'name', key: 'name', width: 280, - render: (_, record: ServicePageData) => { - return ( - - - {getEntityName(record)} - - - ); - }, + render: (_, record: ServicePageData) => ( + + ), }, { title: t('label.description'), @@ -123,3 +130,30 @@ export const getServiceMainTabColumns = ( ] : []), ]; + +export const callServicePatchAPI = async ( + serviceCategory: ServiceTypes, + id: string, + jsonPatch: any +) => { + switch (serviceCategory) { + case ServiceCategory.DATABASE_SERVICES: + return await patchDatabaseDetails(id, jsonPatch); + case ServiceCategory.MESSAGING_SERVICES: + return await patchTopicDetails(id, jsonPatch); + case ServiceCategory.DASHBOARD_SERVICES: + return await patchDashboardDetails(id, jsonPatch); + case ServiceCategory.PIPELINE_SERVICES: + return await patchPipelineDetails(id, jsonPatch); + case ServiceCategory.ML_MODEL_SERVICES: + return await patchMlModelDetails(id, jsonPatch); + case ServiceCategory.STORAGE_SERVICES: + return await patchContainerDetails(id, jsonPatch); + case ServiceCategory.SEARCH_SERVICES: + return await patchSearchIndexDetails(id, jsonPatch); + case ServiceCategory.API_SERVICES: + return await patchApiCollection(id, jsonPatch); + default: + return; + } +}; From 4720a8da2bb0d8504c9d22d8db49252d9ca4b028 Mon Sep 17 00:00:00 2001 From: Shailesh Parmar Date: Thu, 28 Nov 2024 20:26:30 +0530 Subject: [PATCH 2/7] playwright: added initial db setup for playwright test (#18832) * playwright: fixed service ingestion aut * updated the initial start of the playwright test --- .../ui/playwright/constant/permission.ts | 42 ++++++++++++++++ .../resources/ui/playwright/e2e/auth.setup.ts | 25 ++++++++-- .../support/access-control/PoliciesClass.ts | 2 + 
.../entity/ingestion/ServiceBaseClass.ts | 2 +- .../ui/playwright/utils/permission.ts | 48 ++++++++++++++++++- 5 files changed, 112 insertions(+), 7 deletions(-) diff --git a/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts b/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts index f8f6515de1c5..b4dccdce8030 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/constant/permission.ts @@ -62,6 +62,48 @@ export const DATA_STEWARD_RULES: PolicyRulesType[] = [ }, ]; +export const DATA_CONSUMER_RULES: PolicyRulesType[] = [ + { + name: 'DataConsumerPolicy-EditRule', + resources: ['All'], + operations: [ + 'EditDescription', + 'EditGlossaryTerms', + 'EditTags', + 'EditTier', + 'ViewAll', + ], + effect: 'allow', + }, +]; + +export const ORGANIZATION_POLICY_RULES: PolicyRulesType[] = [ + { + name: 'OrganizationPolicy-NoOwner-Rule', + description: + 'Allow any one to set the owner of an entity that has no owner set.', + effect: 'allow', + operations: ['EditOwners'], + resources: ['All'], + condition: 'noOwner()', + }, + { + name: 'OrganizationPolicy-Owner-Rule', + description: 'Allow all the operations on an entity for the owner.', + effect: 'allow', + operations: ['All'], + resources: ['All'], + condition: 'isOwner()', + }, + { + name: 'OrganizationPolicy-ViewAll-Rule', + description: 'Allow all users to discover data assets.', + effect: 'allow', + operations: ['ViewAll'], + resources: ['All'], + }, +]; + export const GLOBAL_SETTING_PERMISSIONS: Record< string, { testid: GlobalSettingOptions; isCustomProperty?: boolean } diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts index 8894062b2f7e..fc6b43f36141 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/auth.setup.ts @@ -10,24 +10,39 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { test as setup } from '@playwright/test'; +import { Page, test as setup } from '@playwright/test'; import { JWT_EXPIRY_TIME_MAP } from '../constant/login'; import { AdminClass } from '../support/user/AdminClass'; import { getApiContext } from '../utils/common'; import { updateJWTTokenExpiryTime } from '../utils/login'; +import { + updateDefaultDataConsumerPolicy, + updateDefaultOrganizationPolicy, +} from '../utils/permission'; import { removeOrganizationPolicyAndRole } from '../utils/team'; const adminFile = 'playwright/.auth/admin.json'; +const initialSetup = async (page: Page) => { + const { apiContext, afterAction } = await getApiContext(page); + // Update JWT expiry time to 4 hours + await updateJWTTokenExpiryTime(apiContext, JWT_EXPIRY_TIME_MAP['4 hours']); + // Remove organization policy and role + await removeOrganizationPolicyAndRole(apiContext); + // update default Organization policy + await updateDefaultOrganizationPolicy(apiContext); + // update default Data consumer policy + await updateDefaultDataConsumerPolicy(apiContext); + + await afterAction(); +}; + setup('authenticate as admin', async ({ page }) => { const admin = new AdminClass(); // login with admin user await admin.login(page); await page.waitForURL('**/my-data'); - const { apiContext, afterAction } = await getApiContext(page); - await updateJWTTokenExpiryTime(apiContext, JWT_EXPIRY_TIME_MAP['4 hours']); - await removeOrganizationPolicyAndRole(apiContext); - await afterAction(); + await initialSetup(page); await admin.logout(page); await page.waitForURL('**/signin'); await admin.login(page); diff --git a/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts b/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts index 08853690bcf0..abf90b38e9d6 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/support/access-control/PoliciesClass.ts @@ -27,6 +27,8 @@ export type PolicyRulesType = { resources: string[]; operations: string[]; effect: string; + description?: string; + condition?: string; }; export class PolicyClass { diff --git a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts index 4de563f6313f..540851881eaa 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts @@ -170,7 +170,7 @@ class ServiceBaseClass { // Header available once page loads await page.waitForSelector('[data-testid="data-assets-header"]'); - await page.getByTestId('loader').waitFor({ state: 'detached' }); + await page.getByTestId('loader').first().waitFor({ state: 'detached' }); await page.getByTestId('ingestions').click(); await page .getByLabel('Ingestions') diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts index 26fabc27009a..e37b9ea0d801 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/permission.ts @@ -10,7 +10,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { expect, Page } from '@playwright/test'; +import { APIRequestContext, expect, Page } from '@playwright/test'; +import { + DATA_CONSUMER_RULES, + ORGANIZATION_POLICY_RULES, +} from '../constant/permission'; export const checkNoPermissionPlaceholder = async ( page: Page, @@ -117,3 +121,45 @@ export const validateViewPermissions = async ( await page.waitForLoadState('domcontentloaded'); await checkNoPermissionPlaceholder(page, /Custom Properties/); }; + +export const updateDefaultDataConsumerPolicy = async ( + apiContext: APIRequestContext +) => { + const dataConsumerRoleResponse = await apiContext + .get('/api/v1/policies/name/DataConsumerPolicy') + .then((response) => response.json()); + + await apiContext.patch(`/api/v1/policies/${dataConsumerRoleResponse.id}`, { + data: [ + { + op: 'replace', + path: '/rules', + value: DATA_CONSUMER_RULES, + }, + ], + headers: { + 'Content-Type': 'application/json-patch+json', + }, + }); +}; + +export const updateDefaultOrganizationPolicy = async ( + apiContext: APIRequestContext +) => { + const orgPolicyResponse = await apiContext + .get('/api/v1/policies/name/OrganizationPolicy') + .then((response) => response.json()); + + await apiContext.patch(`/api/v1/policies/${orgPolicyResponse.id}`, { + data: [ + { + op: 'replace', + path: '/rules', + value: ORGANIZATION_POLICY_RULES, + }, + ], + headers: { + 'Content-Type': 'application/json-patch+json', + }, + }); +}; From 641058301886828323556c3f6194726c5dee5a3d Mon Sep 17 00:00:00 2001 From: Sriharsha Chintalapani Date: Thu, 28 Nov 2024 07:37:45 -0800 Subject: [PATCH 3/7] Fix export async for users with "." in their email's principal name (#18845) * Fix exportAsync failing for usernames with . in it * Fix exportAsync failing for usernames with . in it --- .../util/WebsocketNotificationHandler.java | 51 +++++++++++++------ 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java index ceecb6265c7a..6e7bed98aa5e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/WebsocketNotificationHandler.java @@ -37,6 +37,7 @@ import org.openmetadata.schema.type.api.BulkOperationResult; import org.openmetadata.schema.type.csv.CsvImportResult; import org.openmetadata.service.Entity; +import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.resources.feeds.MessageParser; import org.openmetadata.service.socket.WebSocketManager; @@ -65,8 +66,10 @@ public static void sendCsvExportCompleteNotification( CSVExportMessage message = new CSVExportMessage(jobId, "COMPLETED", csvData, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + } } public static void bulkAssetsOperationCompleteNotification( @@ -75,8 +78,10 @@ public static void bulkAssetsOperationCompleteNotification( new BulkAssetsOperationMessage(jobId, "COMPLETED", result, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId 
= getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + } } public static void bulkAssetsOperationFailedNotification( @@ -84,8 +89,10 @@ public static void bulkAssetsOperationFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.BULK_ASSETS_CHANNEL, jsonMessage); + } } private void handleNotifications(ContainerResponseContext responseContext) { @@ -186,14 +193,24 @@ public static void sendCsvExportFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_EXPORT_CHANNEL, jsonMessage); + } } private static UUID getUserIdFromSecurityContext(SecurityContext securityContext) { - String username = securityContext.getUserPrincipal().getName(); - User user = Entity.getCollectionDAO().userDAO().findEntityByName(username); - return user.getId(); + try { + String username = securityContext.getUserPrincipal().getName(); + User user = + Entity.getCollectionDAO() + .userDAO() + .findEntityByName(FullyQualifiedName.quoteName(username)); + return user.getId(); + } catch (EntityNotFoundException e) { + LOG.error("User not found ", e); + } + return null; } public static void sendCsvImportCompleteNotification( @@ -201,8 +218,10 @@ public static void sendCsvImportCompleteNotification( CSVImportMessage message = new CSVImportMessage(jobId, "COMPLETED", result, null); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + } } public static void sendCsvImportFailedNotification( @@ -210,7 +229,9 @@ public static void sendCsvImportFailedNotification( CSVExportMessage message = new CSVExportMessage(jobId, "FAILED", null, errorMessage); String jsonMessage = JsonUtils.pojoToJson(message); UUID userId = getUserIdFromSecurityContext(securityContext); - WebSocketManager.getInstance() - .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + if (userId != null) { + WebSocketManager.getInstance() + .sendToOne(userId, WebSocketManager.CSV_IMPORT_CHANNEL, jsonMessage); + } } } From da176767a88eb8edc2fe94e26a1c3dd275ab764a Mon Sep 17 00:00:00 2001 From: mgorsk1 Date: Thu, 28 Nov 2024 18:30:11 +0100 Subject: [PATCH 4/7] feat: add dbt freshness check test (#18730) * add dbt freshness check * docs * run linting * add test case param definition * fix test case param definition * add config for dbt http, fix linting * refactor (only create freshness test definition when user 
executed one) * fix dbt files class * fix dbt files class 2 * fix dbt objects class * fix linting * fix pylint * fix linting once and for all --------- Co-authored-by: Teddy --- .../source/database/dbt/constants.py | 4 ++ .../source/database/dbt/dbt_config.py | 18 +++++ .../source/database/dbt/dbt_service.py | 9 ++- .../source/database/dbt/dbt_utils.py | 29 ++++++++ .../ingestion/source/database/dbt/metadata.py | 71 ++++++++++++++++++- .../ingestion/source/database/dbt/models.py | 2 + ingestion/tests/unit/test_dbt.py | 3 +- .../dbtconfig/dbtHttpConfig.json | 5 ++ .../dbtconfig/dbtLocalConfig.json | 5 ++ 9 files changed, 143 insertions(+), 3 deletions(-) diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/constants.py b/ingestion/src/metadata/ingestion/source/database/dbt/constants.py index 83c49c0724a4..834e248d2fa5 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/constants.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/constants.py @@ -82,6 +82,7 @@ DBT_CATALOG_FILE_NAME = "catalog.json" DBT_MANIFEST_FILE_NAME = "manifest.json" DBT_RUN_RESULTS_FILE_NAME = "run_results" +DBT_SOURCES_FILE_NAME = "sources.json" class SkipResourceTypeEnum(Enum): @@ -91,6 +92,7 @@ class SkipResourceTypeEnum(Enum): ANALYSIS = "analysis" TEST = "test" + SOURCE = "source" class CompiledQueriesEnum(Enum): @@ -127,6 +129,7 @@ class DbtTestFailureEnum(Enum): FAILURE = "failure" FAIL = "fail" + ERROR = "error" class DbtCommonEnum(Enum): @@ -137,6 +140,7 @@ class DbtCommonEnum(Enum): OWNER = "owner" NODES = "nodes" SOURCES = "sources" + SOURCES_FILE = "sources_file" SOURCE = "source" RESOURCETYPE = "resource_type" MANIFEST_NODE = "manifest_node" diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py index 216d7c6e9f83..66e25332e1e9 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_config.py @@ -43,6 +43,7 @@ DBT_CATALOG_FILE_NAME, DBT_MANIFEST_FILE_NAME, DBT_RUN_RESULTS_FILE_NAME, + DBT_SOURCES_FILE_NAME, ) from metadata.ingestion.source.database.dbt.models import DbtFiles from metadata.readers.file.config_source_factory import get_reader @@ -85,6 +86,7 @@ def _(config: DbtLocalConfig): config.dbtManifestFilePath, config.dbtCatalogFilePath, config.dbtRunResultsFilePath, + config.dbtSourcesFilePath, ] yield from download_dbt_files( blob_grouped_by_directory=blob_grouped_by_directory, @@ -123,12 +125,22 @@ def _(config: DbtHttpConfig): dbt_catalog = requests.get( # pylint: disable=missing-timeout config.dbtCatalogHttpPath ) + + dbt_sources = None + if config.dbtSourcesHttpPath: + logger.debug( + f"Requesting [dbtSourcesHttpPath] to: {config.dbtSourcesHttpPath}" + ) + dbt_sources = requests.get( # pylint: disable=missing-timeout + config.dbtSourcesHttpPath + ) if not dbt_manifest: raise DBTConfigException("Manifest file not found in file server") yield DbtFiles( dbt_catalog=dbt_catalog.json() if dbt_catalog else None, dbt_manifest=dbt_manifest.json(), dbt_run_results=[dbt_run_results.json()] if dbt_run_results else None, + dbt_sources=dbt_sources.json() if dbt_sources else None, ) except DBTConfigException as exc: raise exc @@ -243,6 +255,7 @@ def get_blobs_grouped_by_dir(blobs: List[str]) -> Dict[str, List[str]]: return blob_grouped_by_directory +# pylint: disable=too-many-locals, too-many-branches def download_dbt_files( blob_grouped_by_directory: Dict, config, client, bucket_name: 
Optional[str] ) -> Iterable[DbtFiles]: @@ -255,6 +268,7 @@ def download_dbt_files( ) in blob_grouped_by_directory.items(): dbt_catalog = None dbt_manifest = None + dbt_sources = None dbt_run_results = [] kwargs = {} if bucket_name: @@ -285,12 +299,16 @@ def download_dbt_files( logger.warning( f"{DBT_RUN_RESULTS_FILE_NAME} not found in {key}: {exc}" ) + if DBT_SOURCES_FILE_NAME == blob_file_name.lower(): + logger.debug(f"{DBT_SOURCES_FILE_NAME} found in {key}") + dbt_sources = reader.read(path=blob, **kwargs) if not dbt_manifest: raise DBTConfigException(f"Manifest file not found at: {key}") yield DbtFiles( dbt_catalog=json.loads(dbt_catalog) if dbt_catalog else None, dbt_manifest=json.loads(dbt_manifest), dbt_run_results=dbt_run_results if dbt_run_results else None, + dbt_sources=json.loads(dbt_sources) if dbt_sources else None, ) except DBTConfigException as exc: logger.warning(exc) diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py index aa2d65f4e2cf..50a160164a00 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_service.py @@ -15,7 +15,12 @@ from abc import ABC, abstractmethod from typing import Iterable, List -from dbt_artifacts_parser.parser import parse_catalog, parse_manifest, parse_run_results +from dbt_artifacts_parser.parser import ( + parse_catalog, + parse_manifest, + parse_run_results, + parse_sources, +) from pydantic import Field from typing_extensions import Annotated @@ -209,11 +214,13 @@ def get_dbt_objects(self) -> Iterable[DbtObjects]: self.remove_run_result_non_required_keys( run_results=self.context.get().dbt_file.dbt_run_results ) + dbt_objects = DbtObjects( dbt_catalog=parse_catalog(self.context.get().dbt_file.dbt_catalog) if self.context.get().dbt_file.dbt_catalog else None, dbt_manifest=parse_manifest(self.context.get().dbt_file.dbt_manifest), + dbt_sources=parse_sources(self.context.get().dbt_file.dbt_sources), dbt_run_results=[ parse_run_results(run_result_file) for run_result_file in self.context.get().dbt_file.dbt_run_results diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py index 70bfcabe1b13..1897f4a7f5d3 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/dbt_utils.py @@ -44,6 +44,20 @@ def create_test_case_parameter_definitions(dbt_test): } ] return test_case_param_definition + if hasattr(dbt_test, "freshness"): + test_case_param_definition = [ + { + "name": "warn_after", + "displayName": "warn_after", + "required": False, + }, + { + "name": "error_after", + "displayName": "error_after", + "required": False, + }, + ] + return test_case_param_definition except Exception as err: # pylint: disable=broad-except logger.debug(traceback.format_exc()) logger.error( @@ -67,6 +81,21 @@ def create_test_case_parameter_values(dbt_test): {"name": manifest_node.test_metadata.name, "value": dbt_test_values} ] return test_case_param_values + if hasattr(manifest_node, "freshness"): + warn_after = manifest_node.freshness.warn_after + error_after = manifest_node.freshness.error_after + + test_case_param_values = [ + { + "name": "error_after", + "value": f"{error_after.count} {error_after.period.value}", + }, + { + "name": "warn_after", + "value": f"{warn_after.count} {warn_after.period.value}", + }, + ] + 
return test_case_param_values except Exception as err: # pylint: disable=broad-except logger.debug(traceback.format_exc()) logger.error( diff --git a/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py b/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py index 82bdf2e3085a..5c1b2cf81e31 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/metadata.py @@ -13,6 +13,7 @@ DBT source methods. """ import traceback +from copy import deepcopy from datetime import datetime from typing import Any, Iterable, List, Optional, Union @@ -324,7 +325,41 @@ def add_dbt_tests( None, ) - # pylint: disable=too-many-locals, too-many-branches + def _add_dbt_freshness_test_from_sources( + self, key: str, manifest_node, manifest_entities, dbt_objects: DbtObjects + ): + # in dbt manifest sources node name is table/view name (not test name like with test nodes) + # so in order for the test creation to be named precisely I am amending manifest node name within it's deepcopy + manifest_node_new = deepcopy(manifest_node) + manifest_node_new.name = manifest_node_new.name + "_freshness" + + freshness_test_result = next( + (item for item in dbt_objects.dbt_sources.results if item.unique_id == key), + None, + ) + + if freshness_test_result: + self.context.get().dbt_tests[key + "_freshness"] = { + DbtCommonEnum.MANIFEST_NODE.value: manifest_node_new + } + self.context.get().dbt_tests[key + "_freshness"][ + DbtCommonEnum.UPSTREAM.value + ] = self.parse_upstream_nodes(manifest_entities, manifest_node) + self.context.get().dbt_tests[key + "_freshness"][ + DbtCommonEnum.RESULTS.value + ] = freshness_test_result + + def add_dbt_sources( + self, key: str, manifest_node, manifest_entities, dbt_objects: DbtObjects + ) -> None: + """ + Method to append dbt test cases based on sources file for later processing + """ + self._add_dbt_freshness_test_from_sources( + key, manifest_node, manifest_entities, dbt_objects + ) + + # pylint: disable=too-many-locals, too-many-branches, too-many-statements def yield_data_models( self, dbt_objects: DbtObjects ) -> Iterable[Either[DataModelLink]]: @@ -376,6 +411,17 @@ def yield_data_models( ) continue + if ( + dbt_objects.dbt_sources + and resource_type == SkipResourceTypeEnum.SOURCE.value + ): + self.add_dbt_sources( + key, + manifest_node=manifest_node, + manifest_entities=manifest_entities, + dbt_objects=dbt_objects, + ) + # Skip the ephemeral nodes since it is not materialized if check_ephemeral_node(manifest_node): logger.debug(f"Skipping ephemeral DBT node: {key}.") @@ -549,6 +595,29 @@ def parse_upstream_nodes(self, manifest_entities, dbt_node): f"Failed to parse the DBT node {node} to get upstream nodes: {exc}" ) continue + + if dbt_node.resource_type == SkipResourceTypeEnum.SOURCE.value: + parent_fqn = fqn.build( + self.metadata, + entity_type=Table, + service_name="*", + database_name=get_corrected_name(dbt_node.database), + schema_name=get_corrected_name(dbt_node.schema_), + table_name=dbt_node.name, + ) + + # check if the parent table exists in OM before adding it to the upstream list + parent_table_entity: Optional[ + Union[Table, List[Table]] + ] = get_entity_from_es_result( + entity_list=self.metadata.es_search_from_fqn( + entity_type=Table, fqn_search_string=parent_fqn + ), + fetch_multiple_entities=False, + ) + if parent_table_entity: + upstream_nodes.append(parent_fqn) + return upstream_nodes def parse_data_model_columns( diff --git 
a/ingestion/src/metadata/ingestion/source/database/dbt/models.py b/ingestion/src/metadata/ingestion/source/database/dbt/models.py index 88671141d43a..e505368994af 100644 --- a/ingestion/src/metadata/ingestion/source/database/dbt/models.py +++ b/ingestion/src/metadata/ingestion/source/database/dbt/models.py @@ -20,12 +20,14 @@ class DbtFiles(BaseModel): dbt_catalog: Optional[dict] = None dbt_manifest: dict + dbt_sources: Optional[dict] = None dbt_run_results: Optional[List[dict]] = None class DbtObjects(BaseModel): dbt_catalog: Optional[Any] = None dbt_manifest: Any + dbt_sources: Optional[Any] = None dbt_run_results: Optional[List[Any]] = None diff --git a/ingestion/tests/unit/test_dbt.py b/ingestion/tests/unit/test_dbt.py index b04d69c5fc5a..1c2db6edc0a2 100644 --- a/ingestion/tests/unit/test_dbt.py +++ b/ingestion/tests/unit/test_dbt.py @@ -51,6 +51,7 @@ "dbtCatalogFilePath": "sample/dbt_files/catalog.json", "dbtManifestFilePath": "sample/dbt_files/manifest.json", "dbtRunResultsFilePath": "sample/dbt_files/run_results.json", + "dbtSourcesFilePath": "sample/dbt_files/sources.json", }, } }, @@ -682,7 +683,7 @@ def check_yield_datamodel(self, dbt_objects, expected_data_models): self.assertEqual(expected, original) @patch("metadata.ingestion.ometa.mixins.es_mixin.ESMixin.es_search_from_fqn") - def test_updtream_nodes_for_lineage(self, es_search_from_fqn): + def test_upstream_nodes_for_lineage(self, es_search_from_fqn): expected_upstream_nodes = [ "model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json index 179573b67ecc..7da25a515369 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtHttpConfig.json @@ -26,6 +26,11 @@ "title": "DBT Run Results HTTP File Path", "description": "DBT run results http file path to extract the test results information.", "type": "string" + }, + "dbtSourcesHttpPath": { + "title": "DBT Sources HTTP File Path", + "description": "DBT sources http file path to extract freshness test results information.", + "type": "string" } }, "additionalProperties": false, diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json index 171b2a675f8e..94ffc2cf1774 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtLocalConfig.json @@ -26,6 +26,11 @@ "title": "DBT Run Results File Path", "description": "DBT run results file path to extract the test results information.", "type": "string" + }, + "dbtSourcesFilePath": { + "title": "DBT Sources File Path", + "description": "DBT sources file path to extract the freshness test result.", + "type": "string" } }, "additionalProperties": false, From ac2f6d713251a7b2908978a2cb0519f4dc079846 Mon Sep 17 00:00:00 2001 From: Teddy Date: Thu, 28 Nov 2024 18:49:11 +0100 Subject: [PATCH 5/7] MINOR - Fix sqa table reference (#18839) * fix: sqa table reference * style: ran python linting * fix: added raw dataset to query runner * fix: get table and schema name from orm object * fix: get table level config for 
table tests --- .../sqlalchemy/sqa_test_suite_interface.py | 1 + .../columnValueLengthsToBeBetween.py | 2 +- .../sqlalchemy/columnValueMaxToBeBetween.py | 2 +- .../sqlalchemy/columnValueMeanToBeBetween.py | 2 +- .../columnValueMedianToBeBetween.py | 2 +- .../sqlalchemy/columnValueMinToBeBetween.py | 2 +- .../columnValueStdDevToBeBetween.py | 2 +- .../sqlalchemy/columnValuesMissingCount.py | 2 +- .../sqlalchemy/columnValuesSumToBeBetween.py | 2 +- .../columnValuesToBeAtExpectedLocation.py | 2 +- .../sqlalchemy/columnValuesToBeBetween.py | 2 +- .../sqlalchemy/columnValuesToBeInSet.py | 2 +- .../sqlalchemy/columnValuesToBeNotInSet.py | 2 +- .../sqlalchemy/columnValuesToBeNotNull.py | 2 +- .../sqlalchemy/columnValuesToBeUnique.py | 10 ++------- .../sqlalchemy/columnValuesToMatchRegex.py | 2 +- .../sqlalchemy/columnValuesToNotMatchRegex.py | 2 +- .../tableRowInsertedCountToBeBetween.py | 4 ++-- .../database/bigquery/profiler/profiler.py | 2 +- .../sqlalchemy/db2/profiler_interface.py | 2 +- .../sqlalchemy/mariadb/profiler_interface.py | 4 ++-- .../sqlalchemy/profiler_interface.py | 21 ++++++++++--------- .../single_store/profiler_interface.py | 4 ++-- .../snowflake/profiler_interface.py | 2 +- .../sqlalchemy/stored_statistics_profiler.py | 8 +++---- .../sqlalchemy/trino/profiler_interface.py | 4 ++-- .../orm/functions/table_metric_computer.py | 2 +- .../src/metadata/profiler/processor/runner.py | 14 ++++++++++++- .../metadata/sampler/sqlalchemy/sampler.py | 11 +++++----- .../unit/profiler/sqlalchemy/test_runner.py | 4 +++- 30 files changed, 66 insertions(+), 57 deletions(-) diff --git a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py index 67baa22335d5..76d8d640e307 100644 --- a/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py +++ b/ingestion/src/metadata/data_quality/interface/sqlalchemy/sqa_test_suite_interface.py @@ -104,6 +104,7 @@ def _create_runner(self) -> QueryRunner: QueryRunner( session=self.session, dataset=self.dataset, + raw_dataset=self.sampler.raw_dataset, partition_details=self.table_partition_config, profile_sample_query=self.table_sample_query, ) diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py index 2473436cd2b0..33df13ff2098 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueLengthsToBeBetween.py @@ -40,7 +40,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py index 235ca42985e1..13d860c35f67 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMaxToBeBetween.py @@ -38,7 +38,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - 
inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py index 80aca69912f2..5e45344b3ff4 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMeanToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py index a4104213470a..b473c4bda918 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMedianToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py index dd867dab6ec0..512b3bee68ea 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueMinToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py index 8be659d211e3..7d08f3ab8fdf 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValueStdDevToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py index ebbd620dd61d..a3b06e648b63 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesMissingCount.py @@ -42,7 +42,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( 
self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py index 16b7f939cb1d..96ba1d14d8d7 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesSumToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py index 138c5d0c2f8b..a4e6c1ef9a3b 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeAtExpectedLocation.py @@ -37,7 +37,7 @@ class ColumnValuesToBeAtExpectedLocationValidator( def _fetch_data(self, columns: List[str]) -> Iterator: """Fetch data from the runner object""" self.runner = cast(QueryRunner, self.runner) - inspection = inspect(self.runner.table) + inspection = inspect(self.runner.dataset) table_columns: List[Column] = inspection.c if inspection is not None else [] cols = [col for col in table_columns if col.name in columns] for col in cols: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py index af8fcc9b5fec..c1cb8ad1acc9 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeBetween.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py index 4bccac6445a0..f920ae253059 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeInSet.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py index d50e98efa9b2..012059bd72fb 100644 --- 
a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotInSet.py @@ -39,7 +39,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py index da11812ad8c1..e425cbcb89f9 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeNotNull.py @@ -42,7 +42,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py index 89a93f09f90e..daf1afef4bc2 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToBeUnique.py @@ -17,7 +17,6 @@ from sqlalchemy import Column, inspect from sqlalchemy.exc import SQLAlchemyError -from sqlalchemy.orm.util import AliasedClass from metadata.data_quality.validations.column.base.columnValuesToBeUnique import ( BaseColumnValuesToBeUniqueValidator, @@ -41,7 +40,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: @@ -53,12 +52,7 @@ def _run_results(self, metric: Metrics, column: Column) -> Optional[int]: """ count = Metrics.COUNT.value(column).fn() unique_count = Metrics.UNIQUE_COUNT.value(column).query( - sample=self.runner._sample # pylint: disable=protected-access - if isinstance( - self.runner._sample, # pylint: disable=protected-access - AliasedClass, - ) - else self.runner.table, + sample=self.runner.dataset, session=self.runner._session, # pylint: disable=protected-access ) # type: ignore diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py index be28e57963b3..0f1c0537303c 100644 --- a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToMatchRegex.py @@ -43,7 +43,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results( diff --git a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py index f5a8c2656dcd..fda43496900b 100644 --- 
a/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py +++ b/ingestion/src/metadata/data_quality/validations/column/sqlalchemy/columnValuesToNotMatchRegex.py @@ -43,7 +43,7 @@ def _get_column_name(self) -> Column: """ return self.get_column_name( self.test_case.entityLink.root, - inspect(self.runner.table).c, + inspect(self.runner.dataset).c, ) def _run_results(self, metric: Metrics, column: Column, **kwargs) -> Optional[int]: diff --git a/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py b/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py index 425f49606527..30894951c3f4 100644 --- a/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py +++ b/ingestion/src/metadata/data_quality/validations/table/sqlalchemy/tableRowInsertedCountToBeBetween.py @@ -13,7 +13,7 @@ Validator for table row inserted count to be between test case """ -from sqlalchemy import Column, text +from sqlalchemy import Column, inspect, text from metadata.data_quality.validations.mixins.sqa_validator_mixin import ( SQAValidatorMixin, @@ -52,7 +52,7 @@ def _run_results(self, column_name: str, range_type: str, range_interval: int): date_or_datetime_fn = dispatch_to_date_or_datetime( range_interval, text(range_type), - get_partition_col_type(column_name.name, self.runner.table.c), # type: ignore + get_partition_col_type(column_name.name, inspect(self.runner.dataset).c), # type: ignore ) return dict( diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py b/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py index f85810bfd13a..2cd0f225b31d 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/profiler/profiler.py @@ -22,7 +22,7 @@ def _compute_system_metrics( **kwargs, ) -> List[SystemProfile]: return self.system_metrics_computer.get_system_metrics( - table=runner.table, + table=runner.dataset, usage_location=self.service_connection_config.usageLocation, ) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py index 4c92aa1c8a0e..55aeca6d1ef3 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/db2/profiler_interface.py @@ -32,7 +32,7 @@ def _programming_error_static_metric(self, runner, column, exc, session, metrics # pylint: disable=protected-access if exc.orig and "overflow" in exc.orig._message: logger.info( - f"Computing metrics without sum for {runner.table.name}.{column.name}" + f"Computing metrics without sum for {runner.table_name}.{column.name}" ) return self._compute_static_metrics_wo_sum(metrics, runner, session, column) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py index edca4361ef77..cb2f24f50149 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/mariadb/profiler_interface.py @@ -77,11 +77,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError: logger.info( - f"Skipping window 
metrics for {runner.table.name}.{column.name} due to overflow" + f"Skipping window metrics for {runner.table_name}.{column.name} due to overflow" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py index cfe76f34be93..571cd247a9db 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/profiler_interface.py @@ -156,7 +156,7 @@ def _compute_static_metrics_wo_sum( ) return dict(row) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -194,7 +194,7 @@ def _compute_table_metrics( except Exception as exc: logger.debug(traceback.format_exc()) logger.warning( - f"Error trying to compute profile for {runner.table.name}: {exc}" # type: ignore + f"Error trying to compute profile for {runner.table_name}: {exc}" # type: ignore ) session.rollback() raise RuntimeError(exc) @@ -231,7 +231,7 @@ def _compute_static_metrics( runner, column, exc, session, metrics ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -274,10 +274,10 @@ def _compute_query_metrics( runner._session.get_bind().dialect.name != Dialects.Druid ): - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -310,10 +310,10 @@ def _compute_window_metrics( return dict(row) except ProgrammingError as exc: logger.info( - f"Skipping metrics for {runner.table.name}.{column.name} due to {exc}" + f"Skipping metrics for {runner.table_name}.{column.name} due to {exc}" ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None @@ -347,7 +347,7 @@ def _compute_custom_metrics( ) except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{metric.columnName}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{metric.columnName}: {exc}" logger.debug(traceback.format_exc()) logger.warning(msg) if custom_metrics: @@ -371,8 +371,8 @@ def _compute_system_metrics( Returns: dictionnary of results """ - logger.debug(f"Computing system metrics for {runner.table.name}") - return self.system_metrics_computer.get_system_metrics(table=runner.table) + logger.debug(f"Computing system metrics for {runner.table_name}") + return 
self.system_metrics_computer.get_system_metrics(table=runner.dataset) def _create_thread_safe_runner(self, session, column=None): """Create thread safe runner""" @@ -380,6 +380,7 @@ def _create_thread_safe_runner(self, session, column=None): thread_local.runner = QueryRunner( session=session, dataset=self.sampler.get_dataset(column=column), + raw_dataset=self.sampler.raw_dataset, partition_details=self.sampler.partition_details, profile_sample_query=self.sampler.sample_query, ) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py index 7c032e651b63..26e4b3174566 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/single_store/profiler_interface.py @@ -76,11 +76,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError: logger.info( - f"Skipping window metrics for {runner.table.name}.{column.name} due to overflow" + f"Skipping window metrics for {runner.table_name}.{column.name} due to overflow" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py index 464fbdd88a4e..61df2a3c7a6f 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/snowflake/profiler_interface.py @@ -41,7 +41,7 @@ def _programming_error_static_metric(self, runner, column, exc, session, metrics session.bind.dialect.name ): logger.info( - f"Computing metrics without sum for {runner.table.name}.{column.name}" + f"Computing metrics without sum for {runner.table_name}.{column.name}" ) return self._compute_static_metrics_wo_sum(metrics, runner, session, column) return None diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py index c949098d974c..c0f570eb0e32 100644 --- a/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/stored_statistics_profiler.py @@ -79,8 +79,8 @@ def _compute_static_metrics( list, partition(self.is_statistic_metric, metrics), ) - schema = runner.table.schema - table_name = runner.table.name + schema = runner.schema_name + table_name = runner.table_name logger.debug( "Getting statistics for column: %s.%s.%s", schema, @@ -118,8 +118,8 @@ def _compute_table_metrics( list, partition(self.is_statistic_metric, metrics), ) - schema = runner.table.schema - table_name = runner.table.name + schema = runner.schema_name + table_name = runner.table_name logger.debug("Geting statistics for table: %s.%s", schema, table_name) result.update( super().get_table_statistics(stat_metrics, schema, table_name) diff --git a/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py b/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py index b7c387a748f4..a45b1bf512be 100644 --- 
a/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py +++ b/ingestion/src/metadata/profiler/interface/sqlalchemy/trino/profiler_interface.py @@ -76,11 +76,11 @@ def _compute_window_metrics( return dict(row) except ProgrammingError as err: logger.info( - f"Skipping window metrics for {runner.table.name}.{column.name} due to {err}" + f"Skipping window metrics for {runner.table_name}.{column.name} due to {err}" ) return None except Exception as exc: - msg = f"Error trying to compute profile for {runner.table.name}.{column.name}: {exc}" + msg = f"Error trying to compute profile for {runner.table_name}.{column.name}: {exc}" handle_query_exception(msg, exc, session) return None diff --git a/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py b/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py index 5f8a69cf2904..6a6a02188228 100644 --- a/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py +++ b/ingestion/src/metadata/profiler/orm/functions/table_metric_computer.py @@ -53,7 +53,7 @@ def __init__( self._metrics = metrics self._conn_config = conn_config self._database = self._runner._session.get_bind().url.database - self._table = self._runner.table + self._table = self._runner.dataset self._entity = entity @property diff --git a/ingestion/src/metadata/profiler/processor/runner.py b/ingestion/src/metadata/profiler/processor/runner.py index 3098a26083e1..8f80108de624 100644 --- a/ingestion/src/metadata/profiler/processor/runner.py +++ b/ingestion/src/metadata/profiler/processor/runner.py @@ -44,6 +44,7 @@ def __init__( self, session: Session, dataset: Union[DeclarativeMeta, AliasedClass], + raw_dataset: Table, partition_details: Optional[Dict] = None, profile_sample_query: Optional[str] = None, ): @@ -51,11 +52,12 @@ def __init__( self._dataset = dataset self.partition_details = partition_details self.profile_sample_query = profile_sample_query + self.raw_dataset = raw_dataset @property def table(self) -> Table: """Backward compatibility table attribute access""" - return self._dataset.__table__ + return self.raw_dataset @property def _sample(self): @@ -71,6 +73,16 @@ def dataset(self): def dataset(self, dataset): self._dataset = dataset + @property + def table_name(self): + """Table name attribute access""" + return self.raw_dataset.__table__.name + + @property + def schema_name(self): + """Table name attribute access""" + return self.raw_dataset.__table__.schema + def _build_query(self, *entities, **kwargs) -> Query: """Build query object diff --git a/ingestion/src/metadata/sampler/sqlalchemy/sampler.py b/ingestion/src/metadata/sampler/sqlalchemy/sampler.py index 3cb7fd62944d..0fa6a3d68bdc 100644 --- a/ingestion/src/metadata/sampler/sqlalchemy/sampler.py +++ b/ingestion/src/metadata/sampler/sqlalchemy/sampler.py @@ -16,7 +16,7 @@ from typing import List, Optional, Union, cast from sqlalchemy import Column, inspect, text -from sqlalchemy.orm import DeclarativeMeta, Query, aliased +from sqlalchemy.orm import DeclarativeMeta, Query from sqlalchemy.orm.util import AliasedClass from sqlalchemy.schema import Table from sqlalchemy.sql.sqltypes import Enum @@ -145,13 +145,12 @@ def get_dataset(self, column=None, **__) -> Union[DeclarativeMeta, AliasedClass] and self.sample_config.profile_sample_type == ProfileSampleType.PERCENTAGE ): if self.partition_details: - return self._partitioned_table() + partitioned = self._partitioned_table() + return partitioned.cte(f"{self.raw_dataset.__tablename__}_partitioned") return 
self.raw_dataset - sampled = self.get_sample_query(column=column) - - return aliased(self.raw_dataset, sampled) + return self.get_sample_query(column=column) def fetch_sample_data(self, columns: Optional[List[Column]] = None) -> TableData: """ @@ -230,7 +229,7 @@ def _rdn_sample_from_user_query(self) -> Query: def _partitioned_table(self) -> Query: """Return the Query object for partitioned tables""" - return aliased(self.raw_dataset, self.get_partitioned_query().subquery()) + return self.get_partitioned_query() def get_partitioned_query(self, query=None) -> Query: """Return the partitioned query""" diff --git a/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py b/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py index 738d4985fe49..880cc6ed94fe 100644 --- a/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py +++ b/ingestion/tests/unit/profiler/sqlalchemy/test_runner.py @@ -94,7 +94,9 @@ def setUpClass(cls) -> None: ) cls.dataset = sampler.get_dataset() - cls.raw_runner = QueryRunner(session=cls.session, dataset=cls.dataset) + cls.raw_runner = QueryRunner( + session=cls.session, dataset=cls.dataset, raw_dataset=sampler.raw_dataset + ) cls.timeout_runner: Timer = cls_timeout(1)(Timer()) # Insert 30 rows From 8fcb8ecdd71421486158cc915a543b1a2be8f920 Mon Sep 17 00:00:00 2001 From: Siddhant <86899184+Siddhanttimeline@users.noreply.github.com> Date: Thu, 28 Nov 2024 23:28:36 +0530 Subject: [PATCH 6/7] Add trigger option 'TestSuiteStatusUpdates' for TestSuite status changes (#18836) * fix: Provide trigger option as TestSuiteStatusUpdates for TestSuite source * add the playwright change to test the test suite status field in alert * localization changes --------- Co-authored-by: Aniket Katkar --- .../data/EntityObservabilityFilterDescriptor.json | 12 ++++++++++++ .../ui/playwright/utils/observabilityAlert.ts | 13 ++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json b/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json index 92687fed3271..8e7c7e4a83d1 100644 --- a/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json +++ b/openmetadata-service/src/main/resources/json/data/EntityObservabilityFilterDescriptor.json @@ -395,6 +395,18 @@ } ], "supportedActions" : [ + { + "name": "GetTestSuiteStatusUpdates", + "fullyQualifiedName": "eventSubscription.GetTestSuiteStatusUpdates", + "displayName": "Get Test Suite Status Updates", + "description" : "Get Status Updates Test Suite", + "effect" : "include", + "condition": "matchTestResult(${testResultList})", + "arguments": [ + "testResultList" + ], + "inputType": "runtime" + } ] } ] \ No newline at end of file diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts index ef68c74b5a88..a93ba4a4b165 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/observabilityAlert.ts @@ -321,7 +321,18 @@ export const getObservabilityCreationDetails = ({ exclude: false, }, ], - actions: [], + actions: [ + { + name: 'Get Test Suite Status Updates', + inputs: [ + { + inputSelector: 'test-result-select', + inputValue: 'Failed', + }, + ], + exclude: false, + }, + ], destinations: [ { mode: 'external', From 3453c799f7772b072867f923ecc5e7af4c68c4d5 Mon Sep 17 00:00:00 
2001 From: Siddhant <86899184+Siddhanttimeline@users.noreply.github.com> Date: Thu, 28 Nov 2024 23:59:46 +0530 Subject: [PATCH 7/7] FIX: Mask Slack App User & Bot Tokens (#18814) * fix: mask slack user and bot token. * refactor: Move the code in SlackApp --- .../openmetadata/service/resources/apps/AppResource.java | 1 - .../configuration/external/slackAppTokenConfiguration.json | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java index 53259cf8e320..21a88f23d942 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/apps/AppResource.java @@ -1135,7 +1135,6 @@ private App getApplication( // validate Bot if provided validateAndAddBot(app, createAppRequest.getBot()); - return app; } diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json b/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json index 9cf8a79cb389..5d1093341f52 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/applications/configuration/external/slackAppTokenConfiguration.json @@ -9,12 +9,14 @@ "userToken": { "title": "User Token", "description": "User Token", - "type": "string" + "type": "string", + "format": "password" }, "botToken": { "title": "Bot Token", "description": "Bot Token", - "type": "string" + "type": "string", + "format": "password" } }, "additionalProperties": false,
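
Note on the QueryRunner change in PATCH 5/7 above: the runner now receives both the sampled dataset and the raw ORM table, and resolves table_name / schema_name from the raw table rather than from the (possibly aliased or CTE-wrapped) dataset. A minimal usage sketch follows; it is illustrative only and not part of any patch above. The Users model is hypothetical, and the import path is taken from the diff; in the real flow the dataset and raw_dataset arguments come from the sampler, as shown in the test_runner.py hunk.

    # Illustrative sketch, not part of the patches above. Mirrors the new
    # QueryRunner signature and properties added in PATCH 5/7.
    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base

    from metadata.profiler.processor.runner import QueryRunner  # path per the diff

    Base = declarative_base()


    class Users(Base):  # hypothetical model, used only for this example
        __tablename__ = "users"
        __table_args__ = {"schema": "public"}
        id = Column(Integer, primary_key=True)


    with Session(create_engine("sqlite://")) as session:
        runner = QueryRunner(
            session=session,
            dataset=Users,      # in the real flow: sampler.get_dataset(column=...)
            raw_dataset=Users,  # in the real flow: sampler.raw_dataset
        )
        print(runner.table_name)   # "users"  -> raw_dataset.__table__.name
        print(runner.schema_name)  # "public" -> raw_dataset.__table__.schema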