From 2ae73acc8d57c8687c841e669f4601a6812ad977 Mon Sep 17 00:00:00 2001 From: bharatgulati Date: Tue, 16 May 2023 12:40:48 +0530 Subject: [PATCH] e2e_CloudSQLPostGreSQL Test scenarios --- cloudsql-postgresql-plugin/pom.xml | 8 +- .../sink/DesignTime.feature | 70 ++++ .../sink/DesignTimeWithMacros.feature | 53 +++ .../sink/DesignTimeWithValidation.feature | 143 ++++++++ .../cloudsql-postgresql/sink/RunTime.feature | 146 ++++++++ .../sink/RunTimeMacro.feature | 134 +++++++ .../source/DesignTime.feature | 57 +++ .../source/DesignTimeWithMacro.feature | 54 +++ .../source/DesignTimeWithValidation.feature | 229 ++++++++++++ .../source/RunTime.feature | 237 +++++++++++++ .../source/RunTimeMacro.feature | 334 ++++++++++++++++++ .../cloudsqlpostgresql/BQValidation.java | 268 ++++++++++++++ .../CloudSqlPostgreSqlClient.java | 174 +++++++++ .../runners/sinkrunner/TestRunner.java | 39 ++ .../sinkrunner/TestRunnerRequired.java | 36 ++ .../runners/sinkrunner/package-info.java | 19 + .../runners/sourcerunner/TestRunner.java | 38 ++ .../sourcerunner/TestRunnerRequired.java | 36 ++ .../runners/sourcerunner/package-info.java | 19 + .../stepsdesign/CloudSqlPostgreSql.java | 73 ++++ .../common/stepsdesign/TestSetUpHooks.java | 165 +++++++++ .../common/stepsdesign/package-info.java | 20 ++ .../resources/errorMessage.properties | 23 ++ .../pluginDataCyAttributes.properties | 18 + .../resources/pluginParameters.properties | 103 ++++++ .../BigQuery/BigQueryCreateTableQuery.txt | 2 + .../BigQuery/BigQueryInsertDataQuery.txt | 5 + 27 files changed, 2502 insertions(+), 1 deletion(-) create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java create mode 100644 
cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt create mode 100644 cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt diff --git a/cloudsql-postgresql-plugin/pom.xml b/cloudsql-postgresql-plugin/pom.xml index e8736f8b1..1e0c60b9b 100644 --- a/cloudsql-postgresql-plugin/pom.xml +++ b/cloudsql-postgresql-plugin/pom.xml @@ -50,7 +50,7 @@ com.google.guava guava - 23.0 + 31.0.1-jre @@ -102,6 +102,12 @@ 42.3.1 test + + com.google.code.gson + gson + 2.8.8 + test + diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature new file mode 100644 index 000000000..2eac819b0 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTime.feature @@ -0,0 +1,70 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
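+#
+# Note: the connection steps in the scenarios below assume a public CloudSQL
+# instance. For orientation only, with instanceType "public" the plugin is
+# expected to build a JDBC URL of roughly this shape (an illustrative sketch;
+# the angle-bracket placeholders are assumptions, not values from this patch):
+#
+#   jdbc:postgresql:///<database>?cloudSqlInstance=<project:region:instance>
+#     &socketFactory=com.google.cloud.sql.postgres.SocketFactory
+#     &user=<user>&password=<password>
+#
+# so the "connectionName" test value should carry the instance connection name
+# in its <project:region:instance> form.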
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time scenarios + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with advanced details with connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for 
Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature new file mode 100644 index 000000000..6c68180fa --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithMacros.feature @@ -0,0 +1,53 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin design time macro scenarios + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation with macro enabled fields for basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin:
"CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostGreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostGreSQLSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature new file mode 100644 index 000000000..0eda6558c --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/DesignTimeWithValidation.feature @@ -0,0 +1,143 @@ +# +# Copyright © 2022 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify CloudSQL-PostgreSQL sink plugin error scenarios + + Scenario: Verify CloudSQLPostgreSQL sink plugin validation errors for mandatory fields + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Validate button + Then Verify mandatory property error for below listed properties: + | jdbcPluginName | + | referenceName | + | database | + | tableName | + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid reference test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName" + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid connection name test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid database + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS
plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with invalid table name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: 
"password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "invalidTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "tableName" is displaying an in-line error message: "errorMessageInvalidTableName" + + Scenario: To verify CloudSQLPostgreSQL sink plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature new file mode 100644 index 000000000..2216773cc --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTime.feature @@ -0,0 +1,146 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL 
PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink with Advanced property Connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema 
matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature new file mode 100644 index 000000000..cf6a89cbb --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/sink/RunTimeMacro.feature @@ -0,0 +1,134 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
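+#
+# Note: fields set through the "Click on the Macro button" steps are stored as
+# CDAP macros, e.g. ${cloudSQLPostgreSQLUsername}, and are resolved only at
+# preview/run time from the runtime arguments entered in the
+# "Enter runtime argument value ..." steps. An illustrative resolution, using
+# the macro and argument names from this feature:
+#
+#   tableName = ${cloudSQLPostgreSQLTableName}
+#   runtime argument: cloudSQLPostgreSQLTableName=targetTable
+#   => tableName resolves to "targetTable" when the pipeline runs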
+# + +@Cloudsqlpostgresql_Sink @Cloudsqlpostgresql_Sink_Required +Feature: CloudSQL-PostgreSQL sink - Verify data transfer from BigQuery source to CloudSQL-PostgreSQL sink with macro arguments + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TEST_TABLE @PLUGIN-1629 @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQLPostgreSQL sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL PostgreSQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter 
runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTable" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source BigQuery table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature new file mode 100644 index 000000000..fdedeaa76 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTime.feature @@ -0,0 +1,57 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
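+#
+# Note: "selectQuery" below is a key into pluginParameters.properties, not a
+# literal query. As a reminder of the shape an import query takes when split
+# generation is involved (an illustrative example, not this patch's test data):
+#
+#   select * from <schema>.<table> where $CONDITIONS
+#
+# The $CONDITIONS placeholder is substituted with a per-split predicate when
+# "numSplits" is greater than one; single-split queries may omit it.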
+# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQLPostgreSQL source plugin design time scenarios + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify CloudSQLPostgreSQL source plugin validation with connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature new file mode 100644 index 000000000..25e7ac529 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithMacro.feature @@ -0,0 +1,54 @@ 
+# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time macro scenarios + + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostGreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostGreSQLUser" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostGreSQLPassword" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "cloudSQLPostGreSQLConnectionArguments" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudSQLPostgreSQL source plugin validation with macro enabled fields for basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostGreSQLSplitBy" + Then Click on the Macro button of Property: "fetchSize" and set the value to: "cloudSQLPostGreSQLFetchSize" + Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: "cloudSQLPostGreSQLBoundingQuery" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostGreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature new file mode 100644 index 000000000..907906b4a --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/DesignTimeWithValidation.feature @@ -0,0 +1,229 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL source plugin design time validation scenarios + + Scenario: Verify CloudSQLPostgreSQL source plugin validation errors for mandatory fields + Given Open Datafusion Project to configure pipeline + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Validate button + Then Verify mandatory property error for below listed properties: + | jdbcPluginName | + | database | + | referenceName | + | importQuery | + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid reference test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudPostgreSQLInvalidReferenceName" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid connection name test data + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from
the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "invalidConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank bounding query + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBoundingQuery" + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with number of splits without split by field name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then 
Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy" + Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is not a number + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "zeroSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message when number of Split value is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "numSplits" with value: "zeroValue" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits" + + Scenario: To verify 
CloudSQLPostgreSQL source plugin validation error message when fetch size is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "fetchSize" with value: "zeroValue" + Then Click on the Validate button + Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid database + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid import query + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for 
Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with blank password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header + + Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related 
fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "invalidPassword" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidPassword" on the header diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature new file mode 100644 index 000000000..770541b2b --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTime.feature @@ -0,0 +1,237 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL Source - Run Time scenarios + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId"
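+    # Note: quoted values such as "projectId", "dataset" and "bqTargetTable" in these steps are
+    # property keys resolved from pluginParameters.properties, not literal values. A hypothetical
+    # illustration of such entries (the actual values live in that file and may differ):
+    #   projectId=e2e-test-project
+    #   dataset=e2e_test_dataset
+    #   bqTargetTable=target_table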
Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from PostgreSQL source to BigQuery sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the 
pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify pipeline failure message in logs when an invalid bounding query is provided + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Enter textarea plugin property: "importQuery" with value: "importQuery" + Then Click on the Get Schema button + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQueryValue" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + And Save and Deploy Pipeline + And Run the Pipeline in Runtime + And Wait till pipeline is in running state + And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidBoundingQuery | + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST + Scenario: To verify the pipeline fails 
during preview when an invalid bounding query is provided and the split-by field is set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Enter textarea plugin property: "importQuery" with value: "importQuery" + Then Click on the Get Schema button + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Verify the preview run status of pipeline in the logs is "failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with supported datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization
related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Verify the preview of pipeline is "success" + Then Click on preview data for CloudSQLPostgreSQL sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature new file mode 100644 index 000000000..9729ce1ef --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/feature/cloudsql-postgresql/source/RunTimeMacro.feature @@ -0,0 +1,334 @@ +# +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +@Cloudsqlpostgresql_Source @Cloudsqlpostgresql_Source_Required +Feature: CloudSQL-PostgreSQL source - Verify CloudSQL-PostgreSQL plugin data transfer with macro arguments + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key
"cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key 
"cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "targetTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid connection details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" 
+ Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidDriverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value "invalidUserName" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value "invalidPassword" for key "cloudSQLPostgreSQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST + Scenario: To verify pipeline preview fails when invalid basic details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLInvalidImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "cloudSQLPostgreSQLTableName" + Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "cloudSQLPostgreSQLSchemaName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Validate "CloudSQL PostgreSQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value 
"invalidTableNameImportQuery" for key "cloudSQLPostgreSQLInvalidImportQuery" + Then Enter runtime argument value "invalidTable" for key "cloudSQLPostgreSQLTableName" + Then Enter runtime argument value "schema" for key "cloudSQLPostgreSQLSchemaName" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLPostgreSQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLPostgreSQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLPostgreSQLPassword" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value 
"bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key "cloudSQLPostgreSQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLPostgreSQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLPostgreSQLPassword" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_SINK_TEST @BQ_SINK_TEST @PLUGIN-1526 + Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL PostgreSQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "splitBy" and set the value to: "cloudSQLPostgreSQLSplitByColumn" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery" + Then Validate "CloudSQL PostgreSQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId" + Then Click on the Macro button of Property: "datasetProjectId" and set the value to: 
"bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTable" + Then Click on the Macro button of Property: "truncateTableMacroInput" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "updateTableSchemaMacroInput" and set the value to: "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "splitByColumn" for key "cloudSQLPostgreSQLSplitByColumn" + Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqTargetTable" for key "bqTable" + Then Enter runtime argument value "bqTruncateTable" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchema" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java new file mode 100644 index 000000000..86214b9bc --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/BQValidation.java @@ -0,0 +1,268 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package io.cdap.plugin.cloudsqlpostgresql; + +import com.google.cloud.bigquery.TableResult; +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.apache.spark.sql.types.Decimal; +import org.junit.Assert; + +import java.io.IOException; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Types; +import java.time.LocalTime; +import java.time.OffsetTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Date; +import java.util.List; + +/** + * BQValidation. + */ +public class BQValidation { + static List<JsonObject> bigQueryResponse = new ArrayList<>(); + static List<Object> bigQueryRows = new ArrayList<>(); + + /** + * Extracts the entire data from the source and target tables and compares it. + * + * @param schema schema of the source table + * @param sourceTable table at the source side + * @param targetTable table at the sink side + * @return true if the values in source and target side are equal + */ + public static boolean validateDBToBQRecordValues(String schema, String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException { + getBigQueryTableData(targetTable, bigQueryRows); + for (Object rows : bigQueryRows) { + JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + bigQueryResponse.add(json); + } + String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable; + try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + + ResultSet rsSource = statement.executeQuery(getSourceQuery); + return compareResultSetandJsonData(rsSource, bigQueryResponse); + } + } + + /** + * Extracts the entire data from the source BigQuery table and the target database table and compares it. + * + * @param schema schema of the target table + * @param sourceTable table at the source side + * @param targetTable table at the sink side + * @return true if the values in source and target side are equal + */ + public static boolean validateBQToDBRecordValues(String schema, String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException { + getBigQueryTableData(sourceTable, bigQueryRows); + for (Object rows : bigQueryRows) { + JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + bigQueryResponse.add(json); + } + String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable; + try (Connection connect = CloudSqlPostgreSqlClient.getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + + ResultSet rsTarget = statement.executeQuery(getTargetQuery); + return compareResultSetandJsonData(rsTarget, bigQueryResponse); + } + } + + /** + * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects. + * + * @param table The name of the BigQuery table to fetch data from. + * @param bigQueryRows The list to store the fetched BigQuery data. + */ + private static void getBigQueryTableData(String table, List<Object> bigQueryRows) + throws IOException, InterruptedException { + + String projectId = PluginPropertyUtils.pluginProp("projectId"); + String dataset = PluginPropertyUtils.pluginProp("dataset"); + String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + table + "` AS t";
+ table + "` AS t"; + TableResult result = BigQueryClient.getQueryResult(selectQuery); + result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue())); + } + + /** + * Compares the data in the result set obtained from the Oracle database with the provided BigQuery JSON objects. + * + * @param rsSource The result set obtained from the Oracle database. + * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data. + * @return True if the result set data matches the BigQuery data, false otherwise. + * @throws SQLException If an SQL error occurs during the result set operations. + * @throws ParseException If an error occurs while parsing the data. + */ + public static boolean compareResultSetandJsonData(ResultSet rsSource, List bigQueryData) + throws SQLException { + ResultSetMetaData mdSource = rsSource.getMetaData(); + boolean result = false; + int columnCountSource = mdSource.getColumnCount(); + + if (bigQueryData == null) { + Assert.fail("bigQueryData is null"); + return result; + } + // Get the column count of the first JsonObject in bigQueryData + int jsonObjectIdx = 0; + int columnCountTarget = 0; + if (bigQueryData.size() > 0) { + columnCountTarget = bigQueryData.get(jsonObjectIdx).entrySet().size(); + } + // Compare the number of columns in the source and target + Assert.assertEquals("Number of columns in source and target are not equal", + columnCountSource, columnCountTarget); + + while (rsSource.next()) { + int currentColumnCount = 1; + while (currentColumnCount <= columnCountSource) { + String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); + int columnType = mdSource.getColumnType(currentColumnCount); + String columnName = mdSource.getColumnName(currentColumnCount); + // Perform different comparisons based on column type + switch (columnType) { + case Types.BIT: + boolean bqDateString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsBoolean(); + result = getBooleanValidation(rsSource, String.valueOf(bqDateString), columnName, columnTypeName); + Assert.assertTrue("Different values found for column : %s", result); + break; + + case Types.DECIMAL: + case Types.NUMERIC: + BigDecimal sourceDecimal = rsSource.getBigDecimal(currentColumnCount); + BigDecimal targetDecimal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsBigDecimal(); + int desiredScale = 2; // Set the desired scale (number of decimal places) + BigDecimal adjustedSourceValue = sourceDecimal.setScale(desiredScale, RoundingMode.HALF_UP); + BigDecimal adjustedTargetValue = targetDecimal.setScale(desiredScale, RoundingMode.HALF_UP); + Decimal sourceDecimalValue = Decimal.fromDecimal(adjustedSourceValue); + Decimal targetDecimalValue = Decimal.fromDecimal(adjustedTargetValue); + Assert.assertEquals("Different values found for column : %s", sourceDecimalValue, targetDecimalValue); + break; + + case Types.REAL: + float sourceReal = rsSource.getFloat(currentColumnCount); + float targetReal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsFloat(); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), + Float.compare(sourceReal, targetReal) == 0); + break; + + case Types.TIMESTAMP: + break; + + case Types.TIME: + String bqTimeString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + result = getTimeValidation(rsSource, bqTimeString, columnName, columnTypeName); + Assert.assertTrue("Different values found for column : %s", result); + break; + + case Types.BINARY: + case Types.VARBINARY: + String 
sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount))); + String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + Assert.assertEquals("Different values found for column : %s", + sourceB64String, targetB64String); + break; + + case Types.BIGINT: + long sourceVal = rsSource.getLong(currentColumnCount); + long targetVal = bigQueryData.get(jsonObjectIdx).get(columnName).getAsLong(); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceVal).equals(String.valueOf(targetVal))); + break; + + case Types.SMALLINT: + case Types.TINYINT: + case Types.INTEGER: + int sourceInt = rsSource.getInt(currentColumnCount); + int targetInt = bigQueryData.get(jsonObjectIdx).get(columnName).getAsInt(); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceInt).equals(String.valueOf(targetInt))); + break; + + case Types.DATE: + Date dateSource = rsSource.getDate(currentColumnCount); + Date dateTarget = java.sql.Date.valueOf( + bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertEquals("Different values found for column : %s", dateSource, dateTarget); + break; + + case Types.DOUBLE: + Double sourceMoney = rsSource.getDouble(currentColumnCount); + String targetMoneyStr = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + Double targetMoney; + // Remove non-numeric characters from the targetMoneyStr + targetMoneyStr = targetMoneyStr.replaceAll("[^0-9.]", ""); + targetMoney = new Double(targetMoneyStr); + Assert.assertTrue(String.format("Different values found for column: %s", columnName), + sourceMoney.compareTo(targetMoney) == 0); + break; + + case Types.VARCHAR: + case Types.CHAR: + case Types.SQLXML: + case Types.OTHER: + default: + String sourceValue = rsSource.getString(currentColumnCount); + JsonElement jsonElement = bigQueryData.get(jsonObjectIdx).get(columnName); + String targetValue = (jsonElement != null && !jsonElement.isJsonNull()) ? 
jsonElement.getAsString() : null; + Assert.assertEquals( + String.format("Different %s values found for column : %s", columnTypeName, columnName), + String.valueOf(sourceValue), String.valueOf(targetValue)); + } + currentColumnCount++; + } + jsonObjectIdx++; + } + Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table", + rsSource.next()); + return true; + } + + private static boolean getBooleanValidation(ResultSet rsSource, String bqDateString, String columnName, + String columnTypeName) throws SQLException { + switch (columnTypeName) { + case "bit": + byte source = rsSource.getByte(columnName); + boolean sourceAsBoolean = source != 0; + return String.valueOf(sourceAsBoolean).equals(String.valueOf(bqDateString)); + case "bool": + boolean sourceValue = rsSource.getBoolean(columnName); + return String.valueOf(sourceValue).equals(String.valueOf(bqDateString)); + default: + return false; + } + } + + private static boolean getTimeValidation(ResultSet rsSource, String bqDateString, String columnName, String + columnTypeName) throws SQLException { + switch (columnTypeName) { + case "time": + Time sourceTime = rsSource.getTime(columnName); + Time targetTime = Time.valueOf(bqDateString); + return sourceTime.equals(targetTime); + case "timetz": + Time sourceT = rsSource.getTime(columnName); + LocalTime sourceLocalTime = sourceT.toLocalTime(); + OffsetTime targetOffsetTime = OffsetTime.parse(bqDateString, DateTimeFormatter.ISO_OFFSET_TIME); + LocalTime targetLocalTime = targetOffsetTime.toLocalTime(); + return String.valueOf(sourceLocalTime).equals(String.valueOf(targetLocalTime)); + + default: + return false; + } + } +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java new file mode 100644 index 000000000..69412d43c --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/CloudSqlPostgreSqlClient.java @@ -0,0 +1,174 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package io.cdap.plugin.cloudsqlpostgresql; + +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.junit.Assert; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * CloudSQLPostgreSQL client. 
+ */
+
+public class CloudSqlPostgreSqlClient {
+  public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+    Class.forName("org.postgresql.Driver");
+    String database = PluginPropertyUtils.pluginProp("databaseName");
+    String instanceConnectionName = System.getenv("CONNECTION_NAME");
+    String username = System.getenv("CLOUDSQL_POSTGRESQL_USERNAME");
+    String password = System.getenv("CLOUDSQL_POSTGRESQL_PASSWORD");
+
+    String jdbcUrl = String.format(
+      "jdbc:postgresql://google/%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.postgres.SocketFactory&user=%s&password=%s",
+      database, instanceConnectionName, username, password);
+    Connection conn = DriverManager.getConnection(jdbcUrl);
+    System.out.println("Connected to the database successfully");
+    return conn;
+  }
+
+  public static int countRecord(String table, String schema) throws SQLException, ClassNotFoundException {
+    String countQuery = "SELECT COUNT(*) as total FROM " + schema + "." + table;
+    try (Connection connect = getCloudSqlConnection();
+         Statement statement = connect.createStatement();
+         ResultSet rs = statement.executeQuery(countQuery)) {
+      int num = 0;
+      while (rs.next()) {
+        num = rs.getInt(1);
+      }
+      return num;
+    }
+  }
+
+  /**
+   * Extracts entire data from source and target tables.
+   *
+   * @param sourceTable table at the source side
+   * @param targetTable table at the sink side
+   * @param schema      schema of both tables
+   * @return true if the values in source and target side are equal
+   */
+  public static boolean validateRecordValues(String sourceTable, String targetTable, String schema)
+    throws SQLException, ClassNotFoundException {
+    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+    String getTargetQuery = "SELECT * FROM " + schema + "." + targetTable;
+    try (Connection connect = getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
+      return compareResultSetData(rsSource, rsTarget);
+    }
+  }
+
+  /**
+   * Compares the result set data of the source table and the sink table.
+   * @param rsSource result set of the source table data
+   * @param rsTarget result set of the target table data
+   * @return true if rsSource matches rsTarget
+   */
+  public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    ResultSetMetaData mdTarget = rsTarget.getMetaData();
+    int columnCountSource = mdSource.getColumnCount();
+    int columnCountTarget = mdTarget.getColumnCount();
+    Assert.assertEquals("Number of columns in source and target are not equal",
+                        columnCountSource, columnCountTarget);
+    while (rsSource.next() && rsTarget.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        if (columnType == Types.TIMESTAMP) {
+          // Read both timestamps in UTC on a pure Gregorian calendar so values on either side
+          // are interpreted consistently.
+          GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
+          gc.setGregorianChange(new Date(Long.MIN_VALUE));
+          Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
+          Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
+          Assert.assertEquals(String.format("Different values found for column : %s", columnName), sourceTS, targetTS);
+        } else {
+          String sourceString = rsSource.getString(currentColumnCount);
+          String targetString = rsTarget.getString(currentColumnCount);
+          Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                              sourceString, targetString);
+        }
+        currentColumnCount++;
+      }
+    }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
+                       rsTarget.next());
+    return true;
+  }
+
+  public static void createSourceTable(String sourceTable, String schema) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection();
+         Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createSourceTableQuery = "CREATE TABLE " + schema + "." + sourceTable + datatypesColumns;
+      statement.executeUpdate(createSourceTableQuery);
+      System.out.println(createSourceTableQuery);
+
+      // Insert dummy data.
+      String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues");
+      String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+      statement.executeUpdate("INSERT INTO " + schema + "." + sourceTable + " " + datatypesColumnsList + " "
+                                + datatypesValues);
+    }
+  }
+
+  public static void createTargetTable(String targetTable, String schema) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection();
+         Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void createTargetPostgresqlTable(String targetTable, String schema) throws SQLException,
+    ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection();
+         Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("bigQueryDatatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + schema + "." + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void dropTables(String[] tables, String schema) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection();
+         Statement statement = connect.createStatement()) {
+      for (String table : tables) {
+        String dropTableQuery = "DROP TABLE " + schema + "." + table;
+        statement.executeUpdate(dropTableQuery);
+      }
+    }
+  }
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java
new file mode 100644
index 000000000..c51429a58
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunner.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute CloudSQLPostgreSQL Sink plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@Cloudsqlpostgresql_Sink and not @PLUGIN-1629 and not @PLUGIN-1526"},
+  /* TODO: Enable tests once the following issues are fixed:
+     https://cdap.atlassian.net/browse/PLUGIN-1629,
+     https://cdap.atlassian.net/browse/PLUGIN-1526
+   */
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink",
+    "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.json",
+    "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink.xml"}
+)
+public class TestRunner {
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java
new file mode 100644
index 000000000..e73132982
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/TestRunnerRequired.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only required CloudSQLPostgreSQL Sink plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@Cloudsqlpostgresql_Sink_Required"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-sink-required",
+    "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.json",
+    "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-sink-required.xml"}
+)
+public class TestRunnerRequired {
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java
new file mode 100644
index 000000000..007d0ff52
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sinkrunner/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the CloudSQLPostgreSQL sink plugin.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sinkrunner;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java
new file mode 100644
index 000000000..c1700f1b4
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunner.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute CloudSQLPostgreSQL Source plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@Cloudsqlpostgresql_Source and not @PLUGIN-1526"},
+  /* TODO: Enable tests once the following issue is fixed:
+     https://cdap.atlassian.net/browse/PLUGIN-1526
+   */
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source",
+    "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.json",
+    "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source.xml"}
+)
+public class TestRunner {
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java
new file mode 100644
index 000000000..ba6c93f45
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/TestRunnerRequired.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only required CloudSQLPostgreSQL Source plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.plugin.cloudsqlpostgresql.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+  tags = {"@Cloudsqlpostgresql_Source_Required"},
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/cloudsqlpostgresql-source-required",
+    "json:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.json",
+    "junit:target/cucumber-reports/cucumber-cloudsqlpostgresql-source-required.xml"}
+)
+public class TestRunnerRequired {
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java
new file mode 100644
index 000000000..02144ec6e
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/runners/sourcerunner/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the CloudSQLPostgreSQL source plugin.
+ */
+package io.cdap.plugin.cloudsqlpostgresql.runners.sourcerunner;
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
new file mode 100644
index 000000000..041cdb47d
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/cloudsqlpostgresql/stepsdesign/CloudSqlPostgreSql.java
@@ -0,0 +1,73 @@
+package io.cdap.plugin.cloudsqlpostgresql.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.cloudsqlpostgresql.BQValidation;
+import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+/**
+ * CloudSQLPostgreSQL plugin related step design.
+ */
+public class CloudSqlPostgreSql implements CdfHelper {
+
+  @Then("Click on preview data for CloudSQLPostgreSQL sink")
+  public void clickOnPreviewDataForCloudSQLPostgreSQLSink() {
+    openSinkPluginPreviewData("CloudSQLPostgreSQL");
+  }
+
+  @Then("Validate the values of records transferred to target table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable() throws
+    SQLException, ClassNotFoundException {
+    int countRecords = CloudSqlPostgreSqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"),
+                                                            PluginPropertyUtils.pluginProp("schema"));
+    Assert.assertEquals("Number of records transferred should be equal to records out",
+                        countRecords, recordOut());
+    BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords);
+    boolean recordsMatched = CloudSqlPostgreSqlClient.validateRecordValues(
+      PluginPropertyUtils.pluginProp("sourceTable"),
+      PluginPropertyUtils.pluginProp("targetTable"),
+      PluginPropertyUtils.pluginProp("schema"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value "
+                        + "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateDBToBQRecordValues(PluginPropertyUtils.pluginProp("schema"),
+                                                                     PluginPropertyUtils.pluginProp("sourceTable"),
+                                                                     PluginPropertyUtils.pluginProp("bqTargetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value "
+                        + "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target CloudSQLPostGreSQL table is equal to the values from source "
+    + "BigQuery table")
+  public void validateTheValuesOfRecordsTransferredToTargetCloudSQLPostGreSQLTableIsEqualToTheValuesFromSourceBigQueryTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable"));
+    BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount);
+    Assert.assertEquals("Out records should match with target PostgreSQL table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateBQToDBRecordValues(PluginPropertyUtils.pluginProp("schema"),
+                                                                     PluginPropertyUtils.pluginProp("bqSourceTable"),
+                                                                     PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value "
+                        + "of the records in the source table", recordsMatched);
+  }
+}
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
new file mode 100644
index 000000000..359b241dc
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetUpHooks.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.common.stepsdesign;
+
+import com.google.cloud.bigquery.BigQueryException;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.cloudsqlpostgresql.CloudSqlPostgreSqlClient;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.SQLException;
+import java.util.NoSuchElementException;
+import java.util.UUID;
+
+/**
+ * Cloudsql-postgresql test hooks.
+ */
+
+public class TestSetUpHooks {
+
+  @Before(order = 1)
+  public static void setTableName() {
+    String randomString = RandomStringUtils.randomAlphabetic(10).toLowerCase();
+    String sourceTableName = String.format("sourcetable_%s", randomString);
+    String targetTableName = String.format("targettable_%s", randomString);
+    PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName);
+    PluginPropertyUtils.addPluginProp("targetTable", targetTableName);
+    String schema = PluginPropertyUtils.pluginProp("schema");
+    PluginPropertyUtils.addPluginProp("selectQuery",
+                                      String.format("select * from %s.%s", schema, sourceTableName));
+  }
+
+  @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_SOURCE_TEST")
+  public static void createTables() throws SQLException, ClassNotFoundException {
+    CloudSqlPostgreSqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"),
+                                               PluginPropertyUtils.pluginProp("schema"));
+    CloudSqlPostgreSqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                               PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @After(order = 2, value = "@CLOUDSQLPOSTGRESQL_SINK_TEST")
+  public static void dropTables() throws SQLException, ClassNotFoundException {
+    CloudSqlPostgreSqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("sourceTable"),
+                                          PluginPropertyUtils.pluginProp("targetTable")},
+                                        PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_TEST_TABLE")
+  public static void createPostgresqlTestTable() throws SQLException, ClassNotFoundException {
+    CloudSqlPostgreSqlClient.createTargetPostgresqlTable(PluginPropertyUtils.pluginProp("targetTable"),
+                                                         PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @After(order = 1, value = "@CLOUDSQLPOSTGRESQL_TEST_TABLE")
+  public static void dropTestTables() throws SQLException, ClassNotFoundException {
+    CloudSqlPostgreSqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("targetTable")},
+                                        PluginPropertyUtils.pluginProp("schema"));
+  }
+
+  @Before(order = 1, value = "@BQ_SINK_TEST")
+  public static void setTempTargetBQTableName() {
+    String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
+    PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName);
+    BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName);
+  }
+
+  @After(order = 1, value = "@BQ_SINK_TEST")
+  public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
+    String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable");
+    try {
+      BigQueryClient.dropBqQuery(bqTargetTableName);
+      BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully");
+      PluginPropertyUtils.removePluginProp("bqTargetTable");
+    } catch (BigQueryException e) {
+      if (e.getMessage().contains("Not found: Table")) {
+        BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist");
+      } else {
+        Assert.fail(e.getMessage());
+      }
+    }
+  }
+
+  /**
+   * Create BigQuery table.
+   */
+  @Before(order = 1, value = "@BQ_SOURCE_TEST")
+  public static void createTempSourceBQTable() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"),
+                                   PluginPropertyUtils.pluginProp("InsertBQDataQueryFile"));
+  }
+
+  @After(order = 1, value = "@BQ_SOURCE_TEST")
+  public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
+    String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
+    BigQueryClient.dropBqQuery(bqSourceTable);
+    BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
+    PluginPropertyUtils.removePluginProp("bqSourceTable");
+  }
+
+  private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile)
+    throws IOException, InterruptedException {
+    String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5).replaceAll("-", "_");
+
+    String createTableQuery = StringUtils.EMPTY;
+    try {
+      createTableQuery = new String(Files.readAllBytes(Paths.get(
+        TestSetUpHooks.class.getResource("/" + bqCreateTableQueryFile).toURI())), StandardCharsets.UTF_8);
+      createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+        .replace("TABLE_NAME", bqSourceTable);
+    } catch (Exception e) {
+      BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage());
+      Assert.fail("Exception in BigQuery testdata prerequisite setup "
+                    + "- error in reading create table query file " + e.getMessage());
+    }
+
+    String insertDataQuery = StringUtils.EMPTY;
+    try {
+      insertDataQuery = new String(Files.readAllBytes(Paths.get(
+        TestSetUpHooks.class.getResource("/" + bqInsertDataQueryFile).toURI())), StandardCharsets.UTF_8);
+      insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+        .replace("TABLE_NAME", bqSourceTable);
+    } catch (Exception e) {
+      BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage());
+      Assert.fail("Exception in BigQuery testdata prerequisite setup "
+                    + "- error in reading insert data query file " + e.getMessage());
+    }
+
BigQueryClient.getSoleQueryResult(createTableQuery); + try { + BigQueryClient.getSoleQueryResult(insertDataQuery); + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); + + } + +} diff --git a/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java new file mode 100644 index 000000000..63f8efabc --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** + * Package contains the stepDesign for common features. + */ +package io.cdap.plugin.common.stepsdesign; diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties new file mode 100644 index 000000000..3d7958094 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties @@ -0,0 +1,23 @@ +validationSuccessMessage=No errors found. +errorMessageCloudPostgreSQLInvalidReferenceName=Invalid reference name 'invalidRef&^*&&*'. Supported characters are: \ + letters, numbers, and '_', '-', '.', or '$'. +errorMessageBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query. +errorMessageBlankSplitBy=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name. +errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1. +errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: Value of \ + field class io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig.numSplits is expected to be a number. +errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer. +errorMessageInvalidSourceDatabase=SQL error while getting query schema: FATAL: database "invalidDatabase" does not exist +errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\ + \ to 1. Include '$CONDITIONS' in the Import Query +errorMessageBlankUsername=Username is required when password is given. +errorMessageBlankPassword=SQL error while getting query schema: The server requested password-based authentication, \ + but no password was provided. 
+errorMessageInvalidPassword=SQL error while getting query schema: FATAL: password authentication failed for user
+errorMessageInvalidSourceHost=SQL error while getting query schema: The connection attempt failed.
+errorMessageInvalidTableName=Table 'table' does not exist. Ensure table '"table"' is set correctly and that the
+errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table '"TARGETTABLE_
+errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \
+  number of columns: 0.. Please check the system logs for more details.
+errorMessageConnectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to \
+  a public CloudSQL PostgreSQL instance.
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties
new file mode 100644
index 000000000..4f8cb9508
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties
@@ -0,0 +1,18 @@
+jdbcDriverName=select-jdbcPluginName
+jdbcDriverNameMacroInput=jdbcPluginName
+username=user
+password=password
+database=database
+referenceName=referenceName
+importQuery=importQuery
+boundingQuery=boundingQuery
+connectionArguments=connectionArguments
+truncateTable=switch-truncateTable
+truncateTableMacroInput=truncateTable
+updateTableSchema=switch-allowSchemaRelaxation
+updateTableSchemaMacroInput=allowSchemaRelaxation
+outputSchemaMacroInput=Output Schema-macro-input
+projectId=project
+datasetProjectId=datasetProject
+dataset=dataset
+table=table
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
new file mode 100644
index 000000000..fb415f9a7
--- /dev/null
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
@@ -0,0 +1,103 @@
+driverName=cloudsql-postgresql
+username=CLOUDSQL_POSTGRESQL_USERNAME
+password=CLOUDSQL_POSTGRESQL_PASSWORD
+databaseName=test_automation_db
+schema=public
+
+datatypesColumns=( id varchar(100) primary key, col1 bpchar, col2 bpchar(10), col3 varchar, col4 varchar(3), \
+  col5 bytea, col6 int2, col7 int4, col8 int8, col10 numeric(10, 4), col11 numeric(10), col12 float4, col13 float8, \
+  col14 money, col15 text, col16 name, col17 float8, col18 numeric(38), col22 timestamp, col23 timestamptz, \
+  col24 time, col25 interval, col26 interval, col27 date, col28 timetz, col29 point, col30 line, col31 lseg, \
+  col32 box, col33 path, col34 polygon, col35 circle, col36 cidr, col37 inet, col38 macaddr, col39 macaddr8, \
+  col40 bit(2), col41 varbit(5), col42 json, col43 jsonb, col44 _pg_lsn, col45 pg_snapshot, col46 tsquery, \
+  col47 tsvector, col48 txid_snapshot, col49 uuid, col50 xml, col51 int4range, col52 int8range, col53 numrange, \
+  col54 tsrange, col55 tstzrange, col56 daterange, col57 pg_lsn, col58 int4, col59 int2, col60 int8, col61 real, \
+  col62 smallint, col63 serial, col64 smallserial, col65 double precision, col66 bigint, col67 bigserial, col68 boolean)
+
+datatypesColumnsList=( id, col1, col2, col3, col4, col5, col6 , col7 , col8 , col10, col11, col12, col13, col14, \
+  col15, col16, col17, col18, col22, col23, col24, col25, col26, col27, col28, col29, col30, col31, col32, col33, \
+  col34, col35, col36, col37, col38, col39, col40, col41, col42, col43, col44, col45, col46, col47, col48, col49, \
+  col50, col51,
col52, col53, col54, col55, col56, col57, col58, col59, col60, col61, col62, col63, col64, col65,\ + col66, col67, col68 ) + +datatypesValues=VALUES ('User5', 'M', 'ABC...1234', 'B', 'ABC', decode('48656C6C6F20576F726C6421','hex'), 123, 123, \ + 123456, 123.4567, 123456789, 123.456, 123.456, 100.26, 'Hello World!', 'User 5', 123.456, 100, \ + '2023-01-01 07:30:00.000', '2023-01-01 15:30:00.000', '02:00:00', '6 mons 02:30:00'::interval, \ + '6 mons 02:30:00'::interval, '2001-01-01', '02:00:00', '(21.0,32.0)'::point, '{2.0,3.0,4.0}'::line, \ + '[(2.0,3.0),(4.0,5.0)]'::lseg, '(4.0,5.0),(2.0,3.0)'::box, '((2.0,3.0),(4.0,5.0),(6.0,7.0))'::path, \ + '((2.0,3.0),(4.0,5.0),(6.0,1.0))'::polygon, '<(4.0,5.0),2.0>'::circle, '192.168.0.0/24'::cidr, \ + '192.168.0.1/24'::inet, '08:00:2b:01:02:03'::macaddr, '08:00:2b:01:02:03:04:05'::macaddr8, '00', '11100', \ + '{"bar": "baz", "balance": 7.77, "active": false}'::json, '{"bar": "baz", "active": false, "balance": 7.77}'::jsonb, \ + '{16/B374D848}', NULL, '''fat'' & ''rat'''::tsquery, \ + '''a'' ''and'' ''ate'' ''cat'' ''fat'' ''mat'' ''on'' ''rat'' ''sat'''::tsvector, NULL, \ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid, 'xml ''bar''', '[3,7)'::int4range, '[3,7)'::int8range, \ + '(1.0,14.0)'::numrange, '["2010-01-01 14:30:00","2010-01-01 15:30:00")'::tsrange, \ + '["2010-01-01 20:00:00+05:30","2010-01-01 21:00:00+05:30")'::tstzrange, '[1992-03-21,1994-06-26)'::daterange, \ + '16/B374D848'::pg_lsn, 2, 2, 2, '1234.5679', '600', DEFAULT, DEFAULT, '61.823765812', '2500000000000', \ + DEFAULT, false); + +datatypesSchema=[{"key":"id","value":"string"},{"key":"col1","value":"string"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"string"},{"key":"col4","value":"string"},{"key":"col5","value":"bytes"},\ + {"key":"col6","value":"int"},{"key":"col7","value":"int"},{"key":"col8","value":"long"},\ + {"key":"col10","value":"decimal"},{"key":"col11","value":"decimal"},{"key":"col12","value":"float"},\ + {"key":"col13","value":"double"},{"key":"col14","value":"string"},{"key":"col15","value":"string"},\ + {"key":"col16","value":"string"},{"key":"col17","value":"double"},{"key":"col18","value":"decimal"},\ + {"key":"col22","value":"timestamp"},{"key":"col23","value":"timestamp"},{"key":"col24","value":"time"},\ + {"key":"col25","value":"string"},{"key":"col26","value":"string"},{"key":"col27","value":"date"},\ + {"key":"col28","value":"string"},{"key":"col29","value":"string"},{"key":"col30","value":"string"},\ + {"key":"col31","value":"string"},{"key":"col32","value":"string"},{"key":"col33","value":"string"},\ + {"key":"col34","value":"string"},{"key":"col35","value":"string"},{"key":"col36","value":"string"},\ + {"key":"col37","value":"string"},{"key":"col38","value":"string"},{"key":"col39","value":"string"},\ + {"key":"col40","value":"string"},{"key":"col41","value":"string"},{"key":"col42","value":"string"},\ + {"key":"col43","value":"string"},{"key":"col44","value":"string"},{"key":"col45","value":"string"},\ + {"key":"col46","value":"string"},{"key":"col47","value":"string"},{"key":"col48","value":"string"},\ + {"key":"col49","value":"string"},{"key":"col50","value":"string"},{"key":"col51","value":"string"},\ + {"key":"col52","value":"string"},{"key":"col53","value":"string"},{"key":"col54","value":"string"},\ + {"key":"col55","value":"string"},{"key":"col56","value":"string"},{"key":"col57","value":"string"},\ + {"key":"col58","value":"int"},{"key":"col59","value":"int"},{"key":"col60","value":"long"}, \ + 
{"key":"col61","value":"float"},{"key":"col62","value":"int"},{"key":"col63","value":"int"},\ + {"key":"col64","value":"int"},{"key":"col65","value":"double"},{"key":"col66","value":"long"},\ + {"key":"col67","value":"long"},{"key":"col68","value":"boolean"}] + +#CLOUDSQLPOSTGRESQL Invalid Properties +invalidRef=invalidRef&^*&&* +invalidDatabaseName=invalidDB +invalidImportQuery=select +invalidTableNameImportQuery=select * from abc; +invalidDriverName=abcdriver +invalidUserName=testUser +invalidPassword=testPassword +invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table +invalidBoundingQueryValue=select; +invalidTable=table +invalidConnectionName=abd3 + +#CLOUDSQLPOSTGRESQL Valid Properties +connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}] +connectionTimeout=150 +numberOfSplits=2 +zeroValue=0 +splitByColumn=ID +importQuery = where $CONDITIONS +connectionName=CONNECTION_NAME + +#bq properties +projectId=cdf-athena +dataset=test_automation +bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col5","value":"decimal"},\ + {"key":"col6","value":"timestamp"},{"key":"col7","value":"decimal"},{"key":"col8","value":"boolean"},\ + {"key":"col9","value":"long"},{"key":"col10","value":"time"}] + +#bq macro properties +bqTruncateTable=true +bqUpdateTableSchema=true + +#bq queries file path +CreateBQTableQueryFile=testdata/BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=testdata/BigQuery/BigQueryInsertDataQuery.txt + +#BIGQUERY Datatypes +bigQueryDatatypesColumns=(col1 bytea, col2 varchar(100), col3 date, col4 double precision, col5 numeric(5, 2), \ + col6 timestamp, col7 numeric(5, 2), col8 boolean, col9 int8, col10 time) +bigQueryDatatypesColumnsList=(col1,col2,col3,col4,col5,col6,col7,col8,col9,col10) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt new file mode 100644 index 000000000..1799b4a36 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableQuery.txt @@ -0,0 +1,2 @@ +create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col5 NUMERIC, col6 TIMESTAMP, +col7 BIGNUMERIC, col8 BOOL, col9 INT, col10 TIME) diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt new file mode 100644 index 000000000..a2829c8d5 --- /dev/null +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataQuery.txt @@ -0,0 +1,5 @@ +insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col5, col6, col7, col8, col9, col10) values +(b'01011011','30','2021-01-28',61.823765812,500.22, +'2019-03-10 04:50:01 UTC',500.21, false,200,'21:26:00'), +(b'00011011','10','2021-01-21',51.823765812,500.22, +'2018-03-10 04:50:01 UTC',500.21, true,206,'20:26:00');