From 4ed945b347c32d0e5c5348173048e63c0cd46137 Mon Sep 17 00:00:00 2001 From: priyabhatnagar Date: Fri, 26 May 2023 13:52:41 +0530 Subject: [PATCH] bq validation --- .../CloudMySqlDesignTimeValidation.feature | 43 ++++-- .../CloudMySqlDesignTimeWithMacro.feature | 23 +++- .../features/sink/CloudMySqlRunTime.feature | 97 ++++++++++++- .../sink/CloudMySqlRunTimeMacro.feature | 65 ++++++++- .../source/CloudMySqlDesignTime.feature | 1 + .../CloudMySqlDesignTimeVaidation.feature | 20 +-- .../CloudMySqlDesignTimeWithMacro.feature | 13 +- .../features/source/CloudMySqlRunTime.feature | 42 +++--- .../source/CloudMySqlRunTimeMacro.feature | 30 ++-- .../cdap/plugin/CloudMySql/BQValidation.java | 129 ++++++++++++++---- .../plugin/CloudMySql/runners/TestRunner.java | 7 +- .../CloudMySql/runners/package-info.java | 21 +++ .../CloudMySql/stepsdesign/CloudMySql.java | 5 - .../CloudMySql/stepsdesign/CloudMysql.java | 66 +++++++++ .../CloudMySql/stepsdesign/package-info.java | 21 +++ .../java/io/cdap/plugin/CloudMySqlClient.java | 44 +++--- .../common/stepsdesign/TestSetupHooks.java | 19 +-- .../BigQuery/BigQueryCreateTableQuery.txt | 4 +- .../BigQuery/BigQueryInsertDataQuery.txt | 6 +- .../resources/errorMessage.properties | 5 + .../resources/pluginParameters.properties | 25 ++-- .../oracle/OracleSourceSchemaReader.java | 3 + 22 files changed, 528 insertions(+), 161 deletions(-) create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java delete mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature 
b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature index ea6f744e2..7039062d4 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature @@ -1,4 +1,19 @@ -Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scenarios +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+ +@CloudMySql +Feature: CloudMySql sink- Verify CloudsqlMysql sink plugin design time validation scenarios Scenario: To verify CloudMySql sink plugin validation error message with invalid database Given Open Datafusion Project to configure pipeline @@ -10,15 +25,15 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter textarea plugin property: "importQuery" with value: "insertQuery" -# Then Click on the Get Schema button -# Then Verify the Output Schema matches the Expected Schema: "outputSchema" -# Then Validate "CloudSQL MySQL" plugin properties + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" @@ -30,7 +45,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Enter input plugin property: "database" with value: "invalidDatabaseName" Then Enter input plugin 
property: "tableName" with value: "mytable" Then Click on the Validate button -# Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header Scenario: To verify CloudMySql sink plugin validation error message with invalid tablename Given Open Datafusion Project to configure pipeline @@ -42,26 +57,26 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter textarea plugin property: "importQuery" with value: "insertQuery" -# Then Click on the Get Schema button -# Then Verify the Output Schema matches the Expected Schema: "outputSchema" -# Then Validate "CloudSQL MySQL" plugin properties + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then 
Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter input plugin property: "tableName" with value: "Invalidtable" Then Click on the Validate button -# Then Verify that the Plugin Property: "table" is displaying an in-line error message: "errorMessageInvalidTableName" + Then Verify that the Plugin Property: "table" is displaying an in-line error message: "errorMessageInvalidTableName" Scenario: To verify CloudMySql sink plugin validation error message with invalid reference Name Given Open Datafusion Project to configure pipeline @@ -70,7 +85,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -87,7 +102,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time 
validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature index 9ffda6fc4..a2de617c0 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature @@ -1,3 +1,18 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+ +@CloudMySql Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenarios Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for connection section @@ -6,7 +21,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" @@ -14,7 +29,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "TestDatabase" Then Click on the Validate button -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Scenario: To verify cloudsql sink plugin validation with macro enabled fields for basic section @@ -23,7 +38,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: 
"connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields @@ -31,5 +46,5 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar Then Replace input plugin property: "database" with value: "TestDatabase" Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" Then Click on the Validate button -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page \ No newline at end of file diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature index fb6cc5374..d63105bd8 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature @@ -15,7 +15,7 @@ @CloudMySql Feature: CloudMySql Sink - Run time scenarios - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -24,7 +24,7 @@ Feature: CloudMySql Sink - Run time scenarios And Enter input plugin property: "referenceName" with value: "Reference" And Replace input plugin property: "project" with value: "projectId" And Enter input plugin property: "datasetProject" with value: "datasetprojectId" - And Enter input plugin property: "dataset" with value: "dataset" + And Replace input plugin property: "dataset" with value: 
"dataset" And Enter input plugin property: "table" with value: "bqSourceTable" Then Click on the Get Schema button Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" @@ -36,7 +36,7 @@ Feature: CloudMySql Sink - Run time scenarios Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -57,7 +57,98 @@ Feature: CloudMySql Sink - Run time scenarios Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + 
And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till 
 pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink with Advanced property Connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + 
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature index 97d537c70..be729b396 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -15,7 +15,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -34,7 +34,7 @@ Feature: CloudMySql Sink - 
Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Click on the Macro button of Property: "user" and set the value to: "Username" Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -63,8 +63,9 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -82,7 +83,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related 
fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -107,6 +108,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: Verify pipeline failure message in logs when user provides invalid Table Name of plugin with Macros @@ -126,7 +128,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -142,6 +144,9 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Wait till pipeline is in running state Then Open and capture logs And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidTableName | @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: Verify pipeline failure message in logs when user provides invalid credentials of plugin with Macros @@ -162,7 
+167,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Click on the Macro button of Property: "user" and set the value to: "Username" Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -179,4 +184,52 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Wait till pipeline is in running state Then Open and capture logs And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidCredentials | + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in advance section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin 
property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Click on the Macro button of Property: "connectionTimeout" and set the value to: "ConnectionTimeout" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "connectionTimeout" for key "ConnectionTimeout" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "connectionTimeout" for key "ConnectionTimeout" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of 
records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature index 6041602f2..0822fc381 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature @@ -23,6 +23,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature index e1494327b..d21495919 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature @@ -21,7 +21,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: 
"instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -37,7 +37,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -54,7 +54,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input 
plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -70,7 +70,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" @@ -85,7 +85,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" @@ -100,7 +100,7 @@ Feature: CloudMySql source- Verify CloudMySql 
source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -117,7 +117,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -135,7 +135,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: 
"public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -152,7 +152,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -169,7 +169,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" 
with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature index 216a14c28..a24e45227 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature @@ -15,38 +15,37 @@ @CloudMySql Feature: CloudMySql source- Verify CloudMySql source plugin design time macro scenarios + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify CloudMySql source plugin validation with macro enabled fields for connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "DriverName" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList" Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: 
"DatabaseName" - Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Validate button Then Close the Plugin Properties page + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify cloudsql source plugin validation with macro enabled fields for basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Validate button Then Close the Plugin Properties page - - - diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature index ad700116c..5700f2900 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature +++ 
b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature @@ -17,7 +17,7 @@ Feature: CloudMySql Source - Run time scenarios Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery sink - @CLOUDMYSQL_SOURCE_TEST + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -28,11 +28,9 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Enter input plugin property: "user" with value: "username" Then Enter input plugin property: "password" with value: "password" -# Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields -# Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "DatabaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" @@ -50,10 +48,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline 
in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -61,8 +56,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs - #Then Validate the values of records transferred to target Big Query table is equal to the values from source table - + Then Validate the values of records transferred to target Big Query table is equal to the values from source table @CLOUDMYSQL_SOURCE_DATATYPES_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully with all datatypes Given Open Datafusion Project to configure pipeline @@ -74,7 +68,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -104,9 +98,10 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs - #Then Validate the values of records transferred to target Big Query table is equal 
to the values from source table + Then Validate the values of records transferred to target Big Query table is equal to the values from source table - @CLOUDMYSQL_SOURCE_DATATYPES_TEST + + @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670 Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -117,20 +112,20 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "DatabaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Get Schema button -# Then Verify the Output Schema matches the Expected Schema: "OutputSchema" + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: 
"public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -147,8 +142,9 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Wait till pipeline is in running state Then Open and capture logs Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table - @CLOUDMYSQL_SOURCE_DATATYPES_TEST + @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670 Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql successfully when connection arguments are set Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -159,7 +155,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields 
Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" @@ -172,7 +168,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -189,6 +185,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Wait till pipeline is in running state Then Open and capture logs Then Verify the pipeline status is "Succeeded" + Then Validate the values of records transferred to target table is equal to the values from source table @CLOUDMYSQL_SOURCE_DATATYPES_TEST Scenario: Verify user should not be able to deploy and run the pipeline when plugin is configured with invalid bounding query @@ -201,7 +198,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and 
Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -217,7 +214,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -231,3 +228,6 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Wait till pipeline is in running state Then Open and capture logs And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidBoundingQuery | diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature index ada6144bc..69d29c030 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -26,7 +26,7 @@ Feature: 
CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Click on the Macro button of Property: "user" and set the value to: "Username" Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -37,7 +37,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -66,6 +66,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table @CLOUDMYSQL_SOURCE_TEST Scenario: To verify data is 
getting transferred from CloudMySql to CloudMySql successfully using macro arguments in basic section @@ -78,7 +79,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -89,7 +90,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -112,6 +113,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Open and capture logs Then Verify the pipeline status is "Succeeded" 
Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table @CLOUDMYSQL_SOURCE_TEST Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in advance section @@ -124,7 +126,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -137,7 +139,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" 
with value: "RefName" @@ -148,7 +150,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Save the pipeline Then Preview and run the pipeline Then Enter runtime argument value "fetchSize" for key "fetchSize" - Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "splitby" for key "SplitBy" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state Then Verify the preview of pipeline is "success" @@ -156,12 +158,13 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Deploy the pipeline Then Run the Pipeline in Runtime Then Enter runtime argument value "fetchSize" for key "fetchSize" - Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "splitby" for key "SplitBy" Then Run the Pipeline in Runtime with runtime arguments Then Wait till pipeline is in running state Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in connection section @@ -174,7 +177,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Click on the Macro button of Property: "user" and set the 
value to: "Username" Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -209,6 +212,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in basic section @@ -221,7 +225,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -252,6 +256,8 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink 
successfully using macro arguments in advance section @@ -264,7 +270,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -299,3 +305,5 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java index 850ea08a1..97217708c 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java @@ -3,28 +3,30 @@ import com.google.cloud.bigquery.TableResult; import com.google.gson.Gson; import com.google.gson.JsonObject; +import com.google.type.Decimal; import io.cdap.e2e.utils.BigQueryClient; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.plugin.CloudMySqlClient; +import io.cdap.plugin.common.stepsdesign.TestSetupHooks; import 
org.junit.Assert; - +import org.junit.Test; import java.io.IOException; import java.sql.*; +import java.sql.Date; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Base64; -import java.util.Date; -import java.util.List; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; /** * BQValidation */ public class BQValidation { - public static void main(String[] args) { -// validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable) - } + + public static List bigQueryResponse = new ArrayList<>(); + public static List bigQueryRows = new ArrayList<>(); /** * Extracts entire data from source and target tables. @@ -33,23 +35,39 @@ public static void main(String[] args) { * @return true if the values in source and target side are equal */ - public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable) - throws SQLException, ClassNotFoundException, ParseException, IOException, InterruptedException { - List jsonResponse = new ArrayList<>(); - List bigQueryRows = new ArrayList<>(); - getBigQueryTableData(targetTable, bigQueryRows); + + public static boolean validateBQAndDBRecordValues(String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException { + getBigQueryTableData(sourceTable, bigQueryRows); for (Object rows : bigQueryRows) { JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); - jsonResponse.add(json); + bigQueryResponse.add(json); } - String getSourceQuery = "SELECT * FROM " + schema + "." 
+ sourceTable; - try (Connection connect = CloudMySqlClient.getCloudMysqlConnection()) { + String getSourceQuery = "SELECT * FROM "+ targetTable; + try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) { connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, ResultSet.HOLD_CURSORS_OVER_COMMIT); + ResultSet rsTarget = statement1.executeQuery(getSourceQuery); + return compareResultSetWithJsonData(rsTarget, bigQueryResponse); + } + } + + public static boolean validateDBAndBQRecordValues(String sourceTable, String targetTable) + throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException { - ResultSet rsSource = statement1.executeQuery(getSourceQuery); - return compareResultSetData(rsSource, jsonResponse); + getBigQueryTableData(targetTable, bigQueryRows); + for (Object rows : bigQueryRows) { + JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class); + bigQueryResponse.add(json); + } + String getTargetQuery = "SELECT * FROM " + sourceTable; + try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) { + connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + ResultSet rsSource = statement1.executeQuery(getTargetQuery); + return compareResultSetWithJsonData(rsSource, bigQueryResponse); } } @@ -68,13 +86,13 @@ private static void getBigQueryTableData(String table, List bigQueryRows String dataset = PluginPropertyUtils.pluginProp("dataset"); String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." 
+ table + "` AS t"; TableResult result = BigQueryClient.getQueryResult(selectQuery); - result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue())); + result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue())); } /** - * Compares the data in the result set obtained from the Oracle database with the provided BigQuery JSON objects. + * Compares the data in the result set obtained from the CloudSql MySql database with the provided BigQuery JSON objects. * - * @param rsSource The result set obtained from the Oracle database. + * @param rsSource The result set obtained from the CloudSql MySql database. * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data. * * @return True if the result set data matches the BigQuery data, false otherwise. @@ -82,7 +100,7 @@ private static void getBigQueryTableData(String table, List bigQueryRows * @throws ParseException If an error occurs while parsing the data. */ - public static boolean compareResultSetData(ResultSet rsSource, List bigQueryData) throws SQLException, + public static boolean compareResultSetWithJsonData(ResultSet rsSource, List bigQueryData) throws SQLException, ParseException { ResultSetMetaData mdSource = rsSource.getMetaData(); boolean result = false; @@ -105,7 +123,7 @@ public static boolean compareResultSetData(ResultSet rsSource, List //Variable 'jsonObjectIdx' to track the index of the current JsonObject in the bigQueryData list, int jsonObjectIdx = 0; while (rsSource.next()) { - int currentColumnCount = 1; + int currentColumnCount = 2; while (currentColumnCount <= columnCountSource) { String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); int columnType = mdSource.getColumnType(currentColumnCount); @@ -113,9 +131,60 @@ public static boolean compareResultSetData(ResultSet rsSource, List // Perform different comparisons based on column type switch (columnType) { // Since we skip BFILE in Oracle Sink, we are not comparing 
the BFILE source and sink values + case Types.BIT: + Boolean sourceBit = rsSource.getBoolean(currentColumnCount); + Boolean targetBit = Boolean.parseBoolean(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceBit).equals(String.valueOf(targetBit))); + break; + + case Types.SMALLINT: + case Types.INTEGER: + case Types.TINYINT: + Integer sourceTinyInt = rsSource.getInt(currentColumnCount); + Integer targetTinyInt = Integer.parseInt(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceTinyInt).equals(String.valueOf(targetTinyInt))); + break; + + case Types.REAL: + Float sourceFloat = rsSource.getFloat(currentColumnCount); + Float targetFloat = Float.parseFloat(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceFloat).equals(String.valueOf(targetFloat))); + break; + + case Types.DOUBLE: + Double sourceDouble= rsSource.getDouble(currentColumnCount); + Double targetDouble = Double.parseDouble(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceDouble).equals(String.valueOf(targetDouble))); + break; + + case Types.DATE: + Date sourceDate = rsSource.getDate(currentColumnCount); + Date targetDate = java.sql.Date.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + String.valueOf(sourceDate).equals(String.valueOf(targetDate))); + break; + + case Types.TIME: + Time sourceTime= rsSource.getTime(currentColumnCount); + Time targetTime = Time.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); + Assert.assertTrue("Different values found for column : %s", + 
String.valueOf(sourceTime).equals(String.valueOf(targetTime))); + break; + + case Types.DECIMAL: + org.apache.spark.sql.types.Decimal sourceDecimal = org.apache.spark.sql.types.Decimal.fromDecimal(rsSource.getBigDecimal(currentColumnCount)); + org.apache.spark.sql.types.Decimal targetDecimal = org.apache.spark.sql.types.Decimal.fromDecimal(bigQueryData.get(jsonObjectIdx).get(columnName).getAsBigDecimal()); + Assert.assertEquals("Different values found for column : %s", sourceDecimal, targetDecimal); + break; + case Types.BLOB: case Types.VARBINARY: case Types.LONGVARBINARY: + case Types.BINARY: String sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount))); String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); Assert.assertEquals("Different values found for column : %s", @@ -130,18 +199,20 @@ public static boolean compareResultSetData(ResultSet rsSource, List break; case Types.TIMESTAMP: - Timestamp sourceTS = rsSource.getTimestamp(columnName); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'hh:mm:ss'Z'"); - Date parsedDate = dateFormat.parse(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString()); - Timestamp targetTs = new Timestamp(parsedDate.getTime()); - Assert.assertEquals("Different values found for column : %s", String.valueOf(sourceTS). 
- equals(String.valueOf(targetTs))); + String sourceTS= String.valueOf(rsSource.getTimestamp(currentColumnCount)); + String targetTS=bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); + LocalDateTime timestamp = LocalDateTime.parse(targetTS, DateTimeFormatter.ISO_DATE_TIME); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S"); + String formattedTimestamp = timestamp.format(formatter); + Assert.assertEquals(sourceTS, formattedTimestamp); break; + default: String sourceString = rsSource.getString(currentColumnCount); String targetString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString(); Assert.assertEquals(String.format("Different %s values found for column : %s", columnTypeName, columnName), String.valueOf(sourceString), String.valueOf(targetString)); + break; } currentColumnCount++; } diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java index 2d8ac7c4e..05772562d 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java @@ -25,8 +25,11 @@ @RunWith(Cucumber.class) @CucumberOptions( features = {"src/e2e-test/features"}, - glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"}, - tags = {"@CloudMySql"}, + glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.CloudMySql.stepsdesign"}, + tags = {"@CloudMySql and not @PLUGIN-20670 and not @PLUGIN-1526"}, + /* TODO :Enable tests once issue fixed https://cdap.atlassian.net/browse/CDAP-20670, + https://cdap.atlassian.net/browse/PLUGIN-1526 + */ plugin = {"pretty", "html:target/cucumber-html-report/CloudMySql", "json:target/cucumber-reports/cucumber-mysql.json", "junit:target/cucumber-reports/cucumber-mysql.xml"} diff --git 
a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java new file mode 100644 index 000000000..ac35d14dc --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java @@ -0,0 +1,21 @@ +/* + * Copyright © 2022 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** + * Package contains the runners for CloudMysql features.
+ */ + +package io.cdap.plugin.CloudMySql.runners; \ No newline at end of file diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java deleted file mode 100644 index f3d96427e..000000000 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java +++ /dev/null @@ -1,5 +0,0 @@ -package io.cdap.plugin.CloudMySql.stepsdesign; - -public class CloudMySql { - -} diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java new file mode 100644 index 000000000..3faf14cd7 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java @@ -0,0 +1,66 @@ +package io.cdap.plugin.CloudMySql.stepsdesign; + +import io.cdap.e2e.pages.actions.CdfPipelineRunAction; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.CdfHelper; +import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.plugin.CloudMySqlClient; +import io.cucumber.java.en.Then; +import org.junit.Assert; +import stepsdesign.BeforeActions; +import io.cdap.plugin.CloudMySql.BQValidation; + +import java.io.IOException; +import java.sql.SQLException; +import java.text.ParseException; + +/** + * CloudSqlMySql Plugin related step design. 
+ */ +public class CloudMysql implements CdfHelper { + @Then("Validate the values of records transferred to target table is equal to the values from source table") + public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable() + throws SQLException, ClassNotFoundException { + int countRecords = CloudMySqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable")); + Assert.assertEquals("Number of records transferred should be equal to records out ", + countRecords, recordOut()); + BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords); + boolean recordsMatched = CloudMySqlClient.validateRecordValues(PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("targetTable")); + Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + + "of the records in the source table", recordsMatched); + } + + @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table") + public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable() + throws IOException, InterruptedException, IOException, SQLException, ClassNotFoundException, ParseException { + int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable")); + BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount); + Assert.assertEquals("Out records should match with target BigQuery table records count", + CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount); + + boolean recordsMatched = BQValidation.validateDBAndBQRecordValues( + PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("bqTargetTable")); + Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + + "of the records in the source table", 
recordsMatched); + } + + @Then("Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source " + + "BigQuery table") + public void validateTheValuesOfRecordsTransferredToTargetCloudSQLMySqlTableIsEqualToTheValuesFromSourceBigQueryTable() + throws IOException, InterruptedException, IOException, SQLException, ClassNotFoundException, ParseException, ParseException { + int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable")); + BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount); + Assert.assertEquals("Out records should match with target PostgreSQL table records count", + CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount); + + boolean recordsMatched = BQValidation.validateBQAndDBRecordValues( + PluginPropertyUtils.pluginProp("bqSourceTable"), + PluginPropertyUtils.pluginProp("targetTable")); + Assert.assertTrue("Value of records transferred to the target table should be equal to the value " + + "of the records in the source table", recordsMatched); + } + +} + diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java new file mode 100644 index 000000000..56ff78143 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java @@ -0,0 +1,21 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** + * Package contains the stepDesign for CloudMysql features. + */ + +package io.cdap.plugin.CloudMySql.stepsdesign; \ No newline at end of file diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java index c78b7aee0..b699e7a34 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java @@ -13,31 +13,23 @@ public class CloudMySqlClient { private static final String database = PluginPropertyUtils.pluginProp("DatabaseName"); private static final String connectionName = PluginPropertyUtils.pluginProp("ConnectionName"); - public static void main(String[] args) throws SQLException, ClassNotFoundException { - getCloudMysqlConnection(); - //createSourceTable("myTable"); -// createSourceTable("newTable"); -// String[] tablesToDrop = {"newTable"}; -// dropTables(tablesToDrop); - //System.out.println("done"); - - } - - public static Connection getCloudMysqlConnection() throws SQLException, ClassNotFoundException { + public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException { Class.forName("com.google.cloud.sql.mysql.SocketFactory"); - String instanceConnectionName = "cdf-athena:us-central1:sql-automation-test-instance"; - String databaseName = "TestDatabase"; - String Username = "v"; - String Password = "v@123"; - String jdbcUrl = 
String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, Username, Password); + String instanceConnectionName = System.getenv("CLOUDSQLMYSQL_CONNECTIONNAME"); + String database = PluginPropertyUtils.pluginProp("DatabaseName"); + String username = System.getenv("CLOUDSQLMYSQL_USERNAME"); + String password = System.getenv("CLOUDSQLMYSQL_PASSWORD"); + + String jdbcUrl = String.format( + PluginPropertyUtils.pluginProp("jdbcURL"), + database, instanceConnectionName, username, password); Connection conn = DriverManager.getConnection(jdbcUrl); - System.out.println("connected to database"); return conn; } public static int countRecord(String table) throws SQLException, ClassNotFoundException { String countQuery = "SELECT COUNT(*) as total FROM " + table; - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement(); ResultSet rs = statement.executeQuery(countQuery)) { int num = 0; @@ -52,7 +44,7 @@ public static boolean validateRecordValues(String sourceTable, String targetTabl throws SQLException, ClassNotFoundException { String getSourceQuery = "SELECT * FROM " + sourceTable; String getTargetQuery = "SELECT * FROM " + targetTable; - try (Connection connect = getCloudMysqlConnection()) { + try (Connection connect = getCloudSqlConnection()) { connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE, ResultSet.HOLD_CURSORS_OVER_COMMIT); @@ -108,7 +100,7 @@ public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarge } public static void createSourceTable(String sourceTable) throws SQLException, ClassNotFoundException { - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = 
connect.createStatement()) { String createSourceTableQuery = "CREATE TABLE IF NOT EXISTS " + sourceTable + "(id int, lastName varchar(255), PRIMARY KEY (id))"; @@ -131,7 +123,7 @@ public static void createSourceTable(String sourceTable) throws SQLException, Cl } public static void createTargetTable(String targetTable) throws SQLException, ClassNotFoundException { - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { String createTargetTableQuery = "CREATE TABLE IF NOT EXISTS " + targetTable + "(id int, lastName varchar(255), PRIMARY KEY (id))"; @@ -143,7 +135,7 @@ public static void createTargetTable(String targetTable) throws SQLException, Cl } public static void createSourceDatatypesTable(String sourceTable) throws SQLException, ClassNotFoundException { - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns; @@ -158,27 +150,29 @@ public static void createSourceDatatypesTable(String sourceTable) throws SQLExce } public static void createTargetDatatypesTable(String targetTable) throws SQLException, ClassNotFoundException { - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns; statement.executeUpdate(createTargetTableQuery); + System.out.println(createTargetTableQuery); } } public static void createTargetCloudMysqlTable(String targetTable) throws SQLException, ClassNotFoundException { - try (Connection connect = 
getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns"); String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns; statement.executeUpdate(createTargetTableQuery); + System.out.println(createTargetTableQuery); } } public static void dropTables(String[] tables) throws SQLException, ClassNotFoundException { - try (Connection connect = getCloudMysqlConnection(); + try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) { for (String table : tables) { String dropTableQuery = "Drop Table " + table; diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index abaf9f4aa..97259d5f9 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -24,11 +24,7 @@ */ public class TestSetupHooks { - public static void main(String[] args) throws SQLException, ClassNotFoundException { - setTableName(); - createDatatypesTable(); - } - private static void setTableName() { + public static void setTableName() { String randomString = RandomStringUtils.randomAlphabetic(10); String sourceTableName = String.format("SourceTable_%s", randomString); String targetTableName = String.format("TargetTable_%s", randomString); @@ -54,7 +50,7 @@ public static void initializeDBProperties() { @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_TEST") public static void createTables() throws SQLException, ClassNotFoundException { - CloudMySqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable")); + 
CloudMySqlClient.createSourceTable(PluginPropertyUtils.pluginProp("CLOUDMYSQL_SOURCE_TEST")); CloudMySqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable")); } @@ -73,7 +69,9 @@ public static void dropTables() throws SQLException, ClassNotFoundException { @Before(order = 2, value = "@CLOUDMYSQL_TEST_TABLE") public static void createCloudMysqlTestTable() throws SQLException, ClassNotFoundException { +// BeforeActions.scenario.write("SQL Target table name - " + sqlTargetTableName); CloudMySqlClient.createTargetCloudMysqlTable(PluginPropertyUtils.pluginProp("targetTable")); + } @Before(order = 1, value = "@BQ_SINK_TEST") @@ -113,8 +111,8 @@ public static void deleteTempSourceBQTable() throws IOException, InterruptedExce PluginPropertyUtils.removePluginProp("bqSourceTable"); } private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) - throws IOException, InterruptedException { - String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_"); + throws IOException, InterruptedException,NullPointerException { + String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0,5).replaceAll("-", "_"); String createTableQuery = StringUtils.EMPTY; try { @@ -124,6 +122,7 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) .replace("TABLE_NAME", bqSourceTable); } catch (Exception e) { + e.printStackTrace(); BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage()); Assert.fail("Exception in BigQuery testdata prerequisite setup " + "- error in reading create table query file " + e.getMessage()); @@ -140,6 +139,7 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + 
e.getMessage()); Assert.fail("Exception in BigQuery testdata prerequisite setup " + "- error in reading insert data query file " + e.getMessage()); + } BigQueryClient.getSoleQueryResult(createTableQuery); try { @@ -148,8 +148,9 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile // Insert query does not return any record. // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException } + System.out.println(bqSourceTable); PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); - BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); +// BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); } } diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt index 54fd6ef5e..1188d6591 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt @@ -1,2 +1,2 @@ -create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col6 TIMESTAMP, -col8 BOOL, col9 INT64, col10 TIME) +create table `DATASET.TABLE_NAME` (COL1 BYTES, COL2 STRING, COL3 DATE, COL4 FLOAT64, COL6 TIMESTAMP, +COL8 BOOL, COL9 INT64, COL10 TIME) diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt index bdccb0ea8..5b8643fff 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt @@ -1,3 +1,3 @@ -insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col6, col8, col9, col10) values -(b'01011011','priya','2021-01-28',1.110,'2019-03-10 04:50:01 
UTC',false,92233720,'21:26:00'), -(b'01011011','surya','2021-01-21',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00'); +insert into `DATASET.TABLE_NAME` (COL1, COL2, COL3, COL4, COL6, COL8, COL9, COL10) values +(b'01011011','priya','2021-01-27',1.110,'2019-03-10 04:50:01 UTC',false,92233720,'21:26:00'), +(b'01011011','surya','2021-01-27',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00'); diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties index 27437b5f0..2ad0a8369 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties @@ -16,3 +16,8 @@ errorMessageInvalidTableName=Exception while trying to validate schema of databa errorMessageConnectionName=Connection Name must be in the format :: to connect to a public CloudSQL PostgreSQL instance. validationSuccessMessage=No errors found. validationErrorMessage=COUNT ERROR found +errorLogsMessageInvalidTableName=Spark program 'phase-1' failed with error: Errors were encountered during validation. \ + Table 'Table123' does not exist +errorLogsMessageInvalidCredentials =Spark program 'phase-1' failed with error: Errors were encountered during validation. +errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \ + number of columns: 0.. Please check the system logs for more details. 
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties index 0e43a9e82..68b1c41d1 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -8,18 +8,20 @@ invalidImportQuery=select numberOfSplits=2 invalidRef=invalidRef&^*&&* zeroValue=0 -ConnectionName=cdf-athena:us-central1:sql-automation-test-instance +connectionName=CLOUDSQLMYSQL_CONNECTIONNAME zeroSplits=isha insertQuery= select * from mytable CloudMySqlImportQuery=select * from mytable fetchSize=1000 NumSplits=1 SplitBy=ID +jdbcURL=jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s projectId=cdf-athena +datasetprojectId=cdf-athena BQReferenceName=reference -bqTargetTable=mytable +targetTable=mytable5 bqDatasetId=1234 -dataset=sql +dataset=test_automation bqSourceTable=mysql driver=cloudsql-mysql table=myTable @@ -28,6 +30,7 @@ pass=PASS invalidUserName=testUser invalidPassword=testPassword invalidTable=data +jdbcURL=jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s CloudMySqlDriverName=cloudsql-mysql bqTruncateTable=truncateTable bqUpdateTableSchema=updateSchema @@ -36,6 +39,8 @@ invalidboundQuery=SELECT MIN(id),MAX(id) FROM table cloudsqlimportQuery=where $CONDITIONS; splitby=ID numbersplitsgenerate=2 +connectionTimeout=100 +invalidTablename=Table123 outputSchema=[{"key":"fname","value":"string"},{"key":"lname","value":"string"},{"key":"cost","value":"double"},\ {"key":"zipcode","value":"int"}] OutputSchema=[{"key":"id","value":"int"},{"key":"lastName","value":"string"}] @@ -66,20 +71,20 @@ datatypesValue1=VALUES ('User1',1,-1,true,-32768,HEX('27486920546869732069732061 'This is a test message to check ','X') 
datatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"boolean"},{"key":"COL2","value":"int"},\ {"key":"COL3","value":"boolean"},{"key":"COL4","value":"int"},{"key":"COL5","value":"bytes"},\ - {"key":"COL6","value":"double"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\ + {"key":"COL6","value":"int"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\ {"key":"COL9","value":"float"},{"key":"COL10","value":"date"},{"key":"COL11","value":"timestamp"},\ {"key":"COL12","value":"decimal"},{"key":"COL13","value":"double"},{"key":"COL14","value":"string"},\ {"key":"COL15","value":"time"},{"key":"COL16","value":"timestamp"},\ {"key":"COL18","value":"string"},{"key":"COL19","value":"bytes"},{"key":"COL20","value":"string"},\ {"key":"COL21","value":"bytes"},{"key":"COL22","value":"bytes"},{"key":"COL23","value":"bytes"},\ - {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"time"},\ + {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"string"},\ {"key":"COL27","value":"bytes"},{"key":"COL28","value":"string"},{"key":"COL29","value":"string"}] -bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ - {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col6","value":"timestamp"},\ - {"key":"col8","value":"boolean"},{"key":"col9","value":"long"},{"key":"col10","value":"time"}] +bqOutputMultipleDatatypesSchema= [{"key":"COL1","value":"bytes"},{"key":"COL2","value":"string"},\ + {"key":"COL3","value":"date"},{"key":"COL4","value":"double"},{"key":"COL6","value":"timestamp"},\ + {"key":"COL8","value":"boolean"},{"key":"COL9","value":"long"},{"key":"COL10","value":"time"}] CloudMySqlDatatypesColumns=(COL1 VARBINARY(100) , COL2 VARCHAR(100), COL3 DATE, COL4 DOUBLE,\ COL6 TIMESTAMP, COL8 BIT, COL9 BIGINT, COL10 TIME) #bq queries file path -CreateBQTableQueryFile=testData/BigQuery/BigQueryCreateTableQuery.txt 
-InsertBQDataQueryFile=testData/BigQuery/BigQueryInsertDataQuery.txt +CreateBQTableQueryFile=BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=BigQuery/BigQueryInsertDataQuery.txt diff --git a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceSchemaReader.java b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceSchemaReader.java index a178f6ee8..dfdb641ac 100644 --- a/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceSchemaReader.java +++ b/oracle-plugin/src/main/java/io/cdap/plugin/oracle/OracleSourceSchemaReader.java @@ -35,6 +35,7 @@ public class OracleSourceSchemaReader extends CommonSchemaReader { * Oracle type constants, from Oracle JDBC Implementation. */ public static final int INTERVAL_YM = -103; + public static final int TINY_BLOB = -3; public static final int INTERVAL_DS = -104; public static final int TIMESTAMP_TZ = -101; public static final int TIMESTAMP_LTZ = -102; @@ -59,6 +60,7 @@ public class OracleSourceSchemaReader extends CommonSchemaReader { BFILE, LONG, LONG_RAW, + TINY_BLOB, Types.NUMERIC, Types.DECIMAL ); @@ -87,6 +89,7 @@ public Schema getSchema(ResultSetMetaData metadata, int index) throws SQLExcepti return Schema.of(Schema.Type.FLOAT); case BINARY_DOUBLE: return Schema.of(Schema.Type.DOUBLE); + case TINY_BLOB: case BFILE: case LONG_RAW: return Schema.of(Schema.Type.BYTES);