From be2c4f923d6e49fa902eac64656897d54df1335b Mon Sep 17 00:00:00 2001
From: priyabhatnagar
Date: Mon, 23 Oct 2023 16:27:18 +0530
Subject: [PATCH] control center cdap phase2

---
 .../features/controlcenter/DesignTime.feature |  94 ++++++++
 .../features/controlcenter/RunTime.feature    | 223 ++++++++++++++++++
 .../controlcenter/runners/TestRunner.java     |  36 +++
 .../controlcenter/runners/package-info.java   |  20 ++
 .../stepsdesign/TestSetupHooks.java           |  87 +++++++
 .../pluginDataCyAttributes.properties         |   3 +
 .../resources/pluginParameters.properties     |  18 +-
 7 files changed, 479 insertions(+), 2 deletions(-)
 create mode 100644 cdap-e2e-tests/src/e2e-test/features/controlcenter/DesignTime.feature
 create mode 100644 cdap-e2e-tests/src/e2e-test/features/controlcenter/RunTime.feature
 create mode 100644 cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/TestRunner.java
 create mode 100644 cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/package-info.java
 create mode 100644 cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/stepsdesign/TestSetupHooks.java

diff --git a/cdap-e2e-tests/src/e2e-test/features/controlcenter/DesignTime.feature b/cdap-e2e-tests/src/e2e-test/features/controlcenter/DesignTime.feature
new file mode 100644
index 000000000000..445761f5f90e
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/features/controlcenter/DesignTime.feature
@@ -0,0 +1,94 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@Controlcenter
+
+Feature: Controlcenter - Validate the design-time features of the control center page.
+
+  Scenario: Verify user is able to click the control center tab and successfully navigate to the control center page
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify that the user is navigated to control center page successfully
+
+  Scenario: Verify the display message is updated based on the filter selected by the user
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Select dropdown : "Filter" with option value: "Artifacts" in control center
+    Then Verify the all entities message is displayed with the filter selection: "allEntitiesDisplayedMessage"
+
+  Scenario: Verify user is able to switch between the schema and programs of the dataset.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to click dataset entity icon to navigate to details page
+    Then Verify user is navigated to the details page of the dataset entity icon successfully
+    Then Click on the schema link of the dataset entity details page
+    Then Verify user is navigated to the schema details page of the dataset entity page
+    Then Click on the programs link of the dataset entity details page
+    Then Verify user is navigated to the programs details page of the dataset entity page
+
+  Scenario: Verify user is able to sort the entities with all the available sort options in control center.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Select the sort by dropdown with option value: "Newest"
+    Then Verify the entities are sorted by the newest option: "newFilterMessage"
+    Then Select the sort by dropdown with option value: "Oldest"
+    Then Verify the entities are sorted by the oldest option: "oldestFilterMessage"
+    Then Select the sort by dropdown with option value: "A - Z"
+    Then Verify the entities are sorted by the A to Z option: "aToZFilterMessage"
+    Then Select the sort by dropdown with option value: "Z - A"
+    Then Verify the entities are sorted by the Z to A option: "zToAFilterMessage"
+
+  Scenario: Verify the user is able to navigate to and from the details page of a dataset inside control center
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to click dataset entity icon to navigate to details page
+    Then Click on view details tab of dataset entity
+    Then Verify user is successfully navigated to details page of the dataset entity
+    Then Click on the back link of the view details page of dataset entity
+    Then Click on close link to close the details page of dataset entity
+    Then Verify that the user is navigated to control center page successfully
+
+  Scenario: Verify user is able to search the dataset using the added tags.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to click dataset entity icon to navigate to details page
+    Then Click on the plus button to add the tag for a dataset entity
+    Then Verify user is able to enter the values in tag input field: "testingTag"
+    Then Enter the text in search tab "testingTag" in control center
+    Then Verify the searched tag is displayed successfully on control center page: "searchedTagDisplayedMessage"
+
+  Scenario: Verify that user is able to click on the dataset entity and is navigated to the details page of the dataset successfully
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to click dataset entity icon to navigate to details page
+    Then Verify user is navigated to the details page of the dataset entity icon successfully
+
+  Scenario: Verify that tag counts increase and decrease when the user performs add or remove actions on a dataset entity.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to click dataset entity icon to navigate to details page
+    Then Click on the plus button to add the tag for a dataset entity
+    Then Verify user is able to enter the values in tag input field: "testingTag"
+    Then Verify the tag count of dataset entity when user adds the tag
+    Then Click on the close icon of tag added
+    Then Verify the tag count of dataset entity decreases message: "tagCountDecreaseMessage"
+    Then Click on close link to close the details page of dataset entity
diff --git a/cdap-e2e-tests/src/e2e-test/features/controlcenter/RunTime.feature b/cdap-e2e-tests/src/e2e-test/features/controlcenter/RunTime.feature
new file mode 100644
index 000000000000..554e67588ec2
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/features/controlcenter/RunTime.feature
@@ -0,0 +1,223 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@Controlcenter
+
+Feature: Controlcenter - Validate the run-time features of the control center page.
+
+  @BQ_INSERT_INT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Verify that user is able to create a pipeline and then validate the presence of the created pipeline in control center.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the Plus Green Button to import the pipelines
+    Then Click on create button to create a pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Verify the pipeline status is "Succeeded"
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the pipeline created successfully is present in control center page
+
+  @BQ_INSERT_INT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Verify the user is able to set preferences for a deployed pipeline and use the same while running the pipeline.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the Plus Green Button to import the pipelines
+    Then Click on create button to create a pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    Then Click on the Macro button of Property: "project" and set the value to: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click on the Macro button of Property: "project" and set the value to: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to set the preferences for the created pipeline in the control center page
+    Then Verify the user is able to enter the value in the key input field "keyValue"
+    Then Verify the user is able to enter the value of the key in the value input field "value"
+    Then Verify user is able to click on save and close button of set preferences
+    Then Verify user is able to click on the data pipeline added in the control center page
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Verify the pipeline status is "Succeeded"
+
+  @BQ_INSERT_INT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Verify user is able to delete the preferences by clicking on the delete icon button
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the Plus Green Button to import the pipelines
+    Then Click on create button to create a pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    Then Click on the Macro button of Property: "project" and set the value to: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click on the Macro button of Property: "project" and set the value to: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Verify the user is able to set the preferences for the created pipeline in the control center page
+    Then Verify the user is able to enter the value in the key input field "keyValue"
+    Then Verify the user is able to enter the value of the key in the value input field "value"
+    Then Verify user is able to click on save and close button of set preferences
+    Then Verify the user is able to set the preferences for the created pipeline in the control center page
+    Then Verify user is able to click on the delete icon of preferences to delete the added preferences successfully
+
+  @BQ_INSERT_INT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Verify that user is able to perform delete and truncate operations on a dataset entity successfully.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the Plus Green Button to import the pipelines
+    Then Click on create button to create a pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "ReferenceDatasetDelete"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceNameDatasetTruncate"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Verify the pipeline status is "Succeeded"
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click truncate and verify the successful truncate of dataset entity
+    Then Click delete and verify the dataset is deleted successfully
+    Then Verify the deleted dataset "ReferenceDatasetDelete" entity is not present in control center page
+
+  @BQ_INSERT_INT_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: Verify that user is able to delete the created pipeline in control center successfully.
+    Given Open Datafusion Project to configure pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the Plus Green Button to import the pipelines
+    Then Click on create button to create a pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "projectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    Then Override Service account details if set in environment variables
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Click on the Hamburger bar on the left panel
+    Then Click on Control Center link from the hamburger menu
+    Then Click on the delete icon of the created pipeline and pipeline should get deleted successfully
+    Then Verify the deleted pipeline is not present in the control center page
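NOTE: The feature files above use custom steps (hamburger navigation, control center assertions) that must resolve to glue code in io.cdap.cdap.controlcenter.stepsdesign; this patch adds the glue package but no step definitions for those steps. A minimal sketch of one such binding, assuming the e2e framework's shared Selenium driver (the SeleniumDriver helper is assumed from io.cdap.e2e.utils, and the data-cy locator and URL fragment below are hypothetical, not verified against the CDAP UI):

package io.cdap.cdap.controlcenter.stepsdesign;

import io.cdap.e2e.utils.SeleniumDriver;
import io.cucumber.java.en.Then;
import org.junit.Assert;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;

public class ControlCenterSteps {

  @Then("Click on Control Center link from the hamburger menu")
  public void clickControlCenterLink() {
    WebDriver driver = SeleniumDriver.getDriver();
    // "navbar-control-center-link" is an assumed data-cy attribute for illustration only.
    driver.findElement(By.cssSelector("[data-cy='navbar-control-center-link']")).click();
  }

  @Then("Verify that the user is navigated to control center page successfully")
  public void verifyNavigatedToControlCenter() {
    WebDriver driver = SeleniumDriver.getDriver();
    // Assumed route fragment for the control center page.
    Assert.assertTrue("Expected the control center route in the URL",
                      driver.getCurrentUrl().contains("/control"));
  }
}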
diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/TestRunner.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/TestRunner.java
new file mode 100644
index 000000000000..1b878581a01c
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/TestRunner.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.cdap.controlcenter.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute control center test cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"io.cdap.cdap.controlcenter.stepsdesign", "stepsdesign"},
+  tags = {"@Controlcenter"},
+  plugin = {"pretty", "html:target/cucumber-html-report/controlcenter",
+    "json:target/cucumber-reports/cucumber-controlcenter.json",
+    "junit:target/cucumber-reports/cucumber-controlcenter.xml"}
+)
+public class TestRunner {
+}
diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/package-info.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/package-info.java
new file mode 100644
index 000000000000..0b84865f722e
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/runners/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the runners for controlcenter features.
+ */
+package io.cdap.cdap.controlcenter.runners;
diff --git a/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/stepsdesign/TestSetupHooks.java b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/stepsdesign/TestSetupHooks.java
new file mode 100644
index 000000000000..527e60cbcdfa
--- /dev/null
+++ b/cdap-e2e-tests/src/e2e-test/java/io/cdap/cdap/controlcenter/stepsdesign/TestSetupHooks.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.cdap.controlcenter.stepsdesign;
+
+import com.google.cloud.bigquery.BigQueryException;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.util.NoSuchElementException;
+import java.util.UUID;
+
+/** Setup and teardown hooks that manage the BigQuery test tables for control center scenarios. */
+public class TestSetupHooks {
+  public static String bqTargetTable = StringUtils.EMPTY;
+  public static String bqSourceTable = StringUtils.EMPTY;
+  public static String datasetName = PluginPropertyUtils.pluginProp("dataset");
+
+  @Before(order = 1, value = "@BQ_INSERT_INT_SOURCE_TEST")
+  public static void createSourceBQTable() throws IOException, InterruptedException {
+    bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
+    PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
+    BeforeActions.scenario.write("BQ source table name - " + bqSourceTable);
+    BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable + "` "
+                                        + "(ID INT64, Name STRING, Price FLOAT64, Customer_Exists BOOL)");
+    try {
+      BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable + "` "
+                                          + "(ID, Name, Price, Customer_Exists) "
+                                          + "VALUES (3, 'Rajan Kumar', 100.0, true)");
+    } catch (NoSuchElementException e) {
+      // Expected: the insert query returns no rows, so the iterator on TableResult
+      // values inside getSoleQueryResult throws NoSuchElementException.
+      BeforeActions.scenario.write("Error inserting the record in the table: " + e.getMessage());
+    }
+    BeforeActions.scenario.write("BQ source table " + bqSourceTable + " created successfully");
+  }
+
+  @After(order = 1, value = "@BQ_INSERT_INT_SOURCE_TEST")
+  public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
+    BigQueryClient.dropBqQuery(bqSourceTable);
+    PluginPropertyUtils.removePluginProp("bqSourceTable");
+    BeforeActions.scenario.write("BQ source table " + bqSourceTable + " deleted successfully");
+    bqSourceTable = StringUtils.EMPTY;
+  }
+
+  @Before(order = 1, value = "@BQ_SINK_TEST")
+  public static void setTempTargetBQTableName() {
+    bqTargetTable = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
+    PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTable);
+    BeforeActions.scenario.write("BQ Target table name - " + bqTargetTable);
+  }
+
+  @After(order = 1, value = "@BQ_SINK_TEST")
+  public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
+    try {
+      BigQueryClient.dropBqQuery(bqTargetTable);
+      PluginPropertyUtils.removePluginProp("bqTargetTable");
+      BeforeActions.scenario.write("BQ Target table - " + bqTargetTable + " deleted successfully");
+      bqTargetTable = StringUtils.EMPTY;
+    } catch (BigQueryException e) {
+      if (e.getMessage().contains("Not found: Table")) {
+        BeforeActions.scenario.write("BQ Target Table " + bqTargetTable + " does not exist");
+      } else {
+        Assert.fail(e.getMessage());
+      }
+    }
+  }
+}
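NOTE: Unlike deleteTempTargetBQTable, deleteTempSourceBQTable does not tolerate a table that no longer exists (for example when createSourceBQTable failed before the CREATE ran). A defensive variant of the source teardown, sketched under the same assumptions and imports as the class above:

  @After(order = 1, value = "@BQ_INSERT_INT_SOURCE_TEST")
  public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
    try {
      BigQueryClient.dropBqQuery(bqSourceTable);
      PluginPropertyUtils.removePluginProp("bqSourceTable");
      BeforeActions.scenario.write("BQ source table " + bqSourceTable + " deleted successfully");
    } catch (BigQueryException e) {
      // Mirror the target-table teardown: ignore a table that was never created.
      if (e.getMessage().contains("Not found: Table")) {
        BeforeActions.scenario.write("BQ source table " + bqSourceTable + " does not exist");
      } else {
        Assert.fail(e.getMessage());
      }
    } finally {
      bqSourceTable = StringUtils.EMPTY;
    }
  }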
diff --git a/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties b/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties
index cf1c8e728e82..163c7247d653 100644
--- a/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties
+++ b/cdap-e2e-tests/src/e2e-test/resources/pluginDataCyAttributes.properties
@@ -21,3 +21,6 @@ studio=pipeline-studio
 namespaceAdmin=project-admin
 projectId=project
 datasetProjectId=datasetProject
+referenceName=referenceName
+table=table
+Artifacts=Artifacts-input
diff --git a/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties b/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties
index 80d35ed2d702..8160ad65cac9 100644
--- a/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties
+++ b/cdap-e2e-tests/src/e2e-test/resources/pluginParameters.properties
@@ -2,14 +2,29 @@ clientName=cdap
 clientUrl=http://localhost:11011
 serverUrl=https://placeholder.com/api
 # command to generate token: gcloud auth print-access-token
+
+## CONTROLCENTER-START
 serverAccessToken=placeholder
+bqSourceTable=dummy
+dataset=bq_automation
 projectId=cdf-athena
+datasetprojectId=cdf-athena
+keyValue=projectId
+value=cdf-athena
+Artifacts=Artifacts-input
+newFilterMessage=Displaying Applications, Datasets, sorted by Newest
+oldestFilterMessage=Displaying Applications, Datasets, sorted by Oldest
+zToAFilterMessage=Displaying Applications, Datasets, sorted by Z - A
+aToZFilterMessage=Displaying Applications, Datasets, sorted by A - Z
+tagCountDecreaseMessage=Tags (0):
+searchedTagDisplayedMessage=Search results for "testingTag", filtered by Applications, Datasets
+allEntitiesDisplayedMessage=Displaying all entities, sorted by Newest
+## CONTROLCENTER-END

 ## HUB-PROPERTIES-START
 expectedPluginName=Anaplan
 expectedElement=HUB
 invalidMessage_hub=sfts
-Artifacts=Artifacts-input
 gcsSourceBucket=dummy
 gcsTargetBucketName=dummy
 gcsTargetPath=dummy
@@ -32,5 +47,4 @@ bodyValue={ "description": "Example Secure Key","data": "test123","properties":
 httpPutMethod=PUT
 httpGetMethod=GET
 httpDeleteMethod=DELETE
-dataset=test_automation
 ## SYSTEMADMIN-PROPERTIES-END
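NOTE: One design observation on runner scope: TestRunner points features at the shared src/e2e-test/features root and relies on the @Controlcenter tag alone to select scenarios. An equivalent, narrower configuration would also restrict the feature path itself; a sketch reusing only the options already present in this patch (the class name is illustrative):

package io.cdap.cdap.controlcenter.runners;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Variant runner that scans only the controlcenter feature directory, so the
 * tag filter does not have to walk every feature file in the suite.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features/controlcenter"},
  glue = {"io.cdap.cdap.controlcenter.stepsdesign", "stepsdesign"},
  tags = {"@Controlcenter"},
  plugin = {"pretty", "html:target/cucumber-html-report/controlcenter",
    "json:target/cucumber-reports/cucumber-controlcenter.json",
    "junit:target/cucumber-reports/cucumber-controlcenter.xml"}
)
public class ControlCenterOnlyRunner {
}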