e2e add field scenarios #19
base: develop
Changes from 1 commit
pom.xml
@@ -26,8 +26,9 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <cdap.version>6.1.2</cdap.version>
+    <cdap.version>6.8.0-SNAPSHOT</cdap.version>
Review comment: you shouldn't need to bump cdap.version as we aren't adding a dependency on CDAP APIs that have been added in 6.8.0-SNAPSHOT.
     <powermock.version>2.0.2</powermock.version>
     <testSourceLocation>${project.basedir}/src/test/java/</testSourceLocation>
   </properties>

   <repositories>
@@ -46,11 +47,22 @@
       <groupId>io.cdap.cdap</groupId>
       <artifactId>cdap-etl-api</artifactId>
       <version>${cdap.version}</version>
+      <exclusions>
+        <exclusion>
+          <artifactId>guava</artifactId>
+          <groupId>com.google.guava</groupId>
+        </exclusion>
+      </exclusions>
     </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>30.1.1-jre</version>
Review comment: why are you adding guava dependency here? If it's needed for e2e-tests, can you add the dependency under e2e-tests profile?
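A minimal sketch of what the reviewer is suggesting — declaring Guava only inside the e2e-tests profile's `<dependencies>` block instead of at the top level (the 30.1.1-jre version is carried over from this diff; the `test` scope is an assumption, since the dependency would only be needed by the e2e suite):

```xml
<profile>
  <id>e2e-tests</id>
  <dependencies>
    <!-- Guava declared only for the e2e-tests profile; version carried over
         from this PR. test scope is an assumption, not part of the diff. -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <version>30.1.1-jre</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
</profile>
```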
+    </dependency>
     <dependency>
       <groupId>io.cdap.cdap</groupId>
       <artifactId>cdap-data-pipeline</artifactId>
-      <version>${cdap.version}</version>
+      <version>6.4.0</version>
Review comment: why are we hardcoding the version instead of taking from the property?
Author reply: Hardcoding removed.
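Presumably the fix restores the property reference, i.e. `<version>${cdap.version}</version>`, so the cdap-data-pipeline artifact tracks the same CDAP version as the rest of the pom.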
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -68,6 +80,7 @@
   </dependencies>

   <build>
+    <testSourceDirectory>${testSourceLocation}</testSourceDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -146,5 +159,110 @@
       </plugin>
     </plugins>
   </build>
+
+  <profiles>
+    <profile>
+      <id>e2e-tests</id>
+      <properties>
+        <testSourceLocation>src/e2e-test/java</testSourceLocation>
+      </properties>
+      <build>
+        <testResources>
+          <testResource>
+            <directory>src/e2e-test/resources</directory>
+          </testResource>
+        </testResources>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>2.18.1</version>
+            <configuration>
+              <skipTests>true</skipTests>
+            </configuration>
+          </plugin>
+
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>3.0.0-M5</version>
+            <configuration>
+              <includes>
+                <include>TestRunner.java</include>
+              </includes>
+              <!-- Start configuration to run TestRunners in parallel -->
+              <parallel>classes</parallel> <!-- Running TestRunner classes in parallel -->
+              <threadCount>2</threadCount> <!-- Number of classes to run in parallel -->
+              <forkCount>2</forkCount> <!-- Number of JVM processes -->
+              <reuseForks>true</reuseForks>
+              <!-- End configuration to run TestRunners in parallel -->
+              <environmentVariables>
+                <GOOGLE_APPLICATION_CREDENTIALS>
+                  ${GOOGLE_APPLICATION_CREDENTIALS}
+                </GOOGLE_APPLICATION_CREDENTIALS>
+                <SERVICE_ACCOUNT_TYPE>
+                  ${SERVICE_ACCOUNT_TYPE}
+                </SERVICE_ACCOUNT_TYPE>
+                <SERVICE_ACCOUNT_FILE_PATH>
+                  ${SERVICE_ACCOUNT_FILE_PATH}
+                </SERVICE_ACCOUNT_FILE_PATH>
+                <SERVICE_ACCOUNT_JSON>
+                  ${SERVICE_ACCOUNT_JSON}
+                </SERVICE_ACCOUNT_JSON>
+              </environmentVariables>
+            </configuration>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+
+          <plugin>
+            <groupId>net.masterthought</groupId>
+            <artifactId>maven-cucumber-reporting</artifactId>
+            <version>5.5.0</version>
+
+            <executions>
+              <execution>
+                <id>execution</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>generate</goal>
+                </goals>
+                <configuration>
+                  <projectName>Cucumber Reports</projectName> <!-- Replace with project name -->
+                  <outputDirectory>target/cucumber-reports/advanced-reports</outputDirectory>
+                  <buildNumber>1</buildNumber>
+                  <skip>false</skip>
+                  <inputDirectory>${project.build.directory}/cucumber-reports</inputDirectory>
+                  <jsonFiles> <!-- supports wildcard or name pattern -->
+                    <param>**/*.json</param>
+                  </jsonFiles> <!-- optional, defaults to outputDirectory if not specified -->
+                  <classificationDirectory>${project.build.directory}/cucumber-reports</classificationDirectory>
+                  <checkBuildResult>true</checkBuildResult>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+      <dependencies>
+        <dependency>
+          <groupId>io.cdap.tests.e2e</groupId>
+          <artifactId>cdap-e2e-framework</artifactId>
+          <version>0.0.1-SNAPSHOT</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>ch.qos.logback</groupId>
+          <artifactId>logback-classic</artifactId>
+          <version>1.2.8</version>
+          <scope>runtime</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
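For context: assuming the standard failsafe lifecycle bindings (its `integration-test` goal runs before `verify`, where the cucumber-reporting plugin is bound), the suite would be run by activating the profile explicitly, e.g. `mvn clean verify -P e2e-tests`, with the service-account values supplied as `-D` properties or CI environment variables so the `${...}` placeholders above resolve.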
New file under src/e2e-test/features (AddField error scenarios):

@@ -0,0 +1,45 @@
@Add_Field
Feature: AddField Plugin - Verify error scenarios

  @ADD_FIELD-01
  Scenario: Verify add field validation errors for mandatory fields
    Given Open Datafusion Project to configure pipeline
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Navigate to the properties page of plugin: "AddField"
    And Click on the Validate button
    And Verify mandatory property error for below listed properties:
      | fieldName |

  @ADD_FIELD-02
  Scenario: Validate invalid error messages in add field plugin without any input data
    Given Open Datafusion Project to configure pipeline
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Navigate to the properties page of plugin: "AddField"
    And Enter input plugin property: "addFieldFieldName" with value: "afFieldName"
    And Click on the Get Schema button
    And Verify that the Plugin Property: "fieldValue" is displaying an in-line error message: "addFieldErrorMessageInvalidField"
    And Verify that the Plugin Property: "asUUID" is displaying an in-line error message: "addFieldErrorMessageInvalidField"

  @ADD_FIELD-03
  Scenario: Verify error count for add field plugin for mandatory fields
    Given Open Datafusion Project to configure pipeline
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Navigate to the properties page of plugin: "AddField"
    And Click on the Validate button
    And Verify plugin properties validation fails with 1 error

  @ADD_FIELD-04
  Scenario: Validate errors when field value is given and generate uuid as value is set to true
    Given Open Datafusion Project to configure pipeline
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Navigate to the properties page of plugin: "AddField"
    And Enter input plugin property: "addFieldFieldName" with value: "afFieldName"
    And Enter input plugin property: "addFieldFieldValue" with value: "afFieldValue"
    And Select dropdown plugin property: "addFieldGenerateUUID" with option value: "true"
    And Click on the Get Schema button
    And Verify that the Plugin Property: "fieldValue" is displaying an in-line error message: "addFieldErrorMessageValidFieldValue"
    And Verify that the Plugin Property: "asUUID" is displaying an in-line error message: "addFieldErrorMessageValidFieldValue"
New file under src/e2e-test/features (AddField macro scenarios):

@@ -0,0 +1,58 @@
@Add_Field
Feature: AddField Plugin - Verify macro scenarios

  @FILE_SOURCE_TEST @BQ_SINK_TEST
  Scenario: Verify add field functionality with macro arguments using File to BigQuery pipeline
    Given Open Datafusion Project to configure pipeline
    And Select plugin: "File" from the plugins list as: "Source"
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Expand Plugin group in the LHS plugins list: "Sink"
    And Select plugin: "BigQuery" from the plugins list as: "Sink"
    And Connect plugins: "File" and "AddField" to establish connection
    And Connect plugins: "AddField" and "BigQuery" to establish connection
    And Navigate to the properties page of plugin: "File"
    And Enter input plugin property: "referenceName" with value: "FileReferenceName"
    And Enter input plugin property: "path" with value: "csvAllDataTypeFile"
    And Select dropdown plugin property: "format" with option value: "csv"
    And Click plugin property: "skipHeader"
    And Click on the Get Schema button
    And Validate "File" plugin properties
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "AddField"
    And Enter input plugin property: "addFieldFieldName" with value: "afFieldName"
    And Click on the Macro button of Property: "addFieldFieldValue" and set the value to: "fieldvalue"
    And Select dropdown plugin property: "addFieldGenerateUUID" with option value: "false"
    And Validate "AddField" plugin properties
    And Validate output schema with expectedSchema "csvAllDataTypeFileSchemaAddField"
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "BigQuery"
    And Replace input plugin property: "projectId" with value: "projectId"
    And Enter input plugin property: "datasetProjectId" with value: "projectId"
    Then Override Service account details if set in environment variables
    And Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Enter input plugin property: "dataset" with value: "dataset"
    And Enter input plugin property: "table" with value: "bqTargetTable"
    And Click plugin property: "truncateTable"
    And Click plugin property: "updateTableSchema"
    And Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    And Save the pipeline
    And Preview and run the pipeline
    And Enter runtime argument value "afFieldValue" for key "fieldvalue"
    And Run the preview of pipeline with runtime arguments
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    And Deploy the pipeline
    And Run the Pipeline in Runtime
    And Enter runtime argument value "afFieldValue" for key "fieldvalue"
    And Run the Pipeline in Runtime with runtime arguments
    And Wait till pipeline is in running state
    And Open and capture logs
    And Verify the pipeline status is "Succeeded"
    Then Close the pipeline logs
    Then Validate OUT record count is equal to IN record count
    Then Verify column: "afFieldName" is added in target BigQuery table: "bqTargetTable"
Review comment: can we also verify that the data stored in the new column is correct?
New file under src/e2e-test/features (AddField run time scenarios):

@@ -0,0 +1,102 @@
@Add_Field
Feature: AddField Plugin - Run time scenarios

  @BQ_SINK_TEST @FILE_SOURCE_TEST
  Scenario: Verify add field plugin functionality by setting field value using File to BigQuery pipeline
    Given Open Datafusion Project to configure pipeline
    And Select plugin: "File" from the plugins list as: "Source"
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Expand Plugin group in the LHS plugins list: "Sink"
    And Select plugin: "BigQuery" from the plugins list as: "Sink"
    And Connect plugins: "File" and "AddField" to establish connection
    And Connect plugins: "AddField" and "BigQuery" to establish connection
    And Navigate to the properties page of plugin: "File"
    And Enter input plugin property: "referenceName" with value: "FileReferenceName"
    And Enter input plugin property: "path" with value: "csvAllDataTypeFile"
    And Select dropdown plugin property: "format" with option value: "csv"
    And Click plugin property: "skipHeader"
    And Click on the Get Schema button
    And Validate "File" plugin properties
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "AddField"
    And Enter input plugin property: "addFieldFieldName" with value: "afFieldName"
    And Enter input plugin property: "addFieldFieldValue" with value: "afFieldValue"
    And Validate "AddField" plugin properties
    And Validate output schema with expectedSchema "csvAllDataTypeFileSchemaAddField"
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "BigQuery"
    And Replace input plugin property: "projectId" with value: "projectId"
    And Enter input plugin property: "datasetProjectId" with value: "projectId"
    Then Override Service account details if set in environment variables
    And Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Enter input plugin property: "dataset" with value: "dataset"
    And Enter input plugin property: "table" with value: "bqTargetTable"
    And Click plugin property: "truncateTable"
    And Click plugin property: "updateTableSchema"
    And Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    And Save the pipeline
    And Preview and run the pipeline
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    And Save and Deploy Pipeline
    And Run the Pipeline in Runtime
    And Wait till pipeline is in running state
    And Open and capture logs
    And Verify the pipeline status is "Succeeded"
    Then Validate OUT record count is equal to IN record count
    Then Verify column: "afFieldName" is added in target BigQuery table: "bqTargetTable"

  @BQ_SINK_TEST @FILE_SOURCE_TEST
  Scenario: Verify add field plugin functionality by setting generate uuid as value to true using File to BigQuery pipeline
    Given Open Datafusion Project to configure pipeline
    And Select plugin: "File" from the plugins list as: "Source"
    And Expand Plugin group in the LHS plugins list: "Transform"
    And Select plugin: "Add Field" from the plugins list as: "Transform"
    And Expand Plugin group in the LHS plugins list: "Sink"
    And Select plugin: "BigQuery" from the plugins list as: "Sink"
    And Connect plugins: "File" and "AddField" to establish connection
    And Connect plugins: "AddField" and "BigQuery" to establish connection
    And Navigate to the properties page of plugin: "File"
    And Enter input plugin property: "referenceName" with value: "FileReferenceName"
    And Enter input plugin property: "path" with value: "csvAllDataTypeFile"
    And Select dropdown plugin property: "format" with option value: "csv"
    And Click plugin property: "skipHeader"
    And Click on the Get Schema button
    And Validate "File" plugin properties
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "AddField"
    And Enter input plugin property: "addFieldFieldName" with value: "afFieldName"
    And Select dropdown plugin property: "addFieldGenerateUUID" with option value: "true"
    And Validate "AddField" plugin properties
    And Validate output schema with expectedSchema "csvAllDataTypeFileSchemaAddField"
    And Close the Plugin Properties page
    And Navigate to the properties page of plugin: "BigQuery"
    And Replace input plugin property: "projectId" with value: "projectId"
    And Enter input plugin property: "datasetProjectId" with value: "projectId"
    Then Override Service account details if set in environment variables
    And Enter input plugin property: "referenceName" with value: "BQReferenceName"
    And Enter input plugin property: "dataset" with value: "dataset"
    And Enter input plugin property: "table" with value: "bqTargetTable"
    And Click plugin property: "truncateTable"
    And Click plugin property: "updateTableSchema"
    And Validate "BigQuery" plugin properties
    And Close the Plugin Properties page
    And Save the pipeline
    And Preview and run the pipeline
    Then Wait till pipeline preview is in running state
    Then Open and capture pipeline preview logs
    Then Verify the preview run status of pipeline in the logs is "succeeded"
    Then Close the pipeline logs
    Then Close the preview
    And Save and Deploy Pipeline
    And Run the Pipeline in Runtime
    And Wait till pipeline is in running state
    And Open and capture logs
    And Verify the pipeline status is "Succeeded"
    Then Validate OUT record count is equal to IN record count
    Then Verify column: "afFieldName" is added in target BigQuery table: "bqTargetTable"
New file: src/e2e-test/java/io/cdap/plugin/addfield/runners/TestRunner.java

@@ -0,0 +1,36 @@
/*
 * Copyright © 2022 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.cdap.plugin.addfield.runners;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute add-field plugin testcases.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
  features = {"src/e2e-test/features"},
  glue = {"io.cdap.plugin.addfield.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
  tags = {"@Add_Field"},
  monochrome = true,
  plugin = {"pretty", "html:target/cucumber-html-report/add-field",
    "json:target/cucumber-reports/cucumber-add-field.json",
    "junit:target/cucumber-reports/cucumber-add-field.xml"}
)
public class TestRunner {
}
Review comment: why is this change required?
Author reply: It was changed to the latest version because the build was failing with the older versions.