diff --git a/src/e2e-test/features/bigquery/sink/GCSToBigQuery.feature b/src/e2e-test/features/bigquery/sink/GCSToBigQuery.feature
index 2ba5e7e24e..c68b962489 100644
--- a/src/e2e-test/features/bigquery/sink/GCSToBigQuery.feature
+++ b/src/e2e-test/features/bigquery/sink/GCSToBigQuery.feature
@@ -26,7 +26,10 @@ Feature: BigQuery sink - Verification of GCS to BigQuery successful data transfe
     Then Connect source as "GCS" and sink as "BigQuery" to establish connection
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Verify the preview of pipeline is "success"
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
     Then Close the preview
     Then Deploy the pipeline
     Then Run the Pipeline in Runtime
diff --git a/src/e2e-test/features/bigquery/sink/GCSToBigQuery_WithMacro.feature b/src/e2e-test/features/bigquery/sink/GCSToBigQuery_WithMacro.feature
index b12e6480fb..c18b000444 100644
--- a/src/e2e-test/features/bigquery/sink/GCSToBigQuery_WithMacro.feature
+++ b/src/e2e-test/features/bigquery/sink/GCSToBigQuery_WithMacro.feature
@@ -50,7 +50,10 @@ Feature: BigQuery sink - Verification of GCS to BigQuery successful data transfe
     Then Enter runtime argument value "bqTruncateTableTrue" for key "bqTruncateTable"
     Then Enter runtime argument value "bqUpdateTableSchemaTrue" for key "bqUpdateTableSchema"
     Then Run the preview of pipeline with runtime arguments
-    Then Verify the preview of pipeline is "success"
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
     Then Close the preview
     Then Deploy the pipeline
     Then Run the Pipeline in Runtime
@@ -74,3 +77,82 @@ Feature: BigQuery sink - Verification of GCS to BigQuery successful data transfe
     Then Verify the pipeline status is "Succeeded"
     Then Get count of no of records transferred to target BigQuery Table
     Then Validate the cmek key "cmekBQ" of target BigQuery table if cmek is enabled
+
+  @GCS_CSV_TEST @BQ_SINK_TEST @SERVICE_ACCOUNT_JSON_TEST
+  Scenario: Validate successful records transfer from GCS to BigQuery with macro arguments - Service account type as Json
+    Given Open Datafusion Project to configure pipeline
+    When Source is GCS
+    When Sink is BigQuery
+    Then Open GCS source properties
+    Then Enter GCS property reference name
+    Then Enter GCS property "projectId" as macro argument "gcsProjectId"
+    Then Enter GCS property "serviceAccountType" as macro argument "gcsServiceAccountType"
+    Then Enter GCS property "serviceAccountJSON" as macro argument "gcsServiceAccountJSON"
+    Then Enter GCS property "path" as macro argument "gcsSourcePath"
+    Then Enter GCS source property "skipHeader" as macro argument "gcsSkipHeader"
+    Then Enter GCS property "format" as macro argument "gcsFormat"
+    Then Enter GCS source property output schema "outputSchema" as macro argument "gcsOutputSchema"
+    Then Validate "GCS" plugin properties
+    Then Close the GCS properties
+    Then Open BigQuery sink properties
+    Then Enter BigQuery property reference name
+    Then Enter BigQuery property "projectId" as macro argument "bqProjectId"
+    Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId"
+    Then Enter BigQuery property "serviceAccountType" as macro argument "bqServiceAccountType"
+    Then Enter BigQuery property "serviceAccountJSON" as macro argument "bqServiceAccountJSON"
argument "bqServiceAccountJSON" + Then Enter BigQuery property "dataset" as macro argument "bqDataset" + Then Enter BigQuery property "table" as macro argument "bqTargetTable" + Then Enter BigQuery cmek property "encryptionKeyName" as macro argument "cmekBQ" if cmek is enabled + Then Enter BigQuery sink property "truncateTable" as macro argument "bqTruncateTable" + Then Enter BigQuery sink property "updateTableSchema" as macro argument "bqUpdateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the BigQuery properties + Then Connect source as "GCS" and sink as "BigQuery" to establish connection + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "gcsServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "gcsServiceAccountJSON" + Then Enter runtime argument value "gcsCsvFile" for GCS source property path key "gcsSourcePath" + Then Enter runtime argument value "gcsSkipHeaderTrue" for key "gcsSkipHeader" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "gcsCSVFileOutputSchema" for key "gcsOutputSchema" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "bqServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "bqServiceAccountJSON" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery sink table name key "bqTargetTable" + Then Enter runtime argument value "cmekBQ" for BigQuery cmek property key "cmekBQ" if BQ cmek is enabled + Then Enter runtime argument value "bqTruncateTableTrue" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchemaTrue" for key "bqUpdateTableSchema" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "gcsServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "gcsServiceAccountJSON" + Then Enter runtime argument value "gcsCsvFile" for GCS source property path key "gcsSourcePath" + Then Enter runtime argument value "gcsSkipHeaderTrue" for key "gcsSkipHeader" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "gcsCSVFileOutputSchema" for key "gcsOutputSchema" + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "bqServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "bqServiceAccountJSON" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery sink table name key "bqTargetTable" + Then Enter runtime argument value "cmekBQ" for BigQuery cmek property key "cmekBQ" if BQ cmek is enabled + 
Then Enter runtime argument value "bqTruncateTableTrue" for key "bqTruncateTable" + Then Enter runtime argument value "bqUpdateTableSchemaTrue" for key "bqUpdateTableSchema" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Get count of no of records transferred to target BigQuery Table + Then Validate the cmek key "cmekBQ" of target BigQuery table if cmek is enabled diff --git a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature index d5c9aafbc2..ce0413fbe0 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToBigQuery.feature @@ -25,7 +25,10 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Connect source as "BigQuery" and sink as "BigQuery" to establish connection Then Save the pipeline Then Preview and run the pipeline - Then Verify the preview of pipeline is "success" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs Then Click on preview data for BigQuery sink Then Verify preview output schema matches the outputSchema captured in properties Then Close the preview data @@ -61,7 +64,9 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Connect source as "BigQuery" and sink as "BigQuery" to establish connection Then Save the pipeline Then Preview and run the pipeline - Then Verify the preview of pipeline is "failed" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "failed" @BQ_PARTITIONED_SOURCE_TEST @BQ_SINK_TEST Scenario: Verify records are getting transferred with respect to partitioned date @@ -87,7 +92,10 @@ Feature: BigQuery source - Verification of BigQuery to BigQuery successful data Then Connect source as "BigQuery" and sink as "BigQuery" to establish connection Then Save the pipeline Then Preview and run the pipeline - Then Verify the preview of pipeline is "success" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs Then Click on preview data for BigQuery sink Then Verify preview output schema matches the outputSchema captured in properties Then Close the preview data diff --git a/src/e2e-test/features/bigquery/source/BigQueryToGCS.feature b/src/e2e-test/features/bigquery/source/BigQueryToGCS.feature index c7a1bed32f..77ce5e6303 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToGCS.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToGCS.feature @@ -96,7 +96,10 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans Then Connect source as "BigQuery" and sink as "GCS" to establish connection Then Save the pipeline Then Preview and run the pipeline - Then Verify the preview of pipeline is "success" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs Then Click on preview data for GCS sink Then 
     Then Verify preview output schema matches the outputSchema captured in properties
     Then Close the preview data
diff --git a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
index 2c52ac9ba0..4383e4d839 100644
--- a/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
+++ b/src/e2e-test/features/bigquery/source/BigQueryToGCS_WithMacro.feature
@@ -44,7 +44,10 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
     Then Enter runtime argument value "csvFormat" for key "gcsFormat"
     Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled
     Then Run the preview of pipeline with runtime arguments
-    Then Verify the preview of pipeline is "success"
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
     Then Click on preview data for GCS sink
     Then Close the preview data
     Then Deploy the pipeline
@@ -66,3 +69,74 @@ Feature: BigQuery source - Verification of BigQuery to GCS successful data trans
     Then Verify the pipeline status is "Succeeded"
     Then Verify data is transferred to target GCS bucket
     Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled
+
+  @BQ_SOURCE_TEST @GCS_SINK_TEST @SERVICE_ACCOUNT_JSON_TEST
+  Scenario: Validate successful records transfer from BigQuery to GCS with macro arguments - Service account type as Json
+    Given Open Datafusion Project to configure pipeline
+    When Source is BigQuery
+    When Sink is GCS
+    Then Open BigQuery source properties
+    Then Enter BigQuery property reference name
+    Then Enter BigQuery property "projectId" as macro argument "bqProjectId"
+    Then Enter BigQuery property "datasetProjectId" as macro argument "bqDatasetProjectId"
+    Then Enter BigQuery property "serviceAccountType" as macro argument "bqServiceAccountType"
+    Then Enter BigQuery property "serviceAccountJSON" as macro argument "bqServiceAccountJSON"
+    Then Enter BigQuery property "dataset" as macro argument "bqDataset"
+    Then Enter BigQuery property "table" as macro argument "bqSourceTable"
+    Then Validate "BigQuery" plugin properties
+    Then Close the BigQuery properties
+    Then Open GCS sink properties
+    Then Enter GCS property reference name
+    Then Enter GCS property "projectId" as macro argument "gcsProjectId"
+    Then Enter GCS property "serviceAccountType" as macro argument "gcsServiceAccountType"
+    Then Enter GCS property "serviceAccountJSON" as macro argument "gcsServiceAccountJSON"
+    Then Enter GCS property "path" as macro argument "gcsSinkPath"
+    Then Enter GCS sink property "pathSuffix" as macro argument "gcsPathSuffix"
+    Then Enter GCS property "format" as macro argument "gcsFormat"
+    Then Enter GCS sink cmek property "encryptionKeyName" as macro argument "cmekGCS" if cmek is enabled
+    Then Validate "GCS" plugin properties
+    Then Close the GCS properties
+    Then Connect source as "BigQuery" and sink as "GCS" to establish connection
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "projectId" for key "bqProjectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+    Then Enter runtime argument value "serviceAccountTypeJSON" for key "bqServiceAccountType"
+    Then Enter runtime argument value "serviceAccountJSON" for key "bqServiceAccountJSON"
+    Then Enter runtime argument value "dataset" for key "bqDataset"
"dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "gcsServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "gcsServiceAccountJSON" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Click on preview data for GCS sink + Then Close the preview data + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "projectId" for key "bqProjectId" + Then Enter runtime argument value "projectId" for key "bqDatasetProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "bqServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "bqServiceAccountJSON" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value for BigQuery source table name key "bqSourceTable" + Then Enter runtime argument value "projectId" for key "gcsProjectId" + Then Enter runtime argument value "serviceAccountTypeJSON" for key "gcsServiceAccountType" + Then Enter runtime argument value "serviceAccountJSON" for key "gcsServiceAccountJSON" + Then Enter runtime argument value for GCS sink property path key "gcsSinkPath" + Then Enter runtime argument value "gcsPathDateSuffix" for key "gcsPathSuffix" + Then Enter runtime argument value "csvFormat" for key "gcsFormat" + Then Enter runtime argument value "cmekGCS" for GCS cmek property key "cmekGCS" if GCS cmek is enabled + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Verify data is transferred to target GCS bucket + Then Validate the cmek key "cmekGCS" of target GCS bucket if cmek is enabled diff --git a/src/e2e-test/features/bigquery/source/BigQueryToMultipleSinks.feature b/src/e2e-test/features/bigquery/source/BigQueryToMultipleSinks.feature index cdfc0178de..8494b62212 100644 --- a/src/e2e-test/features/bigquery/source/BigQueryToMultipleSinks.feature +++ b/src/e2e-test/features/bigquery/source/BigQueryToMultipleSinks.feature @@ -36,7 +36,10 @@ Feature: BigQuery source - Verification of BigQuery to Multiple sinks successful Then Close the PubSub properties Then Save the pipeline Then Preview and run the pipeline - Then Verify the preview of pipeline is "success" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime diff --git a/src/e2e-test/features/gcs/sink/GCSSink.feature b/src/e2e-test/features/gcs/sink/GCSSink.feature index b0f0794332..00030ae66f 100644 --- 
+++ b/src/e2e-test/features/gcs/sink/GCSSink.feature
@@ -22,7 +22,10 @@ Feature: GCS sink - Verification of GCS Sink plugin
     Then Close the GCS properties
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Verify the preview of pipeline is "success"
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
     Then Close the preview
     Then Deploy the pipeline
     Then Run the Pipeline in Runtime
diff --git a/src/e2e-test/features/gcs/source/GCSSourceToBigQuery.feature b/src/e2e-test/features/gcs/source/GCSSourceToBigQuery.feature
index 3b2a7b8360..8c1ffb75a3 100644
--- a/src/e2e-test/features/gcs/source/GCSSourceToBigQuery.feature
+++ b/src/e2e-test/features/gcs/source/GCSSourceToBigQuery.feature
@@ -23,7 +23,10 @@ Feature: GCS source - Verification of GCS to BQ successful data transfer
     Then Close the BigQuery properties
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Verify the preview of pipeline is "success"
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
     Then Click on preview data for BigQuery sink
     Then Verify preview output schema matches the outputSchema captured in properties
     Then Close the preview data
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index 78e99db475..50c0d6857e 100644
--- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -21,6 +21,7 @@
 import io.cdap.e2e.pages.actions.CdfConnectionActions;
 import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions;
 import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.GCPServiceAccountClient;
 import io.cdap.e2e.utils.PluginPropertyUtils;
 import io.cdap.e2e.utils.StorageClient;
 import io.cdap.plugin.utils.PubSubClient;
@@ -38,11 +39,13 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.security.GeneralSecurityException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
+import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.UUID;
 import java.util.concurrent.ExecutionException;
@@ -66,6 +69,7 @@ public class TestSetupHooks {
   public static String spannerTargetDatabase = StringUtils.EMPTY;
   public static String spannerTargetTable = StringUtils.EMPTY;
   public static boolean firstSpannerTestFlag = true;
+  public static String serviceAccountKeyName = StringUtils.EMPTY;
 
   @Before(order = 1)
   public static void overrideServiceAccountFilePathIfProvided() {
@@ -673,4 +677,35 @@ private static void deleteConnection(String connectionType, String connectionNam
     CdfConnectionActions.selectConnectionAction(connectionType, connectionName, "Delete");
     CdfPluginPropertiesActions.clickPluginPropertyButton("Delete");
   }
+
+  @Before(order = 1, value = "@SERVICE_ACCOUNT_JSON_TEST")
+  public static void createServiceAccountKey() {
+    try {
+      Map<String, String> serviceAccountKeyDetails = GCPServiceAccountClient
+        .createServiceAccountKey(PluginPropertyUtils.pluginProp("projectId"),
PluginPropertyUtils.pluginProp("serviceAccountName")); + serviceAccountKeyName = serviceAccountKeyDetails.get("KeyName"); + PluginPropertyUtils + .addPluginProp("serviceAccountJSON", + serviceAccountKeyDetails.get("JsonKeyFile").replaceAll("[\\n\\t]", "")); + } catch (GeneralSecurityException | IOException e) { + Assert.fail("Unable to create service account key: \n" + e.toString()); + } + } + + @After(order = 1, value = "@SERVICE_ACCOUNT_JSON_TEST") + public static void deleteServiceAccountKey() { + try { + GCPServiceAccountClient.deleteServiceAccountKey(serviceAccountKeyName); + serviceAccountKeyName = StringUtils.EMPTY; + PluginPropertyUtils.removePluginProp("serviceAccountJSON"); + BeforeActions.scenario.write("Deleted service account key " + serviceAccountKeyName); + } catch (GeneralSecurityException | IOException e) { + if (e.toString().contains("does not exist")) { + BeforeActions.scenario.write("Service account key " + serviceAccountKeyName + " does not exist."); + } else { + Assert.fail(e.toString()); + } + } + } } diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties index e43b02c8a4..b1abcefde2 100644 --- a/src/e2e-test/resources/pluginParameters.properties +++ b/src/e2e-test/resources/pluginParameters.properties @@ -2,6 +2,9 @@ projectId=cdf-athena dataset=test_automation serviceAccountType=filePath serviceAccount=auto-detect +serviceAccountAutoDetect=auto-detect +serviceAccountTypeJSON=JSON +serviceAccountName=github-action csvFormat=csv ## GCS-PLUGIN-PROPERTIES-START
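
Note on the new @SERVICE_ACCOUNT_JSON_TEST hooks: the patch shows only how TestSetupHooks drives GCPServiceAccountClient (create a key, cache its name and JSON for the scenario, delete it in teardown); the client itself lives in io.cdap.e2e.utils and its exact behavior is not part of this diff. Below is a minimal, self-contained sketch of that lifecycle with the client stubbed out. The stub class, its bodies, and the printed output are illustrative assumptions; only the call shapes (a Map with "KeyName"/"JsonKeyFile" entries, delete-by-key-name) are taken from the hook code above.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

public class ServiceAccountKeyLifecycleSketch {

  // Hypothetical stand-in for io.cdap.e2e.utils.GCPServiceAccountClient.
  // A real client would call the GCP IAM API; this stub fabricates values.
  static class StubServiceAccountClient {
    static Map<String, String> createServiceAccountKey(String projectId, String serviceAccountName) {
      Map<String, String> details = new HashMap<>();
      details.put("KeyName", "projects/" + projectId + "/serviceAccounts/"
        + serviceAccountName + "/keys/" + UUID.randomUUID());
      // Embedded newline/tab mimic a pretty-printed key file, so the
      // replaceAll("[\n\t]") cleanup below has something to strip.
      details.put("JsonKeyFile",
        "{\"type\": \"service_account\",\n\t\"project_id\": \"" + projectId + "\"}");
      return details;
    }

    static void deleteServiceAccountKey(String keyName) {
      // IAM delete call would go here.
    }
  }

  private static String serviceAccountKeyName = "";

  // Mirrors the @Before hook: create the key, remember its name for teardown,
  // and flatten the JSON to one line before handing it to the plugin property.
  static void createKey(String projectId, String serviceAccountName) {
    Map<String, String> details =
      StubServiceAccountClient.createServiceAccountKey(projectId, serviceAccountName);
    serviceAccountKeyName = details.get("KeyName");
    String serviceAccountJson = details.get("JsonKeyFile").replaceAll("[\\n\\t]", "");
    System.out.println("serviceAccountJSON=" + serviceAccountJson);
  }

  // Mirrors the @After hook: delete the key, log while the name is still set,
  // then clear the cached name so a later teardown cannot reuse it.
  static void deleteKey() {
    StubServiceAccountClient.deleteServiceAccountKey(serviceAccountKeyName);
    System.out.println("Deleted service account key " + serviceAccountKeyName);
    serviceAccountKeyName = "";
  }

  public static void main(String[] args) {
    createKey("cdf-athena", "github-action");
    deleteKey();
  }
}
```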