diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index b6179d1e..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,119 +0,0 @@ -version: 2.1 - -orbs: - azure-cli: circleci/azure-cli@1.1.0 - -jobs: - integration-redshift: - docker: - - image: cimg/python:3.9.9 - steps: - - checkout - - run: - name: "Run Tests - Redshift" - command: ./run_test.sh redshift - - store_artifacts: &artifacts-path - path: integration_tests/logs - - integration-snowflake: - docker: - - image: cimg/python:3.9.9 - steps: - - checkout - - run: - name: "Run Tests - Snowflake" - command: ./run_test.sh snowflake - - store_artifacts: *artifacts-path - - integration-bigquery: - environment: - BIGQUERY_SERVICE_KEY_PATH: "/home/circleci/bigquery-service-key.json" - docker: - - image: cimg/python:3.9.9 - steps: - - checkout - - run: - name: "Set up credentials" - command: echo $BIGQUERY_SERVICE_ACCOUNT_JSON > ${HOME}/bigquery-service-key.json - - run: - name: "Run Tests - BigQuery" - command: ./run_test.sh bigquery - - store_artifacts: *artifacts-path - - integration-databricks: - environment: - ODBC_DRIVER: Simba - docker: - # image based on `fishtownanalytics/test-container` w/ Simba ODBC Spark driver installed - - image: 828731156495.dkr.ecr.us-east-1.amazonaws.com/dbt-spark-odbc-test-container:latest - aws_auth: - aws_access_key_id: $AWS_ACCESS_KEY_ID_STAGING - aws_secret_access_key: $AWS_SECRET_ACCESS_KEY_STAGING - steps: - - checkout - - run: - name: "Run Tests - Databricks" - command: ./run_test.sh databricks - - store_artifacts: *artifacts-path - - # DISABLED FOR NOW - integration-synapse: - docker: - - image: dataders/pyodbc:1.2 - steps: - - checkout - - run: &gnupg2 - name: az cli dep - command: apt-get install gnupg2 -y - - azure-cli/install - - azure-cli/login-with-service-principal: &azure-creds - azure-sp: DBT_AZURE_SP_NAME - azure-sp-password: DBT_AZURE_SP_SECRET - azure-sp-tenant: DBT_AZURE_TENANT - - run: - name: resume Synapse pool/db - command: 
az synapse sql pool resume --name $DBT_SYNAPSE_DB --workspace-name $DBT_SYNAPSE_SERVER --resource-group dbt-msft - - run: - name: "Run Tests - synapse" - command: ./run_test.sh synapse - - run: - name: pause Synapse pool/db - command: az synapse sql pool resume --name $DBT_SYNAPSE_DB --workspace-name $DBT_SYNAPSE_SERVER --resource-group dbt-msft - - store_artifacts: - path: ./logs - - # DISABLED FOR NOW - integration-azuresql: - docker: - - image: dataders/pyodbc:1.2 - steps: - - checkout - - run: *gnupg2 - - azure-cli/install - - azure-cli/login-with-service-principal: *azure-creds - - run: - name: "Run Tests - azuresql" - command: ./run_test.sh azuresql - - store_artifacts: - path: ./logs - -workflows: - version: 2 - test-all: - jobs: - - integration-redshift: - context: profile-redshift - - integration-snowflake: - context: profile-snowflake - - integration-bigquery: - context: profile-bigquery - # - integration-databricks: - # context: - # - aws-credentials - # - profile-databricks - #- integration-synapse: - # context: profile-synapse - #- integration-azuresql: - # context: profile-azure - # requires: - # - integration-synapse diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 00000000..55a865a4 --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,60 @@ +name: Integration Testing + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + build: + + runs-on: ubuntu-latest + environment: + name: ci_testing + strategy: + fail-fast: true + max-parallel: 3 + matrix: + python-version: [ "3.11"] # "3.10", "3.12"] + dbt-version: ["1.7.0"] # "1.6.0", , "1.8.0b1"] + data-platform: ["redshift", "snowflake", "bigquery"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dbt-${{ matrix.data-platform }}~=${{ 
matrix.dbt-version }} + run: | + python -m pip install --upgrade pip + python -m pip install "dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}" "dbt-core~=${{ matrix.dbt-version }}" + - name: run integration_tests project on ${{ matrix.data-platform }} + run: | + cd integration_tests + export DBT_PROFILES_DIR=. + dbt deps --target ${{ matrix.data-platform }} + dbt seed --full-refresh --target ${{ matrix.data-platform }} + dbt run-operation prep_external --target ${{ matrix.data-platform }} + dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }} + dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }} + dbt test --target ${{ matrix.data-platform }} + env: + REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }} + REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }} + REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }} + REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }} + REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }} + REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }} + SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }} + SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }} + SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }} + SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }} + SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }} + SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }} + BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }} + BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }} + BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }} + BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }} + BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 273c5ba3..f986a117 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ **/logs/ **/env/ **/venv/ 
+**/test.env diff --git a/integration_tests/macros/plugins/redshift/prep_external.sql b/integration_tests/macros/plugins/redshift/prep_external.sql index b3a36249..40a77676 100644 --- a/integration_tests/macros/plugins/redshift/prep_external.sql +++ b/integration_tests/macros/plugins/redshift/prep_external.sql @@ -8,7 +8,7 @@ {{ external_schema }} from data catalog database '{{ external_schema }}' - iam_role '{{ env_var("SPECTRUM_IAM_ROLE", "") }}' + iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}' create external database if not exists; {% endset %} diff --git a/integration_tests/ci/sample.profiles.yml b/integration_tests/profiles.yml similarity index 54% rename from integration_tests/ci/sample.profiles.yml rename to integration_tests/profiles.yml index 0bd21272..1c366bc9 100644 --- a/integration_tests/ci/sample.profiles.yml +++ b/integration_tests/profiles.yml @@ -24,18 +24,28 @@ integration_tests: type: snowflake account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}" user: "{{ env_var('SNOWFLAKE_TEST_USER') }}" - password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}" + password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}" role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}" - database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}" - warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}" + database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}" + warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}" schema: dbt_external_tables_integration_tests_snowflake threads: 1 bigquery: type: bigquery - method: service-account - keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}" - project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}" + method: service-account-json + keyfile_json: + type: "service_account" + project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}" + private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}" + private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}" + client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}" + client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}" + auth_uri: 
"https://accounts.google.com/o/oauth2/auth" + token_uri: "https://oauth2.googleapis.com/token" + auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs" + client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}" + project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}" schema: dbt_external_tables_integration_tests_bigquery threads: 1 @@ -44,17 +54,17 @@ integration_tests: method: odbc port: 443 driver: "{{ env_var('ODBC_DRIVER') }}" - host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}" - endpoint: "{{ env_var('DBT_DATABRICKS_ENDPOINT') }}" - token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}" + host: "{{ env_var('DATABRICKS_TEST_HOST') }}" + endpoint: "{{ env_var('DATBRICKS_TEST_ENDPOINT') }}" + token: "{{ env_var('DATABRICKS_TOKEN') }}" schema: dbt_external_tables_integration_tests_databricks synapse: type: synapse driver: "ODBC Driver 17 for SQL Server" port: 1433 - host: "{{ env_var('DBT_SYNAPSE_SERVER') }}.sql.azuresynapse.net" - database: "{{ env_var('DBT_SYNAPSE_DB') }}" + host: "{{ env_var('SYNAPSE_TEST_SERVER') }}.sql.azuresynapse.net" + database: "{{ env_var('SYNAPSE_TEST_DBNAME') }}" authentication: CLI schema: dbt_external_tables_integration_tests_synapse threads: 1 @@ -63,8 +73,8 @@ integration_tests: type: sqlserver driver: "ODBC Driver 17 for SQL Server" port: 1433 - host: "{{ env_var('DBT_AZURESQL_SERVER') }}" - database: "{{ env_var('DBT_AZURESQL_DB') }}" + host: "{{ env_var('AZURESQL_TEST_SERVER') }}" + database: "{{ env_var('AZURESQL_TEST_DBNAME') }}" authentication: CLI schema: dbt_external_tables_integration_tests_azuresql threads: 1 diff --git a/integration_tests/test.env.sample b/integration_tests/test.env.sample new file mode 100644 index 00000000..d510d187 --- /dev/null +++ b/integration_tests/test.env.sample @@ -0,0 +1,41 @@ +# gh secret set -f integration_tests/test.env -e ci_testing + +# redshift +REDSHIFT_TEST_HOST= +REDSHIFT_TEST_USER= +REDSHIFT_TEST_PASS= 
+REDSHIFT_TEST_DBNAME= +REDSHIFT_TEST_PORT= +REDSHIFT_SPECTRUM_IAM_ROLE= + +# snowflake + +SNOWFLAKE_TEST_ACCOUNT= +SNOWFLAKE_TEST_USER= +SNOWFLAKE_TEST_PASS= +SNOWFLAKE_TEST_ROLE= +SNOWFLAKE_TEST_DBNAME= +SNOWFLAKE_TEST_WHNAME= + +# bigquery +BIGQUERY_PRIVATE_KEY= +BIGQUERY_PRIVATE_KEY_ID= +BIGQUERY_CLIENT_EMAIL= +BIGQUERY_CLIENT_ID= +BIGQUERY_TEST_PROJECT= + +# databricks +DATABRICKS_TEST_HOST= +DATBRICKS_TEST_ENDPOINT= +DATABRICKS_TOKEN= + +# msft +SYNAPSE_TEST_SERVER= +SYNAPSE_TEST_DBNAME= +SYNAPSE_TEST_USER= +SYNAPSE_TEST_PASS= + +AZURESQL_TEST_SERVER= +AZURESQL_TEST_DBNAME= +AZURESQL_TEST_USER= +AZURESQL_TEST_PASS=