Skip to content

Commit

Permalink
Merge branch 'main' into bigquery-table-expiry
Browse files Browse the repository at this point in the history
  • Loading branch information
dataders authored Apr 4, 2024
2 parents 112794f + 93f0711 commit 272fcf8
Show file tree
Hide file tree
Showing 6 changed files with 126 additions and 133 deletions.
119 changes: 0 additions & 119 deletions .circleci/config.yml

This file was deleted.

60 changes: 60 additions & 0 deletions .github/workflows/integration_tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# GitHub Actions workflow: runs the dbt integration_tests project against each
# warehouse in the matrix on every push/PR to main.
name: Integration Testing

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

jobs:
  build:
    runs-on: ubuntu-latest
    # Secrets below are scoped to the "ci_testing" environment.
    environment:
      name: ci_testing
    strategy:
      fail-fast: true
      max-parallel: 3
      matrix:
        # Versions are quoted so YAML does not parse them as floats
        # (an unquoted 3.10 would be read as the number 3.1).
        python-version: ["3.11"]  # TODO: re-enable "3.10", "3.12"
        dbt-version: ["1.7.0"]  # TODO: re-enable "1.6.0", "1.8.0b1"
        data-platform: ["redshift", "snowflake", "bigquery"]

    steps:
      # NOTE(review): checkout/setup-python v3 run on deprecated Node 16;
      # consider upgrading to v4 — confirm against the actions' changelogs.
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}
        run: |
          python -m pip install --upgrade pip
          python -m pip install "dbt-${{ matrix.data-platform }}~=${{ matrix.dbt-version }}" "dbt-core~=${{ matrix.dbt-version }}"
      - name: run integration_tests project on ${{ matrix.data-platform }}
        run: |
          cd integration_tests
          export DBT_PROFILES_DIR=.
          dbt deps --target ${{ matrix.data-platform }}
          dbt seed --full-refresh --target ${{ matrix.data-platform }}
          dbt run-operation prep_external --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
          dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
          dbt test --target ${{ matrix.data-platform }}
        # Credentials consumed by integration_tests/profiles.yml via env_var().
        env:
          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
          REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
          SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
          SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
          SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
          BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
          BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
          BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
          BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
          BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,4 @@
**/logs/
**/env/
**/venv/
**/test.env
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
{{ external_schema }}
from data catalog
database '{{ external_schema }}'
iam_role '{{ env_var("SPECTRUM_IAM_ROLE", "") }}'
iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}'
create external database if not exists;

{% endset %}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,18 +24,28 @@ integration_tests:
type: snowflake
account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}"
password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}"
warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}"
database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
schema: dbt_external_tables_integration_tests_snowflake
threads: 1

bigquery:
type: bigquery
method: service-account
keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}"
project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}"
method: service-account-json
keyfile_json:
type: "service_account"
project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
auth_uri: "https://accounts.google.com/o/oauth2/auth"
token_uri: "https://oauth2.googleapis.com/token"
auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
      client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
schema: dbt_external_tables_integration_tests_bigquery
threads: 1

Expand All @@ -44,17 +54,17 @@ integration_tests:
method: odbc
port: 443
driver: "{{ env_var('ODBC_DRIVER') }}"
host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}"
endpoint: "{{ env_var('DBT_DATABRICKS_ENDPOINT') }}"
token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}"
host: "{{ env_var('DATABRICKS_TEST_HOST') }}"
endpoint: "{{ env_var('DATBRICKS_TEST_ENDPOINT') }}"
token: "{{ env_var('DATABRICKS_TOKEN') }}"
schema: dbt_external_tables_integration_tests_databricks

synapse:
type: synapse
driver: "ODBC Driver 17 for SQL Server"
port: 1433
host: "{{ env_var('DBT_SYNAPSE_SERVER') }}.sql.azuresynapse.net"
database: "{{ env_var('DBT_SYNAPSE_DB') }}"
host: "{{ env_var('SYNAPSE_TEST_SERVER') }}.sql.azuresynapse.net"
database: "{{ env_var('SYNAPSE_TEST_DBNAME') }}"
authentication: CLI
schema: dbt_external_tables_integration_tests_synapse
threads: 1
Expand All @@ -63,8 +73,8 @@ integration_tests:
type: sqlserver
driver: "ODBC Driver 17 for SQL Server"
port: 1433
host: "{{ env_var('DBT_AZURESQL_SERVER') }}"
database: "{{ env_var('DBT_AZURESQL_DB') }}"
host: "{{ env_var('AZURESQL_TEST_SERVER') }}"
database: "{{ env_var('AZURESQL_TEST_DBNAME') }}"
authentication: CLI
schema: dbt_external_tables_integration_tests_azuresql
threads: 1
41 changes: 41 additions & 0 deletions integration_tests/test.env.sample
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=
REDSHIFT_TEST_PASS=
REDSHIFT_TEST_DBNAME=
REDSHIFT_TEST_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake

SNOWFLAKE_TEST_ACCOUNT=
SNOWFLAKE_TEST_USER=
SNOWFLAKE_TEST_PASS=
SNOWFLAKE_TEST_ROLE=
SNOWFLAKE_TEST_DBNAME=
SNOWFLAKE_TEST_WHNAME=

# bigquery
BIGQUERY_PRIVATE_KEY=
BIGQUERY_PRIVATE_KEY_ID=
BIGQUERY_CLIENT_EMAIL=
BIGQUERY_CLIENT_ID=
BIGQUERY_TEST_PROJECT=

# databricks
DATABRICKS_TEST_HOST=
DATBRICKS_TEST_ENDPOINT=
DATABRICKS_TOKEN=

# msft
SYNAPSE_TEST_SERVER=
SYNAPSE_TEST_DBNAME=
SYNAPSE_TEST_USER=
SYNAPSE_TEST_PASS=

AZURESQL_TEST_SERVER=
AZURESQL_TEST_DBNAME=
AZURESQL_TEST_USER=
AZURESQL_TEST_PASS=

0 comments on commit 272fcf8

Please sign in to comment.