Merge pull request #332 from dbt-labs/reenable-sqlserver-and-synapse-support

Reenable sqlserver and synapse support

dataders authored Dec 18, 2024
2 parents 6528e75 + 705e3eb commit b8eed97
Showing 15 changed files with 2,147 additions and 31 deletions.
9 changes: 9 additions & 0 deletions .github/workflows/ci.yml
@@ -35,9 +35,18 @@ jobs:
# bigquery
BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"
# synapse
SYNAPSE_DRIVER: ${{ vars.SYNAPSE_DRIVER }}
SYNAPSE_HOST: ${{ vars.SYNAPSE_HOST }}
SYNAPSE_PORT: ${{ vars.SYNAPSE_PORT }}
SYNAPSE_DATABASE: ${{ vars.SYNAPSE_DATABASE }}
SYNAPSE_AUTHENTICATION: ${{ vars.SYNAPSE_AUTHENTICATION }}
SYNAPSE_TENANT_ID: ${{ vars.SYNAPSE_TENANT_ID }}
SYNAPSE_CLIENT_ID: ${{ vars.SYNAPSE_CLIENT_ID }}

secrets:
DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
SYNAPSE_CLIENT_SECRET: ${{ secrets.SYNAPSE_CLIENT_SECRET }}
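These job-level entries become environment variables for the integration-test run: the same names are allow-listed under passenv in tox.ini and read back with env_var() in integration_tests/profiles.yml further down. A rough local equivalent, with every value a placeholder (the driver string and host suffix come from the old hard-coded profile, and ServicePrincipal is only inferred from the tenant/client fields):

# hedged local stand-in for the CI variables above; all values are placeholders
export SYNAPSE_DRIVER="ODBC Driver 17 for SQL Server"
export SYNAPSE_HOST="<workspace>.sql.azuresynapse.net"
export SYNAPSE_PORT=1433
export SYNAPSE_DATABASE="<dedicated-sql-pool-db>"
export SYNAPSE_AUTHENTICATION="ServicePrincipal"
export SYNAPSE_TENANT_ID="<azure-tenant-guid>"
export SYNAPSE_CLIENT_ID="<app-registration-client-id>"
export SYNAPSE_CLIENT_SECRET="<app-registration-client-secret>"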
1 change: 1 addition & 0 deletions .python-version
@@ -0,0 +1 @@
3.11
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog

## dbt-external-tables v0.11.0

### Synapse & SQL Server
* Reenable sqlserver and synapse support https://github.com/dbt-labs/dbt-external-tables/pull/332


**Full Changelog**: https://github.com/dbt-labs/dbt-external-tables/compare/0.10.1...0.11.0

## dbt-external-tables v0.10.1

* [FIX] OOPS! Revert https://github.com/dbt-labs/dbt-external-tables/pull/330 "stage_external_sources Comparing source_name of the node instead of the name of the node" by @dataders in https://github.com/dbt-labs/dbt-external-tables/pull/330
@@ -1,4 +1,4 @@
{% macro sqlserver__prep_external() %}
{% macro fabric__prep_external() %}

{% set external_data_source = target.schema ~ '.dbt_external_tables_testing' %}

13 changes: 8 additions & 5 deletions integration_tests/profiles.yml
@@ -49,11 +49,14 @@ integration_tests:

synapse:
type: synapse
driver: "ODBC Driver 17 for SQL Server"
port: 1433
host: "{{ env_var('SYNAPSE_TEST_SERVER') }}.sql.azuresynapse.net"
database: "{{ env_var('SYNAPSE_TEST_DBNAME') }}"
authentication: CLI
driver: "{{ env_var('SYNAPSE_DRIVER') }}"
port: "{{ env_var('SYNAPSE_PORT') }}"
host: "{{ env_var('SYNAPSE_HOST') }}"
database: "{{ env_var('SYNAPSE_DATABASE') }}"
authentication: "{{ env_var('SYNAPSE_AUTHENTICATION') }}"
tenant_id: "{{ env_var('SYNAPSE_TENANT_ID') }}"
client_id: "{{ env_var('SYNAPSE_CLIENT_ID') }}"
client_secret: "{{ env_var('SYNAPSE_CLIENT_SECRET') }}"
schema: dbt_external_tables_integration_tests_synapse
threads: 1
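The target now reads every connection detail from the environment, including the service-principal fields, instead of the previously hard-coded driver, host, and CLI authentication. A minimal smoke test, assuming dbt-synapse is installed and the SYNAPSE_* variables are exported as sketched above (dbt picks up the profiles.yml in the working directory):

cd integration_tests
dbt debug --target synapse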

29 changes: 13 additions & 16 deletions integration_tests/test.env.sample
@@ -1,40 +1,37 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_HOST=
REDSHIFT_USER=
DBT_ENV_SECRET_REDSHIFT_PASS=
REDSHIFT_PORT=
REDSHIFT_DBNAME=
REDSHIFT_SCHEMA=
REDSHIFT_SPECTRUM_IAM_ROLE=
# local testing only
# REDSHIFT_SCHEMA=

# snowflake
SNOWFLAKE_ACCOUNT=
SNOWFLAKE_USER=
DBT_ENV_SECRET_SNOWFLAKE_PASS=
SNOWFLAKE_ROLE=
SNOWFLAKE_DATABASE=
SNOWFLAKE_SCHEMA=
SNOWFLAKE_WAREHOUSE=
# local testing only
# SNOWFLAKE_SCHEMA=

# bigquery
BIGQUERY_PROJECT=
BIGQUERY_SCHEMA=
BIGQUERY_KEYFILE_JSON=
# local testing only
# BIGQUERY_SCHEMA=

# synapse
SYNAPSE_CLIENT_SECRET=
# local testing only
# SYNAPSE_SCHEMA=


# NOT CURRENTLY USED
# databricks
DATABRICKS_TEST_HOST=
DATBRICKS_TEST_ENDPOINT=
DATABRICKS_TOKEN=

# msft
SYNAPSE_TEST_SERVER=
SYNAPSE_TEST_DBNAME=
SYNAPSE_TEST_USER=
SYNAPSE_TEST_PASS=

# old
AZURESQL_TEST_SERVER=
AZURESQL_TEST_DBNAME=
AZURESQL_TEST_USER=
14 changes: 11 additions & 3 deletions integration_tests/vars.env.sample
@@ -1,18 +1,26 @@
# gh variable set -f integration_tests/vars.env

# redshift
# NOTE: REDSHIFT_SPECTRUM_IAM_ROLE is currently hard-coded
REDSHIFT_HOST=
REDSHIFT_USER=
REDSHIFT_DATABASE=
REDSHIFT_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake
SNOWFLAKE_ACCOUNT=
SNOWFLAKE_USER=
SNOWFLAKE_ROLE=
SNOWFLAKE_DATABASE=
SNOWFLAKE_WAREHOUSE=

# bigquery
BIGQUERY_PROJECT=

# synapse
SYNAPSE_DRIVER=
SYNAPSE_HOST=
SYNAPSE_PORT=
SYNAPSE_DATABASE=
SYNAPSE_AUTHENTICATION=
SYNAPSE_TENANT_ID=
SYNAPSE_CLIENT_ID=
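The non-secret connection values are pushed as repository variables; only SYNAPSE_CLIENT_SECRET stays in test.env. A hedged sketch that simply follows the comments at the top of the two sample files, after copying and filling them in:

cp integration_tests/vars.env.sample integration_tests/vars.env   # fill in the SYNAPSE_* values
cp integration_tests/test.env.sample integration_tests/test.env   # fill in SYNAPSE_CLIENT_SECRET
gh variable set -f integration_tests/vars.env
gh secret set -f integration_tests/test.env -e ci_testing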
@@ -1,4 +1,4 @@
{% macro sqlserver__create_external_schema(source_node) %}
{% macro fabric__create_external_schema(source_node) %}
{# https://learn.microsoft.com/en-us/sql/t-sql/statements/create-schema-transact-sql?view=sql-server-ver16 #}

{% set ddl %}
@@ -1,4 +1,4 @@
{% macro sqlserver__create_external_table(source_node) %}
{% macro fabric__create_external_table(source_node) %}

{%- set columns = source_node.columns.values() -%}
{%- set external = source_node.external -%}
@@ -1,4 +1,4 @@
{% macro sqlserver__get_external_build_plan(source_node) %}
{% macro fabric__get_external_build_plan(source_node) %}

{% set build_plan = [] %}

@@ -1,4 +1,4 @@
{% macro sqlserver__dropif(node) %}
{% macro fabric__dropif(node) %}

{% set ddl %}
if object_id ('{{source(node.source_name, node.name)}}') is not null
14 changes: 14 additions & 0 deletions pyproject.toml
@@ -0,0 +1,14 @@
[project]
name = "dbt-external-tables"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"dbt-bigquery>=1.9.0",
"dbt-core>=1.9.1",
"dbt-redshift>=1.9.0",
"dbt-snowflake>=1.9.0",
"dbt-synapse>=1.8.2",
"tox>=4.23.2",
]
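This new pyproject.toml pins the adapters exercised by the integration tests, now including dbt-synapse, against the Python 3.11 added in .python-version. A possible local setup, assuming the project is managed with uv (the stub metadata looks uv-generated, but that is an assumption; plain pip works as well):

# with uv (assumed)
uv sync
# or with pip in an existing Python 3.11 environment, installing only what you need
python3.11 -m pip install "dbt-synapse>=1.8.2" "tox>=4.23.2"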
2 changes: 1 addition & 1 deletion supported_adapters.env
@@ -1 +1 @@
SUPPORTED_ADAPTERS=snowflake,redshift,bigquery
SUPPORTED_ADAPTERS=snowflake,redshift,bigquery,synapse
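supported_adapters.env now advertises synapse alongside the original three. How CI consumes this file is not shown in the diff, so this is only a trivial hedged check of the new value:

source supported_adapters.env && echo "$SUPPORTED_ADAPTERS"
# snowflake,redshift,bigquery,synapse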
26 changes: 25 additions & 1 deletion tox.ini
@@ -24,6 +24,15 @@ passenv =
BIGQUERY_KEYFILE_JSON
BIGQUERY_PROJECT
BIGQUERY_SCHEMA
# synapse
SYNAPSE_DRIVER
SYNAPSE_HOST
SYNAPSE_PORT
SYNAPSE_DATABASE
SYNAPSE_AUTHENTICATION
SYNAPSE_TENANT_ID
SYNAPSE_CLIENT_ID
SYNAPSE_CLIENT_SECRET

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_redshift]
@@ -68,4 +77,19 @@ commands =
dbt run-operation prep_external --target bigquery
dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target bigquery
dbt run-operation dbt_external_tables.stage_external_sources --target bigquery
dbt test --target bigquery

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_synapse]
changedir = integration_tests
allowlist_externals =
dbt
skip_install = true
commands =
dbt deps --target synapse
dbt seed --full-refresh --target synapse
dbt run --target synapse
dbt run-operation prep_external --target synapse
dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target synapse
dbt run-operation dbt_external_tables.stage_external_sources --target synapse
dbt test --target synapse
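The new tox environment mirrors the Redshift, Snowflake, and BigQuery ones: deps, seed, run, prep_external, both stage_external_sources invocations, and tests, all against the synapse target. Since skip_install is set and dbt is expected on PATH, a hedged invocation is simply:

# tox forwards the SYNAPSE_* variables listed under passenv
tox -e dbt_integration_synapse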