diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0fa84469a..7ff98322d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.9.0a1 +current_version = 1.9.0b1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.9.0-b1.md b/.changes/1.9.0-b1.md new file mode 100644 index 000000000..15a01afe7 --- /dev/null +++ b/.changes/1.9.0-b1.md @@ -0,0 +1,61 @@ +## dbt-snowflake 1.9.0-b1 - October 01, 2024 + +### Features + +- Support refresh_mode and initialize parameters for dynamic tables ([#1076](https://github.com/dbt-labs/dbt-snowflake/issues/1076)) +- Add tests for cross-database `cast` macro ([#1009](https://github.com/dbt-labs/dbt-snowflake/issues/1009)) +- Cross-database `date` macro ([#1013](https://github.com/dbt-labs/dbt-snowflake/issues/1013)) +- Replace underscores with hyphens in account IDs to prevent SSL issues ([#1068](https://github.com/dbt-labs/dbt-snowflake/issues/1068)) +- Support JWT Authentication ([#1079](https://github.com/dbt-labs/dbt-snowflake/issues/1079), [#726](https://github.com/dbt-labs/dbt-snowflake/issues/726)) +- Improve run times for large projects by reusing connections by default ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- Improve run times when using key pair auth by caching the private key ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- Add support for Iceberg table materializations. ([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) +- Microbatch incremental strategy ([#1182](https://github.com/dbt-labs/dbt-snowflake/issues/1182)) +- Add support for Iceberg table format in Dynamic Tables ([#1183](https://github.com/dbt-labs/dbt-snowflake/issues/1183)) +- Add Iceberg format Incremental Models ([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) +- Add support for all on_schema_change incremental model strategies. 
([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) + +### Fixes + +- Get catalog metadata for a single relation in the most optimized way using the get_catalog_for_single_relation macro and capability ([#1048](https://github.com/dbt-labs/dbt-snowflake/issues/1048)) +- Update relation caching to correctly identify dynamic tables, accounting for Snowflake's `2024_03` bundle ([#1016](https://github.com/dbt-labs/dbt-snowflake/issues/1016)) +- Rename targets for tables and views use fully qualified names ([#1031](https://github.com/dbt-labs/dbt-snowflake/issues/1031)) +- Surface SSO token expiration in logs ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851)) +- return to previous naming convention to return to quoting policy ([#1074](https://github.com/dbt-labs/dbt-snowflake/issues/1074)) +- Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#1033](https://github.com/dbt-labs/dbt-snowflake/issues/1033)) +- Use show ... starts with instead of show ... like in _show_object_metadata ([#1102](https://github.com/dbt-labs/dbt-snowflake/issues/1102)) +- Fix issue where dbt-snowflake attempts to drop database roles during grants sync ([#1151](https://github.com/dbt-labs/dbt-snowflake/issues/1151)) + +### Under the Hood + +- Lazy load agate ([#953](https://github.com/dbt-labs/dbt-snowflake/issues/953)) +- Speedup catalog string comparison by using ilike before equals ([#1035](https://github.com/dbt-labs/dbt-snowflake/issues/1035)) +- Improve memory efficiency of the process_results() override. ([#1053](https://github.com/dbt-labs/dbt-snowflake/issues/1053)) +- Automate all manual integration tests for Dynamic Tables ([#1084](https://github.com/dbt-labs/dbt-snowflake/issues/1084)) +- Add support for experimental record/replay testing. 
([#1106](https://github.com/dbt-labs/dbt-snowflake/issues/1106)) +- Remove `freezegun` as a testing dependency; this package is no longer used ([#1136](https://github.com/dbt-labs/dbt-snowflake/issues/1136)) +- Add support for Python 3.12 ([#903](https://github.com/dbt-labs/dbt-snowflake/issues/903)) +- Isolating distribution testing ([#1130](https://github.com/dbt-labs/dbt-snowflake/issues/1130)) +- Change behavior flag semantics to log iceberg flag warnings.. ([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) + +### Dependencies + +- Update freezegun requirement from ~=1.3 to ~=1.4 ([#869](https://github.com/dbt-labs/dbt-snowflake/pull/869)) +- Bump actions/upload-artifact from 3 to 4 ([#971](https://github.com/dbt-labs/dbt-snowflake/pull/971)) +- Bump dbt-labs/actions from 1.1.0 to 1.1.1 ([#1006](https://github.com/dbt-labs/dbt-snowflake/pull/1006)) +- Bump actions/download-artifact from 3 to 4 ([#1007](https://github.com/dbt-labs/dbt-snowflake/pull/1007)) +- Bump aurelien-baudet/workflow-dispatch from 2 to 4 ([#1093](https://github.com/dbt-labs/dbt-snowflake/pull/1093)) +- Update twine requirement from ~=4.0 to ~=5.1 ([#1120](https://github.com/dbt-labs/dbt-snowflake/pull/1120)) +- Bump pre-commit from 3.7.0 to 3.7.1 ([#1119](https://github.com/dbt-labs/dbt-snowflake/pull/1119)) +- Update wheel requirement from ~=0.42 to ~=0.43 ([#1121](https://github.com/dbt-labs/dbt-snowflake/pull/1121)) +- Update pytest-xdist requirement from ~=3.5 to ~=3.6 ([#1122](https://github.com/dbt-labs/dbt-snowflake/pull/1122)) +- Update tox requirement from ~=4.11 to ~=4.16 ([#1135](https://github.com/dbt-labs/dbt-snowflake/pull/1135)) + +### Contributors +- [@HenkvanDyk,mikealfare](https://github.com/HenkvanDyk,mikealfare) ([#1076](https://github.com/dbt-labs/dbt-snowflake/issues/1076)) +- [@McKnight-42](https://github.com/McKnight-42) ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851), [#1074](https://github.com/dbt-labs/dbt-snowflake/issues/1074)) +- 
[@amardatar](https://github.com/amardatar) ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- [@dwreeves](https://github.com/dwreeves) ([#953](https://github.com/dbt-labs/dbt-snowflake/issues/953)) +- [@leahwicz](https://github.com/leahwicz) ([#1130](https://github.com/dbt-labs/dbt-snowflake/issues/1130)) +- [@llam15](https://github.com/llam15) ([#1079](https://github.com/dbt-labs/dbt-snowflake/issues/1079), [#726](https://github.com/dbt-labs/dbt-snowflake/issues/726)) +- [@mikealfare,](https://github.com/mikealfare,) ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851)) diff --git a/.changes/unreleased/Dependencies-20231219-125152.yaml b/.changes/1.9.0/Dependencies-20231219-125152.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20231219-125152.yaml rename to .changes/1.9.0/Dependencies-20231219-125152.yaml diff --git a/.changes/unreleased/Dependencies-20240412-155921.yaml b/.changes/1.9.0/Dependencies-20240412-155921.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240412-155921.yaml rename to .changes/1.9.0/Dependencies-20240412-155921.yaml diff --git a/.changes/unreleased/Dependencies-20240429-124038.yaml b/.changes/1.9.0/Dependencies-20240429-124038.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240429-124038.yaml rename to .changes/1.9.0/Dependencies-20240429-124038.yaml diff --git a/.changes/unreleased/Dependencies-20240429-124044.yaml b/.changes/1.9.0/Dependencies-20240429-124044.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240429-124044.yaml rename to .changes/1.9.0/Dependencies-20240429-124044.yaml diff --git a/.changes/unreleased/Dependencies-20240624-122538.yaml b/.changes/1.9.0/Dependencies-20240624-122538.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240624-122538.yaml rename to .changes/1.9.0/Dependencies-20240624-122538.yaml diff --git a/.changes/unreleased/Dependencies-20240718-120848.yaml 
b/.changes/1.9.0/Dependencies-20240718-120848.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240718-120848.yaml rename to .changes/1.9.0/Dependencies-20240718-120848.yaml diff --git a/.changes/unreleased/Dependencies-20240718-120849.yaml b/.changes/1.9.0/Dependencies-20240718-120849.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240718-120849.yaml rename to .changes/1.9.0/Dependencies-20240718-120849.yaml diff --git a/.changes/unreleased/Dependencies-20240718-120852.yaml b/.changes/1.9.0/Dependencies-20240718-120852.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240718-120852.yaml rename to .changes/1.9.0/Dependencies-20240718-120852.yaml diff --git a/.changes/unreleased/Dependencies-20240718-120857.yaml b/.changes/1.9.0/Dependencies-20240718-120857.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240718-120857.yaml rename to .changes/1.9.0/Dependencies-20240718-120857.yaml diff --git a/.changes/unreleased/Dependencies-20240719-120828.yaml b/.changes/1.9.0/Dependencies-20240719-120828.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240719-120828.yaml rename to .changes/1.9.0/Dependencies-20240719-120828.yaml diff --git a/.changes/unreleased/Features-20240131-125318.yaml b/.changes/1.9.0/Features-20240131-125318.yaml similarity index 100% rename from .changes/unreleased/Features-20240131-125318.yaml rename to .changes/1.9.0/Features-20240131-125318.yaml diff --git a/.changes/unreleased/Features-20240430-185714.yaml b/.changes/1.9.0/Features-20240430-185714.yaml similarity index 100% rename from .changes/unreleased/Features-20240430-185714.yaml rename to .changes/1.9.0/Features-20240430-185714.yaml diff --git a/.changes/unreleased/Features-20240501-151901.yaml b/.changes/1.9.0/Features-20240501-151901.yaml similarity index 100% rename from .changes/unreleased/Features-20240501-151901.yaml rename to 
.changes/1.9.0/Features-20240501-151901.yaml diff --git a/.changes/unreleased/Features-20240604-154856.yaml b/.changes/1.9.0/Features-20240604-154856.yaml similarity index 100% rename from .changes/unreleased/Features-20240604-154856.yaml rename to .changes/1.9.0/Features-20240604-154856.yaml diff --git a/.changes/unreleased/Features-20240610-171026.yaml b/.changes/1.9.0/Features-20240610-171026.yaml similarity index 100% rename from .changes/unreleased/Features-20240610-171026.yaml rename to .changes/1.9.0/Features-20240610-171026.yaml diff --git a/.changes/unreleased/Features-20240709-194316.yaml b/.changes/1.9.0/Features-20240709-194316.yaml similarity index 100% rename from .changes/unreleased/Features-20240709-194316.yaml rename to .changes/1.9.0/Features-20240709-194316.yaml diff --git a/.changes/unreleased/Features-20240710-172345.yaml b/.changes/1.9.0/Features-20240710-172345.yaml similarity index 100% rename from .changes/unreleased/Features-20240710-172345.yaml rename to .changes/1.9.0/Features-20240710-172345.yaml diff --git a/.changes/unreleased/Features-20240911-001806.yaml b/.changes/1.9.0/Features-20240911-001806.yaml similarity index 100% rename from .changes/unreleased/Features-20240911-001806.yaml rename to .changes/1.9.0/Features-20240911-001806.yaml diff --git a/.changes/unreleased/Features-20240913-215416.yaml b/.changes/1.9.0/Features-20240913-215416.yaml similarity index 100% rename from .changes/unreleased/Features-20240913-215416.yaml rename to .changes/1.9.0/Features-20240913-215416.yaml diff --git a/.changes/1.9.0/Features-20240917-100505.yaml b/.changes/1.9.0/Features-20240917-100505.yaml new file mode 100644 index 000000000..22cabc904 --- /dev/null +++ b/.changes/1.9.0/Features-20240917-100505.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for Iceberg table format in Dynamic Tables +time: 2024-09-17T10:05:05.609859-04:00 +custom: + Author: mikealfare + Issue: "1183" diff --git a/.changes/1.9.0/Features-20240923-203204.yaml 
b/.changes/1.9.0/Features-20240923-203204.yaml new file mode 100644 index 000000000..eaca4906b --- /dev/null +++ b/.changes/1.9.0/Features-20240923-203204.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add Iceberg format Incremental Models +time: 2024-09-23T20:32:04.783741-07:00 +custom: + Author: versusfacit + Issue: "321" diff --git a/.changes/1.9.0/Features-20240930-112041.yaml b/.changes/1.9.0/Features-20240930-112041.yaml new file mode 100644 index 000000000..1395a8bf7 --- /dev/null +++ b/.changes/1.9.0/Features-20240930-112041.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for all on_schema_change incremental model strategies. +time: 2024-09-30T11:20:41.99589-07:00 +custom: + Author: versusfacit + Issue: "321" diff --git a/.changes/unreleased/Fixes-20240516-174337.yaml b/.changes/1.9.0/Fixes-20240516-174337.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240516-174337.yaml rename to .changes/1.9.0/Fixes-20240516-174337.yaml diff --git a/.changes/unreleased/Fixes-20240516-224134.yaml b/.changes/1.9.0/Fixes-20240516-224134.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240516-224134.yaml rename to .changes/1.9.0/Fixes-20240516-224134.yaml diff --git a/.changes/unreleased/Fixes-20240522-160538.yaml b/.changes/1.9.0/Fixes-20240522-160538.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240522-160538.yaml rename to .changes/1.9.0/Fixes-20240522-160538.yaml diff --git a/.changes/unreleased/Fixes-20240605-125611.yaml b/.changes/1.9.0/Fixes-20240605-125611.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240605-125611.yaml rename to .changes/1.9.0/Fixes-20240605-125611.yaml diff --git a/.changes/unreleased/Fixes-20240607-102708.yaml b/.changes/1.9.0/Fixes-20240607-102708.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240607-102708.yaml rename to .changes/1.9.0/Fixes-20240607-102708.yaml diff --git a/.changes/unreleased/Fixes-20240628-190140.yaml 
b/.changes/1.9.0/Fixes-20240628-190140.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240628-190140.yaml rename to .changes/1.9.0/Fixes-20240628-190140.yaml diff --git a/.changes/unreleased/Fixes-20240705-165932.yaml b/.changes/1.9.0/Fixes-20240705-165932.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240705-165932.yaml rename to .changes/1.9.0/Fixes-20240705-165932.yaml diff --git a/.changes/1.9.0/Fixes-20240920-193613.yaml b/.changes/1.9.0/Fixes-20240920-193613.yaml new file mode 100644 index 000000000..f85f6fc56 --- /dev/null +++ b/.changes/1.9.0/Fixes-20240920-193613.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix issue where dbt-snowflake attempts to drop database roles during grants sync +time: 2024-09-20T19:36:13.671173-04:00 +custom: + Author: mikealfare + Issue: "1151" diff --git a/.changes/unreleased/Under the Hood-20240327-001304.yaml b/.changes/1.9.0/Under the Hood-20240327-001304.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240327-001304.yaml rename to .changes/1.9.0/Under the Hood-20240327-001304.yaml diff --git a/.changes/unreleased/Under the Hood-20240425-144556.yaml b/.changes/1.9.0/Under the Hood-20240425-144556.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240425-144556.yaml rename to .changes/1.9.0/Under the Hood-20240425-144556.yaml diff --git a/.changes/unreleased/Under the Hood-20240517-143743.yaml b/.changes/1.9.0/Under the Hood-20240517-143743.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240517-143743.yaml rename to .changes/1.9.0/Under the Hood-20240517-143743.yaml diff --git a/.changes/unreleased/Under the Hood-20240614-170858.yaml b/.changes/1.9.0/Under the Hood-20240614-170858.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240614-170858.yaml rename to .changes/1.9.0/Under the Hood-20240614-170858.yaml diff --git a/.changes/unreleased/Under the Hood-20240716-174655.yaml 
b/.changes/1.9.0/Under the Hood-20240716-174655.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240716-174655.yaml rename to .changes/1.9.0/Under the Hood-20240716-174655.yaml diff --git a/.changes/unreleased/Under the Hood-20240719-125618.yaml b/.changes/1.9.0/Under the Hood-20240719-125618.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240719-125618.yaml rename to .changes/1.9.0/Under the Hood-20240719-125618.yaml diff --git a/.changes/unreleased/Under the Hood-20240722-143114.yaml b/.changes/1.9.0/Under the Hood-20240722-143114.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240722-143114.yaml rename to .changes/1.9.0/Under the Hood-20240722-143114.yaml diff --git a/.changes/unreleased/Under the Hood-20240806-215935.yaml b/.changes/1.9.0/Under the Hood-20240806-215935.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240806-215935.yaml rename to .changes/1.9.0/Under the Hood-20240806-215935.yaml diff --git a/.changes/unreleased/Under the Hood-20240917-181147.yaml b/.changes/1.9.0/Under the Hood-20240917-181147.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240917-181147.yaml rename to .changes/1.9.0/Under the Hood-20240917-181147.yaml diff --git a/.changes/unreleased/Breaking Changes-20241016-183143.yaml b/.changes/unreleased/Breaking Changes-20241016-183143.yaml new file mode 100644 index 000000000..26cc4b6de --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20241016-183143.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Drop support for Python 3.8 +time: 2024-10-16T18:31:43.4167-04:00 +custom: + Author: mikealfare + Issue: "1211" diff --git a/.changes/unreleased/Fixes-20241008-122635.yaml b/.changes/unreleased/Fixes-20241008-122635.yaml new file mode 100644 index 000000000..c069283d6 --- /dev/null +++ b/.changes/unreleased/Fixes-20241008-122635.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Dynamic Iceberg table 
base_location_subpath generation fix. +time: 2024-10-08T12:26:35.521308-07:00 +custom: + Author: versusfacit + Issue: "1200" diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js index 81386c54e..756c21d5e 100644 --- a/.github/scripts/integration-test-matrix.js +++ b/.github/scripts/integration-test-matrix.js @@ -1,6 +1,6 @@ module.exports = ({ context }) => { - const defaultPythonVersion = "3.8"; - const supportedPythonVersions = ["3.8", "3.9", "3.10", "3.11", "3.12"]; + const defaultPythonVersion = "3.9"; + const supportedPythonVersions = ["3.9", "3.10", "3.11", "3.12"]; const supportedAdapters = ["snowflake"]; // if PR, generate matrix based on files changed and PR labels diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 95ce18033..4913917f4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -50,7 +50,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.9' - name: Install python dependencies run: | @@ -71,7 +71,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.9', '3.10', '3.11', '3.12'] env: TOXENV: "unit" @@ -127,7 +127,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.9' - name: Install python dependencies run: | @@ -175,7 +175,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-12, windows-latest] - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.9', '3.10', '3.11', '3.12'] dist-type: ['whl', 'gz'] steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e14455e28..807e32a9a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,6 @@ repos: - id: black args: - --line-length=99 - - --target-version=py38 - --target-version=py39 - --target-version=py310 - --target-version=py311 diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 301a00ea9..599c20195 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,10 +5,71 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-snowflake/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-snowflake 1.9.0-b1 - October 01, 2024 + +### Features + +- Support refresh_mode and initialize parameters for dynamic tables ([#1076](https://github.com/dbt-labs/dbt-snowflake/issues/1076)) +- Add tests for cross-database `cast` macro ([#1009](https://github.com/dbt-labs/dbt-snowflake/issues/1009)) +- Cross-database `date` macro ([#1013](https://github.com/dbt-labs/dbt-snowflake/issues/1013)) +- Replace underscores with hyphens in account IDs to prevent SSL issues ([#1068](https://github.com/dbt-labs/dbt-snowflake/issues/1068)) +- Support JWT Authentication ([#1079](https://github.com/dbt-labs/dbt-snowflake/issues/1079), [#726](https://github.com/dbt-labs/dbt-snowflake/issues/726)) +- Improve run times for large projects by reusing connections by default ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- Improve run times when using key pair auth by caching the private key ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- Add support for Iceberg table materializations. 
([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) +- Microbatch incremental strategy ([#1182](https://github.com/dbt-labs/dbt-snowflake/issues/1182)) +- Add support for Iceberg table format in Dynamic Tables ([#1183](https://github.com/dbt-labs/dbt-snowflake/issues/1183)) +- Add Iceberg format Incremental Models ([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) +- Add support for all on_schema_change incremental model strategies. ([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) + +### Fixes + +- Get catalog metadata for a single relation in the most optimized way using the get_catalog_for_single_relation macro and capability ([#1048](https://github.com/dbt-labs/dbt-snowflake/issues/1048)) +- Update relation caching to correctly identify dynamic tables, accounting for Snowflake's `2024_03` bundle ([#1016](https://github.com/dbt-labs/dbt-snowflake/issues/1016)) +- Rename targets for tables and views use fully qualified names ([#1031](https://github.com/dbt-labs/dbt-snowflake/issues/1031)) +- Surface SSO token expiration in logs ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851)) +- return to previous naming convention to return to quoting policy ([#1074](https://github.com/dbt-labs/dbt-snowflake/issues/1074)) +- Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#1033](https://github.com/dbt-labs/dbt-snowflake/issues/1033)) +- Use show ... starts with instead of show ... 
like in _show_object_metadata ([#1102](https://github.com/dbt-labs/dbt-snowflake/issues/1102)) +- Fix issue where dbt-snowflake attempts to drop database roles during grants sync ([#1151](https://github.com/dbt-labs/dbt-snowflake/issues/1151)) + +### Under the Hood + +- Lazy load agate ([#953](https://github.com/dbt-labs/dbt-snowflake/issues/953)) +- Speedup catalog string comparison by using ilike before equals ([#1035](https://github.com/dbt-labs/dbt-snowflake/issues/1035)) +- Improve memory efficiency of the process_results() override. ([#1053](https://github.com/dbt-labs/dbt-snowflake/issues/1053)) +- Automate all manual integration tests for Dynamic Tables ([#1084](https://github.com/dbt-labs/dbt-snowflake/issues/1084)) +- Add support for experimental record/replay testing. ([#1106](https://github.com/dbt-labs/dbt-snowflake/issues/1106)) +- Remove `freezegun` as a testing dependency; this package is no longer used ([#1136](https://github.com/dbt-labs/dbt-snowflake/issues/1136)) +- Add support for Python 3.12 ([#903](https://github.com/dbt-labs/dbt-snowflake/issues/903)) +- Isolating distribution testing ([#1130](https://github.com/dbt-labs/dbt-snowflake/issues/1130)) +- Change behavior flag semantics to log iceberg flag warnings.. 
([#321](https://github.com/dbt-labs/dbt-snowflake/issues/321)) + +### Dependencies + +- Update freezegun requirement from ~=1.3 to ~=1.4 ([#869](https://github.com/dbt-labs/dbt-snowflake/pull/869)) +- Bump actions/upload-artifact from 3 to 4 ([#971](https://github.com/dbt-labs/dbt-snowflake/pull/971)) +- Bump dbt-labs/actions from 1.1.0 to 1.1.1 ([#1006](https://github.com/dbt-labs/dbt-snowflake/pull/1006)) +- Bump actions/download-artifact from 3 to 4 ([#1007](https://github.com/dbt-labs/dbt-snowflake/pull/1007)) +- Bump aurelien-baudet/workflow-dispatch from 2 to 4 ([#1093](https://github.com/dbt-labs/dbt-snowflake/pull/1093)) +- Update twine requirement from ~=4.0 to ~=5.1 ([#1120](https://github.com/dbt-labs/dbt-snowflake/pull/1120)) +- Bump pre-commit from 3.7.0 to 3.7.1 ([#1119](https://github.com/dbt-labs/dbt-snowflake/pull/1119)) +- Update wheel requirement from ~=0.42 to ~=0.43 ([#1121](https://github.com/dbt-labs/dbt-snowflake/pull/1121)) +- Update pytest-xdist requirement from ~=3.5 to ~=3.6 ([#1122](https://github.com/dbt-labs/dbt-snowflake/pull/1122)) +- Update tox requirement from ~=4.11 to ~=4.16 ([#1135](https://github.com/dbt-labs/dbt-snowflake/pull/1135)) + +### Contributors +- [@HenkvanDyk,mikealfare](https://github.com/HenkvanDyk,mikealfare) ([#1076](https://github.com/dbt-labs/dbt-snowflake/issues/1076)) +- [@McKnight-42](https://github.com/McKnight-42) ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851), [#1074](https://github.com/dbt-labs/dbt-snowflake/issues/1074)) +- [@amardatar](https://github.com/amardatar) ([#1082](https://github.com/dbt-labs/dbt-snowflake/issues/1082)) +- [@dwreeves](https://github.com/dwreeves) ([#953](https://github.com/dbt-labs/dbt-snowflake/issues/953)) +- [@leahwicz](https://github.com/leahwicz) ([#1130](https://github.com/dbt-labs/dbt-snowflake/issues/1130)) +- [@llam15](https://github.com/llam15) ([#1079](https://github.com/dbt-labs/dbt-snowflake/issues/1079), 
[#726](https://github.com/dbt-labs/dbt-snowflake/issues/726)) +- [@mikealfare,](https://github.com/mikealfare,) ([#851](https://github.com/dbt-labs/dbt-snowflake/issues/851)) + + ## Previous Releases For information on prior major and minor releases, see their changelogs: -- [1.8](https://github.com/dbt-labs/dbt-snowflake/blob/1.8.latest/CHANGELOG.md) -- [1.7](https://github.com/dbt-labs/dbt-snowflake/blob/1.7.latest/CHANGELOG.md) - [1.6](https://github.com/dbt-labs/dbt-snowflake/blob/1.6.latest/CHANGELOG.md) - [1.5](https://github.com/dbt-labs/dbt-snowflake/blob/1.5.latest/CHANGELOG.md) - [1.4](https://github.com/dbt-labs/dbt-snowflake/blob/1.4.latest/CHANGELOG.md) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 45e0054c5..5b68aa03a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,7 +79,7 @@ WARNING: The parameters in your `test.env` file must link to a valid Snowflake a There are a few methods for running tests locally. #### `tox` -`tox` automatically runs unit tests against several Python versions using its own virtualenvs. Run `tox -p` to run unit tests for Python 3.8, Python 3.9, Python 3.10, and `flake8` in parallel. Run `tox -e py38` to invoke tests on Python version 3.8 only (use py38, py39, or py310). Tox recipes are found in `tox.ini`. +`tox` automatically runs unit tests against several Python versions using its own virtualenvs. Run `tox -p` to run unit tests for Python 3.9 and Python 3.10, and `flake8` in parallel. Run `tox -e py39` to invoke tests on Python version 3.9 only (use py39 or py310). Tox recipes are found in `tox.ini`. #### `pytest` You may run a specific test or group of tests using `pytest` directly. Activate a Python virtualenv active with dev dependencies installed. 
Then, run tests like so: diff --git a/Makefile b/Makefile index b42de9147..9a2376f47 100644 --- a/Makefile +++ b/Makefile @@ -12,20 +12,20 @@ dev-uninstall: ## Uninstalls all packages while maintaining the virtual environm pip uninstall -y dbt-snowflake .PHONY: unit -unit: ## Runs unit tests with py38. +unit: ## Runs unit tests with py39. @\ - tox -e py38 + tox -e py39 .PHONY: test -test: ## Runs unit tests with py38 and code checks against staged changes. +test: ## Runs unit tests with py39 and code checks against staged changes. @\ - tox -p -e py38; \ + tox -p -e py39; \ pre-commit run --all-files .PHONY: integration integration: ## Runs snowflake integration tests with py38. @\ - tox -e py38-snowflake -- + tox -e py39-snowflake -- .PHONY: clean @echo "cleaning repo" diff --git a/dbt/adapters/snowflake/__version__.py b/dbt/adapters/snowflake/__version__.py index 6698ed64c..a4077fff2 100644 --- a/dbt/adapters/snowflake/__version__.py +++ b/dbt/adapters/snowflake/__version__.py @@ -1 +1 @@ -version = "1.9.0a1" +version = "1.9.0b1" diff --git a/dbt/adapters/snowflake/impl.py b/dbt/adapters/snowflake/impl.py index 5b5881eed..6320893e1 100644 --- a/dbt/adapters/snowflake/impl.py +++ b/dbt/adapters/snowflake/impl.py @@ -320,7 +320,7 @@ def standardize_grants_dict(self, grants_table: "agate.Table") -> dict: grantee = row["grantee_name"] granted_to = row["granted_to"] privilege = row["privilege"] - if privilege != "OWNERSHIP" and granted_to != "SHARE": + if privilege != "OWNERSHIP" and granted_to not in ["SHARE", "DATABASE_ROLE"]: if privilege in grants_dict.keys(): grants_dict[privilege].append(grantee) else: diff --git a/dbt/adapters/snowflake/relation.py b/dbt/adapters/snowflake/relation.py index 224b2b75e..b6924b9b3 100644 --- a/dbt/adapters/snowflake/relation.py +++ b/dbt/adapters/snowflake/relation.py @@ -17,6 +17,7 @@ from dbt_common.events.functions import fire_event, warn_or_error from dbt.adapters.snowflake.relation_configs import ( + 
SnowflakeCatalogConfigChange, SnowflakeDynamicTableConfig, SnowflakeDynamicTableConfigChangeset, SnowflakeDynamicTableRefreshModeConfigChange, @@ -114,6 +115,12 @@ def dynamic_table_config_changeset( context=new_dynamic_table.refresh_mode, ) + if new_dynamic_table.catalog != existing_dynamic_table.catalog: + config_change_collection.catalog = SnowflakeCatalogConfigChange( + action=RelationConfigChangeAction.create, + context=new_dynamic_table.catalog, + ) + if config_change_collection.has_changes: return config_change_collection return None @@ -132,6 +139,14 @@ def as_case_sensitive(self) -> "SnowflakeRelation": return self.replace_path(**path_part_map) + @property + def can_be_renamed(self) -> bool: + """ + Standard tables and dynamic tables can be renamed, but Snowflake does not support renaming iceberg relations. + The iceberg standard does support renaming, so this may change in the future. + """ + return self.type in self.renameable_relations and not self.is_iceberg_format + def get_ddl_prefix_for_create(self, config: RelationConfig, temporary: bool) -> str: """ This macro renders the appropriate DDL prefix during the create_table_as diff --git a/dbt/adapters/snowflake/relation_configs/__init__.py b/dbt/adapters/snowflake/relation_configs/__init__.py index 61941ab50..fec9d8a54 100644 --- a/dbt/adapters/snowflake/relation_configs/__init__.py +++ b/dbt/adapters/snowflake/relation_configs/__init__.py @@ -1,3 +1,7 @@ +from dbt.adapters.snowflake.relation_configs.catalog import ( + SnowflakeCatalogConfig, + SnowflakeCatalogConfigChange, +) from dbt.adapters.snowflake.relation_configs.dynamic_table import ( SnowflakeDynamicTableConfig, SnowflakeDynamicTableConfigChangeset, @@ -5,9 +9,9 @@ SnowflakeDynamicTableWarehouseConfigChange, SnowflakeDynamicTableTargetLagConfigChange, ) +from dbt.adapters.snowflake.relation_configs.formats import TableFormat from dbt.adapters.snowflake.relation_configs.policies import ( SnowflakeIncludePolicy, SnowflakeQuotePolicy, 
SnowflakeRelationType, ) -from dbt.adapters.snowflake.relation_configs.formats import TableFormat diff --git a/dbt/adapters/snowflake/relation_configs/catalog.py b/dbt/adapters/snowflake/relation_configs/catalog.py new file mode 100644 index 000000000..c8d7de40f --- /dev/null +++ b/dbt/adapters/snowflake/relation_configs/catalog.py @@ -0,0 +1,125 @@ +from dataclasses import dataclass +from typing import Any, Dict, Optional, TYPE_CHECKING, Set, List + +if TYPE_CHECKING: + import agate + +from dbt.adapters.relation_configs import ( + RelationConfigChange, + RelationResults, + RelationConfigValidationMixin, + RelationConfigValidationRule, +) +from dbt.adapters.contracts.relation import RelationConfig +from dbt_common.exceptions import DbtConfigError +from typing_extensions import Self + +from dbt.adapters.snowflake.relation_configs.base import SnowflakeRelationConfigBase +from dbt.adapters.snowflake.relation_configs.formats import TableFormat + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class SnowflakeCatalogConfig(SnowflakeRelationConfigBase, RelationConfigValidationMixin): + """ + This config follow the specs found here: + https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table + https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#create-dynamic-iceberg-table + + The following parameters are configurable by dbt: + - table_format: format for interfacing with the table, e.g. default, iceberg + - external_volume: name of the external volume in Snowflake + - base_location: the directory within the external volume that contains the data + *Note*: This directory can’t be changed after you create a table. 
+ + The following parameters are not currently configurable by dbt: + - name: snowflake + """ + + table_format: Optional[TableFormat] = TableFormat.default() + name: Optional[str] = "SNOWFLAKE" + external_volume: Optional[str] = None + base_location: Optional[str] = None + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + (self.table_format == "default") + or (self.table_format == "iceberg" and self.base_location is not None), + DbtConfigError("Please provide a `base_location` when using iceberg"), + ), + RelationConfigValidationRule( + (self.table_format == "default") + or (self.table_format == "iceberg" and self.name == "SNOWFLAKE"), + DbtConfigError( + "Only Snowflake catalogs are currently supported when using iceberg" + ), + ), + } + + @classmethod + def from_dict(cls, config_dict: Dict[str, Any]) -> Self: + kwargs_dict = { + "name": config_dict.get("name"), + "external_volume": config_dict.get("external_volume"), + "base_location": config_dict.get("base_location"), + } + if table_format := config_dict.get("table_format"): + kwargs_dict["table_format"] = TableFormat(table_format) + return super().from_dict(kwargs_dict) + + @classmethod + def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any]: + + if relation_config.config.extra.get("table_format") is None: + return {} + + config_dict = { + "table_format": relation_config.config.extra.get("table_format"), + "name": "SNOWFLAKE", # this is not currently configurable + } + + if external_volume := relation_config.config.extra.get("external_volume"): + config_dict["external_volume"] = external_volume + + catalog_dirs: List[str] = ["_dbt", relation_config.schema, relation_config.name] + if base_location_subpath := relation_config.config.extra.get("base_location_subpath"): + catalog_dirs.append(base_location_subpath) + config_dict["base_location"] = "/".join(catalog_dirs) + + return config_dict + + @classmethod + def 
parse_relation_results(cls, relation_results: RelationResults) -> Dict[str, Any]: + # this try block can be removed once enable_iceberg_materializations is retired + try: + catalog_results: "agate.Table" = relation_results["catalog"] + except KeyError: + # this happens when `enable_iceberg_materializations` is turned off + return {} + + if len(catalog_results) == 0: + # this happens when the dynamic table is a standard dynamic table (e.g. not iceberg) + return {} + + # for now, if we get catalog results, it's because this is an iceberg table + # this is because we only run `show iceberg tables` to get catalog metadata + # this will need to be updated once this is in `show objects` + catalog: "agate.Row" = catalog_results.rows[0] + config_dict = { + "table_format": "iceberg", + "name": catalog.get("catalog_name"), + "external_volume": catalog.get("external_volume_name"), + "base_location": catalog.get("base_location"), + } + + return config_dict + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class SnowflakeCatalogConfigChange(RelationConfigChange): + context: Optional[SnowflakeCatalogConfig] = None + + @property + def requires_full_refresh(self) -> bool: + return True diff --git a/dbt/adapters/snowflake/relation_configs/dynamic_table.py b/dbt/adapters/snowflake/relation_configs/dynamic_table.py index 2e227d3a4..7361df80a 100644 --- a/dbt/adapters/snowflake/relation_configs/dynamic_table.py +++ b/dbt/adapters/snowflake/relation_configs/dynamic_table.py @@ -8,6 +8,11 @@ from typing_extensions import Self from dbt.adapters.snowflake.relation_configs.base import SnowflakeRelationConfigBase +from dbt.adapters.snowflake.relation_configs.catalog import ( + SnowflakeCatalogConfig, + SnowflakeCatalogConfigChange, +) + if TYPE_CHECKING: import agate @@ -55,11 +60,12 @@ class SnowflakeDynamicTableConfig(SnowflakeRelationConfigBase): query: str target_lag: str snowflake_warehouse: str + catalog: SnowflakeCatalogConfig refresh_mode: Optional[RefreshMode] = 
RefreshMode.default() initialize: Optional[Initialize] = Initialize.default() @classmethod - def from_dict(cls, config_dict) -> "SnowflakeDynamicTableConfig": + def from_dict(cls, config_dict: Dict[str, Any]) -> Self: kwargs_dict = { "name": cls._render_part(ComponentName.Identifier, config_dict.get("name")), "schema_name": cls._render_part(ComponentName.Schema, config_dict.get("schema_name")), @@ -69,12 +75,12 @@ def from_dict(cls, config_dict) -> "SnowflakeDynamicTableConfig": "query": config_dict.get("query"), "target_lag": config_dict.get("target_lag"), "snowflake_warehouse": config_dict.get("snowflake_warehouse"), + "catalog": SnowflakeCatalogConfig.from_dict(config_dict["catalog"]), "refresh_mode": config_dict.get("refresh_mode"), "initialize": config_dict.get("initialize"), } - dynamic_table: "SnowflakeDynamicTableConfig" = super().from_dict(kwargs_dict) - return dynamic_table + return super().from_dict(kwargs_dict) @classmethod def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any]: @@ -85,18 +91,19 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any "query": relation_config.compiled_code, "target_lag": relation_config.config.extra.get("target_lag"), "snowflake_warehouse": relation_config.config.extra.get("snowflake_warehouse"), + "catalog": SnowflakeCatalogConfig.parse_relation_config(relation_config), } if refresh_mode := relation_config.config.extra.get("refresh_mode"): - config_dict.update(refresh_mode=refresh_mode.upper()) + config_dict["refresh_mode"] = refresh_mode.upper() if initialize := relation_config.config.extra.get("initialize"): - config_dict.update(initialize=initialize.upper()) + config_dict["initialize"] = initialize.upper() return config_dict @classmethod - def parse_relation_results(cls, relation_results: RelationResults) -> Dict: + def parse_relation_results(cls, relation_results: RelationResults) -> Dict[str, Any]: dynamic_table: "agate.Row" = 
relation_results["dynamic_table"].rows[0] config_dict = { @@ -106,6 +113,7 @@ def parse_relation_results(cls, relation_results: RelationResults) -> Dict: "query": dynamic_table.get("text"), "target_lag": dynamic_table.get("target_lag"), "snowflake_warehouse": dynamic_table.get("warehouse"), + "catalog": SnowflakeCatalogConfig.parse_relation_results(relation_results), "refresh_mode": dynamic_table.get("refresh_mode"), # we don't get initialize since that's a one-time scheduler attribute, not a DT attribute } @@ -145,6 +153,7 @@ class SnowflakeDynamicTableConfigChangeset: target_lag: Optional[SnowflakeDynamicTableTargetLagConfigChange] = None snowflake_warehouse: Optional[SnowflakeDynamicTableWarehouseConfigChange] = None refresh_mode: Optional[SnowflakeDynamicTableRefreshModeConfigChange] = None + catalog: Optional[SnowflakeCatalogConfigChange] = None @property def requires_full_refresh(self) -> bool: @@ -157,9 +166,10 @@ def requires_full_refresh(self) -> bool: else False ), self.refresh_mode.requires_full_refresh if self.refresh_mode else False, + self.catalog.requires_full_refresh if self.catalog else False, ] ) @property def has_changes(self) -> bool: - return any([self.target_lag, self.snowflake_warehouse, self.refresh_mode]) + return any([self.target_lag, self.snowflake_warehouse, self.refresh_mode, self.catalog]) diff --git a/dbt/adapters/snowflake/relation_configs/formats.py b/dbt/adapters/snowflake/relation_configs/formats.py index 460241d9d..b6bb0bdda 100644 --- a/dbt/adapters/snowflake/relation_configs/formats.py +++ b/dbt/adapters/snowflake/relation_configs/formats.py @@ -1,4 +1,5 @@ from dbt_common.dataclass_schema import StrEnum # doesn't exist in standard library until py3.11 +from typing_extensions import Self class TableFormat(StrEnum): @@ -10,5 +11,9 @@ class TableFormat(StrEnum): DEFAULT = "default" ICEBERG = "iceberg" + @classmethod + def default(cls) -> Self: + return cls("default") + def __str__(self): return self.value diff --git 
a/dbt/include/snowflake/macros/adapters.sql b/dbt/include/snowflake/macros/adapters.sql index aa8895819..b60cea0b0 100644 --- a/dbt/include/snowflake/macros/adapters.sql +++ b/dbt/include/snowflake/macros/adapters.sql @@ -195,7 +195,7 @@ {% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%} {% call statement('alter_column_type') %} - alter table {{ relation.render() }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }}; + alter {{ relation.get_ddl_prefix_for_alter() }} table {{ relation.render() }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }}; {% endcall %} {% endmacro %} @@ -216,7 +216,7 @@ {% else -%} {% set relation_type = relation.type %} {% endif %} - alter {{ relation_type }} {{ relation.render() }} alter + alter {{ relation.get_ddl_prefix_for_alter() }} {{ relation_type }} {{ relation.render() }} alter {% for column_name in existing_columns if (column_name in existing_columns) or (column_name|lower in existing_columns) %} {{ get_column_comment_sql(column_name, column_dict) }} {{- ',' if not loop.last else ';' }} {% endfor %} @@ -275,7 +275,7 @@ {% if add_columns %} {% set sql -%} - alter {{ relation_type }} {{ relation.render() }} add column + alter {{ relation.get_ddl_prefix_for_alter() }} {{ relation_type }} {{ relation.render() }} add column {% for column in add_columns %} {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }} {% endfor %} @@ -288,7 +288,7 @@ {% if remove_columns %} {% set sql -%} - alter {{ relation_type }} {{ relation.render() }} drop column + alter {{ relation.get_ddl_prefix_for_alter() }} {{ relation_type }} {{ relation.render() }} drop column {% for column in remove_columns %} {{ column.name }}{{ ',' if not loop.last }} {% endfor %} diff --git a/dbt/include/snowflake/macros/materializations/incremental.sql b/dbt/include/snowflake/macros/materializations/incremental.sql index 9172c061e..d73525d6d 100644 --- 
a/dbt/include/snowflake/macros/materializations/incremental.sql +++ b/dbt/include/snowflake/macros/materializations/incremental.sql @@ -63,7 +63,17 @@ {#-- Set vars --#} {%- set full_refresh_mode = (should_full_refresh()) -%} {%- set language = model['language'] -%} - {% set target_relation = this %} + + {%- set identifier = this.name -%} + + {%- set target_relation = api.Relation.create( + identifier=identifier, + schema=schema, + database=database, + type='table', + table_format=config.get('table_format', 'default') + ) -%} + {% set existing_relation = load_relation(this) %} {#-- The temp relation will be a view (faster) or temp table, depending on upsert/merge strategy --#} @@ -90,11 +100,21 @@ {%- call statement('main', language=language) -%} {{ create_table_as(False, target_relation, compiled_code, language) }} {%- endcall -%} + {% elif full_refresh_mode %} + {% if target_relation.needs_to_drop(existing_relation) %} + {{ drop_relation_if_exists(existing_relation) }} + {% endif %} {%- call statement('main', language=language) -%} {{ create_table_as(False, target_relation, compiled_code, language) }} {%- endcall -%} + {% elif target_relation.table_format != existing_relation.table_format %} + {% do exceptions.raise_compiler_error( + "Unable to alter incremental model `" ~ target_relation.identifier ~ "` to '" ~ target_relation.table_format ~ " table format due to Snowflake limitation. 
Please execute with --full-refresh to drop the table and recreate in new table format.'" + ) + %} + {% else %} {#-- Create the temp relation, either as a view or as a temp table --#} {% if tmp_relation_type == 'view' %} diff --git a/dbt/include/snowflake/macros/materializations/table.sql b/dbt/include/snowflake/macros/materializations/table.sql index 9ee8a0b12..995757b6b 100644 --- a/dbt/include/snowflake/macros/materializations/table.sql +++ b/dbt/include/snowflake/macros/materializations/table.sql @@ -7,7 +7,7 @@ {% set grant_config = config.get('grants') %} - {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} + {%- set existing_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} {%- set target_relation = api.Relation.create( identifier=identifier, schema=schema, @@ -18,8 +18,8 @@ {{ run_hooks(pre_hooks) }} - {% if target_relation.needs_to_drop(old_relation) %} - {{ drop_relation_if_exists(old_relation) }} + {% if target_relation.needs_to_drop(existing_relation) %} + {{ drop_relation_if_exists(existing_relation) }} {% endif %} {% call statement('main', language=language) -%} @@ -28,7 +28,7 @@ {{ run_hooks(post_hooks) }} - {% set should_revoke = should_revoke(old_relation, full_refresh_mode=True) %} + {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} {% do persist_docs(target_relation, model) %} diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/create.sql b/dbt/include/snowflake/macros/relations/dynamic_table/create.sql index 253788779..4ebcf145b 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/create.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/create.sql @@ -1,16 +1,83 @@ {% macro snowflake__get_create_dynamic_table_as_sql(relation, sql) -%} +{#- +-- Produce DDL that creates a dynamic table +-- +-- Args: 
+-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Globals: +-- - config: NodeConfig - contains the attribution required to produce a SnowflakeDynamicTableConfig +-- Returns: +-- A valid DDL statement which will result in a new dynamic table. +-#} {%- set dynamic_table = relation.from_config(config.model) -%} + {%- if dynamic_table.catalog.table_format == 'iceberg' -%} + {{ _get_create_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) }} + {%- else -%} + {{ _get_create_dynamic_standard_table_as_sql(dynamic_table, relation, sql) }} + {%- endif -%} + +{%- endmacro %} + + +{% macro _get_create_dynamic_standard_table_as_sql(dynamic_table, relation, sql) -%} +{#- +-- Produce DDL that creates a standard dynamic table +-- +-- This follows the syntax outlined here: +-- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#syntax +-- +-- Args: +-- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table +-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Returns: +-- A valid DDL statement which will result in a new dynamic standard table. 
+-#} + create dynamic table {{ relation }} target_lag = '{{ dynamic_table.target_lag }}' warehouse = {{ dynamic_table.snowflake_warehouse }} - {% if dynamic_table.refresh_mode %} - refresh_mode = {{ dynamic_table.refresh_mode }} - {% endif %} - {% if dynamic_table.initialize %} - initialize = {{ dynamic_table.initialize }} - {% endif %} + {{ optional('refresh_mode', dynamic_table.refresh_mode) }} + {{ optional('initialize', dynamic_table.initialize) }} + as ( + {{ sql }} + ) + +{%- endmacro %} + + +{% macro _get_create_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) -%} +{#- +-- Produce DDL that creates a dynamic iceberg table +-- +-- This follows the syntax outlined here: +-- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#create-dynamic-iceberg-table +-- +-- Args: +-- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table +-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Returns: +-- A valid DDL statement which will result in a new dynamic iceberg table. 
+-#} + + create dynamic iceberg table {{ relation }} + target_lag = '{{ dynamic_table.target_lag }}' + warehouse = {{ dynamic_table.snowflake_warehouse }} + {{ optional('external_volume', dynamic_table.catalog.external_volume) }} + {{ optional('catalog', dynamic_table.catalog.name) }} + base_location = '{{ dynamic_table.catalog.base_location }}' + {{ optional('refresh_mode', dynamic_table.refresh_mode) }} + {{ optional('initialize', dynamic_table.initialize) }} as ( {{ sql }} ) diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql b/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql index cc79328fe..b5c49ad37 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql @@ -1,4 +1,14 @@ {% macro snowflake__describe_dynamic_table(relation) %} +{#- +-- Get all relevant metadata about a dynamic table +-- +-- Args: +-- - relation: SnowflakeRelation - the relation to describe +-- Returns: +-- A dictionary with one or two entries depending on whether iceberg is enabled: +-- - dynamic_table: the metadata associated with a standard dynamic table +-- - catalog: the metadata associated with the iceberg catalog +-#} {%- set _dynamic_table_sql -%} show dynamic tables like '{{ relation.identifier }}' @@ -14,7 +24,32 @@ "refresh_mode" from table(result_scan(last_query_id())) {%- endset %} - {% set _dynamic_table = run_query(_dynamic_table_sql) %} + {% set results = {'dynamic_table': run_query(_dynamic_table_sql)} %} - {% do return({'dynamic_table': _dynamic_table}) %} + {% if adapter.behavior.enable_iceberg_materializations.no_warn %} + {% set _ = results.update({'catalog': run_query(_get_describe_iceberg_catalog_sql(relation))}) %} + {% endif %} + + {% do return(results) %} +{% endmacro %} + + +{% macro _get_describe_iceberg_catalog_sql(relation) %} +{#- +-- Produce DQL that returns all relevant metadata about an iceberg catalog +-- +-- Args: +-- - 
relation: SnowflakeRelation - the relation to describe +-- Returns: +-- A valid DQL statement that will return metadata associated with an iceberg catalog +-#} + show iceberg tables + like '{{ relation.identifier }}' + in schema {{ relation.database }}.{{ relation.schema }} + ; + select + "catalog_name", + "external_volume_name", + "base_location" + from table(result_scan(last_query_id())) {% endmacro %} diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql b/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql index dbe27d66e..2e7b4566a 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql @@ -1,16 +1,82 @@ {% macro snowflake__get_replace_dynamic_table_sql(relation, sql) -%} +{#- +-- Produce DDL that replaces a dynamic table with a new dynamic table +-- +-- Args: +-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Globals: +-- - config: NodeConfig - contains the attribution required to produce a SnowflakeDynamicTableConfig +-- Returns: +-- A valid DDL statement which will result in a new dynamic table. 
+-#} {%- set dynamic_table = relation.from_config(config.model) -%} + {%- if dynamic_table.catalog.table_format == 'iceberg' -%} + {{ _get_replace_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) }} + {%- else -%} + {{ _get_replace_dynamic_standard_table_as_sql(dynamic_table, relation, sql) }} + {%- endif -%} + +{%- endmacro %} + +{% macro _get_replace_dynamic_standard_table_as_sql(dynamic_table, relation, sql) -%} +{#- +-- Produce DDL that replaces a standard dynamic table with a new standard dynamic table +-- +-- This follows the syntax outlined here: +-- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#syntax +-- +-- Args: +-- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table +-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Returns: +-- A valid DDL statement which will result in a new dynamic standard table. 
+-#} + create or replace dynamic table {{ relation }} target_lag = '{{ dynamic_table.target_lag }}' warehouse = {{ dynamic_table.snowflake_warehouse }} - {% if dynamic_table.refresh_mode %} - refresh_mode = {{ dynamic_table.refresh_mode }} - {% endif %} - {% if dynamic_table.initialize %} - initialize = {{ dynamic_table.initialize }} - {% endif %} + {{ optional('refresh_mode', dynamic_table.refresh_mode) }} + {{ optional('initialize', dynamic_table.initialize) }} + as ( + {{ sql }} + ) + +{%- endmacro %} + + +{% macro _get_replace_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) -%} +{#- +-- Produce DDL that replaces a dynamic iceberg table with a new dynamic iceberg table +-- +-- This follows the syntax outlined here: +-- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#create-dynamic-iceberg-table +-- +-- Args: +-- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table +-- - relation: Union[SnowflakeRelation, str] +-- - SnowflakeRelation - required for relation.render() +-- - str - is already the rendered relation name +-- - sql: str - the code defining the model +-- Returns: +-- A valid DDL statement which will result in a new dynamic iceberg table. 
+-#} + + create or replace dynamic iceberg table {{ relation }} + target_lag = '{{ dynamic_table.target_lag }}' + warehouse = {{ dynamic_table.snowflake_warehouse }} + {{ optional('external_volume', dynamic_table.catalog.external_volume) }} + {{ optional('catalog', dynamic_table.catalog.name) }} + base_location = '{{ dynamic_table.catalog.base_location }}' + {{ optional('refresh_mode', dynamic_table.refresh_mode) }} + {{ optional('initialize', dynamic_table.initialize) }} as ( {{ sql }} ) diff --git a/dbt/include/snowflake/macros/relations/table/create.sql b/dbt/include/snowflake/macros/relations/table/create.sql index e60b93039..e2141df4d 100644 --- a/dbt/include/snowflake/macros/relations/table/create.sql +++ b/dbt/include/snowflake/macros/relations/table/create.sql @@ -1,7 +1,7 @@ {% macro snowflake__create_table_as(temporary, relation, compiled_code, language='sql') -%} {%- if relation.is_iceberg_format and not adapter.behavior.enable_iceberg_materializations.no_warn %} - {% do exceptions.raise_compiler_error('Was unable to create model as Iceberg Table Format. Please set the `enable_iceberg_materializations` behavior flag to True in your dbt_project.yml. For more information, go to .') %} + {% do exceptions.raise_compiler_error('Was unable to create model as Iceberg Table Format. Please set the `enable_iceberg_materializations` behavior flag to True in your dbt_project.yml. 
For more information, go to https://docs.getdbt.com/reference/resource-configs/snowflake-configs.') %} {%- endif %} {%- set materialization_prefix = relation.get_ddl_prefix_for_create(config.model.config, temporary) -%} diff --git a/dbt/include/snowflake/macros/utils/optional.sql b/dbt/include/snowflake/macros/utils/optional.sql new file mode 100644 index 000000000..0758ca59f --- /dev/null +++ b/dbt/include/snowflake/macros/utils/optional.sql @@ -0,0 +1,14 @@ +{% macro optional(name, value, quote_char = '') %} +{#- +-- Insert optional DDL parameters only when their value is provided; makes DDL statements more readable +-- +-- Args: +-- - name: the name of the DDL option +-- - value: the value of the DDL option, may be None +-- - quote_char: the quote character to use (e.g. string), leave blank if unnecessary (e.g. integer or bool) +-- Returns: +-- If the value is not None (e.g. provided by the user), return the option setting DDL +-- If the value is None, return an empty string +-#} +{% if value is not none %}{{ name }} = {{ quote_char }}{{ value }}{{ quote_char }}{% endif %} +{% endmacro %} diff --git a/dev-requirements.txt b/dev-requirements.txt index f3d120eec..906003768 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -6,8 +6,7 @@ git+https://github.com/dbt-labs/dbt-common.git # dev ipdb~=0.13.13 -pre-commit~=3.7.0;python_version>="3.9" -pre-commit~=3.5.0;python_version<"3.9" +pre-commit~=3.7.0 # test ddtrace==2.3.0 diff --git a/docker/Dockerfile b/docker/Dockerfile index d256dcac4..17315b12d 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,7 +1,7 @@ # this image gets published to GHCR for production use ARG py_version=3.11.2 -FROM python:$py_version-slim-bullseye as base +FROM python:$py_version-slim-bullseye AS base RUN apt-get update \ && apt-get dist-upgrade -y \ @@ -25,7 +25,7 @@ ENV LANG=C.UTF-8 RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir -FROM base as dbt-snowflake +FROM 
base AS dbt-snowflake ARG commit_ref=main diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile index 0fc667048..44f86f005 100644 --- a/docker/dev.Dockerfile +++ b/docker/dev.Dockerfile @@ -1,43 +1,43 @@ # this image does not get published, it is intended for local development only, see `Makefile` for usage -FROM ubuntu:22.04 as base +FROM ubuntu:24.04 AS base # prevent python installation from asking for time zone region ARG DEBIAN_FRONTEND=noninteractive # add python repository RUN apt-get update \ - && apt-get install -y software-properties-common=0.99.22.9 \ - && add-apt-repository -y ppa:deadsnakes/ppa \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* + && apt-get install -y software-properties-common=0.99.48 \ + && add-apt-repository -y ppa:deadsnakes/ppa \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* # install python RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - build-essential=12.9ubuntu3 \ - git-all=1:2.34.1-1ubuntu1.10 \ - python3.8=3.8.19-1+jammy1 \ - python3.8-dev=3.8.19-1+jammy1 \ - python3.8-distutils=3.8.19-1+jammy1 \ - python3.8-venv=3.8.19-1+jammy1 \ - python3-pip=22.0.2+dfsg-1ubuntu0.4 \ - python3-wheel=0.37.1-2ubuntu0.22.04.1 \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* + && apt-get install -y --no-install-recommends \ + build-essential=12.10ubuntu1 \ + git-all=1:2.43.0-1ubuntu7.1 \ + python3.9=3.9.20-1+noble1 \ + python3.9-dev=3.9.20-1+noble1 \ + python3.9-distutils=3.9.20-1+noble1 \ + python3.9-venv=3.9.20-1+noble1 \ + python3-pip=24.0+dfsg-1ubuntu1 \ + python3-wheel=0.42.0-2 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* \ # update the default system interpreter to the newly installed version -RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1 +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.9 1 -FROM base 
as dbt-snowflake-dev +FROM base AS dbt-snowflake-dev -HEALTHCHECK CMD python3 --version || exit 1 +HEALTHCHECK CMD python --version || exit 1 # send stdout/stderr to terminal ENV PYTHONUNBUFFERED=1 @@ -47,4 +47,4 @@ WORKDIR /opt/code VOLUME /opt/code # create a virtual environment -RUN python3 -m venv /opt/venv +RUN python -m venv /opt/venv diff --git a/setup.py b/setup.py index 210c309b1..c0716341d 100644 --- a/setup.py +++ b/setup.py @@ -5,9 +5,9 @@ import sys # require python 3.8 or newer -if sys.version_info < (3, 8): +if sys.version_info < (3, 9): print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.8 or higher.") + print("Please upgrade to Python 3.9 or higher.") sys.exit(1) @@ -57,8 +57,8 @@ def _plugin_version() -> str: packages=find_namespace_packages(include=["dbt", "dbt.*"]), include_package_data=True, install_requires=[ - "dbt-common>=1.3.0,<2.0", - "dbt-adapters>=1.3.1,<2.0", + "dbt-common>=1.10,<2.0", + "dbt-adapters>=1.7,<2.0", "snowflake-connector-python[secure-local-storage]~=3.0", # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency "dbt-core>=1.8.0", @@ -72,12 +72,10 @@ def _plugin_version() -> str: "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ], - python_requires=">=3.8", + python_requires=">=3.9", ) diff --git a/tests/functional/auth_tests/test_database_role.py b/tests/functional/auth_tests/test_database_role.py new file mode 100644 index 000000000..c0f93d7d6 --- /dev/null +++ b/tests/functional/auth_tests/test_database_role.py @@ -0,0 +1,68 @@ +import os + +import pytest + +from dbt.tests.util import run_dbt + + +SEED = """ +id 
+1 +""".strip() + + +MODEL = """ +{{ config( + materialized='incremental', +) }} +select * from {{ ref('my_seed') }} +""" + + +class TestDatabaseRole: + """ + This test addresses https://github.com/dbt-labs/dbt-snowflake/issues/1151 + + While dbt-snowflake does not manage database roles (it only manages account roles), + it still needs to account for them so that it doesn't try to revoke them. + """ + + @pytest.fixture(scope="class") + def seeds(self): + return {"my_seed.csv": SEED} + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": MODEL} + + @pytest.fixture(scope="class") + def project_config_update(self): + # grant to the test role even though this role already has these permissions + # this triggers syncing grants since `apply_grants` first looks for a grants config + return {"models": {"+grants": {"select": [os.getenv("SNOWFLAKE_TEST_ROLE")]}}} + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project, prefix): + """ + Create a database role with access to the model we're about to create. + The existence of this database role triggered the bug as dbt-snowflake attempts + to revoke it if the user also provides a grants config. 
+ """ + role = f"BLOCKING_DB_ROLE_{prefix}" + project.run_sql(f"CREATE DATABASE ROLE {role}") + sql = f""" + GRANT + ALL PRIVILEGES ON FUTURE TABLES + IN SCHEMA {project.test_schema} + TO DATABASE ROLE {role} + """ + project.run_sql(sql) + yield + project.run_sql(f"DROP DATABASE ROLE {role}") + + def test_database_role(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + # run a second time to trigger revoke on an incremental update + # this originally failed, demonstrating the bug + run_dbt(["run"]) diff --git a/tests/functional/iceberg/models.py b/tests/functional/iceberg/models.py new file mode 100644 index 000000000..6433f74bf --- /dev/null +++ b/tests/functional/iceberg/models.py @@ -0,0 +1,85 @@ +_MODEL_BASIC_TABLE_MODEL = """ +{{ + config( + materialized = "table", + cluster_by=['id'], + ) +}} +select 1 as id +""" + +_MODEL_BASIC_ICEBERG_MODEL = """ +{{ + config( + transient = "true", + materialized = "table", + cluster_by=['id'], + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", + ) +}} + +select * from {{ ref('first_table') }} +""" + +_MODEL_BASIC_DYNAMIC_TABLE_MODEL = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='1 minute', + refresh_mode='INCREMENTAL', + table_format='iceberg', + external_volume='s3_iceberg_snow', +) }} + +select * from {{ ref('first_table') }} +""" + +_MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='1 minute', + refresh_mode='INCREMENTAL', + table_format='iceberg', + external_volume='s3_iceberg_snow', + base_location_subpath='subpath', +) }} + +select * from {{ ref('first_table') }} +""" + +_MODEL_BUILT_ON_ICEBERG_TABLE = """ +{{ + config( + materialized = "table", + ) +}} +select * from {{ ref('iceberg_table') }} +""" + +_MODEL_TABLE_BEFORE_SWAP = """ +{{ + config( + materialized = "table", + ) +}} +select 1 as id +""" + 
+_MODEL_VIEW_BEFORE_SWAP = """ +select 1 as id +""" + +_MODEL_TABLE_FOR_SWAP_ICEBERG = """ +{{ + config( + materialized = "table", + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", + ) +}} +select 1 as id +""" diff --git a/tests/functional/iceberg/test_incremental_models.py b/tests/functional/iceberg/test_incremental_models.py new file mode 100644 index 000000000..35ccdcd89 --- /dev/null +++ b/tests/functional/iceberg/test_incremental_models.py @@ -0,0 +1,169 @@ +import pytest +import time + +from pathlib import Path + +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file + + +_SEED_INCREMENTAL_STRATEGIES = """ +world_id,world_name,boss +1,Yoshi's Island,Iggy +2,Donut Plains,Morton +3,Vanilla Dome,Lemmy +4,Cookie Mountain,Temmy +5,Forest of Illusion,Roy +""".strip() + +_MODEL_BASIC_TABLE_MODEL = """ +{{ + config( + materialized = "table", + ) +}} +select * from {{ ref('seed') }} +""" + +_MODEL_INCREMENTAL_ICEBERG_BASE = """ +{{{{ + config( + materialized='incremental', + table_format='iceberg', + incremental_strategy='{strategy}', + unique_key="world_id", + external_volume = "s3_iceberg_snow", + on_schema_change = "sync_all_columns" + ) +}}}} +select * from {{{{ ref('upstream_table') }}}} + +{{% if is_incremental() %}} +where world_id > 2 +{{% endif %}} +""" + +_MODEL_INCREMENTAL_ICEBERG_APPEND = _MODEL_INCREMENTAL_ICEBERG_BASE.format(strategy="append") +_MODEL_INCREMENTAL_ICEBERG_MERGE = _MODEL_INCREMENTAL_ICEBERG_BASE.format(strategy="merge") +_MODEL_INCREMENTAL_ICEBERG_DELETE_INSERT = _MODEL_INCREMENTAL_ICEBERG_BASE.format( + strategy="delete+insert" +) + + +_QUERY_UPDATE_UPSTREAM_TABLE = """ +UPDATE {database}.{schema}.upstream_table set world_name = 'Twin Bridges', boss = 'Ludwig' where world_id = 4; +""" + +_QUERY_UPDATE_UPSTREAM_TABLE_NO_EFFECT = """ +UPDATE {database}.{schema}.upstream_table set world_name = 'Doughnut Plains' where world_id = 2; +""" + + +class TestIcebergIncrementalStrategies: 
+    append: str = f"append_{hash(time.time())}"
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {"flags": {"enable_iceberg_materializations": True}}
+
+    @pytest.fixture(scope="class")
+    def seeds(self):
+        return {
+            "seed.csv": _SEED_INCREMENTAL_STRATEGIES,
+        }
+
+    @pytest.fixture(scope="function", autouse=True)
+    def setup_class(self, project):
+        run_dbt(["seed"])
+        yield
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "upstream_table.sql": _MODEL_BASIC_TABLE_MODEL,
+            f"{self.append}.sql": _MODEL_INCREMENTAL_ICEBERG_APPEND,
+            "merge.sql": _MODEL_INCREMENTAL_ICEBERG_MERGE,
+            "delete_insert.sql": _MODEL_INCREMENTAL_ICEBERG_DELETE_INSERT,
+        }
+
+    def __check_correct_operations(self, model_name, /, rows_affected, status="SUCCESS"):
+        run_results = run_dbt(
+            ["show", "--inline", f"select * from {{{{ ref('{model_name}') }}}} where world_id = 4"]
+        )
+        assert run_results[0].adapter_response["rows_affected"] == rows_affected
+        assert run_results[0].adapter_response["code"] == status
+
+        if "append" not in model_name:
+            run_results, stdout = run_dbt_and_capture(
+                [
+                    "show",
+                    "--inline",
+                    f"select * from {{{{ ref('{model_name}') }}}} where world_id = 2",
+                ]
+            )
+            assert run_results[0].adapter_response["rows_affected"] == 1
+            assert "Doughnut" not in stdout
+
+    def test_incremental_strategies_with_update(self, project, setup_class):
+        run_results = run_dbt()
+        assert len(run_results) == 4
+
+        project.run_sql(
+            _QUERY_UPDATE_UPSTREAM_TABLE.format(
+                database=project.database, schema=project.test_schema
+            )
+        )
+        project.run_sql(
+            _QUERY_UPDATE_UPSTREAM_TABLE_NO_EFFECT.format(
+                database=project.database, schema=project.test_schema
+            )
+        )
+
+        run_results = run_dbt(["run", "-s", self.append, "merge", "delete_insert"])
+        assert len(run_results) == 3
+
+        self.__check_correct_operations(self.append, rows_affected=2)
+        self.__check_correct_operations("merge", rows_affected=1)
+        self.__check_correct_operations("delete_insert",
rows_affected=1) + + +class TestIcebergIncrementalOnSchemaChangeMutatesRelations: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"flags": {"enable_iceberg_materializations": True}} + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": _SEED_INCREMENTAL_STRATEGIES, + } + + @pytest.fixture(scope="function", autouse=True) + def setup_class(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + yield + + @pytest.fixture(scope="class") + def models(self): + return { + "upstream_table.sql": _MODEL_BASIC_TABLE_MODEL, + "merge.sql": _MODEL_INCREMENTAL_ICEBERG_MERGE, + } + + def test_sync_and_append_semantics(self, project, setup_class): + model_file = project.project_root / Path("models") / Path("merge.sql") + sql = f"show columns in {project.database}.{project.test_schema}.merge;" + column_names = [column[2] for column in project.run_sql(sql, fetch="all")] + assert len(column_names) == 3 + + write_file(_MODEL_INCREMENTAL_ICEBERG_MERGE.replace("*", "*, 1 as new_column"), model_file) + run_dbt() + column_names = [column[2].lower() for column in project.run_sql(sql, fetch="all")] + assert len(column_names) == 4 + assert "new_column" in column_names + + write_file(_MODEL_INCREMENTAL_ICEBERG_MERGE, model_file) + run_dbt() + column_names = [column[2].lower() for column in project.run_sql(sql, fetch="all")] + assert len(column_names) == 3 + assert "new_column" not in column_names diff --git a/tests/functional/iceberg/test_table_basic.py b/tests/functional/iceberg/test_table_basic.py index 0bfdf59f1..e835a5fce 100644 --- a/tests/functional/iceberg/test_table_basic.py +++ b/tests/functional/iceberg/test_table_basic.py @@ -4,64 +4,16 @@ from dbt.tests.util import run_dbt, rm_file, write_file -_MODEL_BASIC_TABLE_MODEL = """ -{{ - config( - materialized = "table", - cluster_by=['id'], - ) -}} -select 1 as id -""" - -_MODEL_BASIC_ICEBERG_MODEL = """ -{{ - config( - transient = "true", - materialized = "table", - 
cluster_by=['id'], - table_format="iceberg", - external_volume="s3_iceberg_snow", - base_location_subpath="subpath", - ) -}} - -select * from {{ ref('first_table') }} -""" - -_MODEL_BUILT_ON_ICEBERG_TABLE = """ -{{ - config( - materialized = "table", - ) -}} -select * from {{ ref('iceberg_table') }} -""" - -_MODEL_TABLE_BEFORE_SWAP = """ -{{ - config( - materialized = "table", - ) -}} -select 1 as id -""" - -_MODEL_VIEW_BEFORE_SWAP = """ -select 1 as id -""" - -_MODEL_TABLE_FOR_SWAP_ICEBERG = """ -{{ - config( - materialized = "table", - table_format="iceberg", - external_volume="s3_iceberg_snow", - base_location_subpath="subpath", - ) -}} -select 1 as id -""" +from tests.functional.iceberg.models import ( + _MODEL_BASIC_TABLE_MODEL, + _MODEL_BASIC_ICEBERG_MODEL, + _MODEL_BASIC_DYNAMIC_TABLE_MODEL, + _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH, + _MODEL_BUILT_ON_ICEBERG_TABLE, + _MODEL_TABLE_BEFORE_SWAP, + _MODEL_VIEW_BEFORE_SWAP, + _MODEL_TABLE_FOR_SWAP_ICEBERG, +) class TestIcebergTableBuilds: @@ -75,11 +27,13 @@ def models(self): "first_table.sql": _MODEL_BASIC_TABLE_MODEL, "iceberg_table.sql": _MODEL_BASIC_ICEBERG_MODEL, "table_built_on_iceberg_table.sql": _MODEL_BUILT_ON_ICEBERG_TABLE, + "dynamic_table.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL, + "dynamic_tableb.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH, } def test_iceberg_tables_build_and_can_be_referred(self, project): run_results = run_dbt() - assert len(run_results) == 3 + assert len(run_results) == 5 class TestIcebergTableTypeBuildsOnExistingTable: diff --git a/tests/functional/query_tag/test_query_tags.py b/tests/functional/query_tag/test_query_tags.py index 421aae1b6..4ddafabb2 100644 --- a/tests/functional/query_tag/test_query_tags.py +++ b/tests/functional/query_tag/test_query_tags.py @@ -1,6 +1,7 @@ import pytest from dbt.tests.util import run_dbt + snapshots__snapshot_query_tag_sql = """ {% snapshot snapshot_query_tag %} {{ @@ -14,16 +15,15 @@ }} select 1 as id, 'blue' as color {% 
endsnapshot %} - """ + models__table_model_query_tag_sql = """ {{ config(materialized = 'table') }} - select 1 as id - """ + models__models_config_yml = """ version: 2 @@ -33,23 +33,21 @@ - name: id data_tests: - unique - """ + models__view_model_query_tag_sql = """ {{ config(materialized = 'view') }} - select 1 as id - """ + models__incremental_model_query_tag_sql = """ {{ config(materialized = 'incremental', unique_key = 'id') }} - select 1 as id - """ + macros__check_tag_sql = """ {% macro check_query_tag() %} @@ -61,12 +59,12 @@ {% endif %} {% endmacro %} - """ + seeds__seed_query_tag_csv = """id 1 -""" +""".strip() class TestQueryTag: @@ -95,20 +93,14 @@ def seeds(self): def project_config_update(self, prefix): return { "config-version": 2, - "models": { - "tests": {"query_tag": prefix, "post-hook": "{{ check_tag() }}"}, - }, - "seeds": { - "tests": {"query_tag": prefix, "post-hook": "{{ check_tag() }}"}, - }, - "snapshots": { - "tests": {"query_tag": prefix, "post-hook": "{{ check_tag() }}"}, - }, + "models": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, + "seeds": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, + "snapshots": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, "tests": {"test": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}}, } def build_all_with_query_tags(self, project, prefix): - run_dbt(["build", "--vars", '{{"check_tag": "{}"}}'.format(prefix)]) + run_dbt(["build", "--vars", '{{"query_tag": "{}"}}'.format(prefix)]) def test_snowflake_query_tag(self, project, prefix): self.build_all_with_query_tags(project, prefix) @@ -130,7 +122,7 @@ def profiles_config_update(self, prefix): return {"query_tag": prefix} def build_all_with_query_tags(self, project, prefix): - run_dbt(["build", "--vars", '{{"check_tag": "{}"}}'.format(prefix)]) + run_dbt(["build", "--vars", '{{"query_tag": "{}"}}'.format(prefix)]) def test_snowflake_query_tag(self, project, prefix): 
self.build_all_with_query_tags(project, prefix) diff --git a/tests/functional/relation_tests/dynamic_table_tests/models.py b/tests/functional/relation_tests/dynamic_table_tests/models.py index 5e46bed53..4dcd6cf48 100644 --- a/tests/functional/relation_tests/dynamic_table_tests/models.py +++ b/tests/functional/relation_tests/dynamic_table_tests/models.py @@ -10,7 +10,7 @@ {{ config( materialized='dynamic_table', snowflake_warehouse='DBT_TESTING', - target_lag='2 minutes', + target_lag='2 minutes', refresh_mode='INCREMENTAL', ) }} select * from {{ ref('my_seed') }} @@ -28,11 +28,25 @@ """ +DYNAMIC_ICEBERG_TABLE = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='2 minutes', + refresh_mode='INCREMENTAL', + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", +) }} +select * from {{ ref('my_seed') }} +""" + + DYNAMIC_TABLE_ALTER = """ {{ config( materialized='dynamic_table', snowflake_warehouse='DBT_TESTING', - target_lag='5 minutes', + target_lag='5 minutes', refresh_mode='INCREMENTAL', ) }} select * from {{ ref('my_seed') }} @@ -43,8 +57,36 @@ {{ config( materialized='dynamic_table', snowflake_warehouse='DBT_TESTING', - target_lag='2 minutes', + target_lag='2 minutes', + refresh_mode='FULL', +) }} +select * from {{ ref('my_seed') }} +""" + + +DYNAMIC_ICEBERG_TABLE_ALTER = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='5 minutes', + refresh_mode='INCREMENTAL', + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", +) }} +select * from {{ ref('my_seed') }} +""" + + +DYNAMIC_ICEBERG_TABLE_REPLACE = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='2 minutes', refresh_mode='FULL', + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", ) }} select * from {{ ref('my_seed') }} """ diff --git 
a/tests/functional/relation_tests/dynamic_table_tests/test_basic.py b/tests/functional/relation_tests/dynamic_table_tests/test_basic.py index 2406e1c14..79a2241ca 100644 --- a/tests/functional/relation_tests/dynamic_table_tests/test_basic.py +++ b/tests/functional/relation_tests/dynamic_table_tests/test_basic.py @@ -7,6 +7,7 @@ class TestBasic: + iceberg: bool = False @pytest.fixture(scope="class", autouse=True) def seeds(self): @@ -14,10 +15,17 @@ def seeds(self): @pytest.fixture(scope="class", autouse=True) def models(self): - yield { + my_models = { "my_dynamic_table.sql": models.DYNAMIC_TABLE, "my_dynamic_table_downstream.sql": models.DYNAMIC_TABLE_DOWNSTREAM, } + if self.iceberg: + my_models.update( + { + "my_dynamic_iceberg_table.sql": models.DYNAMIC_ICEBERG_TABLE, + } + ) + yield my_models @pytest.fixture(scope="class", autouse=True) def setup(self, project): @@ -28,3 +36,13 @@ def test_dynamic_table_full_refresh(self, project): run_dbt(["run", "--full-refresh"]) assert query_relation_type(project, "my_dynamic_table") == "dynamic_table" assert query_relation_type(project, "my_dynamic_table_downstream") == "dynamic_table" + if self.iceberg: + assert query_relation_type(project, "my_dynamic_iceberg_table") == "dynamic_table" + + +class TestBasicIcebergOn(TestBasic): + iceberg = True + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"flags": {"enable_iceberg_materializations": True}} diff --git a/tests/functional/relation_tests/dynamic_table_tests/test_configuration_changes.py b/tests/functional/relation_tests/dynamic_table_tests/test_configuration_changes.py index 3c4f65a87..f389344e0 100644 --- a/tests/functional/relation_tests/dynamic_table_tests/test_configuration_changes.py +++ b/tests/functional/relation_tests/dynamic_table_tests/test_configuration_changes.py @@ -7,6 +7,7 @@ class Changes: + iceberg: bool = False @pytest.fixture(scope="class", autouse=True) def seeds(self): @@ -14,10 +15,18 @@ def seeds(self): 
@pytest.fixture(scope="class", autouse=True) def models(self): - yield { + my_models = { "dynamic_table_alter.sql": models.DYNAMIC_TABLE, "dynamic_table_replace.sql": models.DYNAMIC_TABLE, } + if self.iceberg: + my_models.update( + { + "dynamic_table_iceberg_alter.sql": models.DYNAMIC_ICEBERG_TABLE, + "dynamic_table_iceberg_replace.sql": models.DYNAMIC_ICEBERG_TABLE, + } + ) + yield my_models @pytest.fixture(scope="function", autouse=True) def setup_class(self, project): @@ -33,14 +42,23 @@ def setup_method(self, project, setup_class): update_model(project, "dynamic_table_alter", models.DYNAMIC_TABLE_ALTER) update_model(project, "dynamic_table_replace", models.DYNAMIC_TABLE_REPLACE) + if self.iceberg: + update_model( + project, "dynamic_table_iceberg_alter", models.DYNAMIC_ICEBERG_TABLE_ALTER + ) + update_model( + project, "dynamic_table_iceberg_replace", models.DYNAMIC_ICEBERG_TABLE_REPLACE + ) yield update_model(project, "dynamic_table_alter", models.DYNAMIC_TABLE) update_model(project, "dynamic_table_replace", models.DYNAMIC_TABLE) + if self.iceberg: + update_model(project, "dynamic_table_iceberg_alter", models.DYNAMIC_ICEBERG_TABLE) + update_model(project, "dynamic_table_iceberg_replace", models.DYNAMIC_ICEBERG_TABLE) - @staticmethod - def assert_changes_are_applied(project): + def assert_changes_are_applied(self, project): altered = describe_dynamic_table(project, "dynamic_table_alter") assert altered.snowflake_warehouse == "DBT_TESTING" assert altered.target_lag == "5 minutes" # this updated @@ -51,8 +69,18 @@ def assert_changes_are_applied(project): assert replaced.target_lag == "2 minutes" assert replaced.refresh_mode == "FULL" # this updated - @staticmethod - def assert_changes_are_not_applied(project): + if self.iceberg: + altered_iceberg = describe_dynamic_table(project, "dynamic_table_iceberg_alter") + assert altered_iceberg.snowflake_warehouse == "DBT_TESTING" + assert altered_iceberg.target_lag == "5 minutes" # this updated + assert 
altered_iceberg.refresh_mode == "INCREMENTAL" + + replaced_iceberg = describe_dynamic_table(project, "dynamic_table_iceberg_replace") + assert replaced_iceberg.snowflake_warehouse == "DBT_TESTING" + assert replaced_iceberg.target_lag == "2 minutes" + assert replaced_iceberg.refresh_mode == "FULL" # this updated + + def assert_changes_are_not_applied(self, project): altered = describe_dynamic_table(project, "dynamic_table_alter") assert altered.snowflake_warehouse == "DBT_TESTING" assert altered.target_lag == "2 minutes" # this would have updated, but didn't @@ -63,6 +91,19 @@ def assert_changes_are_not_applied(project): assert replaced.target_lag == "2 minutes" assert replaced.refresh_mode == "INCREMENTAL" # this would have updated, but didn't + if self.iceberg: + altered_iceberg = describe_dynamic_table(project, "dynamic_table_iceberg_alter") + assert altered_iceberg.snowflake_warehouse == "DBT_TESTING" + assert altered_iceberg.target_lag == "2 minutes" # this would have updated, but didn't + assert altered_iceberg.refresh_mode == "INCREMENTAL" + + replaced_iceberg = describe_dynamic_table(project, "dynamic_table_iceberg_replace") + assert replaced_iceberg.snowflake_warehouse == "DBT_TESTING" + assert replaced_iceberg.target_lag == "2 minutes" + assert ( + replaced_iceberg.refresh_mode == "INCREMENTAL" + ) # this would have updated, but didn't + def test_full_refresh_is_always_successful(self, project): # this always passes and always changes the configuration, regardless of on_configuration_change # and regardless of whether the changes require a replace versus an alter @@ -81,6 +122,17 @@ def test_changes_are_applied(self, project): self.assert_changes_are_applied(project) +class TestChangesApplyIcebergOn(TestChangesApply): + iceberg = True + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": {"on_configuration_change": "apply"}, + "flags": {"enable_iceberg_materializations": True}, + } + + class 
TestChangesContinue(Changes): @pytest.fixture(scope="class") def project_config_update(self): @@ -92,6 +144,17 @@ def test_changes_are_not_applied(self, project): self.assert_changes_are_not_applied(project) +class TestChangesContinueIcebergOn(TestChangesContinue): + iceberg = True + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": {"on_configuration_change": "continue"}, + "flags": {"enable_iceberg_materializations": True}, + } + + class TestChangesFail(Changes): @pytest.fixture(scope="class") def project_config_update(self): @@ -101,3 +164,14 @@ def test_changes_are_not_applied(self, project): # this fails and does not change the configuration run_dbt(["run"], expect_pass=False) self.assert_changes_are_not_applied(project) + + +class TestChangesFailIcebergOn(TestChangesFail): + iceberg = True + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": {"on_configuration_change": "fail"}, + "flags": {"enable_iceberg_materializations": True}, + } diff --git a/tests/functional/relation_tests/models.py b/tests/functional/relation_tests/models.py index 6fe066313..7b0050d11 100644 --- a/tests/functional/relation_tests/models.py +++ b/tests/functional/relation_tests/models.py @@ -31,3 +31,47 @@ ) }} select * from {{ ref('my_seed') }} """ + + +DYNAMIC_ICEBERG_TABLE = """ +{{ config( + materialized='dynamic_table', + snowflake_warehouse='DBT_TESTING', + target_lag='1 minute', + refresh_mode='INCREMENTAL', + table_format="iceberg", + external_volume="s3_iceberg_snow", + base_location_subpath="subpath", +) }} +select * from {{ ref('my_seed') }} +""" + +ICEBERG_TABLE = """ +{{ config( + materialized='table', + table_format="iceberg", + external_volume="s3_iceberg_snow", +) }} +select * from {{ ref('my_seed') }} +""" + +INCREMENTAL_ICEBERG_TABLE = """ +{{ config( + materialized='incremental', + table_format='iceberg', + incremental_strategy='append', + unique_key="id", + external_volume = 
"s3_iceberg_snow", +) }} +select * from {{ ref('my_seed') }} +""" + + +INCREMENTAL_TABLE = """ +{{ config( + materialized='incremental', + incremental_strategy='append', + unique_key="id", +) }} +select * from {{ ref('my_seed') }} +""" diff --git a/tests/functional/relation_tests/test_relation_type_change.py b/tests/functional/relation_tests/test_relation_type_change.py index 1246b0791..1024a92ca 100644 --- a/tests/functional/relation_tests/test_relation_type_change.py +++ b/tests/functional/relation_tests/test_relation_type_change.py @@ -1,21 +1,36 @@ from dataclasses import dataclass from itertools import product +from typing import Optional from dbt.tests.util import run_dbt import pytest from tests.functional.relation_tests import models -from tests.functional.utils import query_relation_type, update_model +from tests.functional.utils import describe_dynamic_table, query_relation_type, update_model @dataclass class Model: model: str relation_type: str + table_format: Optional[str] = "default" + is_incremental: Optional[bool] = False @property def name(self) -> str: - return f"{self.relation_type}" + if self.is_incremental: + name = f"{self.relation_type}_{self.table_format}_incremental" + else: + name = f"{self.relation_type}_{self.table_format}" + return name + + @property + def is_iceberg(self) -> bool: + return self.table_format == "iceberg" + + @property + def is_standard_table(self) -> bool: + return self.relation_type == "table" and not self.is_incremental @dataclass @@ -31,16 +46,47 @@ def name(self) -> str: def error_message(self) -> str: return f"Failed when migrating from: {self.initial.name} to: {self.final.name}" + @property + def uses_iceberg(self) -> bool: + return any([self.initial.is_iceberg, self.final.is_iceberg]) + relations = [ Model(models.VIEW, "view"), - Model(models.TABLE, "table"), - Model(models.DYNAMIC_TABLE, "dynamic_table"), + Model(models.TABLE, "table", "default"), + Model(models.INCREMENTAL_TABLE, "table", "default", 
is_incremental=True), + Model(models.DYNAMIC_TABLE, "dynamic_table", "default"), + Model(models.ICEBERG_TABLE, "table", "iceberg"), + Model(models.INCREMENTAL_ICEBERG_TABLE, "table", "iceberg", is_incremental=True), + Model(models.DYNAMIC_ICEBERG_TABLE, "dynamic_table", "iceberg"), ] scenarios = [Scenario(*scenario) for scenario in product(relations, relations)] +def requires_full_refresh(scenario) -> bool: + return any( + [ + # we can only swap incremental to table and back if both are iceberg + scenario.initial.is_incremental + and scenario.final.is_standard_table + and scenario.initial.table_format != scenario.final.table_format, + scenario.initial.is_standard_table + and scenario.final.is_incremental + and scenario.initial.table_format != scenario.final.table_format, + # we can't swap from an incremental to a dynamic table because the materialization does not handle this case + scenario.initial.relation_type == "dynamic_table" and scenario.final.is_incremental, + ] + ) + + class TestRelationTypeChange: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"flags": {"enable_iceberg_materializations": False}} + + @staticmethod + def include(scenario) -> bool: + return not scenario.uses_iceberg and not requires_full_refresh(scenario) @pytest.fixture(scope="class", autouse=True) def seeds(self): @@ -48,17 +94,68 @@ def seeds(self): @pytest.fixture(scope="class", autouse=True) def models(self): - yield {f"{scenario.name}.sql": scenario.initial.model for scenario in scenarios} + yield { + f"{scenario.name}.sql": scenario.initial.model + for scenario in scenarios + if self.include(scenario) + } @pytest.fixture(scope="class", autouse=True) def setup(self, project): run_dbt(["seed"]) run_dbt(["run"]) for scenario in scenarios: - update_model(project, scenario.name, scenario.final.model) - run_dbt(["run"]) + if self.include(scenario): + update_model(project, scenario.name, scenario.final.model) + # allow for dbt to fail so that we can see which 
scenarios pass and which scenarios fail + try: + run_dbt(["run"], expect_pass=False) + except Exception: + pass @pytest.mark.parametrize("scenario", scenarios, ids=[scenario.name for scenario in scenarios]) def test_replace(self, project, scenario): - relation_type = query_relation_type(project, scenario.name) - assert relation_type == scenario.final.relation_type, scenario.error_message + if self.include(scenario): + relation_type = query_relation_type(project, scenario.name) + assert relation_type == scenario.final.relation_type, scenario.error_message + if relation_type == "dynamic_table": + dynamic_table = describe_dynamic_table(project, scenario.name) + assert dynamic_table.catalog.table_format == scenario.final.table_format + else: + pytest.skip() + + +class TestRelationTypeChangeFullRefreshRequired(TestRelationTypeChange): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": {"enable_iceberg_materializations": False}, + "models": {"full_refresh": True}, + } + + @staticmethod + def include(scenario) -> bool: + return not scenario.uses_iceberg and requires_full_refresh(scenario) + + +class TestRelationTypeChangeIcebergOn(TestRelationTypeChange): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"flags": {"enable_iceberg_materializations": True}} + + @staticmethod + def include(scenario) -> bool: + return scenario.uses_iceberg and not requires_full_refresh(scenario) + + +class TestRelationTypeChangeIcebergOnFullRefreshRequired(TestRelationTypeChange): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": {"enable_iceberg_materializations": True}, + "models": {"full_refresh": True}, + } + + @staticmethod + def include(scenario) -> bool: + return scenario.uses_iceberg and requires_full_refresh(scenario) diff --git a/tox.ini b/tox.ini index d6f040a61..f6952efaf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,8 @@ [tox] skipsdist = True -envlist = 
py38,py39,py310,py311,py312 +envlist = py39,py310,py311,py312 -[testenv:{unit,py38,py39,py310,py311,py312,py}] +[testenv:{unit,py39,py310,py311,py312,py}] description = unit testing skip_install = true passenv = @@ -13,7 +13,7 @@ deps = -rdev-requirements.txt -e. -[testenv:{integration,py38,py39,py310,py311,py312,py}-{snowflake}] +[testenv:{integration,py39,py310,py311,py312,py}-{snowflake}] description = adapter plugin integration testing skip_install = true passenv =