diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 43e4f539b..0fa84469a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.8.0a1 +current_version = 1.9.0a1 parse = (?P<major>[\d]+) # major version number \.(?P<minor>[\d]+) # minor version number \.(?P<patch>[\d]+) # patch version number @@ -32,6 +32,4 @@ first_value = 1 [bumpversion:part:nightly] -[bumpversion:file:setup.py] - [bumpversion:file:dbt/adapters/snowflake/__version__.py] diff --git a/.changes/unreleased/Dependencies-20230919-120616.yaml b/.changes/unreleased/Dependencies-20230919-120616.yaml deleted file mode 100644 index 8d9d45681..000000000 --- a/.changes/unreleased/Dependencies-20230919-120616.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update ddtrace requirement from ~=1.18 to ~=1.19" -time: 2023-09-19T12:06:17.00000Z -custom: - Author: dependabot[bot] - PR: 780 diff --git a/.changes/unreleased/Dependencies-20231009-122807.yaml b/.changes/unreleased/Dependencies-20231009-122807.yaml deleted file mode 100644 index c133a1d4e..000000000 --- a/.changes/unreleased/Dependencies-20231009-122807.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update pre-commit-hooks requirement from ~=4.4 to ~=4.5" -time: 2023-10-09T12:28:07.00000Z -custom: - Author: dependabot[bot] - PR: 795 diff --git a/.changes/unreleased/Dependencies-20231011-124519.yaml b/.changes/unreleased/Dependencies-20231011-124519.yaml deleted file mode 100644 index 4c3df6550..000000000 --- a/.changes/unreleased/Dependencies-20231011-124519.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.5.1 to 1.6.0" -time: 2023-10-11T12:45:19.00000Z -custom: - Author: dependabot[bot] - PR: 799 diff --git a/.changes/unreleased/Dependencies-20231016-121821.yaml b/.changes/unreleased/Dependencies-20231016-121821.yaml deleted file mode 100644 index 80028d3f2..000000000 --- a/.changes/unreleased/Dependencies-20231016-121821.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update pre-commit requirement from ~=3.4 to ~=3.5" -time: 2023-10-16T12:18:21.00000Z -custom: - Author: dependabot[bot] - PR: 807 diff --git a/.changes/unreleased/Dependencies-20231018-123921.yaml b/.changes/unreleased/Dependencies-20231018-123921.yaml deleted file mode 100644 index 03cbb3faa..000000000 --- a/.changes/unreleased/Dependencies-20231018-123921.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update black requirement from ~=23.9 to ~=23.10" -time: 2023-10-18T12:39:21.00000Z -custom: - Author: dependabot[bot] - PR: 809 diff --git a/.changes/unreleased/Dependencies-20231031-120931.yaml b/.changes/unreleased/Dependencies-20231031-120931.yaml deleted file mode 100644 index e7f08d021..000000000 --- a/.changes/unreleased/Dependencies-20231031-120931.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.6.0 to 1.6.1" -time: 2023-10-31T12:09:31.00000Z -custom: - Author: dependabot[bot] - PR: 821 diff --git a/.changes/unreleased/Dependencies-20231031-121003.yaml b/.changes/unreleased/Dependencies-20231031-121003.yaml deleted file mode 100644 index ac89c2486..000000000 --- a/.changes/unreleased/Dependencies-20231031-121003.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update ddtrace requirement from ~=1.19 to ~=2.1" -time: 2023-10-31T12:10:03.00000Z -custom: - Author: dependabot[bot] - PR: 822 diff --git a/.changes/unreleased/Dependencies-20231108-121743.yaml b/.changes/unreleased/Dependencies-20231108-121743.yaml deleted file mode
100644 index e6f418cfa..000000000 --- a/.changes/unreleased/Dependencies-20231108-121743.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update black requirement from ~=23.10 to ~=23.11" -time: 2023-11-08T12:17:43.00000Z -custom: - Author: dependabot[bot] - PR: 826 diff --git a/.changes/unreleased/Dependencies-20231113-122906.yaml b/.changes/unreleased/Dependencies-20231113-122906.yaml deleted file mode 100644 index 718a074f4..000000000 --- a/.changes/unreleased/Dependencies-20231113-122906.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.6.1 to 1.7.0" -time: 2023-11-13T12:29:06.00000Z -custom: - Author: dependabot[bot] - PR: 828 diff --git a/.changes/unreleased/Dependencies-20231113-122910.yaml b/.changes/unreleased/Dependencies-20231113-122910.yaml deleted file mode 100644 index 065043fc7..000000000 --- a/.changes/unreleased/Dependencies-20231113-122910.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update pytest-xdist requirement from ~=3.3 to ~=3.4" -time: 2023-11-13T12:29:10.00000Z -custom: - Author: dependabot[bot] - PR: 829 diff --git a/.changes/unreleased/Dependencies-20231116-125757.yaml b/.changes/unreleased/Dependencies-20231116-125757.yaml deleted file mode 100644 index 26b6357e7..000000000 --- a/.changes/unreleased/Dependencies-20231116-125757.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update ddtrace requirement from ~=2.1 to ~=2.2" -time: 2023-11-16T12:57:57.00000Z -custom: - Author: dependabot[bot] - PR: 832 diff --git a/.changes/unreleased/Dependencies-20231127-120651.yaml b/.changes/unreleased/Dependencies-20231127-120651.yaml deleted file mode 100644 index 706233474..000000000 --- a/.changes/unreleased/Dependencies-20231127-120651.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update wheel requirement from ~=0.41 to ~=0.42" -time: 2023-11-27T12:06:51.00000Z -custom: - Author: dependabot[bot] - PR: 844 diff --git a/.changes/unreleased/Dependencies-20231128-120535.yaml b/.changes/unreleased/Dependencies-20231128-120535.yaml deleted file mode 100644 index 1a9ce74aa..000000000 --- a/.changes/unreleased/Dependencies-20231128-120535.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update ddtrace requirement from ~=2.2 to ~=2.3" -time: 2023-11-28T12:05:35.00000Z -custom: - Author: dependabot[bot] - PR: 848 diff --git a/.changes/unreleased/Dependencies-20231128-120543.yaml b/.changes/unreleased/Dependencies-20231128-120543.yaml deleted file mode 100644 index 704a9949d..000000000 --- a/.changes/unreleased/Dependencies-20231128-120543.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update pytest-xdist requirement from ~=3.4 to ~=3.5" -time: 2023-11-28T12:05:43.00000Z -custom: - Author: dependabot[bot] - PR: 849 diff --git a/.changes/unreleased/Dependencies-20231128-120548.yaml b/.changes/unreleased/Dependencies-20231128-120548.yaml deleted file mode 100644 index a2d5574ef..000000000 --- a/.changes/unreleased/Dependencies-20231128-120548.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.7.0 to 1.7.1" -time: 2023-11-28T12:05:48.00000Z -custom: - Author: dependabot[bot] - PR: 850 diff --git a/.changes/unreleased/Dependencies-20231204-120306.yaml b/.changes/unreleased/Dependencies-20231204-120306.yaml deleted file mode 100644 index cbc90b9eb..000000000 --- a/.changes/unreleased/Dependencies-20231204-120306.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update freezegun requirement from ~=1.2 to 
~=1.3" -time: 2023-12-04T12:03:06.00000Z -custom: - Author: dependabot[bot] - PR: 860 diff --git a/.changes/unreleased/Dependencies-20231212-121926.yaml b/.changes/unreleased/Dependencies-20231212-121926.yaml deleted file mode 100644 index 16124f097..000000000 --- a/.changes/unreleased/Dependencies-20231212-121926.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Update black requirement from ~=23.11 to ~=23.12" -time: 2023-12-12T12:19:26.00000Z -custom: - Author: dependabot[bot] - PR: 864 diff --git a/.changes/unreleased/Dependencies-20231219-125152.yaml b/.changes/unreleased/Dependencies-20231219-125152.yaml new file mode 100644 index 000000000..2d730daf1 --- /dev/null +++ b/.changes/unreleased/Dependencies-20231219-125152.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update freezegun requirement from ~=1.3 to ~=1.4" +time: 2023-12-19T12:51:52.00000Z +custom: + Author: dependabot[bot] + PR: 869 diff --git a/.changes/unreleased/Dependencies-20240412-155921.yaml b/.changes/unreleased/Dependencies-20240412-155921.yaml new file mode 100644 index 000000000..f83e5b404 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240412-155921.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump actions/upload-artifact from 3 to 4" +time: 2024-04-12T15:59:21.00000Z +custom: + Author: dependabot[bot] + PR: 971 diff --git a/.changes/unreleased/Dependencies-20240429-124038.yaml b/.changes/unreleased/Dependencies-20240429-124038.yaml new file mode 100644 index 000000000..5fa954c8a --- /dev/null +++ b/.changes/unreleased/Dependencies-20240429-124038.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump dbt-labs/actions from 1.1.0 to 1.1.1" +time: 2024-04-29T12:40:38.00000Z +custom: + Author: dependabot[bot] + PR: 1006 diff --git a/.changes/unreleased/Dependencies-20240429-124044.yaml b/.changes/unreleased/Dependencies-20240429-124044.yaml new file mode 100644 index 000000000..834fce096 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240429-124044.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump actions/download-artifact from 3 to 4" +time: 2024-04-29T12:40:44.00000Z +custom: + Author: dependabot[bot] + PR: 1007 diff --git a/.changes/unreleased/Dependencies-20240624-122538.yaml b/.changes/unreleased/Dependencies-20240624-122538.yaml new file mode 100644 index 000000000..e47731aef --- /dev/null +++ b/.changes/unreleased/Dependencies-20240624-122538.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump aurelien-baudet/workflow-dispatch from 2 to 4" +time: 2024-06-24T12:25:38.00000Z +custom: + Author: dependabot[bot] + PR: 1093 diff --git a/.changes/unreleased/Dependencies-20240718-120848.yaml b/.changes/unreleased/Dependencies-20240718-120848.yaml new file mode 100644 index 000000000..c46a30eba --- /dev/null +++ b/.changes/unreleased/Dependencies-20240718-120848.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update twine requirement from ~=4.0 to ~=5.1" +time: 2024-07-18T12:08:48.00000Z +custom: + Author: dependabot[bot] + PR: 1120 diff --git a/.changes/unreleased/Dependencies-20240718-120849.yaml b/.changes/unreleased/Dependencies-20240718-120849.yaml new file mode 100644 index 000000000..df248ff7d --- /dev/null +++ b/.changes/unreleased/Dependencies-20240718-120849.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Bump pre-commit from 3.7.0 to 3.7.1" +time: 2024-07-18T12:08:49.00000Z +custom: + Author: dependabot[bot] + PR: 1119 diff --git a/.changes/unreleased/Dependencies-20240718-120852.yaml b/.changes/unreleased/Dependencies-20240718-120852.yaml new file 
mode 100644 index 000000000..40c171f93 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240718-120852.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update wheel requirement from ~=0.42 to ~=0.43" +time: 2024-07-18T12:08:52.00000Z +custom: + Author: dependabot[bot] + PR: 1121 diff --git a/.changes/unreleased/Dependencies-20240718-120857.yaml b/.changes/unreleased/Dependencies-20240718-120857.yaml new file mode 100644 index 000000000..e4bfe04d0 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240718-120857.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update pytest-xdist requirement from ~=3.5 to ~=3.6" +time: 2024-07-18T12:08:57.00000Z +custom: + Author: dependabot[bot] + PR: 1122 diff --git a/.changes/unreleased/Dependencies-20240719-120828.yaml b/.changes/unreleased/Dependencies-20240719-120828.yaml new file mode 100644 index 000000000..ea7af843c --- /dev/null +++ b/.changes/unreleased/Dependencies-20240719-120828.yaml @@ -0,0 +1,6 @@ +kind: "Dependencies" +body: "Update tox requirement from ~=4.11 to ~=4.16" +time: 2024-07-19T12:08:28.00000Z +custom: + Author: dependabot[bot] + PR: 1135 diff --git a/.changes/unreleased/Features-20240109-165520.yaml b/.changes/unreleased/Features-20240109-165520.yaml deleted file mode 100644 index b38770760..000000000 --- a/.changes/unreleased/Features-20240109-165520.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Update base adapter references as part of decoupling migration -time: 2024-01-09T16:55:20.859657-06:00 -custom: - Author: McKnight-42 - Issue: "882" diff --git a/.changes/unreleased/Features-20240131-125318.yaml b/.changes/unreleased/Features-20240131-125318.yaml new file mode 100644 index 000000000..63771d71e --- /dev/null +++ b/.changes/unreleased/Features-20240131-125318.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support refresh_mode and initialize parameters for dynamic tables +time: 2024-01-31T12:53:18.111616Z +custom: + Author: HenkvanDyk,mikealfare + Issue: "1076" diff --git a/.changes/unreleased/Features-20240205-174816.yaml b/.changes/unreleased/Features-20240205-174816.yaml deleted file mode 100644 index 5cf6d41f2..000000000 --- a/.changes/unreleased/Features-20240205-174816.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support primative types + object, variant, array in snowflake for unit testing" -time: 2024-02-05T17:48:16.118398-05:00 -custom: - Author: michelleark - Issue: "898" diff --git a/.changes/unreleased/Features-20240430-185714.yaml b/.changes/unreleased/Features-20240430-185714.yaml new file mode 100644 index 000000000..9fd1e97ea --- /dev/null +++ b/.changes/unreleased/Features-20240430-185714.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add tests for cross-database `cast` macro +time: 2024-04-30T18:57:14.753057-06:00 +custom: + Author: dbeatty10 + Issue: "1009" diff --git a/.changes/unreleased/Features-20240501-151901.yaml b/.changes/unreleased/Features-20240501-151901.yaml new file mode 100644 index 000000000..0f792c40e --- /dev/null +++ b/.changes/unreleased/Features-20240501-151901.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Cross-database `date` macro +time: 2024-05-01T15:19:01.141157-06:00 +custom: + Author: dbeatty10 + Issue: 1013 diff --git a/.changes/unreleased/Features-20240604-154856.yaml b/.changes/unreleased/Features-20240604-154856.yaml new file mode 100644 index 000000000..7d83b1da7 --- /dev/null +++ b/.changes/unreleased/Features-20240604-154856.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Replace underscores with hyphens in account IDs to prevent SSL issues 
+time: 2024-06-04T15:48:56.845374-07:00 +custom: + Author: colin-rogers-dbt + Issue: "1068" diff --git a/.changes/unreleased/Features-20240610-171026.yaml b/.changes/unreleased/Features-20240610-171026.yaml new file mode 100644 index 000000000..5cc055160 --- /dev/null +++ b/.changes/unreleased/Features-20240610-171026.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support JWT Authentication +time: 2024-06-10T17:10:26.421463-04:00 +custom: + Author: llam15 + Issue: 1079 726 diff --git a/.changes/unreleased/Features-20240709-194316.yaml b/.changes/unreleased/Features-20240709-194316.yaml new file mode 100644 index 000000000..a867387e3 --- /dev/null +++ b/.changes/unreleased/Features-20240709-194316.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Improve run times for large projects by reusing connections by default +time: 2024-07-09T19:43:16.489649-04:00 +custom: + Author: mikealfare amardatar + Issue: "1082" diff --git a/.changes/unreleased/Features-20240710-172345.yaml b/.changes/unreleased/Features-20240710-172345.yaml new file mode 100644 index 000000000..e68f63812 --- /dev/null +++ b/.changes/unreleased/Features-20240710-172345.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Improve run times when using key pair auth by caching the private key +time: 2024-07-10T17:23:45.046905-04:00 +custom: + Author: mikealfare aranke + Issue: "1082" diff --git a/.changes/unreleased/Fixes-20231030-212151.yaml b/.changes/unreleased/Fixes-20231030-212151.yaml deleted file mode 100644 index 10939228f..000000000 --- a/.changes/unreleased/Fixes-20231030-212151.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Dynamic tables now show the proper type in catalog queries -time: 2023-10-30T21:21:51.220225-04:00 -custom: - Author: mikealfare - Issue: "817" diff --git a/.changes/unreleased/Fixes-20240216-125014.yaml b/.changes/unreleased/Fixes-20240216-125014.yaml deleted file mode 100644 index b2d4ea163..000000000 --- a/.changes/unreleased/Fixes-20240216-125014.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: remove `token` field from connection keys -time: 2024-02-16T12:50:14.459161-08:00 -custom: - Author: colin-rogers-dbt - Issue: "906" diff --git a/.changes/unreleased/Fixes-20240516-174337.yaml b/.changes/unreleased/Fixes-20240516-174337.yaml new file mode 100644 index 000000000..955d90ed3 --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-174337.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Update relation caching to correctly identify dynamic tables, accounting for Snowflake's `2024_03` bundle +time: 2024-05-16T17:43:37.336858-04:00 +custom: + Author: mikealfare + Issue: "1016" diff --git a/.changes/unreleased/Fixes-20240516-224134.yaml b/.changes/unreleased/Fixes-20240516-224134.yaml new file mode 100644 index 000000000..011ecb449 --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-224134.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Get catalog metadata for a single relation in the most optimized way using the get_catalog_for_single_relation macro and capability +time: 2024-05-16T22:41:34.256095+01:00 +custom: + Author: aranke + Issue: "1048" diff --git a/.changes/unreleased/Fixes-20240522-160538.yaml b/.changes/unreleased/Fixes-20240522-160538.yaml new file mode 100644 index 000000000..4921706a9 --- /dev/null +++ b/.changes/unreleased/Fixes-20240522-160538.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: 'Rename targets for tables and views use fully qualified names' +time: 2024-05-22T16:05:38.602074-04:00 +custom: + Author: mikealfare + Issue: "1031" diff --git a/.changes/unreleased/Fixes-20240605-125611.yaml 
b/.changes/unreleased/Fixes-20240605-125611.yaml new file mode 100644 index 000000000..c4560774c --- /dev/null +++ b/.changes/unreleased/Fixes-20240605-125611.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Surface SSO token expiration in logs +time: 2024-06-05T12:56:11.802237-04:00 +custom: + Author: mikealfare, McKnight-42 + Issue: "851" diff --git a/.changes/unreleased/Fixes-20240607-102708.yaml b/.changes/unreleased/Fixes-20240607-102708.yaml new file mode 100644 index 000000000..58cd9bbee --- /dev/null +++ b/.changes/unreleased/Fixes-20240607-102708.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: return to previous naming convention to return to quoting policy +time: 2024-06-07T10:27:08.542159-05:00 +custom: + Author: McKnight-42 + Issue: "1074" diff --git a/.changes/unreleased/Fixes-20240628-190140.yaml b/.changes/unreleased/Fixes-20240628-190140.yaml new file mode 100644 index 000000000..c58b465fd --- /dev/null +++ b/.changes/unreleased/Fixes-20240628-190140.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Fix scenario where using the `--empty` flag causes metadata queries to contain + limit clauses +time: 2024-06-28T19:01:40.558234-04:00 +custom: + Author: mikealfare + Issue: "1033" diff --git a/.changes/unreleased/Fixes-20240705-165932.yaml b/.changes/unreleased/Fixes-20240705-165932.yaml new file mode 100644 index 000000000..ffe902c92 --- /dev/null +++ b/.changes/unreleased/Fixes-20240705-165932.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Use show ... starts with instead of show ... like in _show_object_metadata +time: 2024-07-05T16:59:32.087555+01:00 +custom: + Author: aranke + Issue: "1102" diff --git a/.changes/unreleased/Security-20231128-173716.yaml b/.changes/unreleased/Security-20231128-173716.yaml deleted file mode 100644 index 18d48ad9c..000000000 --- a/.changes/unreleased/Security-20231128-173716.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Security -body: DDOS vulnerability in 41.0.5. 
Uptick to avoid -time: 2023-11-28T17:37:16.11557-08:00 -custom: - Author: versusfacit - PR: "852" diff --git a/.changes/unreleased/Under the Hood-20231119-122853.yaml b/.changes/unreleased/Under the Hood-20231119-122853.yaml deleted file mode 100644 index 06f8d5e4b..000000000 --- a/.changes/unreleased/Under the Hood-20231119-122853.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add tests for --empty flag -time: 2023-11-19T12:28:53.795687-05:00 -custom: - Author: michelleark - Issue: "837" diff --git a/.changes/unreleased/Under the Hood-20240117-112026.yaml b/.changes/unreleased/Under the Hood-20240117-112026.yaml deleted file mode 100644 index edf040221..000000000 --- a/.changes/unreleased/Under the Hood-20240117-112026.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Migrate to dbt-common and dbt-adapters package -time: 2024-01-17T11:20:26.713584-06:00 -custom: - Author: McKnight-42 - Issue: "890" diff --git a/.changes/unreleased/Under the Hood-20240327-001304.yaml b/.changes/unreleased/Under the Hood-20240327-001304.yaml new file mode 100644 index 000000000..3e823ec86 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240327-001304.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Lazy load agate +time: 2024-03-27T00:13:04.246062-04:00 +custom: + Author: dwreeves + Issue: "953" diff --git a/.changes/unreleased/Under the Hood-20240425-144556.yaml b/.changes/unreleased/Under the Hood-20240425-144556.yaml new file mode 100644 index 000000000..002da3c1f --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240425-144556.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Speedup catalog string comparison by using ilike before equals +time: 2024-04-25T14:45:56.549787+02:00 +custom: + Author: aranke + Issue: '1035' diff --git a/.changes/unreleased/Under the Hood-20240517-143743.yaml b/.changes/unreleased/Under the Hood-20240517-143743.yaml new file mode 100644 index 000000000..598c60ad4 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240517-143743.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Improve memory efficiency of the process_results() override. +time: 2024-05-17T14:37:43.7414-04:00 +custom: + Author: peterallenwebb + Issue: "1053" diff --git a/.changes/unreleased/Under the Hood-20240614-170858.yaml b/.changes/unreleased/Under the Hood-20240614-170858.yaml new file mode 100644 index 000000000..cc806726b --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240614-170858.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Automate all manual integration tests for Dynamic Tables +time: 2024-06-14T17:08:58.231472-04:00 +custom: + Author: mikealfare + Issue: "1084" diff --git a/.changes/unreleased/Under the Hood-20240716-174655.yaml b/.changes/unreleased/Under the Hood-20240716-174655.yaml new file mode 100644 index 000000000..14c3c8d76 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240716-174655.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add support for experimental record/replay testing. 
+time: 2024-07-16T17:46:55.11204-04:00 +custom: + Author: peterallenwebb + Issue: "1106" diff --git a/.changes/unreleased/Under the Hood-20240719-125618.yaml b/.changes/unreleased/Under the Hood-20240719-125618.yaml new file mode 100644 index 000000000..3d90b732c --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240719-125618.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove `freezegun` as a testing dependency; this package is no longer used +time: 2024-07-19T12:56:18.957049-04:00 +custom: + Author: mikealfare + Issue: "1136" diff --git a/.changes/unreleased/Under the Hood-20240722-143114.yaml b/.changes/unreleased/Under the Hood-20240722-143114.yaml new file mode 100644 index 000000000..dc5c2dbb1 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240722-143114.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add support for Python 3.12 +time: 2024-07-22T14:31:14.024865-07:00 +custom: + Author: versusfacit + Issue: "903" diff --git a/.changes/unreleased/Under the Hood-20240806-215935.yaml b/.changes/unreleased/Under the Hood-20240806-215935.yaml new file mode 100644 index 000000000..660918350 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240806-215935.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Isolating distribution testing +time: 2024-08-06T21:59:35.284641-04:00 +custom: + Author: leahwicz + Issue: "1130" diff --git a/.flake8 b/.flake8 deleted file mode 100644 index b08ffcd53..000000000 --- a/.flake8 +++ /dev/null @@ -1,16 +0,0 @@ -[flake8] -select = - E - W - F -ignore = - # makes Flake8 work like black - W503, - W504, - # makes Flake8 work like black - E203, - E741, - E501, -exclude = test -per-file-ignores = - */__init__.py: F401 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f6283d123..02ed72d45 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1,3 @@ # This codeowners file is used to ensure all PRs require reviews from the adapters team -* @dbt-labs/core-adapters +* @dbt-labs/adapters diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2a6f34492..746dcae22 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,8 +1,29 @@ version: 2 updates: - # python dependencies - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch + - package-ecosystem: "docker" + directory: "/docker" + schedule: + interval: "weekly" + rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js index 5cc39722e..81386c54e 100644 --- a/.github/scripts/integration-test-matrix.js +++ b/.github/scripts/integration-test-matrix.js @@ -1,6 +1,6 @@ module.exports = ({ context }) => { const defaultPythonVersion = "3.8"; - const supportedPythonVersions = ["3.8", "3.9", "3.10", "3.11"]; + const supportedPythonVersions = ["3.8", "3.9", "3.10", "3.11", "3.12"]; const supportedAdapters = ["snowflake"]; // if PR, generate matrix based on files changed and PR labels @@ -44,7 +44,7 @@ module.exports = ({ context }) => { if (labels.includes("test macos") || testAllLabel) { include.push({ - os: "macos-latest", + os: "macos-12", adapter, "python-version": 
pythonVersion, }); @@ -78,7 +78,7 @@ module.exports = ({ context }) => { // additionally include runs for all adapters, on macos and windows, // but only for the default python version for (const adapter of supportedAdapters) { - for (const operatingSystem of ["windows-latest", "macos-latest"]) { + for (const operatingSystem of ["windows-latest", "macos-12"]) { include.push({ os: operatingSystem, adapter: adapter, diff --git a/.github/scripts/update_dbt_core_branch.sh b/.github/scripts/update_dbt_core_branch.sh deleted file mode 100755 index d28a40c35..000000000 --- a/.github/scripts/update_dbt_core_branch.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -e -set -e - -git_branch=$1 -target_req_file="dev-requirements.txt" -core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g" -postgres_req_sed_pattern="s|dbt-core.git.*#egg=dbt-postgres|dbt-core.git@${git_branch}#egg=dbt-postgres|g" -tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g" -if [[ "$OSTYPE" == darwin* ]]; then - # mac ships with a different version of sed that requires a delimiter arg - sed -i "" "$core_req_sed_pattern" $target_req_file - sed -i "" "$postgres_req_sed_pattern" $target_req_file - sed -i "" "$tests_req_sed_pattern" $target_req_file -else - sed -i "$core_req_sed_pattern" $target_req_file - sed -i "$postgres_req_sed_pattern" $target_req_file - sed -i "$tests_req_sed_pattern" $target_req_file -fi -core_version=$(curl "https://raw.githubusercontent.com/dbt-labs/dbt-core/${git_branch}/core/dbt/version.py" | grep "__version__ = *"|cut -d'=' -f2) -bumpversion --allow-dirty --new-version "$core_version" major diff --git a/.github/scripts/update_dev_dependency_branches.sh b/.github/scripts/update_dev_dependency_branches.sh new file mode 100755 index 000000000..022df6a8a --- /dev/null +++ b/.github/scripts/update_dev_dependency_branches.sh @@ -0,0 +1,21 @@ +#!/bin/bash -e +set -e + + +dbt_adapters_branch=$1 +dbt_core_branch=$2 +dbt_common_branch=$3 +target_req_file="dev-requirements.txt" +core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${dbt_core_branch}#egg=dbt-core|g" +adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${dbt_adapters_branch}|g" +common_req_sed_pattern="s|dbt-common.git|dbt-common.git@${dbt_common_branch}|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$adapters_req_sed_pattern" $target_req_file + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$common_req_sed_pattern" $target_req_file +else + sed -i "$adapters_req_sed_pattern" $target_req_file + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$common_req_sed_pattern" $target_req_file +fi diff --git a/.github/workflows/docs-issues.yml b/.github/workflows/docs-issues.yml index 00a098df8..f49cf517c 100644 --- a/.github/workflows/docs-issues.yml +++ b/.github/workflows/docs-issues.yml @@ -1,19 +1,18 @@ # **what?** -# Open an issue in docs.getdbt.com when a PR is labeled `user docs` +# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed # **why?** # To reduce barriers for keeping docs up to date # **when?** -# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged, -# not the workflow that existed on the PR branch. This allows old PRs to get comments. +# When an issue is labeled `user docs` and is closed as completed. 
Can be labeled before or after the issue is closed. -name: Open issues in docs.getdbt.com repo when a PR is labeled -run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}" +name: Open issues in docs.getdbt.com repo when an issue is labeled +run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}" on: - pull_request_target: + issues: types: [labeled, closed] defaults: @@ -21,23 +20,22 @@ defaults: shell: bash permissions: - issues: write # opens new issues - pull-requests: write # comments on PRs - + issues: write # comments on issues jobs: open_issues: - # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the + # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned. + # If this logic does not exist in this workflow, it runs the # risk of duplication of issues being created due to merge and label both triggering this workflow to run and neither having # generated the comment before the other runs. This lives here instead of the shared workflow because this is where we # decide if it should run or not. if: | - (github.event.pull_request.merged == true) && - ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) || + (github.event.issue.state == 'closed' && github.event.issue.state_reason == 'completed') && ( + (github.event.action == 'closed' && contains(github.event.issue.labels.*.name, 'user docs')) || (github.event.action == 'labeled' && github.event.label.name == 'user docs')) uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main with: issue_repository: "dbt-labs/docs.getdbt.com" - issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}" + issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}" issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
secrets: inherit diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 698982c0f..b3662d5c0 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -20,6 +20,8 @@ name: Adapter Integration Tests +run-name: "${{ (contains(github.event_name, 'workflow_') && inputs.name) || github.event_name }}: ${{ (contains(github.event_name, 'workflow_') && inputs.adapter_branch) || github.ref_name }} by @${{ github.actor }}" + on: # pushes to release branches push: @@ -34,10 +36,31 @@ on: # manual trigger workflow_dispatch: inputs: - dbt-core-branch: - description: "branch of dbt-core to use in dev-requirements.txt" + name: + description: "Name to associate with run (example: 'dbt-adapters-242')" + required: false + type: string + default: "Adapter Integration Tests" + adapter_branch: + description: "The branch of this adapter repository to use" + type: string required: false + default: "main" + dbt_adapters_branch: + description: "The branch of dbt-adapters to use" type: string + required: false + default: "main" + dbt_core_branch: + description: "The branch of dbt-core to use" + type: string + required: false + default: "main" + dbt_common_branch: + description: "The branch of dbt-common to use" + type: string + required: false + default: "main" # explicitly turn off permissions for `GITHUB_TOKEN` permissions: read-all @@ -67,13 +90,13 @@ jobs: steps: - name: Check out the repository (non-PR) if: github.event_name != 'pull_request_target' - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} @@ -86,18 +109,21 @@ jobs: # 'false' - if none of changed files matches any of filter rules # also, returns: # `changes` - JSON array with names of all filters matching any of the changed files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 id: get-changes with: token: ${{ secrets.GITHUB_TOKEN }} filters: | snowflake: + - '.github/**/*.yml' + - '.github/**/*.sh' - 'dbt/**' - 'tests/**' - 'dev-requirements.txt' + - '*.py' - name: Generate integration test matrix id: generate-matrix - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: CHANGES: ${{ steps.get-changes.outputs.changes }} with: @@ -138,25 +164,41 @@ jobs: steps: - name: Check out the repository - if: github.event_name != 'pull_request_target' - uses: actions/checkout@v3 + if: github.event_name == 'push' + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Check out the repository (workflow_dispatch) + if: github.event_name == 'workflow_dispatch' + uses: actions/checkout@v4 with: persist-credentials: false + ref: ${{ inputs.adapter_branch }} - # explicity checkout the branch for the PR, + # explicitly checkout the branch for the PR, # this is necessary for the `pull_request_target` event - name: Check out the repository (PR) if: github.event_name == 'pull_request_target' - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false ref: ${{ github.event.pull_request.head.sha }} - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - name: Update Adapters and Core branches (update dev_requirements.txt) + if: ${{ github.event_name == 
'workflow_dispatch' }} + run: | + ./.github/scripts/update_dev_dependency_branches.sh \ + ${{ inputs.dbt_adapters_branch }} \ + ${{ inputs.dbt_core_branch }} \ + ${{ inputs.dbt_common_branch }} + cat dev-requirements.txt + - name: Install python dependencies run: | python -m pip install --user --upgrade pip @@ -164,12 +206,6 @@ jobs: python -m pip --version tox --version - - name: Update dev_requirements.txt - if: inputs.dbt-core-branch != '' - run: | - pip install bumpversion - ./.github/scripts/update_dbt_core_branch.sh ${{ inputs.dbt-core-branch }} - - name: Run tox (snowflake) if: matrix.adapter == 'snowflake' env: @@ -180,6 +216,8 @@ jobs: SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }} SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }} SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }} + SNOWFLAKE_TEST_PRIVATE_KEY: ${{ secrets.SNOWFLAKE_TEST_PRIVATE_KEY }} + SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE: ${{ secrets.SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE }} SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }} SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }} SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }} @@ -190,22 +228,24 @@ jobs: DBT_TEST_USER_3: dbt_test_role_3 run: tox -- --ddtrace - - uses: actions/upload-artifact@v3 - if: always() - with: - name: logs - path: ./logs - - name: Get current date if: always() id: date run: echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }} + path: ./logs + overwrite: true + + - uses: actions/upload-artifact@v4 if: always() with: name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv path: integration_results.csv + overwrite: true require-label-comment: runs-on: ubuntu-latest diff --git a/.github/workflows/main-branch-tests.yml b/.github/workflows/main-branch-tests.yml index 0684b1c2c..1c3a8b797 100644 --- a/.github/workflows/main-branch-tests.yml +++ b/.github/workflows/main-branch-tests.yml @@ -41,7 +41,7 @@ jobs: steps: - name: Call CI workflow for ${{ matrix.branch }} branch id: trigger-step - uses: aurelien-baudet/workflow-dispatch@v2 + uses: aurelien-baudet/workflow-dispatch@v4 with: workflow: ${{ matrix.workflow_name }} ref: ${{ matrix.branch }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a0d1ba6cb..95ce18033 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,12 +43,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.8' @@ -58,7 +58,6 @@ jobs: python -m pip install -r dev-requirements.txt python -m pip --version pre-commit --version - mypy --version dbt --version - name: Run pre-commit hooks @@ -72,7 +71,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] env: TOXENV: "unit" @@ -80,12 +79,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: 
false - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -104,11 +103,12 @@ jobs: id: date run: echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv path: unit_results.csv + overwrite: true build: name: build packages @@ -120,12 +120,12 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.8' @@ -156,13 +156,14 @@ jobs: if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi echo "is_alpha=$is_alpha" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dist path: dist/ + overwrite: true test-build: - name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }} + name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }} / ${{ matrix.dist-type }} if: needs.build.outputs.is_alpha == 0 @@ -173,12 +174,13 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.8', '3.9', '3.10', '3.11'] + os: [ubuntu-latest, macos-12, windows-latest] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + dist-type: ['whl', 'gz'] steps: - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -188,7 +190,7 @@ jobs: python -m pip install --upgrade wheel python -m pip --version - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: dist path: dist/ @@ -196,18 +198,10 @@ jobs: - name: Show distributions run: ls -lh dist/ - - name: Install wheel distributions + - name: Install ${{ matrix.dist-type }} distributions run: | - find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + find ./dist/*.${{ matrix.dist-type }} -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ - - name: Check wheel distributions + - name: Check ${{ matrix.dist-type }} distributions run: | - dbt --version - - - name: Install source distributions - run: | - find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ - - - name: Check source distributions - run: | - dbt --version + python -c "import dbt.adapters.snowflake" diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index fddec31c7..16a5d0da1 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -20,13 +20,14 @@ on: permissions: contents: write # this is the permission that allows creating a new release + packages: write # this is the permission that allows pushing Docker images defaults: run: shell: bash env: - RELEASE_BRANCH: "1.6.latest" + RELEASE_BRANCH: "1.8.latest" jobs: aggregate-release-data: @@ -39,7 +40,7 @@ jobs: steps: - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ env.RELEASE_BRANCH }} @@ -57,7 +58,7 @@ jobs: - name: "Audit Version And Parse 
Into Parts" id: semver - uses: dbt-labs/actions/parse-semver@v1.1.0 + uses: dbt-labs/actions/parse-semver@v1.1.1 with: version: ${{ steps.version-number-sources.outputs.current_version }} @@ -79,7 +80,7 @@ jobs: echo "number=$number" >> $GITHUB_OUTPUT - name: "Audit Nightly Release Version And Parse Into Parts" - uses: dbt-labs/actions/parse-semver@v1.1.0 + uses: dbt-labs/actions/parse-semver@v1.1.1 with: version: ${{ steps.nightly-release-version.outputs.number }} diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml new file mode 100644 index 000000000..59d782498 --- /dev/null +++ b/.github/workflows/release-internal.yml @@ -0,0 +1,51 @@ +# What? +# +# Tag and release an arbitrary ref. Uploads to an internal archive for further processing. +# +# How? +# +# After checking out and testing the provided ref, the image is built and uploaded. +# +# When? +# +# Manual trigger. + +name: "Release to Cloud" +run-name: "Release to Cloud off of ${{ inputs.ref }}" + +on: + workflow_dispatch: + inputs: + ref: + description: "The ref (sha or branch name) to use" + type: string + default: "main" + required: true + package_test_command: + description: "Package test command" + type: string + default: "python -c \"import dbt.adapters.snowflake\"" + required: true + skip_tests: + description: "Should the tests be skipped? (default to false)" + type: boolean + required: true + default: false + +defaults: + run: + shell: bash + +jobs: + invoke-reusable-workflow: + name: "Build and Release Internally" + + uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@main" + + with: + package_test_command: "${{ inputs.package_test_command }}" + dbms_name: "snowflake" + ref: "${{ inputs.ref }}" + skip_tests: "${{ inputs.skip_tests }}" + + secrets: "inherit" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 88942e251..ad7cf76b4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,8 @@ # # **when?** # This workflow can be run manually on demand or can be called by other workflows -name: Release to GitHub and PyPI +name: "Release to GitHub, PyPI, and Docker" +run-name: "Release ${{ inputs.version_number }} to GitHub, PyPI, and Docker" on: workflow_dispatch: @@ -60,6 +61,11 @@ on: type: boolean default: false required: false + only_docker: + description: "Only release Docker image, skip GitHub & PyPI" + type: boolean + default: false + required: false workflow_call: inputs: sha: @@ -128,12 +134,11 @@ jobs: echo Package test command: ${{ inputs.package_test_command }} echo Test run: ${{ inputs.test_run }} echo Nightly release: ${{ inputs.nightly_release }} + echo Only Docker: ${{ inputs.only_docker }} bump-version-generate-changelog: name: Bump package version, Generate changelog - uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main - with: sha: ${{ inputs.sha }} version_number: ${{ inputs.version_number }} @@ -141,17 +146,13 @@ jobs: env_setup_script_path: ${{ inputs.env_setup_script_path }} test_run: ${{ inputs.test_run }} nightly_release: ${{ inputs.nightly_release }} - secrets: inherit log-outputs-bump-version-generate-changelog: name: "[Log output] Bump package version, Generate changelog" - if: ${{ !failure() && !cancelled() }} - + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} needs: [bump-version-generate-changelog] - runs-on: ubuntu-latest - steps: - name: Print variables run: | @@ -160,11 +161,9 @@ jobs: build-test-package: name: Build, Test, Package - if: 
${{ !failure() && !cancelled() }} + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} needs: [bump-version-generate-changelog] - uses: dbt-labs/dbt-release/.github/workflows/build.yml@main - with: sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} @@ -174,19 +173,15 @@ jobs: package_test_command: ${{ inputs.package_test_command }} test_run: ${{ inputs.test_run }} nightly_release: ${{ inputs.nightly_release }} - secrets: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} github-release: name: GitHub Release - if: ${{ !failure() && !cancelled() }} - + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} needs: [bump-version-generate-changelog, build-test-package] - uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main - with: sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} @@ -195,34 +190,41 @@ jobs: pypi-release: name: PyPI Release - - needs: [github-release] - + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} + needs: [bump-version-generate-changelog, build-test-package] uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main - with: version_number: ${{ inputs.version_number }} test_run: ${{ inputs.test_run }} - secrets: PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + docker-release: + name: "Docker Release" + # We cannot release to docker on a test run because it uses the tag in GitHub as + # what we need to release but draft releases don't actually tag the commit so it + # finds nothing to release + if: ${{ !failure() && !cancelled() && (!inputs.test_run || inputs.only_docker) }} + needs: [bump-version-generate-changelog, build-test-package, github-release] + permissions: + packages: write + uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main + with: + version_number: ${{ inputs.version_number }} + test_run: ${{ inputs.test_run }} + slack-notification: name: Slack Notification if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }} - needs: [ - bump-version-generate-changelog, - build-test-package, github-release, pypi-release, + docker-release, ] - uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main with: status: "failure" - secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_ADAPTER_ALERTS }} diff --git a/.gitignore b/.gitignore index 780d98f70..de95d4777 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,4 @@ venv/ # vscode .vscode/ +.venv/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3d80b955c..e14455e28 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,63 +1,58 @@ # For more on configuring pre-commit hooks (see https://pre-commit.com/) - -# Force all unspecified python hooks to run python 3.8 default_language_version: - python: python3 + python: python3 repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 - hooks: - - id: check-yaml - args: [--unsafe] - - id: check-json - - id: end-of-file-fixer - - id: trailing-whitespace - - id: check-case-conflict -- repo: https://github.com/psf/black - rev: 23.1.0 - hooks: - - id: black - additional_dependencies: ['click~=8.1'] - args: - - "--line-length=99" - - "--target-version=py38" - - id: black - alias: black-check - stages: [manual] - additional_dependencies: ['click~=8.1'] - args: - - "--line-length=99" - - 
"--target-version=py38" - - "--check" - - "--diff" -- repo: https://github.com/pycqa/flake8 - rev: 6.0.0 - hooks: - - id: flake8 - - id: flake8 - alias: flake8-check - stages: [manual] -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.1.1 - hooks: - - id: mypy - # N.B.: Mypy is... a bit fragile. - # - # By using `language: system` we run this hook in the local - # environment instead of a pre-commit isolated one. This is needed - # to ensure mypy correctly parses the project. +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-yaml + args: [--unsafe] + - id: check-json + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + +- repo: https://github.com/dbt-labs/pre-commit-hooks + rev: v0.1.0a1 + hooks: + - id: dbt-core-in-adapters-check + +- repo: https://github.com/psf/black + rev: 24.4.2 + hooks: + - id: black + args: + - --line-length=99 + - --target-version=py38 + - --target-version=py39 + - --target-version=py310 + - --target-version=py311 + - --target-version=py312 + additional_dependencies: [flaky] + +- repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + exclude: tests/ + args: + - --max-line-length=99 + - --select=E,F,W + - --ignore=E203,E501,E741,W503,W504 + - --per-file-ignores=*/__init__.py:F401 - # It may cause trouble in that it adds environmental variables out - # of our control to the mix. Unfortunately, there's nothing we can - # do about per pre-commit's author. - # See https://github.com/pre-commit/pre-commit/issues/730 for details. - args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases] - files: ^dbt/adapters/.* - language: system - - id: mypy - alias: mypy-check - stages: [manual] - args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases] - files: ^dbt/adapters - language: system +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.10.0 + hooks: + - id: mypy + args: + - --show-error-codes + - --ignore-missing-imports + - --explicit-package-bases + - --warn-unused-ignores + - --pretty + files: ^dbt/adapters + additional_dependencies: + - types-pytz + - types-requests diff --git a/CHANGELOG.md b/CHANGELOG.md index 8b6702b8f..301a00ea9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ ## Previous Releases For information on prior major and minor releases, see their changelogs: +- [1.8](https://github.com/dbt-labs/dbt-snowflake/blob/1.8.latest/CHANGELOG.md) +- [1.7](https://github.com/dbt-labs/dbt-snowflake/blob/1.7.latest/CHANGELOG.md) - [1.6](https://github.com/dbt-labs/dbt-snowflake/blob/1.6.latest/CHANGELOG.md) - [1.5](https://github.com/dbt-labs/dbt-snowflake/blob/1.5.latest/CHANGELOG.md) - [1.4](https://github.com/dbt-labs/dbt-snowflake/blob/1.4.latest/CHANGELOG.md) diff --git a/Makefile b/Makefile index c8f682a1c..b42de9147 100644 --- a/Makefile +++ b/Makefile @@ -11,32 +11,6 @@ dev-uninstall: ## Uninstalls all packages while maintaining the virtual environm pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y pip uninstall -y dbt-snowflake -.PHONY: mypy -mypy: ## Runs mypy against staged changes for static type checking. - @\ - pre-commit run --hook-stage manual mypy-check | grep -v "INFO" - -.PHONY: flake8 -flake8: ## Runs flake8 against staged changes to enforce style guide. - @\ - pre-commit run --hook-stage manual flake8-check | grep -v "INFO" - -.PHONY: black -black: ## Runs black against staged changes to enforce style guide. 
- @\ - pre-commit run --hook-stage manual black-check -v | grep -v "INFO" - -.PHONY: lint -lint: ## Runs flake8 and mypy code checks against staged changes. - @\ - pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \ - pre-commit run mypy-check --hook-stage manual | grep -v "INFO" - -.PHONY: linecheck -linecheck: ## Checks for all Python lines 100 characters or more - @\ - find dbt -type f -name "*.py" -exec grep -I -r -n '.\{100\}' {} \; - .PHONY: unit unit: ## Runs unit tests with py38. @\ @@ -46,9 +20,7 @@ unit: ## Runs unit tests with py38. test: ## Runs unit tests with py38 and code checks against staged changes. @\ tox -p -e py38; \ - pre-commit run black-check --hook-stage manual | grep -v "INFO"; \ - pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \ - pre-commit run mypy-check --hook-stage manual | grep -v "INFO" + pre-commit run --all-files .PHONY: integration integration: ## Runs snowflake integration tests with py38. @@ -66,32 +38,11 @@ help: ## Show this help message. @echo 'targets:' @grep -E '^[7+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' -.PHONY: ubuntu-py38 -ubuntu-py38: - docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py38 . --build-arg version=3.8 - docker run --rm -it --name dbt-snowflake-ubuntu-py38 -v $(shell pwd):/opt/code dbt-snowflake-ubuntu-py38 - -.PHONY: ubuntu-py39 -ubuntu-py39: - docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py39 . --build-arg version=3.9 - docker run --rm -it --name dbt-snowflake-ubuntu-py39 -v $(shell pwd):/opt/code dbt-snowflake-ubuntu-py39 - -.PHONY: ubuntu-py310 -ubuntu-py310: - docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py310 . --build-arg version=3.10 - docker run --rm -it --name dbt-snowflake-ubuntu-py310 -v $(shell pwd):/opt/code dbt-snowflake-ubuntu-py310 - -.PHONY: ubuntu-py311 -ubuntu-py311: - docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py311 . --build-arg version=3.11 - docker run --rm -it --name dbt-snowflake-ubuntu-py311 -v $(shell pwd):/opt/code dbt-snowflake-ubuntu-py311 - -.PHONY: debian-py38 -debian-py38: - docker build -f docker/debian-py38.Dockerfile -t dbt-snowflake-debian-py38 . --build-arg version=3.8.15 - docker run --rm -it --name dbt-snowflake-debian-py38 -v $(shell pwd):/opt/code dbt-snowflake-debian-py38 +.PHONY: docker-dev +docker-dev: + docker build -f docker/dev.Dockerfile -t dbt-snowflake-dev . + docker run --rm -it --name dbt-snowflake-dev -v $(shell pwd):/opt/code dbt-snowflake-dev -.PHONY: dev-env-default -dev-env-default: - docker build -f docker/dev-env-default.Dockerfile -t dbt-snowflake-dev-env-default . - docker run --rm -it --name dbt-snowflake-dev-env-default -v $(shell pwd):/opt/code dbt-snowflake-dev-env-default +.PHONY: docker-prod +docker-prod: + docker build -f docker/Dockerfile -t dbt-snowflake . 
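The auth.py and connections.py hunks that follow both open with a version-gated import: functools.cache only exists on Python 3.9+, and the adapter still supports 3.8, so an unbounded lru_cache is substituted there. A minimal standalone sketch of that shim, using only the standard library (expensive_parse is a hypothetical stand-in for the key-parsing work the adapter memoizes):

```python
import sys

# functools.cache was added in Python 3.9; on 3.8, an unbounded
# lru_cache is an exact behavioral equivalent.
if sys.version_info < (3, 9):
    from functools import lru_cache

    cache = lru_cache(maxsize=None)
else:
    from functools import cache


@cache
def expensive_parse(raw: str) -> str:
    # hypothetical stand-in: in the adapter, the memoized work is
    # deserializing an RSA private key from a string or file path
    return raw.strip()
```

On 3.9+, functools.cache is defined as lru_cache(maxsize=None) under the hood, so the two branches behave identically.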
diff --git a/dbt/adapters/snowflake/__init__.py b/dbt/adapters/snowflake/__init__.py index fa7f831e3..f0c546067 100644 --- a/dbt/adapters/snowflake/__init__.py +++ b/dbt/adapters/snowflake/__init__.py @@ -1,12 +1,12 @@ -from dbt.adapters.snowflake.column import SnowflakeColumn # noqa -from dbt.adapters.snowflake.connections import SnowflakeConnectionManager # noqa +from dbt.adapters.snowflake.column import SnowflakeColumn +from dbt.adapters.snowflake.connections import SnowflakeConnectionManager from dbt.adapters.snowflake.connections import SnowflakeCredentials -from dbt.adapters.snowflake.relation import SnowflakeRelation # noqa +from dbt.adapters.snowflake.relation import SnowflakeRelation from dbt.adapters.snowflake.impl import SnowflakeAdapter -from dbt.adapters.base import AdapterPlugin # type: ignore -from dbt.include import snowflake # type: ignore +from dbt.adapters.base import AdapterPlugin +from dbt.include import snowflake Plugin = AdapterPlugin( - adapter=SnowflakeAdapter, credentials=SnowflakeCredentials, include_path=snowflake.PACKAGE_PATH # type: ignore + adapter=SnowflakeAdapter, credentials=SnowflakeCredentials, include_path=snowflake.PACKAGE_PATH ) diff --git a/dbt/adapters/snowflake/__version__.py b/dbt/adapters/snowflake/__version__.py index f15b401d1..6698ed64c 100644 --- a/dbt/adapters/snowflake/__version__.py +++ b/dbt/adapters/snowflake/__version__.py @@ -1 +1 @@ -version = "1.8.0a1" +version = "1.9.0a1" diff --git a/dbt/adapters/snowflake/auth.py b/dbt/adapters/snowflake/auth.py new file mode 100644 index 000000000..e914b6f3d --- /dev/null +++ b/dbt/adapters/snowflake/auth.py @@ -0,0 +1,57 @@ +import base64 +import sys +from typing import Optional + +if sys.version_info < (3, 9): + from functools import lru_cache + + cache = lru_cache(maxsize=None) +else: + from functools import cache + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey + + +@cache +def private_key_from_string( + private_key_string: str, passphrase: Optional[str] = None +) -> RSAPrivateKey: + + if passphrase: + encoded_passphrase = passphrase.encode() + else: + encoded_passphrase = None + + if private_key_string.startswith("-"): + return serialization.load_pem_private_key( + data=bytes(private_key_string, "utf-8"), + password=encoded_passphrase, + backend=default_backend(), + ) + return serialization.load_der_private_key( + data=base64.b64decode(private_key_string), + password=encoded_passphrase, + backend=default_backend(), + ) + + +@cache +def private_key_from_file( + private_key_path: str, passphrase: Optional[str] = None +) -> RSAPrivateKey: + + if passphrase: + encoded_passphrase = passphrase.encode() + else: + encoded_passphrase = None + + with open(private_key_path, "rb") as file: + private_key_bytes = file.read() + + return serialization.load_pem_private_key( + data=private_key_bytes, + password=encoded_passphrase, + backend=default_backend(), + ) diff --git a/dbt/adapters/snowflake/connections.py b/dbt/adapters/snowflake/connections.py index 955357231..10bee30f0 100644 --- a/dbt/adapters/snowflake/connections.py +++ b/dbt/adapters/snowflake/connections.py @@ -1,6 +1,14 @@ import base64 import datetime import os +import sys + +if sys.version_info < (3, 9): + from functools import lru_cache + + cache = lru_cache(maxsize=None) +else: + from functools import cache import pytz import re @@ -8,13 +16,11 @@ from dataclasses import dataclass from io import 
StringIO from time import sleep -from typing import Optional, Tuple, Union, Any, List -import agate -from dbt_common.clients.agate_helper import empty_table +from typing import Optional, Tuple, Union, Any, List, Iterable, TYPE_CHECKING -from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey import requests import snowflake.connector import snowflake.connector.constants @@ -38,13 +44,20 @@ DbtConfigError, ) from dbt_common.exceptions import DbtDatabaseError +from dbt_common.record import get_record_mode_from_env, RecorderMode from dbt.adapters.exceptions.connection import FailedToConnectError from dbt.adapters.contracts.connection import AdapterResponse, Connection, Credentials -from dbt.adapters.sql import SQLConnectionManager # type: ignore -from dbt.adapters.events.logging import AdapterLogger # type: ignore +from dbt.adapters.sql import SQLConnectionManager +from dbt.adapters.events.logging import AdapterLogger from dbt_common.events.functions import warn_or_error -from dbt.adapters.events.types import AdapterEventWarning +from dbt.adapters.events.types import AdapterEventWarning, AdapterEventError from dbt_common.ui import line_wrap_message, warning_tag +from dbt.adapters.snowflake.record import SnowflakeRecordReplayHandle + +from dbt.adapters.snowflake.auth import private_key_from_file, private_key_from_string + +if TYPE_CHECKING: + import agate logger = AdapterLogger("Snowflake") @@ -62,6 +75,15 @@ } +@cache +def snowflake_private_key(private_key: RSAPrivateKey) -> bytes: + return private_key.private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + @dataclass class SnowflakeAdapterResponse(AdapterResponse): query_id: str = "" @@ -70,7 +92,7 @@ class SnowflakeAdapterResponse(AdapterResponse): @dataclass class SnowflakeCredentials(Credentials): account: str - user: str + user: Optional[str] = None warehouse: Optional[str] = None role: Optional[str] = None password: Optional[str] = None @@ -93,12 +115,11 @@ class SnowflakeCredentials(Credentials): retry_on_database_errors: bool = False retry_all: bool = False insecure_mode: Optional[bool] = False + # this needs to default to `None` so that we can tell if the user set it; see `__post_init__()` reuse_connections: Optional[bool] = None def __post_init__(self): - if self.authenticator != "oauth" and ( - self.oauth_client_secret or self.oauth_client_id or self.token - ): + if self.authenticator != "oauth" and (self.oauth_client_secret or self.oauth_client_id): # the user probably forgot to set 'authenticator' like I keep doing warn_or_error( AdapterEventWarning( @@ -106,6 +127,30 @@ def __post_init__(self): ) ) + if self.authenticator not in ["oauth", "jwt"]: + if self.token: + warn_or_error( + AdapterEventWarning( + base_msg=( + "The token parameter was set, but the authenticator was " + "not set to 'oauth' or 'jwt'." 
+ ) + ) + ) + + if not self.user: + # The user attribute is only optional if 'authenticator' is 'jwt' or 'oauth' + warn_or_error( + AdapterEventError(base_msg="Invalid profile: 'user' is a required property.") + ) + + self.account = self.account.replace("_", "-") + + # only default `reuse_connections` to `True` if the user has not turned on `client_session_keep_alive` + # having both of these set to `True` could lead to hanging open connections, so it should be opt-in behavior + if self.client_session_keep_alive is False and self.reuse_connections is None: + self.reuse_connections = True + @property def type(self): return "snowflake" @@ -125,7 +170,6 @@ def _connection_keys(self): "role", "schema", "authenticator", - "private_key_path", "oauth_client_id", "query_tag", "client_session_keep_alive", @@ -146,6 +190,8 @@ def auth_args(self): # Pull all of the optional authentication args for the connector, # let connector handle the actual arg validation result = {} + if self.user: + result["user"] = self.user if self.password: result["password"] = self.password if self.host: @@ -180,6 +226,14 @@ def auth_args(self): ) result["token"] = token + + elif self.authenticator == "jwt": + # If authenticator is 'jwt', then the 'token' value should be used + # unmodified. We expose this as 'jwt' in the profile, but the value + # passed into the snowflake.connect method should still be 'oauth' + result["token"] = self.token + result["authenticator"] = "oauth" + # enable id token cache for linux result["client_store_temporary_credential"] = True # enable mfa token cache for linux @@ -239,47 +293,24 @@ def _get_access_token(self) -> str: f"""Did not receive valid json with access_token. Showing json response: {result_json}""" ) - + elif "access_token" not in result_json: + raise FailedToConnectError( + "This error occurs when authentication has expired. " + "Please reauth with your auth provider." 
+ ) return result_json["access_token"] - def _get_private_key(self): + def _get_private_key(self) -> Optional[bytes]: """Get Snowflake private key by path, from a Base64 encoded DER bytestring or None.""" if self.private_key and self.private_key_path: raise DbtConfigError("Cannot specify both `private_key` and `private_key_path`") - - if self.private_key_passphrase: - encoded_passphrase = self.private_key_passphrase.encode() - else: - encoded_passphrase = None - - if self.private_key: - if self.private_key.startswith("-"): - p_key = serialization.load_pem_private_key( - data=bytes(self.private_key, "utf-8"), - password=encoded_passphrase, - backend=default_backend(), - ) - - else: - p_key = serialization.load_der_private_key( - data=base64.b64decode(self.private_key), - password=encoded_passphrase, - backend=default_backend(), - ) - + elif self.private_key: + private_key = private_key_from_string(self.private_key, self.private_key_passphrase) elif self.private_key_path: - with open(self.private_key_path, "rb") as key: - p_key = serialization.load_pem_private_key( - key.read(), password=encoded_passphrase, backend=default_backend() - ) + private_key = private_key_from_file(self.private_key_path, self.private_key_passphrase) else: return None - - return p_key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption(), - ) + return snowflake_private_key(private_key) class SnowflakeConnectionManager(SQLConnectionManager): @@ -343,21 +374,32 @@ def connect(): if creds.query_tag: session_parameters.update({"QUERY_TAG": creds.query_tag}) + + # In replay mode, we won't connect to a real database at all, while + # in record and diff modes we do, but insert an intermediate handle + # object which monitors native connection activity. + rec_mode = get_record_mode_from_env() + handle = None + if rec_mode != RecorderMode.REPLAY: + handle = snowflake.connector.connect( + account=creds.account, + database=creds.database, + schema=creds.schema, + warehouse=creds.warehouse, + role=creds.role, + autocommit=True, + client_session_keep_alive=creds.client_session_keep_alive, + application="dbt", + insecure_mode=creds.insecure_mode, + session_parameters=session_parameters, + **creds.auth_args(), + ) - handle = snowflake.connector.connect( - account=creds.account, - user=creds.user, - database=creds.database, - schema=creds.schema, - warehouse=creds.warehouse, - role=creds.role, - autocommit=True, - client_session_keep_alive=creds.client_session_keep_alive, - application="dbt", - insecure_mode=creds.insecure_mode, - session_parameters=session_parameters, - **creds.auth_args(), - ) + if rec_mode is not None: + # If using the record/replay mechanism, regardless of mode, we + # use a wrapper. + handle = SnowflakeRecordReplayHandle(handle, connection) return handle @@ -416,7 +458,7 @@ def get_response(cls, cursor) -> SnowflakeAdapterResponse: rows_affected=cursor.rowcount, code=code, query_id=cursor.sfqid, - ) # type: ignore + ) # disable transactional logic by default on Snowflake # except for DML statements where explicitly defined @@ -444,31 +486,36 @@ def _split_queries(cls, sql): split_query = snowflake.connector.util_text.split_statements(sql_buf) return [part[0] for part in split_query] - @classmethod - def process_results(cls, column_names, rows): - # Override for Snowflake.
The datetime objects returned by - # snowflake-connector-python are not pickleable, so we need - # to replace them with sane timezones - fixed = [] + @staticmethod + def _fix_rows(rows: Iterable[Iterable]) -> Iterable[Iterable]: + # See note in process_results(). for row in rows: fixed_row = [] for col in row: if isinstance(col, datetime.datetime) and col.tzinfo: offset = col.utcoffset() + assert offset is not None offset_seconds = offset.total_seconds() - new_timezone = pytz.FixedOffset(offset_seconds // 60) + new_timezone = pytz.FixedOffset(int(offset_seconds // 60)) col = col.astimezone(tz=new_timezone) fixed_row.append(col) - fixed.append(fixed_row) + yield fixed_row - return super().process_results(column_names, fixed) + @classmethod + def process_results(cls, column_names, rows): + # Override for Snowflake. The datetime objects returned by + # snowflake-connector-python are not pickleable, so we need + # to replace them with sane timezones. + return super().process_results(column_names, cls._fix_rows(rows)) def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None - ) -> Tuple[AdapterResponse, agate.Table]: + ) -> Tuple[AdapterResponse, "agate.Table"]: # don't apply the query comment here # it will be applied after ';' queries are split + from dbt_common.clients.agate_helper import empty_table + _, cursor = self.add_query(sql, auto_begin) response = self.get_response(cursor) if fetch: @@ -488,7 +535,7 @@ def add_query( auto_begin: bool = True, bindings: Optional[Any] = None, abridge_sql_log: bool = False, - ) -> Tuple[Connection, Any]: # type: ignore + ) -> Tuple[Connection, Any]: if bindings: # The snowflake connector is stricter than, e.g., psycopg2 - # which allows any iterable thing to be passed as a binding. @@ -514,7 +561,7 @@ def add_query( if cursor is None: self._raise_cursor_not_found_error(sql) - return connection, cursor # type: ignore + return connection, cursor def _stripped_queries(self, sql: str) -> List[str]: def strip_query(query): @@ -581,7 +628,7 @@ def release(self): """Reuse connections by deferring release until adapter context manager in core resets adapters. This cleanup_all happens before Python teardown. 
Idle connections incur no costs while waiting in the connection pool.""" - if self.profile.credentials.reuse_connections: # type: ignore + if self.profile.credentials.reuse_connections: return super().release() diff --git a/dbt/adapters/snowflake/impl.py b/dbt/adapters/snowflake/impl.py index 3a192cfe7..65cd19a93 100644 --- a/dbt/adapters/snowflake/impl.py +++ b/dbt/adapters/snowflake/impl.py @@ -1,24 +1,35 @@ from dataclasses import dataclass -from typing import Mapping, Any, Optional, List, Union, Dict, FrozenSet, Tuple +from typing import Mapping, Any, Optional, List, Union, Dict, FrozenSet, Tuple, TYPE_CHECKING -import agate - -from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport # type: ignore +from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport from dbt.adapters.base.meta import available from dbt.adapters.capability import CapabilityDict, CapabilitySupport, Support, Capability -from dbt.adapters.sql import SQLAdapter # type: ignore +from dbt.adapters.sql import SQLAdapter from dbt.adapters.sql.impl import ( LIST_SCHEMAS_MACRO_NAME, LIST_RELATIONS_MACRO_NAME, ) - -from dbt.adapters.snowflake import SnowflakeConnectionManager -from dbt.adapters.snowflake import SnowflakeRelation -from dbt.adapters.snowflake import SnowflakeColumn from dbt_common.contracts.constraints import ConstraintType +from dbt_common.contracts.metadata import ( + TableMetadata, + StatsDict, + StatsItem, + CatalogTable, + ColumnMetadata, +) from dbt_common.exceptions import CompilationError, DbtDatabaseError, DbtRuntimeError from dbt_common.utils import filter_null_values +from dbt.adapters.snowflake.relation_configs import SnowflakeRelationType +from dbt.adapters.snowflake import SnowflakeColumn +from dbt.adapters.snowflake import SnowflakeConnectionManager +from dbt.adapters.snowflake import SnowflakeRelation + +if TYPE_CHECKING: + import agate + +SHOW_OBJECT_METADATA_MACRO_NAME = "snowflake__show_object_metadata" + @dataclass class SnowflakeConfig(AdapterConfig): @@ -54,6 +65,8 @@ class SnowflakeAdapter(SQLAdapter): { Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full), Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Full), + Capability.TableLastModifiedMetadataBatch: CapabilitySupport(support=Support.Full), + Capability.GetCatalogForSingleRelation: CapabilitySupport(support=Support.Full), } ) @@ -63,8 +76,8 @@ def date_function(cls): @classmethod def _catalog_filter_table( - cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]] - ) -> agate.Table: + cls, table: "agate.Table", used_schemas: FrozenSet[Tuple[str, str]] + ) -> "agate.Table": # On snowflake, users can set QUOTED_IDENTIFIERS_IGNORE_CASE, so force # the column names to their lowercased forms. 
lowered = table.rename(column_names=[c.lower() for c in table.column_names]) @@ -129,7 +142,87 @@ def get_columns_in_relation(self, relation): else: raise - def list_relations_without_caching(self, schema_relation: SnowflakeRelation) -> List[SnowflakeRelation]: # type: ignore + def _show_object_metadata(self, relation: SnowflakeRelation) -> Optional[dict]: + try: + kwargs = {"relation": relation} + results = self.execute_macro(SHOW_OBJECT_METADATA_MACRO_NAME, kwargs=kwargs) + + if len(results) == 0: + return None + + return results + except DbtDatabaseError: + return None + + def get_catalog_for_single_relation( + self, relation: SnowflakeRelation + ) -> Optional[CatalogTable]: + object_metadata = self._show_object_metadata(relation.as_case_sensitive()) + + if not object_metadata: + return None + + row = object_metadata[0] + + is_dynamic = row.get("is_dynamic") in ("Y", "YES") + kind = row.get("kind") + + if is_dynamic and kind == str(SnowflakeRelationType.Table).upper(): + table_type = str(SnowflakeRelationType.DynamicTable).upper() + else: + table_type = kind + + # https://docs.snowflake.com/en/sql-reference/sql/show-views#output + # Note: we don't support materialized views in dbt-snowflake + is_view = kind == str(SnowflakeRelationType.View).upper() + + table_metadata = TableMetadata( + type=table_type, + schema=row.get("schema_name"), + name=row.get("name"), + database=row.get("database_name"), + comment=row.get("comment"), + owner=row.get("owner"), + ) + + stats_dict: StatsDict = { + "has_stats": StatsItem( + id="has_stats", + label="Has Stats?", + value=True, + include=False, + description="Indicates whether there are statistics for this table", + ), + "row_count": StatsItem( + id="row_count", + label="Row Count", + value=row.get("rows"), + include=(not is_view), + description="Number of rows in the table as reported by Snowflake", + ), + "bytes": StatsItem( + id="bytes", + label="Approximate Size", + value=row.get("bytes"), + include=(not is_view), + description="Size of the table as reported by Snowflake", + ), + } + + catalog_columns = { + c.column: ColumnMetadata(type=c.dtype, index=i + 1, name=c.column) + for i, c in enumerate(self.get_columns_in_relation(relation)) + } + + return CatalogTable( + metadata=table_metadata, + columns=catalog_columns, + stats=stats_dict, + ) + + def list_relations_without_caching( + self, schema_relation: SnowflakeRelation + ) -> List[SnowflakeRelation]: kwargs = {"schema_relation": schema_relation} try: results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs) @@ -141,26 +234,37 @@ def list_relations_without_caching(self, schema_relation: SnowflakeRelation) -> return [] raise - relations = [] - quote_policy = {"database": True, "schema": True, "identifier": True} - + # this can be reduced to always including `is_dynamic` once bundle `2024_03` is mandatory columns = ["database_name", "schema_name", "name", "kind"] - for _database, _schema, _identifier, _type in results.select(columns): # type: ignore - try: - _type = self.Relation.get_relation_type(_type.lower()) - except ValueError: - _type = self.Relation.External - relations.append( - self.Relation.create( - database=_database, - schema=_schema, - identifier=_identifier, - quote_policy=quote_policy, - type=_type, - ) - ) + if "is_dynamic" in results.column_names: + columns.append("is_dynamic") - return relations + return [self._parse_list_relations_result(result) for result in results.select(columns)] + + def _parse_list_relations_result(self, result: "agate.Row") -> 
SnowflakeRelation: + # this can be reduced to always including `is_dynamic` once bundle `2024_03` is mandatory + try: + database, schema, identifier, relation_type, is_dynamic = result + except ValueError: + database, schema, identifier, relation_type = result + is_dynamic = "N" + + try: + relation_type = self.Relation.get_relation_type(relation_type.lower()) + except ValueError: + relation_type = self.Relation.External + + if relation_type == self.Relation.Table and is_dynamic == "Y": + relation_type = self.Relation.DynamicTable + + quote_policy = {"database": True, "schema": True, "identifier": True} + return self.Relation.create( + database=database, + schema=schema, + identifier=identifier, + type=relation_type, + quote_policy=quote_policy, + ) def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: quote_columns: bool = False @@ -181,7 +285,7 @@ def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: return column @available - def standardize_grants_dict(self, grants_table: agate.Table) -> dict: + def standardize_grants_dict(self, grants_table: "agate.Table") -> dict: grants_dict: Dict[str, Any] = {} for row in grants_table: @@ -206,6 +310,10 @@ def submit_python_job(self, parsed_model: dict, compiled_code: str): packages = parsed_model["config"].get("packages", []) imports = parsed_model["config"].get("imports", []) + external_access_integrations = parsed_model["config"].get( + "external_access_integrations", [] + ) + secrets = parsed_model["config"].get("secrets", {}) # adding default packages we need to make python model work default_packages = ["snowflake-snowpark-python"] package_names = [package.split("==")[0] for package in packages] @@ -214,20 +322,34 @@ def submit_python_job(self, parsed_model: dict, compiled_code: str): packages.append(default_package) packages = "', '".join(packages) imports = "', '".join(imports) - # we can't pass empty imports clause to snowflake + external_access_integrations = ", ".join(external_access_integrations) + secrets = ", ".join(f"'{key}' = {value}" for key, value in secrets.items()) + + # we can't pass empty imports, external_access_integrations or secrets clause to snowflake if imports: imports = f"IMPORTS = ('{imports}')" - - snowpark_telemetry_string = "dbtLabs_dbtPython" - snowpark_telemetry_snippet = f""" + if external_access_integrations: + # Black is trying to make this a tuple. 
+ # fmt: off + external_access_integrations = f"EXTERNAL_ACCESS_INTEGRATIONS = ({external_access_integrations})" + if secrets: + secrets = f"SECRETS = ({secrets})" + + if self.config.args.SEND_ANONYMOUS_USAGE_STATS: + snowpark_telemetry_string = "dbtLabs_dbtPython" + snowpark_telemetry_snippet = f""" import sys sys._xoptions['snowflake_partner_attribution'].append("{snowpark_telemetry_string}")""" + else: + snowpark_telemetry_snippet = "" common_procedure_code = f""" RETURNS STRING LANGUAGE PYTHON RUNTIME_VERSION = '{python_version}' PACKAGES = ('{packages}') +{external_access_integrations} +{secrets} {imports} HANDLER = 'main' EXECUTE AS CALLER diff --git a/dbt/adapters/snowflake/record/__init__.py b/dbt/adapters/snowflake/record/__init__.py new file mode 100644 index 000000000..f763dc3a4 --- /dev/null +++ b/dbt/adapters/snowflake/record/__init__.py @@ -0,0 +1,2 @@ +from dbt.adapters.snowflake.record.cursor.cursor import SnowflakeRecordReplayCursor +from dbt.adapters.snowflake.record.handle import SnowflakeRecordReplayHandle diff --git a/dbt/adapters/snowflake/record/cursor/cursor.py b/dbt/adapters/snowflake/record/cursor/cursor.py new file mode 100644 index 000000000..a07468867 --- /dev/null +++ b/dbt/adapters/snowflake/record/cursor/cursor.py @@ -0,0 +1,21 @@ +from dbt_common.record import record_function + +from dbt.adapters.record import RecordReplayCursor +from dbt.adapters.snowflake.record.cursor.sfqid import CursorGetSfqidRecord +from dbt.adapters.snowflake.record.cursor.sqlstate import CursorGetSqlStateRecord + + +class SnowflakeRecordReplayCursor(RecordReplayCursor): + """A custom extension of RecordReplayCursor that adds the sqlstate + and sfqid properties which are specific to snowflake-connector.""" + + @property + @record_function(CursorGetSqlStateRecord, method=True, id_field_name="connection_name") + def sqlstate(self): + return self.native_cursor.sqlstate + + @property + @record_function(CursorGetSfqidRecord, method=True, id_field_name="connection_name") + def sfqid(self): + return self.native_cursor.sfqid diff --git a/dbt/adapters/snowflake/record/cursor/sfqid.py b/dbt/adapters/snowflake/record/cursor/sfqid.py new file mode 100644 index 000000000..e39c857d3 --- /dev/null +++ b/dbt/adapters/snowflake/record/cursor/sfqid.py @@ -0,0 +1,21 @@ +import dataclasses +from typing import Optional + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorGetSfqidParams: + connection_name: str + + +@dataclasses.dataclass +class CursorGetSfqidResult: + msg: Optional[str] + + +@Recorder.register_record_type +class CursorGetSfqidRecord(Record): + params_cls = CursorGetSfqidParams + result_cls = CursorGetSfqidResult + group = "Database" diff --git a/dbt/adapters/snowflake/record/cursor/sqlstate.py b/dbt/adapters/snowflake/record/cursor/sqlstate.py new file mode 100644 index 000000000..5619058fd --- /dev/null +++ b/dbt/adapters/snowflake/record/cursor/sqlstate.py @@ -0,0 +1,21 @@ +import dataclasses +from typing import Optional + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorGetSqlStateParams: + connection_name: str + + +@dataclasses.dataclass +class CursorGetSqlStateResult: + msg: Optional[str] + + +@Recorder.register_record_type +class CursorGetSqlStateRecord(Record): + params_cls = CursorGetSqlStateParams + result_cls = CursorGetSqlStateResult + group = "Database" diff --git a/dbt/adapters/snowflake/record/handle.py b/dbt/adapters/snowflake/record/handle.py new file mode 100644 index
000000000..046bb911b --- /dev/null +++ b/dbt/adapters/snowflake/record/handle.py @@ -0,0 +1,12 @@ +from dbt.adapters.record import RecordReplayHandle + +from dbt.adapters.snowflake.record.cursor.cursor import SnowflakeRecordReplayCursor + + +class SnowflakeRecordReplayHandle(RecordReplayHandle): + """A custom extension of RecordReplayHandle that returns a + snowflake-connector-specific SnowflakeRecordReplayCursor object.""" + + def cursor(self): + cursor = None if self.native_handle is None else self.native_handle.cursor() + return SnowflakeRecordReplayCursor(cursor, self.connection) diff --git a/dbt/adapters/snowflake/relation.py b/dbt/adapters/snowflake/relation.py index 325d23c9b..ace85695b 100644 --- a/dbt/adapters/snowflake/relation.py +++ b/dbt/adapters/snowflake/relation.py @@ -1,14 +1,20 @@ from dataclasses import dataclass, field -from typing import Optional, Type +from typing import FrozenSet, Optional, Type from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.relation_configs import RelationConfigChangeAction, RelationResults -from dbt.adapters.contracts.relation import RelationConfig +from dbt.adapters.contracts.relation import ComponentName, RelationConfig +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationConfigChangeAction, + RelationResults, +) from dbt.adapters.utils import classproperty +from dbt_common.exceptions import DbtRuntimeError from dbt.adapters.snowflake.relation_configs import ( SnowflakeDynamicTableConfig, SnowflakeDynamicTableConfigChangeset, + SnowflakeDynamicTableRefreshModeConfigChange, SnowflakeDynamicTableTargetLagConfigChange, SnowflakeDynamicTableWarehouseConfigChange, SnowflakeQuotePolicy, @@ -18,15 +24,29 @@ @dataclass(frozen=True, eq=False, repr=False) class SnowflakeRelation(BaseRelation): - type: Optional[SnowflakeRelationType] = None # type: ignore + type: Optional[SnowflakeRelationType] = None quote_policy: SnowflakeQuotePolicy = field(default_factory=lambda: SnowflakeQuotePolicy()) - renameable_relations = frozenset({SnowflakeRelationType.Table, SnowflakeRelationType.View}) - replaceable_relations = frozenset( - { - SnowflakeRelationType.DynamicTable, - SnowflakeRelationType.Table, - SnowflakeRelationType.View, - } + require_alias: bool = False + relation_configs = { + SnowflakeRelationType.DynamicTable: SnowflakeDynamicTableConfig, + } + renameable_relations: FrozenSet[SnowflakeRelationType] = field( + default_factory=lambda: frozenset( + { + SnowflakeRelationType.Table, # type: ignore + SnowflakeRelationType.View, # type: ignore + } + ) + ) + + replaceable_relations: FrozenSet[SnowflakeRelationType] = field( + default_factory=lambda: frozenset( + { + SnowflakeRelationType.DynamicTable, # type: ignore + SnowflakeRelationType.Table, # type: ignore + SnowflakeRelationType.View, # type: ignore + } + ) ) @property @@ -41,6 +61,17 @@ def DynamicTable(cls) -> str: def get_relation_type(cls) -> Type[SnowflakeRelationType]: return SnowflakeRelationType + @classmethod + def from_config(cls, config: RelationConfig) -> RelationConfigBase: + relation_type: str = config.config.materialized + + if relation_config := cls.relation_configs.get(relation_type): + return relation_config.from_relation_config(config) + + raise DbtRuntimeError( + f"from_config() is not supported for the provided relation type: {relation_type}" + ) + @classmethod def dynamic_table_config_changeset( cls, relation_results: RelationResults, relation_config: RelationConfig @@ -66,6 +97,26 @@ def dynamic_table_config_changeset( ) ) + if 
new_dynamic_table.refresh_mode != existing_dynamic_table.refresh_mode: + config_change_collection.refresh_mode = SnowflakeDynamicTableRefreshModeConfigChange( + action=RelationConfigChangeAction.create, + context=new_dynamic_table.refresh_mode, + ) + if config_change_collection.has_changes: return config_change_collection return None + + def as_case_sensitive(self) -> "SnowflakeRelation": + path_part_map = {} + + for path in ComponentName: + if self.include_policy.get_part(path): + part = self.path.get_part(path) + if part: + if self.quote_policy.get_part(path): + path_part_map[path] = part + else: + path_part_map[path] = part.upper() + + return self.replace_path(**path_part_map) diff --git a/dbt/adapters/snowflake/relation_configs/__init__.py b/dbt/adapters/snowflake/relation_configs/__init__.py index e5ceabe49..62f95faff 100644 --- a/dbt/adapters/snowflake/relation_configs/__init__.py +++ b/dbt/adapters/snowflake/relation_configs/__init__.py @@ -1,6 +1,7 @@ from dbt.adapters.snowflake.relation_configs.dynamic_table import ( SnowflakeDynamicTableConfig, SnowflakeDynamicTableConfigChangeset, + SnowflakeDynamicTableRefreshModeConfigChange, SnowflakeDynamicTableWarehouseConfigChange, SnowflakeDynamicTableTargetLagConfigChange, ) diff --git a/dbt/adapters/snowflake/relation_configs/base.py b/dbt/adapters/snowflake/relation_configs/base.py index 7b4367e2d..a1ef79684 100644 --- a/dbt/adapters/snowflake/relation_configs/base.py +++ b/dbt/adapters/snowflake/relation_configs/base.py @@ -1,6 +1,5 @@ from dataclasses import dataclass -from typing import Any, Dict, Optional -import agate +from typing import Any, Dict, Optional, TYPE_CHECKING from dbt.adapters.base.relation import Policy from dbt.adapters.relation_configs import ( RelationConfigBase, @@ -14,6 +13,10 @@ SnowflakeQuotePolicy, ) +if TYPE_CHECKING: + # Imported downfile for specific row gathering function. 
+ import agate + @dataclass(frozen=True, eq=True, unsafe_hash=True) class SnowflakeRelationConfigBase(RelationConfigBase): @@ -45,7 +48,7 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict: def from_relation_results(cls, relation_results: RelationResults): relation_config = cls.parse_relation_results(relation_results) relation = cls.from_dict(relation_config) - return relation # type: ignore + return relation @classmethod def parse_relation_results(cls, relation_results: RelationResults) -> Dict[str, Any]: @@ -62,8 +65,10 @@ def _render_part(cls, component: ComponentName, value: Optional[str]) -> Optiona return None @classmethod - def _get_first_row(cls, results: agate.Table) -> agate.Row: + def _get_first_row(cls, results: "agate.Table") -> "agate.Row": try: return results.rows[0] except IndexError: + import agate + return agate.Row(values=set()) diff --git a/dbt/adapters/snowflake/relation_configs/dynamic_table.py b/dbt/adapters/snowflake/relation_configs/dynamic_table.py index cc1b9112d..2e227d3a4 100644 --- a/dbt/adapters/snowflake/relation_configs/dynamic_table.py +++ b/dbt/adapters/snowflake/relation_configs/dynamic_table.py @@ -1,13 +1,36 @@ from dataclasses import dataclass -from typing import Optional, Dict, Any +from typing import Optional, Dict, Any, TYPE_CHECKING -import agate from dbt.adapters.relation_configs import RelationConfigChange, RelationResults from dbt.adapters.contracts.relation import RelationConfig from dbt.adapters.contracts.relation import ComponentName +from dbt_common.dataclass_schema import StrEnum # doesn't exist in standard library until py3.11 +from typing_extensions import Self from dbt.adapters.snowflake.relation_configs.base import SnowflakeRelationConfigBase +if TYPE_CHECKING: + import agate + + +class RefreshMode(StrEnum): + AUTO = "AUTO" + FULL = "FULL" + INCREMENTAL = "INCREMENTAL" + + @classmethod + def default(cls) -> Self: + return cls("AUTO") + + +class Initialize(StrEnum): + ON_CREATE = "ON_CREATE" + ON_SCHEDULE = "ON_SCHEDULE" + + @classmethod + def default(cls) -> Self: + return cls("ON_CREATE") + @dataclass(frozen=True, eq=True, unsafe_hash=True) class SnowflakeDynamicTableConfig(SnowflakeRelationConfigBase): @@ -20,6 +43,8 @@ class SnowflakeDynamicTableConfig(SnowflakeRelationConfigBase): - query: the query behind the table - target_lag: the maximum amount of time that the dynamic table’s content should lag behind updates to the base tables - snowflake_warehouse: the name of the warehouse that provides the compute resources for refreshing the dynamic table + - refresh_mode: specifies the refresh type for the dynamic table + - initialize: specifies the behavior of the initial refresh of the dynamic table There are currently no non-configurable parameters. 
""" @@ -30,6 +55,8 @@ class SnowflakeDynamicTableConfig(SnowflakeRelationConfigBase): query: str target_lag: str snowflake_warehouse: str + refresh_mode: Optional[RefreshMode] = RefreshMode.default() + initialize: Optional[Initialize] = Initialize.default() @classmethod def from_dict(cls, config_dict) -> "SnowflakeDynamicTableConfig": @@ -42,9 +69,11 @@ def from_dict(cls, config_dict) -> "SnowflakeDynamicTableConfig": "query": config_dict.get("query"), "target_lag": config_dict.get("target_lag"), "snowflake_warehouse": config_dict.get("snowflake_warehouse"), + "refresh_mode": config_dict.get("refresh_mode"), + "initialize": config_dict.get("initialize"), } - dynamic_table: "SnowflakeDynamicTableConfig" = super().from_dict(kwargs_dict) # type: ignore + dynamic_table: "SnowflakeDynamicTableConfig" = super().from_dict(kwargs_dict) return dynamic_table @classmethod @@ -53,16 +82,22 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any "name": relation_config.identifier, "schema_name": relation_config.schema, "database_name": relation_config.database, - "query": relation_config.compiled_code, # type: ignore - "target_lag": relation_config.config.extra.get("target_lag"), # type: ignore - "snowflake_warehouse": relation_config.config.extra.get("snowflake_warehouse"), # type: ignore + "query": relation_config.compiled_code, + "target_lag": relation_config.config.extra.get("target_lag"), + "snowflake_warehouse": relation_config.config.extra.get("snowflake_warehouse"), } + if refresh_mode := relation_config.config.extra.get("refresh_mode"): + config_dict.update(refresh_mode=refresh_mode.upper()) + + if initialize := relation_config.config.extra.get("initialize"): + config_dict.update(initialize=initialize.upper()) + return config_dict @classmethod def parse_relation_results(cls, relation_results: RelationResults) -> Dict: - dynamic_table: agate.Row = relation_results["dynamic_table"].rows[0] + dynamic_table: "agate.Row" = relation_results["dynamic_table"].rows[0] config_dict = { "name": dynamic_table.get("name"), @@ -71,6 +106,8 @@ def parse_relation_results(cls, relation_results: RelationResults) -> Dict: "query": dynamic_table.get("text"), "target_lag": dynamic_table.get("target_lag"), "snowflake_warehouse": dynamic_table.get("warehouse"), + "refresh_mode": dynamic_table.get("refresh_mode"), + # we don't get initialize since that's a one-time scheduler attribute, not a DT attribute } return config_dict @@ -94,22 +131,35 @@ def requires_full_refresh(self) -> bool: return False +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class SnowflakeDynamicTableRefreshModeConfigChange(RelationConfigChange): + context: Optional[str] = None + + @property + def requires_full_refresh(self) -> bool: + return True + + @dataclass class SnowflakeDynamicTableConfigChangeset: target_lag: Optional[SnowflakeDynamicTableTargetLagConfigChange] = None snowflake_warehouse: Optional[SnowflakeDynamicTableWarehouseConfigChange] = None + refresh_mode: Optional[SnowflakeDynamicTableRefreshModeConfigChange] = None @property def requires_full_refresh(self) -> bool: return any( [ self.target_lag.requires_full_refresh if self.target_lag else False, - self.snowflake_warehouse.requires_full_refresh - if self.snowflake_warehouse - else False, + ( + self.snowflake_warehouse.requires_full_refresh + if self.snowflake_warehouse + else False + ), + self.refresh_mode.requires_full_refresh if self.refresh_mode else False, ] ) @property def has_changes(self) -> bool: - return any([self.target_lag, 
self.snowflake_warehouse]) + return any([self.target_lag, self.snowflake_warehouse, self.refresh_mode]) diff --git a/dbt/include/snowflake/macros/adapters.sql b/dbt/include/snowflake/macros/adapters.sql index 157738187..4cb4bcffa 100644 --- a/dbt/include/snowflake/macros/adapters.sql +++ b/dbt/include/snowflake/macros/adapters.sql @@ -27,14 +27,14 @@ {% macro snowflake__get_columns_in_relation(relation) -%} {%- set sql -%} - describe table {{ relation }} + describe table {{ relation.render() }} {%- endset -%} {%- set result = run_query(sql) -%} {% set maximum = 10000 %} {% if (result | length) >= maximum %} {% set msg %} - Too many columns in relation {{ relation }}! dbt can only get + Too many columns in relation {{ relation.render() }}! dbt can only get information about relations with fewer than {{ maximum }} columns. {% endset %} {% do exceptions.raise_compiler_error(msg) %} @@ -47,6 +47,15 @@ {% do return(columns) %} {% endmacro %} +{% macro snowflake__show_object_metadata(relation) %} + {%- set sql -%} + show objects in {{ relation.include(identifier=False) }} starts with '{{ relation.identifier }}' limit 1 + {%- endset -%} + + {%- set result = run_query(sql) -%} + {{ return(result) }} +{% endmacro %} + {% macro snowflake__list_schemas(database) -%} {# 10k limit from here: https://docs.snowflake.net/manuals/sql-reference/sql/show-schemas.html#usage-notes #} {% set maximum = 10000 %} @@ -72,9 +81,15 @@ {% for _ in range(0, max_iter) %} - {%- set paginated_sql -%} - show terse objects in {{ schema_relation.database }}.{{ schema_relation.schema }} limit {{ max_results_per_iter }} from '{{ watermark.table_name }}' - {%- endset -%} + {% if schema_relation is string %} + {%- set paginated_sql -%} + show objects in {{ schema_relation }} limit {{ max_results_per_iter }} from '{{ watermark.table_name }}' + {%- endset -%} + {% else %} + {%- set paginated_sql -%} + show objects in {{ schema_relation.include(identifier=False) }} limit {{ max_results_per_iter }} from '{{ watermark.table_name }}' + {%- endset -%} + {% endif -%} {%- set paginated_result = run_query(paginated_sql) %} {%- set paginated_n = (paginated_result | length) -%} @@ -96,7 +111,7 @@ {%- if loop.index == max_iter -%} {%- set msg -%} - dbt will list a maximum of {{ max_total_results }} objects in schema {{ schema_relation.database }}.{{ schema_relation.schema }}. + dbt will list a maximum of {{ max_total_results }} objects in schema {{ schema_relation }}. Your schema exceeds this limit. Please contact support@getdbt.com for troubleshooting tips, or review and reduce the number of objects contained. 
{%- endset -%} @@ -122,10 +137,15 @@ {% macro snowflake__list_relations_without_caching(schema_relation, max_iter=10, max_results_per_iter=10000) %} {%- set max_total_results = max_results_per_iter * max_iter -%} - - {%- set sql -%} - show terse objects in {{ schema_relation.database }}.{{ schema_relation.schema }} limit {{ max_results_per_iter }} - {%- endset -%} + {% if schema_relation is string %} + {%- set sql -%} + show objects in {{ schema_relation }} limit {{ max_results_per_iter }} + {%- endset -%} + {% else %} + {%- set sql -%} + show objects in {{ schema_relation.include(identifier=False) }} limit {{ max_results_per_iter }} + {%- endset -%} + {% endif -%} {%- set result = run_query(sql) -%} @@ -166,7 +186,7 @@ {% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%} {% call statement('alter_column_type') %} - alter table {{ relation }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }}; + alter table {{ relation.render() }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }}; {% endcall %} {% endmacro %} @@ -176,7 +196,7 @@ {%- else -%} {%- set relation_type = relation.type -%} {%- endif -%} - comment on {{ relation_type }} {{ relation }} IS $${{ relation_comment | replace('$', '[$]') }}$$; + comment on {{ relation_type }} {{ relation.render() }} IS $${{ relation_comment | replace('$', '[$]') }}$$; {% endmacro %} @@ -187,7 +207,7 @@ {% else -%} {% set relation_type = relation.type %} {% endif %} - alter {{ relation_type }} {{ relation }} alter + alter {{ relation_type }} {{ relation.render() }} alter {% for column_name in existing_columns if (column_name in existing_columns) or (column_name|lower in existing_columns) %} {{ get_column_comment_sql(column_name, column_dict) }} {{- ',' if not loop.last else ';' }} {% endfor %} @@ -246,7 +266,7 @@ {% if add_columns %} {% set sql -%} - alter {{ relation_type }} {{ relation }} add column + alter {{ relation_type }} {{ relation.render() }} add column {% for column in add_columns %} {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }} {% endfor %} @@ -259,7 +279,7 @@ {% if remove_columns %} {% set sql -%} - alter {{ relation_type }} {{ relation }} drop column + alter {{ relation_type }} {{ relation.render() }} drop column {% for column in remove_columns %} {{ column.name }}{{ ',' if not loop.last }} {% endfor %} @@ -292,7 +312,7 @@ {% macro snowflake__truncate_relation(relation) -%} {% set truncate_dml %} - truncate table {{ relation }} + truncate table {{ relation.render() }} {% endset %} {% call statement('truncate_relation') -%} {{ snowflake_dml_explicit_transaction(truncate_dml) }} diff --git a/dbt/include/snowflake/macros/catalog.sql b/dbt/include/snowflake/macros/catalog.sql index f0c766865..bde8b8f8f 100644 --- a/dbt/include/snowflake/macros/catalog.sql +++ b/dbt/include/snowflake/macros/catalog.sql @@ -41,7 +41,10 @@ table_catalog as "table_database", table_schema as "table_schema", table_name as "table_name", - coalesce(table_type, 'DYNAMIC TABLE') as "table_type", + case + when is_dynamic = 'YES' and table_type = 'BASE TABLE' THEN 'DYNAMIC TABLE' + else table_type + end as "table_type", comment as "table_comment", -- note: this is the _role_ that owns the table @@ -92,9 +95,14 @@ {%- endmacro %} +{% macro snowflake__catalog_equals(field, value) %} + "{{ field }}" ilike '{{ value }}' and upper("{{ field }}") = upper('{{ value }}') +{% endmacro %} + + {% macro snowflake__get_catalog_schemas_where_clause_sql(schemas) -%} where ({%- for schema in 
schemas -%} - upper("table_schema") = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%} + ({{ snowflake__catalog_equals('table_schema', schema) }}){%- if not loop.last %} or {% endif -%} {%- endfor -%}) {%- endmacro %} @@ -104,12 +112,12 @@ {%- for relation in relations -%} {% if relation.schema and relation.identifier %} ( - upper("table_schema") = upper('{{ relation.schema }}') - and upper("table_name") = upper('{{ relation.identifier }}') + {{ snowflake__catalog_equals('table_schema', relation.schema) }} + and {{ snowflake__catalog_equals('table_name', relation.identifier) }} ) {% elif relation.schema %} ( - upper("table_schema") = upper('{{ relation.schema }}') + {{ snowflake__catalog_equals('table_schema', relation.schema) }} ) {% else %} {% do exceptions.raise_compiler_error( diff --git a/dbt/include/snowflake/macros/relations/create_backup.sql b/dbt/include/snowflake/macros/relations/create_backup.sql new file mode 100644 index 000000000..b5f347cd9 --- /dev/null +++ b/dbt/include/snowflake/macros/relations/create_backup.sql @@ -0,0 +1,12 @@ +{%- macro snowflake__get_create_backup_sql(relation) -%} + + -- get the standard backup name + {% set backup_relation = make_backup_relation(relation, relation.type) %} + + -- drop any pre-existing backup + {{ get_drop_sql(backup_relation) }}; + + -- use `render` to ensure that the fully qualified name is used + {{ get_rename_sql(relation, backup_relation.render()) }} + +{%- endmacro -%} diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/create.sql b/dbt/include/snowflake/macros/relations/dynamic_table/create.sql index 8e8f3287f..253788779 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/create.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/create.sql @@ -1,11 +1,18 @@ {% macro snowflake__get_create_dynamic_table_as_sql(relation, sql) -%} + {%- set dynamic_table = relation.from_config(config.model) -%} + create dynamic table {{ relation }} - target_lag = '{{ config.get("target_lag") }}' - warehouse = {{ config.get("snowflake_warehouse") }} + target_lag = '{{ dynamic_table.target_lag }}' + warehouse = {{ dynamic_table.snowflake_warehouse }} + {% if dynamic_table.refresh_mode %} + refresh_mode = {{ dynamic_table.refresh_mode }} + {% endif %} + {% if dynamic_table.initialize %} + initialize = {{ dynamic_table.initialize }} + {% endif %} as ( {{ sql }} ) - ; {%- endmacro %} diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql b/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql index a5f612039..cc79328fe 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/describe.sql @@ -10,7 +10,8 @@ "database_name", "text", "target_lag", - "warehouse" + "warehouse", + "refresh_mode" from table(result_scan(last_query_id())) {%- endset %} {% set _dynamic_table = run_query(_dynamic_table_sql) %} diff --git a/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql b/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql index 385ce119c..dbe27d66e 100644 --- a/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql +++ b/dbt/include/snowflake/macros/relations/dynamic_table/replace.sql @@ -1,12 +1,18 @@ -{% macro snowflake__get_replace_dynamic_table_sql(relation, sql) %} +{% macro snowflake__get_replace_dynamic_table_sql(relation, sql) -%} + + {%- set dynamic_table = relation.from_config(config.model) -%} create or replace dynamic table {{ relation }} - 
target_lag = '{{ config.get("target_lag") }}' - warehouse = {{ config.get("snowflake_warehouse") }} + target_lag = '{{ dynamic_table.target_lag }}' + warehouse = {{ dynamic_table.snowflake_warehouse }} + {% if dynamic_table.refresh_mode %} + refresh_mode = {{ dynamic_table.refresh_mode }} + {% endif %} + {% if dynamic_table.initialize %} + initialize = {{ dynamic_table.initialize }} + {% endif %} as ( {{ sql }} ) - ; - {{ snowflake__refresh_dynamic_table(relation) }} -{% endmacro %} +{%- endmacro %} diff --git a/dbt/include/snowflake/macros/relations/rename_intermediate.sql b/dbt/include/snowflake/macros/relations/rename_intermediate.sql new file mode 100644 index 000000000..abd5fee92 --- /dev/null +++ b/dbt/include/snowflake/macros/relations/rename_intermediate.sql @@ -0,0 +1,9 @@ +{%- macro snowflake__get_rename_intermediate_sql(relation) -%} + + -- get the standard intermediate name + {% set intermediate_relation = make_intermediate_relation(relation) %} + + -- use `render` to ensure that the fully qualified name is used + {{ get_rename_sql(intermediate_relation, relation.render()) }} + +{%- endmacro -%} diff --git a/dbt/include/snowflake/macros/relations/table/rename.sql b/dbt/include/snowflake/macros/relations/table/rename.sql index 7b363e03d..699debf28 100644 --- a/dbt/include/snowflake/macros/relations/table/rename.sql +++ b/dbt/include/snowflake/macros/relations/table/rename.sql @@ -1,3 +1,13 @@ {%- macro snowflake__get_rename_table_sql(relation, new_name) -%} + /* + Rename or move a table to the new name. + + Args: + relation: SnowflakeRelation - relation to be renamed + new_name: Union[str, SnowflakeRelation] - new name for `relation` + if providing a string, the default database/schema will be used if that string is just an identifier + if providing a SnowflakeRelation, `render` will be used to produce a fully qualified name + Returns: templated string + */ alter table {{ relation }} rename to {{ new_name }} {%- endmacro -%} diff --git a/dbt/include/snowflake/macros/relations/view/rename.sql b/dbt/include/snowflake/macros/relations/view/rename.sql index 4cfd410a4..add2f49b9 100644 --- a/dbt/include/snowflake/macros/relations/view/rename.sql +++ b/dbt/include/snowflake/macros/relations/view/rename.sql @@ -1,3 +1,13 @@ {%- macro snowflake__get_rename_view_sql(relation, new_name) -%} + /* + Rename or move a view to the new name. + + Args: + relation: SnowflakeRelation - relation to be renamed + new_name: Union[str, SnowflakeRelation] - new name for `relation` + if providing a string, the default database/schema will be used if that string is just an identifier + if providing a SnowflakeRelation, `render` will be used to produce a fully qualified name + Returns: templated string + */ alter view {{ relation }} rename to {{ new_name }} {%- endmacro -%} diff --git a/dev-requirements.txt b/dev-requirements.txt index db49d0497..f3d120eec 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,32 +1,24 @@ # install latest changes in dbt-core -# TODO: how to automate switching from develop to version branches? 
git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core git+https://github.com/dbt-labs/dbt-adapters.git git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter +git+https://github.com/dbt-labs/dbt-common.git -# if version 1.x or greater -> pin to major version -# if version 0.x -> pin to minor -black~=23.12 -bumpversion~=0.6.0 -click~=8.1 -cryptography~=41.0.7 -ddtrace~=2.3 -flake8~=6.1 -flaky~=3.7 -freezegun~=1.3 +# dev ipdb~=0.13.13 -mypy==1.7.1 # patch updates have historically introduced breaking changes -pip-tools~=7.3 -pre-commit~=3.5 -pre-commit-hooks~=4.5 +pre-commit~=3.7.0;python_version>="3.9" +pre-commit~=3.5.0;python_version<"3.9" + +# test +ddtrace==2.3.0 pytest~=7.4 pytest-csv~=3.0 pytest-dotenv~=0.5.2 pytest-logbook~=1.2 -pytest-xdist~=3.5 -pytz~=2023.3 -tox~=4.11 -types-pytz~=2023.3 -types-requests~=2.31 -twine~=4.0 -wheel~=0.42 +pytest-xdist~=3.6 +tox~=4.16 + +# build +bumpversion~=0.6.0 +twine~=5.1 +wheel~=0.43 diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 000000000..d256dcac4 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,37 @@ +# this image gets published to GHCR for production use +ARG py_version=3.11.2 + +FROM python:$py_version-slim-bullseye as base + +RUN apt-get update \ + && apt-get dist-upgrade -y \ + && apt-get install -y --no-install-recommends \ + build-essential=12.9 \ + ca-certificates=20210119 \ + git=1:2.30.2-1+deb11u2 \ + libpq-dev=13.14-0+deb11u1 \ + make=4.3-4.1 \ + openssh-client=1:8.4p1-5+deb11u3 \ + software-properties-common=0.96.20.2-2.1 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +ENV PYTHONIOENCODING=utf-8 +ENV LANG=C.UTF-8 + +RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir + + +FROM base as dbt-snowflake + +ARG commit_ref=main + +HEALTHCHECK CMD dbt --version || exit 1 + +WORKDIR /usr/app/dbt/ +ENTRYPOINT ["dbt"] + +RUN python -m pip install --no-cache-dir "dbt-snowflake @ git+https://github.com/dbt-labs/dbt-snowflake@${commit_ref}" diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 000000000..95ecde101 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,58 @@ +# Docker for dbt +This docker file is suitable for building dbt Docker images locally or using with CI/CD to automate populating a container registry. + + +## Building an image: +This Dockerfile can create images for the following target: `dbt-snowflake` + +In order to build a new image, run the following docker command. +```shell +docker build --tag <image_name> --target dbt-snowflake <path/to/dockerfile> +``` +--- +> **Note:** Docker must be configured to use [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) in order for images to build properly! + +--- + +By default the image will be populated with the latest version of `dbt-snowflake` on `main`. +If you need to use a different version you can specify it by git ref using the `--build-arg` flag: +```shell +docker build --tag <image_name> \ + --target dbt-snowflake \ + --build-arg commit_ref=<commit_ref> \ + <path/to/dockerfile> +``` + +### Examples: +To build an image named "my-dbt" that supports Snowflake using the latest releases: +```shell +cd dbt-core/docker +docker build --tag my-dbt --target dbt-snowflake . +``` + +To build an image named "my-other-dbt" that supports Snowflake using the adapter version 1.0.0b1: +```shell +cd dbt-core/docker +docker build \ + --tag my-other-dbt \ + --target dbt-snowflake \ + --build-arg commit_ref=v1.0.0b1 \ + .
+``` + +## Running an image in a container: +The `ENTRYPOINT` for this Dockerfile is the command `dbt` so you can bind-mount your project to `/usr/app` and use dbt as normal: +```shell +docker run \ + --network=host \ + --mount type=bind,source=path/to/project,target=/usr/app \ + --mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/profiles.yml \ + my-dbt \ + ls +``` +--- +**Notes:** +* Bind-mount sources _must_ be an absolute path +* You may need to make adjustments to the docker networking setting depending on the specifics of your data warehouse/database host. + +--- diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile new file mode 100644 index 000000000..0fc667048 --- /dev/null +++ b/docker/dev.Dockerfile @@ -0,0 +1,50 @@ +# this image does not get published, it is intended for local development only, see `Makefile` for usage +FROM ubuntu:22.04 as base + +# prevent python installation from asking for time zone region +ARG DEBIAN_FRONTEND=noninteractive + +# add python repository +RUN apt-get update \ + && apt-get install -y software-properties-common=0.99.22.9 \ + && add-apt-repository -y ppa:deadsnakes/ppa \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +# install python +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + build-essential=12.9ubuntu3 \ + git-all=1:2.34.1-1ubuntu1.10 \ + python3.8=3.8.19-1+jammy1 \ + python3.8-dev=3.8.19-1+jammy1 \ + python3.8-distutils=3.8.19-1+jammy1 \ + python3.8-venv=3.8.19-1+jammy1 \ + python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + python3-wheel=0.37.1-2ubuntu0.22.04.1 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +# update the default system interpreter to the newly installed version +RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1 + + +FROM base as dbt-snowflake-dev + +HEALTHCHECK CMD python3 --version || exit 1 + +# send stdout/stderr to terminal +ENV PYTHONUNBUFFERED=1 + +# setup mount for local code +WORKDIR /opt/code +VOLUME /opt/code + +# create a virtual environment +RUN python3 -m venv /opt/venv diff --git a/docker/test.sh b/docker/test.sh new file mode 100755 index 000000000..79311b6c9 --- /dev/null +++ b/docker/test.sh @@ -0,0 +1,22 @@ +# - VERY rudimentary test script to run latest + specific branch image builds and test them all by running `--version` +# TODO: create a real test suite + +clear \ +&& echo "\n\n"\ +"########################################\n"\ +"##### Testing dbt-snowflake latest #####\n"\ +"########################################\n"\ +&& docker build --tag dbt-snowflake \ + --target dbt-snowflake \ + docker \ +&& docker run dbt-snowflake --version \ +\ +&& echo "\n\n"\ +"#########################################\n"\ +"##### Testing dbt-snowflake-1.0.0b1 #####\n"\ +"#########################################\n"\ +&& docker build --tag dbt-snowflake-1.0.0b1 \ + --target dbt-snowflake \ + --build-arg commit_ref=v1.0.0b1 \ + docker \ +&& docker run dbt-snowflake-1.0.0b1 --version diff --git a/docker_dev/README.md b/docker_dev/README.md deleted file mode 100644 index dd487fea7..000000000 --- a/docker_dev/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Docker Dev Images - -These images are solely for development purposes. They are -saved here for convenience. There should be no expectation -of stability or maintenance. 
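The `docker/test.sh` script above flags a TODO for a real test suite. A hedged sketch of what that could look like with pytest and subprocess, assuming Docker is available on the host; the tags and build args mirror the shell script, and the module name is hypothetical:

```python
# test_docker_images.py (hypothetical): exercises the production Dockerfile
# the same way docker/test.sh does, but with per-case pass/fail reporting.
import subprocess

import pytest

# (tag, commit_ref) pairs taken from docker/test.sh; None means "build main".
IMAGES = [
    ("dbt-snowflake", None),
    ("dbt-snowflake-1.0.0b1", "v1.0.0b1"),
]


@pytest.mark.parametrize("tag,ref", IMAGES)
def test_image_reports_version(tag, ref):
    build = ["docker", "build", "--tag", tag, "--target", "dbt-snowflake", "docker"]
    if ref:
        build += ["--build-arg", f"commit_ref={ref}"]
    subprocess.run(build, check=True)

    # The image's ENTRYPOINT is `dbt`, so `--version` is passed straight through.
    result = subprocess.run(
        ["docker", "run", "--rm", tag, "--version"],
        check=True,
        capture_output=True,
        text=True,
    )
    assert "dbt" in result.stdout.lower()
```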
diff --git a/docker_dev/debian.Dockerfile b/docker_dev/debian.Dockerfile deleted file mode 100644 index 9c3415be5..000000000 --- a/docker_dev/debian.Dockerfile +++ /dev/null @@ -1,49 +0,0 @@ -FROM debian:latest - -# default to py3.11.1, this can be overridden at build, e.g. `docker build ... --build-arg version=3.10.8` -ARG version=3.11.1 - -# install python dependencies -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - build-essential \ - zlib1g-dev \ - libncurses5-dev \ - libgdbm-dev \ - libnss3-dev \ - libssl-dev \ - libreadline-dev \ - libffi-dev \ - libsqlite3-dev \ - wget \ - libbz2-dev \ - git-all - -# download, extract, and install python -RUN wget https://www.python.org/ftp/python/$version/Python-$version.tgz && \ - tar -xvf Python-$version.tgz && \ - cd Python-$version && \ - ./configure --enable-optimizations && \ - make -j $(shell nproc) && \ - make altinstall - -# clean up -RUN apt-get clean && \ - rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* \ - /Python-$version.tgz - -# add this installation to the path and update the default system interpreter to the newly installed version -RUN export PATH="/Python-$version:$PATH" && \ - update-alternatives --install /usr/bin/python3 python3 /Python-$version/python 1 - -# update python build tools -RUN python3 -m pip install --upgrade pip setuptools wheel --no-cache-dir - -# setup mount for our code -WORKDIR /opt/code -VOLUME /opt/code - -ENV PYTHONUNBUFFERED=1 diff --git a/docker_dev/dev-env-default.Dockerfile b/docker_dev/dev-env-default.Dockerfile deleted file mode 100644 index ed90889a7..000000000 --- a/docker_dev/dev-env-default.Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -FROM docker/dev-environments-default:latest - -# install python and git (for installing dbt-core) -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - python3-pip \ - python3-wheel \ - build-essential - -# clean up -RUN apt-get clean && \ - rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* - -# update python build tools -RUN python3 -m pip install --upgrade pip setuptools wheel --no-cache-dir - -# setup mount for our code -WORKDIR /opt/code -VOLUME /opt/code - -# send stdout/stderr to terminal -ENV PYTHONUNBUFFERED=1 diff --git a/docker_dev/ubuntu.Dockerfile b/docker_dev/ubuntu.Dockerfile deleted file mode 100644 index bac3f5993..000000000 --- a/docker_dev/ubuntu.Dockerfile +++ /dev/null @@ -1,50 +0,0 @@ -FROM ubuntu:latest - -# default to py3.11, this can be overridden at build, e.g. `docker build ... 
--build-arg version=3.10` -ARG version=3.11 - -# prevent python installation from asking for time zone region -ARG DEBIAN_FRONTEND=noninteractive - -# get add-apt-repository -RUN apt-get update && \ - apt-get install -y software-properties-common - -# add the python repository -RUN apt-get update && \ - add-apt-repository -y ppa:deadsnakes/ppa - -# install python and git (for installing dbt-core) -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - python$version \ - python$version-dev \ - python$version-distutils \ - python$version-venv \ - python3-pip \ - python3-wheel \ - build-essential \ - git-all - -# clean up -RUN apt-get clean && \ - rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* - -# update the default system interpreter to the newly installed version -RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python$version 1 - -# setup mount for our code -WORKDIR /opt/code -VOLUME /opt/code - -# install tox in the system interpreter (it creates it's own virtual environments) -RUN pip install tox - -# explicitly create a virtual environment as well for interactive testing -RUN python3 -m venv /opt/venv - -# send stdout/stderr to terminal -ENV PYTHONUNBUFFERED=1 diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index b6e603581..000000000 --- a/mypy.ini +++ /dev/null @@ -1,2 +0,0 @@ -[mypy] -namespace_packages = True diff --git a/setup.py b/setup.py index a9e1149db..210c309b1 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,8 @@ #!/usr/bin/env python import os +from pathlib import Path + import sys -import re # require python 3.8 or newer if sys.version_info < (3, 8): @@ -28,36 +29,25 @@ long_description = f.read() -# get this package's version from dbt/adapters/<adapter name>/__version__.py -def _get_plugin_version_dict(): - _version_path = os.path.join(this_directory, "dbt", "adapters", "snowflake", "__version__.py") - _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)""" - _pre = r"""((?P<prekind>a|b|rc)(?P<pre>
\d+))?"""
-    _nightly = r"""(\.(?P[a-z0-9]+)?)?"""
-    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}{_nightly}["']"""
-    with open(_version_path) as f:
-        match = re.search(_version_pattern, f.read().strip())
-        if match is None:
-            raise ValueError(f"invalid version at {_version_path}")
-        return match.groupdict()
+# used for this adapter's version
+VERSION = Path(__file__).parent / "dbt/adapters/snowflake/__version__.py"
 
 
-# require a compatible minor version (~=), prerelease if this is a prerelease
-def _get_dbt_core_version():
-    parts = _get_plugin_version_dict()
-    minor = "{major}.{minor}.0".format(**parts)
-    pre = parts["prekind"] + "1" if parts["prekind"] else ""
-    return f"{minor}{pre}"
+def _plugin_version() -> str:
+    """
+    Pull the package version from the main package version file
+    """
+    attributes = {}
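+    # the version file is a single assignment (version = "..."); exec runs it and leaves the value in `attributes`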
+    exec(VERSION.read_text(), attributes)
+    return attributes["version"]
 
 
 package_name = "dbt-snowflake"
-package_version = "1.8.0a1"
-dbt_core_version = _get_dbt_core_version()
 description = """The Snowflake adapter plugin for dbt"""
 
 setup(
     name=package_name,
-    version=package_version,
+    version=_plugin_version(),
     description=description,
     long_description=long_description,
     long_description_content_type="text/markdown",
@@ -67,10 +57,11 @@ def _get_dbt_core_version():
     packages=find_namespace_packages(include=["dbt", "dbt.*"]),
     include_package_data=True,
     install_requires=[
-        "dbt-core~={}".format(dbt_core_version),
-        "dbt-common~=0.1.0",
-        "dbt-adapters~=0.1.0a2",
+        "dbt-common>=1.3.0,<2.0",
+        "dbt-adapters>=1.3.1,<2.0",
         "snowflake-connector-python[secure-local-storage]~=3.0",
+        # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
+        "dbt-core>=1.8.0",
         # installed via dbt-core but referenced directly; don't pin to avoid version conflicts with dbt-core
         "agate",
     ],
@@ -86,6 +77,7 @@ def _get_dbt_core_version():
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
     ],
     python_requires=">=3.8",
 )
diff --git a/tests/functional/adapter/dbt_clone/test_dbt_clone.py b/tests/functional/adapter/dbt_clone/test_dbt_clone.py
index 78e220140..2a73eb7f4 100644
--- a/tests/functional/adapter/dbt_clone/test_dbt_clone.py
+++ b/tests/functional/adapter/dbt_clone/test_dbt_clone.py
@@ -63,7 +63,6 @@ def copy_state(self, project_root):
     def run_and_save_state(self, project_root, with_snapshot=False):
         results = run_dbt(["run"])
         assert len(results) == 1
-        assert not any(r.node.deferred for r in results)
 
         self.copy_state(project_root)
 
diff --git a/tests/functional/adapter/dbt_show/test_dbt_show.py b/tests/functional/adapter/dbt_show/test_dbt_show.py
index c60a26aec..d12b91f52 100644
--- a/tests/functional/adapter/dbt_show/test_dbt_show.py
+++ b/tests/functional/adapter/dbt_show/test_dbt_show.py
@@ -1,9 +1,17 @@
-from dbt.tests.adapter.dbt_show.test_dbt_show import BaseShowSqlHeader, BaseShowLimit
+from dbt.tests.adapter.dbt_show.test_dbt_show import (
+    BaseShowSqlHeader,
+    BaseShowLimit,
+    BaseShowDoesNotHandleDoubleLimit,
+)
 
 
-class TestBigQueryShowLimit(BaseShowLimit):
+class TestSnowflakeShowLimit(BaseShowLimit):
     pass
 
 
-class TestBigQueryShowSqlHeader(BaseShowSqlHeader):
+class TestSnowflakeShowSqlHeader(BaseShowSqlHeader):
     pass
+
+
+class TestSnowflakeShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit):
+    DATABASE_ERROR_MESSAGE = "unexpected 'limit'"
diff --git a/tests/functional/adapter/dynamic_table_tests/files.py b/tests/functional/adapter/dynamic_table_tests/files.py
index 6b449d476..ef8d2bf1f 100644
--- a/tests/functional/adapter/dynamic_table_tests/files.py
+++ b/tests/functional/adapter/dynamic_table_tests/files.py
@@ -26,7 +26,8 @@
 {{ config(
     materialized='dynamic_table',
     snowflake_warehouse='DBT_TESTING',
-    target_lag='120        seconds',
+    target_lag='2        minutes',
+    refresh_mode='INCREMENTAL',
 ) }}
 select * from {{ ref('my_seed') }}
 """
diff --git a/tests/functional/adapter/dynamic_table_tests/test_dynamic_tables_changes.py b/tests/functional/adapter/dynamic_table_tests/test_dynamic_tables_changes.py
index 984454fb9..a58b76f29 100644
--- a/tests/functional/adapter/dynamic_table_tests/test_dynamic_tables_changes.py
+++ b/tests/functional/adapter/dynamic_table_tests/test_dynamic_tables_changes.py
@@ -17,6 +17,7 @@
     MY_SEED,
 )
 from tests.functional.adapter.dynamic_table_tests.utils import (
+    query_refresh_mode,
     query_relation_type,
     query_target_lag,
     query_warehouse,
@@ -25,21 +26,16 @@
 
 class SnowflakeDynamicTableChanges:
     @staticmethod
-    def check_start_state(adapter, dynamic_table):
-        """
-        This needs to be done manually for now until we fix the test suite's runner. The test suite's
-        runner cannot run queries with multiple statements. Snowflake's metadata is all behind `show`
-        and `describe` calls that require a second call to fetch the results; hence, the results
-        cannot be fetched.
-        """
-        assert query_target_lag(adapter, dynamic_table) is None == "120 seconds"
-        assert query_warehouse(adapter, dynamic_table) is None == "DBT_TESTING"
+    def check_start_state(project, dynamic_table):
+        assert query_target_lag(project, dynamic_table) == "2 minutes"
+        assert query_warehouse(project, dynamic_table) == "DBT_TESTING"
+        assert query_refresh_mode(project, dynamic_table) == "INCREMENTAL"
 
     @staticmethod
     def change_config_via_alter(project, dynamic_table):
         initial_model = get_model_file(project, dynamic_table)
         new_model = initial_model.replace(
-            "target_lag='120        seconds'", "target_lag='5   minutes'"
+            "target_lag='2        minutes'", "target_lag='5   minutes'"
         )
         set_model_file(project, dynamic_table, new_model)
 
@@ -47,31 +43,29 @@ def change_config_via_alter(project, dynamic_table):
     def change_config_via_alter_downstream(project, dynamic_table):
         initial_model = get_model_file(project, dynamic_table)
         new_model = initial_model.replace(
-            "target_lag='120        seconds'", "target_lag='downstream'"
+            "target_lag='2        minutes'", "target_lag='DOWNSTREAM'"
         )
         set_model_file(project, dynamic_table, new_model)
 
     @staticmethod
-    def check_state_alter_change_is_applied(adapter, dynamic_table):
-        # see above
-        assert query_target_lag(adapter, dynamic_table) == "5 minutes"
-        assert query_warehouse(adapter, dynamic_table) == "DBT_TESTING"
+    def check_state_alter_change_is_applied(project, dynamic_table):
+        assert query_target_lag(project, dynamic_table) == "5 minutes"
+        assert query_warehouse(project, dynamic_table) == "DBT_TESTING"
 
     @staticmethod
-    def check_state_alter_change_is_applied_downstream(adapter, dynamic_table):
-        # see above
-        assert query_target_lag(adapter, dynamic_table) == "downstream"
-        assert query_warehouse(adapter, dynamic_table) == "DBT_TESTING"
+    def check_state_alter_change_is_applied_downstream(project, dynamic_table):
+        assert query_target_lag(project, dynamic_table) == "DOWNSTREAM"
+        assert query_warehouse(project, dynamic_table) == "DBT_TESTING"
 
     @staticmethod
     def change_config_via_replace(project, dynamic_table):
-        # dbt-snowflake does not currently monitor any changes that trigger a full refresh
-        pass
+        initial_model = get_model_file(project, dynamic_table)
+        new_model = initial_model.replace("refresh_mode='INCREMENTAL'", "refresh_mode='FULL'")
+        set_model_file(project, dynamic_table, new_model)
 
     @staticmethod
     def check_state_replace_change_is_applied(project, dynamic_table):
-        # dbt-snowflake does not currently monitor any changes that trigger a full refresh
-        pass
+        assert query_refresh_mode(project, dynamic_table) == "FULL"
 
     @staticmethod
     def query_relation_type(project, relation: SnowflakeRelation) -> Optional[str]:
@@ -103,6 +97,9 @@ def setup(self, project, my_dynamic_table):
         # the tests touch these files, store their contents in memory
         initial_model = get_model_file(project, my_dynamic_table)
 
+        # verify the initial settings are correct in Snowflake
+        self.check_start_state(project, my_dynamic_table)
+
         yield
 
         # and then reset them after the test runs
@@ -112,12 +109,19 @@ def setup(self, project, my_dynamic_table):
         project.run_sql(f"drop schema if exists {project.test_schema} cascade")
 
     def test_full_refresh_occurs_with_changes(self, project, my_dynamic_table):
+
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         self.change_config_via_replace(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(
             ["--debug", "run", "--models", my_dynamic_table.identifier, "--full-refresh"]
         )
-        assert self.query_relation_type(project, my_dynamic_table) == "dynamic_table"
+
+        # verify the updated settings are correct in Snowflake
+        self.check_state_alter_change_is_applied(project, my_dynamic_table)
+        self.check_state_replace_change_is_applied(project, my_dynamic_table)
+
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Applying ALTER to: {my_dynamic_table.render().upper()}", logs.replace('"', ""), False
         )
@@ -131,17 +135,16 @@ class TestSnowflakeDynamicTableChangesApply(SnowflakeDynamicTableChanges):
     def project_config_update(self):
         return {"models": {"on_configuration_change": OnConfigurationChangeOption.Apply.value}}
 
-    def test_change_is_applied_via_alter(self, project, adapter, my_dynamic_table):
-        """
-        See above about the two commented assertions. In the meantime, these have been validated manually.
-        """
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_applied_via_alter(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(["--debug", "run", "--models", my_dynamic_table.name])
 
-        # self.check_state_alter_change_is_applied(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_state_alter_change_is_applied(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Applying ALTER to: {my_dynamic_table.render().upper()}", logs.replace('"', "")
         )
@@ -151,17 +154,16 @@ def test_change_is_applied_via_alter(self, project, adapter, my_dynamic_table):
             False,
         )
 
-    def test_change_is_applied_via_alter_downstream(self, project, adapter, my_dynamic_table):
-        """
-        See above about the two commented assertions. In the meantime, these have been validated manually.
-        """
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_applied_via_alter_downstream(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter_downstream(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(["--debug", "run", "--models", my_dynamic_table.name])
 
-        # self.check_state_alter_change_is_applied_downstream(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_state_alter_change_is_applied_downstream(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Applying ALTER to: {my_dynamic_table.render().upper()}", logs.replace('"', "")
         )
@@ -174,16 +176,18 @@ def test_change_is_applied_via_alter_downstream(self, project, adapter, my_dynam
     @pytest.mark.skip(
         "dbt-snowflake does not currently monitor any changes the trigger a full refresh"
     )
-    def test_change_is_applied_via_replace(self, project, adapter, my_dynamic_table):
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_applied_via_replace(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         self.change_config_via_replace(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(["--debug", "run", "--models", my_dynamic_table.name])
 
-        # self.check_state_alter_change_is_applied(adapter, my_dynamic_table)
-        # self.check_state_replace_change_is_applied(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_state_alter_change_is_applied(project, my_dynamic_table)
+        self.check_state_replace_change_is_applied(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Applying REPLACE to: {my_dynamic_table.render().upper()}", logs.replace('"', "")
         )
@@ -194,17 +198,16 @@ class TestSnowflakeDynamicTableChangesContinue(SnowflakeDynamicTableChanges):
     def project_config_update(self):
         return {"models": {"on_configuration_change": OnConfigurationChangeOption.Continue.value}}
 
-    def test_change_is_not_applied_via_alter(self, project, adapter, my_dynamic_table):
-        """
-        See above about the two commented assertions. In the meantime, these have been validated manually.
-        """
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_not_applied_via_alter(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(["--debug", "run", "--models", my_dynamic_table.name])
 
-        # self.check_start_state(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_start_state(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Configuration changes were identified and `on_configuration_change` was set"
             f" to `continue` for `{my_dynamic_table}`",
@@ -219,15 +222,17 @@ def test_change_is_not_applied_via_alter(self, project, adapter, my_dynamic_tabl
             False,
         )
 
-    def test_change_is_not_applied_via_replace(self, project, adapter, my_dynamic_table):
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_not_applied_via_replace(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         self.change_config_via_replace(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(["--debug", "run", "--models", my_dynamic_table.name])
 
-        # self.check_start_state(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_start_state(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Configuration changes were identified and `on_configuration_change` was set"
             f" to `continue` for `{my_dynamic_table}`",
@@ -248,19 +253,18 @@ class TestSnowflakeDynamicTableChangesFailMixin(SnowflakeDynamicTableChanges):
     def project_config_update(self):
         return {"models": {"on_configuration_change": OnConfigurationChangeOption.Fail.value}}
 
-    def test_change_is_not_applied_via_alter(self, project, adapter, my_dynamic_table):
-        """
-        See above about the two commented assertions. In the meantime, these have been validated manually.
-        """
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_not_applied_via_alter(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(
             ["--debug", "run", "--models", my_dynamic_table.name], expect_pass=False
         )
 
-        # self.check_start_state(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_start_state(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Configuration changes were identified and `on_configuration_change` was set"
             f" to `fail` for `{my_dynamic_table}`",
@@ -275,17 +279,19 @@ def test_change_is_not_applied_via_alter(self, project, adapter, my_dynamic_tabl
             False,
         )
 
-    def test_change_is_not_applied_via_replace(self, project, adapter, my_dynamic_table):
-        # self.check_start_state(adapter, my_dynamic_table)
+    def test_change_is_not_applied_via_replace(self, project, my_dynamic_table):
 
+        # update the settings
         self.change_config_via_alter(project, my_dynamic_table)
         self.change_config_via_replace(project, my_dynamic_table)
         _, logs = run_dbt_and_capture(
             ["--debug", "run", "--models", my_dynamic_table.name], expect_pass=False
         )
 
-        # self.check_start_state(adapter, my_dynamic_table)
+        # verify the updated settings are correct in Snowflake
+        self.check_start_state(project, my_dynamic_table)
 
+        # verify the settings were changed with the correct method
         assert_message_in_logs(
             f"Configuration changes were identified and `on_configuration_change` was set"
             f" to `fail` for `{my_dynamic_table}`",
diff --git a/tests/functional/adapter/dynamic_table_tests/utils.py b/tests/functional/adapter/dynamic_table_tests/utils.py
index 1f145ec04..d72b231c9 100644
--- a/tests/functional/adapter/dynamic_table_tests/utils.py
+++ b/tests/functional/adapter/dynamic_table_tests/utils.py
@@ -1,7 +1,6 @@
 from typing import Optional
 
 import agate
-from dbt.adapters.base import BaseAdapter
 from dbt.tests.util import get_connection
 
 from dbt.adapters.snowflake.relation import SnowflakeRelation
@@ -11,10 +10,10 @@ def query_relation_type(project, relation: SnowflakeRelation) -> Optional[str]:
     sql = f"""
         select
             case
+                when table_type = 'BASE TABLE' and is_dynamic = 'YES' then 'dynamic_table'
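+                -- Snowflake reports dynamic tables as BASE TABLE with is_dynamic = 'YES', hence the explicit check above the plain table case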
                 when table_type = 'BASE TABLE' then 'table'
                 when table_type = 'VIEW' then 'view'
                 when table_type = 'EXTERNAL TABLE' then 'external_table'
-                when table_type is null then 'dynamic_table'
             end as relation_type
         from information_schema.tables
         where table_name like '{relation.identifier.upper()}'
@@ -30,19 +29,24 @@ def query_relation_type(project, relation: SnowflakeRelation) -> Optional[str]:
         return results[0].lower()
 
 
-def query_target_lag(adapter, dynamic_table: SnowflakeRelation) -> Optional[str]:
-    config = describe_dynamic_table(adapter, dynamic_table)
+def query_target_lag(project, dynamic_table: SnowflakeRelation) -> Optional[str]:
+    config = describe_dynamic_table(project, dynamic_table)
     return config.get("target_lag")
 
 
-def query_warehouse(adapter, dynamic_table: SnowflakeRelation) -> Optional[str]:
-    config = describe_dynamic_table(adapter, dynamic_table)
+def query_warehouse(project, dynamic_table: SnowflakeRelation) -> Optional[str]:
+    config = describe_dynamic_table(project, dynamic_table)
     return config.get("warehouse")
 
 
-def describe_dynamic_table(adapter: BaseAdapter, dynamic_table: SnowflakeRelation) -> agate.Row:
-    with get_connection(adapter):
-        macro_results = adapter.execute_macro(
+def query_refresh_mode(project, dynamic_table: SnowflakeRelation) -> Optional[str]:
+    config = describe_dynamic_table(project, dynamic_table)
+    return config.get("refresh_mode")
+
+
+def describe_dynamic_table(project, dynamic_table: SnowflakeRelation) -> agate.Row:
+    with get_connection(project.adapter):
+        macro_results = project.adapter.execute_macro(
             "snowflake__describe_dynamic_table", kwargs={"relation": dynamic_table}
         )
     config = macro_results["dynamic_table"]
diff --git a/tests/functional/adapter/empty/test_empty.py b/tests/functional/adapter/empty/test_empty.py
index 37aa45f60..0bf9d1a41 100644
--- a/tests/functional/adapter/empty/test_empty.py
+++ b/tests/functional/adapter/empty/test_empty.py
@@ -1,5 +1,17 @@
-from dbt.tests.adapter.empty.test_empty import BaseTestEmpty
+from dbt.tests.adapter.empty.test_empty import (
+    BaseTestEmpty,
+    BaseTestEmptyInlineSourceRef,
+    MetadataWithEmptyFlag,
+)
 
 
 class TestSnowflakeEmpty(BaseTestEmpty):
     pass
+
+
+class TestSnowflakeEmptyInlineSourceRef(BaseTestEmptyInlineSourceRef):
+    pass
+
+
+class TestMetadataWithEmptyFlag(MetadataWithEmptyFlag):
+    pass
diff --git a/tests/functional/adapter/test_list_relations_without_caching.py b/tests/functional/adapter/list_relations_tests/test_pagination.py
similarity index 79%
rename from tests/functional/adapter/test_list_relations_without_caching.py
rename to tests/functional/adapter/list_relations_tests/test_pagination.py
index b126984a3..407f9c501 100644
--- a/tests/functional/adapter/test_list_relations_without_caching.py
+++ b/tests/functional/adapter/list_relations_tests/test_pagination.py
@@ -1,19 +1,24 @@
+import os
 import pytest
-
 import json
 from dbt.tests.util import run_dbt, run_dbt_and_capture
+from dbt.adapters.snowflake import SnowflakeRelation
 
 # Testing rationale:
 # - snowflake SHOW TERSE OBJECTS command returns at max 10K objects in a single call
-# - when dbt attempts to write into a scehma with more than 10K objects, compilation will fail
+# - when dbt attempts to write into a schema with more than 10K objects, compilation will fail
 #   unless we paginate the result
 # - however, testing this process is difficult at a full scale of 10K actual objects populated
 #   into a fresh testing schema
 # - accordingly, we create a smaller set of views and test the looping iteration logic in
 #   smaller chunks
 
-NUM_VIEWS = 100
-NUM_EXPECTED_RELATIONS = 1 + NUM_VIEWS
+NUM_VIEWS = 90
+NUM_DYNAMIC_TABLES = 10
+# the total number should be between the numbers referenced in the "passing" and "failing" macros below
+# - MACROS__VALIDATE__SNOWFLAKE__LIST_RELATIONS_WITHOUT_CACHING (11 iter * 10 results per iter -> 110 objects)
+# - MACROS__VALIDATE__SNOWFLAKE__LIST_RELATIONS_WITHOUT_CACHING_RAISE_ERROR (33 iter * 3 results per iter -> 99 objects)
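+# with 1 base table + 90 views + 10 dynamic tables, the total is 101, which sits inside that 99-110 window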
+NUM_EXPECTED_RELATIONS = 1 + NUM_VIEWS + NUM_DYNAMIC_TABLES
 
 TABLE_BASE_SQL = """
 {{ config(materialized='table') }}
@@ -25,6 +30,20 @@
 select id from {{ ref('my_model_base') }}
 """.lstrip()
 
+DYNAMIC_TABLE = (
+    """
+{{ config(
+    materialized='dynamic_table',
+    target_lag='1 hour',
+    snowflake_warehouse='"""
+    + os.getenv("SNOWFLAKE_TEST_WAREHOUSE")
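+    # assumes SNOWFLAKE_TEST_WAREHOUSE is set; os.getenv returns None otherwise, which would break this concatenation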
+    + """',
+) }}
+
+select id from {{ ref('my_model_base') }}
+"""
+)
+
 MACROS__VALIDATE__SNOWFLAKE__LIST_RELATIONS_WITHOUT_CACHING = """
 {% macro validate_list_relations_without_caching(schema_relation) %}
     {% set relation_list_result = snowflake__list_relations_without_caching(schema_relation, max_iter=11, max_results_per_iter=10) %}
@@ -81,7 +100,8 @@ def models(self):
         my_models = {"my_model_base.sql": TABLE_BASE_SQL}
         for view in range(0, NUM_VIEWS):
             my_models.update({f"my_model_{view}.sql": VIEW_X_SQL})
-
+        for dynamic_table in range(0, NUM_DYNAMIC_TABLES):
+            my_models.update({f"my_dynamic_table_{dynamic_table}.sql": DYNAMIC_TABLE})
         return my_models
 
     @pytest.fixture(scope="class")
@@ -101,8 +121,8 @@ def test__snowflake__list_relations_without_caching_termination(self, project):
         schemas = project.created_schemas
 
         for schema in schemas:
-            schema_relation = {"database": database, "schema": schema}
-            kwargs = {"schema_relation": schema_relation}
+            schema_relation = SnowflakeRelation.create(database=database, schema=schema)
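+            # render() turns the relation into the fully qualified name string the macro expects via --args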
+            kwargs = {"schema_relation": schema_relation.render()}
             _, log_output = run_dbt_and_capture(
                 [
                     "--debug",
@@ -116,7 +136,6 @@ def test__snowflake__list_relations_without_caching_termination(self, project):
 
             parsed_logs = parse_json_logs(log_output)
             n_relations = find_result_in_parsed_logs(parsed_logs, "n_relations")
-
             assert n_relations == "n_relations: 1"
 
 
@@ -126,7 +145,8 @@ def models(self):
         my_models = {"my_model_base.sql": TABLE_BASE_SQL}
         for view in range(0, NUM_VIEWS):
             my_models.update({f"my_model_{view}.sql": VIEW_X_SQL})
-
+        for dynamic_table in range(0, NUM_DYNAMIC_TABLES):
+            my_models.update({f"my_dynamic_table_{dynamic_table}.sql": DYNAMIC_TABLE})
         return my_models
 
     @pytest.fixture(scope="class")
@@ -149,8 +169,8 @@ def test__snowflake__list_relations_without_caching(self, project):
         schemas = project.created_schemas
 
         for schema in schemas:
-            schema_relation = {"database": database, "schema": schema}
-            kwargs = {"schema_relation": schema_relation}
+            schema_relation = SnowflakeRelation.create(database=database, schema=schema)
+            kwargs = {"schema_relation": schema_relation.render()}
             _, log_output = run_dbt_and_capture(
                 [
                     "--debug",
@@ -177,9 +197,9 @@ def test__snowflake__list_relations_without_caching_raise_error(self, project):
         schemas = project.created_schemas
 
         for schema in schemas:
-            schema_relation = {"database": database, "schema": schema}
+            schema_relation = SnowflakeRelation.create(database=database, schema=schema)
 
-            kwargs = {"schema_relation": schema_relation}
+            kwargs = {"schema_relation": schema_relation.render()}
             _, log_output = run_dbt_and_capture(
                 [
                     "--debug",
diff --git a/tests/functional/adapter/list_relations_tests/test_show_objects.py b/tests/functional/adapter/list_relations_tests/test_show_objects.py
new file mode 100644
index 000000000..e5eee39d9
--- /dev/null
+++ b/tests/functional/adapter/list_relations_tests/test_show_objects.py
@@ -0,0 +1,89 @@
+import os
+from typing import List
+
+import pytest
+
+from dbt.adapters.factory import get_adapter_by_type
+from dbt.adapters.snowflake import SnowflakeRelation
+
+from dbt.tests.util import run_dbt, get_connection
+
+
+SEED = """
+id,value
+0,red
+1,yellow
+2,blue
+""".strip()
+
+
+VIEW = """
+select * from {{ ref('my_seed') }}
+"""
+
+
+TABLE = """
+{{ config(materialized='table') }}
+select * from {{ ref('my_seed') }}
+"""
+
+
+DYNAMIC_TABLE = (
+    """
+{{ config(
+    materialized='dynamic_table',
+    target_lag='1 day',
+    snowflake_warehouse='"""
+    + os.getenv("SNOWFLAKE_TEST_WAREHOUSE")
+    + """',
+) }}
+select * from {{ ref('my_seed') }}
+"""
+)
+
+
+class TestShowObjects:
+    views: int = 10
+    tables: int = 10
+    dynamic_tables: int = 10
+
+    @pytest.fixture(scope="class")
+    def seeds(self):
+        yield {"my_seed.csv": SEED}
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        models = {}
+        models.update({f"my_view_{i}.sql": VIEW for i in range(self.views)})
+        models.update({f"my_table_{i}.sql": TABLE for i in range(self.tables)})
+        models.update(
+            {f"my_dynamic_table_{i}.sql": DYNAMIC_TABLE for i in range(self.dynamic_tables)}
+        )
+        yield models
+
+    @pytest.fixture(scope="class", autouse=True)
+    def setup(self, project):
+        run_dbt(["seed"])
+        run_dbt(["run"])
+
+    @staticmethod
+    def list_relations_without_caching(project) -> List[SnowflakeRelation]:
+        my_adapter = get_adapter_by_type("snowflake")
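+        # a relation with an empty identifier stands in for the schema itself in the listing call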
+        schema = my_adapter.Relation.create(
+            database=project.database, schema=project.test_schema, identifier=""
+        )
+        with get_connection(my_adapter):
+            relations = my_adapter.list_relations_without_caching(schema)
+        return relations
+
+    def test_list_relations_without_caching(self, project):
+        relations = self.list_relations_without_caching(project)
+        assert len([relation for relation in relations if relation.is_view]) == self.views
+        assert (
+            len([relation for relation in relations if relation.is_table])
+            == self.tables + 1  # add the seed
+        )
+        assert (
+            len([relation for relation in relations if relation.is_dynamic_table])
+            == self.dynamic_tables
+        )
diff --git a/tests/functional/adapter/list_relations_tests/test_special_characters.py b/tests/functional/adapter/list_relations_tests/test_special_characters.py
new file mode 100644
index 000000000..4dce56da9
--- /dev/null
+++ b/tests/functional/adapter/list_relations_tests/test_special_characters.py
@@ -0,0 +1,25 @@
+import pytest
+from dbt.tests.util import run_dbt
+
+
+TABLE_BASE_SQL = """
+-- models/my_model.sql
+{{ config(schema = '1_contains_special*character$') }}
+select 1 as id
+"""
+
+
+class TestSpecialCharactersInSchema:
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {"quoting": {"schema": True}}
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "my_model.sql": TABLE_BASE_SQL,
+        }
+
+    def test_schema_with_special_chars(self, project):
+        run_dbt(["run", "-s", "my_model"])
diff --git a/tests/functional/adapter/query_comment_tests/test_query_comments.py b/tests/functional/adapter/query_comment_tests/test_query_comments.py
index 1ce4b6c7c..8fbfcbdd8 100644
--- a/tests/functional/adapter/query_comment_tests/test_query_comments.py
+++ b/tests/functional/adapter/query_comment_tests/test_query_comments.py
@@ -1,3 +1,4 @@
+import pytest
 from dbt.tests.adapter.query_comment.test_query_comment import (
     BaseQueryComments,
     BaseMacroQueryComments,
@@ -17,7 +18,12 @@ class TestMacroQueryCommentsSnowflake(BaseMacroQueryComments):
 
 
 class TestMacroArgsQueryCommentsSnowflake(BaseMacroArgsQueryComments):
-    pass
+    @pytest.mark.skip(
+        "This test is incorrectly comparing the version of `dbt-core`"
+        "to the version of `dbt-snowflake`, which is not always the same."
+    )
+    def test_matches_comment(self, project, get_package_version):
+        pass
 
 
 class TestMacroInvalidQueryCommentsSnowflake(BaseMacroInvalidQueryComments):
diff --git a/tests/functional/adapter/test_anonymous_usage_stats.py b/tests/functional/adapter/test_anonymous_usage_stats.py
new file mode 100644
index 000000000..bc5a4d334
--- /dev/null
+++ b/tests/functional/adapter/test_anonymous_usage_stats.py
@@ -0,0 +1,43 @@
+from dbt.tests.util import run_dbt_and_capture
+import pytest
+
+
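+# dbt injects this attribution line into the generated Snowpark python code when anonymous usage stats are enabled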
+ANONYMOUS_USAGE_MESSAGE = """
+sys._xoptions['snowflake_partner_attribution'].append("dbtLabs_dbtPython")
+""".strip()
+
+
+MY_PYTHON_MODEL = """
+import pandas
+
+def model(dbt, session):
+    dbt.config(materialized='table')
+    data = [[1,2]] * 10
+    return pandas.DataFrame(data, columns=['test', 'test2'])
+"""
+
+
+class AnonymousUsageStatsBase:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"my_python_model.py": MY_PYTHON_MODEL}
+
+
+class TestAnonymousUsageStatsOn(AnonymousUsageStatsBase):
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {"flags": {"send_anonymous_usage_stats": True}}
+
+    def test_stats_get_sent(self, project):
+        _, logs = run_dbt_and_capture(["--debug", "run"])
+        assert ANONYMOUS_USAGE_MESSAGE in logs
+
+
+class TestAnonymousUsageStatsOff(AnonymousUsageStatsBase):
+    @pytest.fixture(scope="class")
+    def project_config_update(self, dbt_profile_target):
+        return {"flags": {"send_anonymous_usage_stats": False}}
+
+    def test_stats_do_not_get_sent(self, project):
+        _, logs = run_dbt_and_capture(["--debug", "run"])
+        assert ANONYMOUS_USAGE_MESSAGE not in logs
diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py
index 45bdcf150..1a79f672b 100644
--- a/tests/functional/adapter/test_basic.py
+++ b/tests/functional/adapter/test_basic.py
@@ -5,6 +5,9 @@
 from dbt.tests.adapter.basic.test_singular_tests_ephemeral import (
     BaseSingularTestsEphemeral,
 )
+from dbt.tests.adapter.basic.test_get_catalog_for_single_relation import (
+    BaseGetCatalogForSingleRelation,
+)
 from dbt.tests.adapter.basic.test_empty import BaseEmpty
 from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral
 from dbt.tests.adapter.basic.test_incremental import BaseIncremental
@@ -14,6 +17,9 @@
 from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod
 from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate
 from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats
+from dbt_common.contracts.metadata import CatalogTable, TableMetadata, ColumnMetadata, StatsItem
+
+from dbt.adapters.snowflake.relation_configs import SnowflakeRelationType
 from tests.functional.adapter.expected_stats import snowflake_stats
 
 
@@ -25,6 +31,162 @@ class TestSingularTestsSnowflake(BaseSingularTests):
     pass
 
 
+class TestGetCatalogForSingleRelationSnowflake(BaseGetCatalogForSingleRelation):
+    @pytest.fixture(scope="class")
+    def current_role(self, project):
+        return project.run_sql("select current_role()", fetch="one")[0]
+
+    @pytest.fixture(scope="class")
+    def expected_catalog_my_seed(self, project, current_role):
+        return CatalogTable(
+            metadata=TableMetadata(
+                type=SnowflakeRelationType.Table.upper(),
+                schema=project.test_schema.upper(),
+                name="MY_SEED",
+                database=project.database,
+                comment="",
+                owner=current_role,
+            ),
+            columns={
+                "ID": ColumnMetadata(type="NUMBER", index=1, name="ID", comment=None),
+                "FIRST_NAME": ColumnMetadata(
+                    type="VARCHAR", index=2, name="FIRST_NAME", comment=None
+                ),
+                "EMAIL": ColumnMetadata(type="VARCHAR", index=3, name="EMAIL", comment=None),
+                "IP_ADDRESS": ColumnMetadata(
+                    type="VARCHAR", index=4, name="IP_ADDRESS", comment=None
+                ),
+                "UPDATED_AT": ColumnMetadata(
+                    type="TIMESTAMP_NTZ", index=5, name="UPDATED_AT", comment=None
+                ),
+            },
+            stats={
+                "has_stats": StatsItem(
+                    id="has_stats",
+                    label="Has Stats?",
+                    value=True,
+                    include=False,
+                    description="Indicates whether there are statistics for this table",
+                ),
+                "row_count": StatsItem(
+                    id="row_count",
+                    label="Row Count",
+                    value=1,
+                    include=True,
+                    description="Number of rows in the table as reported by Snowflake",
+                ),
+                "bytes": StatsItem(
+                    id="bytes",
+                    label="Approximate Size",
+                    value=2048,
+                    include=True,
+                    description="Size of the table as reported by Snowflake",
+                ),
+            },
+            unique_id=None,
+        )
+
+    @pytest.fixture(scope="class")
+    def expected_catalog_my_view_model(self, project, current_role):
+        return CatalogTable(
+            metadata=TableMetadata(
+                type=SnowflakeRelationType.View.upper(),
+                schema=project.test_schema.upper(),
+                name="MY_VIEW_MODEL",
+                database=project.database,
+                comment="",
+                owner=current_role,
+            ),
+            columns={
+                "ID": ColumnMetadata(type="NUMBER", index=1, name="ID", comment=None),
+                "FIRST_NAME": ColumnMetadata(
+                    type="VARCHAR", index=2, name="FIRST_NAME", comment=None
+                ),
+                "EMAIL": ColumnMetadata(type="VARCHAR", index=3, name="EMAIL", comment=None),
+                "IP_ADDRESS": ColumnMetadata(
+                    type="VARCHAR", index=4, name="IP_ADDRESS", comment=None
+                ),
+                "UPDATED_AT": ColumnMetadata(
+                    type="TIMESTAMP_NTZ", index=5, name="UPDATED_AT", comment=None
+                ),
+            },
+            stats={
+                "has_stats": StatsItem(
+                    id="has_stats",
+                    label="Has Stats?",
+                    value=True,
+                    include=False,
+                    description="Indicates whether there are statistics for this table",
+                ),
+                "row_count": StatsItem(
+                    id="row_count",
+                    label="Row Count",
+                    value=0,
+                    include=False,
+                    description="Number of rows in the table as reported by Snowflake",
+                ),
+                "bytes": StatsItem(
+                    id="bytes",
+                    label="Approximate Size",
+                    value=0,
+                    include=False,
+                    description="Size of the table as reported by Snowflake",
+                ),
+            },
+            unique_id=None,
+        )
+
+    @pytest.fixture(scope="class")
+    def expected_catalog_my_table_model(self, project, current_role):
+        return CatalogTable(
+            metadata=TableMetadata(
+                type=SnowflakeRelationType.Table.upper(),
+                schema=project.test_schema.upper(),
+                name="MY_TABLE_MODEL",
+                database=project.database,
+                comment="",
+                owner=current_role,
+            ),
+            columns={
+                "ID": ColumnMetadata(type="NUMBER", index=1, name="ID", comment=None),
+                "FIRST_NAME": ColumnMetadata(
+                    type="VARCHAR", index=2, name="FIRST_NAME", comment=None
+                ),
+                "EMAIL": ColumnMetadata(type="VARCHAR", index=3, name="EMAIL", comment=None),
+                "IP_ADDRESS": ColumnMetadata(
+                    type="VARCHAR", index=4, name="IP_ADDRESS", comment=None
+                ),
+                "UPDATED_AT": ColumnMetadata(
+                    type="TIMESTAMP_NTZ", index=5, name="UPDATED_AT", comment=None
+                ),
+            },
+            stats={
+                "has_stats": StatsItem(
+                    id="has_stats",
+                    label="Has Stats?",
+                    value=True,
+                    include=False,
+                    description="Indicates whether there are statistics for this table",
+                ),
+                "row_count": StatsItem(
+                    id="row_count",
+                    label="Row Count",
+                    value=1,
+                    include=True,
+                    description="Number of rows in the table as reported by Snowflake",
+                ),
+                "bytes": StatsItem(
+                    id="bytes",
+                    label="Approximate Size",
+                    value=2048,
+                    include=True,
+                    description="Size of the table as reported by Snowflake",
+                ),
+            },
+            unique_id=None,
+        )
+
+
 class TestSingularTestsEphemeralSnowflake(BaseSingularTestsEphemeral):
     pass
 
diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py
index 12c70a6eb..03adc3bed 100644
--- a/tests/functional/adapter/test_constraints.py
+++ b/tests/functional/adapter/test_constraints.py
@@ -84,6 +84,8 @@ def data_types(self, int_type, schema_int_type, string_type):
             ["'2013-11-03 00:00:00-07'::timestamp", "timestamp", "TIMESTAMP_NTZ"],
             ["ARRAY_CONSTRUCT('a','b','c')", "array", "ARRAY"],
             ["ARRAY_CONSTRUCT(1,2,3)", "array", "ARRAY"],
+            ["TO_GEOGRAPHY('POINT(-122.35 37.55)')", "geography", "GEOGRAPHY"],
+            ["TO_GEOMETRY('POINT(1820.12 890.56)')", "geometry", "GEOMETRY"],
             [
                 """TO_VARIANT(PARSE_JSON('{"key3": "value3", "key4": "value4"}'))""",
                 "variant",
diff --git a/tests/functional/adapter/test_ephemeral.py b/tests/functional/adapter/test_ephemeral.py
index f58141bf7..ff8cee913 100644
--- a/tests/functional/adapter/test_ephemeral.py
+++ b/tests/functional/adapter/test_ephemeral.py
@@ -3,7 +3,7 @@
 
 
 class TestEphemeralMultiSnowflake(BaseEphemeralMulti):
-    def test_ephemeral_multi_snowflake(self, project):
+    def test_ephemeral_multi(self, project):
         run_dbt(["seed"])
         results = run_dbt(["run"])
         assert len(results) == 3
diff --git a/tests/functional/adapter/test_get_last_relation_modified.py b/tests/functional/adapter/test_get_last_relation_modified.py
index 870488499..56e8d46bf 100644
--- a/tests/functional/adapter/test_get_last_relation_modified.py
+++ b/tests/functional/adapter/test_get_last_relation_modified.py
@@ -1,10 +1,13 @@
 import os
 import pytest
+from unittest import mock
 
+from dbt.adapters.snowflake.impl import SnowflakeAdapter
+from dbt.adapters.capability import Capability, CapabilityDict
 from dbt.cli.main import dbtRunner
 
 
-freshness_via_metadata_schema_yml = """version: 2
+freshness_via_metadata_schema_yml = """
 sources:
   - name: test_source
     freshness:
@@ -15,18 +18,28 @@
       - name: test_table
 """
 
+freshness_metadata_schema_batch_yml = """
+sources:
+  - name: test_source
+    freshness:
+      warn_after: {count: 10, period: hour}
+      error_after: {count: 1, period: day}
+    schema: "{{ env_var('DBT_GET_LAST_RELATION_TEST_SCHEMA') }}"
+    tables:
+      - name: test_table
+      - name: test_table2
+      - name: test_table_with_loaded_at_field
+        loaded_at_field: my_loaded_at_field
+"""
 
-class TestGetLastRelationModified:
+
+class SetupGetLastRelationModified:
     @pytest.fixture(scope="class", autouse=True)
     def set_env_vars(self, project):
         os.environ["DBT_GET_LAST_RELATION_TEST_SCHEMA"] = project.test_schema
         yield
         del os.environ["DBT_GET_LAST_RELATION_TEST_SCHEMA"]
 
-    @pytest.fixture(scope="class")
-    def models(self):
-        return {"schema.yml": freshness_via_metadata_schema_yml}
-
     @pytest.fixture(scope="class")
     def custom_schema(self, project, set_env_vars):
         with project.adapter.connection_named("__test"):
@@ -41,6 +54,12 @@ def custom_schema(self, project, set_env_vars):
         with project.adapter.connection_named("__test"):
             project.adapter.drop_schema(relation)
 
+
+class TestGetLastRelationModified(SetupGetLastRelationModified):
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"schema.yml": freshness_via_metadata_schema_yml}
+
     def test_get_last_relation_modified(self, project, set_env_vars, custom_schema):
         project.run_sql(
             f"create table {custom_schema}.test_table (id integer autoincrement, name varchar(100) not null);"
@@ -58,3 +77,76 @@ def probe(e):
 
         # The 'source freshness' command should succeed without warnings or errors.
         assert not warning_or_error
+
+
+class TestGetLastRelationModifiedBatch(SetupGetLastRelationModified):
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"schema.yml": freshness_metadata_schema_batch_yml}
+
+    def get_freshness_result_for_table(self, table_name, results):
+        for result in results:
+            if result.node.name == table_name:
+                return result
+        return None
+
+    def test_get_last_relation_modified_batch(self, project, set_env_vars, custom_schema):
+        project.run_sql(
+            f"create table {custom_schema}.test_table (id integer autoincrement, name varchar(100) not null);"
+        )
+        project.run_sql(
+            f"create table {custom_schema}.test_table2 (id integer autoincrement, name varchar(100) not null);"
+        )
+        project.run_sql(
+            f"create table {custom_schema}.test_table_with_loaded_at_field as (select 1 as id, timestamp '2009-09-15 10:59:43' as my_loaded_at_field);"
+        )
+
+        runner = dbtRunner()
+        freshness_results_batch = runner.invoke(["source", "freshness"]).result
+
+        assert len(freshness_results_batch) == 3
+        test_table_batch_result = self.get_freshness_result_for_table(
+            "test_table", freshness_results_batch
+        )
+        test_table2_batch_result = self.get_freshness_result_for_table(
+            "test_table2", freshness_results_batch
+        )
+        test_table_with_loaded_at_field_batch_result = self.get_freshness_result_for_table(
+            "test_table_with_loaded_at_field", freshness_results_batch
+        )
+
+        # Remove TableLastModifiedMetadataBatch and run freshness on same input without batch strategy
+        capabilities_no_batch = CapabilityDict(
+            {
+                capability: support
+                for capability, support in SnowflakeAdapter.capabilities().items()
+                if capability != Capability.TableLastModifiedMetadataBatch
+            }
+        )
+        with mock.patch.object(
+            SnowflakeAdapter, "capabilities", return_value=capabilities_no_batch
+        ):
+            freshness_results = runner.invoke(["source", "freshness"]).result
+
+        assert len(freshness_results) == 3
+        test_table_result = self.get_freshness_result_for_table("test_table", freshness_results)
+        test_table2_result = self.get_freshness_result_for_table("test_table2", freshness_results)
+        test_table_with_loaded_at_field_result = self.get_freshness_result_for_table(
+            "test_table_with_loaded_at_field", freshness_results
+        )
+
+        # assert results between batch vs non-batch freshness strategy are equivalent
+        assert test_table_result.status == test_table_batch_result.status
+        assert test_table_result.max_loaded_at == test_table_batch_result.max_loaded_at
+
+        assert test_table2_result.status == test_table2_batch_result.status
+        assert test_table2_result.max_loaded_at == test_table2_batch_result.max_loaded_at
+
+        assert (
+            test_table_with_loaded_at_field_batch_result.status
+            == test_table_with_loaded_at_field_result.status
+        )
+        assert (
+            test_table_with_loaded_at_field_batch_result.max_loaded_at
+            == test_table_with_loaded_at_field_result.max_loaded_at
+        )
diff --git a/tests/functional/adapter/test_python_model.py b/tests/functional/adapter/test_python_model.py
index ea67e6c1c..86ea0a346 100644
--- a/tests/functional/adapter/test_python_model.py
+++ b/tests/functional/adapter/test_python_model.py
@@ -1,4 +1,5 @@
 import pytest
+import uuid
 from dbt.tests.util import run_dbt, write_file
 from dbt.tests.adapter.python_model.test_python_model import (
     BasePythonModelTests,
@@ -138,3 +139,93 @@ def teardown_method(self, project):
     def test_custom_target(self, project):
         results = run_dbt()
         assert results[0].node.schema == f"{project.test_schema}_MY_CUSTOM_SCHEMA"
+
+
+EXTERNAL_ACCESS_INTEGRATION_MODE = """
+import pandas
+import snowflake.snowpark as snowpark
+
+def model(dbt, session: snowpark.Session):
+    dbt.config(
+        materialized="table",
+        external_access_integrations=["test_external_access_integration"],
+        packages=["httpx==0.26.0"]
+    )
+    import httpx
+    return session.create_dataframe(
+        pandas.DataFrame(
+            [{"result": httpx.get(url="https://www.google.com").status_code}]
+        )
+    )
+"""
+
+
+class TestExternalAccessIntegration:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"external_access_integration_python_model.py": EXTERNAL_ACCESS_INTEGRATION_MODE}
+
+    def test_external_access_integration(self, project):
+        project.run_sql(
+            "create or replace network rule test_network_rule type = host_port mode = egress value_list= ('www.google.com:443');"
+        )
+        project.run_sql(
+            "create or replace external access integration test_external_access_integration allowed_network_rules = (test_network_rule) enabled = true;"
+        )
+        run_dbt(["run"])
+
+
+TEST_RUN_ID = uuid.uuid4().hex
+TEST_SECRET = f"test_secret_{TEST_RUN_ID}"
+TEST_NETWORK_RULE = f"test_network_rule_{TEST_RUN_ID}"
+TEST_EXTERNAL_ACCESS_INTEGRATION = f"test_external_access_integration_{TEST_RUN_ID}"
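+# braces in the template below are doubled to escape f-string interpolation in the generated model code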
+SECRETS_MODE = f"""
+import pandas
+import snowflake.snowpark as snowpark
+
+def model(dbt, session: snowpark.Session):
+    dbt.config(
+        materialized="table",
+        secrets={{"secret_variable_name": "{TEST_SECRET}"}},
+        external_access_integrations=["{TEST_EXTERNAL_ACCESS_INTEGRATION}"],
+    )
+    import _snowflake
+    return session.create_dataframe(
+        pandas.DataFrame(
+            [{{"secret_value": _snowflake.get_generic_secret_string('secret_variable_name')}}]
+        )
+    )
+"""
+
+
+class TestSecrets:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"secret_python_model.py": SECRETS_MODE}
+
+    @pytest.fixture(scope="class")
+    def profiles_config_update(self):
+        return {"retry_all": True, "connect_retries": 3}
+
+    def test_secrets(self, project):
+        project.run_sql(
+            f"create or replace secret {TEST_SECRET} type = generic_string secret_string='secret value';"
+        )
+
+        project.run_sql(
+            f"create or replace network rule {TEST_NETWORK_RULE} type = host_port mode = egress value_list= ('www.google.com:443');"
+        )
+
+        project.run_sql(
+            f"create or replace external access integration {TEST_EXTERNAL_ACCESS_INTEGRATION} "
+            f"allowed_network_rules = ({TEST_NETWORK_RULE}) "
+            f"allowed_authentication_secrets = ({TEST_SECRET}) enabled = true;"
+        )
+
+        run_dbt(["run"])
+
+        project.run_sql(f"drop secret if exists {TEST_SECRET};")
+        project.run_sql(f"drop network rule if exists {TEST_NETWORK_RULE};")
+        project.run_sql(
+            f"drop external access integration if exists {TEST_EXTERNAL_ACCESS_INTEGRATION};"
+        )
diff --git a/tests/functional/adapter/utils/test_utils.py b/tests/functional/adapter/utils/test_utils.py
index bea418998..c7ec0ce6f 100644
--- a/tests/functional/adapter/utils/test_utils.py
+++ b/tests/functional/adapter/utils/test_utils.py
@@ -3,9 +3,11 @@
 from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct
 from dbt.tests.adapter.utils.test_any_value import BaseAnyValue
 from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr
+from dbt.tests.adapter.utils.test_cast import BaseCast
 from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText
 from dbt.tests.adapter.utils.test_concat import BaseConcat
 from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware
+from dbt.tests.adapter.utils.test_date import BaseDate
 from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd
 from dbt.tests.adapter.utils.test_datediff import BaseDateDiff
 from dbt.tests.adapter.utils.test_date_spine import BaseDateSpine
@@ -49,6 +51,10 @@ class TestBoolOr(BaseBoolOr):
     pass
 
 
+class TestCast(BaseCast):
+    pass
+
+
 class TestCastBoolToText(BaseCastBoolToText):
     pass
 
@@ -62,6 +68,10 @@ class TestCurrentTimestamp(BaseCurrentTimestampAware):
     pass
 
 
+class TestDate(BaseDate):
+    pass
+
+
 class TestDateAdd(BaseDateAdd):
     pass
 
diff --git a/tests/functional/auth_tests/test_jwt.py b/tests/functional/auth_tests/test_jwt.py
new file mode 100644
index 000000000..fbe8e20e6
--- /dev/null
+++ b/tests/functional/auth_tests/test_jwt.py
@@ -0,0 +1,91 @@
+"""
+Please follow the instructions in test_oauth.py to set up the security
+integration required to retrieve a JWT from Snowflake.
+"""
+
+import pytest
+import os
+from dbt.tests.util import run_dbt, check_relations_equal
+
+from dbt.adapters.snowflake import SnowflakeCredentials
+
+_MODELS__MODEL_1_SQL = """
+select 1 as id
+"""
+
+
+_MODELS__MODEL_2_SQL = """
+select 2 as id
+"""
+
+
+_MODELS__MODEL_3_SQL = """
+select * from {{ ref('model_1') }}
+union all
+select * from {{ ref('model_2') }}
+"""
+
+
+_MODELS__MODEL_4_SQL = """
+select 1 as id
+union all
+select 2 as id
+"""
+
+
+class TestSnowflakeJWT:
+    """Tests that setting authenticator: jwt allows setting token to a plain JWT
+    that will be passed into the Snowflake connection without modification."""
+
+    @pytest.fixture(scope="class", autouse=True)
+    def access_token(self):
+        """Because JWTs are short-lived, we need to get a fresh JWT via the refresh
+        token flow before running the test.
+
+        This fixture leverages the existing SnowflakeCredentials._get_access_token
+        method to retrieve a valid JWT from Snowflake.
+        """
+        client_id = os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_ID")
+        client_secret = os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET")
+        refresh_token = os.getenv("SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN")
+
+        credentials = SnowflakeCredentials(
+            account=os.getenv("SNOWFLAKE_TEST_ACCOUNT"),
+            database="",
+            schema="",
+            authenticator="oauth",
+            oauth_client_id=client_id,
+            oauth_client_secret=client_secret,
+            token=refresh_token,
+        )
+
+        yield credentials._get_access_token()
+
+    @pytest.fixture(scope="class", autouse=True)
+    def dbt_profile_target(self, access_token):
+        """A dbt_profile that has authenticator set to JWT, and token set to
+        a JWT accepted by Snowflake. Also omits the user, as the user attribute
+        is optional when the authenticator is set to JWT.
+        """
+        return {
+            "type": "snowflake",
+            "threads": 4,
+            "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"),
+            "database": os.getenv("SNOWFLAKE_TEST_DATABASE"),
+            "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"),
+            "authenticator": "jwt",
+            "token": access_token,
+        }
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model_1.sql": _MODELS__MODEL_1_SQL,
+            "model_2.sql": _MODELS__MODEL_2_SQL,
+            "model_3.sql": _MODELS__MODEL_3_SQL,
+            "model_4.sql": _MODELS__MODEL_4_SQL,
+        }
+
+    def test_snowflake_basic(self, project):
+        run_dbt()
+        check_relations_equal(project.adapter, ["MODEL_3", "MODEL_4"])
diff --git a/tests/functional/auth_tests/test_key_pair.py b/tests/functional/auth_tests/test_key_pair.py
new file mode 100644
index 000000000..6d3254f33
--- /dev/null
+++ b/tests/functional/auth_tests/test_key_pair.py
@@ -0,0 +1,26 @@
+import os
+
+from dbt.tests.util import run_dbt
+import pytest
+
+
+class TestKeyPairAuth:
+    @pytest.fixture(scope="class", autouse=True)
+    def dbt_profile_target(self):
+        return {
+            "type": "snowflake",
+            "threads": 4,
+            "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"),
+            "user": os.getenv("SNOWFLAKE_TEST_USER"),
+            "private_key": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY"),
+            "private_key_passphrase": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE"),
+            "database": os.getenv("SNOWFLAKE_TEST_DATABASE"),
+            "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"),
+        }
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {"my_model.sql": "select 1 as id"}
+
+    def test_connection(self, project):
+        run_dbt()
diff --git a/tests/functional/oauth/test_oauth.py b/tests/functional/auth_tests/test_oauth.py
similarity index 97%
rename from tests/functional/oauth/test_oauth.py
rename to tests/functional/auth_tests/test_oauth.py
index 89daece0f..c8986763e 100644
--- a/tests/functional/oauth/test_oauth.py
+++ b/tests/functional/auth_tests/test_oauth.py
@@ -31,9 +31,9 @@
 integration the same, just the refresh token changed)
 """
 
-import pytest
 import os
-from dbt.tests.util import run_dbt, check_relations_equal
+from dbt.tests.util import check_relations_equal, run_dbt
+import pytest
 
 
 _MODELS__MODEL_1_SQL = """
diff --git a/tests/functional/relation_tests/base.py b/tests/functional/relation_tests/base.py
new file mode 100644
index 000000000..d08a6945b
--- /dev/null
+++ b/tests/functional/relation_tests/base.py
@@ -0,0 +1,75 @@
+import pytest
+
+from dbt.tests.util import run_dbt, run_dbt_and_capture
+
+
+SEED = """
+id
+0
+1
+2
+""".strip()
+
+
+TABLE = """
+{{ config(materialized="table") }}
+select * from {{ ref('my_seed') }}
+"""
+
+
+VIEW = """
+{{ config(materialized="view") }}
+select * from {{ ref('my_seed') }}
+"""
+
+
+MACRO__GET_CREATE_BACKUP_SQL = """
+{% macro test__get_create_backup_sql(database, schema, identifier, relation_type) -%}
+    {%- set relation = adapter.Relation.create(database=database, schema=schema, identifier=identifier, type=relation_type) -%}
+    {% call statement('test__get_create_backup_sql') -%}
+        {{ get_create_backup_sql(relation) }}
+    {%- endcall %}
+{% endmacro %}"""
+
+
+MACRO__GET_RENAME_INTERMEDIATE_SQL = """
+{% macro test__get_rename_intermediate_sql(database, schema, identifier, relation_type) -%}
+    {%- set relation = adapter.Relation.create(database=database, schema=schema, identifier=identifier, type=relation_type) -%}
+    {% call statement('test__get_rename_intermediate_sql') -%}
+        {{ get_rename_intermediate_sql(relation) }}
+    {%- endcall %}
+{% endmacro %}"""
+
+
+class RelationOperation:
+    @pytest.fixture(scope="class")
+    def seeds(self):
+        yield {"my_seed.csv": SEED}
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        yield {
+            "my_table.sql": TABLE,
+            "my_table__dbt_tmp.sql": TABLE,
+            "my_view.sql": VIEW,
+            "my_view__dbt_tmp.sql": VIEW,
+        }
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        yield {
+            "test__get_create_backup_sql.sql": MACRO__GET_CREATE_BACKUP_SQL,
+            "test__get_rename_intermediate_sql.sql": MACRO__GET_RENAME_INTERMEDIATE_SQL,
+        }
+
+    @pytest.fixture(scope="class", autouse=True)
+    def setup(self, project):
+        run_dbt(["seed"])
+        run_dbt(["run"])
+
+    def assert_operation(self, project, operation, args, expected_statement):
+        results, logs = run_dbt_and_capture(
+            ["--debug", "run-operation", operation, "--args", str(args)]
+        )
+        assert len(results) == 1
+        assert expected_statement in logs
diff --git a/tests/functional/relation_tests/test_table.py b/tests/functional/relation_tests/test_table.py
new file mode 100644
index 000000000..b4a8709ea
--- /dev/null
+++ b/tests/functional/relation_tests/test_table.py
@@ -0,0 +1,25 @@
+from tests.functional.relation_tests.base import RelationOperation
+
+
+class TestTable(RelationOperation):
+
+    def test_get_create_backup_and_rename_intermediate_sql(self, project):
+        args = {
+            "database": project.database,
+            "schema": project.test_schema,
+            "identifier": "my_table",
+            "relation_type": "table",
+        }
+        expected_statement = (
+            f"alter table {project.database}.{project.test_schema}.my_table "
+            f"rename to {project.database}.{project.test_schema}.my_table__dbt_backup"
+        )
+        self.assert_operation(project, "test__get_create_backup_sql", args, expected_statement)
+
+        expected_statement = (
+            f"alter table {project.database}.{project.test_schema}.my_table__dbt_tmp "
+            f"rename to {project.database}.{project.test_schema}.my_table"
+        )
+        self.assert_operation(
+            project, "test__get_rename_intermediate_sql", args, expected_statement
+        )
diff --git a/tests/functional/relation_tests/test_view.py b/tests/functional/relation_tests/test_view.py
new file mode 100644
index 000000000..721455da1
--- /dev/null
+++ b/tests/functional/relation_tests/test_view.py
@@ -0,0 +1,25 @@
+from tests.functional.relation_tests.base import RelationOperation
+
+
+class TestView(RelationOperation):
+
+    def test_get_create_backup_and_rename_intermediate_sql(self, project):
+        args = {
+            "database": project.database,
+            "schema": project.test_schema,
+            "identifier": "my_view",
+            "relation_type": "view",
+        }
+        expected_statement = (
+            f"alter view {project.database}.{project.test_schema}.my_view "
+            f"rename to {project.database}.{project.test_schema}.my_view__dbt_backup"
+        )
+        self.assert_operation(project, "test__get_create_backup_sql", args, expected_statement)
+
+        expected_statement = (
+            f"alter view {project.database}.{project.test_schema}.my_view__dbt_tmp "
+            f"rename to {project.database}.{project.test_schema}.my_view"
+        )
+        self.assert_operation(
+            project, "test__get_rename_intermediate_sql", args, expected_statement
+        )
diff --git a/tests/functional/warehouse_test/test_warehouses.py b/tests/functional/warehouse_test/test_warehouses.py
index 717784185..268473729 100644
--- a/tests/functional/warehouse_test/test_warehouses.py
+++ b/tests/functional/warehouse_test/test_warehouses.py
@@ -3,35 +3,34 @@
 
 import os
 
-
 models__override_warehouse_sql = """
 {{ config(snowflake_warehouse=env_var('SNOWFLAKE_TEST_ALT_WAREHOUSE', 'DBT_TEST_ALT'), materialized='table') }}
 select current_warehouse() as warehouse
-
 """
 
 models__expected_warehouse_sql = """
 {{ config(materialized='table') }}
 select '{{ env_var("SNOWFLAKE_TEST_ALT_WAREHOUSE", "DBT_TEST_ALT") }}' as warehouse
-
 """
 
 models__invalid_warehouse_sql = """
 {{ config(snowflake_warehouse='DBT_TEST_DOES_NOT_EXIST') }}
 select current_warehouse() as warehouse
-
 """
 
 project_config_models__override_warehouse_sql = """
 {{ config(materialized='table') }}
 select current_warehouse() as warehouse
-
 """
 
 project_config_models__expected_warehouse_sql = """
 {{ config(materialized='table') }}
 select '{{ env_var("SNOWFLAKE_TEST_ALT_WAREHOUSE", "DBT_TEST_ALT") }}' as warehouse
+"""
 
+project_config_models__warehouse_sql = """
+{{ config(materialized='table') }}
+select current_warehouse() as warehouse
 """
 
 
@@ -90,3 +89,47 @@ def test_snowflake_override_ok(self, project):
             ]
         )
         check_relations_equal(project.adapter, ["OVERRIDE_WAREHOUSE", "EXPECTED_WAREHOUSE"])
+
+
+class TestInvalidConfigWarehouse:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "invalid_warehouse.sql": project_config_models__warehouse_sql,
+        }
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {
+            "config-version": 2,
+            "models": {
+                "test": {"snowflake_warehouse": "DBT_TEST_DOES_NOT_EXIST"},
+            },
+        }
+
+    def test_snowflake_override_invalid(self, project):
+        result = run_dbt(["run", "--models", "invalid_warehouse"], expect_pass=False)
+        assert "Object does not exist, or operation cannot be performed" in result[0].message
+
+
+class TestValidConfigWarehouse:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "valid_warehouse.sql": project_config_models__warehouse_sql,
+        }
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {
+            "config-version": 2,
+            "models": {
+                "test": {
+                    "snowflake_warehouse": "DBT_TESTING",
+                },
+            },
+        }
+
+    def test_snowflake_warehouse_valid(self, project):
+        result = run_dbt(["run", "--models", "valid_warehouse"])
+        assert "DBT_TESTING" in result[0].node.config.get("snowflake_warehouse")
diff --git a/tests/performance/README.md b/tests/performance/README.md
new file mode 100644
index 000000000..02130c5c6
--- /dev/null
+++ b/tests/performance/README.md
@@ -0,0 +1,6 @@
+# Performance testing
+
+These tests are not meant to run on a regular basis; rather, they are tools for measuring the performance impact of changes as needed.
+We often receive requests to reduce processing times or to research why a particular component takes longer to run than expected.
+In the past we performed one-off analyses to address these requests and documented the results in the relevant PR (when a change was made).
+Documenting those analyses as performance tests instead makes it easy to rerun the analysis at a later date.
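+
+To run a scenario locally, a minimal sketch (assuming the standard `SNOWFLAKE_TEST_*` environment variables are set and the dev requirements are installed):
+
+```shell
+python -m pytest tests/performance/test_auth_methods.py
+```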
diff --git a/tests/performance/test_auth_methods.py b/tests/performance/test_auth_methods.py
new file mode 100644
index 000000000..ad0b424ab
--- /dev/null
+++ b/tests/performance/test_auth_methods.py
@@ -0,0 +1,132 @@
+"""
+Results:
+
+| method        | project_size | reuse_connections | unsafe_skip_rsa_key_validation | duration |
+|---------------|--------------|-------------------|--------------------------------|----------|
+| User Password |        1,000 | False             | -                              |  234.09s |
+| User Password |        1,000 | True              | -                              |   78.34s |
+| Key Pair      |        1,000 | False             | False                          |  271.47s |
+| Key Pair      |        1,000 | False             | True                           |  275.73s |
+| Key Pair      |        1,000 | True              | False                          |   63.69s |
+| Key Pair      |        1,000 | True              | True                           |   73.45s |
+
+Notes:
+- run locally on macOS, single-threaded
+- `unsafe_skip_rsa_key_validation` only applies to the Key Pair auth method
+- `unsafe_skip_rsa_key_validation=True` was tested by updating the relevant `cryptography` calls directly, as it is not a user configuration
+- since the models are all views, time differences should be viewed as absolute differences, e.g.:
+    - this: (271.47s - 63.69s) / 1,000 models = 208ms improvement
+    - NOT this: 1 - (63.69s / 271.47s) = 76.5% improvement
+"""
+
+from datetime import datetime
+import os
+
+from dbt.tests.util import run_dbt
+import pytest
+
+
+SEED = """
+id,value
+1,a
+2,b
+3,c
+""".strip()
+
+
+MODEL = """
+select * from {{ ref("my_seed") }}
+"""
+
+
+class Scenario:
+    """
+    Runs a full load test. The test can be configured to run an arbitrary number of models.
+
+    To use this test, subclass `Scenario` and set `auth_method`, `project_size`,
+    and/or `reuse_connections`; see the scenario subclasses at the bottom of this module.
+    """
+
+    auth_method: str
+    project_size: int = 1
+    reuse_connections: bool = False
+
+    @pytest.fixture(scope="class")
+    def seeds(self):
+        return {"my_seed.csv": SEED}
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {f"my_model_{i}.sql": MODEL for i in range(self.project_size)}
+
+    @pytest.fixture(scope="class", autouse=True)
+    def setup(self, project):
+        run_dbt(["seed"])
+
+        start = datetime.now()
+        yield
+        end = datetime.now()
+
+        duration = (end - start).total_seconds()
+        print(f"Run took: {duration} seconds")
+
+    @pytest.fixture(scope="class")
+    def dbt_profile_target(self, auth_params):
+        yield {
+            "type": "snowflake",
+            "threads": 4,
+            "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"),
+            "database": os.getenv("SNOWFLAKE_TEST_DATABASE"),
+            "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"),
+            "user": os.getenv("SNOWFLAKE_TEST_USER"),
+            "reuse_connections": self.reuse_connections,
+            **auth_params,
+        }
+
+    @pytest.fixture(scope="class")
+    def auth_params(self):
+        if self.auth_method == "user_password":
+            yield {"password": os.getenv("SNOWFLAKE_TEST_PASSWORD")}
+
+        elif self.auth_method == "key_pair":
+            """
+            This connection method uses key pair auth.
+            Follow the instructions here to setup key pair authentication for your test user:
+            https://docs.snowflake.com/en/user-guide/key-pair-auth
+            """
+            yield {
+                "private_key": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY"),
+                "private_key_passphrase": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE"),
+            }
+
+        else:
+            raise ValueError(
+                f"`auth_method` must be one of `user_password` or `key_pair`, received: {self.auth_method}"
+            )
+
+    def test_scenario(self, project):
+        run_dbt(["run"])
+
+
+class TestUserPasswordAuth(Scenario):
+    auth_method = "user_password"
+    project_size = 1_000
+    reuse_connections = False
+
+
+class TestUserPasswordAuthReuseConnections(Scenario):
+    auth_method = "user_password"
+    project_size = 1_000
+    reuse_connections = True
+
+
+class TestKeyPairAuth(Scenario):
+    auth_method = "key_pair"
+    project_size = 1_000
+    reuse_connections = False
+
+
+class TestKeyPairAuthReuseConnections(Scenario):
+    auth_method = "key_pair"
+    project_size = 1_000
+    reuse_connections = True
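+
+
+# A sketch of how a new scenario could be added (hypothetical; not part of the
+# measured results in the module docstring): subclass `Scenario` and set the
+# class attributes, e.g.:
+#
+# class TestKeyPairAuthSmallProject(Scenario):
+#     auth_method = "key_pair"
+#     project_size = 100
+#     reuse_connections = True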
diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py
index d3bdf87b2..93394bedc 100644
--- a/tests/unit/mock_adapter.py
+++ b/tests/unit/mock_adapter.py
@@ -1,7 +1,7 @@
+from contextlib import contextmanager
 from unittest import mock
 
 from dbt.adapters.base import BaseAdapter
-from contextlib import contextmanager
 
 
 def adapter_factory():
@@ -33,6 +33,9 @@ def rename_relation(self, *args, **kwargs):
         def get_columns_in_relation(self, *args, **kwargs):
             return self.responder.get_columns_in_relation(*args, **kwargs)
 
+        def get_catalog_for_single_relation(self, *args, **kwargs):
+            return self.responder.get_catalog_for_single_relation(*args, **kwargs)
+
         def expand_column_types(self, *args, **kwargs):
             return self.responder.expand_column_types(*args, **kwargs)
 
diff --git a/tests/unit/test_connections.py b/tests/unit/test_connections.py
index 555091c57..fb9c57615 100644
--- a/tests/unit/test_connections.py
+++ b/tests/unit/test_connections.py
@@ -1,6 +1,9 @@
 import os
+import pytest
 from importlib import reload
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
+import multiprocessing
+from dbt.adapters.exceptions.connection import FailedToConnectError
 import dbt.adapters.snowflake.connections as connections
 import dbt.adapters.events.logging
 
@@ -23,3 +26,44 @@ def test_connections_does_not_set_logs_in_response_to_env_var(monkeypatch):
 
     assert log_mock.debug.call_count == 0
     assert log_mock.set_adapter_dependency_log_level.call_count == 0
+
+
+def test_connections_credentials_replaces_underscores_with_hyphens():
+    credentials = {
+        "account": "account_id_with_underscores",
+        "user": "user",
+        "password": "password",
+        "database": "database",
+        "warehouse": "warehouse",
+        "schema": "schema",
+    }
+    creds = connections.SnowflakeCredentials(**credentials)
+    assert creds.account == "account-id-with-underscores"
+
+
+def test_snowflake_oauth_expired_token_raises_error():
+    credentials = {
+        "account": "test_account",
+        "user": "test_user",
+        "authenticator": "oauth",
+        "token": "expired_or_incorrect_token",
+        "database": "database",
+        "schema": "schema",
+    }
+
+    mp_context = multiprocessing.get_context("spawn")
+    mock_credentials = connections.SnowflakeCredentials(**credentials)
+
+    with patch.object(
+        connections.SnowflakeConnectionManager,
+        "open",
+        side_effect=FailedToConnectError(
+            "This error occurs when authentication has expired. "
+            "Please reauth with your auth provider."
+        ),
+    ):
+        adapter = connections.SnowflakeConnectionManager(mock_credentials, mp_context)
+
+        with pytest.raises(FailedToConnectError):
+            adapter.open()
diff --git a/tests/unit/test_private_keys.py b/tests/unit/test_private_keys.py
new file mode 100644
index 000000000..59b8522d2
--- /dev/null
+++ b/tests/unit/test_private_keys.py
@@ -0,0 +1,61 @@
+import os
+import tempfile
+from typing import Generator
+
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+import pytest
+
+from dbt.adapters.snowflake.auth import private_key_from_file, private_key_from_string
+
+
+PASSPHRASE = "password1234"
+
+
+def serialize(private_key: rsa.RSAPrivateKey) -> bytes:
+    return private_key.private_bytes(
+        serialization.Encoding.DER,
+        serialization.PrivateFormat.PKCS8,
+        serialization.NoEncryption(),
+    )
+
+
+@pytest.fixture(scope="session")
+def private_key() -> rsa.RSAPrivateKey:
+    return rsa.generate_private_key(public_exponent=65537, key_size=2048)
+
+
+@pytest.fixture(scope="session")
+def private_key_string(private_key) -> str:
+    private_key_bytes = private_key.private_bytes(
+        encoding=serialization.Encoding.PEM,
+        format=serialization.PrivateFormat.PKCS8,
+        encryption_algorithm=serialization.BestAvailableEncryption(PASSPHRASE.encode()),
+    )
+    return private_key_bytes.decode("utf-8")
+
+
+@pytest.fixture(scope="session")
+def private_key_file(private_key) -> Generator[str, None, None]:
+    private_key_bytes = private_key.private_bytes(
+        encoding=serialization.Encoding.PEM,
+        format=serialization.PrivateFormat.PKCS8,
+        encryption_algorithm=serialization.BestAvailableEncryption(PASSPHRASE.encode()),
+    )
+    file = tempfile.NamedTemporaryFile()
+    file.write(private_key_bytes)
+    file.seek(0)
+    yield file.name
+    file.close()
+
+
+def test_private_key_from_string_pem(private_key_string, private_key):
+    assert isinstance(private_key_string, str)
+    calculated_private_key = private_key_from_string(private_key_string, PASSPHRASE)
+    assert serialize(calculated_private_key) == serialize(private_key)
+
+
+def test_private_key_from_file(private_key_file, private_key):
+    assert os.path.exists(private_key_file)
+    calculated_private_key = private_key_from_file(private_key_file, PASSPHRASE)
+    assert serialize(calculated_private_key) == serialize(private_key)
diff --git a/tests/unit/test_relation_as_case_sensitive.py b/tests/unit/test_relation_as_case_sensitive.py
new file mode 100644
index 000000000..f362d66b3
--- /dev/null
+++ b/tests/unit/test_relation_as_case_sensitive.py
@@ -0,0 +1,19 @@
+from dbt.adapters.snowflake.relation import SnowflakeRelation
+from dbt.adapters.snowflake.relation_configs import SnowflakeQuotePolicy
+
+
+def test_relation_as_case_sensitive_quoting_true():
+    relation = SnowflakeRelation.create(
+        database="My_Db",
+        schema="My_ScHeMa",
+        identifier="My_TaBlE",
+        quote_policy=SnowflakeQuotePolicy(database=False, schema=True, identifier=False),
+    )
+
+    case_sensitive_relation = relation.as_case_sensitive()
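+    # result intentionally unused; presumably exercising render_limited() to
+    # confirm it does not raise for a case-sensitive relation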
+    case_sensitive_relation.render_limited()
+
+    assert case_sensitive_relation.database == "MY_DB"
+    assert case_sensitive_relation.schema == "My_ScHeMa"
+    assert case_sensitive_relation.identifier == "MY_TABLE"
+    assert case_sensitive_relation.render() == 'MY_DB."My_ScHeMa".MY_TABLE'
diff --git a/tests/unit/test_renamed_relations.py b/tests/unit/test_renamed_relations.py
new file mode 100644
index 000000000..315d5f343
--- /dev/null
+++ b/tests/unit/test_renamed_relations.py
@@ -0,0 +1,17 @@
+from dbt.adapters.snowflake.relation import SnowflakeRelation
+from dbt.adapters.snowflake.relation_configs import SnowflakeRelationType
+
+
+def test_renameable_relation():
+    relation = SnowflakeRelation.create(
+        database="my_db",
+        schema="my_schema",
+        identifier="my_table",
+        type=SnowflakeRelationType.Table,
+    )
+    assert relation.renameable_relations == frozenset(
+        {
+            SnowflakeRelationType.Table,
+            SnowflakeRelationType.View,
+        }
+    )
diff --git a/tests/unit/test_snowflake_adapter.py b/tests/unit/test_snowflake_adapter.py
index a9f8f3572..32e73eb45 100644
--- a/tests/unit/test_snowflake_adapter.py
+++ b/tests/unit/test_snowflake_adapter.py
@@ -10,7 +10,7 @@
 from dbt.adapters.snowflake.column import SnowflakeColumn
 from dbt.adapters.snowflake.connections import SnowflakeCredentials
 from dbt.contracts.files import FileHash
-from dbt.context.manifest import generate_query_header_context
+from dbt.context.query_header import generate_query_header_context
 from dbt.context.providers import generate_runtime_macro_context
 from dbt.contracts.graph.manifest import ManifestStateCheck
 from dbt_common.clients import agate_helper
@@ -278,7 +278,7 @@ def test_client_session_keep_alive_false_by_default(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -290,13 +290,19 @@ def test_client_session_keep_alive_false_by_default(self):
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 ),
             ]
         )
 
     def test_client_session_keep_alive_true(self):
-        self.config.credentials = self.config.credentials.replace(client_session_keep_alive=True)
+        self.config.credentials = self.config.credentials.replace(
+            client_session_keep_alive=True,
+            # `reuse_connections` is defaulted via `__post_init__` when `client_session_keep_alive`
+            # comes in as `False`; when `replace` is called, `__post_init__` cannot set it back to
+            # `None` because it cannot tell a user-set value apart from its own default
+            reuse_connections=None,
+        )
         self.adapter = SnowflakeAdapter(self.config, get_context("spawn"))
         conn = self.adapter.connections.set_connection_name(name="new_connection_with_new_config")
 
@@ -305,7 +311,7 @@ def test_client_session_keep_alive_true(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=True,
                     database="test_database",
@@ -332,14 +338,14 @@ def test_client_has_query_tag(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
                     role=None,
                     schema="public",
                     user="test_user",
-                    reuse_connections=None,
+                    reuse_connections=True,
                     warehouse="test_warehouse",
                     private_key=None,
                     application="dbt",
@@ -366,7 +372,7 @@ def test_user_pass_authentication(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -379,7 +385,7 @@ def test_user_pass_authentication(self):
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -397,7 +403,7 @@ def test_authenticator_user_pass_authentication(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -413,7 +419,7 @@ def test_authenticator_user_pass_authentication(self):
                     client_store_temporary_credential=True,
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -428,7 +434,7 @@ def test_authenticator_externalbrowser_authentication(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -443,7 +449,7 @@ def test_authenticator_externalbrowser_authentication(self):
                     client_store_temporary_credential=True,
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -461,7 +467,7 @@ def test_authenticator_oauth_authentication(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -477,7 +483,7 @@ def test_authenticator_oauth_authentication(self):
                     client_store_temporary_credential=True,
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -499,7 +505,7 @@ def test_authenticator_private_key_authentication(self, mock_get_private_key):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -511,7 +517,7 @@ def test_authenticator_private_key_authentication(self, mock_get_private_key):
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -533,7 +539,7 @@ def test_authenticator_private_key_authentication_no_passphrase(self, mock_get_p
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -545,7 +551,39 @@ def test_authenticator_private_key_authentication_no_passphrase(self, mock_get_p
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
+                )
+            ]
+        )
+
+    def test_authenticator_jwt_authentication(self):
+        self.config.credentials = self.config.credentials.replace(
+            authenticator="jwt", token="my-jwt-token", user=None
+        )
+        self.adapter = SnowflakeAdapter(self.config, get_context("spawn"))
+        conn = self.adapter.connections.set_connection_name(name="new_connection_with_new_config")
+
+        self.snowflake.assert_not_called()
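+        # accessing the handle lazily opens the connection, triggering the mocked connect call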
+        conn.handle
+        self.snowflake.assert_has_calls(
+            [
+                mock.call(
+                    account="test-account",
+                    autocommit=True,
+                    client_session_keep_alive=False,
+                    database="test_database",
+                    role=None,
+                    schema="public",
+                    warehouse="test_warehouse",
+                    authenticator="oauth",
+                    token="my-jwt-token",
+                    private_key=None,
+                    application="dbt",
+                    client_request_mfa_token=True,
+                    client_store_temporary_credential=True,
+                    insecure_mode=False,
+                    session_parameters={},
+                    reuse_connections=True,
                 )
             ]
         )
@@ -562,7 +600,7 @@ def test_query_tag(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -575,7 +613,7 @@ def test_query_tag(self):
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={"QUERY_TAG": "test_query_tag"},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -592,7 +630,7 @@ def test_reuse_connections_with_keep_alive(self):
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=True,
                     database="test_database",
@@ -626,7 +664,7 @@ def test_authenticator_private_key_string_authentication(self, mock_get_private_
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -638,7 +676,7 @@ def test_authenticator_private_key_string_authentication(self, mock_get_private_
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -662,7 +700,7 @@ def test_authenticator_private_key_string_authentication_no_passphrase(
         self.snowflake.assert_has_calls(
             [
                 mock.call(
-                    account="test_account",
+                    account="test-account",
                     autocommit=True,
                     client_session_keep_alive=False,
                     database="test_database",
@@ -674,7 +712,7 @@ def test_authenticator_private_key_string_authentication_no_passphrase(
                     application="dbt",
                     insecure_mode=False,
                     session_parameters={},
-                    reuse_connections=None,
+                    reuse_connections=True,
                 )
             ]
         )
@@ -907,7 +945,7 @@ class TestSnowflakeAdapterCredentials(unittest.TestCase):
 
     def test_private_key_string(self):
         creds = SnowflakeCredentials(
-            account="test_account",
+            account="test-account",
             user="test_user",
             database="test_database",
             schema="public",
@@ -917,7 +955,7 @@ def test_private_key_string(self):
 
     def test_private_key_string_encrypted(self):
         creds = SnowflakeCredentials(
-            account="test_account",
+            account="test-account",
             user="test_user",
             database="test_database",
             schema="public",
@@ -928,7 +966,7 @@ def test_private_key_string_encrypted(self):
 
     def test_malformed_private_key_string(self):
         creds = SnowflakeCredentials(
-            account="test_account",
+            account="test-account",
             user="test_user",
             database="test_database",
             schema="public",
@@ -938,7 +976,7 @@ def test_malformed_private_key_string(self):
 
     def test_invalid_private_key_string(self):
         creds = SnowflakeCredentials(
-            account="test_account",
+            account="test-account",
             user="test_user",
             database="test_database",
             schema="public",
@@ -948,7 +986,7 @@ def test_invalid_private_key_string(self):
 
     def test_invalid_private_key_path(self):
         creds = SnowflakeCredentials(
-            account="test_account",
+            account="test-account",
             user="test_user",
             database="test_database",
             schema="public",
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
index b2b308185..c7ec66bf4 100644
--- a/tests/unit/utils.py
+++ b/tests/unit/utils.py
@@ -2,6 +2,7 @@
 Note that all imports should be inside the functions to avoid import/mocking
 issues.
 """
+
 import string
 import os
 from unittest import mock
diff --git a/tox.ini b/tox.ini
index 4697044da..d6f040a61 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,8 +1,8 @@
 [tox]
 skipsdist = True
-envlist = py38,py39,py310,py311
+envlist = py38,py39,py310,py311,py312
 
-[testenv:{unit,py38,py39,py310,py311,py}]
+[testenv:{unit,py38,py39,py310,py311,py312,py}]
 description = unit testing
 skip_install = true
 passenv =
@@ -13,7 +13,7 @@ deps =
   -rdev-requirements.txt
   -e.
 
-[testenv:{integration,py38,py39,py310,py311,py}-{snowflake}]
+[testenv:{integration,py38,py39,py310,py311,py312,py}-{snowflake}]
 description = adapter plugin integration testing
 skip_install = true
 passenv =