diff --git a/.github/workflows/integration-tests-azure.yml b/.github/workflows/integration-tests-azure.yml index 8d40135..0691777 100644 --- a/.github/workflows/integration-tests-azure.yml +++ b/.github/workflows/integration-tests-azure.yml @@ -11,11 +11,12 @@ jobs: name: Regular strategy: fail-fast: false + max-parallel: 1 matrix: - python_version: ["3.8", "3.9", "3.10", "3.11"] profile: ["ci_azure_auto"] + python_version: ["3.11"] msodbc_version: ["17", "18"] - max-parallel: 1 + runs-on: ubuntu-latest container: image: ghcr.io/${{ github.repository }}:CI-${{ matrix.python_version }}-msodbc${{ matrix.msodbc_version }} diff --git a/dbt/adapters/fabric/__version__.py b/dbt/adapters/fabric/__version__.py index cb9f516..fd11d27 100644 --- a/dbt/adapters/fabric/__version__.py +++ b/dbt/adapters/fabric/__version__.py @@ -1 +1 @@ -version = "1.8.5" +version = "1.8.6" diff --git a/dbt/adapters/fabric/fabric_connection_manager.py b/dbt/adapters/fabric/fabric_connection_manager.py index 9719d84..f4130fa 100644 --- a/dbt/adapters/fabric/fabric_connection_manager.py +++ b/dbt/adapters/fabric/fabric_connection_manager.py @@ -323,6 +323,12 @@ def open(cls, connection: Connection) -> Connection: con_str.append(f"Database={credentials.database}") + #Enabling trace flag + if credentials.trace_flag: + con_str.append("SQL_ATTR_TRACE=SQL_OPT_TRACE_ON") + else: + con_str.append("SQL_ATTR_TRACE=SQL_OPT_TRACE_OFF") + assert credentials.authentication is not None if "ActiveDirectory" in credentials.authentication: diff --git a/dbt/adapters/fabric/fabric_credentials.py b/dbt/adapters/fabric/fabric_credentials.py index dec36fa..a824fac 100644 --- a/dbt/adapters/fabric/fabric_credentials.py +++ b/dbt/adapters/fabric/fabric_credentials.py @@ -13,6 +13,7 @@ class FabricCredentials(Credentials): UID: Optional[str] = None PWD: Optional[str] = None windows_login: Optional[bool] = False + trace_flag: Optional[bool] = False tenant_id: Optional[str] = None client_id: Optional[str] = None client_secret: Optional[str] = None @@ -36,6 +37,7 @@ class FabricCredentials(Credentials): "app_secret": "client_secret", "TrustServerCertificate": "trust_cert", "schema_auth": "schema_authorization", + "SQL_ATTR_TRACE": "trace_flag", } @property @@ -63,6 +65,7 @@ def _connection_keys(self): "retries", "login_timeout", "query_timeout", + "trace_flag", ) @property diff --git a/dbt/include/fabric/macros/adapters/columns.sql b/dbt/include/fabric/macros/adapters/columns.sql index db081ff..69f2ce8 100644 --- a/dbt/include/fabric/macros/adapters/columns.sql +++ b/dbt/include/fabric/macros/adapters/columns.sql @@ -54,8 +54,8 @@ {% macro fabric__alter_column_type(relation, column_name, new_column_type) %} - {%- set table_name= tmp_relation.include(database=False).include(schema=False)-%} - {%- set schema_name = tmp_relation.include(database=False).include(identifier=False) -%} + {%- set table_name= relation.identifier -%} + {%- set schema_name = relation.schema -%} {% set generate_tmp_relation_script %} SELECT TRIM(REPLACE(STRING_AGG(ColumnName + ' ', ',-'), '-', CHAR(10))) AS ColumnDef diff --git a/dbt/include/fabric/macros/adapters/metadata.sql b/dbt/include/fabric/macros/adapters/metadata.sql index bfea1ef..2fe6a58 100644 --- a/dbt/include/fabric/macros/adapters/metadata.sql +++ b/dbt/include/fabric/macros/adapters/metadata.sql @@ -9,6 +9,14 @@ information_schema {%- endmacro %} +{% macro get_use_database_sql(database) %} + {{ return(adapter.dispatch('get_use_database_sql', 'dbt')(database)) }} +{% endmacro %} + +{%- macro 
fabric__get_use_database_sql(database) -%} + USE [{{database}}]; +{%- endmacro -%} + {% macro fabric__list_schemas(database) %} {% call statement('list_schemas', fetch_result=True, auto_begin=False) -%} select name as [schema] @@ -27,7 +35,7 @@ {% macro fabric__list_relations_without_caching(schema_relation) -%} {% call statement('list_relations_without_caching', fetch_result=True) -%} - USE [{{ schema_relation.database }}]; + {{ get_use_database_sql(schema_relation.database) }} with base as ( select DB_NAME() as [database], @@ -51,7 +59,7 @@ {% macro fabric__get_relation_without_caching(schema_relation) -%} {% call statement('get_relation_without_caching', fetch_result=True) -%} - USE [{{ schema_relation.database }}]; + {{ get_use_database_sql(schema_relation.database) }} with base as ( select DB_NAME() as [database], diff --git a/dbt/include/fabric/macros/adapters/relation.sql b/dbt/include/fabric/macros/adapters/relation.sql index b8f607b..a6fe0f0 100644 --- a/dbt/include/fabric/macros/adapters/relation.sql +++ b/dbt/include/fabric/macros/adapters/relation.sql @@ -6,51 +6,44 @@ {{ return(temp_relation) }} {% endmacro %} -{% macro fabric__drop_relation(relation) -%} - {% call statement('drop_relation', auto_begin=False) -%} - {{ fabric__drop_relation_script(relation) }} - {%- endcall %} -{% endmacro %} - -{% macro fabric__drop_relation_script(relation) -%} - - {% if relation.type == 'view' -%} +{% macro fabric__get_drop_sql(relation) -%} + {% if relation.type == 'view' -%} {% call statement('find_references', fetch_result=true) %} - USE [{{ relation.database }}]; - select - sch.name as schema_name, - obj.name as view_name - from sys.sql_expression_dependencies refs - inner join sys.objects obj - on refs.referencing_id = obj.object_id - inner join sys.schemas sch - on obj.schema_id = sch.schema_id - where refs.referenced_database_name = '{{ relation.database }}' - and refs.referenced_schema_name = '{{ relation.schema }}' - and refs.referenced_entity_name = '{{ relation.identifier }}' - and refs.referencing_class = 1 - and obj.type = 'V' + {{ get_use_database_sql(relation.database) }} + select + sch.name as schema_name, + obj.name as view_name + from sys.sql_expression_dependencies refs + inner join sys.objects obj + on refs.referencing_id = obj.object_id + inner join sys.schemas sch + on obj.schema_id = sch.schema_id + where refs.referenced_database_name = '{{ relation.database }}' + and refs.referenced_schema_name = '{{ relation.schema }}' + and refs.referenced_entity_name = '{{ relation.identifier }}' + and refs.referencing_class = 1 + and obj.type = 'V' {% endcall %} {% set references = load_result('find_references')['data'] %} {% for reference in references -%} - -- dropping referenced view {{ reference[0] }}.{{ reference[1] }} - {{ fabric__drop_relation_script(relation.incorporate( - type="view", - path={"schema": reference[0], "identifier": reference[1]})) }} + -- dropping referenced view {{ reference[0] }}.{{ reference[1] }} + {% do adapter.drop_relation + (api.Relation.create( + identifier = reference[1], schema = reference[0], database = relation.database, type='view' + ))%} {% endfor %} {% elif relation.type == 'table'%} {% set object_id_type = 'U' %} - {%- else -%} {{ exceptions.raise_not_implemented('Invalid relation being dropped: ' ~ relation) }} {% endif %} - USE [{{ relation.database }}]; + {{ get_use_database_sql(relation.database) }} EXEC('DROP {{ relation.type }} IF EXISTS {{ relation.include(database=False) }};'); {% endmacro %} {% macro 
fabric__rename_relation(from_relation, to_relation) -%} {% call statement('rename_relation') -%} - USE [{{ from_relation.database }}]; + {{ get_use_database_sql(from_relation.database) }} EXEC sp_rename '{{ from_relation.schema }}.{{ from_relation.identifier }}', '{{ to_relation.identifier }}' {%- endcall %} {% endmacro %} diff --git a/dbt/include/fabric/macros/adapters/schema.sql b/dbt/include/fabric/macros/adapters/schema.sql index 7fa1083..a117e47 100644 --- a/dbt/include/fabric/macros/adapters/schema.sql +++ b/dbt/include/fabric/macros/adapters/schema.sql @@ -10,7 +10,7 @@ {% macro fabric__create_schema_with_authorization(relation, schema_authorization) -%} {% call statement('create_schema') -%} - USE [{{ relation.database }}]; + {{ get_use_database_sql(relation.database) }} IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ relation.schema }}') BEGIN EXEC('CREATE SCHEMA [{{ relation.schema }}] AUTHORIZATION [{{ schema_authorization }}]') @@ -27,7 +27,7 @@ identifier=row[1], type=row[3] ) -%} - {% do drop_relation(schema_relation) %} + {% do adapter.drop_relation(schema_relation) %} {%- endfor %} {% call statement('drop_schema') -%} diff --git a/dbt/include/fabric/macros/materializations/models/incremental/incremental.sql b/dbt/include/fabric/macros/materializations/models/incremental/incremental.sql index 797f09a..bd045df 100644 --- a/dbt/include/fabric/macros/materializations/models/incremental/incremental.sql +++ b/dbt/include/fabric/macros/materializations/models/incremental/incremental.sql @@ -1,23 +1,16 @@ - {% materialization incremental, adapter='fabric' -%} {%- set full_refresh_mode = (should_full_refresh()) -%} {% set target_relation = this.incorporate(type='table') %} - {%- set relations_list = fabric__get_relation_without_caching(target_relation) -%} + {%- set relation = load_cached_relation(this) -%} {%- set existing_relation = none %} - {% if (relations_list|length == 1) and (relations_list[0][2] == target_relation.schema) - and (relations_list[0][1] == target_relation.identifier) and (relations_list[0][3] == target_relation.type)%} + {% if relation.type == 'table' %} {% set existing_relation = target_relation %} - {% elif (relations_list|length == 1) and (relations_list[0][2] == target_relation.schema) - and (relations_list[0][1] == target_relation.identifier) and (relations_list[0][3] != target_relation.type) %} - {% set existing_relation = get_or_create_relation(relations_list[0][0], relations_list[0][2] , relations_list[0][1] , relations_list[0][3])[1] %} + {% elif relation.type == 'view' %} + {% set existing_relation = get_or_create_relation(relation.database, relation.schema, relation.identifier, relation.type)[1] %} {% endif %} - {{ log("Full refresh mode" ~ full_refresh_mode)}} - {{ log("existing relation : "~existing_relation ~ " type "~ existing_relation.type ~ " is view? "~existing_relation.is_view) }} - {{ log("target relation: " ~target_relation ~ " type "~ target_relation.type ~ " is view? 
"~target_relation.is_view) }} - -- configs {%- set unique_key = config.get('unique_key') -%} {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} @@ -28,35 +21,39 @@ {{ run_hooks(pre_hooks, inside_transaction=True) }} - {% if existing_relation is none %} + -- naming a temp relation + {% set tmp_relation_view = target_relation.incorporate(path={"identifier": target_relation.identifier ~ '__dbt_tmp_vw'}, type='view')-%} + -- Fabric & Synapse adapters use temp relation because of lack of CTE support for CTE in CTAS, Insert + -- drop temp relation if exists + {% do adapter.drop_relation(tmp_relation_view) %} + + {% if existing_relation is none %} {%- call statement('main') -%} - {{ fabric__create_table_as(False, target_relation, sql)}} + {{ get_create_table_as_sql(False, target_relation, sql)}} {%- endcall -%} {% elif existing_relation.is_view %} - {#-- Can't overwrite a view with a table - we must drop --#} {{ log("Dropping relation " ~ target_relation ~ " because it is a view and this model is a table.") }} - {{ drop_relation_if_exists(existing_relation) }} + {% do adapter.drop_relation(existing_relation) %} + {%- call statement('main') -%} - {{ fabric__create_table_as(False, target_relation, sql)}} + {{ get_create_table_as_sql(False, target_relation, sql)}} {%- endcall -%} {% elif full_refresh_mode %} - {%- call statement('main') -%} - {{ fabric__create_table_as(False, target_relation, sql)}} + {{ get_create_table_as_sql(False, target_relation, sql)}} {%- endcall -%} {% else %} - {%- call statement('create_tmp_relation') -%} - {{ fabric__create_table_as(True, temp_relation, sql)}} + {{ get_create_table_as_sql(True, temp_relation, sql)}} {%- endcall -%} {% do adapter.expand_target_column_types( - from_relation=temp_relation, - to_relation=target_relation) %} + from_relation=temp_relation, + to_relation=target_relation) %} {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#} {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %} {% if not dest_columns %} @@ -72,14 +69,12 @@ {%- endcall -%} {% endif %} - {% do drop_relation_if_exists(temp_relation) %} + {% do adapter.drop_relation(tmp_relation_view) %} + {% do adapter.drop_relation(temp_relation) %} {{ run_hooks(post_hooks, inside_transaction=True) }} - {% set target_relation = target_relation.incorporate(type='table') %} - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - {% do persist_docs(target_relation, model) %} {% do adapter.commit() %} {{ return({'relations': [target_relation]}) }} diff --git a/dbt/include/fabric/macros/materializations/models/table/clone.sql b/dbt/include/fabric/macros/materializations/models/table/clone.sql index 110dd03..b552e45 100644 --- a/dbt/include/fabric/macros/materializations/models/table/clone.sql +++ b/dbt/include/fabric/macros/materializations/models/table/clone.sql @@ -24,7 +24,7 @@ {%- set target_relation = this.incorporate(type='table') -%} {% call statement('main') %} - {{ fabric__drop_relation_script(target_relation) }} + {% do adapter.drop_relation(target_relation) %} {{ create_or_replace_clone(target_relation, defer_relation) }} {% endcall %} {{ return({'relations': [target_relation]}) }} diff --git a/dbt/include/fabric/macros/materializations/models/table/columns_spec_ddl.sql b/dbt/include/fabric/macros/materializations/models/table/columns_spec_ddl.sql index 83188c9..e8307d8 100644 --- a/dbt/include/fabric/macros/materializations/models/table/columns_spec_ddl.sql +++ b/dbt/include/fabric/macros/materializations/models/table/columns_spec_ddl.sql @@ -1,3 +1,7 @@ +{% macro build_columns_constraints(relation) %} + {{ return(adapter.dispatch('build_columns_constraints', 'dbt')(relation)) }} +{% endmacro %} + {% macro fabric__build_columns_constraints(relation) %} {# loop through user_provided_columns to create DDL with data types and constraints #} {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%} @@ -8,6 +12,10 @@ ) {% endmacro %} +{% macro build_model_constraints(relation) %} + {{ return(adapter.dispatch('build_model_constraints', 'dbt')(relation)) }} +{% endmacro %} + {% macro fabric__build_model_constraints(relation) %} {# loop through user_provided_columns to create DDL with data types and constraints #} {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%} diff --git a/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql b/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql index 2ec9013..00c84bd 100644 --- a/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql +++ b/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql @@ -1,19 +1,14 @@ {% macro fabric__create_table_as(temporary, relation, sql) -%} - {% set tmp_relation = relation.incorporate( - path={"identifier": relation.identifier.replace("#", "") ~ '_temp_view'}, - type='view')-%} - {% do run_query(fabric__drop_relation_script(tmp_relation)) %} + {% set tmp_relation = relation.incorporate(path={"identifier": relation.identifier ~ '__dbt_tmp_vw'}, type='view')-%} + {{ get_create_view_as_sql(tmp_relation, sql) }} - {% set contract_config = config.get('contract') %} - - {{ fabric__create_view_as(tmp_relation, 
sql) }} + {% set contract_config = config.get('contract') %} {% if contract_config.enforced %} CREATE TABLE [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] - {{ fabric__build_columns_constraints(relation) }} + {{ build_columns_constraints(relation) }} {{ get_assert_columns_equivalent(sql) }} - {% set listColumns %} {% for column in model['columns'] %} {{ "["~column~"]" }}{{ ", " if not loop.last }} @@ -24,9 +19,7 @@ ({{listColumns}}) SELECT {{listColumns}} FROM [{{tmp_relation.database}}].[{{tmp_relation.schema}}].[{{tmp_relation.identifier}}]; {%- else %} - EXEC('CREATE TABLE [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] AS (SELECT * FROM [{{tmp_relation.database}}].[{{tmp_relation.schema}}].[{{tmp_relation.identifier}}]);'); + EXEC('CREATE TABLE [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] AS (SELECT * FROM [{{tmp_relation.database}}].[{{tmp_relation.schema}}].[{{tmp_relation.identifier}}]);'); {% endif %} - - {{ fabric__drop_relation_script(tmp_relation) }} - + {% do adapter.drop_relation(tmp_relation)%} {% endmacro %} diff --git a/dbt/include/fabric/macros/materializations/models/table/table.sql b/dbt/include/fabric/macros/materializations/models/table/table.sql index 24f71f3..88f1df9 100644 --- a/dbt/include/fabric/macros/materializations/models/table/table.sql +++ b/dbt/include/fabric/macros/materializations/models/table/table.sql @@ -2,16 +2,9 @@ -- Load target relation {%- set target_relation = this.incorporate(type='table') %} - -- Load existing relation - {%- set relation = fabric__get_relation_without_caching(this) %} - - {% set existing_relation = none %} - {% if (relation|length == 1) %} - {% set existing_relation = get_or_create_relation(relation[0][0], relation[0][2] , relation[0][1] , relation[0][3])[1] %} - {% endif %} + {%- set existing_relation = adapter.get_relation(database=this.database, schema=this.schema, identifier=this.identifier) -%} {%- set backup_relation = none %} - {{log("Existing Relation type is "~ existing_relation.type)}} {% if (existing_relation != none and existing_relation.type == "table") %} {%- set backup_relation = make_backup_relation(target_relation, 'table') -%} {% elif (existing_relation != none and existing_relation.type == "view") %} @@ -20,7 +13,7 @@ {% if (existing_relation != none) %} -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(backup_relation) }} + {% do adapter.drop_relation(backup_relation) %} -- Rename target relation as backup relation {{ adapter.rename_relation(existing_relation, backup_relation) }} {% endif %} @@ -32,13 +25,24 @@ -- `BEGIN` happens here: {{ run_hooks(pre_hooks, inside_transaction=True) }} + -- naming a temp relation + {% set tmp_relation = target_relation.incorporate(path={"identifier": target_relation.identifier ~ '__dbt_tmp_vw'}, type='view')-%} + + -- Fabric & Synapse adapters use temp relation because of lack of CTE support for CTE in CTAS, Insert + -- drop temp relation if exists + {% do adapter.drop_relation(tmp_relation) %} + -- build model {% call statement('main') -%} {{ get_create_table_as_sql(False, target_relation, sql) }} {%- endcall %} + -- drop temp relation if exists + {% do adapter.drop_relation(tmp_relation) %} + -- cleanup {{ run_hooks(post_hooks, inside_transaction=True) }} + {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} {% do persist_docs(target_relation, model) %} -- `COMMIT` happens here @@ -48,10 +52,11 @@ -- finally, drop the 
foreign key references if exists {{ drop_fk_indexes_on_table(backup_relation) }} -- drop existing/backup relation after the commit - {{ drop_relation_if_exists(backup_relation) }} + {% do adapter.drop_relation(backup_relation) %} {% endif %} + -- Add constraints including FK relation. - {{ fabric__build_model_constraints(target_relation) }} + {{ build_model_constraints(target_relation) }} {{ run_hooks(post_hooks, inside_transaction=False) }} {{ return({'relations': [target_relation]}) }} diff --git a/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql b/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql index cfb610f..f0ec12d 100644 --- a/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql +++ b/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql @@ -6,7 +6,7 @@ {%- set temp_view_sql = sql.replace("'", "''") -%} - USE [{{ relation.database }}]; + {{ get_use_database_sql(relation.database) }} {% set contract_config = config.get('contract') %} {% if contract_config.enforced %} {{ get_assert_columns_equivalent(sql) }} diff --git a/dbt/include/fabric/macros/materializations/models/view/view.sql b/dbt/include/fabric/macros/materializations/models/view/view.sql index 8576b2d..e4aa8dc 100644 --- a/dbt/include/fabric/macros/materializations/models/view/view.sql +++ b/dbt/include/fabric/macros/materializations/models/view/view.sql @@ -1,17 +1,9 @@ {% materialization view, adapter='fabric' -%} {%- set target_relation = this.incorporate(type='view') -%} - {{log("Target Relation "~target_relation)}} - - {%- set relation = fabric__get_relation_without_caching(this) %} - {% set existing_relation = none %} - {% if (relation|length == 1) %} - {% set existing_relation = get_or_create_relation(relation[0][0], relation[0][2] , relation[0][1] , relation[0][3])[1] %} - {% endif %} - {{log("Existing Relation "~existing_relation)}} + {%- set existing_relation = adapter.get_relation(database=this.database, schema=this.schema, identifier=this.identifier) -%} {%- set backup_relation = none %} - {{log("Existing Relation type is "~ existing_relation.type)}} {% if (existing_relation != none and existing_relation.type == "table") %} {%- set backup_relation = make_backup_relation(target_relation, 'table') -%} {% elif (existing_relation != none and existing_relation.type == "view") %} @@ -20,7 +12,7 @@ {% if (existing_relation != none) %} -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(backup_relation) }} + {% do adapter.drop_relation(backup_relation) %} -- Rename target relation as backup relation {{ adapter.rename_relation(existing_relation, backup_relation) }} {% endif %} @@ -43,7 +35,7 @@ {{ run_hooks(post_hooks, inside_transaction=True) }} {{ adapter.commit() }} {% if (backup_relation != none) %} - {{ drop_relation_if_exists(backup_relation) }} + {% do adapter.drop_relation(backup_relation) %} {% endif %} {{ run_hooks(post_hooks, inside_transaction=False) }} {{ return({'relations': [target_relation]}) }} diff --git a/dbt/include/fabric/macros/materializations/snapshots/helpers.sql b/dbt/include/fabric/macros/materializations/snapshots/helpers.sql index 58107d4..a37ae64 100644 --- a/dbt/include/fabric/macros/materializations/snapshots/helpers.sql +++ b/dbt/include/fabric/macros/materializations/snapshots/helpers.sql @@ -1,6 +1,6 @@ {% macro fabric__post_snapshot(staging_relation) %} -- Clean up the snapshot temp table - {% do drop_relation(staging_relation) %} + {% do 
drop_relation_if_exists(staging_relation) %} {% endmacro %} --Due to Alter not being supported, have to rely on this for temporarily @@ -190,9 +190,9 @@ {% macro build_snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} {% set temp_relation = make_temp_relation(target_relation) %} - {% set select = fabric__snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} + {% set select = snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} {% call statement('build_snapshot_staging_relation') %} - {{ create_table_as(True, temp_relation, select) }} + {{ get_create_table_as_sql(True, temp_relation, select) }} {% endcall %} {% do return(temp_relation) %} diff --git a/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql b/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql index 83e3f9b..bf874bb 100644 --- a/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql +++ b/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql @@ -39,7 +39,14 @@ {% if not target_relation_exists %} {% set build_sql = build_snapshot_table(strategy, temp_snapshot_relation) %} - {% set final_sql = create_table_as(False, target_relation, build_sql) %} + + -- naming a temp relation + {% set tmp_relation_view = target_relation.incorporate(path={"identifier": target_relation.identifier ~ '__dbt_tmp_vw'}, type='view')-%} + -- Fabric & Synapse adapters use temp relation because of lack of CTE support for CTE in CTAS, Insert + -- drop temp relation if exists + {% do adapter.drop_relation(tmp_relation_view) %} + {% set final_sql = get_create_table_as_sql(False, target_relation, build_sql) %} + {% do adapter.drop_relation(tmp_relation_view) %} {% else %} @@ -75,15 +82,13 @@ insert_cols = quoted_source_columns ) %} - {% endif %} {% call statement('main') %} {{ final_sql }} {% endcall %} - fabric__drop_relation_script(temp_snapshot_relation) - + {% do adapter.drop_relation(temp_snapshot_relation) %} {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %} {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} @@ -94,7 +99,6 @@ {% endif %} {{ run_hooks(post_hooks, inside_transaction=True) }} - {{ adapter.commit() }} {% if staging_table is defined %} @@ -102,7 +106,6 @@ {% endif %} {{ run_hooks(post_hooks, inside_transaction=False) }} - {{ return({'relations': [target_relation]}) }} {% endmaterialization %} diff --git a/dbt/include/fabric/macros/materializations/tests/helpers.sql b/dbt/include/fabric/macros/materializations/tests/helpers.sql index ad8c4d8..9e4b057 100644 --- a/dbt/include/fabric/macros/materializations/tests/helpers.sql +++ b/dbt/include/fabric/macros/materializations/tests/helpers.sql @@ -1,12 +1,18 @@ {% macro fabric__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} + -- Create target schema if it does not + USE [{{ target.database }}]; + IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ target.schema }}') + BEGIN + EXEC('CREATE SCHEMA [{{ target.schema }}]') + END + {% if main_sql.strip().lower().startswith('with') %} {% set testview %} - {{ config.get('schema') }}.testview_{{ range(1300, 19000) | random }} + {{ target.schema }}.testview_{{ range(1300, 19000) | random }} {% endset %} {% set sql = main_sql.replace("'", "''")%} - EXEC('create view {{testview}} as {{ sql }};') select {{ "top (" ~ limit ~ ')' if limit != none }} diff --git a/tests/conftest.py b/tests/conftest.py index 1e3670c..3e60ce0 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -55,6 +55,7 @@ def _profile_ci_azure_base(): "database": os.getenv("DBT_AZURESQL_DB"), "encrypt": True, "trust_cert": True, + "trace_flag":False, }, } diff --git a/tests/functional/adapter/test_aliases.py b/tests/functional/adapter/test_aliases.py index f5d43b0..b426258 100644 --- a/tests/functional/adapter/test_aliases.py +++ b/tests/functional/adapter/test_aliases.py @@ -8,7 +8,6 @@ ) -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestAliasesFabric(BaseAliases): @pytest.fixture(scope="class") def macros(self): @@ -21,14 +20,12 @@ def macros(self): return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} -@pytest.mark.skip(reason="Test audit tables are using CTAS on View without a table definition.") class TestSameAliasDifferentSchemasFabric(BaseSameAliasDifferentSchemas): @pytest.fixture(scope="class") def macros(self): return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} -@pytest.mark.skip(reason="Test audit tables are using CTAS on View without a table definition.") class TestSameAliasDifferentDatabasesFabric(BaseSameAliasDifferentDatabases): @pytest.fixture(scope="class") def macros(self): diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py index 65a1b49..ad833ca 100644 --- a/tests/functional/adapter/test_basic.py +++ b/tests/functional/adapter/test_basic.py @@ -19,12 +19,11 @@ class TestSimpleMaterializations(BaseSimpleMaterializations): pass -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestSingularTestsFabric(BaseSingularTests): pass -@pytest.mark.skip(reason="ephemeral not supported") +@pytest.mark.skip(reason="Nested CTE is not supported") class TestSingularTestsEphemeralFabric(BaseSingularTestsEphemeral): pass @@ -41,8 +40,6 @@ class TestIncrementalFabric(BaseIncremental): pass -# Modified incremental_not_schema_change.sql file to handle DATETIME compatibility issues. -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestIncrementalNotSchemaChangeFabric(BaseIncrementalNotSchemaChange): @pytest.fixture(scope="class") def models(self): @@ -73,7 +70,6 @@ class TestSnapshotTimestampFabric(BaseSnapshotTimestamp): pass -# Assertion Failed. class TestBaseCachingFabric(BaseAdapterMethod): pass diff --git a/tests/functional/adapter/test_caching.py b/tests/functional/adapter/test_caching.py index 11a9c4d..186ae69 100644 --- a/tests/functional/adapter/test_caching.py +++ b/tests/functional/adapter/test_caching.py @@ -1,107 +1,10 @@ import pytest -from dbt.tests.util import run_dbt - -model_sql = """ -{{ - config( - materialized='table' - ) -}} -select 1 as id -""" - -another_schema_model_sql = """ -{{ - config( - materialized='table', - schema='another_schema' - ) -}} -select 1 as id -""" - - -class BaseCachingTest: - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "quoting": { - "identifier": False, - "schema": False, - }, - } - - def run_and_inspect_cache(self, project, run_args=None): - run_dbt(run_args) - - # the cache was empty at the start of the run. - # the model materialization returned an unquoted relation and added to the cache. - adapter = project.adapter - assert len(adapter.cache.relations) == 1 - relation = list(adapter.cache.relations).pop() - assert relation.schema == project.test_schema - assert relation.schema == project.test_schema.lower() - - # on the second run, dbt will find a relation in the database during cache population. 
- # this relation will be quoted, because list_relations_without_caching (by default) uses - # quote_policy = {"database": True, "schema": True, "identifier": True} - # when adding relations to the cache. - run_dbt(run_args) - adapter = project.adapter - assert len(adapter.cache.relations) == 1 - second_relation = list(adapter.cache.relations).pop() - - # perform a case-insensitive + quote-insensitive comparison - for key in ["database", "schema", "identifier"]: - assert getattr(relation, key).lower() == getattr(second_relation, key).lower() - - def test_cache(self, project): - self.run_and_inspect_cache(project, run_args=["run"]) - - -class BaseCachingLowercaseModel(BaseCachingTest): - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": model_sql, - } - - -class BaseCachingUppercaseModel(BaseCachingTest): - @pytest.fixture(scope="class") - def models(self): - return { - "MODEL.sql": model_sql, - } - - -class BaseCachingSelectedSchemaOnly(BaseCachingTest): - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": model_sql, - "another_schema_model.sql": another_schema_model_sql, - } - - def test_cache(self, project): - # this should only cache the schema containing the selected model - run_args = ["--cache-selected-only", "run", "--select", "model"] - self.run_and_inspect_cache(project, run_args) - - -class TestNoPopulateCache(BaseCachingTest): - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": model_sql, - } - - def test_cache(self, project): - # --no-populate-cache still allows the cache to populate all relations - # under a schema, so the behavior here remains the same as other tests - run_args = ["--no-populate-cache", "run"] - self.run_and_inspect_cache(project, run_args) +from dbt.tests.adapter.caching.test_caching import ( + BaseCachingLowercaseModel, + BaseCachingSelectedSchemaOnly, + BaseCachingUppercaseModel, + BaseNoPopulateCache, +) class TestCachingLowerCaseModel(BaseCachingLowercaseModel): @@ -115,3 +18,7 @@ class TestCachingUppercaseModel(BaseCachingUppercaseModel): class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly): pass + + +class TestNoPopulateCache(BaseNoPopulateCache): + pass diff --git a/tests/functional/adapter/test_changing_relation_type.py b/tests/functional/adapter/test_changing_relation_type.py index b0b6bcd..82984fa 100644 --- a/tests/functional/adapter/test_changing_relation_type.py +++ b/tests/functional/adapter/test_changing_relation_type.py @@ -1,7 +1,5 @@ -import pytest from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator -@pytest.mark.skip(reason="CTAS is not supported without a underlying table definition.") class TestChangeRelationTypesFabric(BaseChangeRelationTypeValidator): pass diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 6ff5265..6759424 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -480,9 +480,11 @@ def models(self): @pytest.fixture(scope="class") def expected_sql(self): return """ -EXEC('create view as -- depends_on: select ''blue'' as color, 1 as id, ''2019-01-01'' as date_day;'); CREATE TABLE ( id int not null, color varchar(100), date_day varchar(100) ) INSERT INTO ( [id], [color], [date_day] ) SELECT [id], [color], [date_day] FROM EXEC('DROP view IF EXISTS +EXEC('create view as -- depends_on: select ''blue'' as color, 1 as id, ''2019-01-01'' as date_day;'); CREATE TABLE ( id int 
not null, color varchar(100), date_day varchar(100) ) INSERT INTO ( [id], [color], [date_day] ) SELECT [id], [color], [date_day] FROM """ + # EXEC('DROP view IF EXISTS + def test__constraints_ddl(self, project, expected_sql): unformatted_constraint_schema_yml = read_file("models", "constraints_schema.yml") write_file( @@ -501,7 +503,9 @@ def test__constraints_ddl(self, project, expected_sql): generated_sql_generic, "foreign_key_model", "" ) generated_sql_wodb = generated_sql_generic.replace("USE [" + project.database + "];", "") - assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_wodb) + assert _normalize_whitespace(expected_sql.strip()) == _normalize_whitespace( + generated_sql_wodb.strip() + ) class TestTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): @@ -542,9 +546,11 @@ def models(self): @pytest.fixture(scope="class") def expected_sql(self): return """ -EXEC('create view as -- depends_on: select ''blue'' as color, 1 as id, ''2019-01-01'' as date_day;'); CREATE TABLE ( id int not null, color varchar(100), date_day varchar(100) ) INSERT INTO ( [id], [color], [date_day] ) SELECT [id], [color], [date_day] FROM EXEC('DROP view IF EXISTS +EXEC('create view as -- depends_on: select ''blue'' as color, 1 as id, ''2019-01-01'' as date_day;'); CREATE TABLE ( id int not null, color varchar(100), date_day varchar(100) ) INSERT INTO ( [id], [color], [date_day] ) SELECT [id], [color], [date_day] FROM """ + # EXEC('DROP view IF EXISTS + def test__model_constraints_ddl(self, project, expected_sql): unformatted_constraint_schema_yml = read_file("models", "constraints_schema.yml") write_file( @@ -562,7 +568,9 @@ def test__model_constraints_ddl(self, project, expected_sql): generated_sql_generic, "foreign_key_model", "" ) generated_sql_wodb = generated_sql_generic.replace("USE [" + project.database + "];", "") - assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_wodb) + assert _normalize_whitespace(expected_sql.strip()) == _normalize_whitespace( + generated_sql_wodb.strip() + ) class TestModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): diff --git a/tests/functional/adapter/test_empty.py b/tests/functional/adapter/test_empty.py index 10906a1..8af58cb 100644 --- a/tests/functional/adapter/test_empty.py +++ b/tests/functional/adapter/test_empty.py @@ -1,5 +1,4 @@ # from dbt.tests.adapter.empty.test_empty import BaseTestEmpty - # class TestEmpty(BaseTestEmpty): # pass diff --git a/tests/functional/adapter/test_incremental.py b/tests/functional/adapter/test_incremental.py index da1676a..a572745 100644 --- a/tests/functional/adapter/test_incremental.py +++ b/tests/functional/adapter/test_incremental.py @@ -87,12 +87,13 @@ """ -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestBaseIncrementalUniqueKeyFabric(BaseIncrementalUniqueKey): pass -@pytest.mark.skip(reason="CTAS is not supported without a table.") +@pytest.mark.skip( + "The specified ALTER TABLE statement is not supported in this edition of SQL Server." 
+) class TestIncrementalOnSchemaChangeFabric(BaseIncrementalOnSchemaChange): @pytest.fixture(scope="class") def models(self): @@ -112,12 +113,10 @@ def models(self): } -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestIncrementalPredicatesDeleteInsertFabric(BaseIncrementalPredicates): pass -@pytest.mark.skip(reason="CTAS is not supported without a table.") class TestPredicatesDeleteInsertFabric(BaseIncrementalPredicates): @pytest.fixture(scope="class") def project_config_update(self): diff --git a/tests/functional/adapter/test_list_relations_without_caching.py b/tests/functional/adapter/test_list_relations_without_caching.py index 3c43761..5d8815a 100644 --- a/tests/functional/adapter/test_list_relations_without_caching.py +++ b/tests/functional/adapter/test_list_relations_without_caching.py @@ -108,8 +108,6 @@ def test__fabric__list_relations_without_caching(self, project): schemas = project.created_schemas for schema in schemas: - # schema_relation = BaseRelation.create(schema=schema, database=database) - # schema_relation = f"{database}.{schema}" kwargs = {"schema_relation": schema} _, log_output = run_dbt_and_capture( [ @@ -121,12 +119,6 @@ def test__fabric__list_relations_without_caching(self, project): str(kwargs), ] ) - - # parsed_logs = parse_json_logs(log_output) - # print(parsed_logs) - # n_relations = find_result_in_parsed_logs(parsed_logs, "n_relations") - - # assert n_relations == "n_relations: 1" assert "n_relations: 1" in log_output @@ -165,9 +157,4 @@ def test__fabric__list_relations_without_caching(self, project): str(kwargs), ] ) - - # parsed_logs = parse_json_logs(log_output) - # n_relations = find_result_in_parsed_logs(parsed_logs, "n_relations") - - # assert n_relations == f"n_relations: {NUM_EXPECTED_RELATIONS}" assert f"n_relations: {NUM_EXPECTED_RELATIONS}" in log_output diff --git a/tests/functional/adapter/test_query_comment.py b/tests/functional/adapter/test_query_comment.py index 7e4781a..98fc95b 100644 --- a/tests/functional/adapter/test_query_comment.py +++ b/tests/functional/adapter/test_query_comment.py @@ -10,7 +10,6 @@ ) -# Tests # class TestQueryComments(BaseQueryComments): pass diff --git a/tests/functional/adapter/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py index 1439ee5..6a2fdbf 100644 --- a/tests/functional/adapter/test_store_test_failures.py +++ b/tests/functional/adapter/test_store_test_failures.py @@ -1,34 +1,17 @@ import pytest -from dbt.tests.adapter.store_test_failures_tests import basic -from dbt.tests.adapter.store_test_failures_tests.fixtures import ( - models__file_model_but_with_a_no_good_very_long_name, - models__fine_model, - models__problematic_model, - properties__schema_yml, - seeds__expected_accepted_values, - seeds__expected_failing_test, - seeds__expected_not_null_problematic_model_id, - seeds__expected_unique_problematic_model_id, - seeds__people, - tests__failing_test, -) +from dbt.tests.adapter.store_test_failures_tests import basic, fixtures from dbt.tests.util import check_relations_equal, run_dbt -# from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( -# TestStoreTestFailures, -# ) - +# used to rename test audit schema to help test schema meet max char limit +# the default is _dbt_test__audit but this runs over the postgres 63 schema name char limit +# without which idempotency conditions will not hold (i.e. 
dbt can't drop the schema properly) +TEST_AUDIT_SCHEMA_SUFFIX = "dbt_test__aud" tests__passing_test = """ select * from {{ ref('fine_model') }} where 1=2 """ -# used to rename test audit schema to help test schema meet max char limit -# the default is _dbt_test__audit but this runs over the postgres 63 schema name char limit -# without which idempotency conditions will not hold (i.e. dbt can't drop the schema properly) -TEST_AUDIT_SCHEMA_SUFFIX = "dbt_test__aud" - class StoreTestFailuresBase: @pytest.fixture(scope="function", autouse=True) @@ -40,30 +23,30 @@ def setUp(self, project): @pytest.fixture(scope="class") def seeds(self): return { - "people.csv": seeds__people, - "expected_accepted_values.csv": seeds__expected_accepted_values, - "expected_failing_test.csv": seeds__expected_failing_test, - "expected_not_null_problematic_model_id.csv": seeds__expected_not_null_problematic_model_id, - "expected_unique_problematic_model_id.csv": seeds__expected_unique_problematic_model_id, + "people.csv": fixtures.seeds__people, + "expected_accepted_values.csv": fixtures.seeds__expected_accepted_values, + "expected_failing_test.csv": fixtures.seeds__expected_failing_test, + "expected_not_null_problematic_model_id.csv": fixtures.seeds__expected_not_null_problematic_model_id, + "expected_unique_problematic_model_id.csv": fixtures.seeds__expected_unique_problematic_model_id, } @pytest.fixture(scope="class") def tests(self): return { - "failing_test.sql": tests__failing_test, + "failing_test.sql": fixtures.tests__failing_test, "passing_test.sql": tests__passing_test, } @pytest.fixture(scope="class") def properties(self): - return {"schema.yml": properties__schema_yml} + return {"schema.yml": fixtures.properties__schema_yml} @pytest.fixture(scope="class") def models(self): return { - "fine_model.sql": models__fine_model, - "fine_model_but_with_a_no_good_very_long_name.sql": models__file_model_but_with_a_no_good_very_long_name, - "problematic_model.sql": models__problematic_model, + "fine_model.sql": fixtures.models__fine_model, + "fine_model_but_with_a_no_good_very_long_name.sql": fixtures.models__file_model_but_with_a_no_good_very_long_name, + "problematic_model.sql": fixtures.models__problematic_model, } @pytest.fixture(scope="class") @@ -73,7 +56,7 @@ def project_config_update(self): "quote_columns": False, "test": self.column_type_overrides(), }, - "tests": {"+schema": TEST_AUDIT_SCHEMA_SUFFIX}, + "data_tests": {"+schema": TEST_AUDIT_SCHEMA_SUFFIX}, } def column_type_overrides(self): @@ -138,7 +121,7 @@ def run_tests_store_failures_and_assert(self, project): ) -class TestStoreTestFailures(StoreTestFailuresBase): +class BaseStoreTestFailures(StoreTestFailuresBase): @pytest.fixture(scope="function") def clean_up(self, project): yield @@ -172,11 +155,7 @@ def test__store_and_assert(self, project, clean_up): self.run_tests_store_failures_and_assert(project) -class TestFabricStoreTestFailures(TestStoreTestFailures): - pass - - -class TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions): +class TestStoreTestFailures(BaseStoreTestFailures): pass
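
Usage note: the user-facing change in this diff is the new `trace_flag` credential (aliased as `SQL_ATTR_TRACE` in FabricCredentials, default `false`), which toggles the ODBC trace attribute on the connection string built in `fabric_connection_manager.open()`. A minimal profiles.yml sketch of how a target might enable it, assuming an otherwise standard dbt-fabric profile; the host, database, and authentication values below are illustrative placeholders, not taken from this diff:

    fabric_profile:
      target: dev
      outputs:
        dev:
          type: fabric
          driver: "ODBC Driver 18 for SQL Server"   # msodbc 17 or 18, matching the CI matrix above
          host: example.datawarehouse.fabric.microsoft.com   # placeholder
          database: my_database                              # placeholder
          schema: dbo
          authentication: ActiveDirectoryServicePrincipal    # placeholder; any supported method works
          trace_flag: true   # new in 1.8.6; appends SQL_ATTR_TRACE=SQL_OPT_TRACE_ON to the connection string

When `trace_flag` is omitted or set to `false`, the adapter appends `SQL_ATTR_TRACE=SQL_OPT_TRACE_OFF` instead, per the default in FabricCredentials and the branch added to `open()` above.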