Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add BaseDocsGenerate and BaseDocsGenReferences Tests #52

Merged
merged 12 commits into from
Nov 3, 2022
116 changes: 0 additions & 116 deletions dbt/include/dremio/macros/adapters/snapshot.sql
Original file line number Diff line number Diff line change
Expand Up @@ -28,119 +28,3 @@ limitations under the License.*/
values ({{ insert_cols_csv }})

{% endmacro %}

{# Dremio override of dbt's snapshot_staging_table macro.
   Builds the staged change set for a snapshot run: rows to INSERT (new or
   changed keys), rows to UPDATE (close out changed current rows), and —
   when strategy.invalidate_hard_deletes is set — rows to DELETE-close
   (keys that vanished from the source).  Structure mirrors the default
   dbt-core implementation; all comments here are Jinja comments so the
   rendered SQL is unchanged. #}
{% macro dremio__snapshot_staging_table(strategy, source_sql, target_relation) -%}

{# The user's snapshot source query, wrapped as a CTE. #}
with snapshot_query as (

{{ source_sql }}

),

{# Current ("open") rows of the existing snapshot table: dbt_valid_to is null. #}
snapshotted_data as (

select *,
{{ strategy.unique_key }} as dbt_unique_key

from {{ target_relation }}
where dbt_valid_to is null

),

{# Source rows shaped as snapshot inserts (valid_from = updated_at, open valid_to). #}
insertions_source_data as (

select
*,
{{ strategy.unique_key }} as dbt_unique_key,
{{ strategy.updated_at }} as dbt_updated_at,
{{ strategy.updated_at }} as dbt_valid_from,
{# nullif(x, x) is always NULL, but yields a NULL with the same type as
   updated_at — a plain NULL literal would be untyped. #}
nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,
{{ strategy.scd_id }} as dbt_scd_id

from snapshot_query
),

{# Source rows shaped as updates: valid_to = updated_at closes the old row. #}
updates_source_data as (

select
*,
{{ strategy.unique_key }} as dbt_unique_key,
{{ strategy.updated_at }} as dbt_updated_at,
{{ strategy.updated_at }} as dbt_valid_from,
{{ strategy.updated_at }} as dbt_valid_to

from snapshot_query
),

{%- if strategy.invalidate_hard_deletes %}

{# Only the keys are needed to detect hard deletes via anti-join below. #}
deletes_source_data as (

select
*,
{{ strategy.unique_key }} as dbt_unique_key
from snapshot_query
),
{% endif %}

{# New keys, or existing keys whose tracked columns changed per the strategy. #}
insertions as (

select
'insert' as dbt_change_type,
source_data.*

from insertions_source_data as source_data
left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key
where snapshotted_data.dbt_unique_key is null
or (
snapshotted_data.dbt_unique_key is not null
and (
{{ strategy.row_changed }}
)
)

),

{# Changed rows: carry the existing dbt_scd_id so the merge can match them. #}
updates as (

select
'update' as dbt_change_type,
source_data.*,
snapshotted_data.dbt_scd_id

from updates_source_data as source_data
join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key
where (
{{ strategy.row_changed }}
)
)

{%- if strategy.invalidate_hard_deletes -%}
,

{# Open snapshot rows whose key no longer appears in the source: close them
   out using the snapshot-time timestamp. #}
deletes as (

select
'delete' as dbt_change_type,
source_data.*,
{{ snapshot_get_time() }} as dbt_valid_from,
{{ snapshot_get_time() }} as dbt_updated_at,
{{ snapshot_get_time() }} as dbt_valid_to,
snapshotted_data.dbt_scd_id

from snapshotted_data
left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key
where source_data.dbt_unique_key is null
)
{%- endif %}

select * from insertions
union all
select * from updates
{%- if strategy.invalidate_hard_deletes %}
union all
select * from deletes
{%- endif %}

{%- endmacro %}

4 changes: 2 additions & 2 deletions dev_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
dbt-core==1.1.2
dbt-tests-adapter==1.1.2
dbt-core==1.2.2
dbt-tests-adapter==1.2.2
black==22.3.0
bumpversion
flake8
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
packages=find_namespace_packages(include=["dbt", "dbt.*"]),
include_package_data=True,
install_requires=[
"dbt-core==1.1.2",
"dbt-core==1.2.2",
],
classifiers=[
"License :: OSI Approved :: Apache Software License",
Expand Down
87 changes: 87 additions & 0 deletions tests/functional/adapter/basic/test_adapter_methods.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
import pytest
from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod
from dbt.tests.adapter.basic.test_adapter_methods import models__upstream_sql
from tests.functional.adapter.utils.test_utils import DATALAKE

# dbt model that deliberately abuses adapter methods (drop_schema,
# get_relation, create_schema, create_view_as) at execution time to exercise
# them — adapted from dbt's base adapter-method test, with relations suffixed
# "_view" for this suite.  The embedded Jinja comment is explicit that this
# is test-only usage, not an example to follow.
models__my_model_sql = """

{% set upstream = ref('upstream_view') %}

{% if execute %}
{# don't ever do any of this #}
{%- do adapter.drop_schema(upstream) -%}
{% set existing = adapter.get_relation(upstream.database, upstream.schema, upstream.identifier) %}
{% if existing is not defined %}
{% do exceptions.raise_compiler_error('expected ' ~ ' to not exist, but it did') %}
{% endif %}

{%- do adapter.create_schema(upstream) -%}

{% set sql = create_view_as(upstream, 'select 2 as id') %}
{% do run_query(sql) %}
{% endif %}


select * from {{ upstream }}

"""

# Expected-result model: the ref() forces it to build after model_view, and
# its output ("select 2 as id") must match what model_view recreates upstream.
models__expected_sql = """
-- make sure this runs after 'model'
-- {{ ref('model_view') }}
select 2 as id

"""


class TestBaseAdapterMethodDremio(BaseAdapterMethod):
    """Run dbt's base adapter-method tests (drop_schema / get_relation /
    create_schema exercised from inside a model) against Dremio.

    Overrides the stock fixtures so that the test schema is qualified with
    the Dremio datalake source and the twin strategy keeps table/view twins
    in sync.
    """

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # "clone" twin strategy so the relations created by the adapter-method
        # calls in models__my_model_sql resolve in both spaces.
        return {
            "models": {
                "+twin_strategy": "clone",
            },
            "name": "adapter_methods",
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "upstream_view.sql": models__upstream_sql,
            "expected_view.sql": models__expected_sql,
            "model_view.sql": models__my_model_sql,
        }

    @pytest.fixture(scope="class")
    def unique_schema(self, request, prefix) -> str:
        """Build a per-test schema name qualified with the datalake source."""
        test_file = request.module.__name__
        # We only want the last part of the dotted module name
        test_file = test_file.split(".")[-1]
        unique_schema = f"{DATALAKE}.{prefix}_{test_file}"
        return unique_schema

    @pytest.fixture(scope="class")
    def dbt_profile_data(
        self, unique_schema, dbt_profile_target, profiles_config_update
    ):
        """Assemble the profiles.yml payload, pointing schema and root_path
        at the datalake-qualified unique schema."""
        profile = {
            "config": {"send_anonymous_usage_stats": False},
            "test": {
                "outputs": {
                    "default": {},
                },
                "target": "default",
            },
        }
        # Copy so we do not mutate the shared class-scoped dbt_profile_target
        # fixture in place for other fixtures/tests that consume it.
        target = dict(dbt_profile_target)
        target["schema"] = unique_schema
        target["root_path"] = unique_schema
        profile["test"]["outputs"]["default"] = target

        if profiles_config_update:
            profile.update(profiles_config_update)
        return profile

    @pytest.fixture(scope="class")
    def equal_tables(self):
        # model_view is expected to produce the same rows as expected_view.
        return ["model_view", "expected_view"]
165 changes: 165 additions & 0 deletions tests/functional/adapter/basic/test_base_mat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
import pytest
from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations
from tests.functional.adapter.utils.test_utils import (
relation_from_name,
check_relations_equal,
check_relation_types,
)
from dbt.tests.adapter.basic.files import (
base_view_sql,
base_table_sql,
base_materialized_var_sql,
)
from dbt.tests.util import (
run_dbt,
check_result_nodes_by_name,
)
from tests.functional.adapter.utils.test_utils import DATALAKE

# Unable to insert variable into docstring, so "rav-test" is hardcoded.
# NOTE: the previous version of this literal had GitHub review-UI text
# ("...marked this conversation as resolved...") pasted into the middle of
# the YAML, which would have made the source definition unparseable.
schema_base_yml = """
version: 2
sources:
  - name: raw
    database: "rav-test"
    schema: "{{ target.schema }}"
    tables:
      - name: seed
        identifier: "{{ var('seed_name', 'base') }}"
"""


class TestSimpleMaterializationsDremio(BaseSimpleMaterializations):
    """Base materialization tests (seed + view/table/swappable models)
    adapted for Dremio: datalake-qualified schema, twin strategy configured,
    and reflections disabled."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "view_model.sql": base_view_sql,
            "table_model.sql": base_table_sql,
            "swappable.sql": base_materialized_var_sql,
            "schema.yml": schema_base_yml,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+twin_strategy": "prevent",
            },
            "seeds": {"+twin_strategy": "allow"},
            "name": "base",
            # Disable reflections for this suite.
            "vars": {"dremio:reflections": "false"},
        }

    @pytest.fixture(scope="class")
    def unique_schema(self, request, prefix) -> str:
        """Build a per-test schema name qualified with the datalake source."""
        test_file = request.module.__name__
        # We only want the last part of the dotted module name
        test_file = test_file.split(".")[-1]
        unique_schema = f"{DATALAKE}.{prefix}_{test_file}"
        return unique_schema

    @pytest.fixture(scope="class")
    def dbt_profile_data(
        self, unique_schema, dbt_profile_target, profiles_config_update
    ):
        """Assemble the profiles.yml payload, pointing schema and root_path
        at the datalake-qualified unique schema."""
        profile = {
            "config": {"send_anonymous_usage_stats": False},
            "test": {
                "outputs": {
                    "default": {},
                },
                "target": "default",
            },
        }
        # Copy so we do not mutate the shared class-scoped dbt_profile_target
        # fixture in place for other fixtures/tests that consume it.
        target = dict(dbt_profile_target)
        target["schema"] = unique_schema
        target["root_path"] = unique_schema
        profile["test"]["outputs"]["default"] = target

        if profiles_config_update:
            profile.update(profiles_config_update)
        return profile

    def test_base(self, project):
        """Seed, run all models, then flip `materialized_var` to view and
        incremental, checking relation types and contents at each step."""

        # seed command: exactly one seed file
        results = run_dbt(["seed"])
        assert len(results) == 1

        # run command: three models built
        results = run_dbt()
        assert len(results) == 3

        # names exist in result nodes
        check_result_nodes_by_name(results, ["view_model", "table_model", "swappable"])

        # check relation types (swappable defaults to table)
        expected = {
            "base": "table",
            "view_model": "view",
            "table_model": "table",
            "swappable": "table",
        }
        check_relation_types(project.adapter, expected)

        # base table rowcount matches the seed
        relation = relation_from_name(project.adapter, "base")
        result = project.run_sql(
            f"select count(*) as num_rows from {relation}", fetch="one"
        )
        assert result[0] == 10

        # all relations contain identical data
        check_relations_equal(
            project.adapter, ["base", "view_model", "table_model", "swappable"]
        )

        # check relations in catalog
        catalog = run_dbt(["docs", "generate"])
        assert len(catalog.nodes) == 4
        assert len(catalog.sources) == 1

        # run_dbt changing materialized_var to view
        # (--full-refresh path required for BigQuery-style adapters)
        if project.test_config.get("require_full_refresh", False):
            results = run_dbt(
                [
                    "run",
                    "--full-refresh",
                    "-m",
                    "swappable",
                    "--vars",
                    "materialized_var: view",
                ]
            )
        else:
            results = run_dbt(
                ["run", "-m", "swappable", "--vars", "materialized_var: view"]
            )
        assert len(results) == 1

        # check relation types: swappable is now a view
        expected = {
            "base": "table",
            "view_model": "view",
            "table_model": "table",
            "swappable": "view",
        }
        check_relation_types(project.adapter, expected)

        # run_dbt changing materialized_var to incremental
        results = run_dbt(
            ["run", "-m", "swappable", "--vars", "materialized_var: incremental"]
        )
        assert len(results) == 1

        # check relation types: incremental materializes as a table
        expected = {
            "base": "table",
            "view_model": "view",
            "table_model": "table",
            "swappable": "table",
        }
        check_relation_types(project.adapter, expected)
Loading