diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 2d884a7e3..dbd792cdd 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.7.0b1 +current_version = 1.7.0b2 parse = (?P<major>[\d]+) # major version number \.(?P<minor>[\d]+) # minor version number \.(?P<patch>[\d]+) # patch version number diff --git a/.changes/1.7.0-b2.md b/.changes/1.7.0-b2.md new file mode 100644 index 000000000..1f2676536 --- /dev/null +++ b/.changes/1.7.0-b2.md @@ -0,0 +1,27 @@ +## dbt-spark 1.7.0-b2 - October 02, 2023 + +### Features + +- Persist Column level comments when creating views ([#372](https://github.com/dbt-labs/dbt-spark/issues/372)) + +### Under the Hood + +- Remove dependency on hologram ([#881](https://github.com/dbt-labs/dbt-spark/issues/881)) + +### Dependencies + +- Replace sasl with pure-sasl for PyHive ([#818](https://github.com/dbt-labs/dbt-spark/pull/818)) +- Update tox requirement from ~=4.8 to ~=4.9 ([#874](https://github.com/dbt-labs/dbt-spark/pull/874)) +- Bump mypy from 1.5.0 to 1.5.1 ([#875](https://github.com/dbt-labs/dbt-spark/pull/875)) +- Update tox requirement from ~=4.9 to ~=4.10 ([#879](https://github.com/dbt-labs/dbt-spark/pull/879)) +- Update pre-commit requirement from ~=3.3 to ~=3.4 ([#884](https://github.com/dbt-labs/dbt-spark/pull/884)) +- Update black requirement from ~=23.7 to ~=23.9 ([#886](https://github.com/dbt-labs/dbt-spark/pull/886)) +- Update tox requirement from ~=4.10 to ~=4.11 ([#887](https://github.com/dbt-labs/dbt-spark/pull/887)) + +### Security + +- Add docker image to the repo ([#876](https://github.com/dbt-labs/dbt-spark/pull/876)) + +### Contributors +- [@Fokko](https://github.com/Fokko) ([#876](https://github.com/dbt-labs/dbt-spark/pull/876)) +- [@jurasan](https://github.com/jurasan) ([#372](https://github.com/dbt-labs/dbt-spark/issues/372)) diff --git a/.changes/unreleased/Dependencies-20230628-121341.yaml b/.changes/1.7.0/Dependencies-20230628-121341.yaml similarity index 100% rename from 
.changes/unreleased/Dependencies-20230628-121341.yaml rename to .changes/1.7.0/Dependencies-20230628-121341.yaml diff --git a/.changes/unreleased/Dependencies-20230816-221452.yaml b/.changes/1.7.0/Dependencies-20230816-221452.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230816-221452.yaml rename to .changes/1.7.0/Dependencies-20230816-221452.yaml diff --git a/.changes/unreleased/Dependencies-20230816-221455.yaml b/.changes/1.7.0/Dependencies-20230816-221455.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230816-221455.yaml rename to .changes/1.7.0/Dependencies-20230816-221455.yaml diff --git a/.changes/unreleased/Dependencies-20230825-154517.yaml b/.changes/1.7.0/Dependencies-20230825-154517.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230825-154517.yaml rename to .changes/1.7.0/Dependencies-20230825-154517.yaml diff --git a/.changes/unreleased/Dependencies-20230904-221612.yaml b/.changes/1.7.0/Dependencies-20230904-221612.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230904-221612.yaml rename to .changes/1.7.0/Dependencies-20230904-221612.yaml diff --git a/.changes/unreleased/Dependencies-20230911-222120.yaml b/.changes/1.7.0/Dependencies-20230911-222120.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230911-222120.yaml rename to .changes/1.7.0/Dependencies-20230911-222120.yaml diff --git a/.changes/unreleased/Dependencies-20230912-222718.yaml b/.changes/1.7.0/Dependencies-20230912-222718.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20230912-222718.yaml rename to .changes/1.7.0/Dependencies-20230912-222718.yaml diff --git a/.changes/unreleased/Features-20230817-130731.yaml b/.changes/1.7.0/Features-20230817-130731.yaml similarity index 100% rename from .changes/unreleased/Features-20230817-130731.yaml rename to .changes/1.7.0/Features-20230817-130731.yaml diff --git 
a/.changes/unreleased/Security-20230817-145626.yaml b/.changes/1.7.0/Security-20230817-145626.yaml similarity index 100% rename from .changes/unreleased/Security-20230817-145626.yaml rename to .changes/1.7.0/Security-20230817-145626.yaml diff --git a/.changes/unreleased/Under the Hood-20230830-160616.yaml b/.changes/1.7.0/Under the Hood-20230830-160616.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20230830-160616.yaml rename to .changes/1.7.0/Under the Hood-20230830-160616.yaml diff --git a/.changes/unreleased/Features-20230921-180958.yaml b/.changes/unreleased/Features-20230921-180958.yaml new file mode 100644 index 000000000..66141eb5f --- /dev/null +++ b/.changes/unreleased/Features-20230921-180958.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support storing test failures as views +time: 2023-09-21T18:09:58.174136-04:00 +custom: + Author: mikealfare + Issue: "6914" diff --git a/.changes/unreleased/Features-20231011-094718.yaml b/.changes/unreleased/Features-20231011-094718.yaml new file mode 100644 index 000000000..8503a70b8 --- /dev/null +++ b/.changes/unreleased/Features-20231011-094718.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Create temporary views with 'or replace' +time: 2023-10-11T09:47:18.485764-07:00 +custom: + Author: annazizian + Issue: "350" diff --git a/CHANGELOG.md b/CHANGELOG.md index 583cdd61b..362976a1a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,35 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-spark/blob/main/CONTRIBUTING.md#adding-changelog-entry) +## dbt-spark 1.7.0-b2 - October 02, 2023 + +### Features + +- Persist Column level comments when creating views ([#372](https://github.com/dbt-labs/dbt-spark/issues/372)) + +### Under the Hood + +- Remove dependency on hologram ([#881](https://github.com/dbt-labs/dbt-spark/issues/881)) + +### Dependencies + +- Replace sasl with pure-sasl for PyHive ([#818](https://github.com/dbt-labs/dbt-spark/pull/818)) +- Update tox requirement from ~=4.8 to ~=4.9 ([#874](https://github.com/dbt-labs/dbt-spark/pull/874)) +- Bump mypy from 1.5.0 to 1.5.1 ([#875](https://github.com/dbt-labs/dbt-spark/pull/875)) +- Update tox requirement from ~=4.9 to ~=4.10 ([#879](https://github.com/dbt-labs/dbt-spark/pull/879)) +- Update pre-commit requirement from ~=3.3 to ~=3.4 ([#884](https://github.com/dbt-labs/dbt-spark/pull/884)) +- Update black requirement from ~=23.7 to ~=23.9 ([#886](https://github.com/dbt-labs/dbt-spark/pull/886)) +- Update tox requirement from ~=4.10 to ~=4.11 ([#887](https://github.com/dbt-labs/dbt-spark/pull/887)) + +### Security + +- Add docker image to the repo ([#876](https://github.com/dbt-labs/dbt-spark/pull/876)) + +### Contributors +- [@Fokko](https://github.com/Fokko) ([#876](https://github.com/dbt-labs/dbt-spark/pull/876)) +- [@jurasan](https://github.com/jurasan) ([#372](https://github.com/dbt-labs/dbt-spark/issues/372)) + + ## dbt-spark 1.7.0-b1 - August 17, 2023 ### Features @@ -53,7 +82,6 @@ - [@etheleon](https://github.com/etheleon) ([#865](https://github.com/dbt-labs/dbt-spark/issues/865)) - [@hanna-liashchuk](https://github.com/hanna-liashchuk) ([#387](https://github.com/dbt-labs/dbt-spark/issues/387)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.6](https://github.com/dbt-labs/dbt-spark/blob/1.6.latest/CHANGELOG.md) diff --git a/Makefile 
b/Makefile index 876440a01..cc1d9f75d 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,7 @@ dev: ## Installs adapter in develop mode along with development dependencies dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment ## Useful when updating versions, or if you accidentally installed into the system interpreter pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y + pip uninstall -y dbt-spark .PHONY: mypy mypy: ## Runs mypy against staged changes for static type checking. diff --git a/README.md b/README.md index fa286b1f7..2d2586795 100644 --- a/README.md +++ b/README.md @@ -26,18 +26,20 @@ more information, consult [the docs](https://docs.getdbt.com/docs/profile-spark) ## Running locally A `docker-compose` environment starts a Spark Thrift server and a Postgres database as a Hive Metastore backend. -Note: dbt-spark now supports Spark 3.1.1 (formerly on Spark 2.x). +Note: dbt-spark now supports Spark 3.3.2. -The following command would start two docker containers -``` +The following command starts two docker containers: + +```sh docker-compose up -d ``` + It will take a bit of time for the instance to start, you can check the logs of the two containers. If the instance doesn't start correctly, try the complete reset command listed below and then try start again. Create a profile like this one: -``` +```yaml spark_testing: target: local outputs: @@ -60,7 +62,7 @@ Connecting to the local spark instance: Note that the Hive metastore data is persisted under `./.hive-metastore/`, and the Spark-produced data under `./.spark-warehouse/`. 
To completely reset you environment run the following: -``` +```sh docker-compose down rm -rf ./.hive-metastore/ rm -rf ./.spark-warehouse/ diff --git a/dbt/adapters/spark/__version__.py b/dbt/adapters/spark/__version__.py index 48607b01f..3f5d3c0b7 100644 --- a/dbt/adapters/spark/__version__.py +++ b/dbt/adapters/spark/__version__.py @@ -1 +1 @@ -version = "1.7.0b1" +version = "1.7.0b2" diff --git a/dbt/adapters/spark/impl.py b/dbt/adapters/spark/impl.py index 2864c4f30..feae34129 100644 --- a/dbt/adapters/spark/impl.py +++ b/dbt/adapters/spark/impl.py @@ -347,7 +347,9 @@ def _get_columns_for_catalog(self, relation: BaseRelation) -> Iterable[Dict[str, as_dict["table_database"] = None yield as_dict - def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]: + def get_catalog( + self, manifest: Manifest, selected_nodes: Optional[Set] = None + ) -> Tuple[agate.Table, List[Exception]]: schema_map = self._get_catalog_schemas(manifest) if len(schema_map) > 1: raise dbt.exceptions.CompilationError( diff --git a/dbt/include/spark/macros/adapters.sql b/dbt/include/spark/macros/adapters.sql index 9e277dd68..bfc1f198d 100644 --- a/dbt/include/spark/macros/adapters.sql +++ b/dbt/include/spark/macros/adapters.sql @@ -138,7 +138,7 @@ {#-- We can't use temporary tables with `create ... 
as ()` syntax --#} {% macro spark__create_temporary_view(relation, compiled_code) -%} - create temporary view {{ relation }} as + create or replace temporary view {{ relation }} as {{ compiled_code }} {%- endmacro -%} diff --git a/setup.py b/setup.py index 2ac9e63c1..088e5f87d 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ def _get_dbt_core_version(): package_name = "dbt-spark" -package_version = "1.7.0b1" +package_version = "1.7.0b2" dbt_core_version = _get_dbt_core_version() description = """The Apache Spark adapter plugin for dbt""" diff --git a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py similarity index 62% rename from tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py rename to tests/functional/adapter/test_store_test_failures.py index c445fe671..e27cb9b95 100644 --- a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py +++ b/tests/functional/adapter/test_store_test_failures.py @@ -1,5 +1,6 @@ import pytest +from dbt.tests.adapter.store_test_failures_tests import basic from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( StoreTestFailuresBase, TEST_AUDIT_SCHEMA_SUFFIX, @@ -42,3 +43,33 @@ def project_config_update(self): def test_store_and_assert_failure_with_delta(self, project): self.run_tests_store_one_failure(project) self.run_tests_store_failures_and_assert(project) + + +@pytest.mark.skip_profile("spark_session") +class TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions): + pass + + +@pytest.mark.skip_profile("spark_session") +class TestStoreTestFailuresAsProjectLevelOff(basic.StoreTestFailuresAsProjectLevelOff): + pass + + +@pytest.mark.skip_profile("spark_session") +class TestStoreTestFailuresAsProjectLevelView(basic.StoreTestFailuresAsProjectLevelView): + pass + + +@pytest.mark.skip_profile("spark_session") +class 
TestStoreTestFailuresAsGeneric(basic.StoreTestFailuresAsGeneric): + pass + + +@pytest.mark.skip_profile("spark_session") +class TestStoreTestFailuresAsProjectLevelEphemeral(basic.StoreTestFailuresAsProjectLevelEphemeral): + pass + + +@pytest.mark.skip_profile("spark_session") +class TestStoreTestFailuresAsExceptions(basic.StoreTestFailuresAsExceptions): + pass