diff --git a/.changes/unreleased/Features-20230921-180958.yaml b/.changes/unreleased/Features-20230921-180958.yaml
index fefb3da53..66141eb5f 100644
--- a/.changes/unreleased/Features-20230921-180958.yaml
+++ b/.changes/unreleased/Features-20230921-180958.yaml
@@ -1,5 +1,5 @@
 kind: Features
-body: Support test results as views
+body: Support storing test failures as views
 time: 2023-09-21T18:09:58.174136-04:00
 custom:
   Author: mikealfare
diff --git a/tests/functional/adapter/test_persist_test_results.py b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures_as_view.py
similarity index 73%
rename from tests/functional/adapter/test_persist_test_results.py
rename to tests/functional/adapter/store_test_failures_tests/test_store_test_failures_as_view.py
index 1ae94e1b2..d092c612e 100644
--- a/tests/functional/adapter/test_persist_test_results.py
+++ b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures_as_view.py
@@ -4,25 +4,41 @@
 import pytest
 
 from dbt.contracts.results import TestStatus
-from dbt.tests.adapter.persist_test_results.basic import PersistTestResults
+from dbt.tests.adapter.store_test_failures_tests.basic import StoreTestFailures
 from dbt.tests.util import run_dbt, check_relation_types
 
 
 @pytest.mark.skip_profile("spark_session", "apache_spark")
-class TestPersistTestResultsDatabricks(PersistTestResults):
+class TestStoreTestFailuresDatabricks(StoreTestFailures):
+    """
+    Databricks works as expected. This tests all Databricks profiles as they are not skipped above.
+    """
+
     pass
 
 
 @pytest.mark.skip_profile("spark_session", "databricks_cluster", "databricks_sql_endpoint")
-class TestPersistTestResultsSpark(PersistTestResults):
+class TestStoreTestFailuresSpark(StoreTestFailures):
+    """
+    This is the same set of test cases as the test class above; it's the same subclass.
+
+    Using "DELETE FROM" with Spark throws the following error:
+        dbt.exceptions.DbtDatabaseError: Database Error
+            org.apache.hive.service.cli.HiveSQLException:
+            Error running query: org.apache.spark.sql.AnalysisException:
+            DELETE is only supported with v2 tables.
+
+    As a result, this class overrides `self.delete_record` to do nothing and then overrides the test
+    only to skip updating the expected changes to reflect the absence of a delete.
+
+    This should be updated in the future:
+    - `self.delete_record` should be updated to properly delete the record by replacing the data frame
+    with a filtered dataframe
+    - the test case should be removed from here; it should not need to be altered once `self.delete_record`
+    is updated correctly
+    """
+
     def delete_record(self, project, record: Dict[str, str]):
-        """
-        Using "DELETE FROM" with Spark throws the following error:
-        dbt.exceptions.DbtDatabaseError: Database Error
-        org.apache.hive.service.cli.HiveSQLException:
-        Error running query: org.apache.spark.sql.AnalysisException:
-        DELETE is only supported with v2 tables.
-        """
         pass
 
     def test_tests_run_successfully_and_are_persisted_correctly(self, project):