From 7b95356d58e21a492765dcc5f175817f09f19417 Mon Sep 17 00:00:00 2001
From: Mike Alfare
Date: Mon, 9 Dec 2024 14:50:14 -0500
Subject: [PATCH] update dagger script for hatch

---
 dagger/run_dbt_spark_tests.py | 30 +++---------------------------
 hatch.toml                    |  1 +
 2 files changed, 4 insertions(+), 27 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 6c310a6f8..611f48692 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -94,11 +94,9 @@ async def test_spark(test_args):
         pip_cache = client.cache_volume("pip")
 
         # setup directories as we don't want to copy the whole repo into the container
-        req_files = client.host().directory(
-            "./", include=["*.txt", "*.env", "*.ini", "*.md", "setup.py"]
+        client.host().directory(
+            "./", include=["*.env", "hatch.toml", "pyproject.toml", "./dbt", "./tests"]
         )
-        dbt_spark_dir = client.host().directory("./dbt")
-        test_dir = client.host().directory("./tests")
         scripts = client.host().directory("./dagger/scripts")
         platform = dagger.Platform("linux/amd64")
         tst_container = (
@@ -110,27 +108,6 @@ async def test_spark(test_args):
             # install OS deps first so any local changes don't invalidate the cache
             .with_directory("/scripts", scripts)
             .with_exec(["./scripts/install_os_reqs.sh"])
-            # install dbt-spark + python deps
-            .with_directory("/src", req_files)
-            .with_exec(["pip", "install", "-U", "pip"])
-            .with_workdir("/src")
-            .with_exec(["pip", "install", "-r", "requirements.txt"])
-            .with_exec(["pip", "install", "-r", "dev-requirements.txt"])
-        )
-
-        # install local dbt-spark changes
-        tst_container = (
-            tst_container.with_workdir("/")
-            .with_directory("src/dbt", dbt_spark_dir)
-            .with_workdir("/src")
-            .with_exec(["pip", "install", "-e", "."])
-        )
-
-        # install local test changes
-        tst_container = (
-            tst_container.with_workdir("/")
-            .with_directory("src/tests", test_dir)
-            .with_workdir("/src")
         )
 
         if test_profile == "apache_spark":
@@ -145,13 +122,12 @@ async def test_spark(test_args):
             )
 
         elif test_profile == "spark_session":
-            tst_container = tst_container.with_exec(["pip", "install", "pyspark"])
             tst_container = tst_container.with_exec(["apt-get", "install", "openjdk-17-jre", "-y"])
 
         tst_container = tst_container.with_(env_variables(TESTING_ENV_VARS))
         test_path = test_args.test_path if test_args.test_path else "tests/functional/adapter"
         result = await tst_container.with_exec(
-            ["pytest", "-v", "--profile", test_profile, "-n", "auto", test_path]
+            ["hatch", "run", "pytest", "-v", "--profile", test_profile, "-n", "auto", test_path]
         ).stdout()
 
         return result
diff --git a/hatch.toml b/hatch.toml
index a255a6d0a..8fec666b8 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -13,6 +13,7 @@ dependencies = [
     "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
     "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
     "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
+    "anyio",
     "beartype<0.18.0",
     "dagger-io~=0.9.7",
     "ddtrace==2.3.0",
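
Note: the reworked script defers dependency management to hatch inside the
container instead of layering pip installs by hand. Below is a minimal
standalone sketch of that pattern, not the repo's script: it assumes a
python:3.9-slim base image and hatch installed via pip, and it omits the
--profile/env-var plumbing the real script keeps.

import sys

import anyio
import dagger


async def run_tests() -> str:
    async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
        # copy only what hatch needs to build the env, not the whole repo
        src = client.host().directory(
            "./", include=["*.env", "hatch.toml", "pyproject.toml", "./dbt", "./tests"]
        )
        container = (
            client.container(platform=dagger.Platform("linux/amd64"))
            .from_("python:3.9-slim")  # assumed base image
            .with_exec(["pip", "install", "hatch"])
            .with_directory("/src", src)
            .with_workdir("/src")
        )
        # hatch builds the environment declared in hatch.toml, then runs pytest in it
        return await container.with_exec(
            ["hatch", "run", "pytest", "-v", "tests/functional/adapter"]
        ).stdout()


if __name__ == "__main__":
    print(anyio.run(run_tests))

Because the host sources land in a single filtered directory copied in after
the OS and hatch layers, local code changes only invalidate the final layers,
the same cache-friendly ordering the script's own comments call for.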