From a4f100472a31f6d151057314098cca8373dfbddc Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 19:31:32 +0530 Subject: [PATCH 01/13] Correct stale `root_path` in partial parse file With partial parsing enabled in PR #904, testing the implementation showed that seed files could not be located because the partial parse file contained a stale `root_path` carried over from previous command runs. This PR corrects the `root_path` in the partial parse file by replacing it with the project directory where the project files are actually located. closes: #937 --- cosmos/cache.py | 15 +++++++++++++++ pyproject.toml | 9 +++++---- scripts/test/integration-dbt-1-5-4.sh | 10 ++++++++++ 3 files changed, 30 insertions(+), 4 deletions(-) create mode 100644 scripts/test/integration-dbt-1-5-4.sh diff --git a/cosmos/cache.py b/cosmos/cache.py index 3c2086c7a..cb2345f92 100644 --- a/cosmos/cache.py +++ b/cosmos/cache.py @@ -3,6 +3,7 @@ import shutil from pathlib import Path +import msgpack from airflow.models.dag import DAG from airflow.utils.task_group import TaskGroup @@ -121,4 +122,18 @@ def _copy_partial_parse_to_project(partial_parse_filepath: Path, project_path: P source_manifest_filepath = partial_parse_filepath.parent / DBT_MANIFEST_FILE_NAME target_manifest_filepath = target_partial_parse_file.parent / DBT_MANIFEST_FILE_NAME shutil.copy(str(partial_parse_filepath), str(target_partial_parse_file)) + + # Update root_path in partial parse file to point to the target project directory. This is necessary because in some + # earlier versions of dbt (e.g. 1.5.4), the root_path was hardcoded to a stale directory and is not updated to the + # needed target directory. This seems to have been resolved in later versions of dbt, but we still need to handle + # this for compatibility with older versions.
+ with target_partial_parse_file.open("rb") as f: + data = msgpack.unpack(f) + for node in data["nodes"].values(): + if node.get("root_path"): + node["root_path"] = str(project_path) + with target_partial_parse_file.open("wb") as f: + packed = msgpack.packb(data) + f.write(packed) + shutil.copy(str(source_manifest_filepath), str(target_manifest_filepath)) diff --git a/pyproject.toml b/pyproject.toml index 5f0e5ee0e..4d5a220c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -141,16 +141,17 @@ matrix.airflow.dependencies = [ [tool.hatch.envs.tests.scripts] freeze = "pip freeze" -type-check = "mypy cosmos" test = 'sh scripts/test/unit.sh' test-cov = 'sh scripts/test/unit-cov.sh' -test-integration-setup = 'sh scripts/test/integration-setup.sh' test-integration = 'sh scripts/test/integration.sh' +test-integration-dbt-1-5-4 = 'sh scripts/test/integration-dbt-1-5-4.sh' test-integration-expensive = 'sh scripts/test/integration-expensive.sh' -test-integration-sqlite-setup = 'sh scripts/test/integration-sqlite-setup.sh' +test-integration-setup = 'sh scripts/test/integration-setup.sh' test-integration-sqlite = 'sh scripts/test/integration-sqlite.sh' -test-performance-setup = 'sh scripts/test/performance-setup.sh' +test-integration-sqlite-setup = 'sh scripts/test/integration-sqlite-setup.sh' test-performance = 'sh scripts/test/performance.sh' +test-performance-setup = 'sh scripts/test/performance-setup.sh' +type-check = "mypy cosmos" [tool.pytest.ini_options] filterwarnings = ["ignore::DeprecationWarning"] diff --git a/scripts/test/integration-dbt-1-5-4.sh b/scripts/test/integration-dbt-1-5-4.sh new file mode 100644 index 000000000..04597bd18 --- /dev/null +++ b/scripts/test/integration-dbt-1-5-4.sh @@ -0,0 +1,10 @@ +pip uninstall dbt-adapters dbt-common dbt-core dbt-extractor dbt-postgres dbt-semantic-interfaces -y +pip install dbt-postgres==1.5.4 +pytest -vv \ + --cov=cosmos \ + --cov-report=term-missing \ + --cov-report=xml \ + --durations=0 \ + -m integration \ + --ignore=tests/perf \ + -k 'basic_cosmos_task_group' From 57b13cc5a14a4b7105688f0e9128de815084f8a1 Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 19:37:21 +0530 Subject: [PATCH 02/13] Update cosmos/cache.py --- cosmos/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosmos/cache.py b/cosmos/cache.py index cb2345f92..d8ae06b4b 100644 --- a/cosmos/cache.py +++ b/cosmos/cache.py @@ -123,7 +123,7 @@ def _copy_partial_parse_to_project(partial_parse_filepath: Path, project_path: P target_manifest_filepath = target_partial_parse_file.parent / DBT_MANIFEST_FILE_NAME shutil.copy(str(partial_parse_filepath), str(target_partial_parse_file)) - # Update root_path in partial parse file to point to the target project directory. This is necessary because in some + # Update root_path in partial parse file to point to the needed project directory. This is necessary because in some # earlier versions of dbt (e.g. 1.5.4), the root_path was hardcoded to a stale directory and is not updated to the # needed target directory. This seems to have been resolved in later versions of dbt, but we still need to handle # this for compatibility with older versions. 
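For reference, the `root_path` correction introduced in PATCH 01 reduces to rewriting each node entry in dbt's partial_parse.msgpack. Below is a minimal standalone sketch of that technique, assuming (as dbt-core 1.5.x does) that the file exposes a top-level "nodes" mapping whose entries carry a "root_path" key; the helper name fix_root_path is illustrative and mirrors the hunk added to cosmos/cache.py rather than any existing Cosmos API.

from pathlib import Path

import msgpack


def fix_root_path(partial_parse_file: Path, project_path: Path) -> None:
    # Load the msgpack-encoded partial parse state produced by dbt.
    with partial_parse_file.open("rb") as f:
        data = msgpack.unpack(f)
    # Point every node's root_path at the directory the project was copied to,
    # so older dbt versions resolve seeds/models relative to the right location.
    for node in data["nodes"].values():
        if node.get("root_path"):
            node["root_path"] = str(project_path)
    # Write the updated state back in place.
    with partial_parse_file.open("wb") as f:
        f.write(msgpack.packb(data))

In the patch, this rewrite happens right after the cached partial_parse.msgpack is copied into the target project and before the manifest is copied alongside it.
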
From c3ea4aa7f0de0ac64d5be33d424fc9007be23ebd Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 19:37:40 +0530 Subject: [PATCH 03/13] Update cosmos/cache.py --- cosmos/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosmos/cache.py b/cosmos/cache.py index d8ae06b4b..7bbe1ba10 100644 --- a/cosmos/cache.py +++ b/cosmos/cache.py @@ -125,7 +125,7 @@ def _copy_partial_parse_to_project(partial_parse_filepath: Path, project_path: P # Update root_path in partial parse file to point to the needed project directory. This is necessary because in some # earlier versions of dbt (e.g. 1.5.4), the root_path was hardcoded to a stale directory and is not updated to the - # needed target directory. This seems to have been resolved in later versions of dbt, but we still need to handle + # needed project directory. This seems to have been resolved in later versions of dbt, but we still need to handle # this for compatibility with older versions. with target_partial_parse_file.open("rb") as f: data = msgpack.unpack(f) From b4f6466996eb37ffc731fa9f91ba46a566d19c94 Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 19:53:47 +0530 Subject: [PATCH 04/13] Run integration test for dbt-1.5.4 in CI and add msgpack as a dependency --- .github/workflows/test.yml | 61 ++++++++++++++++++++++++++++++++++++++ pyproject.toml | 1 + 2 files changed, 62 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f6e3701a8..96c98ce81 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -292,6 +292,67 @@ jobs: AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + Run-Integration-Tests-DBT-1-5-4: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ "3.11" ] + airflow-version: [ "2.7" ] + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .nox + key: integration-dbt-1-5-4-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }}, Python ${{ matrix.python-version }} and dbt 1.5.4 + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-dbt-1-5-4 + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} + DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} + DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: 
postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-dbt-1-5-4-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + Run-Performance-Tests: needs: Authorize runs-on: ubuntu-latest diff --git a/pyproject.toml b/pyproject.toml index 4d5a220c8..5690100d8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ dependencies = [ "apache-airflow>=2.3.0", "importlib-metadata; python_version < '3.8'", "Jinja2>=3.0.0", + "msgpack", "pydantic>=1.10.0", "typing-extensions; python_version < '3.8'", "virtualenv", From 3249e3ec459052f08c9ac8bd154f4328ea7f4660 Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 20:30:14 +0530 Subject: [PATCH 05/13] Add msgpack as a dependency for hatch docs env --- pyproject.toml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5690100d8..f740f2071 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -166,13 +166,14 @@ markers = ["integration", "sqlite", "perf"] [tool.hatch.envs.docs] dependencies = [ "aenum", - "sphinx", - "pydata-sphinx-theme", - "sphinx-autobuild", - "sphinx-autoapi", - "openlineage-airflow", "apache-airflow-providers-cncf-kubernetes>=5.1.1", + "msgpack", + "openlineage-airflow", "pydantic>=1.10.0", + "pydata-sphinx-theme", + "sphinx", + "sphinx-autoapi", + "sphinx-autobuild", ] [tool.hatch.envs.docs.scripts] From a680458ed22ea4eb36d1eef32e21222e493cb8ea Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 20:38:59 +0530 Subject: [PATCH 06/13] Add current branch to test.yml to test the integration test on dbt 1.5.4 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 96c98ce81..63d3abade 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,7 +2,7 @@ name: test on: push: # Run on pushes to the default branch - branches: [main] + branches: [main, correct-project-dir-partial-parse-file] pull_request_target: # Also run on pull requests originated from forks branches: [main] From a38881f57470a1e0645a82415f0822b0e088490d Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 21:14:02 +0530 Subject: [PATCH 07/13] Address review comments and install dbt-databricks in integration test runner --- cosmos/cache.py | 11 +++++++---- scripts/test/integration-dbt-1-5-4.sh | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/cosmos/cache.py b/cosmos/cache.py index 7bbe1ba10..7d136a127 100644 --- a/cosmos/cache.py +++ b/cosmos/cache.py @@ -123,10 +123,13 @@ def _copy_partial_parse_to_project(partial_parse_filepath: Path, project_path: P target_manifest_filepath = target_partial_parse_file.parent / DBT_MANIFEST_FILE_NAME shutil.copy(str(partial_parse_filepath), str(target_partial_parse_file)) - # Update root_path in partial parse file to point to the needed project directory. This is necessary because in some - # earlier versions of dbt (e.g. 1.5.4), the root_path was hardcoded to a stale directory and is not updated to the - # needed project directory. 
This seems to have been resolved in later versions of dbt, but we still need to handle - # this for compatibility with older versions. + # Update root_path in partial parse file to point to the needed project directory. This is necessary because + # an issue is observed where on specific earlier versions of dbt-core like 1.5.4 and 1.6.5, the commands fail to + # locate project files as they are pointed to a stale directory by the root_path in the partial parse file. + # This issue was not observed on recent versions of dbt-core 1.5.8, 1.6.6, 1.7.0 and 1.8.0 as tested on. + # It is suspected that PR dbt-labs/dbt-core#8762 is likely the fix and the fix appears to be backported to later + # version releases of 1.5.x and 1.6.x. However, the below modification is applied to ensure that the root_path is + # correctly set to the needed project directory and the feature is compatible across all dbt-core versions. with target_partial_parse_file.open("rb") as f: data = msgpack.unpack(f) for node in data["nodes"].values(): diff --git a/scripts/test/integration-dbt-1-5-4.sh b/scripts/test/integration-dbt-1-5-4.sh index 04597bd18..d50013224 100644 --- a/scripts/test/integration-dbt-1-5-4.sh +++ b/scripts/test/integration-dbt-1-5-4.sh @@ -1,5 +1,5 @@ pip uninstall dbt-adapters dbt-common dbt-core dbt-extractor dbt-postgres dbt-semantic-interfaces -y -pip install dbt-postgres==1.5.4 +pip install dbt-postgres==1.5.4 dbt-databricks==1.5.4 pytest -vv \ --cov=cosmos \ --cov-report=term-missing \ From 8893d1de7778626b5c2fcdfdd5bf7f9de5e30d7b Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 21:21:46 +0530 Subject: [PATCH 08/13] Add msgpack as a dependency to docs/requirements.txt --- docs/requirements.txt | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 430993ff8..81a7084e4 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,10 +1,11 @@ -google-re2==1.1 aenum -sphinx -pydata-sphinx-theme -sphinx-autobuild -sphinx-autoapi apache-airflow apache-airflow-providers-cncf-kubernetes>=5.1.1 +google-re2==1.1 +msgpack openlineage-airflow pydantic +pydata-sphinx-theme +sphinx +sphinx-autoapi +sphinx-autobuild From fb4f993593535140dd82b4a6be23184ad423ad1c Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 21:23:29 +0530 Subject: [PATCH 09/13] Temporarily disable all tests in CI except for dbt 1.5.4 integration test --- .github/workflows/test.yml | 678 ++++++++++++++++++------------------- 1 file changed, 339 insertions(+), 339 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 63d3abade..6b4aef553 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,278 +19,278 @@ jobs: steps: - run: true - Type-Check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: "x64" - - - run: pip3 install hatch - - run: hatch run tests.py3.9-2.7:type-check - - Run-Unit-Tests: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] - airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] - exclude: - - python-version: "3.11" - airflow-version: "2.3" - - python-version: "3.11" - airflow-version: "2.4" - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - - uses: actions/cache@v3 - 
with: - path: | - ~/.cache/pip - .nox - key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install packages and dependencies - run: | - python -m pip install hatch - hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze - - - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} - run: | - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov - - - name: Upload coverage to Github - uses: actions/upload-artifact@v2 - with: - name: coverage-unit-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} - path: .coverage - - Run-Integration-Tests: - needs: Authorize - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] - airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] - exclude: - - python-version: "3.11" - airflow-version: "2.3" - - python-version: "3.11" - airflow-version: "2.4" - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - uses: actions/cache@v3 - with: - path: | - ~/.cache/pip - .nox - key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install packages and dependencies - run: | - python -m pip install hatch - hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze - - - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} - run: | - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup - DATABRICKS_UNIQUE_ID="${{github.run_id}}_${{matrix.python-version}}_${{ matrix.airflow-version }}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} - AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} - DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} - COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} - POSTGRES_HOST: localhost - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - POSTGRES_SCHEMA: public - POSTGRES_PORT: 5432 - - - name: Upload coverage to Github - uses: actions/upload-artifact@v2 - with: - name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} - path: .coverage - - 
Run-Integration-Tests-Expensive: - needs: Authorize - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.11"] - airflow-version: ["2.6"] - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - uses: actions/cache@v3 - with: - path: | - ~/.cache/pip - .nox - key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install packages and dependencies - run: | - python -m pip install hatch - hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze - - - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} - run: | - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup - DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} - AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 - DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} - DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} - COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} - POSTGRES_HOST: localhost - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - POSTGRES_SCHEMA: public - POSTGRES_PORT: 5432 - - - name: Upload coverage to Github - uses: actions/upload-artifact@v2 - with: - name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} - path: .coverage - - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} - DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} - DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} - - Run-Integration-Tests-Sqlite: - needs: Authorize - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.11"] - airflow-version: ["2.7"] - - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - uses: actions/cache@v3 - with: - path: | - ~/.cache/pip - .nox - key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} - - - name: Set up Python ${{ matrix.python-version }} 
- uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install packages and dependencies - run: | - python -m pip install hatch - hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze - - - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} - run: | - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} - DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} - DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} - COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} - POSTGRES_HOST: localhost - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - POSTGRES_SCHEMA: public - POSTGRES_PORT: 5432 - - - name: Upload coverage to Github - uses: actions/upload-artifact@v2 - with: - name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} - path: .coverage - - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# Type-Check: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# +# - uses: actions/setup-python@v3 +# with: +# python-version: "3.9" +# architecture: "x64" +# +# - run: pip3 install hatch +# - run: hatch run tests.py3.9-2.7:type-check +# +# Run-Unit-Tests: +# runs-on: ubuntu-latest +# strategy: +# matrix: +# python-version: ["3.8", "3.9", "3.10", "3.11"] +# airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] +# exclude: +# - python-version: "3.11" +# airflow-version: "2.3" +# - python-version: "3.11" +# airflow-version: "2.4" +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# +# - uses: actions/cache@v3 +# with: +# path: | +# ~/.cache/pip +# .nox +# key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} +# +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Install packages and dependencies +# run: | +# python -m pip install hatch +# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze +# +# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} +# run: | +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov +# +# - name: Upload coverage to Github +# uses: actions/upload-artifact@v2 +# with: +# name: coverage-unit-test-${{ matrix.python-version }}-${{ 
matrix.airflow-version }} +# path: .coverage +# +# Run-Integration-Tests: +# needs: Authorize +# runs-on: ubuntu-latest +# strategy: +# matrix: +# python-version: ["3.8", "3.9", "3.10", "3.11"] +# airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] +# exclude: +# - python-version: "3.11" +# airflow-version: "2.3" +# - python-version: "3.11" +# airflow-version: "2.4" +# services: +# postgres: +# image: postgres +# env: +# POSTGRES_PASSWORD: postgres +# options: >- +# --health-cmd pg_isready +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# ports: +# - 5432:5432 +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# - uses: actions/cache@v3 +# with: +# path: | +# ~/.cache/pip +# .nox +# key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} +# +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Install packages and dependencies +# run: | +# python -m pip install hatch +# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze +# +# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} +# run: | +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup +# DATABRICKS_UNIQUE_ID="${{github.run_id}}_${{matrix.python-version}}_${{ matrix.airflow-version }}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} +# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} +# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} +# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} +# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} +# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} +# POSTGRES_HOST: localhost +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# POSTGRES_DB: postgres +# POSTGRES_SCHEMA: public +# POSTGRES_PORT: 5432 +# +# - name: Upload coverage to Github +# uses: actions/upload-artifact@v2 +# with: +# name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} +# path: .coverage +# +# Run-Integration-Tests-Expensive: +# needs: Authorize +# runs-on: ubuntu-latest +# strategy: +# matrix: +# python-version: ["3.11"] +# airflow-version: ["2.6"] +# +# services: +# postgres: +# image: postgres +# env: +# POSTGRES_PASSWORD: postgres +# options: >- +# --health-cmd pg_isready +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# ports: +# - 5432:5432 +# +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# - uses: actions/cache@v3 +# with: +# path: | +# ~/.cache/pip +# .nox +# key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} +# +# - name: Set up Python ${{ 
matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Install packages and dependencies +# run: | +# python -m pip install hatch +# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze +# +# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} +# run: | +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup +# DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} +# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 +# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} +# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} +# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} +# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} +# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} +# POSTGRES_HOST: localhost +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# POSTGRES_DB: postgres +# POSTGRES_SCHEMA: public +# POSTGRES_PORT: 5432 +# +# - name: Upload coverage to Github +# uses: actions/upload-artifact@v2 +# with: +# name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} +# path: .coverage +# +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} +# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} +# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} +# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} +# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} +# +# Run-Integration-Tests-Sqlite: +# needs: Authorize +# runs-on: ubuntu-latest +# strategy: +# matrix: +# python-version: ["3.11"] +# airflow-version: ["2.7"] +# +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# - uses: actions/cache@v3 +# with: +# path: | +# ~/.cache/pip +# .nox +# key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} +# +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Install packages and dependencies +# run: | +# python -m pip install hatch +# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze +# +# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} +# run: | +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite +# env: +# AIRFLOW_HOME: 
/home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} +# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} +# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} +# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} +# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} +# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} +# POSTGRES_HOST: localhost +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# POSTGRES_DB: postgres +# POSTGRES_SCHEMA: public +# POSTGRES_PORT: 5432 +# +# - name: Upload coverage to Github +# uses: actions/upload-artifact@v2 +# with: +# name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} +# path: .coverage +# +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH Run-Integration-Tests-DBT-1-5-4: needs: Authorize @@ -353,73 +353,73 @@ jobs: AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - Run-Performance-Tests: - needs: Authorize - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.11"] - airflow-version: ["2.7"] - num-models: [1, 10, 50, 100] - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - uses: actions/cache@v3 - with: - path: | - ~/.cache/pip - .nox - key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install packages and dependencies - run: | - python -m pip install hatch - hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze - - - name: Run performance tests against against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} - id: run-performance-tests - run: | - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup - hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance - - # read the performance results and set them as an env var for the next step - # format: NUM_MODELS={num_models}\nTIME={end - start}\n - cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH - COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} - POSTGRES_HOST: localhost - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - 
POSTGRES_DB: postgres - POSTGRES_SCHEMA: public - POSTGRES_PORT: 5432 - MODEL_COUNT: ${{ matrix.num-models }} - env: - AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ - AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres - PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# Run-Performance-Tests: +# needs: Authorize +# runs-on: ubuntu-latest +# strategy: +# matrix: +# python-version: ["3.11"] +# airflow-version: ["2.7"] +# num-models: [1, 10, 50, 100] +# services: +# postgres: +# image: postgres +# env: +# POSTGRES_PASSWORD: postgres +# options: >- +# --health-cmd pg_isready +# --health-interval 10s +# --health-timeout 5s +# --health-retries 5 +# ports: +# - 5432:5432 +# steps: +# - uses: actions/checkout@v3 +# with: +# ref: ${{ github.event.pull_request.head.sha || github.ref }} +# - uses: actions/cache@v3 +# with: +# path: | +# ~/.cache/pip +# .nox +# key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} +# +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# +# - name: Install packages and dependencies +# run: | +# python -m pip install hatch +# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze +# +# - name: Run performance tests against against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} +# id: run-performance-tests +# run: | +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup +# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance +# +# # read the performance results and set them as an env var for the next step +# # format: NUM_MODELS={num_models}\nTIME={end - start}\n +# cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH +# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} +# POSTGRES_HOST: localhost +# POSTGRES_USER: postgres +# POSTGRES_PASSWORD: postgres +# POSTGRES_DB: postgres +# POSTGRES_SCHEMA: public +# POSTGRES_PORT: 5432 +# MODEL_COUNT: ${{ matrix.num-models }} +# env: +# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ +# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres +# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH Code-Coverage: if: github.event.action != 'labeled' From 2ad1340a55a813e701f85db928ab5900c8462e47 Mon Sep 17 00:00:00 2001 From: Pankaj Koti Date: Fri, 10 May 2024 21:26:26 +0530 Subject: [PATCH 10/13] Revert "Temporarily disable all tests in CI except for dbt 1.5.4 integration test" This reverts commit 13353f63d15eb572b5ea1745675cbe834826da12. 
--- .github/workflows/test.yml | 678 ++++++++++++++++++------------------- 1 file changed, 339 insertions(+), 339 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6b4aef553..63d3abade 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,278 +19,278 @@ jobs: steps: - run: true -# Type-Check: -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# -# - uses: actions/setup-python@v3 -# with: -# python-version: "3.9" -# architecture: "x64" -# -# - run: pip3 install hatch -# - run: hatch run tests.py3.9-2.7:type-check -# -# Run-Unit-Tests: -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.8", "3.9", "3.10", "3.11"] -# airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] -# exclude: -# - python-version: "3.11" -# airflow-version: "2.3" -# - python-version: "3.11" -# airflow-version: "2.4" -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .nox -# key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-unit-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# Run-Integration-Tests: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.8", "3.9", "3.10", "3.11"] -# airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] -# exclude: -# - python-version: "3.11" -# airflow-version: "2.3" -# - python-version: "3.11" -# airflow-version: "2.4" -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .nox -# key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup -# 
DATABRICKS_UNIQUE_ID="${{github.run_id}}_${{matrix.python-version}}_${{ matrix.airflow-version }}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} -# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} -# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# Run-Integration-Tests-Expensive: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.6"] -# -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .nox -# key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup -# DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} -# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} -# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: 
Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} -# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} -# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# -# Run-Integration-Tests-Sqlite: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.7"] -# -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .nox -# key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} -# DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} -# DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + Type-Check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + + - uses: actions/setup-python@v3 + with: + python-version: "3.9" + architecture: "x64" + + - run: pip3 install hatch + - run: hatch run 
tests.py3.9-2.7:type-check + + Run-Unit-Tests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] + exclude: + - python-version: "3.11" + airflow-version: "2.3" + - python-version: "3.11" + airflow-version: "2.4" + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .nox + key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-unit-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + Run-Integration-Tests: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + airflow-version: ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] + exclude: + - python-version: "3.11" + airflow-version: "2.3" + - python-version: "3.11" + airflow-version: "2.4" + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .nox + key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup + DATABRICKS_UNIQUE_ID="${{github.run_id}}_${{matrix.python-version}}_${{ matrix.airflow-version }}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} + DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} + DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} + DATABRICKS_CLUSTER_ID: 
${{ secrets.DATABRICKS_CLUSTER_ID }} + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + Run-Integration-Tests-Expensive: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + airflow-version: ["2.6"] + + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .nox + key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup + DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} + DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} + DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} + DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} + DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + + Run-Integration-Tests-Sqlite: + needs: Authorize + runs-on: ubuntu-latest + strategy: + 
matrix: + python-version: ["3.11"] + airflow-version: ["2.7"] + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .nox + key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} + DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} + DATABRICKS_WAREHOUSE_ID: ${{ secrets.DATABRICKS_WAREHOUSE_ID }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH Run-Integration-Tests-DBT-1-5-4: needs: Authorize @@ -353,73 +353,73 @@ jobs: AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# Run-Performance-Tests: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.7"] -# num-models: [1, 10, 50, 100] -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .nox -# key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install 
hatch
-#          hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze
-#
-#      - name: Run performance tests against against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }}
-#        id: run-performance-tests
-#        run: |
-#          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup
-#          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance
-#
-#          # read the performance results and set them as an env var for the next step
-#          # format: NUM_MODELS={num_models}\nTIME={end - start}\n
-#          cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY
-#        env:
-#          AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/
-#          AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres
-#          AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0
-#          PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH
-#          COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }}
-#          POSTGRES_HOST: localhost
-#          POSTGRES_USER: postgres
-#          POSTGRES_PASSWORD: postgres
-#          POSTGRES_DB: postgres
-#          POSTGRES_SCHEMA: public
-#          POSTGRES_PORT: 5432
-#          MODEL_COUNT: ${{ matrix.num-models }}
-#    env:
-#      AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/
-#      AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres
-#      PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH
+  Run-Performance-Tests:
+    needs: Authorize
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.11"]
+        airflow-version: ["2.7"]
+        num-models: [1, 10, 50, 100]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: postgres
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.head.sha || github.ref }}
+      - uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/pip
+            .nox
+          key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }}
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install packages and dependencies
+        run: |
+          python -m pip install hatch
+          hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze
+
+      - name: Run performance tests against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }}
+        id: run-performance-tests
+        run: |
+          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup
+          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance
+
+          # read the performance results and set them as an env var for the next step
+          # format: NUM_MODELS={num_models}\nTIME={end - start}\n
+          cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY
+        env:
+          AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/
+          AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres
+          AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0
+          PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH
+          COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }}
+          POSTGRES_HOST: localhost
+          POSTGRES_USER: postgres
+          POSTGRES_PASSWORD: postgres
+          POSTGRES_DB: postgres
+          POSTGRES_SCHEMA: public
+          POSTGRES_PORT: 5432
+          MODEL_COUNT: ${{ matrix.num-models }}
+    env:
+      AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/
+      AIRFLOW_CONN_AIRFLOW_DB: postgres://postgres:postgres@0.0.0.0:5432/postgres
+      PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH

   Code-Coverage:
     if: github.event.action != 'labeled'

From 8204d7050d8bb634ee71a38000cbfb76c7f68a47 Mon Sep 17 00:00:00 2001
From: Pankaj Koti
Date: Fri, 10 May 2024 21:34:09 +0530
Subject: [PATCH 11/13] Run airflow db init in the dbt-1.5.4 integration test script

---
 scripts/test/integration-dbt-1-5-4.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/scripts/test/integration-dbt-1-5-4.sh b/scripts/test/integration-dbt-1-5-4.sh
index d50013224..087533082 100644
--- a/scripts/test/integration-dbt-1-5-4.sh
+++ b/scripts/test/integration-dbt-1-5-4.sh
@@ -1,5 +1,7 @@
 pip uninstall dbt-adapters dbt-common dbt-core dbt-extractor dbt-postgres dbt-semantic-interfaces -y
 pip install dbt-postgres==1.5.4 dbt-databricks==1.5.4
+rm -rf airflow.*; \
+airflow db init; \
 pytest -vv \
     --cov=cosmos \
     --cov-report=term-missing \

From 54d4674fb92fc86c476e36d4250904924263a5d0 Mon Sep 17 00:00:00 2001
From: Pankaj Koti
Date: Fri, 10 May 2024 21:40:05 +0530
Subject: [PATCH 12/13] Run postgres service for the dbt-1.5.4 integration test script

---
 .github/workflows/test.yml | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 63d3abade..abb802b86 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -299,6 +299,18 @@ jobs:
       matrix:
         python-version: [ "3.11" ]
         airflow-version: [ "2.7" ]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: postgres
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432

     steps:
       - uses: actions/checkout@v3

From b00c50f262be165be721090a40169fd7630b745f Mon Sep 17 00:00:00 2001
From: Tatiana Al-Chueyr
Date: Fri, 10 May 2024 17:57:30 +0100
Subject: [PATCH 13/13] Update .github/workflows/test.yml

Co-authored-by: Pankaj Koti
---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index abb802b86..dc0cfd055 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -2,7 +2,7 @@ name: test

 on:
   push: # Run on pushes to the default branch
-    branches: [main, correct-project-dir-partial-parse-file]
+    branches: [main]
   pull_request_target: # Also run on pull requests originated from forks
     branches: [main]