diff --git a/.changes/unreleased/Breaking Changes-20241016-175527.yaml b/.changes/unreleased/Breaking Changes-20241016-175527.yaml
new file mode 100644
index 00000000..d1e4df8f
--- /dev/null
+++ b/.changes/unreleased/Breaking Changes-20241016-175527.yaml
@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Drop support for Python 3.8
+time: 2024-10-16T17:55:27.844499-04:00
+custom:
+  Author: mikealfare
+  Issue: "161"
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 374908f4..0e5f4127 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -82,7 +82,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.9", "3.10", "3.11", "3.12"]

     services:
       postgres:
@@ -148,7 +148,7 @@
       fail-fast: false
       matrix:
         platform: [ubuntu-22.04, macos-12]
-        python-version: ["3.8", "3.12"]
+        python-version: ["3.9", "3.12"]
     steps:
       - name: "Check out repository"
        uses: actions/checkout@v4
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index afb88136..5805c982 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -23,7 +23,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.9", "3.10", "3.11", "3.12"]

     steps:
       - name: Check out repository
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0bd01f7c..688eb244 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -23,7 +23,6 @@ repos:
     -   id: black
         args:
         -   --line-length=99
-        -   --target-version=py38
         -   --target-version=py39
         -   --target-version=py310
         -   --target-version=py311
diff --git a/docker/Dockerfile b/docker/Dockerfile
index b6a87dfc..cfbc81ae 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,7 +1,7 @@
 # this image gets published to GHCR for production use
 ARG py_version=3.12.4

-FROM python:$py_version-slim-bullseye as base
+FROM python:$py_version-slim-bullseye AS base

 RUN apt-get update \
     && apt-get dist-upgrade -y \
@@ -25,7 +25,7 @@ ENV LANG=C.UTF-8

 RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir

-FROM base as dbt-postgres
+FROM base AS dbt-postgres

 ARG commit_ref=main
diff --git a/docker/README.md b/docker/README.md
index 22af3fe9..f571eebd 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -24,20 +24,19 @@ docker build --tag \
 ```

 ### Examples:
-To build an image named "my-dbt" that supports Snowflake using the latest releases:
+To build an image named "my-dbt" that supports Postgres using the latest releases:
 ```shell
-cd dbt-core/docker
 docker build --tag my-dbt --target dbt-postgres .
 ```

-To build an image named "my-other-dbt" that supports Snowflake using the adapter version 1.0.0b1:
+To build an image named "my-other-dbt" that supports Postgres using the adapter version 1.8.0:
 ```shell
 cd dbt-core/docker
 docker build \
   --tag my-other-dbt \
   --target dbt-postgres \
-  --build-arg commit_ref=v1.0.0b1 \
-  .
+  --build-arg commit_ref=v1.8.0 \
+  .
 ```

 ## Running an image in a container:
diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile
index e137ff84..280ff39f 100644
--- a/docker/dev.Dockerfile
+++ b/docker/dev.Dockerfile
@@ -1,47 +1,47 @@
 # this image does not get published, it is intended for local development only, see `Makefile` for usage
-FROM ubuntu:24.04 as base
+FROM ubuntu:24.04 AS base

 # prevent python installation from asking for time zone region
 ARG DEBIAN_FRONTEND=noninteractive

 # add python repository
 RUN apt-get update \
-    && apt-get install -y software-properties-common=0.99.22.9 \
-    && add-apt-repository -y ppa:deadsnakes/ppa \
-    && apt-get clean \
-    && rm -rf \
-        /var/lib/apt/lists/* \
-        /tmp/* \
-        /var/tmp/*
+    && apt-get install -y software-properties-common=0.99.48 \
+    && add-apt-repository -y ppa:deadsnakes/ppa \
+    && apt-get clean \
+    && rm -rf \
+        /var/lib/apt/lists/* \
+        /tmp/* \
+        /var/tmp/*

 # install python
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends \
-        build-essential=12.9ubuntu3 \
-        git-all=1:2.34.1-1ubuntu1.10 \
-        libpq-dev=14.11-0ubuntu0.22.04.1 \
-        python3.8=3.8.19-1+jammy1 \
-        python3.8-dev=3.8.19-1+jammy1 \
-        python3.8-distutils=3.8.19-1+jammy1 \
-        python3.8-venv=3.8.19-1+jammy1 \
-        python3-pip=22.0.2+dfsg-1ubuntu0.4 \
-        python3-wheel=0.37.1-2ubuntu0.22.04.1 \
-    && apt-get clean \
-    && rm -rf \
-        /var/lib/apt/lists/* \
-        /tmp/* \
-        /var/tmp/*
+    && apt-get install -y --no-install-recommends \
+        build-essential=12.10ubuntu1 \
+        git-all=1:2.43.0-1ubuntu7.1 \
+        libpq-dev=16.4-0ubuntu0.24.04.2 \
+        python3.9=3.9.20-1+noble1 \
+        python3.9-dev=3.9.20-1+noble1 \
+        python3.9-distutils=3.9.20-1+noble1 \
+        python3.9-venv=3.9.20-1+noble1 \
+        python3-pip=24.0+dfsg-1ubuntu1 \
+        python3-wheel=0.42.0-2 \
+    && apt-get clean \
+    && rm -rf \
+        /var/lib/apt/lists/* \
+        /tmp/* \
+        /var/tmp/*

 # update the default system interpreter to the newly installed version
-RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1
+RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.9 1

 # install python dependencies
-RUN python3 -m pip install --upgrade --no-cache-dir "hatch==1.9.1"
+RUN python -m pip install --upgrade "hatch==1.13.0" --no-cache-dir --compile

-FROM base as dbt-postgres-dev
+FROM base AS dbt-postgres-dev

-HEALTHCHECK CMD python3 --version || exit 1
+HEALTHCHECK CMD python --version || exit 1

 # send stdout/stderr to terminal
 ENV PYTHONUNBUFFERED=1
@@ -50,5 +50,5 @@ ENV PYTHONUNBUFFERED=1
 WORKDIR /opt/code
 VOLUME /opt/code

-# create a virtual environment
-RUN python3 -m venv /opt/venv
+# setup hatch virtual envs
+RUN hatch config set dirs.env.virtual ".hatch"
diff --git a/pyproject.toml b/pyproject.toml
index 450be743..bb4a65ac 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ name = "dbt-postgres"
 description = "The set of adapter protocols and base functionality that supports integration with dbt-core"
 readme = "README.md"
 keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "postgres"]
-requires-python = ">=3.8.0"
+requires-python = ">=3.9.0"
 authors = [
     { name = "dbt Labs", email = "info@dbtlabs.com" },
 ]
@@ -17,7 +17,6 @@ classifiers = [
     "Operating System :: MacOS :: MacOS X",
     "Operating System :: Microsoft :: Windows",
     "Operating System :: POSIX :: Linux",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
@@ -58,8 +57,7 @@ dependencies = [
"dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@qmalcolm--330-always-ensure-valid-materialization#subdirectory=dbt-tests-adapter", "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", - 'pre-commit==3.7.0;python_version>="3.9"', - 'pre-commit==3.5.0;python_version=="3.8"', + "pre-commit==3.7.0", "freezegun", "pytest", "pytest-dotenv", diff --git a/tests/functional/logging/test_logging.py b/tests/functional/logging/test_logging.py deleted file mode 100644 index a7e226eb..00000000 --- a/tests/functional/logging/test_logging.py +++ /dev/null @@ -1,98 +0,0 @@ -import json -import os - -from dbt.events.types import InvalidOptionYAML -from dbt.tests.util import get_manifest, run_dbt, read_file -from dbt_common.events.functions import fire_event -import pytest - - -@pytest.fixture(scope="class") -def models(): - return {"my_model.sql": "select 1 as fun"} - - -# This test checks that various events contain node_info, -# which is supplied by the log_contextvars context manager -def test_basic(project, logs_dir): - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - assert "model.test.my_model" in manifest.nodes - - # get log file - log_file = read_file(logs_dir, "dbt.log") - assert log_file - node_start = False - node_finished = False - connection_reused_data = [] - for log_line in log_file.split("\n"): - # skip empty lines - if len(log_line) == 0: - continue - # The adapter logging also shows up, so skip non-json lines - if "[debug]" in log_line: - continue - log_dct = json.loads(log_line) - log_data = log_dct["data"] - log_event = log_dct["info"]["name"] - if log_event == "ConnectionReused": - connection_reused_data.append(log_data) - if log_event == "NodeStart": - node_start = True - if log_event == "NodeFinished": - node_finished = True - assert log_data["run_result"]["adapter_response"] - if node_start and not node_finished: - if log_event == "NodeExecuting": - assert "node_info" in log_data - if log_event == "JinjaLogDebug": - assert "node_info" in log_data - if log_event == "SQLQuery": - assert "node_info" in log_data - if log_event == "TimingInfoCollected": - assert "node_info" in log_data - assert "timing_info" in log_data - - # windows doesn't have the same thread/connection flow so the ConnectionReused - # events don't show up - if os.name != "nt": - # Verify the ConnectionReused event occurs and has the right data - assert connection_reused_data - for data in connection_reused_data: - assert "conn_name" in data and data["conn_name"] - assert "orig_conn_name" in data and data["orig_conn_name"] - - -def test_formatted_logs(project, logs_dir): - # a basic run of dbt with a single model should have 5 `Formatting` events in the json logs - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - - # get log file - json_log_file = read_file(logs_dir, "dbt.log") - formatted_json_lines = 0 - for log_line in json_log_file.split("\n"): - # skip the empty line at the end - if len(log_line) == 0: - continue - log_dct = json.loads(log_line) - log_event = log_dct["info"]["name"] - if log_event == "Formatting": - formatted_json_lines += 1 - - assert formatted_json_lines == 5 - - -def test_invalid_event_value(project, logs_dir): - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - with pytest.raises(Exception): - # This should raise because positional arguments are provided to 
the event - fire_event(InvalidOptionYAML("testing")) - - # Provide invalid type to "option_name" - with pytest.raises(Exception) as excinfo: - fire_event(InvalidOptionYAML(option_name=1)) - - assert str(excinfo.value) == "[InvalidOptionYAML]: Unable to parse dict {'option_name': 1}" diff --git a/tests/functional/logging/test_meta_logging.py b/tests/functional/logging/test_meta_logging.py deleted file mode 100644 index 7c535bce..00000000 --- a/tests/functional/logging/test_meta_logging.py +++ /dev/null @@ -1,46 +0,0 @@ -import json - -from dbt.tests.util import read_file, run_dbt -import pytest - - -model1 = "select 1 as fun" -model2 = '{{ config(meta={"owners": ["team1", "team2"]})}} select 1 as fun' -model3 = '{{ config(meta={"key": 1})}} select 1 as fun' - - -@pytest.fixture(scope="class") # noqa -def models(): - return {"model1.sql": model1, "model2.sql": model2, "model3.sql": model3} - - -# This test checks that various events contain node_info, -# which is supplied by the log_contextvars context manager -def test_meta(project, logs_dir): - run_dbt(["--log-format=json", "run"]) - - # get log file - log_file = read_file(logs_dir, "dbt.log") - assert log_file - - for log_line in log_file.split("\n"): - # skip empty lines - if len(log_line) == 0: - continue - # The adapter logging also shows up, so skip non-json lines - if "[debug]" in log_line: - continue - - log_dct = json.loads(log_line) - if "node_info" not in log_dct["data"]: - continue - - print(f"--- log_dct: {log_dct}") - node_info = log_dct["data"]["node_info"] - node_path = node_info["node_path"] - if node_path == "model1.sql": - assert node_info["meta"] == {} - elif node_path == "model2.sql": - assert node_info["meta"] == {"owners": ["team1", "team2"]} - elif node_path == "model3.sql": - assert node_info["meta"] == {"key": 1}
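
Since `requires-python` moves from `">=3.8.0"` to `">=3.9.0"` above, pip will skip this release when resolving under a 3.8 interpreter rather than failing later at import time. As a minimal sketch (a hypothetical helper script, not part of this change set), a consumer or CI job could assert the new floor up front:

```python
# check_python_floor.py -- hypothetical pre-install guard; the (3, 9) floor
# mirrors the requires-python = ">=3.9.0" bump in pyproject.toml above.
import sys

MIN_PYTHON = (3, 9)

if sys.version_info[:2] < MIN_PYTHON:
    # exit non-zero with a readable message so CI surfaces the mismatch early
    raise SystemExit(
        f"dbt-postgres now requires Python {MIN_PYTHON[0]}.{MIN_PYTHON[1]}+; "
        f"found {sys.version_info[0]}.{sys.version_info[1]}"
    )
print("interpreter satisfies the new floor")
```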