diff --git a/.changes/unreleased/Dependencies-20221209-233905.yaml b/.changes/unreleased/Dependencies-20221209-233905.yaml
new file mode 100644
index 000000000..31b06b6ac
--- /dev/null
+++ b/.changes/unreleased/Dependencies-20221209-233905.yaml
@@ -0,0 +1,7 @@
+kind: Dependencies
+body: Add support for python 3.11
+time: 2022-12-09T23:39:05.296196-05:00
+custom:
+  Author: mikealfare
+  Issue: "225"
+  PR: "236"
diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js
index 252bf171f..9e7698ef5 100644
--- a/.github/scripts/integration-test-matrix.js
+++ b/.github/scripts/integration-test-matrix.js
@@ -1,6 +1,6 @@
 module.exports = ({ context }) => {
   const defaultPythonVersion = "3.8";
-  const supportedPythonVersions = ["3.7", "3.8", "3.9", "3.10"];
+  const supportedPythonVersions = ["3.7", "3.8", "3.9", "3.10", "3.11"];
   const supportedAdapters = ["redshift"];

   // if PR, generate matrix based on files changed and PR labels
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8069e7772..909766cc1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -72,7 +72,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.7', '3.8', '3.9', '3.10']
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']

     env:
       TOXENV: "unit"
@@ -174,7 +174,7 @@
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ['3.7', '3.8', '3.9', '3.10']
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']

     steps:
       - name: Set up Python ${{ matrix.python-version }}
diff --git a/Makefile b/Makefile
index c24e63d92..924c6a26f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,9 +1,9 @@
 .DEFAULT_GOAL:=help

 .PHONY: dev
-dev: ## Installs adapter in develop mode along with development depedencies
+dev: ## Installs adapter in develop mode along with development dependencies
 	@\
-	pip install -r dev-requirements.txt && pre-commit install
+	pip install -e . -r dev-requirements.txt && pre-commit install

 .PHONY: mypy
 mypy: ## Runs mypy against staged changes for static type checking.
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 22d4ad0bb..6eac3d2b4 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -4,22 +4,22 @@
 git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
 git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter
 git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-postgres&subdirectory=plugins/postgres
-black==22.8.0
+black~=22.8.0
 click~=8.1.3
-bumpversion
+bumpversion~=0.6.0
 flake8
-flaky
-freezegun==0.3.12
-ipdb
-mypy==0.971
-pip-tools
-pre-commit
-pytest
-pytest-dotenv
-pytest-logbook
-pytest-csv
-pytest-xdist
-pytz
-tox>=3.13
-twine
-wheel
+flaky~=3.7.0
+freezegun~=0.3.12
+ipdb~=0.13.9
+mypy~=0.971.0
+pip-tools~=6.11.0
+pre-commit~=2.20.0
+pytest~=7.2.0
+pytest-dotenv~=0.5.2
+pytest-logbook~=1.2.0
+pytest-csv~=3.0.0
+pytest-xdist~=3.1.0
+pytz~=2022.6.0
+tox~=4.0.0
+twine~=4.0.2
+wheel~=0.37.1
diff --git a/setup.py b/setup.py
index 4f7ef3822..6f82944de 100644
--- a/setup.py
+++ b/setup.py
@@ -1,64 +1,79 @@
 #!/usr/bin/env python
-import os
 import sys
-import re

-# require python 3.7 or newer
 if sys.version_info < (3, 7):
     print("Error: dbt does not support this version of Python.")
     print("Please upgrade to Python 3.7 or higher.")
     sys.exit(1)

-# require version of setuptools that supports find_namespace_packages
-from setuptools import setup
-
 try:
     from setuptools import find_namespace_packages
 except ImportError:
-    # the user has a downlevel version of setuptools.
     print("Error: dbt requires setuptools v40.1.0 or higher.")
-    print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
+    print('Please upgrade setuptools with "pip install --upgrade setuptools" and try again')
     sys.exit(1)

-# pull long description from README
-this_directory = os.path.abspath(os.path.dirname(__file__))
-with open(os.path.join(this_directory, "README.md")) as f:
-    long_description = f.read()
+from pathlib import Path
+from setuptools import setup
+
+
+# pull the long description from the README
+README = Path(__file__).parent / "README.md"
+
+
+# used for this adapter's version and in determining the compatible dbt-core version
+VERSION = Path(__file__).parent / "dbt/adapters/redshift/__version__.py"
+
+
+def _plugin_version() -> str:
+    """
+    Pull the package version from the main package version file
+    """
+    attributes = {}
+    exec(VERSION.read_text(), attributes)
+    return attributes["version"]
+
+
+def _core_patch(plugin_patch: str):
+    """
+    Determines the compatible dbt-core patch given this plugin's patch
+
+    Args:
+        plugin_patch: the version patch of this plugin
+    """
+    pre_release_phase = "".join([i for i in plugin_patch if not i.isdigit()])
+    if pre_release_phase:
+        if pre_release_phase not in ["a", "b", "rc"]:
+            raise ValueError(f"Invalid prerelease patch: {plugin_patch}")
+        return f"0{pre_release_phase}1"
+    return "0"


-# get this package's version from dbt/adapters//__version__.py
-def _get_plugin_version_dict():
-    _version_path = os.path.join(this_directory, "dbt", "adapters", "redshift", "__version__.py")
-    _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
-    _pre = r"""((?P<prekind>a|b|rc)(?P<pre>\d+))?"""
-    _version_pattern = fr"""version\s*=\s*["']{_semver}{_pre}["']"""
-    with open(_version_path) as f:
-        match = re.search(_version_pattern, f.read().strip())
-        if match is None:
-            raise ValueError(f"invalid version at {_version_path}")
-        return match.groupdict()
+# require a compatible minor version (~=) and prerelease if this is a prerelease
+def _core_version(plugin_version: str = _plugin_version()) -> str:
+    """
+    Determine the compatible dbt-core version given this plugin's version.
 
+    We assume that the plugin must agree with `dbt-core` down to the minor version.
 
-# require a compatible minor version (~=), prerelease if this is a prerelease
-def _get_dbt_core_version():
-    parts = _get_plugin_version_dict()
-    minor = "{major}.{minor}.0".format(**parts)
-    pre = parts["prekind"] + "1" if parts["prekind"] else ""
-    return f"{minor}{pre}"
+    Args:
+        plugin_version: the version of this plugin; this is an argument in case we ever want to unit test this
+    """
+    try:
+        major, minor, plugin_patch = plugin_version.split(".")
+    except ValueError:
+        raise ValueError(f"Invalid version: {plugin_version}")
 
+    return f"{major}.{minor}.{_core_patch(plugin_patch)}"
 
-package_name = "dbt-redshift"
-package_version = "1.4.0b1"
-dbt_core_version = _get_dbt_core_version()
-description = """The Redshift adapter plugin for dbt"""
 
 setup(
-    name=package_name,
-    version=package_version,
-    description=description,
-    long_description=long_description,
+    name="dbt-redshift",
+    version=_plugin_version(),
+    description="The Redshift adapter plugin for dbt",
+    long_description=README.read_text(),
     long_description_content_type="text/markdown",
     author="dbt Labs",
     author_email="info@dbtlabs.com",
@@ -66,10 +81,9 @@ def _get_dbt_core_version():
     packages=find_namespace_packages(include=["dbt", "dbt.*"]),
     include_package_data=True,
     install_requires=[
-        "dbt-core~={}".format(dbt_core_version),
-        "dbt-postgres~={}".format(dbt_core_version),
-        # the following are all to match snowflake-connector-python
-        "boto3>=1.4.4,<2.0.0",
+        f"dbt-core~={_core_version()}",
+        f"dbt-postgres~={_core_version()}",
+        "boto3~=1.26.26",
     ],
     zip_safe=False,
     classifiers=[
@@ -82,6 +96,7 @@ def _get_dbt_core_version():
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
     ],
     python_requires=">=3.7",
 )
diff --git a/tests/conftest.py b/tests/conftest.py
index 3719e4573..18fcbb714 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,11 +1,12 @@
 import pytest
 import os
 
-# Import the fuctional fixtures as a plugin
+# Import the functional fixtures as a plugin
 # Note: fixtures with session scope need to be local
 
 pytest_plugins = ["dbt.tests.fixtures.project"]
 
+
 # The profile dictionary, used to write out profiles.yml
 @pytest.fixture(scope="class")
 def dbt_profile_target():
@@ -19,5 +20,3 @@ def dbt_profile_target():
         'pass': os.getenv('REDSHIFT_TEST_PASS'),
         'dbname': os.getenv('REDSHIFT_TEST_DBNAME'),
     }
-
-
diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py
index ba8e6c380..06cf9948f 100644
--- a/tests/functional/adapter/test_basic.py
+++ b/tests/functional/adapter/test_basic.py
@@ -1,12 +1,9 @@
 import pytest
 
-from dbt.tests.util import AnyStringWith, AnyFloat
-
+from dbt.tests.util import AnyStringWith
 from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations
 from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests
-from dbt.tests.adapter.basic.test_singular_tests_ephemeral import (
-    BaseSingularTestsEphemeral,
-)
+from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral
 from dbt.tests.adapter.basic.test_empty import BaseEmpty
 from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral
 from dbt.tests.adapter.basic.test_incremental import BaseIncremental
@@ -16,12 +13,12 @@
 from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod
 from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate, BaseDocsGenReferences
 from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats, expected_references_catalog
+from dbt.tests.adapter.basic.files import seeds_base_csv, seeds_added_csv, seeds_newcolumns_csv
+
 from tests.functional.adapter.expected_stats import redshift_stats, redshift_ephemeral_summary_stats
 
-from dbt.tests.adapter.basic.files import seeds_base_csv, seeds_added_csv, seeds_newcolumns_csv
 
-# set the datatype of the name column in the 'added' seed so it
-# can hold the '_update' that's added
+# set the datatype of the name column in the 'added' seed so that it can hold the '_update' that's added
 schema_seed_added_yml = """
 version: 2
 seeds:
@@ -32,6 +29,7 @@
 """
 
 
+# TODO: update these with test cases or remove them if not needed
 class TestSimpleMaterializationsRedshift(BaseSimpleMaterializations):
     pass
 
@@ -61,8 +59,7 @@ class TestGenericTestsRedshift(BaseGenericTests):
 
 
 class TestSnapshotCheckColsRedshift(BaseSnapshotCheckCols):
-    # Redshift defines the 'name' column such that it's not big enough
-    # to hold the '_update' added in the test.
+    # Redshift defines the 'name' column such that it's not big enough to hold the '_update' added in the test.
     @pytest.fixture(scope="class")
     def models(self):
         return {
@@ -73,8 +70,7 @@ def models(self):
 
 
 class TestSnapshotTimestampRedshift(BaseSnapshotTimestamp):
-    # Redshift defines the 'name' column such that it's not big enough
-    # to hold the '_update' added in the test.
+    # Redshift defines the 'name' column such that it's not big enough to hold the '_update' added in the test.
     @pytest.fixture(scope="class")
     def models(self):
         return {
@@ -84,11 +80,11 @@ def models(self):
             "seeds.yml": schema_seed_added_yml,
         }
 
+
 class TestBaseAdapterMethod(BaseAdapterMethod):
     pass
 
 
-
 class TestDocsGenerateRedshift(BaseDocsGenerate):
     @pytest.fixture(scope="class")
     def expected_catalog(self, project, profile_user):
@@ -105,6 +101,7 @@ def expected_catalog(self, project, profile_user):
         )
 
 
+# TODO: update this or delete it
 @pytest.mark.skip(reason="Needs updated dbt-core code")
 class TestDocsGenReferencesRedshift(BaseDocsGenReferences):
     @pytest.fixture(scope="class")
@@ -123,4 +120,3 @@ def expected_catalog(self, project, profile_user):
             view_summary_stats=no_stats(),
             ephemeral_summary_stats=redshift_ephemeral_summary_stats(),
         )
-
diff --git a/tests/test_incremental_run_result.py b/tests/test_incremental_run_result.py
deleted file mode 100644
index e53285895..000000000
--- a/tests/test_incremental_run_result.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import pytest
-
-from dbt.tests.adapter.basic.test_incremental import BaseIncrementalNotSchemaChange
-
-class TestBaseIncrementalNotSchemaChange(BaseIncrementalNotSchemaChange):
-    pass
\ No newline at end of file
diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py
index 4cd9a9717..cc2861e4e 100644
--- a/tests/unit/mock_adapter.py
+++ b/tests/unit/mock_adapter.py
@@ -1,8 +1,8 @@
 from unittest import mock
-
-from dbt.adapters.base import BaseAdapter
 from contextlib import contextmanager
 
+from dbt.adapters.base import BaseAdapter, PythonJobHelper
+
 
 def adapter_factory():
     class MockAdapter(BaseAdapter):
diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py
index c5c676ae9..5b975d029 100644
--- a/tests/unit/test_context.py
+++ b/tests/unit/test_context.py
@@ -53,6 +53,7 @@ def setUp(self):
     },
 }
 
+
 PROJECT_DATA = {
     'name': 'root',
     'version': '0.1',
diff --git a/tests/unit/test_redshift_adapter.py b/tests/unit/test_redshift_adapter.py
index 1a21e4d34..33c3dc1aa 100644
--- a/tests/unit/test_redshift_adapter.py
+++ b/tests/unit/test_redshift_adapter.py
@@ -15,7 +15,6 @@
 from .utils import config_from_parts_or_dicts, mock_connection, TestAdapterConversions, inject_adapter
 
 
-@classmethod
 def fetch_cluster_credentials(*args, **kwargs):
     return {
         'DbUser': 'root',
@@ -80,7 +79,11 @@ def test_explicit_iam_conn(self):
             iam_duration_seconds=1200
         )
 
-        with mock.patch.object(RedshiftAdapter.ConnectionManager, 'fetch_cluster_credentials', new=fetch_cluster_credentials):
+        with mock.patch.object(
+                RedshiftAdapter.ConnectionManager,
+                'fetch_cluster_credentials',
+                new=fetch_cluster_credentials
+        ):
             creds = RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
 
         expected_creds = self.config.credentials.replace(password='tmp_password')
@@ -113,7 +116,11 @@ def test_invalid_auth_method(self):
         self.config.credentials.method = 'badmethod'
 
         with self.assertRaises(FailedToConnectException) as context:
-            with mock.patch.object(RedshiftAdapter.ConnectionManager, 'fetch_cluster_credentials', new=fetch_cluster_credentials):
+            with mock.patch.object(
+                    RedshiftAdapter.ConnectionManager,
+                    'fetch_cluster_credentials',
+                    new=fetch_cluster_credentials
+            ):
                 RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
 
         self.assertTrue('badmethod' in context.exception.msg)
@@ -121,7 +128,11 @@ def test_invalid_auth_method(self):
     def test_invalid_iam_no_cluster_id(self):
         self.config.credentials = self.config.credentials.replace(method='iam')
         with self.assertRaises(FailedToConnectException) as context:
-            with mock.patch.object(RedshiftAdapter.ConnectionManager, 'fetch_cluster_credentials', new=fetch_cluster_credentials):
+            with mock.patch.object(
+                    RedshiftAdapter.ConnectionManager,
+                    'fetch_cluster_credentials',
+                    new=fetch_cluster_credentials
+            ):
                 RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
 
         self.assertTrue("'cluster_id' must be provided" in context.exception.msg)
@@ -132,16 +143,23 @@ def test_default_session_is_not_used_when_iam_used(self):
         self.config.credentials.cluster_id = 'clusterid'
         with mock.patch('dbt.adapters.redshift.connections.boto3.Session'):
             RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-            self.assertEqual(boto3.DEFAULT_SESSION.client.call_count, 0,
-                              "The redshift client should not be created using the default session because the session object is not thread-safe")
+            self.assertEqual(
+                boto3.DEFAULT_SESSION.client.call_count,
+                0,
+                "The redshift client should not be created using "
+                "the default session because the session object is not thread-safe"
+            )
 
     def test_default_session_is_not_used_when_iam_not_used(self):
         boto3.DEFAULT_SESSION = Mock()
         self.config.credentials = self.config.credentials.replace(method=None)
         with mock.patch('dbt.adapters.redshift.connections.boto3.Session'):
             RedshiftAdapter.ConnectionManager.get_credentials(self.config.credentials)
-            self.assertEqual(boto3.DEFAULT_SESSION.client.call_count, 0,
-                              "The redshift client should not be created using the default session because the session object is not thread-safe")
+            self.assertEqual(
+                boto3.DEFAULT_SESSION.client.call_count, 0,
+                "The redshift client should not be created using "
+                "the default session because the session object is not thread-safe"
+            )
 
     def test_cancel_open_connections_empty(self):
         self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0)
@@ -176,7 +194,7 @@ def test_default_keepalive(self, psycopg2):
         connection = self.adapter.acquire_connection('dummy')
 
         psycopg2.connect.assert_not_called()
-        connection.handle
+        connection.handle  # this "property" changes the state of the class
         psycopg2.connect.assert_called_once_with(
             dbname='redshift',
             user='root',
@@ -194,7 +212,7 @@ def test_changed_keepalive(self, psycopg2):
         connection = self.adapter.acquire_connection('dummy')
 
         psycopg2.connect.assert_not_called()
-        connection.handle
+        connection.handle  # this "property" changes the state of the class
         psycopg2.connect.assert_called_once_with(
             dbname='redshift',
             user='root',
@@ -211,7 +229,7 @@ def test_search_path(self, psycopg2):
         connection = self.adapter.acquire_connection('dummy')
 
         psycopg2.connect.assert_not_called()
-        connection.handle
+        connection.handle  # this "property" changes the state of the class
         psycopg2.connect.assert_called_once_with(
             dbname='redshift',
             user='root',
@@ -229,7 +247,7 @@ def test_search_path_with_space(self, psycopg2):
         connection = self.adapter.acquire_connection('dummy')
 
         psycopg2.connect.assert_not_called()
-        connection.handle
+        connection.handle  # this "property" changes the state of the class
         psycopg2.connect.assert_called_once_with(
             dbname='redshift',
             user='root',
@@ -247,7 +265,7 @@ def test_set_zero_keepalive(self, psycopg2):
         connection = self.adapter.acquire_connection('dummy')
 
         psycopg2.connect.assert_not_called()
-        connection.handle
+        connection.handle  # this "property" changes the state of the class
         psycopg2.connect.assert_called_once_with(
             dbname='redshift',
             user='root',
@@ -349,7 +367,6 @@ def test_convert_date_type(self):
 
     def test_convert_time_type(self):
         # dbt's default type testers actually don't have a TimeDelta at all.
-        agate.TimeDelta
         rows = [
             ['', '120s', '10s'],
             ['', '3m', '11s'],
@@ -359,9 +376,3 @@ def test_convert_time_type(self):
         expected = ['varchar(24)', 'varchar(24)', 'varchar(24)']
         for col_idx, expect in enumerate(expected):
             assert RedshiftAdapter.convert_time_type(agate_table, col_idx) == expect
-
-
-# convert_boolean_type
-# convert_datetime_type
-# convert_date_type
-# convert_time_type
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
index 7e6159f4a..a2a0147ac 100644
--- a/tests/unit/utils.py
+++ b/tests/unit/utils.py
@@ -116,7 +116,7 @@ def inject_plugin_for(config):
     # from dbt.adapters.postgres import Plugin, PostgresAdapter
     from dbt.adapters.factory import FACTORY
     FACTORY.load_plugin(config.credentials.type)
-    adapter = FACTORY.get_adapter(config)
+    adapter = FACTORY.get_adapter(config)  # TODO: there's a get_adaptor function in factory.py, but no method on AdapterContainer
     return adapter
 
 
@@ -216,7 +216,9 @@ def assert_fails_validation(dct, cls):
 
 
 class TestAdapterConversions(TestCase):
-    def _get_tester_for(self, column_type):
+
+    @staticmethod
+    def _get_tester_for(column_type):
         from dbt.clients import agate_helper
         if column_type is agate.TimeDelta:  # dbt never makes this!
             return agate.TimeDelta()
diff --git a/tox.ini b/tox.ini
index c900733c0..85d20f595 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,8 +1,8 @@
 [tox]
 skipsdist = True
-envlist = py37,py38,py39,py310
+envlist = py37,py38,py39,py310,py311
 
-[testenv:{unit,py37,py38,py39,py310,py}]
+[testenv:{unit,py37,py38,py39,py310,py311,py}]
 description = unit testing
 skip_install = true
 passenv =
@@ -13,7 +13,7 @@ deps =
   -rdev-requirements.txt
   -e.
 
-[testenv:{integration,py37,py38,py39,py310,py}-{redshift}]
+[testenv:{integration,py37,py38,py39,py310,py311,py}-{redshift}]
 description = adapter plugin integration testing
 skip_install = true
 passenv =