From 7b92cc55d9538e925367941697b0bc358021c1d5 Mon Sep 17 00:00:00 2001
From: Mike Alfare
Date: Sat, 7 Dec 2024 18:13:52 -0500
Subject: [PATCH] update to pyproject.toml

---
 .bumpversion.cfg                       | 33 ---------
 .github/workflows/main.yml             | 52 ++--------
 .github/workflows/release-internal.yml | 26 ++------
 .github/workflows/release-prep.yml     | 78 ++++++----------
 .github/workflows/version-bump.yml     | 28 --------
 .pre-commit-config.yaml                |  2 +
 MANIFEST.in                            |  1 -
 Makefile                               | 44 ------------
 dagger/requirements.txt                |  3 -
 dev-requirements.txt                   | 20 ------
 hatch.toml                             | 64 ++++++++++++++
 pyproject.toml                         | 59 +++++++++++
 pytest.ini                             |  9 ---
 requirements.txt                       | 11 ---
 setup.py                               | 92 --------------
 tox.ini                                |  3 -
 16 files changed, 157 insertions(+), 368 deletions(-)
 delete mode 100644 .bumpversion.cfg
 delete mode 100644 .github/workflows/version-bump.yml
 delete mode 100644 MANIFEST.in
 delete mode 100644 Makefile
 delete mode 100644 dagger/requirements.txt
 delete mode 100644 dev-requirements.txt
 create mode 100644 hatch.toml
 create mode 100644 pyproject.toml
 delete mode 100644 pytest.ini
 delete mode 100644 requirements.txt
 delete mode 100644 setup.py
 delete mode 100644 tox.ini

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
deleted file mode 100644
index 40074e83c..000000000
--- a/.bumpversion.cfg
+++ /dev/null
@@ -1,33 +0,0 @@
-[bumpversion]
-current_version = 1.9.0b1
-parse = (?P<major>[\d]+) # major version number
-	\.(?P<minor>[\d]+) # minor version number
-	\.(?P<patch>[\d]+) # patch version number
-	(((?P<prekind>a|b|rc) # optional pre-release type
-	?(?P<num>[\d]+?)) # optional pre-release version number
-	\.?(?P<nightly>[a-z0-9]+\+[a-z]+)? # optional nightly release indicator
-	)? # expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
-serialize =
-	{major}.{minor}.{patch}{prekind}{num}.{nightly}
-	{major}.{minor}.{patch}{prekind}{num}
-	{major}.{minor}.{patch}
-commit = False
-tag = False
-
-[bumpversion:part:prekind]
-first_value = a
-optional_value = final
-values =
-	a
-	b
-	rc
-	final
-
-[bumpversion:part:num]
-first_value = 1
-
-[bumpversion:part:nightly]
-
-[bumpversion:file:setup.py]
-
-[bumpversion:file:dbt/adapters/spark/__version__.py]
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 287e5acb7..458048a7f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -51,20 +51,8 @@ jobs:
         with:
           python-version: '3.9'

-      - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install pre-commit
-          pre-commit --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -c "import dbt.adapters.spark"
-
       - name: Run pre-commit hooks
-        run: pre-commit run --all-files --show-diff-on-failure
+        uses: pre-commit/action@v3.0.1

   unit:
     name: unit test / python ${{ matrix.python-version }}
@@ -87,29 +75,9 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
-
-      - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+        uses: pypa/hatch@install

-      - name: Get current date
-        if: always()
-        id: date
-        run: echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts
-
-      - uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
-          path: unit_results.csv
-          overwrite: true
+      - run: hatch run unit-tests

   build:
     name: build packages
@@ -128,24 +96,16 @@ jobs:
         with:
           python-version: '3.9'

-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
-          python -m pip --version
+      - uses: pypa/hatch@install

       - name: Build distributions
-        run: ./scripts/build-dist.sh
+        run: hatch build

       - name: Show distributions
         run: ls -lh dist/

       - name: Check distribution descriptions
-        run: |
-          twine check dist/*
-
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
+        run: hatch run build:check-all

       - name: Check if this is an alpha version
         id: check-is-alpha
diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml
index 702ef9aea..c467d3fce 100644
--- a/.github/workflows/release-internal.yml
+++ b/.github/workflows/release-internal.yml
@@ -56,17 +56,9 @@ jobs:
           python-version: "${{ env.PYTHON_TARGET_VERSION }}"

       - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
+        uses: pypa/hatch@install

-      - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+      - run: hatch run unit-tests

   run-integration-tests:
     name: "${{ matrix.test }}"
@@ -102,21 +94,11 @@ jobs:
     steps:
       - name: Check out the repository
-        if: github.event_name != 'pull_request_target'
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      # explicitly checkout the branch for the PR,
-      # this is necessary for the `pull_request` event
-      - name: Check out the repository (PR)
-        if: github.event_name == 'pull_request_target'
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-          ref: ${{ github.event.pull_request.head.ref }}

-    # the python version used here is not what is used in the tests themselves
+      # the python version used here is not what is used in the tests themselves
       - name: Set up Python for dagger
         uses: actions/setup-python@v5
         with:
@@ -140,6 +122,6 @@ jobs:
       package_test_command: "${{ inputs.package_test_command }}"
       dbms_name: "spark"
       ref: "${{ inputs.ref }}"
-      skip_tests: "${{ inputs.skip_tests }}"
+      skip_tests: "true"
     secrets: "inherit"
diff --git a/.github/workflows/release-prep.yml b/.github/workflows/release-prep.yml
index d5878ec1e..bffbb54e0 100644
--- a/.github/workflows/release-prep.yml
+++ b/.github/workflows/release-prep.yml
@@ -168,8 +168,8 @@ jobs:
         run: |
           if [[ ${{ steps.set_existence.outputs.exists }} != true ]]
           then
-            title="Spark version-bump.yml check"
-            message="dbt-spark needs version-bump.yml run before running the release. The changelog is not up to date."
+            title="Spark changelog check"
+            message="dbt-spark needs a changelog entry before running the release. The changelog is not up to date."
echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" exit 1 fi @@ -186,7 +186,7 @@ jobs: runs-on: ubuntu-latest outputs: - up_to_date: ${{ steps.version-check.outputs.up_to_date }} + up_to_date: ${{ steps.version.outputs.is-current }} steps: - name: "Checkout ${{ github.repository }} Commit ${{ inputs.sha }}" @@ -194,20 +194,24 @@ jobs: with: ref: ${{ inputs.sha }} + - uses: pypa/hatch@install + - name: "Check Current Version In Code" - id: version-check + id: version run: | is_updated=false - if grep -Fxq "current_version = ${{ inputs.version_number }}" .bumpversion.cfg + is_current=false + current_version=$(hatch version) + if test "$current_version" = "${{ inputs.version }}" then - is_updated=true + is_current=true fi - echo "up_to_date=$is_updated" >> $GITHUB_OUTPUT + echo "is-current=$is_current" >> $GITHUB_OUTPUT - name: "[Notification] Check Current Version In Code" run: | title="Version check" - if [[ ${{ steps.version-check.outputs.up_to_date }} == true ]] + if [[ ${{ steps.version.outputs.is-current }} == true ]] then message="The version in the codebase is equal to the provided version" else @@ -218,7 +222,7 @@ jobs: - name: "Spark safety check" if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }} run: | - if [[ ${{ steps.version-check.outputs.up_to_date }} != true ]] + if [[ ${{ steps.version.outputs.is-current }} != true ]] then title="Spark version-bump.yml check" message="dbt-spark needs version-bump.yml run before running the release. The version bump is not up to date." @@ -228,7 +232,7 @@ jobs: - name: "[DEBUG] Print Outputs" run: | - echo up_to_date: ${{ steps.version-check.outputs.up_to_date }} + echo up_to_date: ${{ steps.version.outputs.is-current }} skip-generate-changelog: runs-on: ubuntu-latest @@ -383,26 +387,10 @@ jobs: with: python-version: ${{ env.PYTHON_TARGET_VERSION }} - - name: "Install Python Dependencies" - if: needs.audit-version-in-code.outputs.up_to_date == 'false' - run: | - python3 -m venv env - source env/bin/activate - python -m pip install --upgrade pip + - uses: pypa/hatch@install - name: "Bump Version To ${{ inputs.version_number }}" - if: needs.audit-version-in-code.outputs.up_to_date == 'false' - # note: bumpversion is no longer supported, it actually points to bump2version now - run: | - source env/bin/activate - if [ -f "editable-requirements.txt" ] - then - python -m pip install -r dev-requirements.txt -r editable-requirements.txt - else - python -m pip install -r dev-requirements.txt - fi - env/bin/bumpversion --allow-dirty --new-version ${{ inputs.version_number }} major - git status + run: hatch version ${{ inputs.version_number }} - name: "[Notification] Bump Version To ${{ inputs.version_number }}" if: needs.audit-version-in-code.outputs.up_to_date == 'false' @@ -415,14 +403,14 @@ jobs: - name: "Remove Trailing Whitespace Via Pre-commit" continue-on-error: true run: | - pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/* + pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* git status # this step will fail on newline errors but also correct them - name: "Removing Extra Newlines Via Pre-commit" continue-on-error: true run: | - pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/* + pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* git status - name: "Commit & Push Changes" @@ -459,18 +447,10 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install python dependencies - run: | - sudo apt-get update - 
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
+      - uses: pypa/hatch@install

       - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+        run: hatch run unit-tests

   run-integration-tests:
     name: ${{ matrix.test }}
@@ -505,34 +485,20 @@ jobs:
     steps:
       - name: Check out the repository
-        if: github.event_name != 'pull_request_target'
         uses: actions/checkout@v4
         with:
           persist-credentials: false

-      # explicitly checkout the branch for the PR,
-      # this is necessary for the `pull_request` event
-      - name: Check out the repository (PR)
-        if: github.event_name == 'pull_request_target'
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.pull_request.head.sha }}

       # the python version used here is not what is used in the tests themselves
       - name: Set up Python for dagger
         uses: actions/setup-python@v5
         with:
           python-version: "3.11"

-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r dagger/requirements.txt
+      - uses: pypa/hatch@install

       - name: Run tests for ${{ matrix.test }}
-        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
+        run: hatch run python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}

   merge-changes-into-target-branch:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml
deleted file mode 100644
index bde34d683..000000000
--- a/.github/workflows/version-bump.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-# **what?**
-# This workflow will take the new version number to bump to. With that
-# it will run versionbump to update the version number everywhere in the
-# code base and then run changie to create the corresponding changelog.
-# A PR will be created with the changes that can be reviewed before committing.
-
-# **why?**
-# This is to aid in releasing dbt and making sure we have updated
-# the version in all places and generated the changelog.
-
-# **when?**
-# This is triggered manually
-
-name: Version Bump
-
-on:
-  workflow_dispatch:
-    inputs:
-      version_number:
-        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
-        required: true
-
-jobs:
-  version_bump_and_changie:
-    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
-    with:
-      version_number: ${{ inputs.version_number }}
-    secrets: inherit # ok since what we are calling is internally maintained
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6697bbeb5..1a34810bf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -54,5 +54,7 @@ repos:
         - --pretty
         files: ^dbt/adapters
         additional_dependencies:
+          - types-PyYAML
+          - types-python-dateutil
           - types-pytz
           - types-requests
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index cfbc714ed..000000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1 +0,0 @@
-recursive-include dbt/include *.sql *.yml *.md
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 46b9af294..000000000
--- a/Makefile
+++ /dev/null
@@ -1,44 +0,0 @@
-.DEFAULT_GOAL:=help
-
-.PHONY: dev
-dev: ## Installs adapter in develop mode along with development dependencies
-	@\
-	pip install -e . -r requirements.txt -r dev-requirements.txt -r dagger/requirements.txt && pre-commit install
-
-.PHONY: dev-uninstall
-dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment
-	## Useful when updating versions, or if you accidentally installed into the system interpreter
-	pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y
-	pip uninstall -y dbt-spark
-
-.PHONY: lint
-lint: ## Runs flake8 and mypy code checks against staged changes.
-	@\
-	pre-commit run --all-files
-
-.PHONY: unit
-unit: ## Runs unit tests with py39.
-	@\
-	python -m pytest tests/unit
-
-.PHONY: test
-test: ## Runs unit tests with py39 and code checks against staged changes.
-	@\
-	python -m pytest tests/unit; \
-	python dagger/run_dbt_spark_tests.py --profile spark_session \
-	pre-commit run --all-files
-
-.PHONY: clean
-	@echo "cleaning repo"
-	@git clean -f -X
-
-.PHONY: help
-help: ## Show this help message.
-	@echo 'usage: make [target]'
-	@echo
-	@echo 'targets:'
-	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
-
-.PHONY: docker-prod
-docker-prod:
-	docker build -f docker/Dockerfile -t dbt-spark .
diff --git a/dagger/requirements.txt b/dagger/requirements.txt
deleted file mode 100644
index f150e3093..000000000
--- a/dagger/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-beartype<0.18.0
-dagger-io~=0.9.7
-python-dotenv
diff --git a/dev-requirements.txt b/dev-requirements.txt
deleted file mode 100644
index 3947695c7..000000000
--- a/dev-requirements.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# install latest changes in dbt-core
-# TODO: how to automate switching from develop to version branches?
-git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
-git+https://github.com/dbt-labs/dbt-common.git
-git+https://github.com/dbt-labs/dbt-adapters.git
-git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter
-
-# dev
-ipdb~=0.13.13
-pre-commit~=3.7.0
-pytest~=7.4
-pytest-csv~=3.0
-pytest-dotenv~=0.5.2
-pytest-logbook~=1.2
-pytest-xdist~=3.6
-
-# build
-bumpversion~=0.6.0
-twine~=4.0
-wheel~=0.43
diff --git a/hatch.toml b/hatch.toml
new file mode 100644
index 000000000..37ba0fce4
--- /dev/null
+++ b/hatch.toml
@@ -0,0 +1,64 @@
+[version]
+path = "dbt/adapters/spark/__version__.py"
+
+[build.targets.sdist]
+packages = ["dbt"]
+
+[build.targets.wheel]
+packages = ["dbt"]
+
+[envs.default]
+dependencies = [
+    "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git",
+    "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
+    "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
+    "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
+    "beartype<0.18.0",
+    "dagger-io~=0.9.7",
+    "ddtrace==2.3.0",
+    "ipdb~=0.13.13",
+    "pre-commit==3.7.0",
+    "freezegun",
+    "pytest>=7.0,<8.0",
+    "pytest-csv~=3.0",
+    "pytest-dotenv",
+    "pytest-logbook~=1.2",
+    "pytest-mock",
+    "pytest-xdist",
+]
+
+[envs.default.scripts]
+setup = "pre-commit install"
+code-quality = "pre-commit run --all-files"
+unit-tests = "python -m pytest {args:tests/unit}"
+integration-tests = "python -m pytest {args:tests/functional}"
+docker-dev = [
+    "docker build -f docker/dev.Dockerfile -t dbt-spark-dev .",
+    "docker run --rm -it --name dbt-spark-dev -v $(pwd):/opt/code dbt-spark-dev",
+]
+docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
+
+[envs.build]
+detached = true
+dependencies = [
+    "wheel",
+    "twine",
+    "check-wheel-contents",
+]
+
+[envs.build.scripts]
+check-all = [
+    "- check-wheel",
+    "- check-sdist",
+]
+check-wheel = [
+    "twine check dist/*",
+    "find ./dist/dbt_spark-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-spark",
+]
+check-sdist = [
+    "check-wheel-contents dist/*.whl --ignore W007,W008",
+    "find ./dist/dbt_spark-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-spark",
+]
+docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..481cc9e3f
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,59 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+dynamic = ["version"]
+name = "dbt-spark"
+description = "The Apache Spark adapter plugin for dbt"
+readme = "README.md"
+keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "spark"]
+requires-python = ">=3.9.0"
+authors = [{ name = "dbt Labs", email = "info@dbtlabs.com" }]
+maintainers = [{ name = "dbt Labs", email = "info@dbtlabs.com" }]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: MacOS :: MacOS X",
+    "Operating System :: Microsoft :: Windows",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+]
+dependencies = [
+    "sqlparams>=3.0.0",
+    "dbt-common>=1.10,<2.0",
+    "dbt-adapters>=1.7,<2.0",
+    # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
+    "dbt-core>=1.8.0",
+]
+
+[project.optional-dependencies]
+ODBC = ["pyodbc~=5.1.0"]
+PyHive = [
+    "PyHive[hive_pure_sasl]~=0.7.0",
+    "thrift>=0.11.0,<0.17.0",
+]
+session = ["pyspark>=3.0.0,<4.0.0"]
+all = [
+    "pyodbc~=5.1.0",
+    "PyHive[hive_pure_sasl]~=0.7.0",
+    "thrift>=0.11.0,<0.17.0",
+    "pyspark>=3.0.0,<4.0.0",
+]
+
+[project.urls]
+Homepage = "https://github.com/dbt-labs/dbt-spark"
+Documentation = "https://docs.getdbt.com"
+Repository = "https://github.com/dbt-labs/dbt-spark.git"
+Issues = "https://github.com/dbt-labs/dbt-spark/issues"
+Changelog = "https://github.com/dbt-labs/dbt-spark/blob/main/CHANGELOG.md"
+
+[tool.pytest.ini_options]
+testpaths = ["tests/functional", "tests/unit"]
+env_files = ["test.env"]
+addopts = "-v --color=yes -n auto"
+filterwarnings = [
+    "ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning",
+    "ignore:unclosed file .*:ResourceWarning",
+]
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index b3d74bc14..000000000
--- a/pytest.ini
+++ /dev/null
@@ -1,9 +0,0 @@
-[pytest]
-filterwarnings =
-    ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning
-    ignore:unclosed file .*:ResourceWarning
-env_files =
-    test.env
-testpaths =
-    tests/unit
-    tests/functional
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 412630919..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-pyhive[hive_pure_sasl]~=0.7.0
-requests>=2.28.1
-
-pyodbc~=5.1.0 --no-binary pyodbc
-sqlparams>=3.0.0
-thrift>=0.13.0
-pyspark>=3.0.0,<4.0.0
-sqlparse>=0.4.2 # not directly required, pinned by Snyk to avoid a vulnerability
-
-types-PyYAML
-types-python-dateutil
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 406c181d5..000000000
--- a/setup.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-import re
-
-# require python 3.9 or newer
-if sys.version_info < (3, 9):
-    print("Error: dbt does not support this version of Python.")
-    print("Please upgrade to Python 3.9 or higher.")
-    sys.exit(1)
-
-# require version of setuptools that supports find_namespace_packages
-from setuptools import setup
-
-try:
-    from setuptools import find_namespace_packages
-except ImportError:
-    # the user has a downlevel version of setuptools.
-    print("Error: dbt requires setuptools v40.1.0 or higher.")
-    print('Please upgrade setuptools with "pip install --upgrade setuptools" and try again')
-    sys.exit(1)
-
-# pull long description from README
-this_directory = os.path.abspath(os.path.dirname(__file__))
-with open(os.path.join(this_directory, "README.md"), "r", encoding="utf8") as f:
-    long_description = f.read()
-
-
-# get this package's version from dbt/adapters/spark/__version__.py
-def _get_plugin_version_dict():
-    _version_path = os.path.join(this_directory, "dbt", "adapters", "spark", "__version__.py")
-    _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
-    _pre = r"""((?P<prekind>a|b|rc)(?P<num>\d+))?"""
-    _build = r"""(\+build[0-9]+)?"""
-    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}{_build}["']"""
-    with open(_version_path) as f:
-        match = re.search(_version_pattern, f.read().strip())
-        if match is None:
-            raise ValueError(f"invalid version at {_version_path}")
-        return match.groupdict()
-
-
-package_name = "dbt-spark"
-package_version = "1.9.0b1"
-description = """The Apache Spark adapter plugin for dbt"""
-
-odbc_extras = ["pyodbc~=5.1.0"]
-pyhive_extras = [
-    "PyHive[hive_pure_sasl]~=0.7.0",
-    "thrift>=0.11.0,<0.17.0",
-]
-session_extras = ["pyspark>=3.0.0,<4.0.0"]
-all_extras = odbc_extras + pyhive_extras + session_extras
-
-setup(
-    name=package_name,
-    version=package_version,
-    description=description,
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    author="dbt Labs",
-    author_email="info@dbtlabs.com",
-    url="https://github.com/dbt-labs/dbt-spark",
-    packages=find_namespace_packages(include=["dbt", "dbt.*"]),
-    include_package_data=True,
-    install_requires=[
-        "sqlparams>=3.0.0",
-        "dbt-common>=1.10,<2.0",
-        "dbt-adapters>=1.7,<2.0",
-        # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
-        "dbt-core>=1.8.0",
-    ],
-    extras_require={
-        "ODBC": odbc_extras,
-        "PyHive": pyhive_extras,
-        "session": session_extras,
-        "all": all_extras,
-    },
-    zip_safe=False,
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "License :: OSI Approved :: Apache Software License",
-        "Operating System :: Microsoft :: Windows",
-        "Operating System :: MacOS :: MacOS X",
-        "Operating System :: POSIX :: Linux",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-    ],
-    python_requires=">=3.9",
-)
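
Note: the extras_require mapping deleted above carries over one-for-one into
[project.optional-dependencies] in the new pyproject.toml, so user-facing
installs are unchanged. A quick sketch of the install matrix, assuming only
the extras named in this diff (ODBC, PyHive, session, all):

    python -m pip install dbt-spark              # core adapter only
    python -m pip install "dbt-spark[ODBC]"      # adds pyodbc for the odbc connection method
    python -m pip install "dbt-spark[PyHive]"    # adds PyHive + thrift for the thrift connection method
    python -m pip install "dbt-spark[session]"   # adds pyspark for the session connection method
    python -m pip install "dbt-spark[all]"       # all of the above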
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 62bb9c5b0..000000000
--- a/tox.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[tox]
-skipsdist = True
-envlist = unit, flake8, integration-spark-thrift
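
With setup.py, Makefile, tox.ini, and the requirements files gone, hatch is
the single entry point for local development. A rough sketch of the new
workflow, assuming the scripts defined in hatch.toml above (hatch itself is
the only tool installed by hand):

    python -m pip install hatch    # one-time; replaces the pip install -r ... steps
    hatch run setup                # pre-commit install (was part of `make dev`)
    hatch run code-quality         # pre-commit run --all-files (was `make lint`)
    hatch run unit-tests           # python -m pytest tests/unit (was `make unit`)
    hatch run integration-tests    # python -m pytest tests/functional
    hatch build                    # builds the sdist and wheel (was ./scripts/build-dist.sh)
    hatch run build:check-all      # twine + check-wheel-contents against dist/ (was separate CI steps)
    hatch version                  # prints the current version (replaces .bumpversion.cfg)
    hatch version 1.9.0b2          # example only: sets a new version (was bumpversion --new-version)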