From d42ebdc2641b777f307f041ee5d20a6109476271 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Mon, 11 Nov 2024 15:37:34 -0800 Subject: [PATCH 01/10] deprecate python 3.8 --- .github/workflows/build_test.yml | 24 ++++++++++---------- .github/workflows/create_req_files.yml | 2 +- README.md | 6 ++--- ci/build_darwin.sh | 8 ++----- ci/build_docker.sh | 2 +- ci/build_linux.sh | 4 ++-- ci/build_windows.bat | 4 ++-- ci/docker/connector_build/Dockerfile | 2 -- ci/docker/connector_test/Dockerfile | 1 - ci/docker/connector_test_lambda/Dockerfile38 | 3 +-- ci/test_darwin.sh | 4 ++-- ci/test_docker.sh | 6 ++--- ci/test_fips.sh | 4 ++-- ci/test_fips_docker.sh | 6 ++--- ci/test_lambda_docker.sh | 2 +- ci/test_linux.sh | 4 ++-- ci/test_windows.bat | 2 +- setup.cfg | 5 ++-- tested_requirements/requirements_38.reqs | 20 ---------------- tox.ini | 6 ++--- 20 files changed, 43 insertions(+), 72 deletions(-) delete mode 100644 tested_requirements/requirements_38.reqs diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index ab98dd370..14dd4aa62 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -30,7 +30,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' - name: Display Python version run: python -c "import sys; import os; print(\"\n\".join(os.environ[\"PATH\"].split(os.pathsep))); print(sys.version); print(sys.executable);" - name: Upgrade setuptools, pip and wheel @@ -51,7 +51,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -80,7 +80,7 @@ jobs: id: macosx_x86_64 - image: macos-latest id: macosx_arm64 - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] name: Build ${{ matrix.os.id }}-py${{ matrix.python-version }} runs-on: ${{ matrix.os.image }} steps: @@ -125,7 +125,7 @@ jobs: download_name: macosx_x86_64 - image_name: windows-2019 download_name: win_amd64 - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] cloud-provider: [aws, azure, gcp] steps: - uses: actions/checkout@v4 @@ -183,7 +183,7 @@ jobs: os: - image_name: ubuntu-latest download_name: linux - python-version: [3.8] + python-version: [3.9] cloud-provider: [aws] steps: - uses: actions/checkout@v4 @@ -222,7 +222,7 @@ jobs: os: - image_name: ubuntu-latest download_name: linux - python-version: [3.8] + python-version: [3.9] cloud-provider: [aws] steps: - uses: actions/checkout@v4 @@ -245,7 +245,7 @@ jobs: shell: bash test-fips: - name: Test FIPS linux-3.8-${{ matrix.cloud-provider }} + name: Test FIPS linux-3.9-${{ matrix.cloud-provider }} needs: build runs-on: ubuntu-latest strategy: @@ -264,7 +264,7 @@ jobs: - name: Download wheel(s) uses: actions/download-artifact@v4 with: - name: manylinux_x86_64_py3.8 + name: manylinux_x86_64_py3.9 path: dist - name: Show wheels downloaded run: ls -lh dist @@ -272,7 +272,7 @@ jobs: - name: Run tests run: ./ci/test_fips_docker.sh env: - PYTHON_VERSION: 3.8 + PYTHON_VERSION: 3.9 cloud_provider: ${{ matrix.cloud-provider }} PYTEST_ADDOPTS: --color=yes --tb=short TOX_PARALLEL_NO_SPINNER: 1 @@ -280,7 +280,7 @@ jobs: - uses: actions/upload-artifact@v4 with: include-hidden-files: true - name: coverage_linux-fips-3.8-${{ matrix.cloud-provider }} + name: coverage_linux-fips-3.9-${{ matrix.cloud-provider }} path: | 
.coverage coverage.xml @@ -292,7 +292,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] cloud-provider: [aws] steps: - name: Set shortver @@ -345,7 +345,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Upgrade setuptools and pip diff --git a/.github/workflows/create_req_files.yml b/.github/workflows/create_req_files.yml index 5dc43886c..083da1ae1 100644 --- a/.github/workflows/create_req_files.yml +++ b/.github/workflows/create_req_files.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v3 - name: Set up Python diff --git a/README.md b/README.md index 5b76a5806..ccbe1d42e 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ using the Snowflake JDBC or ODBC drivers. The connector has **no** dependencies on JDBC or ODBC. It can be installed using ``pip`` on Linux, Mac OSX, and Windows platforms -where Python 3.8.0 (or higher) is installed. +where Python 3.9.0 (or higher) is installed. Snowflake Documentation is available at: https://docs.snowflake.com/ @@ -27,7 +27,7 @@ https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowfl ### Locally -Install Python 3.8.0 or higher. Clone the Snowflake Connector for Python repository, then run the following commands +Install a supported Python version. Clone the Snowflake Connector for Python repository, then run the following commands to create a wheel package using PEP-517 build: ```shell @@ -42,7 +42,7 @@ Find the `snowflake_connector_python*.whl` package in the `./dist` directory. ### In Docker Or use our Dockerized build script `ci/build_docker.sh` and find the built wheel files in `dist/repaired_wheels`. -Note: `ci/build_docker.sh` can be used to compile only certain versions, like this: `ci/build_docker.sh "3.8 3.9"` +Note: `ci/build_docker.sh` can be used to compile only certain versions, like this: `ci/build_docker.sh "3.9 3.10"` ## Code hygiene and other utilities These tools are integrated into `tox` to allow us to easily set them up universally on any computer. 
diff --git a/ci/build_darwin.sh b/ci/build_darwin.sh index 08214a357..24e979f0f 100755 --- a/ci/build_darwin.sh +++ b/ci/build_darwin.sh @@ -2,13 +2,9 @@ # # Build Snowflake Python Connector on Mac # NOTES: -# - To compile only a specific version(s) pass in versions like: `./build_darwin.sh "3.8 3.9"` +# - To compile only a specific version(s) pass in versions like: `./build_darwin.sh "3.9 3.10"` arch=$(uname -m) -if [[ "$arch" == "arm64" ]]; then - PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" -else - PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" -fi +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" CONNECTOR_DIR="$(dirname "${THIS_DIR}")" diff --git a/ci/build_docker.sh b/ci/build_docker.sh index f98dcc86d..1c661ea3a 100755 --- a/ci/build_docker.sh +++ b/ci/build_docker.sh @@ -2,7 +2,7 @@ # # Build Snowflake Python Connector in Docker # NOTES: -# - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.8 3.9"` +# - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.9 3.10"` set -o pipefail THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/ci/build_linux.sh b/ci/build_linux.sh index 1daad7ffb..f12717ec4 100755 --- a/ci/build_linux.sh +++ b/ci/build_linux.sh @@ -3,11 +3,11 @@ # Build Snowflake Python Connector on Linux # NOTES: # - This is designed to ONLY be called in our build docker image -# - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.8 3.9"` +# - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.9 3.10"` set -o pipefail U_WIDTH=16 -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" CONNECTOR_DIR="$(dirname "${THIS_DIR}")" DIST_DIR="${CONNECTOR_DIR}/dist" diff --git a/ci/build_windows.bat b/ci/build_windows.bat index 5e0f6ba23..3835243c3 100644 --- a/ci/build_windows.bat +++ b/ci/build_windows.bat @@ -6,14 +6,14 @@ SET SCRIPT_DIR=%~dp0 SET CONNECTOR_DIR=%~dp0\..\ -set python_versions= 3.8 3.9 3.10 3.11 3.12 +set python_versions= 3.9 3.10 3.11 3.12 3.13 cd %CONNECTOR_DIR% set venv_dir=%WORKSPACE%\venv-flake8 if %errorlevel% neq 0 goto :error -py -3.8 -m venv %venv_dir% +py -3.9 -m venv %venv_dir% if %errorlevel% neq 0 goto :error call %venv_dir%\scripts\activate diff --git a/ci/docker/connector_build/Dockerfile b/ci/docker/connector_build/Dockerfile index 263803feb..fa1febc88 100644 --- a/ci/docker/connector_build/Dockerfile +++ b/ci/docker/connector_build/Dockerfile @@ -14,6 +14,4 @@ WORKDIR /home/user RUN chmod 777 /home/user RUN git clone https://github.com/matthew-brett/multibuild.git && cd /home/user/multibuild && git checkout bfc6d8b82d8c37b8ca1e386081fd800e81c6ab4a -ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin:/opt/python/cp311-cp311/bin:/opt/python/cp312-cp312/bin" - ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_test/Dockerfile b/ci/docker/connector_test/Dockerfile index 400d26d14..365154c69 100644 --- a/ci/docker/connector_test/Dockerfile +++ b/ci/docker/connector_test/Dockerfile @@ -12,6 +12,5 @@ RUN chmod +x /usr/local/bin/entrypoint.sh WORKDIR /home/user RUN chmod 777 /home/user -ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin/:/opt/python/cp39-cp39/bin/:/opt/python/cp310-cp310/bin/:/opt/python/cp311-cp311/bin/:/opt/python/cp312-cp312/bin/"
PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin/:/opt/python/cp39-cp39/bin/:/opt/python/cp310-cp310/bin/:/opt/python/cp311-cp311/bin/:/opt/python/cp312-cp312/bin/" ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_test_lambda/Dockerfile38 b/ci/docker/connector_test_lambda/Dockerfile38 index 3d9d0c812..26b6cc48e 100644 --- a/ci/docker/connector_test_lambda/Dockerfile38 +++ b/ci/docker/connector_test_lambda/Dockerfile38 @@ -4,9 +4,8 @@ RUN yum install -y git WORKDIR /home/user/snowflake-connector-python RUN chmod 777 /home/user/snowflake-connector-python -ENV PATH="${PATH}:/opt/python/cp38-cp38/bin/" ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" -RUN pip3 install -U pip setuptools wheel tox>=4 +RUN python3.8 -m pip install -U pip setuptools wheel tox>=4 CMD [ "app.handler" ] diff --git a/ci/test_darwin.sh b/ci/test_darwin.sh index 81ea9911a..848d43224 100755 --- a/ci/test_darwin.sh +++ b/ci/test_darwin.sh @@ -2,10 +2,10 @@ # # Test Snowflake Connector on a Darwin Jenkins slave # NOTES: -# - Versions to be tested should be passed in as the first argument, e.g: "3.8 3.9". If omitted 3.8-3.11 will be assumed. +# - Versions to be tested should be passed in as the first argument, e.g: "3.9 3.10". If omitted 3.8-3.11 will be assumed. # - This script uses .. to download the newest wheel files from S3 -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" PARAMETERS_DIR="${CONNECTOR_DIR}/.github/workflows/parameters/public" diff --git a/ci/test_docker.sh b/ci/test_docker.sh index 073372366..9da02c588 100755 --- a/ci/test_docker.sh +++ b/ci/test_docker.sh @@ -1,13 +1,13 @@ #!/bin/bash -e # Test Snowflake Python Connector in Docker # NOTES: -# - By default this script runs Python 3.8 tests, as these are installed in dev vms -# - To compile only a specific version(s) pass in versions like: `./test_docker.sh "3.8 3.9"` +# - By default this script runs Python 3.9 tests, as these are installed in dev vms +# - To compile only a specific version(s) pass in versions like: `./test_docker.sh "3.9 3.10"` set -o pipefail # In case this is ran from dev-vm -PYTHON_ENV=${1:-3.8} +PYTHON_ENV=${1:-3.9} # Set constants THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/ci/test_fips.sh b/ci/test_fips.sh index bc97c9d7f..4d6f1b48f 100755 --- a/ci/test_fips.sh +++ b/ci/test_fips.sh @@ -6,9 +6,9 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # shellcheck disable=SC1090 CONNECTOR_DIR="$( dirname "${THIS_DIR}")" -CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp38*manylinux2014*.whl | sort -r | head -n 1)" +CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl | sort -r | head -n 1)" -python3.8 -m venv fips_env +python3.9 -m venv fips_env source fips_env/bin/activate pip install -U setuptools pip pip install "${CONNECTOR_WHL}[pandas,secure-local-storage,development]" diff --git a/ci/test_fips_docker.sh b/ci/test_fips_docker.sh index 4150296de..46f3a1ed3 100755 --- a/ci/test_fips_docker.sh +++ b/ci/test_fips_docker.sh @@ -4,10 +4,10 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" # In case this is not run locally and not on Jenkins -if [[ ! -d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp38*manylinux2014*.whl) == '' ]]; then +if [[ ! 
-d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl) == '' ]]; then echo "Missing wheel files, going to compile Python connector in Docker..." - $THIS_DIR/build_docker.sh 3.8 - cp $CONNECTOR_DIR/dist/repaired_wheels/*cp38*manylinux2014*.whl $CONNECTOR_DIR/dist/ + $THIS_DIR/build_docker.sh 3.9 + cp $CONNECTOR_DIR/dist/repaired_wheels/*cp39*manylinux2014*.whl $CONNECTOR_DIR/dist/ fi cd $THIS_DIR/docker/connector_test_fips diff --git a/ci/test_lambda_docker.sh b/ci/test_lambda_docker.sh index e4869f125..cc3c1fe9f 100755 --- a/ci/test_lambda_docker.sh +++ b/ci/test_lambda_docker.sh @@ -2,7 +2,7 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" -PYTHON_VERSION="${1:-3.8}" +PYTHON_VERSION="${1:-3.9}" PYTHON_SHORT_VERSION="$(echo "$PYTHON_VERSION" | tr -d .)" # In case this is not run locally and not on Jenkins diff --git a/ci/test_linux.sh b/ci/test_linux.sh index 7f765947c..3596e9cbd 100755 --- a/ci/test_linux.sh +++ b/ci/test_linux.sh @@ -2,11 +2,11 @@ # # Test Snowflake Connector in Linux # NOTES: -# - Versions to be tested should be passed in as the first argument, e.g: "3.8 3.9". If omitted 3.7-3.11 will be assumed. +# - Versions to be tested should be passed in as the first argument, e.g: "3.9 3.10". If omitted 3.9-3.13 will be assumed. # - This script assumes that ../dist/repaired_wheels has the wheel(s) built for all versions to be tested # - This is the script that test_docker.sh runs inside of the docker container -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4c62329f3..643758abe 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -30,7 +30,7 @@ gpg --quiet --batch --yes --decrypt --passphrase="%PARAMETERS_SECRET%" %PARAMS_F :: create tox execution virtual env set venv_dir=%WORKSPACE%\tox_venv -py -3.8 -m venv %venv_dir% +py -3.9 -m venv %venv_dir% if %errorlevel% neq 0 goto :error call %venv_dir%\scripts\activate diff --git a/setup.cfg b/setup.cfg index 38c3b3e5d..791b9fafa 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,11 +20,11 @@ classifiers = Operating System :: OS Independent Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Programming Language :: SQL Topic :: Database Topic :: Scientific/Engineering :: Information Analysis @@ -40,7 +40,7 @@ project_urls = Changelog=https://github.com/snowflakedb/snowflake-connector-python/blob/main/DESCRIPTION.md [options] -python_requires = >=3.8 +python_requires = >=3.9 packages = find_namespace: install_requires = asn1crypto>0.24.0,<2.0.0 @@ -50,7 +50,6 @@ install_requires = pyjwt<3.0.0 pytz requests<3.0.0 - importlib-metadata; python_version < '3.8' packaging charset_normalizer>=2,<4 idna>=2.5,<4 diff --git a/tested_requirements/requirements_38.reqs b/tested_requirements/requirements_38.reqs deleted file mode 100644 index 5891eb725..000000000 --- a/tested_requirements/requirements_38.reqs +++ /dev/null @@ -1,20 +0,0 @@ -# Generated on: Python 3.8.18 -asn1crypto==1.5.1 -certifi==2024.8.30 -cffi==1.17.1 -charset-normalizer==3.4.0 -cryptography==43.0.3 -filelock==3.16.1 
-idna==3.10 -packaging==24.1 -platformdirs==4.3.6 -pycparser==2.22 -PyJWT==2.9.0 -pyOpenSSL==24.2.1 -pytz==2024.2 -requests==2.32.3 -sortedcontainers==2.4.0 -tomlkit==0.13.2 -typing_extensions==4.12.2 -urllib3==1.26.20 -snowflake-connector-python==3.12.3 diff --git a/tox.ini b/tox.ini index 6faca8c0d..f256eefa5 100644 --- a/tox.ini +++ b/tox.ini @@ -67,7 +67,7 @@ commands = extras: python -m test.extras.run {posargs:} [testenv:olddriver] -basepython = python3.8 +basepython = python3.9 description = run the old driver tests with pytest under {basepython} deps = pip >= 19.3.1 @@ -89,7 +89,7 @@ commands = {env:SNOWFLAKE_PYTEST_CMD} -m "not skipolddriver" -vvv {posargs:} test [testenv:noarrowextension] -basepython = python3.8 +basepython = python3.9 skip_install = True description = run import with no arrow extension under {basepython} setenv = SNOWFLAKE_DISABLE_COMPILE_ARROW_EXTENSIONS=1 @@ -131,7 +131,7 @@ deps = flake8 commands = flake8 {posargs} [testenv:fix_lint] -basepython = python3.8 +basepython = python3.9 description = format the code base to adhere to our styles, and complain about what we cannot do automatically passenv = PROGRAMDATA From 601c231cc7252031f88234673d35fbf6d4d05964 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 Nov 2024 20:00:47 -0800 Subject: [PATCH 02/10] adding Python 3.13 builds --- .github/workflows/build_test.yml | 8 ++++---- .github/workflows/create_req_files.yml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 14dd4aa62..6d9be84ae 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -51,7 +51,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -80,7 +80,7 @@ jobs: id: macosx_x86_64 - image: macos-latest id: macosx_arm64 - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] name: Build ${{ matrix.os.id }}-py${{ matrix.python-version }} runs-on: ${{ matrix.os.image }} steps: @@ -125,7 +125,7 @@ jobs: download_name: macosx_x86_64 - image_name: windows-2019 download_name: win_amd64 - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] cloud-provider: [aws, azure, gcp] steps: - uses: actions/checkout@v4 @@ -292,7 +292,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] cloud-provider: [aws] steps: - name: Set shortver diff --git a/.github/workflows/create_req_files.yml b/.github/workflows/create_req_files.yml index 083da1ae1..12d115bb9 100644 --- a/.github/workflows/create_req_files.yml +++ b/.github/workflows/create_req_files.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v3 - name: Set up Python From f87e0d47ffb4d6477ace938ba986a2429c04e7e5 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 Nov 2024 20:15:48 -0800 Subject: [PATCH 03/10] pre-commit autoupdate --- .pre-commit-config.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 39c97d4a4..81ef4ee87 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ repos: - id: check-hooks-apply - id: check-useless-excludes - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace exclude: > @@ -24,7 +24,7 @@ repos: - id: debug-statements - id: check-ast - repo: https://github.com/Lucas-C/pre-commit-hooks.git - rev: v1.5.1 + rev: v1.5.5 hooks: - id: insert-license name: insert-py-license @@ -61,7 +61,7 @@ repos: hooks: - id: yesqa - repo: https://github.com/mgedmin/check-manifest - rev: "0.49" + rev: "0.50" hooks: - id: check-manifest - repo: https://github.com/PyCQA/isort @@ -76,18 +76,18 @@ repos: - --append-only files: ^src/snowflake/connector/.*\.py$ - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.19.0 hooks: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 + rev: 7.1.1 hooks: - id: flake8 additional_dependencies: - flake8-bugbear - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.10.0' + rev: 'v1.13.0' hooks: - id: mypy files: | @@ -120,14 +120,14 @@ repos: - types-pyOpenSSL - types-setuptools - repo: https://github.com/psf/black - rev: 24.4.2 + rev: 24.10.0 hooks: - id: black args: - --safe language_version: python3 - repo: https://github.com/pre-commit/mirrors-clang-format - rev: v17.0.6 + rev: v19.1.3 hooks: - id: clang-format types_or: [c++, c] From ba236a30caff6fa6f5675d3dd48ca0d9e4827ae6 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 Nov 2024 20:17:20 -0800 Subject: [PATCH 04/10] fixing all files with pre-commit --- src/snowflake/connector/connection.py | 2 +- src/snowflake/connector/gzip_decoder.py | 2 +- .../ArrowIterator/nanoarrow_ipc.c | 77 +++++++++++-------- test/conftest.py | 2 +- test/integ/conftest.py | 6 +- test/integ/pandas/test_pandas_tools.py | 34 ++++---- 6 files changed, 66 insertions(+), 57 deletions(-) diff --git a/src/snowflake/connector/connection.py b/src/snowflake/connector/connection.py index 5205bafc1..a5d2832aa 100644 --- a/src/snowflake/connector/connection.py +++ b/src/snowflake/connector/connection.py @@ -894,7 +894,7 @@ def execute_stream( remove_comments: bool = False, cursor_class: SnowflakeCursor = SnowflakeCursor, **kwargs, - ) -> Generator[SnowflakeCursor, None, None]: + ) -> Generator[SnowflakeCursor]: """Executes a stream of SQL statements. This is a non-standard convenient method.""" split_statements_list = split_statements( stream, remove_comments=remove_comments diff --git a/src/snowflake/connector/gzip_decoder.py b/src/snowflake/connector/gzip_decoder.py index 6296d0ab5..6c370bc6d 100644 --- a/src/snowflake/connector/gzip_decoder.py +++ b/src/snowflake/connector/gzip_decoder.py @@ -67,7 +67,7 @@ def decompress_raw_data_by_zcat(raw_data_fd: IO, add_bracket: bool = True) -> by def decompress_raw_data_to_unicode_stream( raw_data_fd: IO, -) -> Generator[str, None, None]: +) -> Generator[str]: """Decompresses a raw data in file like object and yields a Unicode string. 
Args: diff --git a/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c b/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c index 975cf37cf..371e19884 100644 --- a/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c +++ b/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c @@ -17,15 +17,18 @@ flatbuffers_voffset_t id__tmp, *vt__tmp; \ FLATCC_ASSERT(t != 0 && "null pointer table access"); \ id__tmp = ID; \ - vt__tmp = (flatbuffers_voffset_t *)(( \ - uint8_t *)(t)-__flatbuffers_soffset_read_from_pe(t)); \ + vt__tmp = \ + (flatbuffers_voffset_t *)((uint8_t *)(t) - \ + __flatbuffers_soffset_read_from_pe(t)); \ if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= \ sizeof(vt__tmp[0]) * (id__tmp + 3u)) { \ offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2); \ } \ } -#define __flatbuffers_field_present(ID, t) \ - { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; } +#define __flatbuffers_field_present(ID, t) \ + { \ + __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; \ + } #define __flatbuffers_scalar_field(T, ID, t) \ { \ __flatbuffers_read_vt(ID, offset__tmp, t) if (offset__tmp) { \ @@ -222,27 +225,27 @@ static inline flatbuffers_string_t flatbuffers_string_cast_from_union( const flatbuffers_union_t u__tmp) { return flatbuffers_string_cast_from_generic(u__tmp.value); } -#define __flatbuffers_define_union_field(NS, ID, N, NK, T, r) \ - static inline T##_union_type_t N##_##NK##_type_get(N##_table_t t__tmp) \ - __##NS##union_type_field(((ID)-1), t__tmp) static inline NS##generic_t \ - N##_##NK##_get(N##_table_t t__tmp) __##NS##table_field( \ - NS##generic_t, ID, t__tmp, r) static inline T##_union_type_t \ - N##_##NK##_type(N##_table_t t__tmp) __##NS##union_type_field( \ - ((ID)-1), t__tmp) static inline NS##generic_t \ - N##_##NK(N##_table_t t__tmp) __##NS##table_field( \ - NS##generic_t, ID, t__tmp, r) static inline int \ - N##_##NK##_is_present(N##_table_t t__tmp) \ - __##NS##field_present( \ - ID, t__tmp) static inline T##_union_t \ - N##_##NK##_union(N##_table_t t__tmp) { \ - T##_union_t u__tmp = {0, 0}; \ - u__tmp.type = N##_##NK##_type_get(t__tmp); \ - if (u__tmp.type == 0) return u__tmp; \ - u__tmp.value = N##_##NK##_get(t__tmp); \ - return u__tmp; \ - } \ - static inline NS##string_t N##_##NK##_as_string(N##_table_t t__tmp) { \ - return NS##string_cast_from_generic(N##_##NK##_get(t__tmp)); \ +#define __flatbuffers_define_union_field(NS, ID, N, NK, T, r) \ + static inline T##_union_type_t N##_##NK##_type_get(N##_table_t t__tmp) \ + __##NS##union_type_field(((ID) - 1), t__tmp) static inline NS##generic_t \ + N##_##NK##_get(N##_table_t t__tmp) __##NS##table_field( \ + NS##generic_t, ID, t__tmp, r) static inline T##_union_type_t \ + N##_##NK##_type(N##_table_t t__tmp) __##NS##union_type_field( \ + ((ID) - 1), t__tmp) static inline NS##generic_t \ + N##_##NK(N##_table_t t__tmp) __##NS##table_field( \ + NS##generic_t, ID, t__tmp, r) static inline int \ + N##_##NK##_is_present(N##_table_t t__tmp) \ + __##NS##field_present( \ + ID, t__tmp) static inline T##_union_t \ + N##_##NK##_union(N##_table_t t__tmp) { \ + T##_union_t u__tmp = {0, 0}; \ + u__tmp.type = N##_##NK##_type_get(t__tmp); \ + if (u__tmp.type == 0) return u__tmp; \ + u__tmp.value = N##_##NK##_get(t__tmp); \ + return u__tmp; \ + } \ + static inline NS##string_t N##_##NK##_as_string(N##_table_t t__tmp) { \ + return NS##string_cast_from_generic(N##_##NK##_get(t__tmp)); \ } #define __flatbuffers_define_union_vector_ops(NS, T) 
\ @@ -703,10 +706,14 @@ static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, T##_mutable_vec_t v__tmp = (T##_mutable_vec_t)N##_##NK##_get(t); \ if (v__tmp) T##_vec_sort(v__tmp); \ } -#define __flatbuffers_sort_table_field(N, NK, T, t) \ - { T##_sort((T##_mutable_table_t)N##_##NK##_get(t)); } -#define __flatbuffers_sort_union_field(N, NK, T, t) \ - { T##_sort(T##_mutable_union_cast(N##_##NK##_union(t))); } +#define __flatbuffers_sort_table_field(N, NK, T, t) \ + { \ + T##_sort((T##_mutable_table_t)N##_##NK##_get(t)); \ + } +#define __flatbuffers_sort_union_field(N, NK, T, t) \ + { \ + T##_sort(T##_mutable_union_cast(N##_##NK##_union(t))); \ + } #define __flatbuffers_sort_table_vector_field_elements(N, NK, T, t) \ { \ T##_vec_t v__tmp = N##_##NK##_get(t); \ @@ -12006,7 +12013,9 @@ static inline size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( @@ -24265,7 +24274,9 @@ static inline size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( @@ -30667,7 +30678,9 @@ static inline size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( diff --git a/test/conftest.py b/test/conftest.py index c85f954c2..59b46690b 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -55,7 +55,7 @@ def patch_connection( self, con: SnowflakeConnection, propagate: bool = True, - ) -> Generator[TelemetryCaptureHandler, None, None]: + ) -> Generator[TelemetryCaptureHandler]: original_telemetry = con._telemetry new_telemetry = TelemetryCaptureHandler( original_telemetry, diff --git a/test/integ/conftest.py b/test/integ/conftest.py index 0f112ec30..bd92de2cf 100644 --- a/test/integ/conftest.py +++ b/test/integ/conftest.py @@ -163,7 +163,7 @@ def get_db_parameters(connection_name: str = "default") -> dict[str, Any]: @pytest.fixture(scope="session", autouse=True) -def init_test_schema(db_parameters) -> Generator[None, None, None]: +def init_test_schema(db_parameters) -> Generator[None]: """Initializes and destroys the schema specific to this pytest session. This is automatically called per test session. 
@@ -200,7 +200,7 @@ def create_connection(connection_name: str, **kwargs) -> SnowflakeConnection: def db( connection_name: str = "default", **kwargs, -) -> Generator[SnowflakeConnection, None, None]: +) -> Generator[SnowflakeConnection]: if not kwargs.get("timezone"): kwargs["timezone"] = "UTC" if not kwargs.get("converter_class"): @@ -216,7 +216,7 @@ def db( def negative_db( connection_name: str = "default", **kwargs, -) -> Generator[SnowflakeConnection, None, None]: +) -> Generator[SnowflakeConnection]: if not kwargs.get("timezone"): kwargs["timezone"] = "UTC" if not kwargs.get("converter_class"): diff --git a/test/integ/pandas/test_pandas_tools.py b/test/integ/pandas/test_pandas_tools.py index 3fa8c8b8b..ef0226c4e 100644 --- a/test/integ/pandas/test_pandas_tools.py +++ b/test/integ/pandas/test_pandas_tools.py @@ -63,9 +63,7 @@ def assert_result_equals( assert set(cnx.cursor().execute(sql).fetchall()) == set(expected_data) -def test_fix_snow_746341( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]] -): +def test_fix_snow_746341(conn_cnx: Callable[..., Generator[SnowflakeConnection]]): cat = '"cat"' df = pandas.DataFrame([[1], [2]], columns=[f"col_'{cat}'"]) table_name = random_string(5, "snow746341_") @@ -83,7 +81,7 @@ def test_fix_snow_746341( @pytest.mark.parametrize("auto_create_table", [True, False]) @pytest.mark.parametrize("index", [False]) def test_write_pandas_with_overwrite( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, auto_create_table: bool, index: bool, @@ -225,7 +223,7 @@ def test_write_pandas_with_overwrite( @pytest.mark.parametrize("create_temp_table", [True, False]) @pytest.mark.parametrize("index", [False]) def test_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], db_parameters: dict[str, str], compression: str, chunk_size: int, @@ -296,7 +294,7 @@ def test_write_pandas( def test_write_non_range_index_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], db_parameters: dict[str, str], ): compression = "gzip" @@ -376,7 +374,7 @@ def test_write_non_range_index_pandas( @pytest.mark.parametrize("table_type", ["", "temp", "temporary", "transient"]) def test_write_pandas_table_type( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], table_type: str, ): with conn_cnx() as cnx: @@ -408,7 +406,7 @@ def test_write_pandas_table_type( def test_write_pandas_create_temp_table_deprecation_warning( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): with conn_cnx() as cnx: table_name = random_string(5, "driver_versions_") @@ -436,7 +434,7 @@ def test_write_pandas_create_temp_table_deprecation_warning( @pytest.mark.parametrize("use_logical_type", [None, True, False]) def test_write_pandas_use_logical_type( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], use_logical_type: bool | None, ): table_name = random_string(5, "USE_LOCAL_TYPE_").upper() @@ -483,7 +481,7 @@ def test_write_pandas_use_logical_type( def test_invalid_table_type_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., 
Generator[SnowflakeConnection]], ): with conn_cnx() as cnx: with pytest.raises(ValueError, match="Unsupported table type"): @@ -496,7 +494,7 @@ def test_invalid_table_type_write_pandas( def test_empty_dataframe_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): table_name = random_string(5, "empty_dataframe_") df = pandas.DataFrame([], columns=["name", "balance"]) @@ -720,7 +718,7 @@ def mocked_execute(*args, **kwargs): @pytest.mark.parametrize("quote_identifiers", [True, False]) def test_default_value_insertion( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, ): """Tests whether default values can be successfully inserted with the pandas writeback.""" @@ -774,7 +772,7 @@ def test_default_value_insertion( @pytest.mark.parametrize("quote_identifiers", [True, False]) def test_autoincrement_insertion( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, ): """Tests whether default values can be successfully inserted with the pandas writeback.""" @@ -828,7 +826,7 @@ def test_autoincrement_insertion( ], ) def test_special_name_quoting( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], auto_create_table: bool, column_names: list[str], ): @@ -875,7 +873,7 @@ def test_special_name_quoting( def test_auto_create_table_similar_column_names( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): """Tests whether similar names do not cause issues when auto-creating a table as expected.""" table_name = random_string(5, "numbas_") @@ -905,9 +903,7 @@ def test_auto_create_table_similar_column_names( cnx.execute_string(drop_sql) -def test_all_pandas_types( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]] -): +def test_all_pandas_types(conn_cnx: Callable[..., Generator[SnowflakeConnection]]): table_name = random_string(5, "all_types_") datetime_with_tz = datetime(1997, 6, 3, 14, 21, 32, 00, tzinfo=timezone.utc) datetime_with_ntz = datetime(1997, 6, 3, 14, 21, 32, 00) @@ -979,7 +975,7 @@ def test_all_pandas_types( @pytest.mark.parametrize("object_type", ["STAGE", "FILE FORMAT"]) def test_no_create_internal_object_privilege_in_target_schema( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], caplog, object_type, ): From aede7eda3d5e3f565d9fcc14fe277d8c4babb496 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 Nov 2024 22:09:30 -0800 Subject: [PATCH 05/10] bump cibuildwheel used --- .github/workflows/build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 6d9be84ae..af798705f 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -96,7 +96,7 @@ jobs: platforms: all - uses: actions/checkout@v4 - name: Building wheel - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.21.3 env: CIBW_BUILD: cp${{ env.shortver }}-${{ matrix.os.id }} MACOSX_DEPLOYMENT_TARGET: 10.14 # Should be kept in sync with ci/build_darwin.sh From d7610f5ac3e4c0c9ee3a446fe3039551c0ec4edf Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 
Nov 2024 22:11:38 -0800 Subject: [PATCH 06/10] changelog entry --- DESCRIPTION.md | 1 + 1 file changed, 1 insertion(+) diff --git a/DESCRIPTION.md b/DESCRIPTION.md index 9513fa71f..0f5a0542a 100644 --- a/DESCRIPTION.md +++ b/DESCRIPTION.md @@ -11,6 +11,7 @@ Source code is also available at: https://github.com/snowflakedb/snowflake-conne - v3.12.4(TBD) - Fixed a bug where multipart uploads to Azure would be missing their MD5 hashes. - Fixed a bug where OpenTelemetry header injection would sometimes cause Exceptions to be thrown. + - Added support for Python 3.13. - v3.12.3(October 25,2024) - Improved the error message for SSL-related issues to provide clearer guidance when an SSL error occurs. From fb680fc6508e123467888cc4eb2ab92d37607459 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Tue, 12 Nov 2024 22:17:35 -0800 Subject: [PATCH 07/10] update numpy.nan usage --- test/integ/test_arrow_result.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integ/test_arrow_result.py b/test/integ/test_arrow_result.py index d8118617d..1afc586fe 100644 --- a/test/integ/test_arrow_result.py +++ b/test/integ/test_arrow_result.py @@ -115,7 +115,7 @@ pandas.NaT, pandas.Timestamp("2024-01-01 12:00:00+0000", tz="UTC"), ], - "NUMBER": [numpy.NAN, 1.0, 2.0, 3.0], + "NUMBER": [numpy.nan, 1.0, 2.0, 3.0], } PANDAS_STRUCTURED_REPRS = { From 0042ed53ba4dec79c254345b25d196eea6a76855 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Wed, 13 Nov 2024 16:18:15 -0800 Subject: [PATCH 08/10] fix olddriver tests --- test/integ/test_vendored_urllib.py | 4 +--- tox.ini | 7 ++++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/test/integ/test_vendored_urllib.py b/test/integ/test_vendored_urllib.py index 3d6f27f9b..85c96fdb6 100644 --- a/test/integ/test_vendored_urllib.py +++ b/test/integ/test_vendored_urllib.py @@ -13,9 +13,7 @@ vendored_imported = False -@pytest.mark.skipif( - not vendored_imported, reason="vendored library is not imported for old driver" -) +@pytest.mark.skipolddriver def test_local_fix_for_closed_socket_bug(): # https://github.com/urllib3/urllib3/issues/1878#issuecomment-641534573 http = urllib3.PoolManager(maxsize=1) diff --git a/tox.ini b/tox.ini index f256eefa5..1863fb290 100644 --- a/tox.ini +++ b/tox.ini @@ -71,10 +71,11 @@ basepython = python3.9 description = run the old driver tests with pytest under {basepython} deps = pip >= 19.3.1 - pyOpenSSL==22.1.0 - snowflake-connector-python==1.9.1 + pyOpenSSL==22.0.0 + snowflake-connector-python==2.8.0 azure-storage-blob==2.1.0 - pandas + pandas==2.0.3 + numpy==1.26.4 pendulum!=2.1.1 pytest<6.1.0 pytest-cov From 8e1599a44ab05b48ad336a69cd47e55581b6ae79 Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Wed, 13 Nov 2024 16:18:59 -0800 Subject: [PATCH 09/10] update windows image used for builds and tests --- .github/workflows/build_test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index af798705f..0638b0820 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -74,7 +74,7 @@ jobs: id: manylinux_x86_64 - image: ubuntu-20.04 id: manylinux_aarch64 - - image: windows-2019 + - image: windows-2022 id: win_amd64 - image: macos-latest id: macosx_x86_64 @@ -123,7 +123,7 @@ jobs: download_name: manylinux_x86_64 - image_name: macos-latest download_name: macosx_x86_64 - - image_name: windows-2019 + - image_name: windows-2022 download_name: win_amd64 python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
cloud-provider: [aws, azure, gcp] From 3832ff11b75b8fe7dc2ad78a759a3d48cd9a06ab Mon Sep 17 00:00:00 2001 From: Mark Keller Date: Mon, 18 Nov 2024 12:03:19 -0800 Subject: [PATCH 10/10] use already built wheels --- .github/workflows/build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 0638b0820..5bf11b3c8 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -155,7 +155,7 @@ jobs: - name: Install tox run: python -m pip install tox>=4 - name: Run tests - run: python -m tox run -e `echo py${PYTHON_VERSION/\./}-{extras,unit,integ,pandas,sso}-ci | sed 's/ /,/g'` + run: python -m tox run --installpkg dist/*.whl -e `echo py${PYTHON_VERSION/\./}-{extras,unit,integ,pandas,sso}-ci | sed 's/ /,/g'` env: PYTHON_VERSION: ${{ matrix.python-version }} cloud_provider: ${{ matrix.cloud-provider }}
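A note on the tox invocation introduced in the final patch: the environment list passed to `-e` is built with bash parameter and brace expansion. The sketch below is a minimal illustration, assuming a bash shell; the `PYTHON_VERSION=3.9` assignment is an example stand-in for the value the workflow takes from its job matrix.

```shell
# Example value; in the workflow this comes from the job matrix.
PYTHON_VERSION=3.9

# ${PYTHON_VERSION/\./} deletes the dot ("3.9" -> "39"), brace expansion
# fans the five suffixes out into space-separated environment names, and
# sed joins them with commas, which is the list format tox's -e expects.
echo py${PYTHON_VERSION/\./}-{extras,unit,integ,pandas,sso}-ci | sed 's/ /,/g'
# Output: py39-extras-ci,py39-unit-ci,py39-integ-ci,py39-pandas-ci,py39-sso-ci
```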