
Commit

chore!: drop support for Python 3.8 (#3592)
BREAKING CHANGE: Python 3.8 has reached EOL; set our minimum version to 3.9.
kevinzwang authored Dec 19, 2024
1 parent c30f6a8 commit 8f8e210
Showing 19 changed files with 76 additions and 140 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-artifact-s3.yml
@@ -14,7 +14,7 @@ on:

env:
PACKAGE_NAME: getdaft
- PYTHON_VERSION: 3.8
+ PYTHON_VERSION: 3.9

jobs:
build-and-push:
2 changes: 1 addition & 1 deletion .github/workflows/nightlies-tests.yml
@@ -12,7 +12,7 @@ on:
env:
DAFT_ANALYTICS_ENABLED: '0'
UV_SYSTEM_PYTHON: 1
- PYTHON_VERSION: '3.8'
+ PYTHON_VERSION: '3.9'

jobs:
integration-test-tpch:
2 changes: 1 addition & 1 deletion .github/workflows/notebook-checker.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8']
+ python-version: ['3.9']
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
2 changes: 1 addition & 1 deletion .github/workflows/property-based-tests.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8']
+ python-version: ['3.9']
daft_runner: [py]
steps:
- uses: actions/checkout@v4
22 changes: 11 additions & 11 deletions .github/workflows/python-package.yml
@@ -24,7 +24,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8', '3.10']
+ python-version: ['3.9', '3.10']
daft-runner: [py, ray, native]
pyarrow-version: [7.0.0, 16.0.0]
os: [ubuntu-20.04, windows-latest]
@@ -40,10 +40,10 @@
python-version: '3.10'
pyarrow-version: 7.0.0
os: ubuntu-20.04
- - python-version: '3.8'
+ - python-version: '3.9'
pyarrow-version: 16.0.0
- os: windows-latest
- python-version: '3.8'
+ python-version: '3.9'
- os: windows-latest
pyarrow-version: 7.0.0
steps:
@@ -181,7 +181,7 @@ jobs:
package-name: getdaft
strategy:
matrix:
- python-version: ['3.8']
+ python-version: ['3.9']
steps:
- uses: actions/checkout@v4
with:
@@ -219,7 +219,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8']
+ python-version: ['3.9']
daft-runner: [py, ray, native]
steps:
- uses: actions/checkout@v4
@@ -294,7 +294,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8'] # can't use 3.7 due to requiring anon mode for adlfs
+ python-version: ['3.9'] # can't use 3.7 due to requiring anon mode for adlfs
daft-runner: [py, ray, native]
steps:
- uses: actions/checkout@v4
@@ -372,7 +372,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8'] # can't use 3.7 due to requiring anon mode for adlfs
+ python-version: ['3.9'] # can't use 3.7 due to requiring anon mode for adlfs
daft-runner: [py, ray, native]
# These permissions are needed to interact with GitHub's OIDC Token endpoint.
# This is used in the step "Assume GitHub Actions AWS Credentials"
@@ -466,7 +466,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8'] # can't use 3.7 due to requiring anon mode for adlfs
+ python-version: ['3.9'] # can't use 3.7 due to requiring anon mode for adlfs
daft-runner: [py, ray, native]
steps:
- uses: actions/checkout@v4
@@ -543,7 +543,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.8'] # can't use 3.7 due to requiring anon mode for adlfs
+ python-version: ['3.9'] # can't use 3.7 due to requiring anon mode for adlfs
daft-runner: [py, ray, native]
steps:
- uses: actions/checkout@v4
@@ -854,7 +854,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu, Windows]
- python-version: ['3.8']
+ python-version: ['3.9']
steps:
- uses: actions/checkout@v4
- uses: moonrepo/setup-rust@v1
@@ -946,7 +946,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 15
env:
- python-version: '3.8'
+ python-version: '3.9'
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ env.python-version }}
2 changes: 1 addition & 1 deletion .github/workflows/python-publish.yml
@@ -162,7 +162,7 @@ jobs:
with:
# Really doesn't matter what version we upload with
# just the version we test with
- python-version: '3.8'
+ python-version: '3.9'
channels: conda-forge
channel-priority: true

1 change: 1 addition & 0 deletions .ruff.toml
@@ -1,6 +1,7 @@
fix = true
indent-width = 4
line-length = 120
+ # TODO: clean up typing code and update to py39
target-version = "py38"

[format]
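
The added TODO refers to typing syntax: while target-version stays at "py38", Ruff will not rewrite annotations to 3.9-only forms. A rough sketch of the kind of cleanup the TODO likely has in mind, assuming it mostly means adopting PEP 585 builtin generics (the functions below are illustrative, not Daft code):

    # Accepted while target-version = "py38": typing generics everywhere.
    from typing import Dict, List, Optional

    def summarize(rows: List[Dict[str, int]], default: Optional[int] = None) -> Dict[str, int]:
        return rows[0] if rows else {"default": default or 0}

    # Possible once target-version = "py39": PEP 585 builtin generics are valid at runtime.
    def summarize_py39(rows: list[dict[str, int]], default: Optional[int] = None) -> dict[str, int]:
        return rows[0] if rows else {"default": default or 0}
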
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -276,7 +276,7 @@ features = ['async']
path = "src/parquet2"

[workspace.dependencies.pyo3]
features = ["extension-module", "multiple-pymethods", "abi3-py38", "indexmap"]
features = ["extension-module", "multiple-pymethods", "abi3-py39", "indexmap"]
version = "0.21.0"

[workspace.dependencies.pyo3-log]
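
Swapping abi3-py38 for abi3-py39 moves the PyO3 extension to CPython's stable ABI with 3.9 as the floor, so a single cp39-abi3 wheel should cover every newer interpreter. A small sketch of reading such a wheel tag with the packaging library (the filename below is hypothetical, not an actual Daft artifact):

    from packaging.utils import parse_wheel_filename

    # Hypothetical filename in the cp39-abi3 style an abi3-py39 build produces.
    name, version, build, tags = parse_wheel_filename(
        "getdaft-0.4.0-cp39-abi3-manylinux_2_17_x86_64.whl"
    )
    for tag in tags:
        # cp39 = built against CPython 3.9; abi3 = stable ABI, so the same wheel
        # also installs on 3.10, 3.11, and later interpreters.
        print(tag.interpreter, tag.abi, tag.platform)
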
4 changes: 0 additions & 4 deletions daft/dataframe/dataframe.py
@@ -1094,13 +1094,9 @@ def write_lance(
<BLANKLINE>
(Showing first 1 of 1 rows)
"""
- import sys
-
from daft import from_pydict
from daft.io.object_store_options import io_config_to_storage_options

- if sys.version_info < (3, 9):
-     raise ValueError("'write_lance' requires python 3.9 or higher")
try:
import lance
import pyarrow as pa
15 changes: 1 addition & 14 deletions daft/pickle/cloudpickle.py
@@ -70,20 +70,7 @@
except ImportError:
_typing_extensions = Literal = Final = None

- if sys.version_info >= (3, 8):
-     from types import CellType
- else:
-
-     def f():
-         a = 1
-
-         def g():
-             return a
-
-         return g
-
-     CellType = type(f().__closure__[0])
-
+ from types import CellType

# cloudpickle is meant for inter process communication: we expect all
# communicating processes to run the same Python version hence we favor
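
For context on the deleted fallback: types.CellType, the type of a closure cell, has been importable directly since Python 3.8, so the hand-rolled derivation via a nested function is no longer needed. A standalone illustration (not Daft code):

    from types import CellType

    def make_counter():
        count = 0

        def read():
            return count

        return read

    read = make_counter()
    # Closures keep their free variables in cells; each cell is a CellType instance.
    cell = read.__closure__[0]
    print(isinstance(cell, CellType))  # True
    print(cell.cell_contents)          # 0
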
20 changes: 4 additions & 16 deletions daft/pickle/compat.py
@@ -4,20 +4,8 @@

from __future__ import annotations

- import sys
+ import pickle  # noqa: F401

- if sys.version_info < (3, 8):
-     try:
-         import pickle5 as pickle
-         from pickle5 import Pickler
-     except ImportError:
-         import pickle
-
-         # Use the Python pickler for old CPython versions
-         from pickle import _Pickler as Pickler
- else:
-     import pickle  # noqa: F401
-
-     # Pickler will the C implementation in CPython and the Python
-     # implementation in PyPy
-     from pickle import Pickler  # noqa: F401
+ # Pickler will the C implementation in CPython and the Python
+ # implementation in PyPy
+ from pickle import Pickler  # noqa: F401
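
The dropped branch only existed to pull in the pickle5 backport on interpreters older than 3.8; from 3.8 onward the standard-library pickle already provides protocol 5, so the plain imports above are sufficient. A minimal stdlib-only sketch of the Pickler usage that remains (the payload is just an example):

    import io
    import pickle
    from pickle import Pickler

    payload = {"column": list(range(5))}

    buf = io.BytesIO()
    # Protocol 5 (PEP 574) ships with the stdlib on every Python version Daft now supports.
    Pickler(buf, protocol=5).dump(payload)

    assert pickle.loads(buf.getvalue()) == payload
    print(pickle.HIGHEST_PROTOCOL >= 5)  # True on Python 3.8+
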
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -19,7 +19,7 @@ maintainers = [
]
name = "getdaft"
readme = "README.rst"
requires-python = ">=3.8"
requires-python = ">=3.9"

[project.optional-dependencies]
all = ["getdaft[aws, azure, gcp, ray, pandas, numpy, iceberg, deltalake, sql, unity]"]
@@ -64,7 +64,7 @@ features = ["python"]
[tool.mypy]
exclude = ['daft/pickle/*.py$']
files = ["daft/**/*.py", "daft/**/*.pyx", "tests/**/*.py"]
python_version = "3.8"
python_version = "3.9"
warn_return_any = true
warn_unused_configs = true

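
With requires-python = ">=3.9", pip and other resolvers skip new getdaft releases on a 3.8 interpreter instead of failing at import time. A hedged sketch of the equivalent check done by hand with the packaging library (the version strings are illustrative):

    from packaging.specifiers import SpecifierSet

    requires_python = SpecifierSet(">=3.9")

    for interpreter in ("3.8.18", "3.9.0", "3.12.1"):
        # pip performs an equivalent containment test before installing a release.
        status = "ok" if interpreter in requires_python else "too old"
        print(interpreter, status)
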
55 changes: 24 additions & 31 deletions requirements-dev.txt
@@ -26,73 +26,66 @@ pytest-codspeed==2.2.1

# Testing dependencies
lxml==5.3.0
- dask==2023.5.0; python_version == '3.8'
- dask[dataframe]==2024.4.1; python_version >= '3.9'
- numpy; python_version < '3.9'
- numpy==1.26.2; python_version >= '3.9'
- pandas==2.0.3; python_version == '3.8'
- pandas==2.1.3; python_version >= '3.9'
+ dask[dataframe]==2024.4.1
+ numpy==1.26.2
+ pandas==2.1.3
xxhash>=3.0.0
Pillow==10.4.0
opencv-python==4.10.0.84
tiktoken==0.7.0
duckdb==1.1.2

# Pyarrow
- pyarrow==16.0.0; python_version >= '3.9'
- pyarrow==15.0.0; python_version < '3.9'
+ pyarrow==16.0.0
# Ray
- ray[data, client]==2.10.0; python_version == '3.8'
- ray[data, client]==2.34.0; python_version >= '3.9'
+ ray[data, client]==2.34.0

# Lance
- lancedb>=0.6.10; python_version >= '3.8'
+ lancedb>=0.6.10

#Iceberg
- pyiceberg==0.7.0; python_version >= '3.8'
- tenacity==8.2.3; python_version >= '3.8'
+ pyiceberg==0.7.0
+ tenacity==8.2.3

# Delta Lake
deltalake==0.5.8; platform_system == "Windows"
- deltalake==0.18.2; platform_system != "Windows" and python_version < '3.9'
- deltalake==0.19.2; platform_system != "Windows" and python_version >= '3.9'
+ deltalake==0.19.2; platform_system != "Windows"

# Databricks
databricks-sdk==0.12.0
unitycatalog==0.1.1

#SQL
- sqlalchemy==2.0.36; python_version >= '3.8'
- connectorx==0.2.3; platform_system == "Linux" and platform_machine == "aarch64" and python_version >= '3.8'
- connectorx==0.3.2; (platform_system != "Linux" or platform_machine != "aarch64") and python_version <= '3.8'
- connectorx==0.3.3; (platform_system != "Linux" or platform_machine != "aarch64") and python_version > '3.8'
- trino[sqlalchemy]==0.328.0; python_version >= '3.8'
- PyMySQL==1.1.0; python_version >= '3.8'
- psycopg2-binary==2.9.10; python_version >= '3.8'
- sqlglot==23.3.0; python_version >= '3.8'
- pyodbc==5.1.0; python_version >= '3.8'
+ sqlalchemy==2.0.36
+ connectorx==0.2.3; platform_system == "Linux" and platform_machine == "aarch64"
+ connectorx==0.3.3; platform_system != "Linux" or platform_machine != "aarch64"
+ trino[sqlalchemy]==0.328.0
+ PyMySQL==1.1.0
+ psycopg2-binary==2.9.10
+ sqlglot==23.3.0
+ pyodbc==5.1.0

# AWS
- s3fs==2023.12.0; python_version >= '3.8'
+ s3fs==2023.12.0
# on old versions of s3fs's pinned botocore, they neglected to pin urllib3<2 which leads to:
# "ImportError: cannot import name 'DEFAULT_CIPHERS' from 'urllib3.util.ssl_'"
- boto3==1.34.51; python_version >= '3.8'
- moto[s3,server]==5.0.21; python_version >= '3.8'
+ boto3==1.34.51
+ moto[s3,server]==5.0.21

# Azure
- adlfs==2024.7.0; python_version >= '3.8'
+ adlfs==2024.7.0
azure-storage-blob==12.24.0

# GCS
- gcsfs==2023.12.0; python_version >= '3.8'
+ gcsfs==2023.12.0

# Documentation
myst-nb>=0.16.0
Sphinx==5.3.0
- sphinx-book-theme==1.1.0; python_version >= "3.9"
+ sphinx-book-theme==1.1.0
sphinx-reredirects>=0.1.1
sphinx-copybutton>=0.5.2
- sphinx-autosummary-accessors==2023.4.0; python_version >= "3.9"
+ sphinx-autosummary-accessors==2023.4.0
sphinx-tabs==3.4.5

# Daft connect testing
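
The suffixes removed above were PEP 508 environment markers that kept separate 3.8-only pins alive; with a single 3.9 floor the unconditional pins are enough. A short sketch of how such a marker evaluates, using the packaging library (the marker string is just an example):

    from packaging.markers import Marker

    marker = Marker("python_version >= '3.9'")

    # Evaluate against explicit environments rather than the running interpreter.
    print(marker.evaluate({"python_version": "3.8"}))  # False -> the pin would be skipped
    print(marker.evaluate({"python_version": "3.9"}))  # True  -> the pin would apply
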
6 changes: 2 additions & 4 deletions tests/io/delta_lake/test_table_read.py
@@ -1,7 +1,5 @@
from __future__ import annotations

- import sys
-
import pyarrow as pa
import pytest

@@ -11,9 +9,9 @@
from tests.utils import assert_pyarrow_tables_equal

PYARROW_LE_8_0_0 = tuple(int(s) for s in pa.__version__.split(".") if s.isnumeric()) < (8, 0, 0)
- PYTHON_LT_3_8 = sys.version_info[:2] < (3, 8)
pytestmark = pytest.mark.skipif(
-     PYARROW_LE_8_0_0 or PYTHON_LT_3_8, reason="deltalake only supported if pyarrow >= 8.0.0 and python >= 3.8"
+     PYARROW_LE_8_0_0,
+     reason="deltalake only supported if pyarrow >= 8.0.0",
)


5 changes: 2 additions & 3 deletions tests/io/delta_lake/test_table_read_pushdowns.py
@@ -7,7 +7,6 @@
from daft.io.object_store_options import io_config_to_storage_options

deltalake = pytest.importorskip("deltalake")
- import sys

import pyarrow as pa
import pyarrow.compute as pc
@@ -18,9 +17,9 @@
from tests.utils import assert_pyarrow_tables_equal

PYARROW_LE_8_0_0 = tuple(int(s) for s in pa.__version__.split(".") if s.isnumeric()) < (8, 0, 0)
- PYTHON_LT_3_8 = sys.version_info[:2] < (3, 8)
pytestmark = pytest.mark.skipif(
-     PYARROW_LE_8_0_0 or PYTHON_LT_3_8, reason="deltalake only supported if pyarrow >= 8.0.0 and python >= 3.8"
+     PYARROW_LE_8_0_0,
+     reason="deltalake only supported if pyarrow >= 8.0.0",
)


12 changes: 3 additions & 9 deletions tests/io/delta_lake/test_table_write.py
@@ -2,7 +2,6 @@

import datetime
import decimal
- import sys
from pathlib import Path

import pyarrow as pa
@@ -13,15 +12,10 @@
from daft.logical.schema import Schema
from tests.conftest import get_tests_daft_runner_name

- PYARROW_LE_8_0_0 = tuple(int(s) for s in pa.__version__.split(".") if s.isnumeric()) < (
-     8,
-     0,
-     0,
- )
- PYTHON_LT_3_8 = sys.version_info[:2] < (3, 8)
+ PYARROW_LE_8_0_0 = tuple(int(s) for s in pa.__version__.split(".") if s.isnumeric()) < (8, 0, 0)
pytestmark = pytest.mark.skipif(
-     PYARROW_LE_8_0_0 or PYTHON_LT_3_8,
-     reason="deltalake only supported if pyarrow >= 8.0.0 and python >= 3.8",
+     PYARROW_LE_8_0_0,
+     reason="deltalake only supported if pyarrow >= 8.0.0",
)

