Skip to content

Commit

Permalink
* Switch from setup.py to pyproject.toml
Browse files Browse the repository at this point in the history
* Add numpy<2,pandas<2 test environment to build pipeline test matrix
  • Loading branch information
Mateusz Kopeć committed Dec 13, 2024
1 parent f0b9f72 commit 5f54e1e
Show file tree
Hide file tree
Showing 6 changed files with 83 additions and 151 deletions.
12 changes: 8 additions & 4 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ jobs:
matrix:
os: [ubuntu-latest]
python: ['3.9', '3.10', '3.11', '3.12']
test_numpy_pre2: [true, false]
runs-on: ${{ matrix.os }}

steps:
Expand All @@ -22,14 +23,17 @@ jobs:
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements-test.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .
pip install -r requirements-test.txt
if [ "${{ matrix.test_numpy_pre2 }}" = "true" ]; then
pip install ".[test,test_numpy_pre2]"
else
pip install ".[test]"
fi
- name: Test with pytest
run: |
pytest tests/
pytest tests
3 changes: 3 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ Version 1.0.34, Dec 2024
* Remove unused test_gpu.twosigfigs function.
* Refactor tests with Numpy() and Pandas() context managers to use single 'with' statement.

* Switch from setup.py to pyproject.toml
* Add numpy<2,pandas<2 test environment to build pipeline test matrix

Version 1.0.33, Dec 2022
------------------------
* fix of get_sub_hist() when Bin histogram is filled only with nans.
Expand Down
72 changes: 72 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
[build-system]
# setuptools >= 61.0 is the first release that understands PEP 621
# [project] metadata; older versions silently ignore it.
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "histogrammar"
description = "Composable histogram primitives for distributed data reduction"
keywords = [
    "pandas",
    "spark",
    "data-science",
    "data-analysis",
    "statistics",
    "python",
    "jupyter",
    "ipython",
]
readme = "README.rst"
requires-python = ">=3.9"
authors = [
    { name = "Jim Pivarski (DIANA-HEP)", email = "[email protected]" },
    { name = "Max Baak", email = "[email protected]" },
]
maintainers = [{ name = "Max Baak", email = "[email protected]" }]
# PEP 621 only allows `file` or `text` in the license table; a `type`
# key is invalid metadata and is rejected by validate-pyproject. The
# license kind is already conveyed by the trove classifier below.
license = { file = "LICENSE" }
dependencies = [
    "joblib>=0.14.0",
    "numpy",
    "tqdm",
]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Console",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: Apache Software License",
    "Topic :: Scientific/Engineering :: Information Analysis",
    "Topic :: Scientific/Engineering :: Mathematics",
    "Topic :: Scientific/Engineering :: Physics",
]
# version is resolved at build time from histogrammar/version.py,
# see [tool.setuptools.dynamic] below.
dynamic = ["version"]

[project.urls]
repository = "https://github.com/histogrammar/histogrammar-python"

[project.optional-dependencies]
test = [
    "ipykernel>=5.1.3",
    "jupyter_client>=5.2.3",
    "matplotlib",
    "pandas",
    "pre-commit>=2.9.0",
    "pytest-notebook>=0.6.1",
    "pytest>=4.0.2",
]
# Pre-2.0 pin set for the CI matrix leg that exercises old numpy/pandas.
# The extra name is referenced verbatim by .github/workflows/test.yml
# (`pip install ".[test,test_numpy_pre2]"`), so it must not be renamed;
# installers normalize it per PEP 685 either way.
test_numpy_pre2 = [
    "numpy<2",
    "pandas<2",
]

# Files to be shipped with the installation, under histogrammar/test_data
# and histogrammar/notebooks; after installation these can be located with
# the helpers in resources.py.
[tool.setuptools.package-data]
histogrammar = [
    "test_data/*.csv.gz",
    "test_data/*.json*",
    "notebooks/*tutorial*.ipynb",
]

[tool.semantic_release]
version_variable = [
    "histogrammar/version.py:version",
]
build_command = "pip install build && python -m build"

[tool.setuptools.dynamic]
version = { attr = "histogrammar.version.version" }
7 changes: 0 additions & 7 deletions requirements-test.txt

This file was deleted.

3 changes: 0 additions & 3 deletions requirements.txt

This file was deleted.

137 changes: 0 additions & 137 deletions setup.py

This file was deleted.

0 comments on commit 5f54e1e

Please sign in to comment.