Skip to content

Commit

Permalink
try to fix cython int type error; add github CI for quick checks
Browse files Browse the repository at this point in the history
closes issue #134
(hopefully)
  • Loading branch information
JohannesBuchner committed May 26, 2024
1 parent ca3a099 commit 4e9f375
Show file tree
Hide file tree
Showing 7 changed files with 93 additions and 37 deletions.
7 changes: 6 additions & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ jobs:

- run: sudo ln -s /usr/lib/python3/dist-packages/numpy/core/include/numpy/ /usr/include/numpy

- run: sudo python3 -m pip install -r pip-requirements.txt pytest-html coveralls pyyaml mpi4py
- run: sudo python3 -m pip install -r pip-requirements.txt pytest-html coveralls pyyaml mpi4py pydocstyle pycodestyle flake8

- run: mkdir -p test-reports

- run: python3 -m pip install -e .
Expand All @@ -35,6 +36,10 @@ jobs:
- run: coverage3 report --include="$PWD/*" --omit="$PWD/.eggs/*"
- run: coverage3 html --include="$PWD/*" --omit="$PWD/.eggs/*" && mv htmlcov test-reports

- run: flake8 $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')
- run: pycodestyle $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')
- run: pydocstyle $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')

- run: coveralls

- store_test_results:
Expand Down
44 changes: 44 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: build

on:
push:
pull_request:
schedule:
- cron: '42 4 5,20 * *'

jobs:
build:

runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.7, 3.8, 3.9, "3.10", 3.11, 3.12]

steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}

- name: Install dependencies
run: python -m pip install -r pip-requirements.txt

- name: Lint with flake8
run: flake8 $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')

- name: Check code style
run: pycodestyle $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')

- name: Check doc style
run: pydocstyle $(ls ultranest/*.py | grep -Ev '^ultranest/(flatnuts|dychmc|dyhmc).py')

- name: Install package
run: python -m pip install -e .

- name: Test with pytest
run: pytest -n auto -x
6 changes: 4 additions & 2 deletions ultranest/integrator.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@

__all__ = ['ReactiveNestedSampler', 'NestedSampler', 'read_file', 'warmstart_from_similar_file']

int_t = int


def _get_cumsum_range(pi, dp):
"""Compute quantile indices from probabilities.
Expand Down Expand Up @@ -2015,8 +2017,8 @@ def _update_region(

# instead, track the clusters from before by matching manually
oldt = self.transformLayer.transform(oldu)
clusterids = np.zeros(len(active_u), dtype=np.int_)
nnearby = np.empty(len(self.region.unormed), dtype=np.int_)
clusterids = np.zeros(len(active_u), dtype=int_t)
nnearby = np.empty(len(self.region.unormed), dtype=int_t)
for ci in np.unique(self.transformLayer.clusterids):
if ci == 0:
continue
Expand Down
10 changes: 5 additions & 5 deletions ultranest/mlfriends.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ cdef count_nearby(
np.ndarray[np.float_t, ndim=2] apts,
np.ndarray[np.float_t, ndim=2] bpts,
np.float_t radiussq,
np.ndarray[np.int_, ndim=1] nnearby
np.ndarray[np.int_t, ndim=1] nnearby
):
"""Count the number of points in ``apts`` within square radius ``radiussq`` for each point ``b`` in `bpts``.
Expand Down Expand Up @@ -139,7 +139,7 @@ def find_nearby(
np.ndarray[np.float_t, ndim=2] apts,
np.ndarray[np.float_t, ndim=2] bpts,
np.float_t radiussq,
np.ndarray[np.int_, ndim=1] nnearby
np.ndarray[np.int_t, ndim=1] nnearby
):
"""Gets the index of a point in `a` within square radius `radiussq`, for each point `b` in `bpts`.
Expand Down Expand Up @@ -223,7 +223,7 @@ cdef float compute_maxradiussq(np.ndarray[np.float_t, ndim=2] apts, np.ndarray[n
@cython.wraparound(False)
def compute_mean_pair_distance(
np.ndarray[np.float_t, ndim=2] pts,
np.ndarray[np.int_, ndim=1] clusterids
np.ndarray[np.int_t, ndim=1] clusterids
):
"""Compute the average distance between pairs of points.
Pairs from different clusters are excluded in the computation.
Expand Down Expand Up @@ -271,7 +271,7 @@ cdef _update_clusters(
np.ndarray[np.float_t, ndim=2] upoints,
np.ndarray[np.float_t, ndim=2] tpoints,
np.float_t maxradiussq,
np.ndarray[np.int_, ndim=1] clusterids,
np.ndarray[np.int_t, ndim=1] clusterids,
):
"""same signature as ``update_clusters()``, see there."""
assert upoints.shape[0] == tpoints.shape[0], ('different number of points', upoints.shape[0], tpoints.shape[0])
Expand Down Expand Up @@ -845,7 +845,7 @@ class LocalAffineLayer(AffineLayer):
return s


def vol_prefactor(np.int_ n):
def vol_prefactor(np.int_t n):
"""Volume constant for an ``n``-dimensional sphere.
for ``n`` even: $$ (2pi)^(n /2) / (2 * 4 * ... * n)$$
Expand Down
8 changes: 5 additions & 3 deletions ultranest/popstepsampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
from ultranest.stepfuncs import generate_differential_direction, generate_mixture_random_direction
import scipy.stats

int_t = int


def unitcube_line_intersection(ray_origin, ray_direction):
r"""Compute intersection of a line (ray) and a unit box (0:1 in all axes).
Expand Down Expand Up @@ -425,7 +427,7 @@ def _setup(self, ndim):
self.allL = np.zeros((self.popsize, self.nsteps + 1)) + np.nan
self.currentt = np.zeros(self.popsize) + np.nan
self.currentv = np.zeros((self.popsize, ndim)) + np.nan
self.generation = np.zeros(self.popsize, dtype=np.int_) - 1
self.generation = np.zeros(self.popsize, dtype=int_t) - 1
self.current_left = np.zeros(self.popsize)
self.current_right = np.zeros(self.popsize)
self.searching_left = np.zeros(self.popsize, dtype=bool)
Expand Down Expand Up @@ -923,9 +925,9 @@ def __next__(
# Slice bounds for each point
tleft, tright = self.slice_limit(tleft_unitcube,tright_unitcube)
# Index of the workers working concurrently
worker_running = np.arange(0, self.popsize, 1, dtype=np.int_)
worker_running = np.arange(0, self.popsize, 1, dtype=int_t)
# Status indicating if a point has already found its next position
status = np.zeros(self.popsize, dtype=np.int_) # one for success, zero for running
status = np.zeros(self.popsize, dtype=int_t) # one for success, zero for running

# Loop until each point has found its next position or we reach 100 iterations
for it in range(self.max_it):
Expand Down
4 changes: 2 additions & 2 deletions ultranest/stepfuncs.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ def step_back(Lmin, allL, generation, currentt, log=False):

cdef _fill_directions(
np.ndarray[np.float_t, ndim=2] v,
np.ndarray[np.int_, ndim=1] indices,
np.ndarray[np.int_t, ndim=1] indices,
float scale
):
cdef size_t nsamples = v.shape[0]
Expand Down Expand Up @@ -533,7 +533,7 @@ cpdef tuple update_vectorised_slice_sampler(\
np.ndarray[np.float_t, ndim=1] t, np.ndarray[np.float_t, ndim=1] tleft,\
np.ndarray[np.float_t, ndim=1] tright, np.ndarray[np.float_t, ndim=1] proposed_L,\
np.ndarray[np.float_t, ndim=2] proposed_u, np.ndarray[np.float_t, ndim=2] proposed_p,\
np.ndarray[np.int_, ndim=1] worker_running, np.ndarray[np.int_, ndim=1] status,\
np.ndarray[np.int_t, ndim=1] worker_running, np.ndarray[np.int_t, ndim=1] status,\
np.float_t Likelihood_threshold,np.float_t shrink_factor, np.ndarray[np.float_t, ndim=2] allu,\
np.ndarray[np.float_t, ndim=1] allL, np.ndarray[np.float_t, ndim=2] allp, int popsize):

Expand Down
51 changes: 27 additions & 24 deletions ultranest/stepsampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -466,33 +466,36 @@ def select_random_livepoint(us, Ls, Lmin):


class IslandPopulationRandomLivepointSelector(object):
"""Mutually isolated live point subsets.
To replace dead points, chains are only started from the same
island as the dead point. Island refers to chunks of
live point indices (0,1,2,3 as stored, not sorted).
Each chunk has size `island_size`.
If `island_size` is large, for example, the total number of live points,
then clumping can occur more easily. This is the observed behaviour
that a limited random walk is run from one live point, giving
two similar points, then the next dead point replacement is
likely run again from these, giving more and more similar live points.
This gives a run-away process leading to clumps of similar,
highly correlated points.
If `island_size` is small, for example, 1, then each dead point
is replaced by a chain started from it. This is a problem because
modes can never die out. Nested sampling can then not complete.
In a multi-modal run, within a given number of live points,
the number of live points per mode is proportional to the mode's
prior volume, but can fluctuate. If the number of live points
is small, a fluctuation can lead to mode die-out, which cannot
be reversed. Therefore, the number of island members should be
large enough to represent each mode.
"""

def __init__(self, island_size, exchange_probability=0):
"""Set up multiple isolated islands.
To replace dead points, chains are only started from the same
island as the dead point. Island refers to chunks of
live point indices (0,1,2,3 as stored, not sorted).
Each chunk has size `island_size`.
If `island_size` is large, for example, the total number of live points,
then clumping can occur more easily. This is the observed behaviour
that a limited random walk is run from one live point, giving
two similar points, then the next dead point replacement is
likely run again from these, giving more and more similar live points.
This gives a run-away process leading to clumps of similar,
highly correlated points.
If `island_size` is small, for example, 1, then each dead point
is replaced by a chain started from it. This is a problem because
modes can never die out. Nested sampling can then not complete.
In a multi-modal run, within a given number of live points,
the number of live points per mode is proportional to the mode's
prior volume, but can fluctuate. If the number of live points
is small, a fluctuation can lead to mode die-out, which cannot
be reversed. Therefore, the number of island members should be
large enough to represent each mode.
Parameters
-----------
island_size: int
Expand Down

0 comments on commit 4e9f375

Please sign in to comment.