Merge pull request #593 from ICB-DCM/develop
Release 0.12.9
yannikschaelte authored Mar 1, 2023
2 parents 59e06fd + cc64c37 commit 7d9c109
Showing 17 changed files with 55 additions and 34 deletions.
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
@@ -11,12 +11,12 @@

repos:
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 23.1.0
hooks:
- id: black
description: The uncompromising code formatter
- repo: https://github.com/pycqa/isort
rev: 5.10.1
rev: 5.12.0
hooks:
- id: isort
name: isort (python)
@@ -27,14 +27,14 @@ repos:
name: isort (pyi)
types: [pyi]
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.2.3
rev: 1.6.1
hooks:
- id: nbqa-black
- id: nbqa-pyupgrade
args: [--py36-plus]
- id: nbqa-isort
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
rev: v4.4.0
hooks:
- id: check-yaml
description: Check yaml files for parseable syntax
@@ -55,6 +55,6 @@ repos:
- id: style
name: Check style
description: Check style
entry: tox -e project,flake8
language: python
entry: tox -e flake8 --
language: system
types: [python]
9 changes: 9 additions & 0 deletions CHANGELOG.rst
@@ -7,6 +7,15 @@ Release Notes
0.12 Series
...........

0.12.9 (2023-03-01)
-------------------

Minor:

* Improve documentation of p-norm (#592)
* Update citation to JOSS
* Temporarily fixate sqlalchemy version
* Update pre-commit hooks (all #596)

0.12.8 (2022-11-16)
-------------------
29 changes: 17 additions & 12 deletions CITE.rst
@@ -5,21 +5,26 @@ When using pyABC version >= 0.8, please cite:

* Schälte, Y., Klinger, E., Alamoudi, E., Hasenauer, J., 2022.
pyABC: Efficient and robust easy-to-use approximate Bayesian computation.
arXiv.
https://doi.org/10.48550/arxiv.2203.13043.
Journal of Open Source Software.
https://doi.org/10.21105/joss.04304.

.. code-block:: bibtex
@article{schaelte2022pyabc,
title = {pyABC: Efficient and robust easy-to-use approximate Bayesian computation},
author = {Schälte, Yannik and Klinger, Emmanuel and Alamoudi, Emad and Hasenauer, Jan},
journal = {arXiv},
year = {2022},
doi = {10.48550/arxiv.2203.13043},
url = {https://arxiv.org/abs/2203.13043},
}
When using pyABC version < 0.8 or functionality not introduced in later versions, please cite:
@article{schaelte2022pyabc,
title = {pyABC: Efficient and robust easy-to-use approximate Bayesian computation},
author = {Schälte, Yannik and Klinger, Emmanuel and Alamoudi, Emad and Hasenauer, Jan},
journal = {Journal of Open Source Software},
publisher = {The Open Journal},
year = {2022},
volume = {7},
number = {74},
pages = {4304},
doi = {10.21105/joss.04304},
url = {https://doi.org/10.21105/joss.04304},
}
When using pyABC version < 0.8 or functionality not introduced in later
versions, please (also) cite:

* Klinger, E., Rickert, D., Hasenauer, J., 2018.
pyABC: distributed, likelihood-free inference.
2 changes: 2 additions & 0 deletions doc/examples/adaptive_distances.ipynb
@@ -111,6 +111,7 @@
"df_logger = logging.getLogger(\"ABC.Distance\")\n",
"df_logger.setLevel(logging.DEBUG)\n",
"\n",
"\n",
"# model definition\n",
"def model(p):\n",
" return {\n",
@@ -470,6 +471,7 @@
"df_logger = logging.getLogger(\"Distance\")\n",
"df_logger.setLevel(logging.DEBUG)\n",
"\n",
"\n",
"# model definition\n",
"def model(p):\n",
" return {\n",
4 changes: 2 additions & 2 deletions pyabc/distance/pnorm.py
@@ -35,8 +35,8 @@ class PNormDistance(Distance):
\\left [\\sum_{i} \\left| w_i ( x_i-y_i ) \\right|^{p} \\right ]^{1/p}
E.g.
* p=1 for a Euclidean or L1 metric,
* p=2 for a Manhattan or L2 metric,
* p=1 for a Manhattan or L1 metric,
* p=2 for a Euclidean or L2 metric,
* p=np.inf for a Chebyshev, maximum or inf metric.
Parameters
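
The hunk above corrects the metric names in the PNormDistance docstring: p=1 is the Manhattan (L1) metric and p=2 the Euclidean (L2) metric, not the other way around. A minimal NumPy sketch of the weighted p-norm formula, independent of pyABC's API, illustrating the corrected naming:

```python
import numpy as np


def weighted_p_norm(x, y, w, p):
    """Weighted p-norm [sum_i |w_i (x_i - y_i)|^p]^(1/p)."""
    d = np.abs(w * (x - y))
    if np.isinf(p):
        # Chebyshev / maximum metric
        return np.max(d)
    return np.sum(d**p) ** (1 / p)


x = np.array([1.0, 2.0, 3.0])
y = np.array([2.0, 0.0, 3.5])
w = np.ones_like(x)

print(weighted_p_norm(x, y, w, p=1))       # Manhattan / L1: 3.5
print(weighted_p_norm(x, y, w, p=2))       # Euclidean / L2: ~2.29
print(weighted_p_norm(x, y, w, p=np.inf))  # Chebyshev / max: 2.0
```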
1 change: 1 addition & 0 deletions pyabc/epsilon/temperature.py
@@ -372,6 +372,7 @@ def match_acceptance_rate(weights, pds, pdf_norm, kernel_scale, target_rate):
For a temperature close to 1, subtler changes are necessary; however, here
the logarithm is nearly linear anyway.
"""

# objective function which we wish to find a root for
def obj(b):
beta = np.exp(b)
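
The docstring context above explains why the acceptance-rate matching solves for the temperature via its logarithm: temperatures can span orders of magnitude, while near a temperature of 1 the objective is roughly linear in the log anyway. A generic sketch of that root-finding pattern; the objective below is only a stand-in, not pyABC's actual acceptance-rate criterion:

```python
import numpy as np
from scipy.optimize import brentq


def obj(b):
    # Stand-in objective in terms of b = log(beta); pyABC's real objective
    # compares a predicted acceptance rate against the target rate.
    beta = np.exp(b)
    return np.log(beta) - 1.5  # root at beta = exp(1.5)


# Bracketing in log space: beta in [1, 1e5] becomes b in [0, log(1e5)],
# which is far better conditioned for a bracketing root finder.
b_root = brentq(obj, 0.0, np.log(1e5))
print(np.exp(b_root))  # ~4.48
```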
5 changes: 4 additions & 1 deletion pyabc/external/r/r_rpy2.py
@@ -78,7 +78,10 @@ class R:
def __init__(self, source_file: str):
if r is None:
raise ImportError("Install rpy2, e.g. via `pip install pyabc[R]`")
warnings.warn("The support of R via rpy2 is considered experimental.")
warnings.warn(
"The support of R via rpy2 is considered experimental.",
stacklevel=2,
)
self.source_file = source_file
self._read_source()

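
This hunk, like similar ones in sge.py and dataframe_bytes_storage.py below, adds stacklevel=2 to warnings.warn, so the warning is reported at the caller's line instead of at a line inside the library. A small self-contained illustration of the effect:

```python
import warnings


def library_function():
    # With stacklevel=2 the reported location is the line that called
    # library_function(), not this line inside the library.
    warnings.warn("this feature is experimental", stacklevel=2)


def user_code():
    library_function()  # the warning points to this line


user_code()
```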
1 change: 0 additions & 1 deletion pyabc/population.py
@@ -72,7 +72,6 @@ def __init__(
proposal_id: int = 0,
preliminary: bool = False,
):

self.m = m
self.parameter = parameter
self.weight = weight
1 change: 0 additions & 1 deletion pyabc/sampler/multicore_evaluation_parallel.py
@@ -43,7 +43,6 @@ def work(
sample.append(new_sim)

if new_sim.accepted:

# increase number of accepted particles
with n_acc.get_lock():
n_acc.value += 1
7 changes: 4 additions & 3 deletions pyabc/sge/sge.py
@@ -133,7 +133,6 @@ def __init__(
execution_context=DefaultContext,
chunk_size=1,
):

# simple assignments
self.memory = memory
self.time_h = time_h
@@ -159,7 +158,8 @@ def __init__(
self.job_name = name
if self.config["SGE"]["PRIORITY"] == "0":
warnings.warn(
"Priority set to 0. " "This enables the reservation flag."
"Priority set to 0. " "This enables the reservation flag.",
stacklevel=2,
)
self.num_threads = num_threads
self.execution_context = execution_context
@@ -168,7 +168,8 @@
if chunk_size != 1:
warnings.warn(
"Chunk size != 1. "
"This can potentially have bad side effect."
"This can potentially have bad side effect.",
stacklevel=2,
)

if not sge_available():
1 change: 1 addition & 0 deletions pyabc/storage/dataframe_bytes_storage.py
@@ -129,6 +129,7 @@ def df_to_bytes(df: pd.DataFrame) -> bytes:
"Can't find pyarrow, falling back to less efficient csv "
"to store pandas DataFrames.\n"
"Install e.g. via `pip install pyabc[pyarrow]`",
stacklevel=2,
)
return df_to_bytes_csv(df)
return df_to_bytes_parquet(df)
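
The warning above is emitted when pyarrow is not installed and DataFrames fall back to the less efficient CSV serialization. A rough sketch of that fallback pattern; it is simplified and not pyABC's actual df_to_bytes_parquet / df_to_bytes_csv helpers:

```python
import io
import warnings

import pandas as pd


def df_to_bytes_sketch(df: pd.DataFrame) -> bytes:
    try:
        import pyarrow  # noqa: F401
    except ImportError:
        warnings.warn(
            "Can't find pyarrow, falling back to less efficient csv",
            stacklevel=2,
        )
        return df.to_csv(index=False).encode()
    buf = io.BytesIO()
    df.to_parquet(buf, engine="pyarrow")
    return buf.getvalue()


payload = df_to_bytes_sketch(pd.DataFrame({"a": [1, 2], "b": [3.0, 4.0]}))
print(len(payload))
```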
8 changes: 4 additions & 4 deletions pyabc/storage/history.py
@@ -4,7 +4,7 @@
import os
import tempfile
from functools import wraps
from typing import List, Union
from typing import List, Tuple, Union

import numpy as np
import pandas as pd
@@ -308,7 +308,7 @@ def alive_models(self, t: int = None) -> List:
@with_session
def get_distribution(
self, m: int = 0, t: int = None
) -> (pd.DataFrame, np.ndarray):
) -> Tuple[pd.DataFrame, np.ndarray]:
"""
Returns the weighted population sample for model m and timepoint t
as a tuple.
@@ -997,7 +997,7 @@ def n_populations(self):
@with_session
def get_weighted_sum_stats_for_model(
self, m: int = 0, t: int = None
) -> (np.ndarray, List):
) -> Tuple[np.ndarray, List]:
"""
Summary statistics for model `m`. The weights sum to 1, unless
there were multiple acceptances per particle.
@@ -1046,7 +1046,7 @@ def get_weighted_sum_stats_for_model(
@with_session
def get_weighted_sum_stats(
self, t: int = None
) -> (List[float], List[dict]):
) -> Tuple[List[float], List[dict]]:
"""
Population's weighted summary statistics.
These weights do not necessarily sum up to 1.
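
The history.py hunks above replace return annotations written as a plain parenthesized pair, e.g. (pd.DataFrame, np.ndarray), with Tuple[pd.DataFrame, np.ndarray]. The parenthesized form is just a tuple of type objects, not a valid type for static checkers; Tuple[...] (or tuple[...] on Python 3.9+) is. A minimal before/after sketch:

```python
from typing import Tuple

import numpy as np
import pandas as pd


# Runs, but the annotation is merely a 2-tuple of classes; tools like mypy
# flag it as a syntax error in the type annotation.
def get_distribution_old() -> (pd.DataFrame, np.ndarray):
    ...


# Valid: Tuple[...] describes "a DataFrame and an ndarray".
def get_distribution_new() -> Tuple[pd.DataFrame, np.ndarray]:
    df = pd.DataFrame({"p": [0.1, 0.2]})
    w = np.array([0.5, 0.5])
    return df, w
```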
1 change: 0 additions & 1 deletion pyabc/transition/grid_search.py
@@ -40,7 +40,6 @@ def __init__(
error_score='raise',
return_train_score=True,
):

if estimator is None:
estimator = MultivariateNormalTransition()
if param_grid is None:
2 changes: 1 addition & 1 deletion pyabc/version.py
@@ -1 +1 @@
__version__ = '0.12.8'
__version__ = '0.12.9'
3 changes: 2 additions & 1 deletion setup.cfg
@@ -60,7 +60,8 @@ install_requires =
redis >= 2.10.6
distributed >= 2022.10.2
matplotlib >= 3.3.0
sqlalchemy >= 1.3.18
# until https://github.com/pandas-dev/pandas/issues/40686
sqlalchemy >= 1.3.18, < 2.0.0
jabbar >= 0.0.10
gitpython >= 3.1.7

2 changes: 1 addition & 1 deletion test/base/test_epsilon.py
@@ -20,7 +20,7 @@ def test_constantepsilon():

def test_listepsilon():
eps = pyabc.ListEpsilon([3.5, 2.3, 1, 0.3])
with pytest.raises(Exception):
with pytest.raises(IndexError):
eps(4)


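
The test change above narrows pytest.raises(Exception) to pytest.raises(IndexError): ListEpsilon was built from four values, so requesting generation 4 runs past the schedule, and asserting the specific exception keeps the test from passing on unrelated failures. A small sketch of the pattern, using a plain list in place of ListEpsilon:

```python
import pytest


def epsilon_from_schedule(schedule, t):
    # Mimic looking up a fixed epsilon schedule by generation index t.
    return schedule[t]


def test_schedule_out_of_range():
    schedule = [3.5, 2.3, 1, 0.3]
    # Assert IndexError specifically: a broad Exception would also swallow,
    # e.g., a TypeError from a broken call signature.
    with pytest.raises(IndexError):
        epsilon_from_schedule(schedule, 4)
```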
1 change: 1 addition & 0 deletions test/base/test_storage.py
@@ -572,6 +572,7 @@ def test_export():
Just calls export and does some very basic checks.
"""

# simple problem
def model(p):
return {"y": p["p"] + 0.1 * np.random.normal()}
Expand Down
