Added a workaround for freezing parallel processing. Now when one specifies n_jobs=1 it will not do anything in parallel and will not spawn new processes.
cotterpl committed Jan 8, 2020
1 parent 75bc957 commit ebb914d
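
For reference, a minimal usage sketch of the new behavior (assuming the public TBATS estimator forwards n_jobs to the underlying Estimator; the input series below is made up purely for illustration):

import numpy as np
from tbats import TBATS

y = np.random.rand(100)  # illustrative data only

# With n_jobs=1, model selection runs in a thread-backed dummy pool,
# so no new worker processes are spawned.
estimator = TBATS(seasonal_periods=[7], n_jobs=1)
model = estimator.fit(y)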
Showing 4 changed files with 55 additions and 38 deletions.
69 changes: 37 additions & 32 deletions requirements_stable.txt
@@ -1,46 +1,51 @@
atomicwrites==1.3.0
attrs==19.1.0
appnope==0.1.0
backcall==0.1.0
bleach==3.1.0
certifi==2019.6.16
cffi==1.12.3
certifi==2019.11.28
chardet==3.0.4
Click==7.0
Cython==0.29.13
Cython==0.29.14
decorator==4.4.1
docutils==0.15.2
idna==2.8
importlib-metadata==0.19
Jinja2==2.10.1
joblib==0.13.2
MarkupSafe==1.1.1
more-itertools==7.2.0
numpy==1.17.0
packaging==19.1
pandas==0.25.0
importlib-metadata==1.3.0
ipykernel==5.1.3
ipython==7.9.0
ipython-genutils==0.2.0
jedi==0.15.1
joblib==0.14.1
jupyter-client==5.3.4
jupyter-core==4.6.1
keyring==21.0.0
more-itertools==8.0.2
numpy==1.18.1
pandas==0.25.3
parso==0.5.1
patsy==0.5.1
pip-tools==4.0.0
pexpect==4.7.0
pickleshare==0.7.5
pip-tools==4.3.0
pkginfo==1.5.0.1
pluggy==0.12.0
pmdarima==1.2.1
py==1.8.0
pycparser==2.19
pmdarima==1.5.2
prompt-toolkit==2.0.10
ptyprocess==0.6.0
Pygments==2.4.2
pyparsing==2.4.2
pytest==5.0.1
python-dateutil==2.8.0
pytz==2019.2
python-dateutil==2.8.1
pytz==2019.3
pyzmq==18.1.0
readme-renderer==24.0
requests==2.22.0
requests-toolbelt==0.9.1
rpy2==3.0.5
scikit-learn==0.21.3
scipy==1.3.0
simplegeneric==0.8.1
scikit-learn==0.22.1
scipy==1.4.1
six==1.12.0
statsmodels==0.10.1
-e [email protected]:intive-DataScience/tbats.git@4e726919f08e39e74dd70a592b5258dfc7b25953#egg=tbats
tqdm==4.32.2
twine==1.13.0
urllib3==1.25.3
statsmodels==0.10.2
-e [email protected]:intive-DataScience/tbats.git@75bc957e425c3b82dfe75373e2cea7aff17ecc12#egg=tbats
tornado==6.0.3
tqdm==4.41.1
traitlets==4.3.3
twine==3.1.1
urllib3==1.25.7
wcwidth==0.1.7
webencodings==0.5.1
zipp==0.5.2
zipp==0.6.0
2 changes: 1 addition & 1 deletion setup.py
@@ -43,7 +43,7 @@ def run_tests(self):

setuptools.setup(
name='tbats',
version='1.0.8',
version='1.0.9',
packages=setuptools.find_packages(exclude=('test', 'test_R')),
url='https://github.com/intive-DataScience/tbats',
license='MIT License',
10 changes: 8 additions & 2 deletions tbats/abstract/Estimator.py
@@ -1,5 +1,6 @@
import numpy as np
import multiprocessing
import multiprocessing as actual_processing
import multiprocessing.dummy as dummy_processing
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_array, column_or_1d as c1d
from sklearn.model_selection import ParameterGrid
@@ -139,7 +140,7 @@ def _choose_model_from_possible_component_settings(self, y, components_grid):
"""
self._y = y
# note n_jobs = None means to use cpu_count()
pool = multiprocessing.pool.Pool(processes=self.n_jobs)
pool = self._prepare_pool(self.n_jobs)
models = pool.map(self._case_fit, components_grid)
pool.close()
self._y = None # clean-up
@@ -151,6 +152,11 @@ def _choose_model_from_possible_component_settings(self, y, components_grid):
best_model = model
return best_model

def _prepare_pool(self, n_jobs=None):
if n_jobs == 1:
return dummy_processing.Pool(processes=n_jobs)
return actual_processing.Pool(processes=n_jobs)

def _prepare_components_grid(self, seasonal_harmonics=None):
"""Provides a grid of all allowed model component combinations.
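
The change relies on multiprocessing.dummy exposing the same Pool API as multiprocessing while backing it with threads in the current process, so no worker processes are created. A self-contained sketch of the selection pattern used above (prepare_pool here is an illustrative standalone function, not the library method):

import multiprocessing as actual_processing
import multiprocessing.dummy as dummy_processing

def prepare_pool(n_jobs=None):
    # n_jobs == 1: thread-backed pool in the current process, nothing is spawned
    if n_jobs == 1:
        return dummy_processing.Pool(processes=n_jobs)
    # otherwise a regular process pool; processes=None falls back to cpu_count()
    return actual_processing.Pool(processes=n_jobs)

pool = prepare_pool(n_jobs=1)
print(pool.map(lambda x: x * x, range(4)))  # [0, 1, 4, 9]
pool.close()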
12 changes: 9 additions & 3 deletions tbats/tbats/HarmonicsChoosingStrategy.py
@@ -1,14 +1,15 @@
import numpy as np
import fractions
import multiprocessing
import multiprocessing as actual_processing
import multiprocessing.dummy as dummy_processing


class HarmonicsChoosingStrategy(object):

def __init__(self, context, n_jobs=None):
self.n_jobs = n_jobs
if n_jobs is None:
self.n_jobs = multiprocessing.cpu_count()
self.n_jobs = actual_processing.cpu_count()
self.context = context

def choose(self, y, components):
@@ -78,7 +79,7 @@ def choose_for_season(self, season_index, max_harmonic, best_model_so_far):
self._season_index = season_index
self._y = best_model_so_far.y
self._components = best_model_so_far.params.components
pool = multiprocessing.pool.Pool(processes=self.n_jobs)
pool = self._prepare_pool(self.n_jobs)
models = pool.map(self._fit_model, harmonics_range)
pool.close()
for model in models:
@@ -102,6 +103,11 @@ def choose_for_season(self, season_index, max_harmonic, best_model_so_far):
return best_model_so_far
return best_model

def _prepare_pool(self, n_jobs=None):
if n_jobs == 1:
return dummy_processing.Pool(processes=n_jobs)
return actual_processing.Pool(processes=n_jobs)

def _fit_model(self, harmonic_to_check):
components = self._components.with_harmonic_for_season(
season_index=self._season_index, new_harmonic=harmonic_to_check
