Undo dependency on botorch master
Summary:
Ax released a version that depended on changes that only exist on botorch master: #159

This changes things back so that it should work with botorch 0.1.3. (A sketch of the API difference appears below, just before the file diffs.)

Reviewed By: lena-kashtelyan

Differential Revision: D17093727

fbshipit-source-id: 7d08d4205743c8135fb4adde73befa45a0204816
Balandat authored and facebook-github-bot committed Aug 28, 2019
1 parent 564c6fc commit 93d6fd6
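
For context, here is a minimal sketch, not part of this commit, of the two calling conventions involved. The optimize_candidates wrapper is hypothetical; the import paths, the `sequential` keyword, and the return conventions are taken from the diffs below.

# Hypothetical compatibility wrapper -- a sketch, not Ax code.
try:
    # botorch master: a single entry point that returns a
    # (candidates, acq_values) tuple; `sequential` picks the mode.
    from botorch.optim.optimize import optimize_acqf

    def optimize_candidates(sequential=True, **kwargs):
        candidates, _acq_values = optimize_acqf(sequential=sequential, **kwargs)
        return candidates

except ImportError:
    # botorch 0.1.3: two entry points, each returning only the candidate tensor.
    from botorch.optim.optimize import joint_optimize, sequential_optimize

    def optimize_candidates(sequential=True, **kwargs):
        optimize = sequential_optimize if sequential else joint_optimize
        return optimize(**kwargs)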
Showing 3 changed files with 20 additions and 19 deletions.
8 changes: 2 additions & 6 deletions ax/models/tests/test_botorch_model.py
@@ -148,14 +148,11 @@ def test_BotorchModel(self, dtype=torch.float, cuda=False):
         n = 3
 
         X_dummy = torch.tensor([[[1.0, 2.0, 3.0]]], dtype=dtype, device=device)
-        acq_dummy = torch.tensor(0.0, dtype=dtype, device=device)
         model_gen_options = {}
         # test sequential optimize
         with mock.patch(
-            "ax.models.torch.botorch_defaults.optimize_acqf",
-            return_value=(X_dummy, acq_dummy),
+            "ax.models.torch.botorch_defaults.sequential_optimize", return_value=X_dummy
         ) as mock_optimize_acqf:
-
             Xgen, wgen = model.gen(
                 n=n,
                 bounds=bounds,
@@ -173,8 +170,7 @@ def test_BotorchModel(self, dtype=torch.float, cuda=False):
 
         # test joint optimize
         with mock.patch(
-            "ax.models.torch.botorch_defaults.optimize_acqf",
-            return_value=(X_dummy, acq_dummy),
+            "ax.models.torch.botorch_defaults.joint_optimize", return_value=X_dummy
         ) as mock_optimize_acqf:
             Xgen, wgen = model.gen(
                 n=n,
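
A side note on the pattern these tests use: the optimizer has to be patched where it is looked up (in ax.models.torch.botorch_defaults), not where botorch defines it. A minimal standalone sketch of that pattern, with the model.gen call elided:

from unittest import mock

import torch

X_dummy = torch.tensor([[[1.0, 2.0, 3.0]]])
# Patch the name inside ax.models.torch.botorch_defaults, the module that
# actually calls it, so no real acquisition optimization runs.
with mock.patch(
    "ax.models.torch.botorch_defaults.sequential_optimize",
    return_value=X_dummy,
) as mock_sequential_optimize:
    pass  # call model.gen(...) here; the mock records the kwargs it received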
9 changes: 4 additions & 5 deletions ax/models/torch/botorch.py
@@ -55,7 +55,7 @@
         Optional[Callable[[Tensor], Tensor]],
         Any,
     ],
-    Tuple[Tensor, Tensor],
+    Tensor,
 ]
 
 
@@ -146,16 +146,15 @@ class BotorchModel(TorchModel):
             fixed_features,
             rounding_func,
             **kwargs,
-        ) -> (candidates, acq_values)
+        ) -> candidates
 
     Here `acq_function` is a BoTorch `AcquisitionFunction`, `bounds` is a
     tensor containing bounds on the parameters, `n` is the number of
     candidates to be generated, `inequality_constraints` are inequality
     constraints on parameter values, `fixed_features` specifies features that
     should be fixed during generation, and `rounding_func` is a callback
     that rounds an optimization result appropriately. `candidates` is
-    a tensor of generated candidates, and `acq_values` are the acquisition
-    values associated with the candidates. For additional details on the
+    a tensor of generated candidates. For additional details on the
     arguments, see `scipy_optimizer`.
     """
 
@@ -316,7 +315,7 @@ def gen(
 
         botorch_rounding_func = get_rounding_func(rounding_func)
 
-        candidates, _ = self.acqf_optimizer(  # pyre-ignore: [28]
+        candidates = self.acqf_optimizer(  # pyre-ignore: [28]
             acq_function=checked_cast(AcquisitionFunction, acquisition_function),
             bounds=bounds_,
             n=n,
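
To make the reverted TAcqfOptimizer contract concrete, here is a hedged sketch of a custom optimizer that conforms to it. The name random_search_optimizer and the random-search body are illustrative only; the parameter list mirrors the docstring above, and the return type is the bare candidate tensor that botorch 0.1.3 expects.

from typing import Any, Callable, Dict, List, Optional, Tuple

import torch
from botorch.acquisition import AcquisitionFunction
from torch import Tensor


def random_search_optimizer(
    acq_function: AcquisitionFunction,
    bounds: Tensor,
    n: int,
    inequality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]] = None,
    fixed_features: Optional[Dict[int, float]] = None,
    rounding_func: Optional[Callable[[Tensor], Tensor]] = None,
    **kwargs: Any,
) -> Tensor:
    """Draw random q=1 points within the bounds and keep the n best."""
    num_samples = kwargs.get("num_raw_samples", 1024)
    d = bounds.shape[-1]
    # Uniform samples within the box bounds, shaped (num_samples, q=1, d);
    # inequality_constraints are ignored here for brevity.
    X = bounds[0] + (bounds[1] - bounds[0]) * torch.rand(num_samples, 1, d)
    if fixed_features is not None:
        for index, value in fixed_features.items():
            X[..., index] = value
    with torch.no_grad():
        acq_values = acq_function(X)  # shape (num_samples,)
    best = acq_values.topk(n).indices
    candidates = X[best].squeeze(1)  # shape (n, d)
    if rounding_func is not None:
        candidates = rounding_func(candidates)
    return candidates  # a bare Tensor, not a (candidates, acq_values) tuple

Per the class docstring, such a callable should then be usable as BotorchModel(acqf_optimizer=random_search_optimizer).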
22 changes: 14 additions & 8 deletions ax/models/torch/botorch_defaults.py
@@ -18,7 +18,7 @@
 from botorch.models.model import Model
 from botorch.models.model_list_gp_regression import ModelListGP
 from botorch.models.multitask import FixedNoiseMultiTaskGP, MultiTaskGP
-from botorch.optim.optimize import optimize_acqf
+from botorch.optim.optimize import joint_optimize, sequential_optimize
 from botorch.utils import (
     get_objective_weights_transform,
     get_outcome_constraint_transforms,
@@ -204,7 +204,7 @@ def scipy_optimizer(
     fixed_features: Optional[Dict[int, float]] = None,
     rounding_func: Optional[Callable[[Tensor], Tensor]] = None,
     **kwargs: Any,
-) -> Tuple[Tensor, Tensor]:
+) -> Tensor:
     r"""Optimizer using scipy's minimize module on a numpy-adpator.
 
     Args:
@@ -233,12 +233,15 @@
     num_restarts: int = kwargs.get("num_restarts", 20)
     raw_samples: int = kwargs.get("num_raw_samples", 50 * num_restarts)
 
-    sequential = not kwargs.get("joint_optimization", False)
-    # use SLSQP by default for small problems since it yields faster wall times
-    if sequential and "method" not in kwargs:
-        kwargs["method"] = "SLSQP"
+    if kwargs.get("joint_optimization", False):
+        optimize = joint_optimize
+    else:
+        optimize = sequential_optimize
+        # use SLSQP by default for small problems since it yields faster wall times
+        if "method" not in kwargs:
+            kwargs["method"] = "SLSQP"
 
-    return optimize_acqf(
+    X = optimize(
         acq_function=acq_function,
         bounds=bounds,
         q=n,
@@ -248,8 +251,11 @@
         inequality_constraints=inequality_constraints,
         fixed_features=fixed_features,
         post_processing_func=rounding_func,
-        sequential=not kwargs.get("joint_optimization", False),
     )
+    # TODO: Un-hack this once botorch #234 is part of a stable release
+    if isinstance(X, tuple):
+        X, _ = X
+    return X
 
 
 def _get_model(
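
The isinstance(X, tuple) branch above is the temporary shim the TODO refers to: under botorch 0.1.3 the optimizers return a bare Tensor, while on botorch master (after botorch #234) they return a (candidates, acq_values) tuple. The same normalization as a standalone sketch; the helper name unpack_candidates is made up:

from typing import Tuple, Union

from torch import Tensor


def unpack_candidates(result: Union[Tensor, Tuple[Tensor, Tensor]]) -> Tensor:
    # botorch master returns (candidates, acq_values); 0.1.3 returns candidates.
    if isinstance(result, tuple):
        candidates, _acq_values = result
        return candidates
    return result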
