
Commit

Fix imports that were not valid for GPy
noscode committed Nov 6, 2024
1 parent dac0a3e commit d3b9677
Showing 4 changed files with 102 additions and 101 deletions.
2 changes: 1 addition & 1 deletion docs/source/changelogs.rst
@@ -1,7 +1,7 @@
 Changelogs
 ==============
 
-[2.0.1] - 2023-11-06
+[2.0.2] - 2023-11-06
 --------------------
 GADMA was updated and tested on Python3.10 with the latest versions of dependencies.
 
5 changes: 3 additions & 2 deletions gadma/optimizers/__init__.py
@@ -8,10 +8,11 @@
 from .global_optimizer import register_global_optimizer, get_global_optimizer # NOQA
 from .global_optimizer import all_global_optimizers, GlobalOptimizer # NOQA
 from .genetic_algorithm import GeneticAlgorithm # NOQA
-from .bayesian_optimization import GPyOptBayesianOptimizer # NOQA
+# from .bayesian_optimization import GPyOptBayesianOptimizer # NOQA
 from .bayesian_optimization import SMACBayesianOptimizer, SMACSquirrelOptimizer # NOQA
 from .combinations import GlobalOptimizerAndLocalOptimizer # NOQA
 from .linear_constrain import LinearConstrain # NOQA
 from .optimizer_result import OptimizerResult # NOQA
 from .gaussian_process import GaussianProcess # NOQA
-from .gaussian_process import GPyGaussianProcess, SMACGaussianProcess # NOQA
+# from .gaussian_process import GPyGaussianProcess
+from .gaussian_process import SMACGaussianProcess # NOQA
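
A note on the design choice in this file: commenting the GPy/GPyOpt imports out removes the back-end outright. Another common pattern for an optional back-end is a guarded import, sketched below purely as an illustration; this is not what the commit does, and the None fallback is an assumption rather than GADMA code.

# Illustrative sketch only (hypothetical content for gadma/optimizers/__init__.py):
# keep the optional GPy/GPyOpt-backed optimizer importable without breaking the
# package when GPy is missing or incompatible with the installed Python version.
try:
    from .bayesian_optimization import GPyOptBayesianOptimizer  # NOQA
except ImportError:  # GPy/GPyOpt absent or broken
    GPyOptBayesianOptimizer = None  # assumption: callers would check for None
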
104 changes: 52 additions & 52 deletions gadma/optimizers/gaussian_process.py
@@ -1,30 +1,30 @@
-# import numpy as np
-# import copy
-#
-#
-# class GaussianProcess(object):
-#     """
-#     Base class to keep Gaussian process for Bayesian optimization.
-#     """
-#     def __init__(self, gp_model):
-#         self.gp_model = gp_model
-#
-#     def train(self, X, Y, optimize=True):
-#         raise NotImplementedError
-#
-#     def get_noise(self):
-#         raise NotImplementedError
-#
-#     def predict(self, X):
-#         raise NotImplementedError
-#
-#     def get_K(self):
-#         raise NotImplementedError
-#
-#     def get_hypers(self):
-#         raise NotImplementedError
-#
-#
+import numpy as np
+import copy
+
+
+class GaussianProcess(object):
+    """
+    Base class to keep Gaussian process for Bayesian optimization.
+    """
+    def __init__(self, gp_model):
+        self.gp_model = gp_model
+
+    def train(self, X, Y, optimize=True):
+        raise NotImplementedError
+
+    def get_noise(self):
+        raise NotImplementedError
+
+    def predict(self, X):
+        raise NotImplementedError
+
+    def get_K(self):
+        raise NotImplementedError
+
+    def get_hypers(self):
+        raise NotImplementedError
+
+
 # class GPyGaussianProcess(GaussianProcess):
 #     def _convert(self, X, Y=None):
 #         X = np.array(X, dtype=float)
@@ -58,28 +58,28 @@
 #         theta.extend(self.gp_model.model.kern.lengthscale)
 #         theta.append(self.get_noise())
 #         return theta
-#
-#
-# class SMACGaussianProcess(GaussianProcess):
-#     def __init__(self, gp_model):
-#         super(SMACGaussianProcess, self).__init__(gp_model=gp_model)
-#         self.gp_model.normalize_y = True
-#
-#     def train(self, X, Y, optimize=True):
-#         self.gp_model._train(X, Y, do_optimize=optimize)
-#
-#     def get_noise(self):
-#         return 0
-#
-#     def predict(self, X):
-#         mu, var = self.gp_model.predict_marginalized_over_instances(
-#             np.array(X)
-#         )
-#         sigma = np.sqrt(var)
-#         return mu.reshape(mu.shape[0]), sigma.reshape(sigma.shape[0])
-#
-#     def get_K(self):
-#         return self.gp_model.kernel(self.gp_model.gp.X_train_)
-#
-#     def get_hypers(self):
-#         return np.exp(self.gp_model.hypers)
+
+
+class SMACGaussianProcess(GaussianProcess):
+    def __init__(self, gp_model):
+        super(SMACGaussianProcess, self).__init__(gp_model=gp_model)
+        self.gp_model.normalize_y = True
+
+    def train(self, X, Y, optimize=True):
+        self.gp_model._train(X, Y, do_optimize=optimize)
+
+    def get_noise(self):
+        return 0
+
+    def predict(self, X):
+        mu, var = self.gp_model.predict_marginalized_over_instances(
+            np.array(X)
+        )
+        sigma = np.sqrt(var)
+        return mu.reshape(mu.shape[0]), sigma.reshape(sigma.shape[0])
+
+    def get_K(self):
+        return self.gp_model.kernel(self.gp_model.gp.X_train_)
+
+    def get_hypers(self):
+        return np.exp(self.gp_model.hypers)
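
For reference, the GaussianProcess base class re-enabled above is a small interface (train, predict, get_noise, get_K, get_hypers) that back-end wrappers such as SMACGaussianProcess implement. The snippet below is a minimal, purely illustrative wrapper against that interface; the constant-mean "model" is invented for the example, is not part of GADMA or SMAC, and the import assumes GADMA itself is installed.

import numpy as np

from gadma.optimizers import GaussianProcess


class ConstantMeanGP(GaussianProcess):
    """Toy wrapper: predicts the mean of the training targets everywhere."""

    def train(self, X, Y, optimize=True):
        self._mean = float(np.mean(Y))

    def get_noise(self):
        return 0

    def predict(self, X):
        # Constant mean with unit "uncertainty"; get_K/get_hypers are left
        # unimplemented and would raise NotImplementedError if called.
        n = len(X)
        return np.full(n, self._mean), np.ones(n)


gp = ConstantMeanGP(gp_model=None)
gp.train([[0.0], [1.0]], [2.0, 4.0])
mu, sigma = gp.predict([[0.5], [2.0]])  # mu == array([3., 3.])
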
92 changes: 46 additions & 46 deletions gadma/optimizers/optimizer_result.py
@@ -83,52 +83,52 @@ def from_SciPy_OptimizeResult(
                                 X_out=[scipy_result.x],
                                 Y_out=[scipy_result.fun])
 
-    @staticmethod
-    def from_GPyOpt_OptimizerResult(gpyopt_obj):
-        """
-        Create OptimizerResult from instance of bayesian optimization.
-
-        :param gpyopt_obj: Object of GPyOpt optimizer
-        :type gpyopt_obj: GPyOpt.methods.BayesianOptimization
-        """
-        gpyopt_obj._compute_results()
-        if (gpyopt_obj.num_acquisitions == gpyopt_obj.max_iter and
-                not gpyopt_obj.initial_iter):
-            message = ' ** Maximum number of iterations reached **'
-            success = True
-            status = 1
-        elif (gpyopt_obj._distance_last_evaluations() < gpyopt_obj.eps and
-                not gpyopt_obj.initial_iter):
-            message = ' ** Two equal location selected **'
-            success = True
-            status = 1
-        elif (gpyopt_obj.max_time < gpyopt_obj.cum_time and
-                not gpyopt_obj.initial_iter):
-            message = ' ** Evaluation time reached **'
-            success = True
-            status = 0
-        else:
-            message = '** GPyOpt Bayesian Optimization class initialized '\
-                      'successfully **'
-            success = False
-            status = 2
-
-        if hasattr(gpyopt_obj.f, 'cache_info'):
-            n_eval = gpyopt_obj.f.cache_info.misses
-        else:
-            n_eval = len(gpyopt_obj.Y)
-
-        return OptimizerResult(x=gpyopt_obj.x_opt,
-                               y=gpyopt_obj.fx_opt,
-                               success=success,
-                               status=status,
-                               message=message,
-                               X=gpyopt_obj.X,
-                               Y=gpyopt_obj.Y,
-                               n_eval=n_eval,
-                               n_iter=gpyopt_obj.num_acquisitions,
-                               X_out=gpyopt_obj.X,
-                               Y_out=gpyopt_obj.Y)
+    # @staticmethod
+    # def from_GPyOpt_OptimizerResult(gpyopt_obj):
+    #     """
+    #     Create OptimizerResult from instance of bayesian optimization.
+    #
+    #     :param gpyopt_obj: Object of GPyOpt optimizer
+    #     :type gpyopt_obj: GPyOpt.methods.BayesianOptimization
+    #     """
+    #     gpyopt_obj._compute_results()
+    #     if (gpyopt_obj.num_acquisitions == gpyopt_obj.max_iter and
+    #             not gpyopt_obj.initial_iter):
+    #         message = ' ** Maximum number of iterations reached **'
+    #         success = True
+    #         status = 1
+    #     elif (gpyopt_obj._distance_last_evaluations() < gpyopt_obj.eps and
+    #             not gpyopt_obj.initial_iter):
+    #         message = ' ** Two equal location selected **'
+    #         success = True
+    #         status = 1
+    #     elif (gpyopt_obj.max_time < gpyopt_obj.cum_time and
+    #             not gpyopt_obj.initial_iter):
+    #         message = ' ** Evaluation time reached **'
+    #         success = True
+    #         status = 0
+    #     else:
+    #         message = '** GPyOpt Bayesian Optimization class initialized '\
+    #                   'successfully **'
+    #         success = False
+    #         status = 2
+    #
+    #     if hasattr(gpyopt_obj.f, 'cache_info'):
+    #         n_eval = gpyopt_obj.f.cache_info.misses
+    #     else:
+    #         n_eval = len(gpyopt_obj.Y)
+    #
+    #     return OptimizerResult(x=gpyopt_obj.x_opt,
+    #                            y=gpyopt_obj.fx_opt,
+    #                            success=success,
+    #                            status=status,
+    #                            message=message,
+    #                            X=gpyopt_obj.X,
+    #                            Y=gpyopt_obj.Y,
+    #                            n_eval=n_eval,
+    #                            n_iter=gpyopt_obj.num_acquisitions,
+    #                            X_out=gpyopt_obj.X,
+    #                            Y_out=gpyopt_obj.Y)
 
     def __repr__(self):
         string = f" status: {self.status}\n"\
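
The from_SciPy_OptimizeResult constructor named in the hunk header above is the converter left untouched by this change. A hedged usage sketch follows; since the full signature is truncated in this view, it assumes the method can be called on the class and takes the SciPy result object directly, as suggested by the scipy_result.x and scipy_result.fun fields in the surrounding context lines.

import numpy as np
from scipy.optimize import minimize

from gadma.optimizers import OptimizerResult

# Sketch only: wrap a SciPy local-optimization result in GADMA's
# OptimizerResult container (assumes the method accepts the result object
# as its single argument).
scipy_result = minimize(lambda x: float(np.sum(x ** 2)), x0=np.array([1.0, -2.0]))
result = OptimizerResult.from_SciPy_OptimizeResult(scipy_result)
print(result)  # the class defines __repr__, reporting status, message, x and y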
