
Commit

rm sobol
Carlos Xavier Hernández committed Apr 18, 2019
1 parent 72ac8b8 commit f235515
Showing 1 changed file with 22 additions and 59 deletions.
81 changes: 22 additions & 59 deletions osprey/strategies.py
@@ -15,13 +15,8 @@

from .search_space import EnumVariable
from .acquisition_functions import AcquisitionFunction
from .surrogate_models import MaximumLikelihoodGaussianProcess, GaussianProcessKernel

# try:
# from SALib.sample import sobol_sequence as ss
# except ImportError:
# ss = None
# pass
from .surrogate_models import (MaximumLikelihoodGaussianProcess,
GaussianProcessKernel)

DEFAULT_TIMEOUT = socket._GLOBAL_DEFAULT_TIMEOUT

@@ -72,50 +67,6 @@ def is_repeated_suggestion(params, history):
return False


# class SobolSearch(BaseStrategy):
# short_name = 'sobol'
# _SKIP = int(1e4)
#
# def __init__(self, length=1000):
# # TODO length should be n_trials. But this doesn't seem to be accessible to strategies without major re-write.
# self.sequence = None
# self.length = length
# self.n_dims = 0
# self.offset = 0
# self.counter = 0
#
# def _set_sequence(self):
# # TODO could get rid of first part of sequence
# self.sequence = ss.sample(self.length + self._SKIP, self.n_dims)
#
# def _from_unit_cube(self, result, searchspace):
# # TODO this should be a method common to both Sobol and GP.
# # Note that Sobol only deals with float-valued variables, so we have
# # a transform step on either side, where int and enum valued variables
# # are transformed before calling gp, and then the result suggested by
# # Sobol needs to be reverse-transformed.
# out = {}
# for gpvalue, var in zip(result, searchspace):
# out[var.name] = var.point_from_unit(float(gpvalue))
# return out
#
# def suggest(self, history, searchspace):
# if 'SALib' not in sys.modules:
# raise ImportError('No module named SALib')
#
# if self.sequence is None:
# self.n_dims = searchspace.n_dims
# self.offset = len(history) + self._SKIP
# self._set_sequence()
# try:
# points = self.sequence[self.offset+ self.counter]
# self.counter += 1
# except IndexError:
# raise RuntimeError('Increase sobol sequence length')
#
# return self._from_unit_cube(points, searchspace)
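For reference, a minimal sketch of what the deleted SobolSearch strategy did, assuming an older SALib release that still ships SALib.sample.sobol_sequence (the module the commented-out code imported); sobol_suggest is a hypothetical helper, and searchspace is an osprey search space whose variables expose point_from_unit, as in the class above:

from SALib.sample import sobol_sequence as ss

def sobol_suggest(searchspace, index, skip=10000, length=1000):
    # Draw `length` points in the unit cube [0, 1]^n_dims, discarding the
    # first `skip` entries of the sequence as the removed class did via _SKIP.
    sequence = ss.sample(length + skip, searchspace.n_dims)
    point = sequence[skip + index]
    # Map each unit-cube coordinate back to its search-space variable.
    return {var.name: var.point_from_unit(float(v))
            for var, v in zip(searchspace, point)}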


class RandomSearch(BaseStrategy):
short_name = 'random'

@@ -225,15 +176,17 @@ def suggest(self, history, searchspace):
chosen_params_container = []

def suggest(*args, **kwargs):
return tpe.suggest(*args, **kwargs, gamma=self.gamma, n_startup_jobs=self.seeds)
return tpe.suggest(*args,
**kwargs,
gamma=self.gamma,
n_startup_jobs=self.seeds)

def mock_fn(x):
# http://stackoverflow.com/a/3190783/1079728
# to get around no nonlocal keyword in python2
chosen_params_container.append(x)
return 0


fmin(fn=mock_fn,
algo=tpe.suggest,
space=hp_searchspace,
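The mock_fn/chosen_params_container trick above only captures the point that hyperopt proposes; nothing is actually optimised. A minimal self-contained sketch of the same pattern with a hypothetical one-variable search space (the wrapper forwarding gamma and n_startup_jobs is omitted here):

from hyperopt import fmin, hp, tpe

space = {'learning_rate': hp.loguniform('learning_rate', -5, 0)}
chosen_params_container = []  # stands in for `nonlocal`, which Python 2 lacks

def mock_fn(params):
    chosen_params_container.append(params)  # record the suggested point
    return 0  # constant loss; the objective is never really evaluated

fmin(fn=mock_fn, space=space, algo=tpe.suggest, max_evals=1)
print(chosen_params_container[0])  # the single suggestion made by TPE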
@@ -257,6 +210,7 @@ def _hyperopt_fmin_random_kwarg(random):

class Bayes(BaseStrategy):
short_name = 'bayes'

def __init__(self,
acquisition=None,
surrogate=None,
@@ -265,8 +219,7 @@ def __init__(self,
seeds=1,
max_feval=5E4,
max_iter=1E5,
n_iter=50
):
n_iter=50):
self.seed = seed
self.seeds = seeds
self.max_feval = max_feval
@@ -277,8 +230,15 @@ def __init__(self,
surrogate = 'gp'
self.surrogate = surrogate
if kernels is None:
kernels = [{'name': 'GPy.kern.Matern52', 'params': {'ARD': True},
'options': {'independent': False}}]
kernels = [{
'name': 'GPy.kern.Matern52',
'params': {
'ARD': True
},
'options': {
'independent': False
}
}]
self.kernel_params = kernels
if acquisition is None:
acquisition = {'name': 'osprey', 'params': {}}
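Taken together, these defaults mean a plain Bayes() uses an ARD Matérn-5/2 GP surrogate with the 'osprey' acquisition function. A minimal sketch of instantiating the strategy with those defaults spelled out explicitly (import path taken from this module; the seed values are illustrative):

from osprey.strategies import Bayes

strategy = Bayes(
    seed=42,
    seeds=5,  # presumably the number of seeding suggestions made before the GP is used
    surrogate='gp',
    kernels=[{'name': 'GPy.kern.Matern52',
              'params': {'ARD': True},
              'options': {'independent': False}}],
    acquisition={'name': 'osprey', 'params': {}},
)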
@@ -338,10 +298,13 @@ def suggest(self, history, searchspace, max_tries=5):
# Define and fit model
if self.surrogate == 'gp':
kernel = GaussianProcessKernel(self.kernel_params, self.n_dims)
model = MaximumLikelihoodGaussianProcess(X=X, Y=Y, kernel=kernel.kernel,
model = MaximumLikelihoodGaussianProcess(X=X,
Y=Y,
kernel=kernel.kernel,
max_feval=self.max_feval)
else:
raise NotImplementedError('Surrogate model not recognised. Please choose from: gp')
raise NotImplementedError(
'Surrogate model not recognised. Please choose from: gp')
model.fit()

# Define acquisition function and get best candidate
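As a rough stand-in for the kernel construction and maximum-likelihood fit in the hunk above, a minimal GPy-only sketch, assuming GPy is installed; osprey's GaussianProcessKernel and MaximumLikelihoodGaussianProcess presumably wrap something along these lines, and the data here are purely illustrative:

import numpy as np
import GPy

n_dims = 3                       # hypothetical search-space dimensionality
X = np.random.rand(10, n_dims)   # parameter vectors mapped to the unit cube
Y = np.random.rand(10, 1)        # corresponding objective values

# ARD Matern 5/2 kernel, matching the default kernel_params above.
kernel = GPy.kern.Matern52(input_dim=n_dims, ARD=True)

# Plain GP regression, with the kernel hyperparameters fit by maximum likelihood.
model = GPy.models.GPRegression(X, Y, kernel)
model.optimize()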
