random_state=None, acq="EI", xi=0.01, kappa=1.96,
verbose=False, specs=None, callback=None):
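# Accept either a single callable or a list of callables for
# `callback` and normalize it to a list of callables.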
if callback is not None:
if isinstance(callback, Callable):
callback = [callback]
elif not (isinstance(callback, list) and
all([isinstance(c, Callable) for c in callback])):
raise ValueError("callback should be either a callable or "
"a list of callables.")
rng = check_random_state(random_state)
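# Build the search space object from the provided dimensions.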
space = Space(dimensions)
# Initialize with provided points (x0 and y0) and/or random points
if n_calls <= 0:
    raise ValueError(
        "Expected `n_calls` > 0, got %d" % n_calls)
if x0 is None:
x0 = []
elif not isinstance(x0, list):
    raise ValueError("`x0` should be a list, but got %s" % type(x0))
elif x0 and not isinstance(x0[0], list):
    x0 = [x0]
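# Total number of objective evaluations needed for initialization:
# the provided points (only evaluated when `y0` is missing) plus
# the random starts.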
n_init_func_calls = len(x0) if y0 is None else 0
n_total_init_calls = n_random_starts + n_init_func_calls
if n_total_init_calls <= 0:
# If `x0` is not provided and `n_random_starts` is 0 there are
# no initial points, so require `n_random_starts` > 0.
raise ValueError(
"Expected `n_random_starts` > 0, got %d" % n_random_starts)
if n_calls < n_total_init_calls:
raise ValueError(
"Expected `n_calls` >= %d, got %d" % (n_total_init_calls, n_calls))
func_call_no = 1
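# Evaluate the objective at the user-provided points when no `y0`
# values were supplied, reporting progress and firing callbacks.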
if y0 is None and x0:
y0 = []
for i, x in enumerate(x0):
y0.append(verbose_func(
func, x, verbose=verbose, prev_ys=y0, x_info="provided",
func_call_no=func_call_no))
func_call_no += 1
if callback is not None:
curr_res = create_result(x0, y0, space, rng, specs)
for c in callback:
c(curr_res)
elif x0:
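# `y0` was supplied by the caller; validate it against `x0`.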
if isinstance(y0, Iterable):
y0 = list(y0)
elif isinstance(y0, numbers.Number):
y0 = [y0]
else:
raise ValueError(
"`y0` should be an iterable or a scalar, got %s" % type(y0))
if len(x0) != len(y0):
raise ValueError("`x0` and `y0` should have the same length")
if not all(map(np.isscalar, y0)):
raise ValueError("`y0` elements should be scalars")
else:
y0 = []
# Random function evaluations.
X_rand = space.rvs(n_samples=n_random_starts, random_state=rng)
Xi = x0 + X_rand
yi = y0
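# Evaluate the objective at each randomly sampled point, invoking
# the callbacks (if any) with the partial result after every call.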
for i, x in enumerate(X_rand):
yi.append(verbose_func(
func, x, verbose=verbose, prev_ys=yi, x_info="random",
func_call_no=func_call_no))
func_call_no += 1
if callback is not None:
curr_res = create_result(
x0 + X_rand[:i + 1], yi, space, rng, specs)
for c in callback:
c(curr_res)
if np.ndim(yi) != 1:
raise ValueError("`func` should return a scalar")
# Tree-based optimization loop
models = []
n_model_iter = n_calls - n_total_init_calls
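# Each remaining call fits a fresh clone of the base estimator on
# all observations so far and picks the next point by minimizing
# the acquisition function over randomly sampled candidates.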
for i in range(n_model_iter):
if verbose:
print("Fitting model no: %d" % (i + 1))
rgr = clone(base_estimator)
rgr.fit(space.transform(Xi), yi)
models.append(rgr)
# `rgr` predicts constants for each leaf, which means that the EI
# has zero gradient over large distances. As a result we cannot
# use gradient-based optimizers like BFGS, so we use random
# sampling for the moment.
X = space.transform(space.rvs(n_samples=n_points,
random_state=rng))
values = _gaussian_acquisition(
X=X, model=rgr, y_opt=np.min(yi), method=acq,
xi=xi, kappa=kappa)
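# Take the candidate with the lowest acquisition value and map it
# back to the original search space.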
next_x = X[np.argmin(values)]
next_x = space.inverse_transform(next_x.reshape((1, -1)))[0]
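# Evaluate the objective at the chosen point and record the new
# observation before refitting in the next iteration.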
yi.append(verbose_func(
func, next_x, verbose=verbose, prev_ys=yi,
func_call_no=func_call_no))
func_call_no += 1
Xi.append(next_x)
if callback is not None:
curr_res = create_result(Xi, yi, space, rng, specs)
for c in callback:
    c(curr_res)
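# The final result carries the full evaluation history and every
# surrogate model fitted along the way.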
return create_result(Xi, yi, space, rng, specs, models)
def gbrt_minimize(func, dimensions, base_estimator=None, n_calls=100,