
Commit a3021cc

Merge pull request #344 from till-m/constrained-optimization
Advanced constrained optimization
2 parents cc05408 + a46c335

9 files changed (+1011 −32 lines)

bayes_opt/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -2,9 +2,11 @@
 from .domain_reduction import SequentialDomainReductionTransformer
 from .util import UtilityFunction
 from .logger import ScreenLogger, JSONLogger
+from .constraint import ConstraintModel

 __all__ = [
     "BayesianOptimization",
+    "ConstraintModel",
     "UtilityFunction",
     "Events",
     "ScreenLogger",

bayes_opt/bayesian_optimization.py

Lines changed: 58 additions & 21 deletions
@@ -1,7 +1,9 @@
 import warnings
 from queue import Queue, Empty

-from .target_space import TargetSpace
+from bayes_opt.constraint import ConstraintModel
+
+from .target_space import TargetSpace, ConstrainedTargetSpace
 from .event import Events, DEFAULT_EVENTS
 from .logger import _get_default_logger
 from .util import UtilityFunction, acq_max, ensure_rng
@@ -16,6 +18,7 @@ class Observable(object):
     Inspired/Taken from
         https://www.protechtraining.com/blog/post/879#simple-observer
     """
+
     def __init__(self, events):
         # maps event names to subscribers
         # str -> dict
@@ -52,9 +55,12 @@ class BayesianOptimization(Observable):
         Dictionary with parameters names as keys and a tuple with minimum
         and maximum values.

+    constraint: A ConstraintModel. Note that the names of arguments of the
+        constraint function and of f need to be the same.
+
     random_state: int or numpy.random.RandomState, optional(default=None)
         If the value is an integer, it is used as the seed for creating a
-        numpy.random.RandomState. Otherwise the random state provided it is used.
+        numpy.random.RandomState. Otherwise the random state provided is used.
         When set to None, an unseeded random state is generated.

     verbose: int, optional(default=2)
@@ -76,14 +82,16 @@ class BayesianOptimization(Observable):
     set_bounds()
         Allows changing the lower and upper searching bounds
     """
-    def __init__(self, f, pbounds, random_state=None, verbose=2,
+
+    def __init__(self,
+                 f,
+                 pbounds,
+                 constraint=None,
+                 random_state=None,
+                 verbose=2,
                  bounds_transformer=None):
         self._random_state = ensure_rng(random_state)

-        # Data structure containing the function to be optimized, the bounds of
-        # its domain, and a record of the evaluations we have done so far
-        self._space = TargetSpace(f, pbounds, random_state)
-
         self._queue = Queue()

         # Internal GP regressor
@@ -95,6 +103,27 @@ def __init__(self, f, pbounds, random_state=None, verbose=2,
             random_state=self._random_state,
         )

+        if constraint is None:
+            # Data structure containing the function to be optimized, the
+            # bounds of its domain, and a record of the evaluations we have
+            # done so far
+            self._space = TargetSpace(f, pbounds, random_state)
+            self.is_constrained = False
+        else:
+            constraint_ = ConstraintModel(
+                constraint.fun,
+                constraint.lb,
+                constraint.ub,
+                random_state=random_state
+            )
+            self._space = ConstrainedTargetSpace(
+                f,
+                constraint_,
+                pbounds,
+                random_state
+            )
+            self.is_constrained = True
+
         self._verbose = verbose
         self._bounds_transformer = bounds_transformer
         if self._bounds_transformer:
@@ -110,6 +139,12 @@ def __init__(self, f, pbounds, random_state=None, verbose=2,
     def space(self):
         return self._space

+    @property
+    def constraint(self):
+        if self.is_constrained:
+            return self._space.constraint
+        return None
+
     @property
     def max(self):
         return self._space.max()
@@ -136,6 +171,7 @@ def probe(self, params, lazy=True):
             If True, the optimizer will evaluate the points when calling
             maximize(). Otherwise it will evaluate it at the moment.
         """
+
         if lazy:
             self._queue.put(params)
         else:
@@ -152,15 +188,17 @@ def suggest(self, utility_function):
         with warnings.catch_warnings():
             warnings.simplefilter("ignore")
             self._gp.fit(self._space.params, self._space.target)
+            if self.is_constrained:
+                self.constraint.fit(self._space.params,
+                                    self._space._constraint_values)

         # Finding argmax of the acquisition function.
-        suggestion = acq_max(
-            ac=utility_function.utility,
-            gp=self._gp,
-            y_max=self._space.target.max(),
-            bounds=self._space.bounds,
-            random_state=self._random_state
-        )
+        suggestion = acq_max(ac=utility_function.utility,
+                             gp=self._gp,
+                             constraint=self.constraint,
+                             y_max=self._space.target.max(),
+                             bounds=self._space.bounds,
+                             random_state=self._random_state)

         return self._space.array_to_params(suggestion)
@@ -211,15 +249,15 @@ def maximize(self,
         kappa: float, optional(default=2.576)
             Parameter to indicate how closed are the next parameters sampled.
             Higher value = favors spaces that are least explored.
-            Lower value = favors spaces where the regression function is the
-            highest.
+            Lower value = favors spaces where the regression function is
+            the highest.

         kappa_decay: float, optional(default=1)
             `kappa` is multiplied by this factor every iteration.

         kappa_decay_delay: int, optional(default=0)
-            Number of iterations that must have passed before applying the decay
-            to `kappa`.
+            Number of iterations that must have passed before applying the
+            decay to `kappa`.

         xi: float, optional(default=0.0)
             [unused]
@@ -242,12 +280,11 @@ def maximize(self,
                 util.update_params()
                 x_probe = self.suggest(util)
                 iteration += 1
-
             self.probe(x_probe, lazy=False)

             if self._bounds_transformer and iteration > 0:
-                # The bounds transformer should only modify the bounds after the init_points points (only for the true
-                # iterations)
+                # The bounds transformer should only modify the bounds after
+                # the init_points points (only for the true iterations)
                 self.set_bounds(
                     self._bounds_transformer.transform(self._space))
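Taken together, these hunks make `BayesianOptimization` accept an optional `constraint`: when one is given, observations are stored in a `ConstrainedTargetSpace`, a `ConstraintModel` is fitted alongside the objective GP in `suggest()`, and `acq_max` receives the constraint so that suggestions account for the probability of feasibility. The constructor only reads the `fun`, `lb` and `ub` attributes of the object passed in, which matches `scipy.optimize.NonlinearConstraint`. A minimal usage sketch under that assumption (the target and constraint functions below are illustrative, not part of this commit):

    import numpy as np
    from scipy.optimize import NonlinearConstraint
    from bayes_opt import BayesianOptimization

    def target(x, y):
        # Illustrative objective to maximize.
        return np.cos(2 * x) * np.cos(y) + np.sin(x)

    def constraint_fun(x, y):
        # Must use the same keyword arguments as the target function.
        return np.cos(x) * np.cos(y) - np.sin(x) * np.sin(y)

    # Feasible region: constraint_fun(x, y) <= 0.5.
    constraint = NonlinearConstraint(constraint_fun, -np.inf, 0.5)

    optimizer = BayesianOptimization(
        f=target,
        pbounds={"x": (0, 6), "y": (0, 6)},
        constraint=constraint,
        random_state=1,
    )
    optimizer.maximize(init_points=5, n_iter=10)
    print(optimizer.max)  # best feasible observation, if any was found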

bayes_opt/constraint.py

Lines changed: 154 additions & 0 deletions
@@ -0,0 +1,154 @@
+import numpy as np
+from sklearn.gaussian_process.kernels import Matern
+from sklearn.gaussian_process import GaussianProcessRegressor
+from scipy.stats import norm
+
+
+class ConstraintModel():
+    """
+    This class models the constraint function of an optimization problem and
+    estimates, via Gaussian Process Regressors, the probability that the
+    constraint is fulfilled at a given set of parameters.
+
+    Parameters
+    ----------
+    fun: function
+        Constraint function. If multiple constraints are handled, this should
+        return a numpy.ndarray of appropriate size.
+
+    lb: numeric or numpy.ndarray
+        Lower limit(s) for the constraints. The return value of `fun` should
+        have exactly this shape.
+
+    ub: numeric or numpy.ndarray
+        Upper limit(s) for the constraints. The return value of `fun` should
+        have exactly this shape.
+
+    random_state: int or numpy.random.RandomState, optional(default=None)
+        If the value is an integer, it is used as the seed for creating a
+        numpy.random.RandomState. Otherwise the random state provided is used.
+        When set to None, an unseeded random state is generated.
+
+    Note
+    ----
+    In case of multiple constraints, this model assumes conditional
+    independence. This means that for each constraint, the probability of
+    fulfillment is the cdf of a univariate Gaussian. The overall probability
+    is simply the product of the individual probabilities.
+    """
+
+    def __init__(self, fun, lb, ub, random_state=None):
+        self.fun = fun
+
+        if isinstance(lb, float):
+            self._lb = np.array([lb])
+        else:
+            self._lb = lb
+
+        if isinstance(ub, float):
+            self._ub = np.array([ub])
+        else:
+            self._ub = ub
+
+        basis = lambda: GaussianProcessRegressor(
+            kernel=Matern(nu=2.5),
+            alpha=1e-6,
+            normalize_y=True,
+            n_restarts_optimizer=5,
+            random_state=random_state,
+        )
+        self._model = [basis() for _ in range(len(self._lb))]
+
+    @property
+    def lb(self):
+        return self._lb
+
+    @property
+    def ub(self):
+        return self._ub
+
+    @property
+    def model(self):
+        return self._model
+
+    def eval(self, **kwargs):
+        """
+        Evaluates the constraint function.
+        """
+        try:
+            return self.fun(**kwargs)
+        except TypeError as e:
+            msg = (
+                "Encountered TypeError when evaluating constraint " +
+                "function. This could be because your constraint function " +
+                "doesn't use the same keyword arguments as the target " +
+                f"function. Original error message:\n\n{e}"
+            )
+            e.args = (msg,)
+            raise
+
+    def fit(self, X, Y):
+        """
+        Fits internal GaussianProcessRegressors to the data.
+        """
+        if len(self._model) == 1:
+            self._model[0].fit(X, Y)
+        else:
+            for i, gp in enumerate(self._model):
+                gp.fit(X, Y[:, i])
+
+    def predict(self, X):
+        """
+        Returns the probability that the constraint is fulfilled at `X` based
+        on the internal Gaussian Process Regressors.
+
+        Note that this does not try to approximate the values of the
+        constraint function, but the probability that the constraint function
+        is fulfilled. For the former, see `ConstraintModel.approx()`.
+        """
+        X_shape = X.shape
+        X = X.reshape((-1, self._model[0].n_features_in_))
+        if len(self._model) == 1:
+            y_mean, y_std = self._model[0].predict(X, return_std=True)
+
+            p_lower = (norm(loc=y_mean, scale=y_std).cdf(self._lb[0])
+                       if self._lb[0] != -np.inf else np.array([0]))
+            p_upper = (norm(loc=y_mean, scale=y_std).cdf(self._ub[0])
+                       if self._ub[0] != np.inf else np.array([1]))
+            result = p_upper - p_lower
+            return result.reshape(X_shape[:-1])
+        else:
+            result = np.ones(X.shape[0])
+            for j, gp in enumerate(self._model):
+                y_mean, y_std = gp.predict(X, return_std=True)
+                p_lower = (norm(loc=y_mean, scale=y_std).cdf(self._lb[j])
+                           if self._lb[j] != -np.inf else np.array([0]))
+                p_upper = (norm(loc=y_mean, scale=y_std).cdf(self._ub[j])
+                           if self._ub[j] != np.inf else np.array([1]))
+                result = result * (p_upper - p_lower)
+            return result.reshape(X_shape[:-1])
+
+    def approx(self, X):
+        """
+        Returns the approximation of the constraint function using the
+        internal Gaussian Process Regressors.
+        """
+        X_shape = X.shape
+        X = X.reshape((-1, self._model[0].n_features_in_))
+        if len(self._model) == 1:
+            return self._model[0].predict(X).reshape(X_shape[:-1])
+        else:
+            result = np.column_stack([gp.predict(X) for gp in self._model])
+            return result.reshape(X_shape[:-1] + (len(self._lb), ))
+
+    def allowed(self, constraint_values):
+        """
+        Checks whether `constraint_values` fulfills the specified limits.
+        """
+        if self._lb.size == 1:
+            return (np.less_equal(self._lb, constraint_values)
+                    & np.less_equal(constraint_values, self._ub))
+
+        return (np.all(constraint_values <= self._ub, axis=-1)
+                & np.all(constraint_values >= self._lb, axis=-1))
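`predict()` above turns the GP posterior at a point into a probability of feasibility: for each constraint the posterior is a univariate Gaussian, so P(lb <= c(x) <= ub) is a difference of two normal CDFs, and multiple constraints multiply under the conditional-independence assumption stated in the docstring. A standalone sketch of that computation (the posterior values `mu` and `sigma` are made-up numbers, not outputs of this code):

    import numpy as np
    from scipy.stats import norm

    # Assumed GP posterior (mean, std) for two constraints at a single point.
    mu = np.array([0.2, -1.0])
    sigma = np.array([0.5, 0.3])
    lb = np.array([-np.inf, -2.0])
    ub = np.array([0.5, 0.0])

    p_feasible = 1.0
    for j in range(len(mu)):
        # P(lb[j] <= c_j(x) <= ub[j]) under N(mu[j], sigma[j]**2).
        p_lo = norm.cdf(lb[j], loc=mu[j], scale=sigma[j]) if lb[j] != -np.inf else 0.0
        p_hi = norm.cdf(ub[j], loc=mu[j], scale=sigma[j]) if ub[j] != np.inf else 1.0
        p_feasible *= p_hi - p_lo

    print(p_feasible)  # product of per-constraint feasibility probabilities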

bayes_opt/logger.py

Lines changed: 5 additions & 1 deletion
@@ -6,7 +6,6 @@
 from .event import Events
 from .util import Colours

-
 def _get_default_logger(verbose):
     return ScreenLogger(verbose=verbose)

@@ -81,6 +80,11 @@ def _header(self, instance):
         return line + "\n" + ("-" * self._header_length)

     def _is_new_max(self, instance):
+        if instance.max["target"] is None:
+            # During constrained optimization, there might not be a maximum
+            # value yet, since the optimizer may not have encountered any
+            # points that fulfill the constraints.
+            return False
         if self._previous_max is None:
             self._previous_max = instance.max["target"]
         return instance.max["target"] > self._previous_max

bayes_opt/observer.py

Lines changed: 2 additions & 2 deletions
@@ -25,8 +25,8 @@ def _update_tracker(self, event, instance):
         self._iterations += 1

         current_max = instance.max
-        if (self._previous_max is None or
-                current_max["target"] > self._previous_max):
+        if (self._previous_max is None
+                or current_max["target"] > self._previous_max):
             self._previous_max = current_max["target"]
             self._previous_max_params = current_max["params"]
