@@ -1,7 +1,9 @@
 import warnings
 from queue import Queue, Empty

-from .target_space import TargetSpace
+from bayes_opt.constraint import ConstraintModel
+
+from .target_space import TargetSpace, ConstrainedTargetSpace
 from .event import Events, DEFAULT_EVENTS
 from .logger import _get_default_logger
 from .util import UtilityFunction, acq_max, ensure_rng
@@ -16,6 +18,7 @@ class Observable(object):
     Inspired/Taken from
     https://www.protechtraining.com/blog/post/879#simple-observer
     """
+
     def __init__(self, events):
         # maps event names to subscribers
         # str -> dict
@@ -52,9 +55,12 @@ class BayesianOptimization(Observable):
         Dictionary with parameters names as keys and a tuple with minimum
         and maximum values.

+    constraint: A ConstraintModel. Note that the names of arguments of the
+        constraint function and of f need to be the same.
+
     random_state: int or numpy.random.RandomState, optional(default=None)
         If the value is an integer, it is used as the seed for creating a
-        numpy.random.RandomState. Otherwise the random state provided it is used.
+        numpy.random.RandomState. Otherwise the random state provided is used.
         When set to None, an unseeded random state is generated.

     verbose: int, optional(default=2)
@@ -76,14 +82,16 @@ class BayesianOptimization(Observable):
     set_bounds()
         Allows changing the lower and upper searching bounds
     """
-    def __init__(self, f, pbounds, random_state=None, verbose=2,
+
+    def __init__(self,
+                 f,
+                 pbounds,
+                 constraint=None,
+                 random_state=None,
+                 verbose=2,
                  bounds_transformer=None):
         self._random_state = ensure_rng(random_state)

-        # Data structure containing the function to be optimized, the bounds of
-        # its domain, and a record of the evaluations we have done so far
-        self._space = TargetSpace(f, pbounds, random_state)
-
         self._queue = Queue()

         # Internal GP regressor
@@ -95,6 +103,27 @@ def __init__(self, f, pbounds, random_state=None, verbose=2,
             random_state=self._random_state,
         )

+        if constraint is None:
+            # Data structure containing the function to be optimized, the
+            # bounds of its domain, and a record of the evaluations we have
+            # done so far
+            self._space = TargetSpace(f, pbounds, random_state)
+            self.is_constrained = False
+        else:
+            constraint_ = ConstraintModel(
+                constraint.fun,
+                constraint.lb,
+                constraint.ub,
+                random_state=random_state
+            )
+            self._space = ConstrainedTargetSpace(
+                f,
+                constraint_,
+                pbounds,
+                random_state
+            )
+            self.is_constrained = True
+
         self._verbose = verbose
         self._bounds_transformer = bounds_transformer
         if self._bounds_transformer:
@@ -110,6 +139,12 @@ def __init__(self, f, pbounds, random_state=None, verbose=2,
     def space(self):
         return self._space

+    @property
+    def constraint(self):
+        if self.is_constrained:
+            return self._space.constraint
+        return None
+
     @property
     def max(self):
         return self._space.max()
@@ -136,6 +171,7 @@ def probe(self, params, lazy=True):
             If True, the optimizer will evaluate the points when calling
             maximize(). Otherwise it will evaluate it at the moment.
         """
+
         if lazy:
             self._queue.put(params)
         else:
@@ -152,15 +188,17 @@ def suggest(self, utility_function):
         with warnings.catch_warnings():
             warnings.simplefilter("ignore")
             self._gp.fit(self._space.params, self._space.target)
+            if self.is_constrained:
+                self.constraint.fit(self._space.params,
+                                    self._space._constraint_values)

         # Finding argmax of the acquisition function.
-        suggestion = acq_max(
-            ac=utility_function.utility,
-            gp=self._gp,
-            y_max=self._space.target.max(),
-            bounds=self._space.bounds,
-            random_state=self._random_state
-        )
+        suggestion = acq_max(ac=utility_function.utility,
+                             gp=self._gp,
+                             constraint=self.constraint,
+                             y_max=self._space.target.max(),
+                             bounds=self._space.bounds,
+                             random_state=self._random_state)

         return self._space.array_to_params(suggestion)

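For intuition only: the hunk above fits a second GP on the recorded constraint values and passes constraint=self.constraint into acq_max, so the acquisition can be down-weighted where the constraint is likely violated. The snippet below is a minimal, hypothetical sketch of such a feasibility-weighted acquisition; it is not the library's acq_max, and the helper name, the UCB form, and the assumption of a single scalar constraint with a Gaussian posterior are all illustrative assumptions.

# Hypothetical sketch, not bayes_opt's acq_max: weight an upper-confidence-bound
# acquisition by the modelled probability that lb <= constraint(x) <= ub.
import numpy as np
from scipy.stats import norm


def feasibility_weighted_ucb(x, gp, constraint_gp, lb, ub, kappa=2.576):
    """UCB of the objective GP scaled by the probability of feasibility under
    the constraint GP (both assumed to be fitted sklearn GaussianProcessRegressor
    instances)."""
    x = np.atleast_2d(x)
    mean, std = gp.predict(x, return_std=True)
    c_mean, c_std = constraint_gp.predict(x, return_std=True)
    # P(lb <= c(x) <= ub) under a Gaussian posterior for the constraint value.
    p_feasible = norm.cdf(ub, loc=c_mean, scale=c_std) - norm.cdf(lb, loc=c_mean, scale=c_std)
    return (mean + kappa * std) * p_feasible
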
@@ -211,15 +249,15 @@ def maximize(self,
         kappa: float, optional(default=2.576)
             Parameter to indicate how closed are the next parameters sampled.
             Higher value = favors spaces that are least explored.
-            Lower value = favors spaces where the regression function is the
-            highest.
+            Lower value = favors spaces where the regression function is
+            the highest.

         kappa_decay: float, optional(default=1)
             `kappa` is multiplied by this factor every iteration.

         kappa_decay_delay: int, optional(default=0)
-            Number of iterations that must have passed before applying the decay
-            to `kappa`.
+            Number of iterations that must have passed before applying the
+            decay to `kappa`.

         xi: float, optional(default=0.0)
             [unused]
@@ -242,12 +280,11 @@ def maximize(self,
                 util.update_params()
                 x_probe = self.suggest(util)
                 iteration += 1
-
             self.probe(x_probe, lazy=False)

             if self._bounds_transformer and iteration > 0:
-                # The bounds transformer should only modify the bounds after the init_points points (only for the true
-                # iterations)
+                # The bounds transformer should only modify the bounds after
+                # the init_points points (only for the true iterations)
                 self.set_bounds(
                     self._bounds_transformer.transform(self._space))

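For reference, a hedged usage sketch of the constrained path added above. The diff only requires that the constraint argument expose .fun, .lb, and .ub (as scipy.optimize.NonlinearConstraint does) and that the constraint function share its argument names with f; the objective, the constraint, and the bounds below are made-up examples, not code from this commit.

import numpy as np
from scipy.optimize import NonlinearConstraint
from bayes_opt import BayesianOptimization


# Made-up objective and constraint; note they share the argument names x and y.
def objective(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1


def constraint_function(x, y):
    return np.cos(x) * np.cos(y) - np.sin(x) * np.sin(y)


# Feasible region: constraint_function(x, y) <= 0.5
constraint = NonlinearConstraint(constraint_function, lb=-np.inf, ub=0.5)

optimizer = BayesianOptimization(
    f=objective,
    pbounds={"x": (-2, 2), "y": (-3, 3)},
    constraint=constraint,
    random_state=1,
)
optimizer.maximize(init_points=2, n_iter=10)
print(optimizer.max)

When no constraint is passed, the constructor falls back to the original TargetSpace and is_constrained stays False, so existing unconstrained code keeps working unchanged.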