Skip to content

Commit c6a754e

Browse files
fmfnfmfn
authored and committed
Overhaul of examples
This commit updates/fixes/improves/adds to the set of examples. - Two tour notebooks are included with everything users need to know about how to use the package. - A clear yet realistic example of optimizing parameters of machine learning models is included. It is a much nicer version of the old sklearn_example script. - A glorious example of how to utilize this package in a concurrent fashion was added. - The buggy and redundant xgboost example was removed. PS: The advanced tour notebook is not quite done yet. However, I needed to merge this branch so people could submit PRs to the updated release branch.
1 parent b06c2d4 commit c6a754e

13 files changed

+1261
-343
lines changed

bayes_opt/__init__.py

Lines changed: 8 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,11 @@
11
from .bayesian_optimization import BayesianOptimization, Events
22
from .util import UtilityFunction
3-
from .observer import ScreenLogger
3+
from .observer import ScreenLogger, JSONLogger
44

5-
__all__ = ["BayesianOptimization", "UtilityFunction", "Events", "ScreenLogger"]
5+
__all__ = [
6+
"BayesianOptimization",
7+
"UtilityFunction",
8+
"Events",
9+
"ScreenLogger",
10+
"JSONLogger",
11+
]

bayes_opt/bayesian_optimization.py

Lines changed: 8 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,7 @@ def dispatch(self, event):
6161

6262

6363
class BayesianOptimization(Observable):
64-
def __init__(self, f, pbounds, random_state=None, verbose=1):
64+
def __init__(self, f, pbounds, random_state=None, verbose=2):
6565
""""""
6666
self._random_state = ensure_rng(random_state)
6767

@@ -96,16 +96,18 @@ def max(self):
9696
def res(self):
9797
return self._space.res()
9898

99-
def register(self, x, target):
99+
def register(self, params, target):
100100
"""Expect observation with known target"""
101-
self._space.register(x, target)
101+
self._space.register(params, target)
102+
self.dispatch(Events.OPTMIZATION_STEP)
102103

103-
def probe(self, x, lazy=True):
104+
def probe(self, params, lazy=True):
104105
"""Probe target of x"""
105106
if lazy:
106-
self._queue.add(x)
107+
self._queue.add(params)
107108
else:
108-
self._space.probe(x)
109+
self._space.probe(params)
110+
self.dispatch(Events.OPTMIZATION_STEP)
109111

110112
def suggest(self, utility_function):
111113
"""Most promissing point to probe next"""
@@ -166,9 +168,7 @@ def maximize(self,
166168
iteration += 1
167169

168170
self.probe(x_probe, lazy=False)
169-
self.dispatch(Events.OPTMIZATION_STEP)
170171

171-
# Notify about finished optimization
172172
self.dispatch(Events.OPTMIZATION_END)
173173

174174
def set_bounds(self, new_bounds):

bayes_opt/observer.py

Lines changed: 19 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -55,7 +55,7 @@ class ScreenLogger(_Tracker):
5555
_default_cell_size = 9
5656
_default_precision = 4
5757

58-
def __init__(self, verbose=0):
58+
def __init__(self, verbose=2):
5959
self._verbose = verbose
6060
self._header_length = None
6161
super(ScreenLogger, self).__init__()
@@ -101,11 +101,11 @@ def _step(self, instance, colour=Colours.black):
101101
res = instance.res[-1]
102102
cells = []
103103

104-
cells.append(self._format_number(self._iterations))
104+
cells.append(self._format_number(self._iterations + 1))
105105
cells.append(self._format_number(res["target"]))
106106

107-
for val in res["params"].values():
108-
cells.append(self._format_number(val))
107+
for key in instance.space.keys:
108+
cells.append(self._format_number(res["params"][key]))
109109

110110
return "| " + " | ".join(map(colour, cells)) + " |"
111111

@@ -120,21 +120,26 @@ def _header(self, instance):
120120
self._header_length = len(line)
121121
return line + "\n" + ("-" * self._header_length)
122122

123+
def _is_new_max(self, instance):
124+
if self._previous_max is None:
125+
self._previous_max = instance.max["target"]
126+
return instance.max["target"] > self._previous_max
127+
123128
def update(self, event, instance):
124129
if event == Events.OPTMIZATION_START:
125-
line = self._header(instance)
130+
line = self._header(instance) + "\n"
126131
elif event == Events.OPTMIZATION_STEP:
127-
colour = (
128-
Colours.purple if
129-
self._previous_max is None or
130-
instance.max["target"] > self._previous_max else
131-
Colours.black
132-
)
133-
line = self._step(instance, colour=colour)
132+
is_new_max = self._is_new_max(instance)
133+
if self._verbose == 1 and not is_new_max:
134+
line = ""
135+
else:
136+
colour = Colours.purple if is_new_max else Colours.black
137+
line = self._step(instance, colour=colour) + "\n"
134138
elif event == Events.OPTMIZATION_END:
135-
line = "=" * self._header_length
139+
line = "=" * self._header_length + "\n"
136140

137-
print(line)
141+
if self._verbose:
142+
print(line, end="")
138143
self._update_tracker(event, instance)
139144

140145
class JSONLogger(_Tracker):

bayes_opt/target_space.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -182,7 +182,7 @@ def probe(self, x):
182182
x = self._as_array(x)
183183

184184
try:
185-
y = self._cache[_hashable(x)]
185+
target = self._cache[_hashable(x)]
186186
except KeyError:
187187
params = dict(zip(self._keys, x))
188188
target = self.target_func(**params)

bayes_opt/util.py

Lines changed: 29 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
from __future__ import print_function
2-
from __future__ import division
31
import warnings
42
import numpy as np
53
from scipy.stats import norm
@@ -129,6 +127,35 @@ def _poi(x, gp, y_max, xi):
129127
return norm.cdf(z)
130128

131129

130+
def load_logs(optimizer, logs):
131+
"""Load previous ...
132+
133+
"""
134+
import json
135+
136+
if isinstance(logs, str):
137+
logs = [logs]
138+
139+
for log in logs:
140+
with open(log, "r") as j:
141+
while True:
142+
try:
143+
iteration = next(j)
144+
except StopIteration:
145+
break
146+
147+
iteration = json.loads(iteration)
148+
try:
149+
optimizer.register(
150+
x=iteration["params"],
151+
target=iteration["target"],
152+
)
153+
except KeyError:
154+
pass
155+
156+
return optimizer
157+
158+
132159
def unique_rows(a):
133160
"""
134161
A function to trim repeated rows that may appear when optimizing.

0 commit comments

Comments (0)