安装BO软件包
1 pip install bayesian-optimization
2 conda install -c conda-forge bayesian-optimization
3 git clone https://github.com/fmfn/BayesianOptimization.git
使用方法
#coding: utf-8
from bayes_opt import BayesianOptimization
#建议、评估
#定义一个目标函数及其参数边界,并实例化一个优化器对象
def black_box_function(x, y):
    """Toy objective for the tutorial; its maximum is f(0, 1) = 1."""
    x_penalty = x ** 2
    y_penalty = (y - 1) ** 2
    return -x_penalty - y_penalty + 1
# Suggest-evaluate-register pattern: with f=None the optimizer is driven
# manually — we ask it where to sample, evaluate the black box ourselves,
# and feed the observation back.
optimizer = BayesianOptimization(
f=None,
pbounds={'x':(-2,2),'y':(-3,3)},
verbose=2,
random_state=1,)
from bayes_opt import UtilityFunction
# Acquisition function: upper confidence bound; kappa trades exploration
# against exploitation (xi is unused by "ucb").
utility = UtilityFunction(kind="ucb",kappa=2.5,xi=0.0)
# Ask the optimizer for the most promising point to evaluate next.
next_point_to_probe = optimizer.suggest(utility)
print("Next point to probe is:", next_point_to_probe)
# Evaluate the black-box function at that point ourselves.
target = black_box_function(**next_point_to_probe)
print("Found the target value to be:", target)
# Register the observation so the internal GP model is updated.
optimizer.register(params=next_point_to_probe,target=target)
# Maximization loop: repeat the suggest -> evaluate -> register cycle.
for _ in range(5):
    next_point = optimizer.suggest(utility)
    target = black_box_function(**next_point)
    optimizer.register(params=next_point,target=target)
    print(next_point,target)
# Best parameter set and target value observed so far.
print(optimizer.max)
# Handling discrete parameters.
def func_with_discrete_params(x, y, d):
    """Objective whose third parameter must be a genuine integer.

    Simulates a parameter that is only meaningful at discrete values.

    Raises:
        TypeError: if ``d`` is not an int.
    """
    # `assert` is stripped under `python -O`, so validate with an
    # explicit raise instead of `assert type(d) == int`.
    if not isinstance(d, int):
        raise TypeError("d must be an int, got {!r}".format(type(d)))
    # Integer-floor ratio, damped by the squared magnitude of x + y.
    return ((x + y + d) // (1 + d) / (1 + (x + y) ** 2))
def function_to_be_optimized(x, y, w):
    """Continuous wrapper: floor ``w`` to an int, then delegate.

    Lets the optimizer explore a continuous ``w`` while the underlying
    objective only ever sees discrete values.
    """
    discrete_d = int(w)
    return func_with_discrete_params(x, y, discrete_d)
optimizer = BayesianOptimization(f=function_to_be_optimized,pbounds={'x': (-10, 10), 'y': (-10, 10), 'w': (0, 5)},verbose=2,random_state=1,)
# NOTE(review): older bayes_opt releases forwarded extra maximize() kwargs
# (e.g. alpha) to the underlying GaussianProcessRegressor; newer releases
# require optimizer.set_gp_params(alpha=1e-3) instead — confirm against the
# installed version.
optimizer.maximize(alpha=1e-3)
# Tuning the underlying Gaussian process.
optimizer = BayesianOptimization(
f=black_box_function,
pbounds={'x': (-2, 2), 'y': (-3, 3)},
verbose=2,
random_state=1,
)
# NOTE(review): passing GP regression kwargs through maximize() works on
# older bayes_opt versions; newer ones expect set_gp_params — verify.
optimizer.maximize(
init_points=1,
n_iter=5,
# GP regression parameters below
alpha=1e-3,
n_restarts_optimizer=5
)
# GP parameters can also be changed after construction.
optimizer.set_gp_params(normalize_y=True)
# Observers / events: subscribe callbacks to optimization lifecycle events.
from bayes_opt.event import DEFAULT_EVENTS,Events
optimizer = BayesianOptimization(
f=black_box_function,
pbounds={'x': (-2, 2), 'y': (-3, 3)},
verbose=2,
random_state=1,
)
class BasicObserver:
    """Minimal observer: subscribing it with callback=None makes the
    optimizer invoke `update` for each fired event."""

    def update(self, event, instance):
        """Does whatever you want with the event and `BayesianOptimization` instance."""
        message = "Event `{}` was observed".format(event)
        print(message)
my_observer = BasicObserver()
# NOTE(review): `OPTMIZATION_STEP` (missing the first I) matches the
# historical typo'd spelling inside bayes_opt's Events enum; newer releases
# rename it to OPTIMIZATION_STEP — verify against the installed version.
optimizer.subscribe(
event=Events.OPTMIZATION_STEP,
subscriber=my_observer,
callback=None, # with callback=None the subscriber's `update` method is used
)
# A plain function can also serve as the callback; the subscriber only
# needs to be any hashable object.
def my_callback(event, instance):
    print("Go nuts here!")
optimizer.subscribe(
event=Events.OPTMIZATION_START,
subscriber="Any hashable object",
callback=my_callback,
)
optimizer.maximize(init_points=1, n_iter=2)
# DEFAULT_EVENTS lists all built-in event names.
# A bare expression only echoes its value in a REPL; print it so the
# script shows the list when run as a file too.
print(DEFAULT_EVENTS)
更多使用方法请参考https://github.com/fmfn/BayesianOptimization