BayesOpt
demo_dimscaling.py
#!/usr/bin/env python
# -------------------------------------------------------------------------
# This file is part of BayesOpt, an efficient C++ library for
# Bayesian optimization.
#
# Copyright (C) 2011-2015 Ruben Martinez-Cantin <rmcantin@unizar.es>
#
# BayesOpt is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BayesOpt is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BayesOpt. If not, see <http://www.gnu.org/licenses/>.
# ------------------------------------------------------------------------

# This example was provided by Janto Dreijer <jantod@gmail.com>

import math
import numpy as np
import bayesopt

def quad(x, mu):
    """Mean squared deviation of x from the target vector mu."""
    return ((np.asarray(x) - mu)**2).mean()

def func(x):
    # Quadratic objective with minimum value 0 at x = (1, 2, ..., len(x)).
    target = np.arange(1, 1 + len(x))
    return quad(x, target)

# Use the default parameters (an empty dict keeps the library defaults)
params = {}  # bayesopt.initialize_params()

# Override some of them
params['n_init_samples'] = 30                # initial design evaluations before the BO loop
params['n_iter_relearn'] = 1                 # relearn kernel hyperparameters every iteration
params['l_type'] = "mcmc"                    # learn hyperparameters by MCMC sampling
params['noise'] = 1e-10                      # (nearly) noiseless observations
params['kernel_name'] = "kMaternARD5"        # Matern 5/2 kernel with ARD length-scales
params['kernel_hp_mean'] = [1]               # prior mean of the kernel hyperparameters
params['kernel_hp_std'] = [5]                # prior std of the kernel hyperparameters
params['surr_name'] = "sStudentTProcessNIG"  # Student-t process surrogate (NIG prior)
#params['crit_name'] = "cMI"

# 20-dimensional problem over the box [0, 20]^dim
dim = 20
lb = np.zeros((dim,))
ub = np.ones((dim,))*20

# Returns the best value found, its location, and an error code
mvalue, x_out, error = bayesopt.optimize(func, dim, lb, ub, params)

print("Result", mvalue, x_out)

print("Global optimum", 0, np.arange(1, 1 + dim))

print("Y Gap", mvalue)
print("X Gap", math.sqrt(mvalue*dim))
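
# --- Not part of the original example: a small sanity-check sketch. ---
# Because func() returns the mean squared per-coordinate error, sqrt(mvalue*dim)
# equals the Euclidean distance between x_out and the known optimum
# (1, 2, ..., dim), which is what the "X Gap" line reports. This assumes x_out
# comes back as a length-dim sequence, as in the call above.
x_best = np.asarray(x_out)
print("Distance to optimum", np.linalg.norm(x_best - np.arange(1, 1 + dim)))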