BayesOpt
bo_compare.cpp
/*
-------------------------------------------------------------------------
   This file is part of BayesOpt, an efficient C++ library for
   Bayesian optimization.

   Copyright (C) 2011-2015 Ruben Martinez-Cantin <rmcantin@unizar.es>

   BayesOpt is free software: you can redistribute it and/or modify it
   under the terms of the GNU Affero General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   BayesOpt is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU Affero General Public License for more details.

   You should have received a copy of the GNU Affero General Public License
   along with BayesOpt. If not, see <http://www.gnu.org/licenses/>.
------------------------------------------------------------------------
*/

#include "testfunctions.hpp"
#include <ctime>
#include <fstream>

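/*
  Benchmark driver: for each test function (Branin, Camelback, Hartmann6)
  it performs 10 independent Bayesian optimization runs. Each run writes
  one comma-separated line to a log file: the objective value of the
  incumbent at iteration 50, the final objective value, and the CPU time
  in seconds. The whole set of benchmarks is then repeated with MCMC
  learning of the kernel hyperparameters (l_type = L_MCMC, sc_type = SC_MAP).
*/
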
int main(int nargs, char *args[])
{
  bayesopt::Parameters par;     // default parameter set (see parameters.hpp)
  par.verbose_level = 0;        // warnings only on stdout
  par.noise = 1e-10;            // variance of the observation noise (nugget)
  par.force_jump = 30;          // force a jump after 30 consecutive pure-noise steps

  std::ofstream log;
  std::clock_t start_t;

  /* Branin */
  log.open("branin.log");
  par.n_init_samples = 5;
  par.n_iterations = 195;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      BraninNormalized branin(par);
      vectord result(2);

      start_t = clock();
      branin.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          branin.stepOptimization();
          if (jj == 50)
            {
              result = branin.getFinalResult();
              log << branin.evaluateSample(result) << ", ";
            }
        }
      result = branin.getFinalResult();
      log << branin.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  /* Camel */
  log.open("camel.log");
  par.n_init_samples = 5;
  par.n_iterations = 95;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleCamelback camel(par);
      vectord result(2);

      vectord lb(2); lb(0) = -2; lb(1) = -1;
      vectord ub(2); ub(0) = 2;  ub(1) = 1;

      camel.setBoundingBox(lb,ub);

      start_t = clock();
      camel.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          camel.stepOptimization();
          if (jj == 50)
            {
              result = camel.getFinalResult();
              log << camel.evaluateSample(result) << ", ";
            }
        }
      result = camel.getFinalResult();
      log << camel.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  /* Hart */
  log.open("hart.log");
  par.n_init_samples = 10;
  par.n_iterations = 190;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleHartmann6 hart(par);
      vectord result(6);

      start_t = clock();
      hart.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          hart.stepOptimization();
          if (jj == 50)
            {
              result = hart.getFinalResult();
              log << hart.evaluateSample(result) << ", ";
            }
        }
      result = hart.getFinalResult();
      log << hart.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  /***********************************************************************/
  /* Same benchmarks, now relearning the kernel hyperparameters by MCMC */
  par.n_init_samples = 2;
  par.n_iter_relearn = 1;       // relearn the kernel after every new sample

  par.l_type = L_MCMC;          // MCMC learning of the kernel parameters
  par.sc_type = SC_MAP;         // MAP score for the kernel hyperparameters

  /* Branin */
  log.open("branin_mcmc.log");
  par.n_iterations = 198;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      BraninNormalized branin(par);
      vectord result(2);

      start_t = clock();
      branin.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          branin.stepOptimization();
          if (jj == 50)
            {
              result = branin.getFinalResult();
              log << branin.evaluateSample(result) << ", ";
            }
        }
      result = branin.getFinalResult();
      log << branin.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  /* Camel */
  log.open("camel_mcmc.log");
  par.n_iterations = 98;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleCamelback camel(par);
      vectord result(2);

      vectord lb(2); lb(0) = -2; lb(1) = -1;
      vectord ub(2); ub(0) = 2;  ub(1) = 1;

      camel.setBoundingBox(lb,ub);

      start_t = clock();
      camel.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          camel.stepOptimization();
          if (jj == 50)
            {
              result = camel.getFinalResult();
              log << camel.evaluateSample(result) << ", ";
            }
        }
      result = camel.getFinalResult();
      log << camel.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  /* Hart */
  log.open("hart_mcmc.log");
  par.n_iterations = 198;

  for (size_t ii = 0; ii < 10; ++ii)
    {
      par.random_seed = ii;
      ExampleHartmann6 hart(par);
      vectord result(6);

      start_t = clock();
      hart.initializeOptimization();

      for (size_t jj = 0; jj < par.n_iterations; ++jj)
        {
          hart.stepOptimization();
          if (jj == 50)
            {
              result = hart.getFinalResult();
              log << hart.evaluateSample(result) << ", ";
            }
        }
      result = hart.getFinalResult();
      log << hart.evaluateSample(result) << ", ";

      log << static_cast<double>(clock() - start_t) / static_cast<double>(CLOCKS_PER_SEC)
          << std::endl;
    }

  log.close();

  return 0;
}
Parameter fields used above (declared in parameters.hpp):

  n_iterations   (size_t)         Maximum number of BayesOpt evaluations (budget).
  n_init_samples (size_t)         Number of samples before optimization starts.
  n_iter_relearn (size_t)         Number of samples between kernel relearning steps.
  random_seed    (int)            >= 0: fixed seed; < 0: time-based (variable) seed.
  verbose_level  (int)            Negative: Error, 0: Warning, 1: Info, 2: Debug (to stdout);
                                  3: Error, 4: Warning, 5: Info, >5: Debug (to logfile).
  noise          (double)         Variance of the observation noise (and nugget).
  force_jump     (size_t)         If > 0, and the difference between two consecutive
                                  observations is pure noise for that many consecutive
                                  steps, force a jump.
  l_type         (learning_type)  Type of learning for the kernel parameters.
  sc_type        (score_type)     Score type for the kernel hyperparameters (ML, MAP, etc.).
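
The step-wise loop above (initializeOptimization / stepOptimization / getFinalResult) is used so that the incumbent can be logged at iteration 50. When no intermediate logging is needed, a single call can drive the whole run. Below is a minimal sketch, assuming the same BraninNormalized wrapper from testfunctions.hpp and the one-shot optimize() entry point of bayesopt::ContinuousModel:

  #include "testfunctions.hpp"
  #include <iostream>

  int main()
  {
    bayesopt::Parameters par;      // defaults, as in bo_compare.cpp
    par.n_init_samples = 5;        // size of the initial design
    par.n_iterations   = 195;      // optimization budget
    par.random_seed    = 0;        // fixed seed for reproducibility

    BraninNormalized branin(par);
    vectord result(2);
    branin.optimize(result);       // initialization + all iterations in one call

    std::cout << branin.evaluateSample(result) << std::endl;
    return 0;
  }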