BayesOpt
gpmlnlopt.m
% Bayesian optimization using GPML and NLopt's DIRECT-L
%
% We assume that both libraries (GPML and the NLopt MATLAB interface)
% are on the MATLAB path.
%
% Author: Ruben Martinez-Cantin <rmcantin@unizar.es>
%

clear all, close all
addpath('../testfunctions')
seed = 197; randn('seed',seed), rand('seed',seed)
plotting = 0;

% Objective: 2-D Ackley function on the box [-32.768, 32.768]^2
f = @ackley;
bounds = ones(2,1)*[-32.768, 32.768];   % rows: dimensions, columns: [lower, upper]

% Initial design: evaluate the objective on a Latin hypercube sample
nInit = 30;
nIter = 270;
xtr = lhsu(bounds(:,1),bounds(:,2),nInit);
ytr = zeros(size(xtr,1),1);
for i=1:size(xtr,1)
    ytr(i) = f(xtr(i,:));
end

start_t = cputime;

% Set up the GP prior: Matern-3/2 isotropic covariance, zero mean,
% Gaussian likelihood and exact inference
cov = {@covMaterniso,3};
sf = 1; ell = 0.4; hyp0.cov = log([ell;sf]);
mean = 'meanZero';
sn = 0.005; hyp0.lik = log(sn);
lik = 'likGauss'; inf = 'infExact';

% Learn the hyperparameters only once, before the optimization loop,
% by minimizing the negative log marginal likelihood
Ncg = 50;
hyp = minimize(hyp0, 'gp', -Ncg, inf, mean, cov, lik, xtr, ytr); % opt hypers

% Set up NLopt's DIRECT-L as the inner optimizer for the acquisition function
opt.algorithm = NLOPT_GN_DIRECT_L;
opt.lower_bounds = bounds(:,1);
opt.upper_bounds = bounds(:,2);

opt.maxeval = 1000;
% Legacy options for Finkel's Direct implementation, kept for reference:
% opts.maxevals = 1000;
% opts.maxits = 1000;
% opts.showits = 0;
% Problem.f = criteria;

times = zeros(nIter,1);

for i = 1:nIter

    fprintf(1,'- Iter %i of %i \n', i, nIter);
    % Legacy call to Finkel's Direct implementation, kept for reference:
    %[minval, newX, hist] = Direct(Problem, bounds, opts, hyp, inf, mean,...
    %                              cov, lik, xtr, ytr);

    % Acquisition: negative expected improvement given the current GP and data
    criteria = @(x) nei(x, hyp, inf, mean, cov, lik, xtr, ytr);
    opt.min_objective = criteria;

    % Minimize the acquisition with DIRECT-L to choose the next query point
    [newX, minval, retcode] = nlopt_optimize(opt, [0 0]);
    newY = f(newX);
    xtr = [xtr; newX];
    ytr = [ytr; newY];

    times(i) = cputime - start_t;

    fprintf(1,'New evaluation point. y = %2.4f \n', newY);
    disp(newX')

    if plotting
        % =================================================================
        % PLOT RESULTS (only meaningful for 1-D objectives on [0,1];
        % disabled by default since this example optimizes the 2-D Ackley)
        % =================================================================
        x = 0:0.001:1;
        for j = 1:length(x)
            [crit(j), ymu(j), ys(j)] = feval(criteria, x(j));
            yh(j) = f(x(j));
        end

        lw = 2;
        h = figure(1);
        clf; hold on;

        subplot(3,1,[1 2]); hold on;
        title('Bayesian Optimization');
        plot(xtr, ytr, 'ko', 'LineWidth', lw);
        plot(newX, newY, 'ro', 'LineWidth', lw);

        plot(x, yh, 'b', 'LineWidth', lw);
        plot(x, ymu, 'k', 'LineWidth', lw);
        plot(x, ymu+2*ys, 'k:', 'LineWidth', lw);
        plot(x, ymu-2*ys, 'k:', 'LineWidth', lw);
        avalue = axis; axis([0 1 avalue(3) avalue(4)]);

        subplot(3,1,3); hold on;
        title('Criteria');
        bar(x, -crit, 'k');
        avalue = axis; axis([0 1 avalue(3) avalue(4)]);

        pause(0.2);
    end
end

save('log_aiken.mat', 'times');

minY = min(ytr);   % best function value found
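
The acquisition function nei (negative expected improvement) is assumed to be
provided as a separate helper on the path; it is not part of this listing. A
minimal sketch built on GPML's gp prediction interface is given below, matching
the call signature and the [crit, ymu, ys] outputs used above. The actual nei.m
bundled with the demo may differ in its details.

nei.m (illustrative sketch)

function [neg_ei, ymu, ys] = nei(x, hyp, inf, mean, cov, lik, xtr, ytr)
% Negative expected improvement at a single query point x (sketch only).
% NLopt minimizes this value, so the EI is returned with a flipped sign.
x = x(:)';                              % force a row vector (one test point)
[ymu, ys2] = gp(hyp, inf, mean, cov, lik, xtr, ytr, x);
ys = sqrt(max(ys2, 1e-12));             % predictive std, floored for stability
fmin = min(ytr);                        % best observation so far (minimization)
z = (fmin - ymu) ./ ys;
Phi = 0.5 * erfc(-z / sqrt(2));         % standard normal CDF
phi = exp(-0.5 * z.^2) / sqrt(2*pi);    % standard normal PDF
neg_ei = -((fmin - ymu) .* Phi + ys .* phi);
end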
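
The initial design uses lhsu from ../testfunctions, which is also not shown
here. A self-contained sketch of Latin hypercube sampling with uniform
marginals over the box [xmin, xmax] (the bundled version may differ):

lhsu.m (illustrative sketch)

function s = lhsu(xmin, xmax, nsample)
% Latin hypercube sampling, uniform marginals (sketch only).
% Returns an nsample-by-length(xmin) matrix of points inside the box.
nvar = length(xmin);
s = zeros(nsample, nvar);
for j = 1:nvar
    strata = randperm(nsample)';        % one point per stratum, shuffled rows
    u = rand(nsample, 1);
    p = (strata - u) / nsample;         % stratified uniforms in (0,1)
    s(:,j) = xmin(j) + p .* (xmax(j) - xmin(j));
end
end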
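
Finally, ackley is assumed to implement the standard Ackley benchmark, whose
global minimum is 0 at the origin of the box defined above. A common form,
evaluating a single point given as a row vector (the version in
../testfunctions may use different constants):

ackley.m (illustrative sketch)

function y = ackley(x)
% Standard Ackley test function (sketch only); minimum 0 at x = 0.
a = 20; b = 0.2; c = 2*pi;
d = numel(x);
y = -a * exp(-b * sqrt(sum(x.^2) / d)) ...
    - exp(sum(cos(c * x)) / d) + a + exp(1);
end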