// BayesOpt — criteria_hedge.cpp
1 /*
2 -------------------------------------------------------------------------
3  This file is part of BayesOpt, an efficient C++ library for
4  Bayesian optimization.
5 
6  Copyright (C) 2011-2015 Ruben Martinez-Cantin <rmcantin@unizar.es>
7 
8  BayesOpt is free software: you can redistribute it and/or modify it
9  under the terms of the GNU Affero General Public License as published by
10  the Free Software Foundation, either version 3 of the License, or
11  (at your option) any later version.
12 
13  BayesOpt is distributed in the hope that it will be useful, but
14  WITHOUT ANY WARRANTY; without even the implied warranty of
15  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16  GNU Affero General Public License for more details.
17 
18  You should have received a copy of the GNU Affero General Public License
19  along with BayesOpt. If not, see <http://www.gnu.org/licenses/>.
20 ------------------------------------------------------------------------
21 */
22 #include <numeric>
23 #include <algorithm>
24 #include "boost/bind.hpp"
25 #include "log.hpp"
27 
28 namespace bayesopt
29 {
30 
38  inline double softmax(double g, double eta)
39  {
40  return exp(eta*g);
41  };
42 
44  GP_Hedge::GP_Hedge(){};
45 
46  void GP_Hedge::init(NonParametricProcess *proc)
47  {
48  mProc = proc;
49 
50  size_t n = mCriteriaList.size();
51  if (!n)
52  {
53  throw std::logic_error("Criteria list should be created (pushed)"
54  " before initializing combined criterion.");
55  }
56 
57  loss_ = zvectord(n);
58  gain_ = zvectord(n);
59  prob_ = zvectord(n);
60  cumprob_ = zvectord(n);
61  };
62 
63  void GP_Hedge::initialCriteria()
64  {
65  mIndex = 0;
66  mCurrentCriterium = &mCriteriaList[mIndex];
67  mBestLists.clear();
68  };
69 
70  bool GP_Hedge::rotateCriteria()
71  {
72  ++mIndex;
73  if (mIndex >= mCriteriaList.size())
74  {
75  return false;
76  }
77  else
78  {
79  mCurrentCriterium = &mCriteriaList[mIndex];
80  return true;
81  }
82  };
83 
84  void GP_Hedge::pushResult(const vectord& prevResult)
85  {
86  loss_(mIndex) = computeLoss(prevResult);
87  mBestLists.push_back(prevResult);
88  };
89 
90  std::string GP_Hedge::getBestCriteria(vectord& best)
91  {
92  int optIndex = update_hedge();
93  best = mBestLists[optIndex];
94  return mCriteriaList[optIndex].name();
95  };
96 
97 
98  int GP_Hedge::update_hedge()
99  {
100  // We just care about the differences
101  double max_l = *std::max_element(loss_.begin(),loss_.end());
102  loss_ += svectord(loss_.size(),max_l);
103 
104  // To avoid overflow
105  double mean_g = std::accumulate(gain_.begin(),gain_.end(),0.0)
106  / static_cast<double>(gain_.size());
107  gain_ -= svectord(gain_.size(),mean_g);
108 
109  // Optimal eta according to Shapire
110  double max_g = *std::max_element(gain_.begin(),gain_.end());
111  double eta = (std::min)(10.0,sqrt(2.0*log(3.0)/max_g));
112 
113  // Compute probabilities
114  std::transform(gain_.begin(), gain_.end(), prob_.begin(),
115  boost::bind(softmax,_1,eta));
116 
117  //Normalize
118  double sum_p =std::accumulate(prob_.begin(),prob_.end(),0.0);
119  prob_ /= sum_p;
120 
121  //Update bandits gain
122  gain_ -= loss_;
123 
124  std::partial_sum(prob_.begin(), prob_.end(), cumprob_.begin(),
125  std::plus<double>());
126 
127  randFloat sampleUniform( *mtRandom, realUniformDist(0,1));
128  double u = sampleUniform();
129 
130  for (size_t i=0; i < cumprob_.size(); ++i)
131  {
132  if (u < cumprob_(i))
133  return i;
134  }
135  FILE_LOG(logERROR) << "Error updating Hedge algorithm. "
136  << "Selecting first criteria by default.";
137  return 0;
138  };
139 
140 
141 } //namespace bayesopt
// (Doxygen cross-reference notes from the rendered listing:)
// Portfolio selection of criteria based on Hedge algorithm.
// Namespace of the library interface (see using.dox:1).
// NonParametricProcess: abstract class to implement Bayesian regressors.
// softmax(double g, double eta): softmax function.
// log.hpp: modules and helper macros for logging.