24 #include "boost/bind.hpp" 38 inline double softmax(
double g,
double eta)
44 GP_Hedge::GP_Hedge(){};
// NOTE(review): fragment of GP_Hedge's initialization routine. The enclosing
// function signature and the guard condition around the throw are missing from
// this extract (the stray integers are line numbers from the original file fused
// into the text) — reconstruct against the original source before editing.
// n is the number of criteria already pushed into the portfolio.
50 size_t n = mCriteriaList.size();
// The combined (Hedge) criterion is meaningless without a populated portfolio;
// presumably guarded by a check on n — TODO confirm the missing condition.
53 throw std::logic_error(
"Criteria list should be created (pushed)" 54 " before initializing combined criterion.");
// Allocate the cumulative-probability vector with one slot per criterion.
60 cumprob_ = zvectord(n);
// Select the starting criterion of the portfolio for a fresh rotation.
// NOTE(review): only one body statement survives in this extract; presumably
// mIndex is reset to 0 just before this line — confirm against the original.
63 void GP_Hedge::initialCriteria()
// Point the active-criterion handle at the entry indexed by mIndex.
66 mCurrentCriterium = &mCriteriaList[mIndex];
// Advance to the next criterion in the portfolio; the bool return presumably
// reports whether another criterion was available (false once the index passes
// the end). NOTE(review): the index increment and both return statements are
// missing from this extract — confirm against the original file.
70 bool GP_Hedge::rotateCriteria()
// Bounds check: rotation is exhausted when mIndex runs past the list.
73 if (mIndex >= mCriteriaList.size())
// Otherwise make the criterion at mIndex the active one.
79 mCurrentCriterium = &mCriteriaList[mIndex];
84 void GP_Hedge::pushResult(
const vectord& prevResult)
86 loss_(mIndex) = computeLoss(prevResult);
87 mBestLists.push_back(prevResult);
90 std::string GP_Hedge::getBestCriteria(vectord& best)
92 int optIndex = update_hedge();
93 best = mBestLists[optIndex];
94 return mCriteriaList[optIndex].name();
// One step of the Hedge algorithm: convert accumulated gains into selection
// probabilities and sample a criterion index. Returns the sampled index, or
// presumably falls back to 0 on failure (see the error log at the end).
// NOTE(review): this extract is garbled — the transform's functor, the
// normalization using sum_p, the loop body's return, and the final fallback
// return are all missing. Confirm every step against the original file.
98 int GP_Hedge::update_hedge()
// Shift losses by the current maximum: only relative differences matter.
101 double max_l = *std::max_element(loss_.begin(),loss_.end());
102 loss_ += svectord(loss_.size(),max_l);
// Center the gains around their mean to avoid overflow in the exponentials.
105 double mean_g = std::accumulate(gain_.begin(),gain_.end(),0.0)
106 /
static_cast<double>(gain_.size());
107 gain_ -= svectord(gain_.size(),mean_g);
// Learning rate eta, capped at 10 and scaled by the largest centered gain.
110 double max_g = *std::max_element(gain_.begin(),gain_.end());
111 double eta = (std::min)(10.0,sqrt(2.0*log(3.0)/max_g));
// Map each gain to an unnormalized probability weight (the softmax functor
// argument is missing from this extract — TODO confirm).
114 std::transform(gain_.begin(), gain_.end(), prob_.begin(),
// sum_p is computed for normalization; the normalizing statement itself is
// missing here — confirm prob_ is divided by sum_p in the original.
118 double sum_p =std::accumulate(prob_.begin(),prob_.end(),0.0);
// Build the cumulative distribution for inverse-transform sampling.
124 std::partial_sum(prob_.begin(), prob_.end(), cumprob_.begin(),
125 std::plus<double>());
// Draw u uniformly in [0,1) and pick the first index whose cumulative
// probability exceeds it (the return inside the loop is missing here).
127 randFloat sampleUniform( *mtRandom, realUniformDist(0,1));
128 double u = sampleUniform();
130 for (
size_t i=0; i < cumprob_.size(); ++i)
// Reached only if sampling failed; log and (presumably) default to index 0.
135 FILE_LOG(logERROR) <<
"Error updating Hedge algorithm. " 136 <<
"Selecting first criteria by default.";
Portfolio selection of criteria based on Hedge algorithm.
Namespace of the library interface.
Abstract class to implement Bayesian regressors.
double softmax(double g, double eta)
Softmax function: maps a gain value g to an unnormalized probability weight, with eta acting as the temperature parameter.
Modules and helper macros for logging.