BayesOptBase::BayesOptBase(size_t dim, Parameters parameters):
  mParameters(parameters), mDims(dim)
{
  // ...
  Output2FILE::Stream() = log_fd;   // redirect logging to the opened file

  // The verbose level selects the logger's reporting threshold.
  switch (verbose)
    {
    case 0:  FILELog::ReportingLevel() = logWARNING; break;
    case 1:  FILELog::ReportingLevel() = logINFO;    break;
    case 2:  FILELog::ReportingLevel() = logDEBUG4;  break;
    default: FILELog::ReportingLevel() = logERROR;   break;
    }
  // ...
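The switch above covers console output; per the verbose_level entry further down, levels 3 and above send the same categories to a log file instead. A minimal configuration sketch, assuming the bayesopt::Parameters fields listed later in this page (the header path is an assumption):

  #include "bayesopt/parameters.hpp"

  bayesopt::Parameters loggingParams()
  {
    bayesopt::Parameters par;          // default configuration
    par.verbose_level = 5;             // 5-Info -> logfile (0-2 go to stdout)
    par.log_filename  = "bopt.log";    // only consulted when verbose_level >= 3
    return par;
  }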
  assert(mDims == bestPoint.size());
  FILE_LOG(logINFO) << "State successfully restored from file \""
                    << mParameters.load_filename << "\"";
  // ...
  FILE_LOG(logINFO) << "File \"" << mParameters.load_filename
                    << "\" does not exist,"
                    << " starting a new optimization";
  FILE_LOG(logDEBUG) << "Stuck for " << mCounterStuck << " steps";
  // ...
  FILE_LOG(logINFO) << "Forced random query!";
  mModel->addSample(xNext, yNext);

  // Either relearn the kernel hyperparameters and refit the surrogate,
  // or just update the surrogate with the new observation.
  mModel->updateHyperParameters();
  mModel->fitSurrogateModel();
  // ...
  mModel->updateSurrogateModel();

  mModel->updateCriteria(xNext);
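These calls are the per-iteration bookkeeping: the new observation is added and the surrogate is either refit with relearned hyperparameters or incrementally updated. When the loop is driven by hand instead of through optimize(), the pattern is roughly the following sketch (opt stands for an instance of a concrete subclass and par for the Parameters it was built with; both names are assumptions here):

  opt.initializeOptimization();              // evaluate the initial design
  for (size_t i = 0; i < par.n_iterations; ++i)
    {
      opt.stepOptimization();                // one query plus model update per call
    }
  vectord best = opt.getFinalResult();       // best point found so far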
  // Initial design: allocate and evaluate the first batch of samples.
  matrixd xPoints(nSamples, mDims);
  vectord yPoints(nSamples, 0);
  // ...
  mModel->setSamples(xPoints);
  // ...
  for (size_t i = 0; i < yPoints.size(); i++)
    {
      // The saved response list is cleared on the first sample (i == 0).
      saveResponse(yPoints[i], i == 0);
    }
  mModel->setSamples(yPoints);
  // ...
  mModel->plotDataset(logDEBUG);
  // ...
  mModel->updateHyperParameters();
  mModel->fitSurrogateModel();
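How large that initial design is, and how often the kernel hyperparameters are relearned afterwards, are plain configuration values; a hedged sketch using the Parameters fields documented in the member list below (the numbers are only illustrative):

  bayesopt::Parameters par;
  par.n_init_samples = 10;   // samples evaluated before the optimization loop
  par.n_iter_relearn = 5;    // relearn kernel hyperparameters every 5 samples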
  state.mCounterStuck = mCounterStuck;
  state.mYPrev = mYPrev;
  // ...
  state.mX = mModel->getData()->mX;
  state.mY = mModel->getData()->mY;
  matrixd xPoints(state.mX.size(), state.mX[0].size());
  vectord yPoints(state.mX.size(), 0);
  for (size_t i = 0; i < state.mX.size(); i++)
    {
      row(xPoints, i) = state.mX[i];
      if (i < state.mY.size())
        {
          yPoints[i] = state.mY[i];
        }
      // ...
      saveResponse(yPoints[i], false);
    }
  // ...
  mModel->setSamples(xPoints, yPoints);
  // ...
  mModel->plotDataset(logDEBUG);
  // ...
  mModel->updateHyperParameters();
  mModel->fitSurrogateModel();
  mCounterStuck = state.mCounterStuck;
  mYPrev = state.mYPrev;
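Saving and restoring this state across executions is driven by the load/save fields documented in the member list below; a minimal sketch (the file name is illustrative):

  bayesopt::Parameters par;
  par.load_save_flag = 3;                 // 1-Load data, 2-Save data, 3-Load and save data
  par.load_filename  = "bopt_state.dat";  // state is restored from here if the file exists
  par.save_filename  = "bopt_state.dat";  // state is written here as the run progresses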
  FILE_LOG(logINFO) << "Optimization has already finished, delete \""
                    << mParameters.save_filename
                    << "\" or give more n_iterations in parameters.";
ProbabilityDistribution* BayesOptBase::getPrediction(const vectord& query)
{ return mModel->getPrediction(query); };

const Dataset* BayesOptBase::getData()
{ return mModel->getData(); };

double BayesOptBase::getValueAtMinimum()
{ return mModel->getValueAtMinimum(); };

double BayesOptBase::evaluateCriteria(const vectord& query)
// ...

size_t BayesOptBase::getCurrentIter()
// ...

vectord BayesOptBase::getPointAtMinimum()
{ return mModel->getPointAtMinimum(); };
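All of these accessors forward to the posterior model, so results can be inspected without touching mModel directly. A sketch, where opt is assumed to be a subclass instance after a finished run and ProbabilityDistribution is assumed to be the type returned by getPrediction:

  vectord xin  = opt.getPointAtMinimum();    // optimum in the inner space
  double  ymin = opt.getValueAtMinimum();    // best observed outcome
  bayesopt::ProbabilityDistribution* pd = opt.getPrediction(xin);  // posterior at that point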
  if (yNext == HUGE_VAL)
    {
      throw std::runtime_error("Function evaluation out of range");
    }
  FILE_LOG(logINFO) << "Iteration: " << iteration+1 << " of "
                    << mParameters.n_iterations;
  FILE_LOG(logINFO) << "Query: " << remapPoint(xNext);
  FILE_LOG(logINFO) << "Query outcome: " << yNext;
  FILE_LOG(logINFO) << "Best outcome: " << getValueAtMinimum();
  for (size_t i = 0; i < xPoints.size1(); i++)
    {
      state.mX.push_back(row(xPoints, i));
    }
void BayesOptBase::saveResponse(double yPoint, bool clear)
{
  // ...
  utils::append(state.mY, yPoint);
  // ...
  // Epsilon-greedy exploration: with probability epsilon, jump to a random point.
  randFloat drawSample(mEngine, realUniformDist(0,1));
  double result = drawSample();
  FILE_LOG(logINFO) << "Trying random jump with prob:" << result;
  // ...
  FILE_LOG(logINFO) << "Epsilon-greedy random query!";
  // ...
  vectord Xnext(mDims);

  if (mModel->criteriaRequiresComparison())
    {
      // Metacriteria: optimize each candidate criterion and keep the best.
      mModel->setFirstCriterium();
      // ...
      changed = mModel->setNextCriterium(Xnext);
      // ...
      std::string name = mModel->getBestCriteria(Xnext);
      FILE_LOG(logINFO) << name << " was selected.";
    }
  else
    {
      FILE_LOG(logDEBUG) << "------ Optimizing criteria ------";
      // ...
    }
virtual void generateInitialPoints(matrixd &xPoints)=0
Selects the initial set of points to build the surrogate model.
void plotStepData(size_t iteration, const vectord &xNext, double yNext)
Print data for every step according to the verbose level.
Boost vector and matrix types.
double epsilon
For epsilon-greedy exploration.
size_t load_save_flag
1-Load data, 2-Save data, 3-Load and save data.
void saveToFile(std::string filename)
Creates or overwrites the provided file with the state.
virtual void findOptimal(vectord &xOpt)=0
Call the inner optimization method to find the optimal point according to the criteria.
Class that represents the state of an optimization.
Namespace of the library interface.
virtual bool checkReachability(const vectord &query)
This function checks if the query is valid or not.
std::string load_filename
Init data file path (if applicable)
boost::mt19937 mEngine
Random number generator.
void saveOptimization(BOptState &state)
Saves the current state of the optimization process into a state class.
Parameters mParameters
Configuration parameters.
size_t n_init_samples
Number of samples before optimization.
void saveInitialSamples(matrixd xPoints)
Eases the process of saving a state during initial samples.
virtual vectord samplePoint()=0
Sample a single point in the input space.
virtual double evaluateSample(const vectord &query)=0
Function that defines the actual function to be optimized.
size_t mDims
Number of dimensions.
bool loadFromFile(std::string filename, Parameters &program_params)
Loads the state from the provided file and takes program_params values if needed. ...
size_t n_iterations
Maximum BayesOpt evaluations (budget)
void optimize(vectord &bestPoint)
Execute the optimization process of the function defined in evaluateSample (a usage sketch follows this member list).
size_t force_jump
If >0, and the difference between two consecutive observations is pure noise, for n consecutive steps...
vectord getFinalResult()
Once the optimization has been performed, return the optimal point.
Abstract interface for posterior model/criteria.
Representation of an optimization state.
Dataset model to deal with the vector (real) based datasets.
double evaluateSampleInternal(const vectord &query)
Wrapper for the target function adding any preprocessing or constraint.
void initializeOptimization()
Initialize the optimization process.
vectord nextPoint()
Selects the next point to evaluate according to a certain criteria or metacriteria.
std::string log_filename
Log file path (if applicable)
void stepOptimization()
Execute ONE step of the optimization process of the function defined in evaluateSample.
int verbose_level
Negative-Error, 0-Warning, 1-Info, 2-Debug -> stdout; 3-Error, 4-Warning, 5-Info, >5-Debug -> logfile...
double noise
Variance of observation noise (and nugget)
Modules and helper macros for logging.
BayesOpt common module for interfaces.
size_t mCurrentIter
Current iteration number.
void restoreOptimization(BOptState state)
Restores the optimization process of a previous execution.
virtual ~BayesOptBase()
Default destructor.
virtual vectord remapPoint(const vectord &x)=0
Remap the point x to the original space (e.g.
std::string save_filename
Save data file path (if applicable)
vectord getPointAtMinimum()
Get optimal point in the inner space (e.g.
int random_seed
>=0 -> Fixed seed, <0 -> Time based (variable).
size_t n_iter_relearn
Number of samples before relearning the kernel.
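Putting the interface together, the usage sketch referenced above: a user class derives from bayesopt::ContinuousModel (assumed here to be the continuous-input specialization of BayesOptBase), implements evaluateSample(), and calls optimize(). The header path, class name and the 2-D toy objective are illustrative assumptions, not taken from this page.

  #include <iostream>
  #include "bayesopt/bayesopt.hpp"

  class QuadraticOptimization: public bayesopt::ContinuousModel
  {
  public:
    QuadraticOptimization(bayesopt::Parameters par):
      ContinuousModel(2, par) {}

    // The target function to be minimized (pure virtual in the base class).
    double evaluateSample(const vectord& query)
    {
      const double x = query(0), y = query(1);
      return (x - 0.3)*(x - 0.3) + (y - 0.7)*(y - 0.7);
    }
  };

  int main()
  {
    bayesopt::Parameters par;
    par.n_iterations   = 50;    // evaluation budget after the initial design
    par.n_init_samples = 10;
    par.epsilon        = 0.1;   // epsilon-greedy exploration (see nextPoint() above)

    QuadraticOptimization opt(par);
    vectord best(2);
    opt.optimize(best);         // full process: initialization plus all iterations

    std::cout << "Minimum value: " << opt.getValueAtMinimum() << std::endl;
    return 0;
  }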