00001
00015 #ifndef _DLR_OPTIMIZERLINESEARCH_H_
00016 #define _DLR_OPTIMIZERLINESEARCH_H_
00017
00018 #include <dlrOptimization/optimizer.h>
00019 #include <dlrOptimization/optimizerCommon.h>
00020
00021 namespace dlr {
00022
00023 namespace optimization {
00024
00042 template <class Functor>
00043 class OptimizerLineSearch
00044 : public Optimizer<Functor>
00045 {
00046 public:
00047
00048 typedef typename Functor::argument_type argument_type;
00049 typedef typename Functor::result_type result_type;
00050
00056 OptimizerLineSearch();
00057
00067 explicit OptimizerLineSearch(const Functor& functor);
00068
00081 OptimizerLineSearch(const Functor& functor,
00082 const argument_type& startPoint,
00083 const argument_type& startGradient,
00084 const result_type& startValue);
00085
00091 OptimizerLineSearch(const OptimizerLineSearch& source);
00092
00096 virtual
00097 ~OptimizerLineSearch();
00098
00107 size_t
00108 getNumberOfFunctionCalls() {return this->m_functionCallCount;}
00109
00119 virtual void
00120 setStartPoint(const argument_type& startPoint);
00121
00136 virtual void
00137 setStartPoint(const argument_type& startPoint,
00138 const result_type& startValue,
00139 const argument_type& startGradient);
00140
00150 virtual void
00151 setInitialStep(const argument_type& initialStep);
00152
00167 void
00168 setParameters(double argumentTolerance=1.0e-7,
00169 double alpha=1.0e-4,
00170 double maximumStepMagnitude=100.0);
00171
00178 OptimizerLineSearch&
00179 operator=(const OptimizerLineSearch& source);
00180
00181 protected:
00182
00188 void
00189 checkState() {
00190
00191 }
00192
00200 std::pair<typename Functor::argument_type, typename Functor::result_type>
00201 run();
00202
00203
00204 double m_alpha;
00205 double m_argumentTolerance;
00206 size_t m_functionCallCount;
00207 argument_type m_initialStep;
00208 double m_initialStepMagnitude;
00209 double m_maximumStepMagnitude;
00210 argument_type m_startGradient;
00211 argument_type m_startPoint;
00212 result_type m_startValue;
00213
00214 };
00215
00216 }
00217
00218 }
00219
00220
00221
00222
00223 namespace dlr {
00224
00225 using optimization::OptimizerLineSearch;
00226
00227 }
00228
00229
00230
00231
00232
00233
00234
#include <algorithm>
#include <cmath>
#include <sstream>
#include <dlrCommon/exception.h>
#include <dlrOptimization/optimizerCommon.h>
00239
00240 namespace dlr {
00241
00242 namespace optimization {
00243
// Default constructor.  Zero-initializes every member, then installs
// the documented default parameter values via setParameters().  The
// functor, start point, and initial step must be supplied before any
// optimization can run.
template <class Functor>
OptimizerLineSearch<Functor>::
OptimizerLineSearch()
  : Optimizer<Functor>(),
    m_alpha(0.0),
    m_argumentTolerance(0.0),
    m_functionCallCount(0),
    m_initialStep(),
    m_initialStepMagnitude(0.0),
    m_maximumStepMagnitude(0.0),
    m_startGradient(),
    m_startPoint(),
    m_startValue(0.0)
{
  // Replace the zero placeholders with the real defaults
  // (argumentTolerance, alpha, maximumStepMagnitude).
  this->setParameters();
}
00260
// Constructor taking the objective functor.  All numeric members are
// zero-initialized, then setParameters() installs the documented
// defaults.  A start point and initial step must still be supplied
// before optimizing.
template <class Functor>
OptimizerLineSearch<Functor>::
OptimizerLineSearch(const Functor& functor)
  : Optimizer<Functor>(functor),
    m_alpha(0.0),
    m_argumentTolerance(0.0),
    m_functionCallCount(0),
    m_initialStep(),
    m_initialStepMagnitude(0.0),
    m_maximumStepMagnitude(0.0),
    m_startGradient(),
    m_startPoint(),
    m_startValue(0.0)
{
  // Replace the zero placeholders with the real defaults
  // (argumentTolerance, alpha, maximumStepMagnitude).
  this->setParameters();
}
00277
00278 template<class Functor>
00279 OptimizerLineSearch<Functor>::
00280 OptimizerLineSearch(const OptimizerLineSearch& source)
00281 : Optimizer<Functor>(source),
00282 m_alpha(source.m_alpha),
00283 m_argumentTolerance(source.argumentTolerance),
00284 m_functionCallCount(source.m_functionCallCount),
00285 m_initialStep(source.m_initialStep.size()),
00286 m_initialStepMagnitude(source.initialStepMagnitude),
00287 m_maximumStepMagnitude(source.m_maximumStepMagnitude),
00288 m_startGradient(source.m_startGradient.size()),
00289 m_startPoint(source.m_startPoint.size()),
00290 m_startValue(source.m_startValue)
00291 {
00292 copyArgumentType(source.m_initialStep, this->m_initialStep);
00293 copyArgumentType(source.m_startGradient, this->m_startGradient);
00294 copyArgumentType(source.m_startPoint, this->m_startPoint);
00295 }
00296
// Destructor.  All members are value types that clean up after
// themselves, so there is nothing to release here.
template <class Functor>
OptimizerLineSearch<Functor>::
~OptimizerLineSearch()
{

}
00303
00304 template <class Functor>
00305 void OptimizerLineSearch<Functor>::
00306 setStartPoint(const argument_type& startPoint)
00307 {
00308
00309 copyArgumentType(startPoint, this->m_startPoint);
00310
00311
00312
00313
00314 this->m_startValue = this->m_functor(startPoint);
00315 this->m_startGradient = this->m_functor.gradient(startPoint);
00316
00317
00318
00319 this->m_functionCallCount = 0;
00320
00321
00322
00323
00324 this->m_needsOptimization = true;
00325 }
00326
00327 template<class Functor>
00328 void
00329 OptimizerLineSearch<Functor>::
00330 setStartPoint(const argument_type& startPoint,
00331 const result_type& startValue,
00332 const argument_type& startGradient)
00333 {
00334
00335 copyArgumentType(startPoint, this->m_startPoint);
00336 this->m_startValue = startValue;
00337 copyArgumentType(startGradient, this->m_startGradient);
00338
00339
00340 this->m_functionCallCount = 0;
00341
00342
00343
00344
00345 this->m_needsOptimization = true;
00346 }
00347
00348 template<class Functor>
00349 void OptimizerLineSearch<Functor>::
00350 setInitialStep(const argument_type& initialStep)
00351 {
00352
00353 double initialStepMagnitude =
00354 std::sqrt(dotArgumentType(initialStep, initialStep));
00355 if(initialStepMagnitude == 0.0) {
00356 DLR_THROW3(ValueException, "OptimizerLineSearch::setInitialStep()",
00357 "initialStep magnitude is equal to 0.0.");
00358 }
00359
00360
00361
00362 this->m_initialStepMagnitude = initialStepMagnitude;
00363 copyArgumentType(initialStep, this->m_initialStep);
00364
00365
00366 this->m_functionCallCount = 0;
00367
00368
00369
00370
00371 this->m_needsOptimization = true;
00372 }
00373
00374 template<class Functor>
00375 void OptimizerLineSearch<Functor>::
00376 setParameters(double argumentTolerance,
00377 double alpha,
00378 double maximumStepMagnitude)
00379 {
00380 this->m_argumentTolerance = argumentTolerance;
00381 this->m_alpha = alpha;
00382 this->m_maximumStepMagnitude = maximumStepMagnitude;
00383
00384
00385 this->m_functionCallCount = 0;
00386
00387
00388
00389
00390 this->m_needsOptimization = true;
00391 }
00392
00393 template <class Functor>
00394 OptimizerLineSearch<Functor>& OptimizerLineSearch<Functor>::
00395 operator=(const OptimizerLineSearch& source)
00396 {
00397
00398 Optimizer<Functor>::operator=(source);
00399
00400
00401
00402 this->m_alpha = source.m_alpha;
00403 this->m_argumentTolerance = source.m_argumentTolerance;
00404 this->m_functionCallCount = source.m_functionCallCount;
00405 this->m_initialStepMagnitude = source.m_initialStepMagnitude;
00406 this->m_maximumStepMagnitude = source.m_maximumStepMagnitude;
00407 this->m_startValue = source.m_startValue;
00408 copyArgumentType(source.m_initialStep, this->m_initialStep);
00409 copyArgumentType(source.m_startGradient, this->m_startGradient);
00410 copyArgumentType(source.m_startPoint, this->m_startPoint);
00411 return *this;
00412 }
00413
00414 template <class Functor>
00415 std::pair<typename Functor::argument_type, typename Functor::result_type>
00416 OptimizerLineSearch<Functor>::
00417 run()
00418 {
00419
00420 this->checkState();
00421
00422
00423 this->m_functionCallCount = 0;
00424
00425
00426 argument_type initialStep;
00427 copyArgumentType(this->m_initialStep, initialStep);
00428 double initialStepMagnitude = this->m_initialStepMagnitude;
00429
00430
00431 if(this->m_initialStepMagnitude > this->m_maximumStepMagnitude) {
00432 initialStepMagnitude = this->m_maximumStepMagnitude;
00433 for(size_t index = 0; index < initialStep.size(); ++index) {
00434 initialStep[index] *=
00435 this->m_maximumStepMagnitude / this->m_initialStepMagnitude;
00436 }
00437 }
00438
00439
00440 double slope = dotArgumentType(this->m_startGradient, initialStep);
00441 if(slope >= 0.0) {
00442 std::ostringstream message;
00443 message << "Initial search direction: " << initialStep << " "
00444 << "is not downhill with respect to initial gradient: "
00445 << this->m_startGradient << ".";
00446 DLR_THROW3(StateException, "OptimizerLineSearch::run()",
00447 message.str().c_str());
00448
00449 }
00450
00451
00452 double scale = contextSensitiveScale(initialStep, this->m_startPoint);
00453 if(scale == 0.0) {
00454 DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
00455 "Invalid initial scale. "
00456 "Perhaps initialStep is the zero vector.");
00457 }
00458 double minimumLambda = this->m_argumentTolerance / scale;
00459
00460
00461 double lambdaValue = 1.0;
00462 argument_type nextSamplePoint(initialStep.size());
00463 double previousLambdaValue = 1.0;
00464 double previousNextFunctionValue = this->m_startValue;
00465 while(1) {
00466
00467
00468
00469
00470
00471 if(lambdaValue < minimumLambda) {
00472
00473
00474 copyArgumentType(this->m_startPoint, nextSamplePoint);
00475
00476
00477
00478 return std::make_pair(nextSamplePoint, this->m_startValue);
00479 }
00480
00481
00482 for(size_t index = 0; index < initialStep.size(); ++index) {
00483 nextSamplePoint[index] =
00484 this->m_startPoint[index] + (lambdaValue * initialStep[index]);
00485 }
00486
00487
00488 result_type nextFunctionValue = this->m_functor(nextSamplePoint);
00489 ++(this->m_functionCallCount);
00490
00491
00492 if(nextFunctionValue
00493 < this->m_startValue + (this->m_alpha * lambdaValue * slope)) {
00494 return std::make_pair(nextSamplePoint, nextFunctionValue);
00495 }
00496
00497
00498 double lambdaHat;
00499 if(lambdaValue == 1.0) {
00500 lambdaHat =
00501 (-slope / (2.0 * (nextFunctionValue - this->m_startValue
00502 - slope)));
00503 } else {
00504
00505 double b0 = (nextFunctionValue - this->m_startValue
00506 - (lambdaValue * slope));
00507 double b1 = (previousNextFunctionValue - this->m_startValue
00508 - (previousLambdaValue * slope));
00509
00510 double deltaLambda = lambdaValue - previousLambdaValue;
00511 double a00 = 1.0 / (lambdaValue * lambdaValue * deltaLambda);
00512 double a01 = -1.0 / (previousLambdaValue * previousLambdaValue
00513 * deltaLambda);
00514 double a10 = -previousLambdaValue / (lambdaValue * lambdaValue
00515 * deltaLambda);
00516 double a11 = lambdaValue / (previousLambdaValue * previousLambdaValue
00517 * deltaLambda);
00518
00519 double ab0 = a00 * b0 + a01 * b1;
00520 double ab1 = a10 * b0 + a11 * b1;
00521
00522
00523 if(ab0 == 0.0) {
00524 if(ab1 == 0.0) {
00525 DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
00526 "Invalid value for internal variable ab1.");
00527 }
00528 lambdaHat = -slope / (2.0 * ab1);
00529 } else {
00530 double discriminant = ab1 * ab1 - (3.0 * ab0 * slope);
00531 if(discriminant < 0.0) {
00532 DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
00533 "Roundoff error, discriminant < 0.0");
00534 }
00535 lambdaHat = (-ab1 + std::sqrt(discriminant)) / (3.0 * ab0);
00536 }
00537 if(lambdaHat > (0.5 * lambdaValue)) {
00538 lambdaHat = 0.5 * lambdaValue;
00539 }
00540 }
00541 previousLambdaValue = lambdaValue;
00542 previousNextFunctionValue = nextFunctionValue;
00543 lambdaValue = std::max(lambdaHat, 0.1 * lambdaValue);
00544 }
00545 }
00546
00547 }
00548
00549 }
00550
00551 #endif // #ifndef _DLR_OPTIMIZERLINESEARCH_H_