optimizerLineSearch.h

#ifndef _DLR_OPTIMIZERLINESEARCH_H_
#define _DLR_OPTIMIZERLINESEARCH_H_

#include <dlrOptimization/optimizer.h>
#include <dlrOptimization/optimizerCommon.h>

namespace dlr {

  namespace optimization {

    /**
     * This class implements a backtracking line search, patterned
     * after the algorithm in "Numerical Recipes in C" (referred to as
     * NRC in the comments below).  Given a starting point, the
     * function value and gradient at that point, and a search
     * direction, it looks along the search direction for a new point
     * at which the function value has sufficiently decreased.
     */
    template <class Functor>
    class OptimizerLineSearch
      : public Optimizer<Functor>
    {
    public:
      // Typedefs for convenience
      typedef typename Functor::argument_type argument_type;
      typedef typename Functor::result_type result_type;

      /**
       * Default constructor.
       */
      OptimizerLineSearch();

      /**
       * Constructor which specifies the objective function to be
       * minimized.
       */
      explicit OptimizerLineSearch(const Functor& functor);

      /**
       * Constructor which specifies the objective function, the
       * starting point of the search, and the gradient and function
       * value at the starting point.
       */
      OptimizerLineSearch(const Functor& functor,
                          const argument_type& startPoint,
                          const argument_type& startGradient,
                          const result_type& startValue);

      /**
       * Copy constructor.
       */
      OptimizerLineSearch(const OptimizerLineSearch& source);

      /**
       * Destructor.
       */
      virtual
      ~OptimizerLineSearch();

      /**
       * Returns the number of objective function evaluations performed
       * during the most recent line search.
       */
      size_t
      getNumberOfFunctionCalls() {return this->m_functionCallCount;}

      /**
       * Sets the starting point of the search.  The function value and
       * gradient at startPoint are computed internally.
       */
      virtual void
      setStartPoint(const argument_type& startPoint);

      /**
       * Sets the starting point of the search, along with externally
       * computed function value and gradient at that point, so that no
       * redundant function evaluations are required.
       */
      virtual void
      setStartPoint(const argument_type& startPoint,
                    const result_type& startValue,
                    const argument_type& startGradient);

      /**
       * Sets the direction, and nominal length, of the search step.
       */
      virtual void
      setInitialStep(const argument_type& initialStep);

      /**
       * Sets the parameters of the search: the convergence tolerance
       * on the argument, the sufficient-decrease constant alpha, and
       * the maximum permissible step magnitude.
       */
      void
      setParameters(double argumentTolerance=1.0e-7,
                    double alpha=1.0e-4,
                    double maximumStepMagnitude=100.0);

      /**
       * Assignment operator.
       */
      OptimizerLineSearch&
      operator=(const OptimizerLineSearch& source);

    protected:

      /**
       * Verifies that the required starting values have been set.
       */
      void
      checkState() {
        // Warning(xxx): currently no state checking is implemented.
      }

      /**
       * Performs the actual line search, returning the final point and
       * the function value at that point.
       */
      std::pair<typename Functor::argument_type, typename Functor::result_type>
      run();

      // Data members
      double m_alpha;
      double m_argumentTolerance;
      size_t m_functionCallCount;
      argument_type m_initialStep;
      double m_initialStepMagnitude;
      double m_maximumStepMagnitude;
      argument_type m_startGradient;
      argument_type m_startPoint;
      result_type m_startValue;

    }; // class OptimizerLineSearch

  } // namespace optimization

} // namespace dlr


/* ======= Declarations to maintain compatibility with legacy code. ======= */

namespace dlr {

  using optimization::OptimizerLineSearch;

} // namespace dlr
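

/* ======= Usage sketch (illustrative only). ======= */

// A minimal sketch of how this class might be used.  The Quadratic
// functor below is hypothetical, not part of the library; it assumes
// that std::vector<double> satisfies the requirements on argument_type
// (size(), operator[], construction from a size, and compatibility
// with the library's copyArgumentType() and dotArgumentType()
// helpers), and optimum() stands in for whatever public method of the
// base Optimizer class triggers the protected run() member function.
//
//   #include <utility>
//   #include <vector>
//   #include <dlrOptimization/optimizerLineSearch.h>
//
//   struct Quadratic {
//     typedef std::vector<double> argument_type;
//     typedef double result_type;
//
//     // f(x) = x . x, minimized at the origin.
//     result_type operator()(const argument_type& x) const {
//       double sum = 0.0;
//       for(std::size_t ii = 0; ii < x.size(); ++ii) {sum += x[ii] * x[ii];}
//       return sum;
//     }
//
//     // grad f(x) = 2x.
//     argument_type gradient(const argument_type& x) const {
//       argument_type grad(x.size());
//       for(std::size_t ii = 0; ii < x.size(); ++ii) {grad[ii] = 2.0 * x[ii];}
//       return grad;
//     }
//   };
//
//   Quadratic functor;
//   dlr::OptimizerLineSearch<Quadratic> optimizer(functor);
//
//   std::vector<double> startPoint(2, 1.0);    // Start at (1, 1).
//   optimizer.setStartPoint(startPoint);
//
//   std::vector<double> initialStep(2, -1.0);  // (-1, -1) is downhill:
//   optimizer.setInitialStep(initialStep);     // gradient . step == -4.
//
//   // Hypothetical accessor; see the note above.
//   std::pair<std::vector<double>, double> result = optimizer.optimum();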


/*******************************************************************
 * Member function definitions follow.  This would be a .cpp file
 * if it weren't templated.
 *******************************************************************/

#include <algorithm>
#include <cmath>
#include <sstream>
#include <utility>
#include <dlrCommon/exception.h>
#include <dlrOptimization/optimizerCommon.h>

namespace dlr {

  namespace optimization {

    template <class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch()
      : Optimizer<Functor>(),
        m_alpha(0.0),
        m_argumentTolerance(0.0),
        m_functionCallCount(0),
        m_initialStep(),
        m_initialStepMagnitude(0.0),
        m_maximumStepMagnitude(0.0),
        m_startGradient(),
        m_startPoint(),
        m_startValue(0.0)
    {
      this->setParameters();
    }

    template <class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch(const Functor& functor)
      : Optimizer<Functor>(functor),
        m_alpha(0.0),
        m_argumentTolerance(0.0),
        m_functionCallCount(0),
        m_initialStep(),
        m_initialStepMagnitude(0.0),
        m_maximumStepMagnitude(0.0),
        m_startGradient(),
        m_startPoint(),
        m_startValue(0.0)
    {
      this->setParameters();
    }
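
    // The three-argument constructor declared above has no definition
    // in this file.  What follows is a plausible sketch (an assumption,
    // not original library code): it forwards to setParameters() and to
    // the three-argument form of setStartPoint(), whose argument order
    // (startPoint, startValue, startGradient) differs from this
    // constructor's (startPoint, startGradient, startValue).
    template <class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch(const Functor& functor,
                        const argument_type& startPoint,
                        const argument_type& startGradient,
                        const result_type& startValue)
      : Optimizer<Functor>(functor),
        m_alpha(0.0),
        m_argumentTolerance(0.0),
        m_functionCallCount(0),
        m_initialStep(),
        m_initialStepMagnitude(0.0),
        m_maximumStepMagnitude(0.0),
        m_startGradient(),
        m_startPoint(),
        m_startValue(0.0)
    {
      this->setParameters();
      this->setStartPoint(startPoint, startValue, startGradient);
    }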

    template<class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch(const OptimizerLineSearch& source)
      : Optimizer<Functor>(source),
        m_alpha(source.m_alpha),
        m_argumentTolerance(source.m_argumentTolerance),
        m_functionCallCount(source.m_functionCallCount),
        m_initialStep(source.m_initialStep.size()),
        m_initialStepMagnitude(source.m_initialStepMagnitude),
        m_maximumStepMagnitude(source.m_maximumStepMagnitude),
        m_startGradient(source.m_startGradient.size()),
        m_startPoint(source.m_startPoint.size()),
        m_startValue(source.m_startValue)
    {
      copyArgumentType(source.m_initialStep, this->m_initialStep);
      copyArgumentType(source.m_startGradient, this->m_startGradient);
      copyArgumentType(source.m_startPoint, this->m_startPoint);
    }

    template <class Functor>
    OptimizerLineSearch<Functor>::
    ~OptimizerLineSearch()
    {
      // Empty
    }

    template <class Functor>
    void OptimizerLineSearch<Functor>::
    setStartPoint(const argument_type& startPoint)
    {
      // Copy input argument.
      copyArgumentType(startPoint, this->m_startPoint);

      // Compute other starting info.  Note that we don't need to use
      // copyArgumentType(), because we have sole ownership of the
      // return value from m_functor.gradient().
      this->m_startValue = this->m_functor(startPoint);
      this->m_startGradient = this->m_functor.gradient(startPoint);

      // Reset the count of function calls.  We ignore the calls used
      // to initialize m_startValue, etc. above.
      this->m_functionCallCount = 0;

      // We've changed the starting point, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void
    OptimizerLineSearch<Functor>::
    setStartPoint(const argument_type& startPoint,
                  const result_type& startValue,
                  const argument_type& startGradient)
    {
      // Copy input arguments.
      copyArgumentType(startPoint, this->m_startPoint);
      this->m_startValue = startValue;
      copyArgumentType(startGradient, this->m_startGradient);

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the starting point, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void OptimizerLineSearch<Functor>::
    setInitialStep(const argument_type& initialStep)
    {
      // First, make sure this is a valid initialStep.
      double initialStepMagnitude =
        std::sqrt(dotArgumentType(initialStep, initialStep));
      if(initialStepMagnitude == 0.0) {
        DLR_THROW3(ValueException, "OptimizerLineSearch::setInitialStep()",
                   "initialStep magnitude is equal to 0.0.");
      }

      // OK, valid initialStep, save it for later.  Since we'll also
      // be needing the magnitude of initialStep, save it too.
      this->m_initialStepMagnitude = initialStepMagnitude;
      copyArgumentType(initialStep, this->m_initialStep);

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the line direction, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void OptimizerLineSearch<Functor>::
    setParameters(double argumentTolerance,
                  double alpha,
                  double maximumStepMagnitude)
    {
      this->m_argumentTolerance = argumentTolerance;
      this->m_alpha = alpha;
      this->m_maximumStepMagnitude = maximumStepMagnitude;

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the parameters, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template <class Functor>
    OptimizerLineSearch<Functor>& OptimizerLineSearch<Functor>::
    operator=(const OptimizerLineSearch& source)
    {
      // First, call the assignment operator of the parent class.
      Optimizer<Functor>::operator=(source);

      // Then copy all of the variables which are specific to
      // OptimizerLineSearch.
      this->m_alpha = source.m_alpha;
      this->m_argumentTolerance = source.m_argumentTolerance;
      this->m_functionCallCount = source.m_functionCallCount;
      this->m_initialStepMagnitude = source.m_initialStepMagnitude;
      this->m_maximumStepMagnitude = source.m_maximumStepMagnitude;
      this->m_startValue = source.m_startValue;
      copyArgumentType(source.m_initialStep, this->m_initialStep);
      copyArgumentType(source.m_startGradient, this->m_startGradient);
      copyArgumentType(source.m_startPoint, this->m_startPoint);
      return *this;
    }

    template <class Functor>
    std::pair<typename Functor::argument_type, typename Functor::result_type>
    OptimizerLineSearch<Functor>::
    run()
    {
      // Make sure all required starting values have been specified.
      this->checkState();

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // Initialize a few variables.
      argument_type initialStep;
      copyArgumentType(this->m_initialStep, initialStep);
      double initialStepMagnitude = this->m_initialStepMagnitude;

      // Restrict length of initial step.
      if(this->m_initialStepMagnitude > this->m_maximumStepMagnitude) {
        initialStepMagnitude = this->m_maximumStepMagnitude;
        for(size_t index = 0; index < initialStep.size(); ++index) {
          initialStep[index] *=
            this->m_maximumStepMagnitude / this->m_initialStepMagnitude;
        }
      }

      // Compute the directional derivative of the objective function
      // along initialStep.  A valid descent direction must have a
      // negative slope.
      double slope = dotArgumentType(this->m_startGradient, initialStep);
      if(slope >= 0.0) {
        std::ostringstream message;
        message << "Initial search direction: " << initialStep << " "
                << "is not downhill with respect to initial gradient: "
                << this->m_startGradient << ".";
        DLR_THROW3(StateException, "OptimizerLineSearch::run()",
                   message.str().c_str());
      }

      // Compute smallest allowable step, allowing for numerical issues.
      double scale = contextSensitiveScale(initialStep, this->m_startPoint);
      if(scale == 0.0) {
        DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                   "Invalid initial scale.  "
                   "Perhaps initialStep is the zero vector.");
      }
      double minimumLambda = this->m_argumentTolerance / scale;
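      // Any lambda below minimumLambda corresponds to a step that is
      // negligible at the scale of m_startPoint (as measured by the
      // library's contextSensitiveScale() helper), so the loop below
      // treats such values as convergence in the argument.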

      // Now do the search.
      double lambdaValue = 1.0;
      argument_type nextSamplePoint(initialStep.size());
      double previousLambdaValue = 1.0;
      double previousNextFunctionValue = this->m_startValue;
      while(true) {
        // Check first termination criterion.
        // Note: this differs from NRC in that NRC first computes
        // nextFunctionValue before doing this check, and consequently
        // returns it to the calling context if the following conditional
        // passes.
        if(lambdaValue < minimumLambda) {
          // In this case, root finding programs should double check
          // convergence!
          copyArgumentType(this->m_startPoint, nextSamplePoint);
          // Note: This return value differs from NRC, in that we return
          // startValue instead of the most recently computed value.
          // return std::make_pair(nextSamplePoint, nextFunctionValue);
          return std::make_pair(nextSamplePoint, this->m_startValue);
        }

        // Compute next place to evaluate function.
        for(size_t index = 0; index < initialStep.size(); ++index) {
          nextSamplePoint[index] =
            this->m_startPoint[index] + (lambdaValue * initialStep[index]);
        }

        // Do the function evaluation.
        result_type nextFunctionValue = this->m_functor(nextSamplePoint);
        ++(this->m_functionCallCount);

        // Check second termination criterion: the sufficient-decrease
        // (Armijo) condition, f(x0 + lambda * p) < f(x0)
        // + alpha * lambda * (gradient . p), where p is initialStep.
        if(nextFunctionValue
           < this->m_startValue + (this->m_alpha * lambdaValue * slope)) {
          return std::make_pair(nextSamplePoint, nextFunctionValue);
        }

        // Compute next value of lambda.
        double lambdaHat;
        if(lambdaValue == 1.0) {
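          // First backtrack: fit a quadratic model,
          //   g(lambda) = (f1 - f0 - slope) * lambda^2 + slope * lambda + f0,
          // through f0 = m_startValue, the slope at lambda == 0, and the
          // newly computed value f1 at lambda == 1, then jump to the
          // model's minimizer, -slope / (2 * (f1 - f0 - slope)).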
          lambdaHat =
            (-slope / (2.0 * (nextFunctionValue - this->m_startValue
                              - slope)));
        } else {
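          // Subsequent backtracks: fit a cubic model,
          //   g(lambda) = a * lambda^3 + b * lambda^2 + slope * lambda + f0,
          // through the two most recent samples.  The 2x2 linear solve
          // below recovers the coefficients [a, b] as [ab0, ab1], and
          // lambdaHat is then set to the minimizer of the cubic.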
          // Build right-hand-side vector.
          double b0 = (nextFunctionValue - this->m_startValue
                       - (lambdaValue * slope));
          double b1 = (previousNextFunctionValue - this->m_startValue
                       - (previousLambdaValue * slope));
          // Build (inverted) coefficient matrix.
          double deltaLambda = lambdaValue - previousLambdaValue;
          double a00 = 1.0 / (lambdaValue * lambdaValue * deltaLambda);
          double a01 = -1.0 / (previousLambdaValue * previousLambdaValue
                               * deltaLambda);
          double a10 = -previousLambdaValue / (lambdaValue * lambdaValue
                                               * deltaLambda);
          double a11 = lambdaValue / (previousLambdaValue * previousLambdaValue
                                      * deltaLambda);
          // Do matrix multiplication.
          double ab0 = a00 * b0 + a01 * b1;
          double ab1 = a10 * b0 + a11 * b1;

          // Use result to choose next value of lambda.
          if(ab0 == 0.0) {
            if(ab1 == 0.0) {
              DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                         "Invalid value for internal variable ab1.");
            }
            // The cubic term vanished; minimize the quadratic instead.
            lambdaHat = -slope / (2.0 * ab1);
          } else {
            double discriminant = ab1 * ab1 - (3.0 * ab0 * slope);
            if(discriminant < 0.0) {
              DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                         "Roundoff error, discriminant < 0.0");
            }
            lambdaHat = (-ab1 + std::sqrt(discriminant)) / (3.0 * ab0);
          }
          // Clamp the new lambda to at most half the previous value...
          if(lambdaHat > (0.5 * lambdaValue)) {
            lambdaHat = 0.5 * lambdaValue;
          }
        }
        previousLambdaValue = lambdaValue;
        previousNextFunctionValue = nextFunctionValue;
        // ...and to at least a tenth of the previous value.
        lambdaValue = std::max(lambdaHat, 0.1 * lambdaValue);
      }
    }

  } // namespace optimization

} // namespace dlr

#endif // #ifndef _DLR_OPTIMIZERLINESEARCH_H_
