optimizerLineSearch.h

#ifndef _DLR_OPTIMIZERLINESEARCH_H_
#define _DLR_OPTIMIZERLINESEARCH_H_

#include <dlrOptimization/optimizer.h>
#include <dlrOptimization/optimizerCommon.h>

namespace dlr {

  namespace optimization {

    template <class Functor>
    class OptimizerLineSearch
      : public Optimizer<Functor>
    {
    public:
      // Typedefs for convenience
      typedef typename Functor::argument_type argument_type;
      typedef typename Functor::result_type result_type;
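
      // Requirements on the Functor template argument, as inferred from
      // the member definitions below (the Optimizer<Functor> base class,
      // which is not shown in this file, may impose more): the nested
      // argument_type and result_type typedefs used above, a function
      // call operator accepting an argument_type, and a gradient()
      // member returning an argument_type, both used in setStartPoint().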

      OptimizerLineSearch();

      explicit OptimizerLineSearch(const Functor& functor);

      OptimizerLineSearch(const Functor& functor,
                          const argument_type& startPoint,
                          const argument_type& startGradient,
                          const result_type& startValue);

      OptimizerLineSearch(const OptimizerLineSearch& source);

      virtual
      ~OptimizerLineSearch();

      size_t
      getNumberOfFunctionCalls() {return this->m_functionCallCount;}

      virtual void
      setStartPoint(const argument_type& startPoint);

      virtual void
      setStartPoint(const argument_type& startPoint,
                    const result_type& startValue,
                    const argument_type& startGradient);

      virtual void
      setInitialStep(const argument_type& initialStep);

      void
      setParameters(double argumentTolerance=1.0e-7,
                    double alpha=1.0e-4,
                    double maximumStepMagnitude=100.0);
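      //
      // How run(), below, uses these parameters: argumentTolerance,
      // divided by a context-sensitive scale, sets the smallest step
      // fraction (lambda) the search will try before giving up; alpha
      // is the constant in the sufficient-decrease test
      //   f(x + lambda*step) < f(x) + alpha * lambda * (gradient . step);
      // and maximumStepMagnitude caps the length of the initial step.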

      OptimizerLineSearch&
      operator=(const OptimizerLineSearch& source);

    protected:

      void
      checkState() {
        // Warning(xxx): currently no state checking is implemented.
      }

      std::pair<typename Functor::argument_type, typename Functor::result_type>
      run();

      // Data members
      double m_alpha;
      double m_argumentTolerance;
      size_t m_functionCallCount;
      argument_type m_initialStep;
      double m_initialStepMagnitude;
      double m_maximumStepMagnitude;
      argument_type m_startGradient;
      argument_type m_startPoint;
      result_type m_startValue;

    }; // class OptimizerLineSearch

  } // namespace optimization

} // namespace dlr


/* ======= Declarations to maintain compatibility with legacy code. ======= */

namespace dlr {

  using optimization::OptimizerLineSearch;

} // namespace dlr
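
/* Example usage -- an illustrative sketch only.  "MyCostFunction" is a
 * hypothetical functor satisfying the requirements noted in the class
 * body above, and the optimum is recovered through the Optimizer<Functor>
 * base class interface, which is not shown in this file:
 *
 *   MyCostFunction functor;
 *   OptimizerLineSearch<MyCostFunction> optimizer(functor);
 *   optimizer.setStartPoint(x0);           // evaluates f(x0) and gradient
 *   optimizer.setInitialStep(direction);   // must point downhill
 *   // ...then obtain the optimum via the Optimizer base interface.
 */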


/*******************************************************************
 * Member function definitions follow.  This would be a .cpp file
 * if it weren't templated.
 *******************************************************************/

#include <algorithm>
#include <cmath>
#include <utility>
#include <dlrCommon/exception.h>
#include <dlrOptimization/optimizerCommon.h>

namespace dlr {

  namespace optimization {

    template <class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch()
      : Optimizer<Functor>(),
        m_alpha(0.0),
        m_argumentTolerance(0.0),
        m_functionCallCount(0),
        m_initialStep(),
        m_initialStepMagnitude(0.0),
        m_maximumStepMagnitude(0.0),
        m_startGradient(),
        m_startPoint(),
        m_startValue(0.0)
    {
      this->setParameters();
    }

    template <class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch(const Functor& functor)
      : Optimizer<Functor>(functor),
        m_alpha(0.0),
        m_argumentTolerance(0.0),
        m_functionCallCount(0),
        m_initialStep(),
        m_initialStepMagnitude(0.0),
        m_maximumStepMagnitude(0.0),
        m_startGradient(),
        m_startPoint(),
        m_startValue(0.0)
    {
      this->setParameters();
    }

    template<class Functor>
    OptimizerLineSearch<Functor>::
    OptimizerLineSearch(const OptimizerLineSearch& source)
      : Optimizer<Functor>(source),
        m_alpha(source.m_alpha),
        m_argumentTolerance(source.m_argumentTolerance),
        m_functionCallCount(source.m_functionCallCount),
        m_initialStep(source.m_initialStep.size()),
        m_initialStepMagnitude(source.m_initialStepMagnitude),
        m_maximumStepMagnitude(source.m_maximumStepMagnitude),
        m_startGradient(source.m_startGradient.size()),
        m_startPoint(source.m_startPoint.size()),
        m_startValue(source.m_startValue)
    {
      copyArgumentType(source.m_initialStep, this->m_initialStep);
      copyArgumentType(source.m_startGradient, this->m_startGradient);
      copyArgumentType(source.m_startPoint, this->m_startPoint);
    }

    template <class Functor>
    OptimizerLineSearch<Functor>::
    ~OptimizerLineSearch()
    {
      // Empty
    }

    template <class Functor>
    void OptimizerLineSearch<Functor>::
    setStartPoint(const argument_type& startPoint)
    {
      // Copy input argument.
      copyArgumentType(startPoint, this->m_startPoint);

      // Compute other starting info.  Note that we don't need to use
      // copyArgumentType(), because we have sole ownership of the
      // return value from m_functor.gradient().
      this->m_startValue = this->m_functor(startPoint);
      this->m_startGradient = this->m_functor.gradient(startPoint);

      // Reset the count of function calls.  We ignore the calls used
      // to initialize m_startValue, etc. above.
      this->m_functionCallCount = 0;

      // We've changed the starting point, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void
    OptimizerLineSearch<Functor>::
    setStartPoint(const argument_type& startPoint,
                  const result_type& startValue,
                  const argument_type& startGradient)
    {
      // Copy input arguments.
      copyArgumentType(startPoint, this->m_startPoint);
      this->m_startValue = startValue;
      copyArgumentType(startGradient, this->m_startGradient);

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the starting point, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void OptimizerLineSearch<Functor>::
    setInitialStep(const argument_type& initialStep)
    {
      // First, make sure this is a valid initialStep.
      double initialStepMagnitude =
        std::sqrt(dotArgumentType(initialStep, initialStep));
      if(initialStepMagnitude == 0.0) {
        DLR_THROW3(ValueException, "OptimizerLineSearch::setInitialStep()",
                   "initialStep magnitude is equal to 0.0.");
      }

      // OK, valid initialStep, save it for later.  Since we'll also
      // be needing the magnitude of initialStep, save it too.
      this->m_initialStepMagnitude = initialStepMagnitude;
      copyArgumentType(initialStep, this->m_initialStep);

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the line direction, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template<class Functor>
    void OptimizerLineSearch<Functor>::
    setParameters(double argumentTolerance,
                  double alpha,
                  double maximumStepMagnitude)
    {
      this->m_argumentTolerance = argumentTolerance;
      this->m_alpha = alpha;
      this->m_maximumStepMagnitude = maximumStepMagnitude;

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // We've changed the parameters, so we'll have to rerun the
      // optimization.  Indicate this by setting the inherited member
      // m_needsOptimization.
      this->m_needsOptimization = true;
    }

    template <class Functor>
    OptimizerLineSearch<Functor>& OptimizerLineSearch<Functor>::
    operator=(const OptimizerLineSearch& source)
    {
      // First, call the assignment operator of the parent class.
      Optimizer<Functor>::operator=(source);

      // Then copy all of the variables which are specific to
      // OptimizerLineSearch.
      this->m_alpha = source.m_alpha;
      this->m_argumentTolerance = source.m_argumentTolerance;
      this->m_functionCallCount = source.m_functionCallCount;
      this->m_initialStepMagnitude = source.m_initialStepMagnitude;
      this->m_maximumStepMagnitude = source.m_maximumStepMagnitude;
      this->m_startValue = source.m_startValue;
      copyArgumentType(source.m_initialStep, this->m_initialStep);
      copyArgumentType(source.m_startGradient, this->m_startGradient);
      copyArgumentType(source.m_startPoint, this->m_startPoint);
      return *this;
    }

    template <class Functor>
    std::pair<typename Functor::argument_type, typename Functor::result_type>
    OptimizerLineSearch<Functor>::
    run()
    {
      // Make sure all required starting values have been specified.
      this->checkState();

      // Reset the count of function calls.
      this->m_functionCallCount = 0;

      // Initialize a few variables.
      argument_type initialStep;
      copyArgumentType(this->m_initialStep, initialStep);
      double initialStepMagnitude = this->m_initialStepMagnitude;

      // Restrict length of initial step.
      if(this->m_initialStepMagnitude > this->m_maximumStepMagnitude) {
        initialStepMagnitude = this->m_maximumStepMagnitude;
        for(size_t index = 0; index < initialStep.size(); ++index) {
          initialStep[index] *=
            this->m_maximumStepMagnitude / this->m_initialStepMagnitude;
        }
      }

      // Compute the directional derivative of the objective along
      // initialStep (slope = startGradient . initialStep); the search
      // direction must be downhill.
      double slope = dotArgumentType(this->m_startGradient, initialStep);
      if(slope >= 0.0) {
        DLR_THROW3(StateException, "OptimizerLineSearch::run()",
                   "Initial search direction is not downhill.");
      }

      // Compute smallest allowable step, allowing for numerical issues.
      double scale = contextSensitiveScale(initialStep, this->m_startPoint);
      if(scale == 0.0) {
        DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                   "Invalid initial scale.  "
                   "Perhaps initialStep is the zero vector.");
      }
      double minimumLambda = this->m_argumentTolerance / scale;
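      // If lambda ever falls below minimumLambda, the step would change
      // the argument by less than argumentTolerance (relative to the
      // scale just computed), so the loop below returns the start point.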

      // Now do the search.
      double lambdaValue = 1.0;
      argument_type nextSamplePoint(initialStep.size());
      double previousLambdaValue = 1.0;
      double previousNextFunctionValue = this->m_startValue;
      while(1) {
        // Check first termination criterion.
        // Note: this differs from NRC in that NRC first computes
        // nextFunctionValue before doing this check, and consequently
        // returns it to the calling context if the following conditional
        // passes.
        if(lambdaValue < minimumLambda) {
          // In this case, root finding programs should double check
          // convergence!
          copyArgumentType(this->m_startPoint, nextSamplePoint);
          // Note: This return value differs from NRC, in that we return
          // startValue instead of the most recently computed value.
          // return std::make_pair(nextSamplePoint, nextFunctionValue);
          return std::make_pair(nextSamplePoint, this->m_startValue);
        }

        // Compute next place to evaluate function.
        for(size_t index = 0; index < initialStep.size(); ++index) {
          nextSamplePoint[index] =
            this->m_startPoint[index] + (lambdaValue * initialStep[index]);
        }

        // Do the function evaluation.
        result_type nextFunctionValue = this->m_functor(nextSamplePoint);
        ++(this->m_functionCallCount);

        // Check second termination criterion: the sufficient-decrease
        // test f(x + lambda*step) < f(x) + alpha * lambda * slope
        // (slope is negative, so this demands an actual decrease).
        if(nextFunctionValue
           < this->m_startValue + (this->m_alpha * lambdaValue * slope)) {
          return std::make_pair(nextSamplePoint, nextFunctionValue);
        }

        // Compute the next (smaller) value of lambda by interpolation:
        // the first backtrack minimizes a quadratic model of the
        // objective along the line; subsequent backtracks fit a cubic
        // through the two most recent samples, as in NRC's lnsrch.
        double lambdaHat;
        if(lambdaValue == 1.0) {
          lambdaHat =
            (-slope / (2.0 * (nextFunctionValue - this->m_startValue
                              - slope)));
        } else {
          // Build the right-hand-side vector for the cubic fit.
          double b0 = (nextFunctionValue - this->m_startValue
                       - (lambdaValue * slope));
          double b1 = (previousNextFunctionValue - this->m_startValue
                       - (previousLambdaValue * slope));
          // Build matrix.
          double deltaLambda = lambdaValue - previousLambdaValue;
          double a00 = 1.0 / (lambdaValue * lambdaValue * deltaLambda);
          double a01 = -1.0 / (previousLambdaValue * previousLambdaValue
                               * deltaLambda);
          double a10 = -previousLambdaValue / (lambdaValue * lambdaValue
                                               * deltaLambda);
          double a11 = lambdaValue / (previousLambdaValue * previousLambdaValue
                                      * deltaLambda);
          // Do matrix multiplication.
          double ab0 = a00 * b0 + a01 * b1;
          double ab1 = a10 * b0 + a11 * b1;
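
          // ab0 and ab1 are the coefficients a and b of the cubic model
          // a*lambda^3 + b*lambda^2 + slope*lambda + f(x0); lambdaHat,
          // computed below, is that cubic's local minimizer.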

          // Use result to choose next value of lambda.
          if(ab0 == 0.0) {
            if(ab1 == 0.0) {
              DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                         "Invalid value for internal variable ab1.");
            }
            lambdaHat = -slope / (2.0 * ab1);
          } else {
            double discriminant = ab1 * ab1 - (3.0 * ab0 * slope);
            if(discriminant < 0.0) {
              DLR_THROW3(RunTimeException, "OptimizerLineSearch::run()",
                         "Roundoff error, discriminant < 0.0");
            }
            lambdaHat = (-ab1 + std::sqrt(discriminant)) / (3.0 * ab0);
          }
          if(lambdaHat > (0.5 * lambdaValue)) {
            lambdaHat = 0.5 * lambdaValue;
          }
        }
        previousLambdaValue = lambdaValue;
        previousNextFunctionValue = nextFunctionValue;
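        // Don't let lambda shrink by more than a factor of ten in a
        // single iteration.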
        lambdaValue = std::max(lambdaHat, 0.1 * lambdaValue);
      }
    }

  } // namespace optimization

} // namespace dlr

#endif // #ifndef _DLR_OPTIMIZERLINESEARCH_H_
