      1 // Ceres Solver - A fast non-linear least squares minimizer
      2 // Copyright 2012 Google Inc. All rights reserved.
      3 // http://code.google.com/p/ceres-solver/
      4 //
      5 // Redistribution and use in source and binary forms, with or without
      6 // modification, are permitted provided that the following conditions are met:
      7 //
      8 // * Redistributions of source code must retain the above copyright notice,
      9 //   this list of conditions and the following disclaimer.
     10 // * Redistributions in binary form must reproduce the above copyright notice,
     11 //   this list of conditions and the following disclaimer in the documentation
     12 //   and/or other materials provided with the distribution.
     13 // * Neither the name of Google Inc. nor the names of its contributors may be
     14 //   used to endorse or promote products derived from this software without
     15 //   specific prior written permission.
     16 //
     17 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
     18 // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
     19 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
     20 // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
     21 // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     22 // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     23 // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     24 // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     25 // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     26 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     27 // POSSIBILITY OF SUCH DAMAGE.
     28 //
     29 // Author: sameeragarwal (at) google.com (Sameer Agarwal)
     30 
     31 #ifndef CERES_INTERNAL_DOGLEG_STRATEGY_H_
     32 #define CERES_INTERNAL_DOGLEG_STRATEGY_H_
     33 
     34 #include "ceres/linear_solver.h"
     35 #include "ceres/trust_region_strategy.h"
     36 
     37 namespace ceres {
     38 namespace internal {
     39 
// Dogleg step computation and trust region sizing strategy based on
// "Methods for Nonlinear Least Squares" by K. Madsen, H.B. Nielsen
     42 // and O. Tingleff. Available to download from
     43 //
     44 // http://www2.imm.dtu.dk/pubdb/views/edoc_download.php/3215/pdf/imm3215.pdf
     45 //
     46 // One minor modification is that instead of computing the pure
     47 // Gauss-Newton step, we compute a regularized version of it. This is
     48 // because the Jacobian is often rank-deficient and in such cases
     49 // using a direct solver leads to numerical failure.
     50 //
     51 // If SUBSPACE is passed as the type argument to the constructor, the
     52 // DoglegStrategy follows the approach by Shultz, Schnabel, Byrd.
     53 // This finds the exact optimum over the two-dimensional subspace
     54 // spanned by the two Dogleg vectors.
     55 class DoglegStrategy : public TrustRegionStrategy {
     56 public:
     57   DoglegStrategy(const TrustRegionStrategy::Options& options);
     58   virtual ~DoglegStrategy() {}
     59 
     60   // TrustRegionStrategy interface
     61   virtual Summary ComputeStep(const PerSolveOptions& per_solve_options,
     62                               SparseMatrix* jacobian,
     63                               const double* residuals,
     64                               double* step);
     65   virtual void StepAccepted(double step_quality);
     66   virtual void StepRejected(double step_quality);
     67   virtual void StepIsInvalid();
     68 
     69   virtual double Radius() const;
     70 
     71   // These functions are predominantly for testing.
     72   Vector gradient() const { return gradient_; }
     73   Vector gauss_newton_step() const { return gauss_newton_step_; }
     74   Matrix subspace_basis() const { return subspace_basis_; }
     75   Vector subspace_g() const { return subspace_g_; }
     76   Matrix subspace_B() const { return subspace_B_; }
     77 
     78  private:
     79   typedef Eigen::Matrix<double, 2, 1, Eigen::DontAlign> Vector2d;
     80   typedef Eigen::Matrix<double, 2, 2, Eigen::DontAlign> Matrix2d;
     81 
     82   LinearSolver::Summary ComputeGaussNewtonStep(SparseMatrix* jacobian,
     83                                                const double* residuals);
     84   void ComputeCauchyPoint(SparseMatrix* jacobian);
     85   void ComputeGradient(SparseMatrix* jacobian, const double* residuals);
     86   void ComputeTraditionalDoglegStep(double* step);
     87   bool ComputeSubspaceModel(SparseMatrix* jacobian);
     88   void ComputeSubspaceDoglegStep(double* step);
     89 
     90   bool FindMinimumOnTrustRegionBoundary(Vector2d* minimum) const;
     91   Vector MakePolynomialForBoundaryConstrainedProblem() const;
     92   Vector2d ComputeSubspaceStepFromRoot(double lambda) const;
     93   double EvaluateSubspaceModel(const Vector2d& x) const;
     94 
     95   LinearSolver* linear_solver_;
     96   double radius_;
     97   const double max_radius_;
     98 
     99   const double min_diagonal_;
    100   const double max_diagonal_;
    101 
    102   // mu is used to scale the diagonal matrix used to make the
    103   // Gauss-Newton solve full rank. In each solve, the strategy starts
    104   // out with mu = min_mu, and tries values upto max_mu. If the user
    105   // reports an invalid step, the value of mu_ is increased so that
    106   // the next solve starts with a stronger regularization.
    107   //
    108   // If a successful step is reported, then the value of mu_ is
    109   // decreased with a lower bound of min_mu_.
    110   double mu_;
    111   const double min_mu_;
    112   const double max_mu_;
    113   const double mu_increase_factor_;
    114   const double increase_threshold_;
    115   const double decrease_threshold_;
    116 
    117   Vector diagonal_;  // sqrt(diag(J^T J))
    118   Vector lm_diagonal_;
    119 
    120   Vector gradient_;
    121   Vector gauss_newton_step_;
    122 
    123   // cauchy_step = alpha * gradient
    124   double alpha_;
    125   double dogleg_step_norm_;
    126 
    127   // When, ComputeStep is called, reuse_ indicates whether the
    128   // Gauss-Newton and Cauchy steps from the last call to ComputeStep
    129   // can be reused or not.
    130   //
    131   // If the user called StepAccepted, then it is expected that the
    132   // user has recomputed the Jacobian matrix and new Gauss-Newton
    133   // solve is needed and reuse is set to false.
    134   //
    135   // If the user called StepRejected, then it is expected that the
    136   // user wants to solve the trust region problem with the same matrix
    137   // but a different trust region radius and the Gauss-Newton and
    138   // Cauchy steps can be reused to compute the Dogleg, thus reuse is
    139   // set to true.
    140   //
    141   // If the user called StepIsInvalid, then there was a numerical
    142   // problem with the step computed in the last call to ComputeStep,
    143   // and the regularization used to do the Gauss-Newton solve is
    144   // increased and a new solve should be done when ComputeStep is
    145   // called again, thus reuse is set to false.
    146   bool reuse_;
    147 
    148   // The dogleg type determines how the minimum of the local
    149   // quadratic model is found.
    150   DoglegType dogleg_type_;
    151 
    152   // If the type is SUBSPACE_DOGLEG, the two-dimensional
    153   // model 1/2 x^T B x + g^T x has to be computed and stored.
    154   bool subspace_is_one_dimensional_;
    155   Matrix subspace_basis_;
    156   Vector2d subspace_g_;
    157   Matrix2d subspace_B_;
    158 };
    159 
    160 }  // namespace internal
    161 }  // namespace ceres
    162 
    163 #endif  // CERES_INTERNAL_DOGLEG_STRATEGY_H_
    164