// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)
//
// Tests shared across evaluators. The tests try all combinations of linear
// solver and num_eliminate_blocks (for Schur-based solvers).

#include "ceres/evaluator.h"

#include "ceres/casts.h"
#include "ceres/cost_function.h"
#include "ceres/crs_matrix.h"
#include "ceres/evaluator_test_utils.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/local_parameterization.h"
#include "ceres/problem_impl.h"
#include "ceres/program.h"
#include "ceres/sized_cost_function.h"
#include "ceres/sparse_matrix.h"
#include "ceres/types.h"
#include "gtest/gtest.h"

namespace ceres {
namespace internal {

// TODO(keir): Consider pushing this into a common test utils file.
template<int kFactor, int kNumResiduals,
         int N0 = 0, int N1 = 0, int N2 = 0, bool kSucceeds = true>
class ParameterIgnoringCostFunction
    : public SizedCostFunction<kNumResiduals, N0, N1, N2> {
  typedef SizedCostFunction<kNumResiduals, N0, N1, N2> Base;
 public:
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    for (int i = 0; i < Base::num_residuals(); ++i) {
      residuals[i] = i + 1;
    }
    if (jacobians) {
      for (int k = 0; k < Base::parameter_block_sizes().size(); ++k) {
        // The jacobians here are full sized, but they are transformed in the
        // evaluator into the "local" jacobian. In the tests, the "subset
        // constant" parameterization is used, which should pick out columns
        // from these jacobians. Put values in the jacobian that make this
        // obvious; in particular, make the jacobians like this:
        //
        //   1 2 3 4 ...
        //   1 2 3 4 ...   .*  kFactor
        //   1 2 3 4 ...
        //
        // where the multiplication by kFactor makes it easier to distinguish
        // between Jacobians of different residuals for the same parameter.
        if (jacobians[k] != NULL) {
          MatrixRef jacobian(jacobians[k],
                             Base::num_residuals(),
                             Base::parameter_block_sizes()[k]);
          for (int j = 0; j < Base::parameter_block_sizes()[k]; ++j) {
            jacobian.col(j).setConstant(kFactor * (j + 1));
          }
        }
      }
    }
    return kSucceeds;
  }
};

struct EvaluatorTest
    : public ::testing::TestWithParam<pair<LinearSolverType, int> > {
  Evaluator* CreateEvaluator(Program* program) {
    // This program is straight from the ProblemImpl, and so has no index/offset
    // yet; compute it here as required by the evaluator implementations.
    program->SetParameterOffsetsAndIndex();

    VLOG(1) << "Creating evaluator with type: " << GetParam().first
            << " and num_eliminate_blocks: " << GetParam().second;
    Evaluator::Options options;
    options.linear_solver_type = GetParam().first;
    options.num_eliminate_blocks = GetParam().second;
    string error;
    return Evaluator::Create(options, program, &error);
  }

  void EvaluateAndCompare(ProblemImpl *problem,
                          int expected_num_rows,
                          int expected_num_cols,
                          double expected_cost,
                          const double* expected_residuals,
                          const double* expected_gradient,
                          const double* expected_jacobian) {
    scoped_ptr<Evaluator> evaluator(
        CreateEvaluator(problem->mutable_program()));
    int num_residuals = expected_num_rows;
    int num_parameters = expected_num_cols;

    double cost = -1;

    Vector residuals(num_residuals);
    residuals.setConstant(-2000);

    Vector gradient(num_parameters);
    gradient.setConstant(-3000);

    scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

    ASSERT_EQ(expected_num_rows, evaluator->NumResiduals());
    ASSERT_EQ(expected_num_cols, evaluator->NumEffectiveParameters());
    ASSERT_EQ(expected_num_rows, jacobian->num_rows());
    ASSERT_EQ(expected_num_cols, jacobian->num_cols());

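    // Note: the state vector below is sized with NumParameters() (the ambient
    // parameter size), while the gradient and the Jacobian columns use
    // NumEffectiveParameters() (the total size once local parameterizations
    // are taken into account).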
    vector<double> state(evaluator->NumParameters());

    ASSERT_TRUE(evaluator->Evaluate(
          &state[0],
          &cost,
          expected_residuals != NULL ? &residuals[0]  : NULL,
          expected_gradient  != NULL ? &gradient[0]   : NULL,
          expected_jacobian  != NULL ? jacobian.get() : NULL));

    Matrix actual_jacobian;
    if (expected_jacobian != NULL) {
      jacobian->ToDenseMatrix(&actual_jacobian);
    }

    CompareEvaluations(expected_num_rows,
                       expected_num_cols,
                       expected_cost,
                       expected_residuals,
                       expected_gradient,
                       expected_jacobian,
                       cost,
                       &residuals[0],
                       &gradient[0],
                       actual_jacobian.data());
  }

  // Try all eight combinations of requesting (or not requesting) the
  // residuals, gradient, and Jacobian from the evaluator.
  void CheckAllEvaluationCombinations(const ExpectedEvaluation &expected) {
    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(&problem,
                         expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : NULL,
                         (i & 2) ? expected.gradient  : NULL,
                         (i & 4) ? expected.jacobian  : NULL);
    }
  }

  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  ProblemImpl problem;
};

void SetSparseMatrixConstant(SparseMatrix* sparse_matrix, double value) {
  VectorRef(sparse_matrix->mutable_values(),
            sparse_matrix->num_nonzeros()).setConstant(value);
}

TEST_P(EvaluatorTest, SingleResidualProblem) {
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

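  // The expected values below follow directly from the cost function: the
  // residuals are (1, 2, 3), so cost = 0.5 * (1 + 4 + 9) = 7, and the gradient
  // is J^T * r, where column j of the Jacobian is constant at kFactor * (j + 1).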
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithPermutedParameters) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // Then use a cost function which is similar to the others, but swap around
  // the ordering of the parameters to the cost function. This shouldn't affect
  // the jacobian evaluation, but requires explicit handling in the evaluators.
  // At one point the compressed row evaluator had a bug that went undetected
  // for a long time, since by chance most users added parameters to the problem
  // in the same order that they occurred as parameters to a cost function.
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 4, 3, 2>,
                           NULL,
                           z, y, x);

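  // The expected results are identical to those in SingleResidualProblem: even
  // though the cost function sees its arguments as (z, y, x), the evaluator
  // maps each block's Jacobian back to the program ordering (x, y, z).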
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithNuisanceParameters) {
  // These parameters are not used.
  double a[2];
  double b[1];
  double c[1];
  double d[3];

  // Add the parameters in a mixed order so the Jacobian is "checkered" with the
  // values from the other parameters.
  problem.AddParameterBlock(a, 2);
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(b, 1);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(c, 1);
  problem.AddParameterBlock(z, 4);
  problem.AddParameterBlock(d, 3);

  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

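  // The nuisance blocks a, b, c and d appear in no residual block, so they
  // contribute all-zero Jacobian columns and zero gradient entries; only the
  // columns for x, y and z carry the usual pattern.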
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 16,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 0.0, 0.0,               // a
      6.0, 12.0,              // x
      0.0,                    // b
      6.0, 12.0, 18.0,        // y
      0.0,                    // c
      6.0, 12.0, 18.0, 24.0,  // z
      0.0, 0.0, 0.0,          // d
    },
    // Jacobian
    //   a        x     b           y     c              z           d
    { 0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblem) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

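  // The gradient is the sum of J^T * r over the three residual blocks. For
  // example, the first entry (for x[0]) gets (1 + 2) from f plus 2 * (1 + 2 + 3)
  // from g, i.e. 3 + 12 = 15.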
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 9,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,               // x
      33.0, 66.0, 99.0,         // y
      42.0, 84.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    1, 2, 3,    0, 0, 0, 0,
                      1, 2,    1, 2, 3,    0, 0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,

        /* h(y, z) */ 0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualsWithLocalParameterizations) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);

  // Fix y's first dimension.
  vector<int> y_fixed;
  y_fixed.push_back(0);
  problem.AddParameterBlock(y, 3, new SubsetParameterization(3, y_fixed));

  // Fix z's second dimension.
  vector<int> z_fixed;
  z_fixed.push_back(1);
  problem.AddParameterBlock(z, 4, new SubsetParameterization(4, z_fixed));

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

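  // The subset parameterizations drop y[0] and z[1], so the effective
  // parameter count shrinks from 9 to 7: the corresponding Jacobian columns
  // and gradient entries simply disappear from the expected values below.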
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 7,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,         // x
      66.0, 99.0,         // y
      42.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    2, 3,    0, 0, 0,
                      1, 2,    2, 3,    0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,

        /* h(y, z) */ 0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblemWithSomeConstantParameters) {
  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

  // For this test, "z" is constant.
  problem.SetParameterBlockConstant(z);

  // Create the reduced program which is missing the fixed "z" variable.
  // Normally, the preprocessing of the program that happens in solver_impl
  // takes care of this, but we don't want to invoke the solver here.
  Program reduced_program;
  vector<ParameterBlock*>* parameter_blocks =
      problem.mutable_program()->mutable_parameter_blocks();

  // "z" is the last parameter; save it for later and pop it off temporarily.
  // Note that "z" will still get read during evaluation, so it cannot be
  // deleted at this point.
  ParameterBlock* parameter_block_z = parameter_blocks->back();
  parameter_blocks->pop_back();

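  // With z popped off the program, the evaluator sees only 5 effective
  // parameters (2 for x plus 3 for y); z's Jacobian columns and gradient
  // entries vanish, although the g and h residual blocks still read z's
  // (constant) values.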
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 5,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,        // x
      33.0, 66.0, 99.0,  // y
    },
    // Jacobian
    //                x        y
    {   /* f(x, y) */ 1, 2,    1, 2, 3,
                      1, 2,    1, 2, 3,

        /* g(x, z) */ 2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,

        /* h(y, z) */ 0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9
    }
  };
  CheckAllEvaluationCombinations(expected);

  // Restore parameter block z, so it will get freed in a consistent way.
  parameter_blocks->push_back(parameter_block_z);
}

TEST_P(EvaluatorTest, EvaluatorAbortsForResidualsThatFailToEvaluate) {
  // Switch the return value to failure.
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<20, 3, 2, 3, 4, false>, NULL, x, y, z);

  // The values are ignored.
  double state[9];

  scoped_ptr<Evaluator> evaluator(CreateEvaluator(problem.mutable_program()));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());
  double cost;
  EXPECT_FALSE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
}

// In the pairs, the first argument is the linear solver type, and the second
// argument is num_eliminate_blocks. Changing the num_eliminate_blocks only
// makes sense for the Schur-based solvers.
//
// Try all values of num_eliminate_blocks that make sense given that in the
// tests a maximum of 4 parameter blocks are present.
INSTANTIATE_TEST_CASE_P(
    LinearSolvers,
    EvaluatorTest,
    ::testing::Values(make_pair(DENSE_QR, 0),
                      make_pair(DENSE_SCHUR, 0),
                      make_pair(DENSE_SCHUR, 1),
                      make_pair(DENSE_SCHUR, 2),
                      make_pair(DENSE_SCHUR, 3),
                      make_pair(DENSE_SCHUR, 4),
                      make_pair(SPARSE_SCHUR, 0),
                      make_pair(SPARSE_SCHUR, 1),
                      make_pair(SPARSE_SCHUR, 2),
                      make_pair(SPARSE_SCHUR, 3),
                      make_pair(SPARSE_SCHUR, 4),
                      make_pair(ITERATIVE_SCHUR, 0),
                      make_pair(ITERATIVE_SCHUR, 1),
                      make_pair(ITERATIVE_SCHUR, 2),
                      make_pair(ITERATIVE_SCHUR, 3),
                      make_pair(ITERATIVE_SCHUR, 4),
                      make_pair(SPARSE_NORMAL_CHOLESKY, 0)));

// Simple cost function used to check if the evaluator is sensitive to
// state changes.
class ParameterSensitiveCostFunction : public SizedCostFunction<2, 2> {
 public:
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    double x1 = parameters[0][0];
    double x2 = parameters[0][1];
    residuals[0] = x1 * x1;
    residuals[1] = x2 * x2;

    if (jacobians != NULL) {
      double* jacobian = jacobians[0];
      if (jacobian != NULL) {
        jacobian[0] = 2.0 * x1;
        jacobian[1] = 0.0;
        jacobian[2] = 0.0;
        jacobian[3] = 2.0 * x2;
      }
    }
    return true;
  }
};

TEST(Evaluator, EvaluatorRespectsParameterChanges) {
  ProblemImpl problem;

  double x[2];
  x[0] = 1.0;
  x[1] = 1.0;

  problem.AddResidualBlock(new ParameterSensitiveCostFunction(), NULL, x);
  Program* program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();

  Evaluator::Options options;
  options.linear_solver_type = DENSE_QR;
  options.num_eliminate_blocks = 0;
  string error;
  scoped_ptr<Evaluator> evaluator(Evaluator::Create(options, program, &error));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

  ASSERT_EQ(2, jacobian->num_rows());
  ASSERT_EQ(2, jacobian->num_cols());

  double state[2];
  state[0] = 2.0;
  state[1] = 3.0;

  // The original state of a residual block comes from the user's
  // state. So the original state is 1.0, 1.0, and the only way we get
  // the 2.0, 3.0 results in the following tests is if it respects the
  // values in the state vector.

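  // With state = (2, 3) the residuals are (x1^2, x2^2) = (4, 9), so the
  // expected cost below is 0.5 * (4^2 + 9^2) = 0.5 * (16 + 81) = 48.5.
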
  // Cost only; no residuals and no jacobian.
  {
    double cost = -1;
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
    EXPECT_EQ(48.5, cost);
  }

  // Cost and residuals, no jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, residuals, NULL, NULL));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
  }

  // Cost, residuals, and jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    SetSparseMatrixConstant(jacobian.get(), -1);
    ASSERT_TRUE(evaluator->Evaluate(state,
                                    &cost,
                                    residuals,
                                    NULL,
                                    jacobian.get()));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
    Matrix actual_jacobian;
    jacobian->ToDenseMatrix(&actual_jacobian);

    Matrix expected_jacobian(2, 2);
    expected_jacobian
        << 2 * state[0], 0,
           0, 2 * state[1];

    EXPECT_TRUE((actual_jacobian.array() == expected_jacobian.array()).all())
        << "Actual:\n" << actual_jacobian
        << "\nExpected:\n" << expected_jacobian;
  }
}

}  // namespace internal
}  // namespace ceres