/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"
#include "opencv2/calib3d/calib3d_c.h"
#include "opencv2/core/cvdef.h"

using namespace cv;
using namespace cv::detail;

namespace {

struct IncDistance
{
    IncDistance(std::vector<int> &vdists) : dists(&vdists[0]) {}
    void operator ()(const GraphEdge &edge) { dists[edge.to] = dists[edge.from] + 1; }
    int* dists;
};


struct CalcRotation
{
    CalcRotation(int _num_images, const std::vector<MatchesInfo> &_pairwise_matches, std::vector<CameraParams> &_cameras)
        : num_images(_num_images), pairwise_matches(&_pairwise_matches[0]), cameras(&_cameras[0]) {}

    void operator ()(const GraphEdge &edge)
    {
        int pair_idx = edge.from * num_images + edge.to;

        Mat_<double> K_from = Mat::eye(3, 3, CV_64F);
        K_from(0,0) = cameras[edge.from].focal;
        K_from(1,1) = cameras[edge.from].focal * cameras[edge.from].aspect;
        K_from(0,2) = cameras[edge.from].ppx;
        K_from(1,2) = cameras[edge.from].ppy;

        Mat_<double> K_to = Mat::eye(3, 3, CV_64F);
        K_to(0,0) = cameras[edge.to].focal;
        K_to(1,1) = cameras[edge.to].focal * cameras[edge.to].aspect;
        K_to(0,2) = cameras[edge.to].ppx;
        K_to(1,2) = cameras[edge.to].ppy;

        Mat R = K_from.inv() * pairwise_matches[pair_idx].H.inv() * K_to;
        cameras[edge.to].R = cameras[edge.from].R * R;
    }

    int num_images;
    const MatchesInfo* pairwise_matches;
    CameraParams* cameras;
};
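
// Note on CalcRotation (a sketch of the relation it relies on, consistent with the
// reprojection model used in BundleAdjusterReproj::calcError below): for two purely
// rotating cameras the homography that maps image `from` onto image `to` satisfies,
// up to scale,
//
//     H ~ K_to * R_to^(-1) * R_from * K_from^(-1),
//
// hence K_from^(-1) * H^(-1) * K_to = R_from^(-1) * R_to, and pre-multiplying by the
// already known cameras[edge.from].R yields cameras[edge.to].R.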


//////////////////////////////////////////////////////////////////////////////

void calcDeriv(const Mat &err1, const Mat &err2, double h, Mat res)
{
    for (int i = 0; i < err1.rows; ++i)
        res.at<double>(i, 0) = (err2.at<double>(i, 0) - err1.at<double>(i, 0)) / h;
}

} // namespace


namespace cv {
namespace detail {

bool HomographyBasedEstimator::estimate(
        const std::vector<ImageFeatures> &features,
        const std::vector<MatchesInfo> &pairwise_matches,
        std::vector<CameraParams> &cameras)
{
    LOGLN("Estimating rotations...");
#if ENABLE_LOG
    int64 t = getTickCount();
#endif

    const int num_images = static_cast<int>(features.size());

#if 0
    // Robustly estimate focal length from rotating cameras
    std::vector<Mat> Hs;
    for (int iter = 0; iter < 100; ++iter)
    {
        int len = 2 + rand()%(pairwise_matches.size() - 1);
        std::vector<int> subset;
        selectRandomSubset(len, pairwise_matches.size(), subset);
        Hs.clear();
        for (size_t i = 0; i < subset.size(); ++i)
            if (!pairwise_matches[subset[i]].H.empty())
                Hs.push_back(pairwise_matches[subset[i]].H);
        Mat_<double> K;
        if (Hs.size() >= 2)
        {
            if (calibrateRotatingCamera(Hs, K))
                cin.get();
        }
    }
#endif

    if (!is_focals_estimated_)
    {
        // Estimate focal length and set it for all cameras
        std::vector<double> focals;
        estimateFocal(features, pairwise_matches, focals);
        cameras.assign(num_images, CameraParams());
        for (int i = 0; i < num_images; ++i)
            cameras[i].focal = focals[i];
    }
    else
    {
        for (int i = 0; i < num_images; ++i)
        {
            cameras[i].ppx -= 0.5 * features[i].img_size.width;
            cameras[i].ppy -= 0.5 * features[i].img_size.height;
        }
    }

    // Restore global motion
    Graph span_tree;
    std::vector<int> span_tree_centers;
    findMaxSpanningTree(num_images, pairwise_matches, span_tree, span_tree_centers);
    span_tree.walkBreadthFirst(span_tree_centers[0], CalcRotation(num_images, pairwise_matches, cameras));

    // Calculations above were performed under the assumption that the principal point
    // is at the image center, so move it back
    for (int i = 0; i < num_images; ++i)
    {
        cameras[i].ppx += 0.5 * features[i].img_size.width;
        cameras[i].ppy += 0.5 * features[i].img_size.height;
    }

    LOGLN("Estimating rotations, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
    return true;
}
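
// Usage sketch (illustrative only; assumes `features` and `pairwise_matches` were
// produced beforehand by a detail::FeaturesMatcher):
//
//     std::vector<detail::CameraParams> cameras;
//     detail::HomographyBasedEstimator estimator;
//     if (!estimator(features, pairwise_matches, cameras))
//         return;
//     // cameras[i].focal and cameras[i].R now hold the initial guesses that the
//     // bundle adjusters below refine.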


//////////////////////////////////////////////////////////////////////////////

bool BundleAdjusterBase::estimate(const std::vector<ImageFeatures> &features,
                                  const std::vector<MatchesInfo> &pairwise_matches,
                                  std::vector<CameraParams> &cameras)
{
    LOG_CHAT("Bundle adjustment");
#if ENABLE_LOG
    int64 t = getTickCount();
#endif

    num_images_ = static_cast<int>(features.size());
    features_ = &features[0];
    pairwise_matches_ = &pairwise_matches[0];

    setUpInitialCameraParams(cameras);

    // Leave only consistent image pairs
    edges_.clear();
    for (int i = 0; i < num_images_ - 1; ++i)
    {
        for (int j = i + 1; j < num_images_; ++j)
        {
            const MatchesInfo& matches_info = pairwise_matches_[i * num_images_ + j];
            if (matches_info.confidence > conf_thresh_)
                edges_.push_back(std::make_pair(i, j));
        }
    }

    // Compute number of correspondences
    total_num_matches_ = 0;
    for (size_t i = 0; i < edges_.size(); ++i)
        total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ +
                                                                edges_[i].second].num_inliers);

    CvLevMarq solver(num_images_ * num_params_per_cam_,
                     total_num_matches_ * num_errs_per_measurement_,
                     term_criteria_);

    Mat err, jac;
    CvMat matParams = cam_params_;
    cvCopy(&matParams, solver.param);

    int iter = 0;
    for(;;)
    {
        const CvMat* _param = 0;
        CvMat* _jac = 0;
        CvMat* _err = 0;

        bool proceed = solver.update(_param, _jac, _err);

        cvCopy(_param, &matParams);

        if (!proceed || !_err)
            break;

        if (_jac)
        {
            calcJacobian(jac);
            CvMat tmp = jac;
            cvCopy(&tmp, _jac);
        }

        if (_err)
        {
            calcError(err);
            LOG_CHAT(".");
            iter++;
            CvMat tmp = err;
            cvCopy(&tmp, _err);
        }
    }

    LOGLN_CHAT("");
    LOGLN_CHAT("Bundle adjustment, final RMS error: " << std::sqrt(err.dot(err) / total_num_matches_));
    LOGLN_CHAT("Bundle adjustment, iterations done: " << iter);

    // Check if all camera parameters are valid
    bool ok = true;
    for (int i = 0; i < cam_params_.rows; ++i)
    {
        if (cvIsNaN(cam_params_.at<double>(i,0)))
        {
            ok = false;
            break;
        }
    }
    if (!ok)
        return false;

    obtainRefinedCameraParams(cameras);

    // Normalize motion to center image
    Graph span_tree;
    std::vector<int> span_tree_centers;
    findMaxSpanningTree(num_images_, pairwise_matches, span_tree, span_tree_centers);
    Mat R_inv = cameras[span_tree_centers[0]].R.inv();
    for (int i = 0; i < num_images_; ++i)
        cameras[i].R = R_inv * cameras[i].R;

    LOGLN_CHAT("Bundle adjustment, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
    return true;
}
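
// Usage sketch (illustrative only; it mirrors how the stitching samples drive a
// bundle adjuster -- the local variable names are hypothetical):
//
//     Ptr<detail::BundleAdjusterBase> adjuster = makePtr<detail::BundleAdjusterRay>();
//     adjuster->setConfThresh(1.0);                 // keep pairs with confidence > 1.0
//     Mat_<uchar> refine_mask = Mat::zeros(3, 3, CV_8U);
//     refine_mask(0,0) = 1;                         // refine focal length only
//     adjuster->setRefinementMask(refine_mask);
//     if (!(*adjuster)(features, pairwise_matches, cameras))
//         return;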


//////////////////////////////////////////////////////////////////////////////

void BundleAdjusterReproj::setUpInitialCameraParams(const std::vector<CameraParams> &cameras)
{
    cam_params_.create(num_images_ * 7, 1, CV_64F);
    SVD svd;
    for (int i = 0; i < num_images_; ++i)
    {
        cam_params_.at<double>(i * 7, 0) = cameras[i].focal;
        cam_params_.at<double>(i * 7 + 1, 0) = cameras[i].ppx;
        cam_params_.at<double>(i * 7 + 2, 0) = cameras[i].ppy;
        cam_params_.at<double>(i * 7 + 3, 0) = cameras[i].aspect;

        // Project the initial estimate onto the nearest proper rotation matrix
        svd(cameras[i].R, SVD::FULL_UV);
        Mat R = svd.u * svd.vt;
        if (determinant(R) < 0)
            R *= -1;

        Mat rvec;
        Rodrigues(R, rvec);
        CV_Assert(rvec.type() == CV_32F);
        cam_params_.at<double>(i * 7 + 4, 0) = rvec.at<float>(0, 0);
        cam_params_.at<double>(i * 7 + 5, 0) = rvec.at<float>(1, 0);
        cam_params_.at<double>(i * 7 + 6, 0) = rvec.at<float>(2, 0);
    }
}


void BundleAdjusterReproj::obtainRefinedCameraParams(std::vector<CameraParams> &cameras) const
{
    for (int i = 0; i < num_images_; ++i)
    {
        cameras[i].focal = cam_params_.at<double>(i * 7, 0);
        cameras[i].ppx = cam_params_.at<double>(i * 7 + 1, 0);
        cameras[i].ppy = cam_params_.at<double>(i * 7 + 2, 0);
        cameras[i].aspect = cam_params_.at<double>(i * 7 + 3, 0);

        Mat rvec(3, 1, CV_64F);
        rvec.at<double>(0, 0) = cam_params_.at<double>(i * 7 + 4, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(i * 7 + 5, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(i * 7 + 6, 0);
        Rodrigues(rvec, cameras[i].R);

        Mat tmp;
        cameras[i].R.convertTo(tmp, CV_32F);
        cameras[i].R = tmp;
    }
}


void BundleAdjusterReproj::calcError(Mat &err)
{
    err.create(total_num_matches_ * 2, 1, CV_64F);

    int match_idx = 0;
    for (size_t edge_idx = 0; edge_idx < edges_.size(); ++edge_idx)
    {
        int i = edges_[edge_idx].first;
        int j = edges_[edge_idx].second;
        double f1 = cam_params_.at<double>(i * 7, 0);
        double f2 = cam_params_.at<double>(j * 7, 0);
        double ppx1 = cam_params_.at<double>(i * 7 + 1, 0);
        double ppx2 = cam_params_.at<double>(j * 7 + 1, 0);
        double ppy1 = cam_params_.at<double>(i * 7 + 2, 0);
        double ppy2 = cam_params_.at<double>(j * 7 + 2, 0);
        double a1 = cam_params_.at<double>(i * 7 + 3, 0);
        double a2 = cam_params_.at<double>(j * 7 + 3, 0);

        double R1[9];
        Mat R1_(3, 3, CV_64F, R1);
        Mat rvec(3, 1, CV_64F);
        rvec.at<double>(0, 0) = cam_params_.at<double>(i * 7 + 4, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(i * 7 + 5, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(i * 7 + 6, 0);
        Rodrigues(rvec, R1_);

        double R2[9];
        Mat R2_(3, 3, CV_64F, R2);
        rvec.at<double>(0, 0) = cam_params_.at<double>(j * 7 + 4, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(j * 7 + 5, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(j * 7 + 6, 0);
        Rodrigues(rvec, R2_);

        const ImageFeatures& features1 = features_[i];
        const ImageFeatures& features2 = features_[j];
        const MatchesInfo& matches_info = pairwise_matches_[i * num_images_ + j];

        Mat_<double> K1 = Mat::eye(3, 3, CV_64F);
        K1(0,0) = f1; K1(0,2) = ppx1;
        K1(1,1) = f1*a1; K1(1,2) = ppy1;

        Mat_<double> K2 = Mat::eye(3, 3, CV_64F);
        K2(0,0) = f2; K2(0,2) = ppx2;
        K2(1,1) = f2*a2; K2(1,2) = ppy2;

        Mat_<double> H = K2 * R2_.inv() * R1_ * K1.inv();

        for (size_t k = 0; k < matches_info.matches.size(); ++k)
        {
            if (!matches_info.inliers_mask[k])
                continue;

            const DMatch& m = matches_info.matches[k];
            Point2f p1 = features1.keypoints[m.queryIdx].pt;
            Point2f p2 = features2.keypoints[m.trainIdx].pt;
            double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2);
            double y = H(1,0)*p1.x + H(1,1)*p1.y + H(1,2);
            double z = H(2,0)*p1.x + H(2,1)*p1.y + H(2,2);

            err.at<double>(2 * match_idx, 0) = p2.x - x/z;
            err.at<double>(2 * match_idx + 1, 0) = p2.y - y/z;
            match_idx++;
        }
    }
}


void BundleAdjusterReproj::calcJacobian(Mat &jac)
{
    jac.create(total_num_matches_ * 2, num_images_ * 7, CV_64F);
    jac.setTo(0);

    double val;
    const double step = 1e-4;

    for (int i = 0; i < num_images_; ++i)
    {
        if (refinement_mask_.at<uchar>(0, 0))
        {
            val = cam_params_.at<double>(i * 7, 0);
            cam_params_.at<double>(i * 7, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 7, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 7));
            cam_params_.at<double>(i * 7, 0) = val;
        }
        if (refinement_mask_.at<uchar>(0, 2))
        {
            val = cam_params_.at<double>(i * 7 + 1, 0);
            cam_params_.at<double>(i * 7 + 1, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 7 + 1, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 7 + 1));
            cam_params_.at<double>(i * 7 + 1, 0) = val;
        }
        if (refinement_mask_.at<uchar>(1, 2))
        {
            val = cam_params_.at<double>(i * 7 + 2, 0);
            cam_params_.at<double>(i * 7 + 2, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 7 + 2, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 7 + 2));
            cam_params_.at<double>(i * 7 + 2, 0) = val;
        }
        if (refinement_mask_.at<uchar>(1, 1))
        {
            val = cam_params_.at<double>(i * 7 + 3, 0);
            cam_params_.at<double>(i * 7 + 3, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 7 + 3, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 7 + 3));
            cam_params_.at<double>(i * 7 + 3, 0) = val;
        }
        for (int j = 4; j < 7; ++j)
        {
            val = cam_params_.at<double>(i * 7 + j, 0);
            cam_params_.at<double>(i * 7 + j, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 7 + j, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 7 + j));
            cam_params_.at<double>(i * 7 + j, 0) = val;
        }
    }
}
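
// Note on the Jacobian above (a sketch of the scheme, not extra functionality): each
// column is filled by central finite differences via calcDeriv(),
//
//     d err / d p  ~=  (err(p + step) - err(p - step)) / (2 * step),
//
// with step = 1e-4. Columns gated off by refinement_mask_ stay zero, so the solver
// effectively leaves those parameters untouched.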


//////////////////////////////////////////////////////////////////////////////

void BundleAdjusterRay::setUpInitialCameraParams(const std::vector<CameraParams> &cameras)
{
    cam_params_.create(num_images_ * 4, 1, CV_64F);
    SVD svd;
    for (int i = 0; i < num_images_; ++i)
    {
        cam_params_.at<double>(i * 4, 0) = cameras[i].focal;

        svd(cameras[i].R, SVD::FULL_UV);
        Mat R = svd.u * svd.vt;
        if (determinant(R) < 0)
            R *= -1;

        Mat rvec;
        Rodrigues(R, rvec);
        CV_Assert(rvec.type() == CV_32F);
        cam_params_.at<double>(i * 4 + 1, 0) = rvec.at<float>(0, 0);
        cam_params_.at<double>(i * 4 + 2, 0) = rvec.at<float>(1, 0);
        cam_params_.at<double>(i * 4 + 3, 0) = rvec.at<float>(2, 0);
    }
}


void BundleAdjusterRay::obtainRefinedCameraParams(std::vector<CameraParams> &cameras) const
{
    for (int i = 0; i < num_images_; ++i)
    {
        cameras[i].focal = cam_params_.at<double>(i * 4, 0);

        Mat rvec(3, 1, CV_64F);
        rvec.at<double>(0, 0) = cam_params_.at<double>(i * 4 + 1, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(i * 4 + 2, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(i * 4 + 3, 0);
        Rodrigues(rvec, cameras[i].R);

        Mat tmp;
        cameras[i].R.convertTo(tmp, CV_32F);
        cameras[i].R = tmp;
    }
}


void BundleAdjusterRay::calcError(Mat &err)
{
    err.create(total_num_matches_ * 3, 1, CV_64F);

    int match_idx = 0;
    for (size_t edge_idx = 0; edge_idx < edges_.size(); ++edge_idx)
    {
        int i = edges_[edge_idx].first;
        int j = edges_[edge_idx].second;
        double f1 = cam_params_.at<double>(i * 4, 0);
        double f2 = cam_params_.at<double>(j * 4, 0);

        double R1[9];
        Mat R1_(3, 3, CV_64F, R1);
        Mat rvec(3, 1, CV_64F);
        rvec.at<double>(0, 0) = cam_params_.at<double>(i * 4 + 1, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(i * 4 + 2, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(i * 4 + 3, 0);
        Rodrigues(rvec, R1_);

        double R2[9];
        Mat R2_(3, 3, CV_64F, R2);
        rvec.at<double>(0, 0) = cam_params_.at<double>(j * 4 + 1, 0);
        rvec.at<double>(1, 0) = cam_params_.at<double>(j * 4 + 2, 0);
        rvec.at<double>(2, 0) = cam_params_.at<double>(j * 4 + 3, 0);
        Rodrigues(rvec, R2_);

        const ImageFeatures& features1 = features_[i];
        const ImageFeatures& features2 = features_[j];
        const MatchesInfo& matches_info = pairwise_matches_[i * num_images_ + j];

        Mat_<double> K1 = Mat::eye(3, 3, CV_64F);
        K1(0,0) = f1; K1(0,2) = features1.img_size.width * 0.5;
        K1(1,1) = f1; K1(1,2) = features1.img_size.height * 0.5;

        Mat_<double> K2 = Mat::eye(3, 3, CV_64F);
        K2(0,0) = f2; K2(0,2) = features2.img_size.width * 0.5;
        K2(1,1) = f2; K2(1,2) = features2.img_size.height * 0.5;

        Mat_<double> H1 = R1_ * K1.inv();
        Mat_<double> H2 = R2_ * K2.inv();

        for (size_t k = 0; k < matches_info.matches.size(); ++k)
        {
            if (!matches_info.inliers_mask[k])
                continue;

            const DMatch& m = matches_info.matches[k];

            Point2f p1 = features1.keypoints[m.queryIdx].pt;
            double x1 = H1(0,0)*p1.x + H1(0,1)*p1.y + H1(0,2);
            double y1 = H1(1,0)*p1.x + H1(1,1)*p1.y + H1(1,2);
            double z1 = H1(2,0)*p1.x + H1(2,1)*p1.y + H1(2,2);
            double len = std::sqrt(x1*x1 + y1*y1 + z1*z1);
            x1 /= len; y1 /= len; z1 /= len;

            Point2f p2 = features2.keypoints[m.trainIdx].pt;
            double x2 = H2(0,0)*p2.x + H2(0,1)*p2.y + H2(0,2);
            double y2 = H2(1,0)*p2.x + H2(1,1)*p2.y + H2(1,2);
            double z2 = H2(2,0)*p2.x + H2(2,1)*p2.y + H2(2,2);
            len = std::sqrt(x2*x2 + y2*y2 + z2*z2);
            x2 /= len; y2 /= len; z2 /= len;

            double mult = std::sqrt(f1 * f2);
            err.at<double>(3 * match_idx, 0) = mult * (x1 - x2);
            err.at<double>(3 * match_idx + 1, 0) = mult * (y1 - y2);
            err.at<double>(3 * match_idx + 2, 0) = mult * (z1 - z2);

            match_idx++;
        }
    }
}


void BundleAdjusterRay::calcJacobian(Mat &jac)
{
    jac.create(total_num_matches_ * 3, num_images_ * 4, CV_64F);

    double val;
    const double step = 1e-3;

    for (int i = 0; i < num_images_; ++i)
    {
        for (int j = 0; j < 4; ++j)
        {
            val = cam_params_.at<double>(i * 4 + j, 0);
            cam_params_.at<double>(i * 4 + j, 0) = val - step;
            calcError(err1_);
            cam_params_.at<double>(i * 4 + j, 0) = val + step;
            calcError(err2_);
            calcDeriv(err1_, err2_, 2 * step, jac.col(i * 4 + j));
            cam_params_.at<double>(i * 4 + j, 0) = val;
        }
    }
}
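
// Note on BundleAdjusterRay::calcError above (a sketch of the error model as
// implemented): every inlier correspondence contributes the difference of two unit
// rays,
//
//     r1 = normalize(R1 * K1^(-1) * p1),   r2 = normalize(R2 * K2^(-1) * p2),
//
// and the residual sqrt(f1 * f2) * (r1 - r2) scales that angular mismatch so it is
// roughly comparable to a reprojection error in pixels.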


//////////////////////////////////////////////////////////////////////////////

void waveCorrect(std::vector<Mat> &rmats, WaveCorrectKind kind)
{
    LOGLN("Wave correcting...");
#if ENABLE_LOG
    int64 t = getTickCount();
#endif
    if (rmats.size() <= 1)
    {
        LOGLN("Wave correcting, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
        return;
    }

    Mat moment = Mat::zeros(3, 3, CV_32F);
    for (size_t i = 0; i < rmats.size(); ++i)
    {
        Mat col = rmats[i].col(0);
        moment += col * col.t();
    }
    Mat eigen_vals, eigen_vecs;
    eigen(moment, eigen_vals, eigen_vecs);

    Mat rg1;
    if (kind == WAVE_CORRECT_HORIZ)
        rg1 = eigen_vecs.row(2).t();
    else if (kind == WAVE_CORRECT_VERT)
        rg1 = eigen_vecs.row(0).t();
    else
        CV_Error(CV_StsBadArg, "unsupported kind of wave correction");

    Mat img_k = Mat::zeros(3, 1, CV_32F);
    for (size_t i = 0; i < rmats.size(); ++i)
        img_k += rmats[i].col(2);
    Mat rg0 = rg1.cross(img_k);
    double rg0_norm = norm(rg0);

    if( rg0_norm <= DBL_MIN )
    {
        return;
    }

    rg0 /= rg0_norm;

    Mat rg2 = rg0.cross(rg1);

    double conf = 0;
    if (kind == WAVE_CORRECT_HORIZ)
    {
        for (size_t i = 0; i < rmats.size(); ++i)
            conf += rg0.dot(rmats[i].col(0));
        if (conf < 0)
        {
            rg0 *= -1;
            rg1 *= -1;
        }
    }
    else if (kind == WAVE_CORRECT_VERT)
    {
        for (size_t i = 0; i < rmats.size(); ++i)
            conf -= rg1.dot(rmats[i].col(0));
        if (conf < 0)
        {
            rg0 *= -1;
            rg1 *= -1;
        }
    }

    Mat R = Mat::zeros(3, 3, CV_32F);
    Mat tmp = R.row(0);
    Mat(rg0.t()).copyTo(tmp);
    tmp = R.row(1);
    Mat(rg1.t()).copyTo(tmp);
    tmp = R.row(2);
    Mat(rg2.t()).copyTo(tmp);

    for (size_t i = 0; i < rmats.size(); ++i)
        rmats[i] = R * rmats[i];

    LOGLN("Wave correcting, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
}
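
// Usage sketch (illustrative only; this mirrors how the stitching samples apply wave
// correction after bundle adjustment):
//
//     std::vector<Mat> rmats;
//     for (size_t i = 0; i < cameras.size(); ++i)
//         rmats.push_back(cameras[i].R.clone());
//     waveCorrect(rmats, WAVE_CORRECT_HORIZ);
//     for (size_t i = 0; i < cameras.size(); ++i)
//         cameras[i].R = rmats[i];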


//////////////////////////////////////////////////////////////////////////////

String matchesGraphAsString(std::vector<String> &pathes, std::vector<MatchesInfo> &pairwise_matches,
                            float conf_threshold)
{
    std::stringstream str;
    str << "graph matches_graph{\n";

    const int num_images = static_cast<int>(pathes.size());
    std::set<std::pair<int,int> > span_tree_edges;
    DisjointSets comps(num_images);

    for (int i = 0; i < num_images; ++i)
    {
        for (int j = 0; j < num_images; ++j)
        {
            if (pairwise_matches[i*num_images + j].confidence < conf_threshold)
                continue;
            int comp1 = comps.findSetByElem(i);
            int comp2 = comps.findSetByElem(j);
            if (comp1 != comp2)
            {
                comps.mergeSets(comp1, comp2);
                span_tree_edges.insert(std::make_pair(i, j));
            }
        }
    }

    for (std::set<std::pair<int,int> >::const_iterator itr = span_tree_edges.begin();
         itr != span_tree_edges.end(); ++itr)
    {
        std::pair<int,int> edge = *itr;
        if (span_tree_edges.find(edge) != span_tree_edges.end())
        {
            String name_src = pathes[edge.first];
            size_t prefix_len = name_src.find_last_of("/\\");
            if (prefix_len != String::npos) prefix_len++; else prefix_len = 0;
            name_src = name_src.substr(prefix_len, name_src.size() - prefix_len);

            String name_dst = pathes[edge.second];
            prefix_len = name_dst.find_last_of("/\\");
            if (prefix_len != String::npos) prefix_len++; else prefix_len = 0;
            name_dst = name_dst.substr(prefix_len, name_dst.size() - prefix_len);

            int pos = edge.first*num_images + edge.second;
            str << "\"" << name_src.c_str() << "\" -- \"" << name_dst.c_str() << "\""
                << "[label=\"Nm=" << pairwise_matches[pos].matches.size()
                << ", Ni=" << pairwise_matches[pos].num_inliers
                << ", C=" << pairwise_matches[pos].confidence << "\"];\n";
        }
    }

    for (size_t i = 0; i < comps.size.size(); ++i)
    {
        if (comps.size[comps.findSetByElem((int)i)] == 1)
        {
            String name = pathes[i];
            size_t prefix_len = name.find_last_of("/\\");
            if (prefix_len != String::npos) prefix_len++; else prefix_len = 0;
            name = name.substr(prefix_len, name.size() - prefix_len);
            str << "\"" << name.c_str() << "\";\n";
        }
    }

    str << "}";
    return str.str().c_str();
}
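
// Example of the generated graph (illustrative; the file names and numbers are made
// up). The string is Graphviz DOT, so it can be rendered with e.g. `dot -Tpng`:
//
//     graph matches_graph{
//     "img1.jpg" -- "img2.jpg"[label="Nm=250, Ni=180, C=2.1"];
//     "img2.jpg" -- "img3.jpg"[label="Nm=310, Ni=260, C=2.7"];
//     "lonely.jpg";
//     }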

std::vector<int> leaveBiggestComponent(std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
                                       float conf_threshold)
{
    const int num_images = static_cast<int>(features.size());

    DisjointSets comps(num_images);
    for (int i = 0; i < num_images; ++i)
    {
        for (int j = 0; j < num_images; ++j)
        {
            if (pairwise_matches[i*num_images + j].confidence < conf_threshold)
                continue;
            int comp1 = comps.findSetByElem(i);
            int comp2 = comps.findSetByElem(j);
            if (comp1 != comp2)
                comps.mergeSets(comp1, comp2);
        }
    }

    int max_comp = static_cast<int>(std::max_element(comps.size.begin(), comps.size.end()) - comps.size.begin());

    std::vector<int> indices;
    std::vector<int> indices_removed;
    for (int i = 0; i < num_images; ++i)
        if (comps.findSetByElem(i) == max_comp)
            indices.push_back(i);
        else
            indices_removed.push_back(i);

    std::vector<ImageFeatures> features_subset;
    std::vector<MatchesInfo> pairwise_matches_subset;
    for (size_t i = 0; i < indices.size(); ++i)
    {
        features_subset.push_back(features[indices[i]]);
        for (size_t j = 0; j < indices.size(); ++j)
        {
            pairwise_matches_subset.push_back(pairwise_matches[indices[i]*num_images + indices[j]]);
            pairwise_matches_subset.back().src_img_idx = static_cast<int>(i);
            pairwise_matches_subset.back().dst_img_idx = static_cast<int>(j);
        }
    }

    if (static_cast<int>(features_subset.size()) == num_images)
        return indices;

    LOG("Removed some images, because they can't be matched or are too similar: (");
    LOG(indices_removed[0] + 1);
    for (size_t i = 1; i < indices_removed.size(); ++i)
        LOG(", " << indices_removed[i]+1);
    LOGLN(").");
    LOGLN("Try to decrease the match confidence threshold and/or check if you're stitching duplicates.");

    features = features_subset;
    pairwise_matches = pairwise_matches_subset;

    return indices;
}


void findMaxSpanningTree(int num_images, const std::vector<MatchesInfo> &pairwise_matches,
                         Graph &span_tree, std::vector<int> &centers)
{
    Graph graph(num_images);
    std::vector<GraphEdge> edges;

    // Construct images graph and remember its edges
    for (int i = 0; i < num_images; ++i)
    {
        for (int j = 0; j < num_images; ++j)
        {
            if (pairwise_matches[i * num_images + j].H.empty())
                continue;
            float conf = static_cast<float>(pairwise_matches[i * num_images + j].num_inliers);
            graph.addEdge(i, j, conf);
            edges.push_back(GraphEdge(i, j, conf));
        }
    }

    DisjointSets comps(num_images);
    span_tree.create(num_images);
    std::vector<int> span_tree_powers(num_images, 0);

    // Find maximum spanning tree
    sort(edges.begin(), edges.end(), std::greater<GraphEdge>());
    for (size_t i = 0; i < edges.size(); ++i)
    {
        int comp1 = comps.findSetByElem(edges[i].from);
        int comp2 = comps.findSetByElem(edges[i].to);
        if (comp1 != comp2)
        {
            comps.mergeSets(comp1, comp2);
            span_tree.addEdge(edges[i].from, edges[i].to, edges[i].weight);
            span_tree.addEdge(edges[i].to, edges[i].from, edges[i].weight);
            span_tree_powers[edges[i].from]++;
            span_tree_powers[edges[i].to]++;
        }
    }

    // Find spanning tree leaves
    std::vector<int> span_tree_leafs;
    for (int i = 0; i < num_images; ++i)
        if (span_tree_powers[i] == 1)
            span_tree_leafs.push_back(i);

    // Find maximum distance from each spanning tree vertex
    std::vector<int> max_dists(num_images, 0);
    std::vector<int> cur_dists;
    for (size_t i = 0; i < span_tree_leafs.size(); ++i)
    {
        cur_dists.assign(num_images, 0);
        span_tree.walkBreadthFirst(span_tree_leafs[i], IncDistance(cur_dists));
        for (int j = 0; j < num_images; ++j)
            max_dists[j] = std::max(max_dists[j], cur_dists[j]);
    }

    // Find min-max distance
    int min_max_dist = max_dists[0];
    for (int i = 1; i < num_images; ++i)
        if (min_max_dist > max_dists[i])
            min_max_dist = max_dists[i];

    // Find spanning tree centers
    centers.clear();
    for (int i = 0; i < num_images; ++i)
        if (max_dists[i] == min_max_dist)
            centers.push_back(i);
    CV_Assert(centers.size() > 0 && centers.size() <= 2);
}

} // namespace detail
} // namespace cv