/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"

#if (defined(__cplusplus) && __cplusplus > 199711L) || (defined(_MSC_VER) && _MSC_VER >= 1700)
#define USE_STD_THREADS
#endif

#if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID) || defined(USE_STD_THREADS)

#include "opencv2/core/utility.hpp"

#ifdef USE_STD_THREADS
#include <thread>
#include <mutex>
#include <condition_variable>
#else
#include <pthread.h>
#endif

#if defined(DEBUG) || defined(_DEBUG)
#undef DEBUGLOGS
#define DEBUGLOGS 1
#endif

#ifndef DEBUGLOGS
#define DEBUGLOGS 0
#endif

#ifdef ANDROID
#include <android/log.h>
#define LOG_TAG "OBJECT_DETECTOR"
#define LOGD0(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGI0(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGW0(...) ((void)__android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__))
#define LOGE0(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#else

#include <stdio.h>

#define LOGD0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGI0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGW0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGE0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#endif

#if DEBUGLOGS
#define LOGD(_str, ...) LOGD0(_str , ## __VA_ARGS__)
#define LOGI(_str, ...) LOGI0(_str , ## __VA_ARGS__)
#define LOGW(_str, ...) LOGW0(_str , ## __VA_ARGS__)
#define LOGE(_str, ...) LOGE0(_str , ## __VA_ARGS__)
#else
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#endif


using namespace cv;

static inline cv::Point2f centerRect(const cv::Rect& r)
{
    return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
}

static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
{
    cv::Point2f m=centerRect(r);
    float width  = r.width  * scale;
    float height = r.height * scale;
    int x=cvRound(m.x - width/2);
    int y=cvRound(m.y - height/2);

    return cv::Rect(x, y, cvRound(width), cvRound(height));
}

namespace cv
{
    void* workcycleObjectDetectorFunction(void* p);
}

class cv::DetectionBasedTracker::SeparateDetectionWork
{
    public:
        SeparateDetectionWork(cv::DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector);
        virtual ~SeparateDetectionWork();
        bool communicateWithDetectingThread(const Mat& imageGray, std::vector<Rect>& rectsWhereRegions);
        bool run();
        void stop();
        void resetTracking();

        inline bool isWorking()
        {
            return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE);
        }
        inline void lock()
        {
#ifdef USE_STD_THREADS
            mtx_lock.lock();
#else
            pthread_mutex_lock(&mutex);
#endif
        }
        inline void unlock()
        {
#ifdef USE_STD_THREADS
            mtx_lock.unlock();
#else
            pthread_mutex_unlock(&mutex);
#endif
        }

    protected:

        DetectionBasedTracker& detectionBasedTracker;
        cv::Ptr<DetectionBasedTracker::IDetector> cascadeInThread;
#ifdef USE_STD_THREADS
        std::thread second_workthread;
        std::mutex mtx;
        std::unique_lock<std::mutex> mtx_lock;
        std::condition_variable objectDetectorRun;
        std::condition_variable objectDetectorThreadStartStop;
#else
        pthread_t second_workthread;
        pthread_mutex_t mutex;
        pthread_cond_t objectDetectorRun;
        pthread_cond_t objectDetectorThreadStartStop;
#endif
        std::vector<cv::Rect> resultDetect;
        volatile bool isObjectDetectingReady;
        volatile bool shouldObjectDetectingResultsBeForgot;

        enum StateSeparatedThread {
            STATE_THREAD_STOPPED=0,
            STATE_THREAD_WORKING_SLEEPING,
            STATE_THREAD_WORKING_WITH_IMAGE,
            STATE_THREAD_WORKING,
            STATE_THREAD_STOPPING
        };
        volatile StateSeparatedThread stateThread;

        cv::Mat imageSeparateDetecting;

        void workcycleObjectDetector();
        friend void* workcycleObjectDetectorFunction(void* p);

        long long timeWhenDetectingThreadStartedWork;
};

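// SeparateDetectionWork runs the "main" (whole-frame) detector on its own thread.
// The worker's life cycle is tracked by stateThread:
//   STATE_THREAD_STOPPED            : no worker thread is running
//   STATE_THREAD_WORKING_SLEEPING   : the worker waits on objectDetectorRun for a new frame
//   STATE_THREAD_WORKING_WITH_IMAGE : the worker runs cascadeInThread on imageSeparateDetecting
//   STATE_THREAD_STOPPING           : stop() has been called and the worker should leave its loop
// (STATE_THREAD_WORKING is declared but not used in this file.)
// stateThread, resultDetect and isObjectDetectingReady are always accessed under the mutex;
// objectDetectorRun wakes the worker, while objectDetectorThreadStartStop reports thread
// start-up and shut-down back to the calling thread.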
cv::DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector)
    :detectionBasedTracker(_detectionBasedTracker),
    cascadeInThread(),
    isObjectDetectingReady(false),
    shouldObjectDetectingResultsBeForgot(false),
    stateThread(STATE_THREAD_STOPPED),
    timeWhenDetectingThreadStartedWork(-1)
{
    CV_Assert(_detector);

    cascadeInThread = _detector;
#ifdef USE_STD_THREADS
    mtx_lock = std::unique_lock<std::mutex>(mtx);
    mtx_lock.unlock();
#else
    int res=0;
    res=pthread_mutex_init(&mutex, NULL);//TODO: should be attributes?
    if (res) {
        LOGE("ERROR in DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork in pthread_mutex_init(&mutex, NULL) is %d", res);
        throw(std::exception());
    }
    res=pthread_cond_init (&objectDetectorRun, NULL);
    if (res) {
        LOGE("ERROR in DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork in pthread_cond_init(&objectDetectorRun, NULL) is %d", res);
        pthread_mutex_destroy(&mutex);
        throw(std::exception());
    }
    res=pthread_cond_init (&objectDetectorThreadStartStop, NULL);
    if (res) {
        LOGE("ERROR in DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork in pthread_cond_init(&objectDetectorThreadStartStop, NULL) is %d", res);
        pthread_cond_destroy(&objectDetectorRun);
        pthread_mutex_destroy(&mutex);
        throw(std::exception());
    }
#endif
}

cv::DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
{
    if(stateThread!=STATE_THREAD_STOPPED) {
        LOGE("\n\n\nATTENTION!!! dangerous algorithm error: destructor DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork is called before stopping the workthread");
    }
#ifndef USE_STD_THREADS
    pthread_cond_destroy(&objectDetectorThreadStartStop);
    pthread_cond_destroy(&objectDetectorRun);
    pthread_mutex_destroy(&mutex);
#endif
}

bool cv::DetectionBasedTracker::SeparateDetectionWork::run()
{
    LOGD("DetectionBasedTracker::SeparateDetectionWork::run() --- start");
#ifdef USE_STD_THREADS
    mtx_lock.lock();
#else
    pthread_mutex_lock(&mutex);
#endif
    if (stateThread != STATE_THREAD_STOPPED) {
        LOGE("DetectionBasedTracker::SeparateDetectionWork::run is called while the previous run is not stopped");
#ifdef USE_STD_THREADS
        mtx_lock.unlock();
#else
        pthread_mutex_unlock(&mutex);
#endif
        return false;
    }
    stateThread=STATE_THREAD_WORKING_SLEEPING;
#ifdef USE_STD_THREADS
    second_workthread = std::thread(workcycleObjectDetectorFunction, (void*)this); //TODO: add attributes?
    objectDetectorThreadStartStop.wait(mtx_lock);
    mtx_lock.unlock();
#else
    pthread_create(&second_workthread, NULL, workcycleObjectDetectorFunction, (void*)this); //TODO: add attributes?
    pthread_cond_wait(&objectDetectorThreadStartStop, &mutex);
    pthread_mutex_unlock(&mutex);
#endif
    LOGD("DetectionBasedTracker::SeparateDetectionWork::run --- end");
    return true;
}

#define CATCH_ALL_AND_LOG(_block)                                                       \
    try {                                                                               \
        _block;                                                                         \
    }                                                                                   \
    catch(cv::Exception& e) {                                                           \
        LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", CV_Func, e.what());  \
    } catch(std::exception& e) {                                                        \
        LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", CV_Func, e.what());         \
    } catch(...) {                                                                      \
        LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", CV_Func);                   \
    }

void* cv::workcycleObjectDetectorFunction(void* p)
{
    CATCH_ALL_AND_LOG({ ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
    try{
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->lock();
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->stateThread = cv::DetectionBasedTracker::SeparateDetectionWork::STATE_THREAD_STOPPED;
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->isObjectDetectingReady=false;
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->shouldObjectDetectingResultsBeForgot=false;
#ifdef USE_STD_THREADS
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->objectDetectorThreadStartStop.notify_one();
#else
        pthread_cond_signal(&(((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->objectDetectorThreadStartStop));
#endif
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->unlock();
    } catch(...) {
        LOGE0("DetectionBasedTracker: workcycleObjectDetectorFunction: ERROR concerning pointer, received as the function parameter");
    }
    return NULL;
}

void cv::DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
{
    static double freq = getTickFrequency();
    LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start");
    std::vector<Rect> objects;

    CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
#ifdef USE_STD_THREADS
    mtx_lock.lock();
#else
    pthread_mutex_lock(&mutex);
#endif
    {
#ifdef USE_STD_THREADS
        objectDetectorThreadStartStop.notify_one();
#else
        pthread_cond_signal(&objectDetectorThreadStartStop);
#endif
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- before waiting");
        CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
#ifdef USE_STD_THREADS
        objectDetectorRun.wait(mtx_lock);
#else
        pthread_cond_wait(&objectDetectorRun, &mutex);
#endif
        if (isWorking()) {
            stateThread=STATE_THREAD_WORKING_WITH_IMAGE;
        }
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- after waiting");
    }
#ifdef USE_STD_THREADS
    mtx_lock.unlock();
#else
    pthread_mutex_unlock(&mutex);
#endif

    bool isFirstStep=true;

    isObjectDetectingReady=false;

    while(isWorking())
    {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- next step");

        if (!isFirstStep) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- before waiting");
            CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
#ifdef USE_STD_THREADS
            mtx_lock.lock();
#else
            pthread_mutex_lock(&mutex);
#endif
            if (!isWorking()) {//it is a rare case, but may cause a crash
                LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle from inner part of lock just before waiting");
#ifdef USE_STD_THREADS
                mtx_lock.unlock();
#else
                pthread_mutex_unlock(&mutex);
#endif
                break;
            }
            CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
#ifdef USE_STD_THREADS
            objectDetectorRun.wait(mtx_lock);
#else
            pthread_cond_wait(&objectDetectorRun, &mutex);
#endif
            if (isWorking()) {
                stateThread=STATE_THREAD_WORKING_WITH_IMAGE;
            }
#ifdef USE_STD_THREADS
            mtx_lock.unlock();
#else
            pthread_mutex_unlock(&mutex);
#endif

            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- after waiting");
        } else {
            isFirstStep=false;
        }

        if (!isWorking()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle just after waiting");
            break;
        }


        if (imageSeparateDetecting.empty()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- imageSeparateDetecting is empty, continue");
            continue;
        }
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start handling imageSeparateDetecting, img.size=%dx%d, img.data=0x%p",
                imageSeparateDetecting.size().width, imageSeparateDetecting.size().height, (void*)imageSeparateDetecting.data);


        int64 t1_detect=getTickCount();

        cascadeInThread->detect(imageSeparateDetecting, objects);

        /*cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
                detectionBasedTracker.parameters.scaleFactor, detectionBasedTracker.parameters.minNeighbors, 0
                |CV_HAAR_SCALE_IMAGE
                ,
                min_objectSize,
                max_objectSize
                );
        */

        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- end handling imageSeparateDetecting");

        if (!isWorking()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle just after detecting");
            break;
        }

        int64 t2_detect = getTickCount();
        int64 dt_detect = t2_detect-t1_detect;
        double dt_detect_ms=((double)dt_detect)/freq * 1000.0;
        (void)(dt_detect_ms);

        LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms);
#ifdef USE_STD_THREADS
        mtx_lock.lock();
#else
        pthread_mutex_lock(&mutex);
#endif
        if (!shouldObjectDetectingResultsBeForgot) {
            resultDetect=objects;
            isObjectDetectingReady=true;
        } else { //shouldObjectDetectingResultsBeForgot==true
            resultDetect.clear();
            isObjectDetectingReady=false;
            shouldObjectDetectingResultsBeForgot=false;
        }
        if(isWorking()) {
            stateThread=STATE_THREAD_WORKING_SLEEPING;
        }
#ifdef USE_STD_THREADS
        mtx_lock.unlock();
#else
        pthread_mutex_unlock(&mutex);
#endif

        objects.clear();
    }// while(isWorking())

    LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector: Returning");
}

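// stop() uses the same condition-variable pair as the rest of the class: it switches
// stateThread to STATE_THREAD_STOPPING, wakes the worker through objectDetectorRun and
// then sleeps on objectDetectorThreadStartStop until workcycleObjectDetectorFunction()
// has marked the thread STATE_THREAD_STOPPED.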
void cv::DetectionBasedTracker::SeparateDetectionWork::stop()
{
    //FIXME: TODO: should add quickStop functionality
#ifdef USE_STD_THREADS
    mtx_lock.lock();
#else
    pthread_mutex_lock(&mutex);
#endif
    if (!isWorking()) {
#ifdef USE_STD_THREADS
        mtx_lock.unlock();
#else
        pthread_mutex_unlock(&mutex);
#endif
        LOGE("DetectionBasedTracker::SeparateDetectionWork::stop is called but the work thread is not active");
        return;
    }
    stateThread=STATE_THREAD_STOPPING;
    LOGD("DetectionBasedTracker::SeparateDetectionWork::stop: before going to sleep to wait for the signal from the workthread");
#ifdef USE_STD_THREADS
    objectDetectorRun.notify_one();
    objectDetectorThreadStartStop.wait(mtx_lock);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::stop: after receiving the signal from the workthread, stateThread=%d", (int)stateThread);
    mtx_lock.unlock();
#else
    pthread_cond_signal(&objectDetectorRun);
    pthread_cond_wait(&objectDetectorThreadStartStop, &mutex);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::stop: after receiving the signal from the workthread, stateThread=%d", (int)stateThread);
    pthread_mutex_unlock(&mutex);
#endif
}

void cv::DetectionBasedTracker::SeparateDetectionWork::resetTracking()
{
    LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking");
#ifdef USE_STD_THREADS
    mtx_lock.lock();
#else
    pthread_mutex_lock(&mutex);
#endif

    if (stateThread == STATE_THREAD_WORKING_WITH_IMAGE) {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking: since workthread is detecting objects at the moment, we should make cascadeInThread stop detecting and forget the detecting results");
        shouldObjectDetectingResultsBeForgot=true;
        //cascadeInThread.setStopFlag();//FIXME: TODO: this feature also should be contributed to OpenCV
    } else {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking: since workthread is NOT detecting objects at the moment, no additional action is required");
    }

    resultDetect.clear();
    isObjectDetectingReady=false;

#ifdef USE_STD_THREADS
    mtx_lock.unlock();
#else
    pthread_mutex_unlock(&mutex);
#endif

}

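// communicateWithDetectingThread() is the tracker-side half of the hand-off protocol.
// Called from process() on every frame, it first harvests the latest detection results
// (if isObjectDetectingReady is set) into rectsWhereRegions, and then, provided at least
// minDetectionPeriod ms have passed since the previous launch, copies the current frame
// into imageSeparateDetecting and wakes the worker through objectDetectorRun.
// It returns true exactly when fresh detection results were handed over.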
bool cv::DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, std::vector<Rect>& rectsWhereRegions)
{
    static double freq = getTickFrequency();

    bool shouldCommunicateWithDetectingThread = (stateThread==STATE_THREAD_WORKING_SLEEPING);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0));

    if (!shouldCommunicateWithDetectingThread) {
        return false;
    }

    bool shouldHandleResult = false;

#ifdef USE_STD_THREADS
    mtx_lock.lock();
#else
    pthread_mutex_lock(&mutex);
#endif

    if (isObjectDetectingReady) {
        shouldHandleResult=true;
        rectsWhereRegions = resultDetect;
        isObjectDetectingReady=false;

        double lastBigDetectionDuration = 1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork)) / freq);
        (void)(lastBigDetectionDuration);
        LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration);
    }

    bool shouldSendNewDataToWorkThread = true;
    if (timeWhenDetectingThreadStartedWork > 0) {
        double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork)) / freq); //the same formula as for lastBigDetectionDuration
        shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= detectionBasedTracker.parameters.minDetectionPeriod);
        LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldSendNewDataToWorkThread was 1, now it is %d, since time_from_previous_launch_in_ms=%.2f, minDetectionPeriod=%d",
                (shouldSendNewDataToWorkThread?1:0), time_from_previous_launch_in_ms, detectionBasedTracker.parameters.minDetectionPeriod);
    }

    if (shouldSendNewDataToWorkThread) {

        imageSeparateDetecting.create(imageGray.size(), CV_8UC1);

        imageGray.copyTo(imageSeparateDetecting);//may change imageSeparateDetecting ptr. But should not.


        timeWhenDetectingThreadStartedWork = getTickCount();

#ifdef USE_STD_THREADS
        objectDetectorRun.notify_one();
#else
        pthread_cond_signal(&objectDetectorRun);
#endif
    }

#ifdef USE_STD_THREADS
    mtx_lock.unlock();
#else
    pthread_mutex_unlock(&mutex);
#endif
    LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: result: shouldHandleResult=%d", (shouldHandleResult?1:0));

    return shouldHandleResult;
}

cv::DetectionBasedTracker::Parameters::Parameters()
{
    maxTrackLifetime=5;
    minDetectionPeriod=0;
}

cv::DetectionBasedTracker::InnerParameters::InnerParameters()
{
    numLastPositionsToTrack=4;
    numStepsToWaitBeforeFirstShow=6;
    numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown=3;
    numStepsToShowWithoutDetecting=3;

    coeffTrackingWindowSize=2.0;
    coeffObjectSizeToTrack=0.85f;
    coeffObjectSpeedUsingInPrediction=0.8f;

}

cv::DetectionBasedTracker::DetectionBasedTracker(cv::Ptr<IDetector> mainDetector, cv::Ptr<IDetector> trackingDetector, const Parameters& params)
    :separateDetectionWork(),
    parameters(params),
    innerParameters(),
    numTrackedSteps(0),
    cascadeForTracking(trackingDetector)
{
    CV_Assert( (params.maxTrackLifetime >= 0)
//             && mainDetector
               && trackingDetector );

    if (mainDetector) {
        separateDetectionWork.reset(new SeparateDetectionWork(*this, mainDetector));
    }

    weightsPositionsSmoothing.push_back(1);
    weightsSizesSmoothing.push_back(0.5);
    weightsSizesSmoothing.push_back(0.3f);
    weightsSizesSmoothing.push_back(0.2f);
}

cv::DetectionBasedTracker::~DetectionBasedTracker()
{
}

void DetectionBasedTracker::process(const Mat& imageGray)
{
    CV_Assert(imageGray.type()==CV_8UC1);

    if ( separateDetectionWork && !separateDetectionWork->isWorking() ) {
        separateDetectionWork->run();
    }

    static double freq = getTickFrequency();
    static long long time_when_last_call_started=getTickCount();

    {
        double delta_time_from_prev_call=1000.0 * (((double)(getTickCount() - time_when_last_call_started)) / freq);
        (void)(delta_time_from_prev_call);
        LOGD("DetectionBasedTracker::process: time from the previous call is %f ms", (double)delta_time_from_prev_call);
        time_when_last_call_started=getTickCount();
    }

    Mat imageDetect=imageGray;

    std::vector<Rect> rectsWhereRegions;
    bool shouldHandleResult=false;
    if (separateDetectionWork) {
        shouldHandleResult = separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
    }

    if (shouldHandleResult) {
        LOGD("DetectionBasedTracker::process: rectsWhereRegions were taken from resultDetect");
    } else {
        LOGD("DetectionBasedTracker::process: rectsWhereRegions are taken from the previous positions");
        for(size_t i = 0; i < trackedObjects.size(); i++) {
            size_t n = trackedObjects[i].lastPositions.size();
            CV_Assert(n > 0);

            Rect r = trackedObjects[i].lastPositions[n-1];
            if(r.area() == 0) {
                LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                continue;
            }

            //correction by speed of rectangle
            if (n > 1) {
                Point2f center = centerRect(r);
                Point2f center_prev = centerRect(trackedObjects[i].lastPositions[n-2]);
                Point2f shift = (center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;

                r.x += cvRound(shift.x);
                r.y += cvRound(shift.y);
            }


            rectsWhereRegions.push_back(r);
        }
    }
    LOGI("DetectionBasedTracker::process: tracked objects num==%d", (int)trackedObjects.size());

    std::vector<Rect> detectedObjectsInRegions;

    LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size());
    for(size_t i=0; i < rectsWhereRegions.size(); i++) {
        Rect r = rectsWhereRegions[i];

        detectInRegion(imageDetect, r, detectedObjectsInRegions);
    }
    LOGD("DetectionBasedTracker::process: detectedObjectsInRegions.size()=%d", (int)detectedObjectsInRegions.size());

    updateTrackedObjects(detectedObjectsInRegions);
}

void cv::DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        Rect r=calcTrackedObjectPositionToShow((int)i);
        if (r.area()==0) {
            continue;
        }
        result.push_back(r);
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
    }
}

void cv::DetectionBasedTracker::getObjects(std::vector<Object>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        Rect r=calcTrackedObjectPositionToShow((int)i);
        if (r.area()==0) {
            continue;
        }
        result.push_back(Object(r, trackedObjects[i].id));
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
    }
}

void cv::DetectionBasedTracker::getObjects(std::vector<ExtObject>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        ObjectStatus status;
        Rect r=calcTrackedObjectPositionToShow((int)i, status);
        result.push_back(ExtObject(trackedObjects[i].id, r, status));
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}, status = %d", r.width, r.height, r.x, r.y, r.width, r.height, (int)status);
    }
}

bool cv::DetectionBasedTracker::run()
{
    if (separateDetectionWork) {
        return separateDetectionWork->run();
    }
    return false;
}

void cv::DetectionBasedTracker::stop()
{
    if (separateDetectionWork) {
        separateDetectionWork->stop();
    }
}

void cv::DetectionBasedTracker::resetTracking()
{
    if (separateDetectionWork) {
        separateDetectionWork->resetTracking();
    }
    trackedObjects.clear();
}

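// updateTrackedObjects() greedily matches the freshly detected rectangles against the last
// known position of each tracked object, scoring candidates by intersection area.
// correspondence[j] holds, for detected rectangle j, either the index of the matched track,
// NEW_RECTANGLE (it intersected nothing, so a new track is started), or
// INTERSECTED_RECTANGLE (it overlapped an existing track or an already matched rectangle
// and is ignored). Finally, tracks missed for more than maxTrackLifetime frames are erased,
// as are tracks that were never shown and have already been missed for more than
// numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown frames.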
void cv::DetectionBasedTracker::updateTrackedObjects(const std::vector<Rect>& detectedObjects)
{
    enum {
        NEW_RECTANGLE=-1,
        INTERSECTED_RECTANGLE=-2
    };

    int N1=(int)trackedObjects.size();
    int N2=(int)detectedObjects.size();
    LOGD("DetectionBasedTracker::updateTrackedObjects: N1=%d, N2=%d", N1, N2);

    for(int i=0; i < N1; i++) {
        trackedObjects[i].numDetectedFrames++;
    }

    std::vector<int> correspondence(detectedObjects.size(), NEW_RECTANGLE);
    correspondence.clear();
    correspondence.resize(detectedObjects.size(), NEW_RECTANGLE);

    for(int i=0; i < N1; i++) {
        LOGD("DetectionBasedTracker::updateTrackedObjects: i=%d", i);
        TrackedObject& curObject=trackedObjects[i];

        int bestIndex=-1;
        int bestArea=-1;

        int numpositions=(int)curObject.lastPositions.size();
        CV_Assert(numpositions > 0);
        Rect prevRect=curObject.lastPositions[numpositions-1];
        LOGD("DetectionBasedTracker::updateTrackedObjects: prevRect[%d]={%d, %d, %d x %d}", i, prevRect.x, prevRect.y, prevRect.width, prevRect.height);

        for(int j=0; j < N2; j++) {
            LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
            if (correspondence[j] >= 0) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it has correspondence=%d", j, correspondence[j]);
                continue;
            }
            if (correspondence[j] !=NEW_RECTANGLE) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it is intersected with another rectangle", j);
                continue;
            }
            LOGD("DetectionBasedTracker::updateTrackedObjects: detectedObjects[%d]={%d, %d, %d x %d}",
                    j, detectedObjects[j].x, detectedObjects[j].y, detectedObjects[j].width, detectedObjects[j].height);

            Rect r=prevRect & detectedObjects[j];
            if ( (r.width > 0) && (r.height > 0) ) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        r.x, r.y, r.width, r.height);
                correspondence[j]=INTERSECTED_RECTANGLE;

                if ( r.area() > bestArea) {
                    LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                    bestIndex=j;
                    bestArea=r.area();
                }
            }
        }
        if (bestIndex >= 0) {
            LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
            correspondence[bestIndex]=i;

            for(int j=0; j < N2; j++) {
                if (correspondence[j] >= 0)
                    continue;

                Rect r=detectedObjects[j] & detectedObjects[bestIndex];
                if ( (r.width > 0) && (r.height > 0) ) {
                    LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                    correspondence[j]=INTERSECTED_RECTANGLE;
                }
            }
        } else {
            LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
            curObject.numFramesNotDetected++;
        }
    }

    LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
    for(int j=0; j < N2; j++) {
        LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
        int i=correspondence[j];
        if (i >= 0) {//add position
            LOGD("DetectionBasedTracker::updateTrackedObjects: add position");
            trackedObjects[i].lastPositions.push_back(detectedObjects[j]);
            while ((int)trackedObjects[i].lastPositions.size() > (int) innerParameters.numLastPositionsToTrack) {
                trackedObjects[i].lastPositions.erase(trackedObjects[i].lastPositions.begin());
            }
            trackedObjects[i].numFramesNotDetected=0;
        } else if (i==NEW_RECTANGLE){ //new object
            LOGD("DetectionBasedTracker::updateTrackedObjects: new object");
            trackedObjects.push_back(detectedObjects[j]);
        } else {
            LOGD("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
        }
    }

    std::vector<TrackedObject>::iterator it=trackedObjects.begin();
    while( it != trackedObjects.end() ) {
        if ( (it->numFramesNotDetected > parameters.maxTrackLifetime)
                ||
                (
                 (it->numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                 &&
                 (it->numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)
                )
           )
        {
            int numpos=(int)it->lastPositions.size();
            CV_Assert(numpos > 0);
            Rect r = it->lastPositions[numpos-1];
            (void)(r);
            LOGD("DetectionBasedTracker::updateTrackedObjects: deleted object {%d, %d, %d x %d}",
                    r.x, r.y, r.width, r.height);
            it=trackedObjects.erase(it);
        } else {
            it++;
        }
    }
}

int cv::DetectionBasedTracker::addObject(const Rect& location)
{
    LOGD("DetectionBasedTracker::addObject: new object {%d, %d %dx%d}", location.x, location.y, location.width, location.height);
    trackedObjects.push_back(TrackedObject(location));
    int newId = trackedObjects.back().id;
    LOGD("DetectionBasedTracker::addObject: newId = %d", newId);
    return newId;
}

Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
{
    ObjectStatus status;
    return calcTrackedObjectPositionToShow(i, status);
}

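// The rectangle reported for a track is a weighted average over its most recent positions:
// the size is smoothed with weightsSizesSmoothing (0.5, 0.3, 0.2, see the constructor) and
// the centre with weightsPositionsSmoothing (a single weight of 1), i.e.
//   w = sum_j(width[N-1-j]*ws[j]) / sum_j(ws[j]), and likewise for the height and the centre.
// Objects that are too young (numStepsToWaitBeforeFirstShow) or have been missed for more
// than numStepsToShowWithoutDetecting frames get an empty Rect and a corresponding status.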
Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i, ObjectStatus& status) const
{
    if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=%d", i);
        status = WRONG_OBJECT;
        return Rect();
    }
    if (trackedObjects[i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow){
        LOGI("DetectionBasedTracker::calcTrackedObjectPositionToShow: trackedObjects[%d].numDetectedFrames=%d <= numStepsToWaitBeforeFirstShow=%d --- return empty Rect()",
                i, trackedObjects[i].numDetectedFrames, innerParameters.numStepsToWaitBeforeFirstShow);
        status = DETECTED_NOT_SHOWN_YET;
        return Rect();
    }
    if (trackedObjects[i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting) {
        status = DETECTED_TEMPORARY_LOST;
        return Rect();
    }

    const TrackedObject::PositionsVector& lastPositions=trackedObjects[i].lastPositions;

    int N=(int)lastPositions.size();
    if (N<=0) {
        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=%d", i);
        status = WRONG_OBJECT;
        return Rect();
    }

    int Nsize=std::min(N, (int)weightsSizesSmoothing.size());
    int Ncenter= std::min(N, (int)weightsPositionsSmoothing.size());

    Point2f center;
    double w=0, h=0;
    if (Nsize > 0) {
        double sum=0;
        for(int j=0; j < Nsize; j++) {
            int k=N-j-1;
            w += lastPositions[k].width  * weightsSizesSmoothing[j];
            h += lastPositions[k].height * weightsSizesSmoothing[j];
            sum+=weightsSizesSmoothing[j];
        }
        w /= sum;
        h /= sum;
    } else {
        w=lastPositions[N-1].width;
        h=lastPositions[N-1].height;
    }

    if (Ncenter > 0) {
        double sum=0;
        for(int j=0; j < Ncenter; j++) {
            int k=N-j-1;
            Point tl(lastPositions[k].tl());
            Point br(lastPositions[k].br());
            Point2f c1;
            c1=tl;
            c1=c1* 0.5f;
            Point2f c2;
            c2=br;
            c2=c2*0.5f;
            c1=c1+c2;

            center=center+ (c1 * weightsPositionsSmoothing[j]);
            sum+=weightsPositionsSmoothing[j];
        }
        center *= (float)(1 / sum);
    } else {
        int k=N-1;
        Point tl(lastPositions[k].tl());
        Point br(lastPositions[k].br());
        Point2f c1;
        c1=tl;
        c1=c1* 0.5f;
        Point2f c2;
        c2=br;
        c2=c2*0.5f;

        center=c1+c2;
    }
    Point2f tl=center-Point2f((float)w*0.5f,(float)h*0.5f);
    Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
    LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);

    status = DETECTED;
    return res;
}

void cv::DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, std::vector<Rect>& detectedObjectsInRegions)
{
    Rect r0(Point(), img.size());
    Rect r1 = scale_rect(r, innerParameters.coeffTrackingWindowSize);
    r1 = r1 & r0;

    if ( (r1.width <=0) || (r1.height <= 0) ) {
        LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
        return;
    }

    int d = cvRound(std::min(r.width, r.height) * innerParameters.coeffObjectSizeToTrack);

    std::vector<Rect> tmpobjects;

    Mat img1(img, r1);//subimage for rectangle -- without data copying
    LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
            img1.size().width, img1.size().height, d);

    cascadeForTracking->setMinObjectSize(Size(d, d));
    cascadeForTracking->detect(img1, tmpobjects);
    /*
    detectMultiScale( img1, tmpobjects,
            parameters.scaleFactor, parameters.minNeighbors, 0
            |CV_HAAR_FIND_BIGGEST_OBJECT
            |CV_HAAR_SCALE_IMAGE
            ,
            Size(d,d),
            max_objectSize
            );*/

    for(size_t i=0; i < tmpobjects.size(); i++) {
        Rect curres(tmpobjects[i].tl() + r1.tl(), tmpobjects[i].size());
        detectedObjectsInRegions.push_back(curres);
    }
}

bool cv::DetectionBasedTracker::setParameters(const Parameters& params)
{
    if ( params.maxTrackLifetime < 0 )
    {
        LOGE("DetectionBasedTracker::setParameters: ERROR: wrong parameters value");
        return false;
    }

    if (separateDetectionWork) {
        separateDetectionWork->lock();
    }
    parameters=params;
    if (separateDetectionWork) {
        separateDetectionWork->unlock();
    }
    return true;
}

const cv::DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters() const
{
    return parameters;
}

#endif
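/*
 Usage sketch (illustrative only, kept inside a comment so that it is not compiled into the
 library). It assumes a hypothetical CascadeDetectorAdapter that implements
 DetectionBasedTracker::IDetector on top of cv::CascadeClassifier (similar adapters are used
 in the OpenCV samples), a placeholder cascade path and an already opened cv::VideoCapture;
 the objdetect, imgproc and videoio headers would be needed as well.

class CascadeDetectorAdapter : public cv::DetectionBasedTracker::IDetector
{
public:
    CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector)
        : IDetector(), Detector(detector)
    {
        CV_Assert(detector);
    }

    void detect(const cv::Mat& image, std::vector<cv::Rect>& objects)
    {
        // scaleFactor, minNeighbours, minObjSize and maxObjSize are inherited from IDetector
        Detector->detectMultiScale(image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
    }

private:
    cv::Ptr<cv::CascadeClassifier> Detector;
};

void exampleRun(cv::VideoCapture& capture, const std::string& cascadePath)
{
    cv::Ptr<cv::DetectionBasedTracker::IDetector> mainDetector =
        cv::makePtr<CascadeDetectorAdapter>(cv::makePtr<cv::CascadeClassifier>(cascadePath));
    cv::Ptr<cv::DetectionBasedTracker::IDetector> trackingDetector =
        cv::makePtr<CascadeDetectorAdapter>(cv::makePtr<cv::CascadeClassifier>(cascadePath));

    cv::DetectionBasedTracker::Parameters params;
    cv::DetectionBasedTracker tracker(mainDetector, trackingDetector, params);
    tracker.run();                      // start the background detection thread

    cv::Mat frame, gray;
    std::vector<cv::Rect> objects;
    while (capture.read(frame))
    {
        cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
        tracker.process(gray);          // cheap per-frame tracking plus asynchronous detection
        tracker.getObjects(objects);    // smoothed rectangles of the confirmed objects
    }
    tracker.stop();
}
*/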