/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                        Intel License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of Intel Corporation may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

/****************************************************************************************\
      Contour-based face feature tracking
      The code was created by Tatiana Cherepanova (tata (at) sl.iae.nsk.su)
\****************************************************************************************/

#include "_cvaux.h"
#include "_cvvectrack.h"

#define _ASSERT assert
#define NUM_FACE_ELEMENTS   3
enum
{
    MOUTH = 0,
    LEYE  = 1,
    REYE  = 2,
};

#define MAX_LAYERS      64

const double pi = 3.1415926535;

struct CvFaceTracker;
struct CvTrackingRect;
class CvFaceElement;

void ThresholdingParam(IplImage *imgGray, int iNumLayers, int &iMinLevel, int &iMaxLevel, float &step, float& power, int iHistMin /*= HIST_MIN*/);
int ChoiceTrackingFace3(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy);
int ChoiceTrackingFace2(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy, int noel);
inline int GetEnergy(CvTrackingRect** ppNew, const CvTrackingRect* pPrev, CvPoint* ptTempl, CvRect* rTempl);
inline int GetEnergy2(CvTrackingRect** ppNew, const CvTrackingRect* pPrev, CvPoint* ptTempl, CvRect* rTempl, int* element);
inline double CalculateTransformationLMS3_0( CvPoint* pTemplPoints, CvPoint* pSrcPoints);
inline double CalculateTransformationLMS3( CvPoint* pTemplPoints,
                                           CvPoint* pSrcPoints,
                                           double* pdbAverageScale,
                                           double* pdbAverageRotate,
                                           double* pdbAverageShiftX,
                                           double* pdbAverageShiftY );

struct CvTrackingRect
{
    CvRect r;
    CvPoint ptCenter;
    int iColor;
    int iEnergy;
    int nRectsInThis;
    int nRectsOnLeft;
    int nRectsOnRight;
    int nRectsOnTop;
    int nRectsOnBottom;
    CvTrackingRect() { memset(this, 0, sizeof(CvTrackingRect)); };
    int Energy(const CvTrackingRect& prev)
    {
        int prev_color = 0 == prev.iColor ? iColor : prev.iColor;
        iEnergy =  1 * pow2(r.width - prev.r.width) +
                   1 * pow2(r.height - prev.r.height) +
                   1 * pow2(iColor - prev_color) / 4 +
                 - 1 * nRectsInThis +
                 - 0 * nRectsOnTop +
                 + 0 * nRectsOnLeft +
                 + 0 * nRectsOnRight +
                 + 0 * nRectsOnBottom;
        return iEnergy;
    }
};

struct CvFaceTracker
{
    CvTrackingRect face[NUM_FACE_ELEMENTS];
    int iTrackingFaceType;
    double dbRotateDelta;
    double dbRotateAngle;
    CvPoint ptRotate;

    CvPoint ptTempl[NUM_FACE_ELEMENTS];
    CvRect rTempl[NUM_FACE_ELEMENTS];

    IplImage* imgGray;
    IplImage* imgThresh;
    CvMemStorage* mstgContours;
    CvFaceTracker()
    {
        ptRotate.x = 0;
        ptRotate.y = 0;
        dbRotateDelta = 0;
        dbRotateAngle = 0;
        iTrackingFaceType = -1;
        imgThresh = NULL;
        imgGray = NULL;
        mstgContours = NULL;
    };
    ~CvFaceTracker()
    {
        if (NULL != imgGray)
            cvReleaseImage(&imgGray);
        if (NULL != imgThresh)
            cvReleaseImage(&imgThresh);
        if (NULL != mstgContours)
            cvReleaseMemStorage(&mstgContours);
    };
    int Init(CvRect* pRects, IplImage* imgSrc)
    {
        for (int i = 0; i < NUM_FACE_ELEMENTS; i++)
        {
            face[i].r = pRects[i];
            face[i].ptCenter = Center(face[i].r);
            ptTempl[i] = face[i].ptCenter;
            rTempl[i] = face[i].r;
        }
        imgGray = cvCreateImage(cvSize(imgSrc->width, imgSrc->height), 8, 1);
        imgThresh = cvCreateImage(cvSize(imgSrc->width, imgSrc->height), 8, 1);
        mstgContours = cvCreateMemStorage();
        if ((NULL == imgGray) ||
            (NULL == imgThresh) ||
            (NULL == mstgContours))
            return FALSE;
        return TRUE;
    };
    int InitNextImage(IplImage* img)
    {
        CvSize sz = {img->width, img->height};
        ReallocImage(&imgGray, sz, 1);
        ReallocImage(&imgThresh, sz, 1);
        ptRotate = face[MOUTH].ptCenter;
        float m[6];
        CvMat mat = cvMat( 2, 3, CV_32FC1, m );

        if (NULL == imgGray || NULL == imgThresh)
            return FALSE;

        /*m[0] = (float)cos(-dbRotateAngle*CV_PI/180.);
        m[1] = (float)sin(-dbRotateAngle*CV_PI/180.);
        m[2] = (float)ptRotate.x;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = (float)ptRotate.y;*/
        cv2DRotationMatrix( cvPointTo32f(ptRotate), -dbRotateAngle, 1., &mat );
        cvWarpAffine( img, imgGray, &mat );

        if (NULL == mstgContours)
            mstgContours = cvCreateMemStorage();
        else
            cvClearMemStorage(mstgContours);
        if (NULL == mstgContours)
            return FALSE;
        return TRUE;
    }
};

class CvFaceElement
{
public:
    CvSeq* m_seqRects;
    CvMemStorage* m_mstgRects;
    CvRect m_rROI;
    CvTrackingRect m_trPrev;
    inline CvFaceElement()
    {
        m_seqRects = NULL;
        m_mstgRects = NULL;
        m_rROI.x = 0;
        m_rROI.y = 0;
        m_rROI.width = 0;
        m_rROI.height = 0;
    };
    inline int Init(const CvRect& roi, const CvTrackingRect& prev, CvMemStorage* mstg = NULL)
    {
        m_rROI = roi;
        m_trPrev = prev;
        if (NULL != mstg)
            m_mstgRects = mstg;
        if (NULL == m_mstgRects)
            return FALSE;
        if (NULL == m_seqRects)
            m_seqRects = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvTrackingRect), m_mstgRects);
        else
            cvClearSeq(m_seqRects);
        if (NULL == m_seqRects)
            return FALSE;
        return TRUE;
    };
    void FindRects(IplImage* img, IplImage* thresh, int nLayers, int dMinSize);
protected:
    void FindContours(IplImage* img, IplImage* thresh, int nLayers, int dMinSize);
    void MergeRects(int d);
    void Energy();
}; //class CvFaceElement
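
/*
    Tracking outline (summary of the code below):
      - cvTrackFace() warps the incoming frame around the previous mouth center by the
        accumulated rotation angle (CvFaceTracker::InitNextImage);
      - for each element (MOUTH, LEYE, REYE) CvFaceElement::FindRects() thresholds the
        search window at a set of gray levels, collects contour bounding boxes as
        candidate rectangles, merges similar rectangles and ranks the candidates by
        CvTrackingRect::Energy();
      - ChoiceTrackingFace3() picks the best mouth/left-eye/right-eye triple using
        GetEnergy(); when that fails, ChoiceTrackingFace2() matches two elements and
        places the third from the stored template geometry (ptTempl/rTempl);
      - the new eye-to-mouth direction updates dbRotateAngle for the next frame.
*/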

int CV_CDECL CompareEnergy(const void* el1, const void* el2, void*)
{
    return ((CvTrackingRect*)el1)->iEnergy - ((CvTrackingRect*)el2)->iEnergy;
}// int CV_CDECL CompareEnergy(const void* el1, const void* el2, void*)

void CvFaceElement::FindRects(IplImage* img, IplImage* thresh, int nLayers, int dMinSize)
{
    FindContours(img, thresh, nLayers, dMinSize / 4);
    if (0 == m_seqRects->total)
        return;
    Energy();
    cvSeqSort(m_seqRects, CompareEnergy, NULL);
    CvTrackingRect* pR = (CvTrackingRect*)cvGetSeqElem(m_seqRects, 0);
    if (m_seqRects->total < 32)
    {
        MergeRects(dMinSize / 8);
        Energy();
        cvSeqSort(m_seqRects, CompareEnergy, NULL);
    }
    pR = (CvTrackingRect*)cvGetSeqElem(m_seqRects, 0);
    if ((pR->iEnergy > 100 && m_seqRects->total < 32) || (m_seqRects->total < 16))
    {
        MergeRects(dMinSize / 4);
        Energy();
        cvSeqSort(m_seqRects, CompareEnergy, NULL);
    }
    pR = (CvTrackingRect*)cvGetSeqElem(m_seqRects, 0);
    if ((pR->iEnergy > 100 && m_seqRects->total < 16) || (pR->iEnergy > 200 && m_seqRects->total < 32))
    {
        MergeRects(dMinSize / 2);
        Energy();
        cvSeqSort(m_seqRects, CompareEnergy, NULL);
    }

}// void CvFaceElement::FindRects(IplImage* img, IplImage* thresh, int nLayers, int dMinSize)

void CvFaceElement::FindContours(IplImage* img, IplImage* thresh, int nLayers, int dMinSize)
{
    CvSeq* seq;
    CvRect roi = m_rROI;
    Extend(roi, 1);
    cvSetImageROI(img, roi);
    cvSetImageROI(thresh, roi);
    // layers
    int colors[MAX_LAYERS] = {0};
    int iMinLevel = 0, iMaxLevel = 255;
    float step, power;
    ThresholdingParam(img, nLayers / 2, iMinLevel, iMaxLevel, step, power, 4);
    int iMinLevelPrev = iMinLevel;
    int iMaxLevelPrev = iMinLevel;
    if (m_trPrev.iColor != 0)
    {
        iMinLevelPrev = m_trPrev.iColor - nLayers / 2;
        iMaxLevelPrev = m_trPrev.iColor + nLayers / 2;
    }
    if (iMinLevelPrev < iMinLevel)
    {
        iMaxLevelPrev += iMinLevel - iMinLevelPrev;
        iMinLevelPrev = iMinLevel;
    }
    if (iMaxLevelPrev > iMaxLevel)
    {
        iMinLevelPrev -= iMaxLevelPrev - iMaxLevel;
        if (iMinLevelPrev < iMinLevel)
            iMinLevelPrev = iMinLevel;
        iMaxLevelPrev = iMaxLevel;
    }
    int n = nLayers;
    n -= (iMaxLevelPrev - iMinLevelPrev + 1) / 2;
    step = float(iMinLevelPrev - iMinLevel + iMaxLevel - iMaxLevelPrev) / float(n);
    int j = 0;
    float level;
    for (level = (float)iMinLevel; level < iMinLevelPrev && j < nLayers; level += step, j++)
        colors[j] = int(level + 0.5);
    for (level = (float)iMinLevelPrev; level < iMaxLevelPrev && j < nLayers; level += 2.0, j++)
        colors[j] = int(level + 0.5);
    for (level = (float)iMaxLevelPrev; level < iMaxLevel && j < nLayers; level += step, j++)
        colors[j] = int(level + 0.5);
    //
    for (int i = 0; i < nLayers; i++)
    {
        cvThreshold(img, thresh, colors[i], 255.0, CV_THRESH_BINARY);
        if (cvFindContours(thresh, m_mstgRects, &seq, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE))
        {
            CvTrackingRect cr;
            for (CvSeq* external = seq; external; external = external->h_next)
            {
                cr.r = cvContourBoundingRect(external);
                Move(cr.r, roi.x, roi.y);
                if (RectInRect(cr.r, m_rROI) && cr.r.width > dMinSize && cr.r.height > dMinSize)
                {
                    cr.ptCenter = Center(cr.r);
                    cr.iColor = colors[i];
                    cvSeqPush(m_seqRects, &cr);
                }
                for (CvSeq* internal = external->v_next; internal; internal = internal->h_next)
                {
                    cr.r = cvContourBoundingRect(internal);
                    Move(cr.r, roi.x, roi.y);
                    if (RectInRect(cr.r, m_rROI) && cr.r.width > dMinSize && cr.r.height > dMinSize)
                    {
                        cr.ptCenter = Center(cr.r);
                        cr.iColor = colors[i];
                        cvSeqPush(m_seqRects, &cr);
                    }
                }
            }
            cvClearSeq(seq);
        }
    }
    cvResetImageROI(img);
    cvResetImageROI(thresh);
}//void CvFaceElement::FindContours(IplImage* img, IplImage* thresh, int nLayers)

void CvFaceElement::MergeRects(int d)
{
    int nRects = m_seqRects->total;
    CvSeqReader reader, reader2;
    cvStartReadSeq( m_seqRects, &reader );
    int i, j;
    for (i = 0; i < nRects; i++)
    {
        CvTrackingRect* pRect1 = (CvTrackingRect*)(reader.ptr);
        cvStartReadSeq( m_seqRects, &reader2 );
        cvSetSeqReaderPos(&reader2, i + 1);
        for (j = i + 1; j < nRects; j++)
        {
            CvTrackingRect* pRect2 = (CvTrackingRect*)(reader2.ptr);
            if (abs(pRect1->ptCenter.y - pRect2->ptCenter.y) < d &&
                abs(pRect1->r.height - pRect2->r.height) < d)
            {
                CvTrackingRect rNew;
                rNew.iColor = (pRect1->iColor + pRect2->iColor + 1) / 2;
                rNew.r.x = min(pRect1->r.x, pRect2->r.x);
                rNew.r.y = min(pRect1->r.y, pRect2->r.y);
                rNew.r.width = max(pRect1->r.x + pRect1->r.width, pRect2->r.x + pRect2->r.width) - rNew.r.x;
                rNew.r.height = min(pRect1->r.y + pRect1->r.height, pRect2->r.y + pRect2->r.height) - rNew.r.y;
                if (rNew.r != pRect1->r && rNew.r != pRect2->r)
                {
                    rNew.ptCenter = Center(rNew.r);
                    cvSeqPush(m_seqRects, &rNew);
                }
            }
            CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader2 );
        }
        CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader );
    }
    // delete equal rects
    for (i = 0; i < m_seqRects->total; i++)
    {
        CvTrackingRect* pRect1 = (CvTrackingRect*)cvGetSeqElem(m_seqRects, i);
        int j_begin = i + 1;
        for (j = j_begin; j < m_seqRects->total;)
        {
            CvTrackingRect* pRect2 = (CvTrackingRect*)cvGetSeqElem(m_seqRects, j);
            if (pRect1->r == pRect2->r)
                cvSeqRemove(m_seqRects, j);
            else
                j++;
        }
    }

}//void CvFaceElement::MergeRects(int d)

void CvFaceElement::Energy()
{
    CvSeqReader reader, reader2;
    cvStartReadSeq( m_seqRects, &reader );
    for (int i = 0; i < m_seqRects->total; i++)
    {
        CvTrackingRect* pRect = (CvTrackingRect*)(reader.ptr);
        // outside and inside rects
        cvStartReadSeq( m_seqRects, &reader2 );
        for (int j = 0; j < m_seqRects->total; j++)
        {
            CvTrackingRect* pRect2 = (CvTrackingRect*)(reader2.ptr);
            if (i != j)
            {
                if (RectInRect(pRect2->r, pRect->r))
                    pRect->nRectsInThis ++;
                else if (pRect2->r.y + pRect2->r.height <= pRect->r.y)
                    pRect->nRectsOnTop ++;
                else if (pRect2->r.y >= pRect->r.y + pRect->r.height)
                    pRect->nRectsOnBottom ++;
                else if (pRect2->r.x + pRect2->r.width <= pRect->r.x)
                    pRect->nRectsOnLeft ++;
                else if (pRect2->r.x >= pRect->r.x + pRect->r.width)
                    pRect->nRectsOnRight ++;
            }
            CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader2 );
        }
        // energy
        pRect->Energy(m_trPrev);
        CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader );
    }
}//void CvFaceElement::Energy()

CV_IMPL CvFaceTracker*
cvInitFaceTracker(CvFaceTracker* pFaceTracker, const IplImage* imgGray, CvRect* pRects, int nRects)
{
    _ASSERT(NULL != imgGray);
    _ASSERT(NULL != pRects);
    _ASSERT(nRects >= NUM_FACE_ELEMENTS);
    if ((NULL == imgGray) ||
        (NULL == pRects) ||
        (nRects < NUM_FACE_ELEMENTS))
        return NULL;

    int new_face = FALSE;
    CvFaceTracker* pFace = pFaceTracker;
    if (NULL == pFace)
    {
        pFace = new CvFaceTracker;
        if (NULL == pFace)
            return NULL;
        new_face = TRUE;
    }
    pFace->Init(pRects, (IplImage*)imgGray);
    return pFace;
}//CvFaceTracker* cvInitFaceTracker(CvFaceTracker* pFaceTracker, const IplImage* imgGray, CvRect* pRects, int nRects)

CV_IMPL void
cvReleaseFaceTracker(CvFaceTracker** ppFaceTracker)
{
    if (NULL == *ppFaceTracker)
        return;
    delete *ppFaceTracker;
    *ppFaceTracker = NULL;
}//void cvReleaseFaceTracker(CvFaceTracker** ppFaceTracker)


CV_IMPL int
cvTrackFace(CvFaceTracker* pFaceTracker, IplImage* imgGray, CvRect* pRects, int nRects, CvPoint* ptRotate, double* dbAngleRotate)
{
    _ASSERT(NULL != pFaceTracker);
    _ASSERT(NULL != imgGray);
    _ASSERT(NULL != pRects && nRects >= NUM_FACE_ELEMENTS);
    if ((NULL == pFaceTracker) ||
        (NULL == imgGray))
        return FALSE;
    pFaceTracker->InitNextImage(imgGray);
    *ptRotate = pFaceTracker->ptRotate;
    *dbAngleRotate = pFaceTracker->dbRotateAngle;

    int nElements = 16;
    double dx = pFaceTracker->face[LEYE].ptCenter.x - pFaceTracker->face[REYE].ptCenter.x;
    double dy = pFaceTracker->face[LEYE].ptCenter.y - pFaceTracker->face[REYE].ptCenter.y;
    double d_eyes = sqrt(dx*dx + dy*dy);
    int d = cvRound(0.25 * d_eyes);
    int dMinSize = d;
    int nRestarts = 0;

    int elem;

    CvFaceElement big_face[NUM_FACE_ELEMENTS];
START:
    // init
    for (elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
    {
        CvRect r = pFaceTracker->face[elem].r;
        Extend(r, d);
        if (r.width < 4*d)
        {
            r.x -= (4*d - r.width) / 2;
            r.width += 4*d - r.width;
        }
        if (r.height < 3*d)
        {
            r.y -= (3*d - r.height) / 2;
            r.height += 3*d - r.height;
        }
        if (r.x < 1)
            r.x = 1;
        if (r.y < 1)
            r.y = 1;
        if (r.x + r.width > pFaceTracker->imgGray->width - 2)
            r.width = pFaceTracker->imgGray->width - 2 - r.x;
        if (r.y + r.height > pFaceTracker->imgGray->height - 2)
            r.height = pFaceTracker->imgGray->height - 2 - r.y;
        if (!big_face[elem].Init(r, pFaceTracker->face[elem], pFaceTracker->mstgContours))
            return FALSE;
    }
    // find contours
    for (elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
        big_face[elem].FindRects(pFaceTracker->imgGray, pFaceTracker->imgThresh, 32, dMinSize);
    // candidates
    CvTrackingRect new_face[NUM_FACE_ELEMENTS];
    int new_energy = 0;
    int found = ChoiceTrackingFace3(pFaceTracker, nElements, big_face, new_face, new_energy);
    int restart = FALSE;
    int find2 = FALSE;
    int noel = -1;
    if (found)
    {
        if (new_energy > 100000 && -1 != pFaceTracker->iTrackingFaceType)
            find2 = TRUE;
        else if (new_energy > 150000)
        {
            int elements = 0;
            for (int el = 0; el < NUM_FACE_ELEMENTS; el++)
            {
                if (big_face[el].m_seqRects->total > 16 || (big_face[el].m_seqRects->total > 8 && new_face[el].iEnergy < 100))
                    elements++;
                else
                    noel = el;
            }
            if (2 == elements)
                find2 = TRUE;
            else
                restart = TRUE;
        }
    }
    else
    {
        if (-1 != pFaceTracker->iTrackingFaceType)
            find2 = TRUE;
        else
            restart = TRUE;
    }
RESTART:
    if (restart)
    {
        if (nRestarts++ < 2)
        {
            d = d + d/4;
            goto START;
        }
    }
    else if (find2)
    {
        if (-1 != pFaceTracker->iTrackingFaceType)
            noel = pFaceTracker->iTrackingFaceType;
        int found2 = ChoiceTrackingFace2(pFaceTracker, nElements, big_face, new_face, new_energy, noel);
        if (found2 && new_energy < 100000)
        {
            pFaceTracker->iTrackingFaceType = noel;
            found = TRUE;
        }
        else
        {
            restart = TRUE;
            goto RESTART;
        }
    }

    if (found)
    {
        // angle by mouth & eyes
        double vx_prev = double(pFaceTracker->face[LEYE].ptCenter.x + pFaceTracker->face[REYE].ptCenter.x) / 2.0 - pFaceTracker->face[MOUTH].ptCenter.x;
        double vy_prev = double(pFaceTracker->face[LEYE].ptCenter.y + pFaceTracker->face[REYE].ptCenter.y) / 2.0 - pFaceTracker->face[MOUTH].ptCenter.y;
        double vx_prev1 = vx_prev * cos(pFaceTracker->dbRotateDelta) - vy_prev * sin(pFaceTracker->dbRotateDelta);
        double vy_prev1 = vx_prev * sin(pFaceTracker->dbRotateDelta) + vy_prev * cos(pFaceTracker->dbRotateDelta);
        vx_prev = vx_prev1;
        vy_prev = vy_prev1;
        for (elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
            pFaceTracker->face[elem] = new_face[elem];
        double vx = double(pFaceTracker->face[LEYE].ptCenter.x + pFaceTracker->face[REYE].ptCenter.x) / 2.0 - pFaceTracker->face[MOUTH].ptCenter.x;
        double vy = double(pFaceTracker->face[LEYE].ptCenter.y + pFaceTracker->face[REYE].ptCenter.y) / 2.0 - pFaceTracker->face[MOUTH].ptCenter.y;
        pFaceTracker->dbRotateDelta = 0;
        double n1_n2 = (vx * vx + vy * vy) * (vx_prev * vx_prev + vy_prev * vy_prev);
        if (n1_n2 != 0)
            pFaceTracker->dbRotateDelta = asin((vx * vy_prev - vx_prev * vy) / sqrt(n1_n2));
        pFaceTracker->dbRotateAngle -= pFaceTracker->dbRotateDelta;
    }
    else
    {
        pFaceTracker->dbRotateDelta = 0;
        pFaceTracker->dbRotateAngle = 0;
    }
    if ((pFaceTracker->dbRotateAngle >= pi/2 && pFaceTracker->dbRotateAngle > 0) ||
        (pFaceTracker->dbRotateAngle <= -pi/2 && pFaceTracker->dbRotateAngle < 0))
    {
        pFaceTracker->dbRotateDelta = 0;
        pFaceTracker->dbRotateAngle = 0;
        found = FALSE;
    }
    if (found)
    {
        for (int i = 0; i < NUM_FACE_ELEMENTS && i < nRects; i++)
            pRects[i] = pFaceTracker->face[i].r;
    }
    return found;
}//int cvTrackFace(CvFaceTracker* pFaceTracker, IplImage* imgGray, CvRect* pRects, int nRects, CvPoint* ptRotate, double* dbAngleRotate)

void ThresholdingParam(IplImage *imgGray, int iNumLayers, int &iMinLevel, int &iMaxLevel, float &step, float& power, int iHistMin /*= HIST_MIN*/)
{
    _ASSERT(imgGray != NULL);
    _ASSERT(imgGray->nChannels == 1);
    int i, j;
    // create histogram
    int histImg[256] = {0};
    uchar* buffImg = (uchar*)imgGray->imageData;
    CvRect rROI = cvGetImageROI(imgGray);
    buffImg += rROI.y * imgGray->widthStep + rROI.x;
    for (j = 0; j < rROI.height; j++)
    {
        for (i = 0; i < rROI.width; i++)
            histImg[buffImg[i]] ++;
        buffImg += imgGray->widthStep;
    }
    // params
    for (i = 0; i < 256; i++)
    {
        if (histImg[i] > iHistMin)
            break;
    }
    iMinLevel = i;
    for (i = 255; i >= 0; i--)
    {
        if (histImg[i] > iHistMin)
            break;
    }
    iMaxLevel = i;
    if (iMaxLevel <= iMinLevel)
    {
        iMaxLevel = 255;
        iMinLevel = 0;
    }
    // power
    double black = 1;
    double white = 1;
    for (i = iMinLevel; i < (iMinLevel + iMaxLevel) / 2; i++)
        black += histImg[i];
    for (i = (iMinLevel + iMaxLevel) / 2; i < iMaxLevel; i++)
        white += histImg[i];
    power = float(black) / float(2 * white);
    //
    step = float(iMaxLevel - iMinLevel) / float(iNumLayers);
    if (step < 1.0)
        step = 1.0;
}// void ThresholdingParam(IplImage *imgGray, int iNumLayers, int &iMinLevel, int &iMaxLevel, float &step, float& power, int iHistMin)

int ChoiceTrackingFace3(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy)
{
    CvTrackingRect* curr_face[NUM_FACE_ELEMENTS] = {NULL};
    CvTrackingRect* new_face[NUM_FACE_ELEMENTS] = {NULL};
    new_energy = 0x7fffffff;
    int curr_energy = 0x7fffffff;
    int found = FALSE;
    int N = 0;
    CvSeqReader reader_m, reader_l, reader_r;
    cvStartReadSeq( big_face[MOUTH].m_seqRects, &reader_m );
    for (int i_mouth = 0; i_mouth < big_face[MOUTH].m_seqRects->total && i_mouth < nElements; i_mouth++)
    {
        curr_face[MOUTH] = (CvTrackingRect*)(reader_m.ptr);
        cvStartReadSeq( big_face[LEYE].m_seqRects, &reader_l );
        for (int i_left = 0; i_left < big_face[LEYE].m_seqRects->total && i_left < nElements; i_left++)
        {
            curr_face[LEYE] = (CvTrackingRect*)(reader_l.ptr);
            if (curr_face[LEYE]->r.y + curr_face[LEYE]->r.height < curr_face[MOUTH]->r.y)
            {
                cvStartReadSeq( big_face[REYE].m_seqRects, &reader_r );
                for (int i_right = 0; i_right < big_face[REYE].m_seqRects->total && i_right < nElements; i_right++)
                {
                    curr_face[REYE] = (CvTrackingRect*)(reader_r.ptr);
                    if (curr_face[REYE]->r.y + curr_face[REYE]->r.height < curr_face[MOUTH]->r.y &&
                        curr_face[REYE]->r.x > curr_face[LEYE]->r.x + curr_face[LEYE]->r.width)
                    {
                        curr_energy = GetEnergy(curr_face, pTF->face, pTF->ptTempl, pTF->rTempl);
                        if (curr_energy < new_energy)
                        {
                            for (int elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
                                new_face[elem] = curr_face[elem];
                            new_energy = curr_energy;
                            found = TRUE;
                        }
                        N++;
                    }
                    CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader_r );
                }
            }
            CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader_l );
        }
        CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader_m );
    }
    if (found)
    {
        for (int elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
            face[elem] = *(new_face[elem]);
    }
    return found;
} // int ChoiceTrackingFace3(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy)

int ChoiceTrackingFace2(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy, int noel)
{
    int element[NUM_FACE_ELEMENTS];
    for (int i = 0, elem = 0; i < NUM_FACE_ELEMENTS; i++)
    {
        if (i != noel)
        {
            element[elem] = i;
            elem ++;
        }
        else
            element[2] = i;
    }
    CvTrackingRect* curr_face[NUM_FACE_ELEMENTS] = {NULL};
    CvTrackingRect* new_face[NUM_FACE_ELEMENTS] = {NULL};
    new_energy = 0x7fffffff;
    int curr_energy = 0x7fffffff;
    int found = FALSE;
    int N = 0;
    CvSeqReader reader0, reader1;
    cvStartReadSeq( big_face[element[0]].m_seqRects, &reader0 );
    for (int i0 = 0; i0 < big_face[element[0]].m_seqRects->total && i0 < nElements; i0++)
    {
        curr_face[element[0]] = (CvTrackingRect*)(reader0.ptr);
        cvStartReadSeq( big_face[element[1]].m_seqRects, &reader1 );
        for (int i1 = 0; i1 < big_face[element[1]].m_seqRects->total && i1 < nElements; i1++)
        {
            curr_face[element[1]] = (CvTrackingRect*)(reader1.ptr);
            curr_energy = GetEnergy2(curr_face, pTF->face, pTF->ptTempl, pTF->rTempl, element);
            if (curr_energy < new_energy)
            {
                for (int elem = 0; elem < NUM_FACE_ELEMENTS; elem++)
                    new_face[elem] = curr_face[elem];
                new_energy = curr_energy;
                found = TRUE;
            }
            N++;
            CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader1 );
        }
        CV_NEXT_SEQ_ELEM( sizeof(CvTrackingRect), reader0 );
    }
    if (found)
    {
        face[element[0]] = *(new_face[element[0]]);
        face[element[1]] = *(new_face[element[1]]);
        // find the 3rd element from the template
        CvPoint templ_v01 = {pTF->ptTempl[element[1]].x - pTF->ptTempl[element[0]].x, pTF->ptTempl[element[1]].y - pTF->ptTempl[element[0]].y};
        CvPoint templ_v02 = {pTF->ptTempl[element[2]].x - pTF->ptTempl[element[0]].x, pTF->ptTempl[element[2]].y - pTF->ptTempl[element[0]].y};
        CvPoint prev_v01 = {pTF->face[element[1]].ptCenter.x - pTF->face[element[0]].ptCenter.x, pTF->face[element[1]].ptCenter.y - pTF->face[element[0]].ptCenter.y};
        CvPoint prev_v02 = {pTF->face[element[2]].ptCenter.x - pTF->face[element[0]].ptCenter.x, pTF->face[element[2]].ptCenter.y - pTF->face[element[0]].ptCenter.y};
        CvPoint new_v01 = {new_face[element[1]]->ptCenter.x - new_face[element[0]]->ptCenter.x, new_face[element[1]]->ptCenter.y - new_face[element[0]]->ptCenter.y};
        double templ_d01 = sqrt((double)templ_v01.x*templ_v01.x + templ_v01.y*templ_v01.y);
        double templ_d02 = sqrt((double)templ_v02.x*templ_v02.x + templ_v02.y*templ_v02.y);
        double prev_d01 = sqrt((double)prev_v01.x*prev_v01.x + prev_v01.y*prev_v01.y);
        double prev_d02 = sqrt((double)prev_v02.x*prev_v02.x + prev_v02.y*prev_v02.y);
        double new_d01 = sqrt((double)new_v01.x*new_v01.x + new_v01.y*new_v01.y);
        double scale = templ_d01 / new_d01;
        double new_d02 = templ_d02 / scale;
        double sin_a = double(prev_v01.x * prev_v02.y - prev_v01.y * prev_v02.x) / (prev_d01 * prev_d02);
        double cos_a = cos(asin(sin_a));
        double x = double(new_v01.x) * cos_a - double(new_v01.y) * sin_a;
        double y = double(new_v01.x) * sin_a + double(new_v01.y) * cos_a;
        x = x * new_d02 / new_d01;
        y = y * new_d02 / new_d01;
        CvPoint new_v02 = {int(x + 0.5), int(y + 0.5)};
        face[element[2]].iColor = 0;
        face[element[2]].iEnergy = 0;
        face[element[2]].nRectsInThis = 0;
        face[element[2]].nRectsOnBottom = 0;
        face[element[2]].nRectsOnLeft = 0;
        face[element[2]].nRectsOnRight = 0;
        face[element[2]].nRectsOnTop = 0;
        face[element[2]].ptCenter.x = new_v02.x + new_face[element[0]]->ptCenter.x;
        face[element[2]].ptCenter.y = new_v02.y + new_face[element[0]]->ptCenter.y;
        face[element[2]].r.width = int(double(pTF->rTempl[element[2]].width) / (scale) + 0.5);
        face[element[2]].r.height = int(double(pTF->rTempl[element[2]].height) / (scale) + 0.5);
        face[element[2]].r.x = face[element[2]].ptCenter.x - (face[element[2]].r.width + 1) / 2;
        face[element[2]].r.y = face[element[2]].ptCenter.y - (face[element[2]].r.height + 1) / 2;
        _ASSERT(face[LEYE].r.x + face[LEYE].r.width <= face[REYE].r.x);
    }
    return found;
} // int ChoiceTrackingFace2(CvFaceTracker* pTF, const int nElements, const CvFaceElement* big_face, CvTrackingRect* face, int& new_energy, int noel)

inline int GetEnergy(CvTrackingRect** ppNew, const CvTrackingRect* pPrev, CvPoint* ptTempl, CvRect* rTempl)
{
    int energy = 0;
    CvPoint ptNew[NUM_FACE_ELEMENTS];
    CvPoint ptPrev[NUM_FACE_ELEMENTS];
    for (int i = 0; i < NUM_FACE_ELEMENTS; i++)
    {
        ptNew[i] = ppNew[i]->ptCenter;
        ptPrev[i] = pPrev[i].ptCenter;
        energy += ppNew[i]->iEnergy - 2 * ppNew[i]->nRectsInThis;
    }
    double dx = 0, dy = 0, scale = 1, rotate = 0;
    double e_templ = CalculateTransformationLMS3(ptTempl, ptNew, &scale, &rotate, &dx, &dy);
    double e_prev = CalculateTransformationLMS3_0(ptPrev, ptNew);
    double w_eye = double(ppNew[LEYE]->r.width + ppNew[REYE]->r.width) * scale / 2.0;
    double h_eye = double(ppNew[LEYE]->r.height + ppNew[REYE]->r.height) * scale / 2.0;
    double w_mouth = double(ppNew[MOUTH]->r.width) * scale;
    double h_mouth = double(ppNew[MOUTH]->r.height) * scale;
    energy +=
        int(512.0 * (e_prev + 16.0 * e_templ)) +
        4 * pow2(ppNew[LEYE]->r.width - ppNew[REYE]->r.width) +
        4 * pow2(ppNew[LEYE]->r.height - ppNew[REYE]->r.height) +
        4 * (int)pow(w_eye - double(rTempl[LEYE].width + rTempl[REYE].width) / 2.0, 2) +
        2 * (int)pow(h_eye - double(rTempl[LEYE].height + rTempl[REYE].height) / 2.0, 2) +
        1 * (int)pow(w_mouth - double(rTempl[MOUTH].width), 2) +
        1 * (int)pow(h_mouth - double(rTempl[MOUTH].height), 2) +
        0;
    return energy;
}

inline int GetEnergy2(CvTrackingRect** ppNew, const CvTrackingRect* pPrev, CvPoint* ptTempl, CvRect* rTempl, int* element)
{
    CvPoint new_v = {ppNew[element[0]]->ptCenter.x - ppNew[element[1]]->ptCenter.x,
                     ppNew[element[0]]->ptCenter.y - ppNew[element[1]]->ptCenter.y};
    CvPoint prev_v = {pPrev[element[0]].ptCenter.x - pPrev[element[1]].ptCenter.x,
                      pPrev[element[0]].ptCenter.y - pPrev[element[1]].ptCenter.y};
    double new_d = sqrt((double)new_v.x*new_v.x + new_v.y*new_v.y);
    double prev_d = sqrt((double)prev_v.x*prev_v.x + prev_v.y*prev_v.y);
    double dx = ptTempl[element[0]].x - ptTempl[element[1]].x;
    double dy = ptTempl[element[0]].y - ptTempl[element[1]].y;
    double templ_d = sqrt(dx*dx + dy*dy);
    double scale_templ = new_d / templ_d;
    double w0 = (double)ppNew[element[0]]->r.width * scale_templ;
    double h0 = (double)ppNew[element[0]]->r.height * scale_templ;
    double w1 = (double)ppNew[element[1]]->r.width * scale_templ;
    double h1 = (double)ppNew[element[1]]->r.height * scale_templ;

    int energy = ppNew[element[0]]->iEnergy + ppNew[element[1]]->iEnergy +
        - 2 * (ppNew[element[0]]->nRectsInThis - ppNew[element[1]]->nRectsInThis) +
        (int)pow(w0 - (double)rTempl[element[0]].width, 2) +
        (int)pow(h0 - (double)rTempl[element[0]].height, 2) +
        (int)pow(w1 - (double)rTempl[element[1]].width, 2) +
        (int)pow(h1 - (double)rTempl[element[1]].height, 2) +
        (int)pow(new_d - prev_d, 2) +
        0;

    return energy;
}
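
/*
    CalculateTransformationLMS3() below appears to implement the standard closed-form
    least-mean-squares fit of a similarity transform (scale s, rotation a, shift (tx, ty))
    mapping the three source points onto the three template points:

        xt ~ s*( xs*cos(a) + ys*sin(a) ) + tx
        yt ~ s*( ys*cos(a) - xs*sin(a) ) + ty

    Writing C(u,v) for the covariance of u and v over the three point pairs and
    V = C(xs,xs) + C(ys,ys), the quantities computed in the code are

        a  = atan2( C(xt,ys) - C(yt,xs), C(xt,xs) + C(yt,ys) )
        s  = ( (C(xt,xs) + C(yt,ys))*cos(a) + (C(xt,ys) - C(yt,xs))*sin(a) ) / V
        tx = mean(xt) - s*( mean(xs)*cos(a) + mean(ys)*sin(a) )
        ty = mean(yt) - s*( mean(ys)*cos(a) - mean(xs)*sin(a) )

    and the returned residual is

        LMS = C(xt,xt) + C(yt,yt)
              - ( (C(xt,xs) + C(yt,ys))^2 + (C(xt,ys) - C(yt,xs))^2 ) / V

    CalculateTransformationLMS3_0() computes only this residual.
*/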

inline double CalculateTransformationLMS3( CvPoint* pTemplPoints,
                                           CvPoint* pSrcPoints,
                                           double* pdbAverageScale,
                                           double* pdbAverageRotate,
                                           double* pdbAverageShiftX,
                                           double* pdbAverageShiftY )
{
    // double WS = 0;
    double dbAverageScale = 1;
    double dbAverageRotate = 0;
    double dbAverageShiftX = 0;
    double dbAverageShiftY = 0;
    double dbLMS = 0;

    _ASSERT( NULL != pTemplPoints);
    _ASSERT( NULL != pSrcPoints);

    double dbXt = double(pTemplPoints[0].x + pTemplPoints[1].x + pTemplPoints[2].x) / 3.0;
    double dbYt = double(pTemplPoints[0].y + pTemplPoints[1].y + pTemplPoints[2].y ) / 3.0;
    double dbXs = double(pSrcPoints[0].x + pSrcPoints[1].x + pSrcPoints[2].x) / 3.0;
    double dbYs = double(pSrcPoints[0].y + pSrcPoints[1].y + pSrcPoints[2].y) / 3.0;

    double dbXtXt = double(pow2(pTemplPoints[0].x) + pow2(pTemplPoints[1].x) + pow2(pTemplPoints[2].x)) / 3.0;
    double dbYtYt = double(pow2(pTemplPoints[0].y) + pow2(pTemplPoints[1].y) + pow2(pTemplPoints[2].y)) / 3.0;

    double dbXsXs = double(pow2(pSrcPoints[0].x) + pow2(pSrcPoints[1].x) + pow2(pSrcPoints[2].x)) / 3.0;
    double dbYsYs = double(pow2(pSrcPoints[0].y) + pow2(pSrcPoints[1].y) + pow2(pSrcPoints[2].y)) / 3.0;

    double dbXtXs = double(pTemplPoints[0].x * pSrcPoints[0].x +
                           pTemplPoints[1].x * pSrcPoints[1].x +
                           pTemplPoints[2].x * pSrcPoints[2].x) / 3.0;
    double dbYtYs = double(pTemplPoints[0].y * pSrcPoints[0].y +
                           pTemplPoints[1].y * pSrcPoints[1].y +
                           pTemplPoints[2].y * pSrcPoints[2].y) / 3.0;

    double dbXtYs = double(pTemplPoints[0].x * pSrcPoints[0].y +
                           pTemplPoints[1].x * pSrcPoints[1].y +
                           pTemplPoints[2].x * pSrcPoints[2].y) / 3.0;
    double dbYtXs = double(pTemplPoints[0].y * pSrcPoints[0].x +
                           pTemplPoints[1].y * pSrcPoints[1].x +
                           pTemplPoints[2].y * pSrcPoints[2].x ) / 3.0;

    dbXtXt -= dbXt * dbXt;
    dbYtYt -= dbYt * dbYt;

    dbXsXs -= dbXs * dbXs;
    dbYsYs -= dbYs * dbYs;

    dbXtXs -= dbXt * dbXs;
    dbYtYs -= dbYt * dbYs;

    dbXtYs -= dbXt * dbYs;
    dbYtXs -= dbYt * dbXs;

    dbAverageRotate = atan2( dbXtYs - dbYtXs, dbXtXs + dbYtYs );

    double cosR = cos(dbAverageRotate);
    double sinR = sin(dbAverageRotate);
    double del = dbXsXs + dbYsYs;
    if( del != 0 )
    {
        dbAverageScale = (double(dbXtXs + dbYtYs) * cosR + double(dbXtYs - dbYtXs) * sinR) / del;
        dbLMS = dbXtXt + dbYtYt - ((double)pow(dbXtXs + dbYtYs,2) + (double)pow(dbXtYs - dbYtXs,2)) / del;
    }

    dbAverageShiftX = double(dbXt) - dbAverageScale * (double(dbXs) * cosR + double(dbYs) * sinR);
    dbAverageShiftY = double(dbYt) - dbAverageScale * (double(dbYs) * cosR - double(dbXs) * sinR);

    if( pdbAverageScale != NULL ) *pdbAverageScale = dbAverageScale;
    if( pdbAverageRotate != NULL ) *pdbAverageRotate = dbAverageRotate;
    if( pdbAverageShiftX != NULL ) *pdbAverageShiftX = dbAverageShiftX;
    if( pdbAverageShiftY != NULL ) *pdbAverageShiftY = dbAverageShiftY;

    _ASSERT(dbLMS >= 0);
    return dbLMS;
}

inline double CalculateTransformationLMS3_0( CvPoint* pTemplPoints, CvPoint* pSrcPoints)
{
    double dbLMS = 0;

    _ASSERT( NULL != pTemplPoints);
    _ASSERT( NULL != pSrcPoints);

    double dbXt = double(pTemplPoints[0].x + pTemplPoints[1].x + pTemplPoints[2].x) / 3.0;
    double dbYt = double(pTemplPoints[0].y + pTemplPoints[1].y + pTemplPoints[2].y ) / 3.0;
    double dbXs = double(pSrcPoints[0].x + pSrcPoints[1].x + pSrcPoints[2].x) / 3.0;
    double dbYs = double(pSrcPoints[0].y + pSrcPoints[1].y + pSrcPoints[2].y) / 3.0;

    double dbXtXt = double(pow2(pTemplPoints[0].x) + pow2(pTemplPoints[1].x) + pow2(pTemplPoints[2].x)) / 3.0;
    double dbYtYt = double(pow2(pTemplPoints[0].y) + pow2(pTemplPoints[1].y) + pow2(pTemplPoints[2].y)) / 3.0;

    double dbXsXs = double(pow2(pSrcPoints[0].x) + pow2(pSrcPoints[1].x) + pow2(pSrcPoints[2].x)) / 3.0;
    double dbYsYs = double(pow2(pSrcPoints[0].y) + pow2(pSrcPoints[1].y) + pow2(pSrcPoints[2].y)) / 3.0;

    double dbXtXs = double(pTemplPoints[0].x * pSrcPoints[0].x +
                           pTemplPoints[1].x * pSrcPoints[1].x +
                           pTemplPoints[2].x * pSrcPoints[2].x) / 3.0;
    double dbYtYs = double(pTemplPoints[0].y * pSrcPoints[0].y +
                           pTemplPoints[1].y * pSrcPoints[1].y +
                           pTemplPoints[2].y * pSrcPoints[2].y) / 3.0;

    double dbXtYs = double(pTemplPoints[0].x * pSrcPoints[0].y +
                           pTemplPoints[1].x * pSrcPoints[1].y +
                           pTemplPoints[2].x * pSrcPoints[2].y) / 3.0;
    double dbYtXs = double(pTemplPoints[0].y * pSrcPoints[0].x +
                           pTemplPoints[1].y * pSrcPoints[1].x +
                           pTemplPoints[2].y * pSrcPoints[2].x ) / 3.0;

    dbXtXt -= dbXt * dbXt;
    dbYtYt -= dbYt * dbYt;

    dbXsXs -= dbXs * dbXs;
    dbYsYs -= dbYs * dbYs;

    dbXtXs -= dbXt * dbXs;
    dbYtYs -= dbYt * dbYs;

    dbXtYs -= dbXt * dbYs;
    dbYtXs -= dbYt * dbXs;

    double del = dbXsXs + dbYsYs;
    if( del != 0 )
        dbLMS = dbXtXt + dbYtYt - ((double)pow(dbXtXs + dbYtYs,2) + (double)pow(dbXtYs - dbYtXs,2)) / del;
    return dbLMS;
}
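
/*
    Typical use of the tracker API above (a sketch, not part of the library).
    The three input rectangles are assumed to be given in MOUTH, LEYE, REYE order,
    e.g. from a detector or marked in the first frame; grab_next_gray_frame() and the
    8-bit single-channel image imgFrame are hypothetical placeholders.

        CvRect rects[NUM_FACE_ELEMENTS];   // rects[MOUTH], rects[LEYE], rects[REYE]
        CvFaceTracker* pTracker = cvInitFaceTracker(NULL, imgFrame, rects, NUM_FACE_ELEMENTS);
        while (pTracker && grab_next_gray_frame(imgFrame))
        {
            CvPoint ptRotate;
            double dbAngle;
            if (cvTrackFace(pTracker, imgFrame, rects, NUM_FACE_ELEMENTS, &ptRotate, &dbAngle))
            {
                // rects[] now holds the updated mouth/eye rectangles in the internally
                // de-rotated working image; ptRotate (the mouth center) and dbAngle
                // report the rotation that was compensated for this frame.
            }
        }
        cvReleaseFaceTracker(&pTracker);
*/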