/* LzFindMt.c -- multithreaded Match finder for LZ algorithms
2014-12-29 : Igor Pavlov : Public domain */

#include "Precomp.h"

#include "LzHash.h"

#include "LzFindMt.h"

void MtSync_Construct(CMtSync *p)
{
  p->wasCreated = False;
  p->csWasInitialized = False;
  p->csWasEntered = False;
  Thread_Construct(&p->thread);
  Event_Construct(&p->canStart);
  Event_Construct(&p->wasStarted);
  Event_Construct(&p->wasStopped);
  Semaphore_Construct(&p->freeSemaphore);
  Semaphore_Construct(&p->filledSemaphore);
}

void MtSync_GetNextBlock(CMtSync *p)
{
  if (p->needStart)
  {
    p->numProcessedBlocks = 1;
    p->needStart = False;
    p->stopWriting = False;
    p->exit = False;
    Event_Reset(&p->wasStarted);
    Event_Reset(&p->wasStopped);

    Event_Set(&p->canStart);
    Event_Wait(&p->wasStarted);
  }
  else
  {
    CriticalSection_Leave(&p->cs);
    p->csWasEntered = False;
    p->numProcessedBlocks++;
    Semaphore_Release1(&p->freeSemaphore);
  }
  Semaphore_Wait(&p->filledSemaphore);
  CriticalSection_Enter(&p->cs);
  p->csWasEntered = True;
}

/* MtSync_StopWriting must be called if Writing was started */

void MtSync_StopWriting(CMtSync *p)
{
  UInt32 myNumBlocks = p->numProcessedBlocks;
  if (!Thread_WasCreated(&p->thread) || p->needStart)
    return;
  p->stopWriting = True;
  if (p->csWasEntered)
  {
    CriticalSection_Leave(&p->cs);
    p->csWasEntered = False;
  }
  Semaphore_Release1(&p->freeSemaphore);

  Event_Wait(&p->wasStopped);

  while (myNumBlocks++ != p->numProcessedBlocks)
  {
    Semaphore_Wait(&p->filledSemaphore);
    Semaphore_Release1(&p->freeSemaphore);
  }
  p->needStart = True;
}

void MtSync_Destruct(CMtSync *p)
{
  if (Thread_WasCreated(&p->thread))
  {
    MtSync_StopWriting(p);
    p->exit = True;
    if (p->needStart)
      Event_Set(&p->canStart);
    Thread_Wait(&p->thread);
    Thread_Close(&p->thread);
  }
  if (p->csWasInitialized)
  {
    CriticalSection_Delete(&p->cs);
    p->csWasInitialized = False;
  }

  Event_Close(&p->canStart);
  Event_Close(&p->wasStarted);
  Event_Close(&p->wasStopped);
  Semaphore_Close(&p->freeSemaphore);
  Semaphore_Close(&p->filledSemaphore);

  p->wasCreated = False;
}

#define RINOK_THREAD(x) { if ((x) != 0) return SZ_ERROR_THREAD; }

static SRes MtSync_Create2(CMtSync *p, THREAD_FUNC_TYPE startAddress, void *obj, UInt32 numBlocks)
{
  if (p->wasCreated)
    return SZ_OK;

  RINOK_THREAD(CriticalSection_Init(&p->cs));
  p->csWasInitialized = True;

  RINOK_THREAD(AutoResetEvent_CreateNotSignaled(&p->canStart));
  RINOK_THREAD(AutoResetEvent_CreateNotSignaled(&p->wasStarted));
  RINOK_THREAD(AutoResetEvent_CreateNotSignaled(&p->wasStopped));

  RINOK_THREAD(Semaphore_Create(&p->freeSemaphore, numBlocks, numBlocks));
  RINOK_THREAD(Semaphore_Create(&p->filledSemaphore, 0, numBlocks));

  p->needStart = True;

  RINOK_THREAD(Thread_Create(&p->thread, startAddress, obj));
  p->wasCreated = True;
  return SZ_OK;
}

static SRes MtSync_Create(CMtSync *p, THREAD_FUNC_TYPE startAddress, void *obj, UInt32 numBlocks)
{
  SRes res = MtSync_Create2(p, startAddress, obj, numBlocks);
  if (res != SZ_OK)
    MtSync_Destruct(p);
  return res;
}

void MtSync_Init(CMtSync *p) { p->needStart = True; }

#define kMtMaxValForNormalize 0xFFFFFFFF
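
/*
  Annotation (not part of the original code): the DEF_GetHeads macros below
  generate the GetHeads2 / GetHeads3 / GetHeads4 / GetHeads4b routines used by
  the hash thread.  For each of numHeads positions they hash the next bytes,
  store the distance to the previous position with the same hash value
  (pos - hash[value]) into *heads, and record the current position in the
  hash table.  As an illustration, DEF_GetHeads(3, ...) expands to roughly:

    static void GetHeads3(const Byte *p, UInt32 pos, UInt32 *hash,
        UInt32 hashMask, UInt32 *heads, UInt32 numHeads, const UInt32 *crc)
    {
      for (; numHeads != 0; numHeads--)
      {
        const UInt32 value = (crc[p[0]] ^ p[1] ^ ((UInt32)p[2] << 8)) & hashMask;
        p++;
        *heads++ = pos - hash[value];
        hash[value] = pos++;
      }
    }
*/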

#define DEF_GetHeads2(name, v, action) \
static void GetHeads ## name(const Byte *p, UInt32 pos, \
UInt32 *hash, UInt32 hashMask, UInt32 *heads, UInt32 numHeads, const UInt32 *crc) \
{ action; for (; numHeads != 0; numHeads--) { \
const UInt32 value = (v); p++; *heads++ = pos - hash[value]; hash[value] = pos++; } }

#define DEF_GetHeads(name, v) DEF_GetHeads2(name, v, ;)

DEF_GetHeads2(2, (p[0] | ((UInt32)p[1] << 8)), hashMask = hashMask; crc = crc; )
DEF_GetHeads(3, (crc[p[0]] ^ p[1] ^ ((UInt32)p[2] << 8)) & hashMask)
DEF_GetHeads(4, (crc[p[0]] ^ p[1] ^ ((UInt32)p[2] << 8) ^ (crc[p[3]] << 5)) & hashMask)
DEF_GetHeads(4b, (crc[p[0]] ^ p[1] ^ ((UInt32)p[2] << 8) ^ ((UInt32)p[3] << 16)) & hashMask)
/* DEF_GetHeads(5, (crc[p[0]] ^ p[1] ^ ((UInt32)p[2] << 8) ^ (crc[p[3]] << 5) ^ (crc[p[4]] << 3)) & hashMask) */

void HashThreadFunc(CMatchFinderMt *mt)
{
  CMtSync *p = &mt->hashSync;
  for (;;)
  {
    UInt32 numProcessedBlocks = 0;
    Event_Wait(&p->canStart);
    Event_Set(&p->wasStarted);
    for (;;)
    {
      if (p->exit)
        return;
      if (p->stopWriting)
      {
        p->numProcessedBlocks = numProcessedBlocks;
        Event_Set(&p->wasStopped);
        break;
      }

      {
        CMatchFinder *mf = mt->MatchFinder;
        if (MatchFinder_NeedMove(mf))
        {
          CriticalSection_Enter(&mt->btSync.cs);
          CriticalSection_Enter(&mt->hashSync.cs);
          {
            const Byte *beforePtr = MatchFinder_GetPointerToCurrentPos(mf);
            const Byte *afterPtr;
            MatchFinder_MoveBlock(mf);
            afterPtr = MatchFinder_GetPointerToCurrentPos(mf);
            mt->pointerToCurPos -= beforePtr - afterPtr;
            mt->buffer -= beforePtr - afterPtr;
          }
          CriticalSection_Leave(&mt->btSync.cs);
          CriticalSection_Leave(&mt->hashSync.cs);
          continue;
        }

        Semaphore_Wait(&p->freeSemaphore);

        MatchFinder_ReadIfRequired(mf);
        if (mf->pos > (kMtMaxValForNormalize - kMtHashBlockSize))
        {
          UInt32 subValue = (mf->pos - mf->historySize - 1);
          MatchFinder_ReduceOffsets(mf, subValue);
          MatchFinder_Normalize3(subValue, mf->hash + mf->fixedHashSize, mf->hashMask + 1);
        }
        {
          UInt32 *heads = mt->hashBuf + ((numProcessedBlocks++) & kMtHashNumBlocksMask) * kMtHashBlockSize;
          UInt32 num = mf->streamPos - mf->pos;
          heads[0] = 2;
          heads[1] = num;
          if (num >= mf->numHashBytes)
          {
            num = num - mf->numHashBytes + 1;
            if (num > kMtHashBlockSize - 2)
              num = kMtHashBlockSize - 2;
            mt->GetHeadsFunc(mf->buffer, mf->pos, mf->hash + mf->fixedHashSize, mf->hashMask, heads + 2, num, mf->crc);
            heads[0] += num;
          }
          mf->pos += num;
          mf->buffer += num;
        }
      }

      Semaphore_Release1(&p->filledSemaphore);
    }
  }
}

void MatchFinderMt_GetNextBlock_Hash(CMatchFinderMt *p)
{
  MtSync_GetNextBlock(&p->hashSync);
  p->hashBufPosLimit = p->hashBufPos = ((p->hashSync.numProcessedBlocks - 1) & kMtHashNumBlocksMask) * kMtHashBlockSize;
  p->hashBufPosLimit += p->hashBuf[p->hashBufPos++];
  p->hashNumAvail = p->hashBuf[p->hashBufPos++];
}

#define kEmptyHashValue 0
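
/*
  Annotation (not part of the original code): GetMatchesSpecN below is a
  batched variant of GetMatchesSpec1 (LzFind.c); it is compiled and used only
  when MFMT_GM_INLINE is defined.  For up to `size` consecutive positions it
  walks the binary tree in son[], writing for each position a count word
  followed by (length, distance - 1) pairs into _distances, and decreases
  `limit` by the number of UInt32 values written.  It returns the remaining
  limit and stores the final position in *posRes.  The default build keeps
  MFMT_GM_INLINE disabled and uses the GetMatchesSpec1 loop in BtGetMatches
  instead.
*/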

/* #define MFMT_GM_INLINE */

#ifdef MFMT_GM_INLINE

#define NO_INLINE MY_FAST_CALL

Int32 NO_INLINE GetMatchesSpecN(UInt32 lenLimit, UInt32 pos, const Byte *cur, CLzRef *son,
    UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 _cutValue,
    UInt32 *_distances, UInt32 _maxLen, const UInt32 *hash, Int32 limit, UInt32 size, UInt32 *posRes)
{
  do
  {
    UInt32 *distances = _distances + 1;
    UInt32 curMatch = pos - *hash++;

    CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
    CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
    UInt32 len0 = 0, len1 = 0;
    UInt32 cutValue = _cutValue;
    UInt32 maxLen = _maxLen;
    for (;;)
    {
      UInt32 delta = pos - curMatch;
      if (cutValue-- == 0 || delta >= _cyclicBufferSize)
      {
        *ptr0 = *ptr1 = kEmptyHashValue;
        break;
      }
      {
        CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
        const Byte *pb = cur - delta;
        UInt32 len = (len0 < len1 ? len0 : len1);
        if (pb[len] == cur[len])
        {
          if (++len != lenLimit && pb[len] == cur[len])
            while (++len != lenLimit)
              if (pb[len] != cur[len])
                break;
          if (maxLen < len)
          {
            *distances++ = maxLen = len;
            *distances++ = delta - 1;
            if (len == lenLimit)
            {
              *ptr1 = pair[0];
              *ptr0 = pair[1];
              break;
            }
          }
        }
        if (pb[len] < cur[len])
        {
          *ptr1 = curMatch;
          ptr1 = pair + 1;
          curMatch = *ptr1;
          len1 = len;
        }
        else
        {
          *ptr0 = curMatch;
          ptr0 = pair;
          curMatch = *ptr0;
          len0 = len;
        }
      }
    }
    pos++;
    _cyclicBufferPos++;
    cur++;
    {
      UInt32 num = (UInt32)(distances - _distances);
      *_distances = num - 1;
      _distances += num;
      limit -= num;
    }
  }
  while (limit > 0 && --size != 0);
  *posRes = pos;
  return limit;
}

#endif

void BtGetMatches(CMatchFinderMt *p, UInt32 *distances)
{
  UInt32 numProcessed = 0;
  UInt32 curPos = 2;
  UInt32 limit = kMtBtBlockSize - (p->matchMaxLen * 2);
  distances[1] = p->hashNumAvail;
  while (curPos < limit)
  {
    if (p->hashBufPos == p->hashBufPosLimit)
    {
      MatchFinderMt_GetNextBlock_Hash(p);
      distances[1] = numProcessed + p->hashNumAvail;
      if (p->hashNumAvail >= p->numHashBytes)
        continue;
      for (; p->hashNumAvail != 0; p->hashNumAvail--)
        distances[curPos++] = 0;
      break;
    }
    {
      UInt32 size = p->hashBufPosLimit - p->hashBufPos;
      UInt32 lenLimit = p->matchMaxLen;
      UInt32 pos = p->pos;
      UInt32 cyclicBufferPos = p->cyclicBufferPos;
      if (lenLimit >= p->hashNumAvail)
        lenLimit = p->hashNumAvail;
      {
        UInt32 size2 = p->hashNumAvail - lenLimit + 1;
        if (size2 < size)
          size = size2;
        size2 = p->cyclicBufferSize - cyclicBufferPos;
        if (size2 < size)
          size = size2;
      }
      #ifndef MFMT_GM_INLINE
      while (curPos < limit && size-- != 0)
      {
        UInt32 *startDistances = distances + curPos;
        UInt32 num = (UInt32)(GetMatchesSpec1(lenLimit, pos - p->hashBuf[p->hashBufPos++],
            pos, p->buffer, p->son, cyclicBufferPos, p->cyclicBufferSize, p->cutValue,
            startDistances + 1, p->numHashBytes - 1) - startDistances);
        *startDistances = num - 1;
        curPos += num;
        cyclicBufferPos++;
        pos++;
        p->buffer++;
      }
      #else
      {
        UInt32 posRes;
        curPos = limit - GetMatchesSpecN(lenLimit, pos, p->buffer, p->son, cyclicBufferPos, p->cyclicBufferSize, p->cutValue,
            distances + curPos, p->numHashBytes - 1, p->hashBuf + p->hashBufPos, (Int32)(limit - curPos), size, &posRes);
        p->hashBufPos += posRes - pos;
        cyclicBufferPos += posRes - pos;
        p->buffer += posRes - pos;
        pos = posRes;
      }
      #endif

      numProcessed += pos - p->pos;
      p->hashNumAvail -= pos - p->pos;
      p->pos = pos;
      if (cyclicBufferPos == p->cyclicBufferSize)
        cyclicBufferPos = 0;
      p->cyclicBufferPos = cyclicBufferPos;
    }
  }
  distances[0] = curPos;
}
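
/*
  Annotation (not part of the original code): layout of one bt block written
  by BtGetMatches into btBuf, as read back by MatchFinderMt_GetNextBlock_Bt
  and MatchFinderMt_GetMatches:

    word 0 : number of UInt32 words used in the block, including these two
             header words (stored from curPos)
    word 1 : number of input bytes available at the first position of the block
    then, for each processed position, one record: a count word equal to
             2 * (number of matches), followed by that many words as
             (length, distance - 1) pairs; positions with fewer than
             numHashBytes bytes left get a 0 count word
*/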

void BtFillBlock(CMatchFinderMt *p, UInt32 globalBlockIndex)
{
  CMtSync *sync = &p->hashSync;
  if (!sync->needStart)
  {
    CriticalSection_Enter(&sync->cs);
    sync->csWasEntered = True;
  }

  BtGetMatches(p, p->btBuf + (globalBlockIndex & kMtBtNumBlocksMask) * kMtBtBlockSize);

  if (p->pos > kMtMaxValForNormalize - kMtBtBlockSize)
  {
    UInt32 subValue = p->pos - p->cyclicBufferSize;
    MatchFinder_Normalize3(subValue, p->son, p->cyclicBufferSize * 2);
    p->pos -= subValue;
  }

  if (!sync->needStart)
  {
    CriticalSection_Leave(&sync->cs);
    sync->csWasEntered = False;
  }
}

void BtThreadFunc(CMatchFinderMt *mt)
{
  CMtSync *p = &mt->btSync;
  for (;;)
  {
    UInt32 blockIndex = 0;
    Event_Wait(&p->canStart);
    Event_Set(&p->wasStarted);
    for (;;)
    {
      if (p->exit)
        return;
      if (p->stopWriting)
      {
        p->numProcessedBlocks = blockIndex;
        MtSync_StopWriting(&mt->hashSync);
        Event_Set(&p->wasStopped);
        break;
      }
      Semaphore_Wait(&p->freeSemaphore);
      BtFillBlock(mt, blockIndex++);
      Semaphore_Release1(&p->filledSemaphore);
    }
  }
}

void MatchFinderMt_Construct(CMatchFinderMt *p)
{
  p->hashBuf = 0;
  MtSync_Construct(&p->hashSync);
  MtSync_Construct(&p->btSync);
}

void MatchFinderMt_FreeMem(CMatchFinderMt *p, ISzAlloc *alloc)
{
  alloc->Free(alloc, p->hashBuf);
  p->hashBuf = 0;
}

void MatchFinderMt_Destruct(CMatchFinderMt *p, ISzAlloc *alloc)
{
  MtSync_Destruct(&p->hashSync);
  MtSync_Destruct(&p->btSync);
  MatchFinderMt_FreeMem(p, alloc);
}

#define kHashBufferSize (kMtHashBlockSize * kMtHashNumBlocks)
#define kBtBufferSize (kMtBtBlockSize * kMtBtNumBlocks)

static THREAD_FUNC_RET_TYPE THREAD_FUNC_CALL_TYPE HashThreadFunc2(void *p) { HashThreadFunc((CMatchFinderMt *)p); return 0; }
static THREAD_FUNC_RET_TYPE THREAD_FUNC_CALL_TYPE BtThreadFunc2(void *p)
{
  Byte allocaDummy[0x180];
  allocaDummy[0] = 0;
  allocaDummy[1] = allocaDummy[0];
  BtThreadFunc((CMatchFinderMt *)p);
  return 0;
}

SRes MatchFinderMt_Create(CMatchFinderMt *p, UInt32 historySize, UInt32 keepAddBufferBefore,
    UInt32 matchMaxLen, UInt32 keepAddBufferAfter, ISzAlloc *alloc)
{
  CMatchFinder *mf = p->MatchFinder;
  p->historySize = historySize;
  if (kMtBtBlockSize <= matchMaxLen * 4)
    return SZ_ERROR_PARAM;
  if (p->hashBuf == 0)
  {
    p->hashBuf = (UInt32 *)alloc->Alloc(alloc, (kHashBufferSize + kBtBufferSize) * sizeof(UInt32));
    if (p->hashBuf == 0)
      return SZ_ERROR_MEM;
    p->btBuf = p->hashBuf + kHashBufferSize;
  }
  keepAddBufferBefore += (kHashBufferSize + kBtBufferSize);
  keepAddBufferAfter += kMtHashBlockSize;
  if (!MatchFinder_Create(mf, historySize, keepAddBufferBefore, matchMaxLen, keepAddBufferAfter, alloc))
    return SZ_ERROR_MEM;

  RINOK(MtSync_Create(&p->hashSync, HashThreadFunc2, p, kMtHashNumBlocks));
  RINOK(MtSync_Create(&p->btSync, BtThreadFunc2, p, kMtBtNumBlocks));
  return SZ_OK;
}
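
/*
  Annotation (not part of the original code): a rough usage sketch, based on
  how LzmaEnc typically drives this match finder.  The local names and the
  parameter values are illustrative; constructing the inner CMatchFinder and
  attaching its input stream is the caller's job, and the inner CMatchFinder
  is freed separately (MatchFinder_Free):

    CMatchFinder base;      // constructed with MatchFinder_Construct, stream set by caller
    CMatchFinderMt mt;
    IMatchFinder vt;
    MatchFinderMt_Construct(&mt);
    mt.MatchFinder = &base;
    if (MatchFinderMt_Create(&mt, historySize, keepAddBufferBefore,
        matchMaxLen, keepAddBufferAfter, alloc) == SZ_OK)
    {
      MatchFinderMt_CreateVTable(&mt, &vt);
      vt.Init(&mt);
      // encoding loop: vt.GetMatches(&mt, distances) / vt.Skip(&mt, num) ...
      MatchFinderMt_ReleaseStream(&mt);  // stops the worker threads' writing
    }
    MatchFinderMt_Destruct(&mt, alloc);
*/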

/* Call it after ReleaseStream / SetStream */
void MatchFinderMt_Init(CMatchFinderMt *p)
{
  CMatchFinder *mf = p->MatchFinder;
  p->btBufPos = p->btBufPosLimit = 0;
  p->hashBufPos = p->hashBufPosLimit = 0;
  MatchFinder_Init(mf);
  p->pointerToCurPos = MatchFinder_GetPointerToCurrentPos(mf);
  p->btNumAvailBytes = 0;
  p->lzPos = p->historySize + 1;

  p->hash = mf->hash;
  p->fixedHashSize = mf->fixedHashSize;
  p->crc = mf->crc;

  p->son = mf->son;
  p->matchMaxLen = mf->matchMaxLen;
  p->numHashBytes = mf->numHashBytes;
  p->pos = mf->pos;
  p->buffer = mf->buffer;
  p->cyclicBufferPos = mf->cyclicBufferPos;
  p->cyclicBufferSize = mf->cyclicBufferSize;
  p->cutValue = mf->cutValue;
}

/* ReleaseStream is required to finish multithreading */
void MatchFinderMt_ReleaseStream(CMatchFinderMt *p)
{
  MtSync_StopWriting(&p->btSync);
  /* p->MatchFinder->ReleaseStream(); */
}

void MatchFinderMt_Normalize(CMatchFinderMt *p)
{
  MatchFinder_Normalize3(p->lzPos - p->historySize - 1, p->hash, p->fixedHashSize);
  p->lzPos = p->historySize + 1;
}

void MatchFinderMt_GetNextBlock_Bt(CMatchFinderMt *p)
{
  UInt32 blockIndex;
  MtSync_GetNextBlock(&p->btSync);
  blockIndex = ((p->btSync.numProcessedBlocks - 1) & kMtBtNumBlocksMask);
  p->btBufPosLimit = p->btBufPos = blockIndex * kMtBtBlockSize;
  p->btBufPosLimit += p->btBuf[p->btBufPos++];
  p->btNumAvailBytes = p->btBuf[p->btBufPos++];
  if (p->lzPos >= kMtMaxValForNormalize - kMtBtBlockSize)
    MatchFinderMt_Normalize(p);
}

const Byte * MatchFinderMt_GetPointerToCurrentPos(CMatchFinderMt *p)
{
  return p->pointerToCurPos;
}

#define GET_NEXT_BLOCK_IF_REQUIRED if (p->btBufPos == p->btBufPosLimit) MatchFinderMt_GetNextBlock_Bt(p);

UInt32 MatchFinderMt_GetNumAvailableBytes(CMatchFinderMt *p)
{
  GET_NEXT_BLOCK_IF_REQUIRED;
  return p->btNumAvailBytes;
}

Byte MatchFinderMt_GetIndexByte(CMatchFinderMt *p, Int32 index)
{
  return p->pointerToCurPos[index];
}
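
/*
  Annotation (not part of the original code): the bt thread only reports
  matches found through the numHashBytes-byte hash.  MixMatches2 / MixMatches3
  run on the LZ thread and keep separate 2-byte (and 3-byte) hash tables at
  the start of p->hash, so that shorter matches can be emitted in front of the
  bt matches.  They append (length, distance - 1) pairs to *distances for
  candidates at positions >= matchMinPos and return the advanced pointer.
*/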

UInt32 * MixMatches2(CMatchFinderMt *p, UInt32 matchMinPos, UInt32 *distances)
{
  UInt32 hash2Value, curMatch2;
  UInt32 *hash = p->hash;
  const Byte *cur = p->pointerToCurPos;
  UInt32 lzPos = p->lzPos;
  MT_HASH2_CALC

  curMatch2 = hash[hash2Value];
  hash[hash2Value] = lzPos;

  if (curMatch2 >= matchMinPos)
    if (cur[(ptrdiff_t)curMatch2 - lzPos] == cur[0])
    {
      *distances++ = 2;
      *distances++ = lzPos - curMatch2 - 1;
    }
  return distances;
}

UInt32 * MixMatches3(CMatchFinderMt *p, UInt32 matchMinPos, UInt32 *distances)
{
  UInt32 hash2Value, hash3Value, curMatch2, curMatch3;
  UInt32 *hash = p->hash;
  const Byte *cur = p->pointerToCurPos;
  UInt32 lzPos = p->lzPos;
  MT_HASH3_CALC

  curMatch2 = hash[ hash2Value];
  curMatch3 = hash[kFix3HashSize + hash3Value];

  hash[ hash2Value] =
  hash[kFix3HashSize + hash3Value] =
    lzPos;

  if (curMatch2 >= matchMinPos && cur[(ptrdiff_t)curMatch2 - lzPos] == cur[0])
  {
    distances[1] = lzPos - curMatch2 - 1;
    if (cur[(ptrdiff_t)curMatch2 - lzPos + 2] == cur[2])
    {
      distances[0] = 3;
      return distances + 2;
    }
    distances[0] = 2;
    distances += 2;
  }
  if (curMatch3 >= matchMinPos && cur[(ptrdiff_t)curMatch3 - lzPos] == cur[0])
  {
    *distances++ = 3;
    *distances++ = lzPos - curMatch3 - 1;
  }
  return distances;
}

/*
UInt32 *MixMatches4(CMatchFinderMt *p, UInt32 matchMinPos, UInt32 *distances)
{
  UInt32 hash2Value, hash3Value, hash4Value, curMatch2, curMatch3, curMatch4;
  UInt32 *hash = p->hash;
  const Byte *cur = p->pointerToCurPos;
  UInt32 lzPos = p->lzPos;
  MT_HASH4_CALC

  curMatch2 = hash[ hash2Value];
  curMatch3 = hash[kFix3HashSize + hash3Value];
  curMatch4 = hash[kFix4HashSize + hash4Value];

  hash[ hash2Value] =
  hash[kFix3HashSize + hash3Value] =
  hash[kFix4HashSize + hash4Value] =
    lzPos;

  if (curMatch2 >= matchMinPos && cur[(ptrdiff_t)curMatch2 - lzPos] == cur[0])
  {
    distances[1] = lzPos - curMatch2 - 1;
    if (cur[(ptrdiff_t)curMatch2 - lzPos + 2] == cur[2])
    {
      distances[0] = (cur[(ptrdiff_t)curMatch2 - lzPos + 3] == cur[3]) ? 4 : 3;
      return distances + 2;
    }
    distances[0] = 2;
    distances += 2;
  }
  if (curMatch3 >= matchMinPos && cur[(ptrdiff_t)curMatch3 - lzPos] == cur[0])
  {
    distances[1] = lzPos - curMatch3 - 1;
    if (cur[(ptrdiff_t)curMatch3 - lzPos + 3] == cur[3])
    {
      distances[0] = 4;
      return distances + 2;
    }
    distances[0] = 3;
    distances += 2;
  }

  if (curMatch4 >= matchMinPos)
    if (
      cur[(ptrdiff_t)curMatch4 - lzPos] == cur[0] &&
      cur[(ptrdiff_t)curMatch4 - lzPos + 3] == cur[3]
      )
    {
      *distances++ = 4;
      *distances++ = lzPos - curMatch4 - 1;
    }
  return distances;
}
*/

#define INCREASE_LZ_POS p->lzPos++; p->pointerToCurPos++;

UInt32 MatchFinderMt2_GetMatches(CMatchFinderMt *p, UInt32 *distances)
{
  const UInt32 *btBuf = p->btBuf + p->btBufPos;
  UInt32 len = *btBuf++;
  p->btBufPos += 1 + len;
  p->btNumAvailBytes--;
  {
    UInt32 i;
    for (i = 0; i < len; i += 2)
    {
      *distances++ = *btBuf++;
      *distances++ = *btBuf++;
    }
  }
  INCREASE_LZ_POS
  return len;
}

UInt32 MatchFinderMt_GetMatches(CMatchFinderMt *p, UInt32 *distances)
{
  const UInt32 *btBuf = p->btBuf + p->btBufPos;
  UInt32 len = *btBuf++;
  p->btBufPos += 1 + len;

  if (len == 0)
  {
    if (p->btNumAvailBytes-- >= 4)
      len = (UInt32)(p->MixMatchesFunc(p, p->lzPos - p->historySize, distances) - (distances));
  }
  else
  {
    /* Condition: there are matches in btBuf with length < p->numHashBytes */
    UInt32 *distances2;
    p->btNumAvailBytes--;
    distances2 = p->MixMatchesFunc(p, p->lzPos - btBuf[1], distances);
    do
    {
      *distances2++ = *btBuf++;
      *distances2++ = *btBuf++;
    }
    while ((len -= 2) != 0);
    len = (UInt32)(distances2 - (distances));
  }
  INCREASE_LZ_POS
  return len;
}

#define SKIP_HEADER2_MT do { GET_NEXT_BLOCK_IF_REQUIRED
#define SKIP_HEADER_MT(n) SKIP_HEADER2_MT if (p->btNumAvailBytes-- >= (n)) { const Byte *cur = p->pointerToCurPos; UInt32 *hash = p->hash;
#define SKIP_FOOTER_MT } INCREASE_LZ_POS p->btBufPos += p->btBuf[p->btBufPos] + 1; } while (--num != 0);

void MatchFinderMt0_Skip(CMatchFinderMt *p, UInt32 num)
{
  SKIP_HEADER2_MT { p->btNumAvailBytes--;
  SKIP_FOOTER_MT
}

void MatchFinderMt2_Skip(CMatchFinderMt *p, UInt32 num)
{
  SKIP_HEADER_MT(2)
      UInt32 hash2Value;
      MT_HASH2_CALC
      hash[hash2Value] = p->lzPos;
  SKIP_FOOTER_MT
}

void MatchFinderMt3_Skip(CMatchFinderMt *p, UInt32 num)
{
  SKIP_HEADER_MT(3)
      UInt32 hash2Value, hash3Value;
      MT_HASH3_CALC
      hash[kFix3HashSize + hash3Value] =
      hash[ hash2Value] =
        p->lzPos;
  SKIP_FOOTER_MT
}

/*
void MatchFinderMt4_Skip(CMatchFinderMt *p, UInt32 num)
{
  SKIP_HEADER_MT(4)
      UInt32 hash2Value, hash3Value, hash4Value;
      MT_HASH4_CALC
      hash[kFix4HashSize + hash4Value] =
      hash[kFix3HashSize + hash3Value] =
      hash[ hash2Value] =
        p->lzPos;
  SKIP_FOOTER_MT
}
*/
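
/*
  Annotation (not part of the original code): MatchFinderMt_CreateVTable
  selects the worker and mixer functions from numHashBytes:
  2 -> GetHeads2 with no mixer (bt matches are used directly),
  3 -> GetHeads3 + MixMatches2,
  otherwise (4) -> GetHeads4, or GetHeads4b when bigHash is set, + MixMatches3.
*/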

void MatchFinderMt_CreateVTable(CMatchFinderMt *p, IMatchFinder *vTable)
{
  vTable->Init = (Mf_Init_Func)MatchFinderMt_Init;
  vTable->GetIndexByte = (Mf_GetIndexByte_Func)MatchFinderMt_GetIndexByte;
  vTable->GetNumAvailableBytes = (Mf_GetNumAvailableBytes_Func)MatchFinderMt_GetNumAvailableBytes;
  vTable->GetPointerToCurrentPos = (Mf_GetPointerToCurrentPos_Func)MatchFinderMt_GetPointerToCurrentPos;
  vTable->GetMatches = (Mf_GetMatches_Func)MatchFinderMt_GetMatches;
  switch(p->MatchFinder->numHashBytes)
  {
    case 2:
      p->GetHeadsFunc = GetHeads2;
      p->MixMatchesFunc = (Mf_Mix_Matches)0;
      vTable->Skip = (Mf_Skip_Func)MatchFinderMt0_Skip;
      vTable->GetMatches = (Mf_GetMatches_Func)MatchFinderMt2_GetMatches;
      break;
    case 3:
      p->GetHeadsFunc = GetHeads3;
      p->MixMatchesFunc = (Mf_Mix_Matches)MixMatches2;
      vTable->Skip = (Mf_Skip_Func)MatchFinderMt2_Skip;
      break;
    default:
    /* case 4: */
      p->GetHeadsFunc = p->MatchFinder->bigHash ? GetHeads4b : GetHeads4;
      /* p->GetHeadsFunc = GetHeads4; */
      p->MixMatchesFunc = (Mf_Mix_Matches)MixMatches3;
      vTable->Skip = (Mf_Skip_Func)MatchFinderMt3_Skip;
      break;
    /*
    default:
      p->GetHeadsFunc = GetHeads5;
      p->MixMatchesFunc = (Mf_Mix_Matches)MixMatches4;
      vTable->Skip = (Mf_Skip_Func)MatchFinderMt4_Skip;
      break;
    */
  }
}