Lines matching +full:4 +full:- +full:byte
Free-text search hits from zstd's lazy match finder (lib/compress/zstd_lazy.c), grouped by enclosing function.

From the file license header:

     * This source code is licensed under both the BSD-style license (found in the
     * You may select, at your option, one of the above-listed licenses.

/*-*************************************
*  Binary Tree search
***************************************/

in ZSTD_updateDUBT():

                    const BYTE* ip, const BYTE* iend,
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const hashTable = ms->hashTable;
    U32  const hashLog = cParams->hashLog;
    U32* const bt = ms->chainTable;
    U32  const btLog  = cParams->chainLog - 1;
    U32  const btMask = (1 << btLog) - 1;
    const BYTE* const base = ms->window.base;
    U32 const target = (U32)(ip - base);
    U32 idx = ms->nextToUpdate;
                idx, target, ms->window.dictLimit);
    assert(idx >= ms->window.dictLimit);   /* condition for valid base+idx */
    ms->nextToUpdate = target;

in ZSTD_insertDUBT1():

     * assumption : curr >= btlow == (curr - btmask)
                    U32 curr, const BYTE* inputEnd,
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const bt = ms->chainTable;
    U32  const btLog  = cParams->chainLog - 1;
    U32  const btMask = (1 << btLog) - 1;
    const BYTE* const base = ms->window.base;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const ip = (curr>=dictLimit) ? base + curr : dictBase + curr;
    const BYTE* const iend = (curr>=dictLimit) ? inputEnd : dictBase + dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* match;
    U32 const windowValid = ms->window.lowLimit;
    U32 const maxDistance = 1U << cParams->windowLog;
    U32 const windowLow = (curr - windowValid > maxDistance) ? curr - maxDistance : windowValid;
    for (; nbCompares && (matchIndex > windowLow); --nbCompares) {
        const BYTE* const mBase = ( (dictMode != ZSTD_extDict)

in ZSTD_DUBT_findBetterDictMatch():

                    const BYTE* const ip, const BYTE* const iend,
    const ZSTD_matchState_t * const dms = ms->dictMatchState;
    const ZSTD_compressionParameters* const dmsCParams = &dms->cParams;
    const U32 * const dictHashTable = dms->hashTable;
    U32         const hashLog = dmsCParams->hashLog;
    const BYTE* const base = ms->window.base;
    const BYTE* const prefixStart = base + ms->window.dictLimit;
    U32         const curr = (U32)(ip-base);
    const BYTE* const dictBase = dms->window.base;
    const BYTE* const dictEnd = dms->window.nextSrc;
    U32         const dictHighLimit = (U32)(dms->window.nextSrc - dms->window.base);
    U32         const dictLowLimit = dms->window.lowLimit;
    U32         const dictIndexDelta = ms->window.lowLimit - dictHighLimit;
    U32*        const dictBt = dms->chainTable;
    U32         const btLog  = dmsCParams->chainLog - 1;
    U32         const btMask = (1 << btLog) - 1;
    U32         const btLow = (btMask >= dictHighLimit - dictLowLimit) ? dictLowLimit : dictHighLimit - btMask;
    for (; nbCompares && (dictMatchIndex > dictLowLimit); --nbCompares) {
        const BYTE* match = dictBase + dictMatchIndex;
        if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)*offsetPtr)) ) {
            DEBUGLOG(9, "ZSTD_DUBT_findBetterDictMatch(%u) : found better match length %u -> %u and offsetCode %u -> %u (dictMatchIndex %u, matchIndex %u)",
                curr, (U32)bestLength, (U32)matchLength, (U32)*offsetPtr, STORE_OFFSET(curr - matchIndex), dictMatchIndex, matchIndex);
            bestLength = matchLength, *offsetPtr = STORE_OFFSET(curr - matchIndex);
    U32 const mIndex = curr - (U32)STORED_OFFSET(*offsetPtr); (void)mIndex;

in ZSTD_DUBT_findBestMatch():

                    const BYTE* const ip, const BYTE* const iend,
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32*   const hashTable = ms->hashTable;
    U32    const hashLog = cParams->hashLog;
    const BYTE* const base = ms->window.base;
    U32    const curr = (U32)(ip-base);
    U32    const windowLow = ZSTD_getLowestMatchIndex(ms, curr, cParams->windowLog);
    U32*   const bt = ms->chainTable;
    U32    const btLog  = cParams->chainLog - 1;
    U32    const btMask = (1 << btLog) - 1;
    U32    const btLow = (btMask >= curr) ? 0 : curr - btMask;
    U32    nbCompares = 1U << cParams->searchLog;
    assert(ip <= iend-8);   /* required for h calculation */
    nbCandidates --;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    for (; nbCompares && (matchIndex > windowLow); --nbCompares) {
        const BYTE* match;
        if (matchLength > matchEndIdx - matchIndex)
        if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)*offsetPtr)) )
            bestLength = matchLength, *offsetPtr = STORE_OFFSET(curr - matchIndex);
    ms->nextToUpdate = matchEndIdx - 8;   /* skip repetitive patterns */
    U32 const mIndex = curr - (U32)STORED_OFFSET(*offsetPtr); (void)mIndex;

in ZSTD_BtFindBestMatch():

                    const BYTE* const ip, const BYTE* const iLimit,
    if (ip < ms->window.base + ms->nextToUpdate) return 0;   /* skipped area */

in ZSTD_dedicatedDictSearch_lazy_loadDictionary():

    void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip)
    const BYTE* const base = ms->window.base;
    U32 const target = (U32)(ip - base);
    U32* const hashTable = ms->hashTable;
    U32* const chainTable = ms->chainTable;
    U32 const chainSize = 1 << ms->cParams.chainLog;
    U32 idx = ms->nextToUpdate;
    U32 const minChain = chainSize < target - idx ? target - chainSize : idx;
    U32 const cacheSize = bucketSize - 1;
    U32 const chainAttempts = (1 << ms->cParams.searchLog) - cacheSize;
    U32 const hashLog = ms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG;
    U32 const tmpChainSize = (U32)((1 << ZSTD_LAZY_DDSS_BUCKET_LOG) - 1) << hashLog;
    U32 const tmpMinChain = tmpChainSize < target ? target - tmpChainSize : idx;
    assert(ms->cParams.chainLog <= 24);
    assert(ms->cParams.hashLog > ms->cParams.chainLog);
    U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch);
    tmpChainTable[idx - tmpMinChain] = hashTable[h];
    i = tmpChainTable[i - tmpMinChain];
    i = tmpChainTable[i - tmpMinChain];
    tmpHashTable[hashIdx] = ((chainPos - count) << 8) + count;
    U32 const bucketIdx = --hashIdx << ZSTD_LAZY_DDSS_BUCKET_LOG;
    hashTable[bucketIdx + bucketSize - 1] = chainPackedPointer;
    for (idx = ms->nextToUpdate; idx < target; idx++) {
        U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch)
        for (i = cacheSize - 1; i; i--)
            hashTable[h + i] = hashTable[h + i - 1];
    ms->nextToUpdate = target;
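
Aside: the `((chainPos - count) << 8) + count` store above packs each bucket's spill-over chain as `(chainStart << 8) | chainLength`, with the length held in the low byte. A minimal self-contained sketch of that packing, with illustrative names (not zstd API):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helpers mirroring the packed "chain pointer" format:
     * high 24 bits = index of the first chain entry, low 8 bits = chain length. */
    static uint32_t pack_chain(uint32_t chainStart, uint32_t count) {
        assert(count <= 0xFF);            /* length must fit in the low byte */
        return (chainStart << 8) + count;
    }
    static uint32_t chain_start(uint32_t packed)  { return packed >> 8; }
    static uint32_t chain_length(uint32_t packed) { return packed & 0xFF; }

    int main(void) {
        uint32_t const packed = pack_chain(12345, 7);
        printf("start=%u len=%u\n", chain_start(packed), chain_length(packed));
        assert(chain_start(packed) == 12345 && chain_length(packed) == 7);
        return 0;
    }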

in ZSTD_dedicatedDictSearch_lazy_search():

                    const BYTE* const ip, const BYTE* const iLimit,
                    const BYTE* const prefixStart, const U32 curr,
    const U32 ddsLowestIndex  = dms->window.dictLimit;
    const BYTE* const ddsBase = dms->window.base;
    const BYTE* const ddsEnd  = dms->window.nextSrc;
    const U32 ddsSize         = (U32)(ddsEnd - ddsBase);
    const U32 ddsIndexDelta   = dictLimit - ddsSize;
    const U32 bucketLimit     = nbAttempts < bucketSize - 1 ? nbAttempts : bucketSize - 1;
    for (ddsAttempt = 0; ddsAttempt < bucketSize - 1; ddsAttempt++) {
        PREFETCH_L1(ddsBase + dms->hashTable[ddsIdx + ddsAttempt]);
    U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1];
    PREFETCH_L1(&dms->chainTable[chainIndex]);
    const BYTE* match;
    matchIndex = dms->hashTable[ddsIdx + ddsAttempt];
    assert(match+4 <= ddsEnd);
    /* assumption : matchIndex <= dictLimit-4 (by table construction) */
    currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, ddsEnd, prefixStart) + 4;
    *offsetPtr = STORE_OFFSET(curr - (matchIndex + ddsIndexDelta));
    U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1];
    U32 const chainAttempts = nbAttempts - ddsAttempt;
    PREFETCH_L1(ddsBase + dms->chainTable[chainIndex + chainAttempt]);
    const BYTE* match;
    matchIndex = dms->chainTable[chainIndex];
    assert(match+4 <= ddsEnd);
    /* assumption : matchIndex <= dictLimit-4 (by table construction) */
    currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, ddsEnd, prefixStart) + 4;
    *offsetPtr = STORE_OFFSET(curr - (matchIndex + ddsIndexDelta));

in ZSTD_insertAndFindFirstIndex_internal():

                    const BYTE* ip, U32 const mls)
    U32* const hashTable  = ms->hashTable;
    const U32 hashLog = cParams->hashLog;
    U32* const chainTable = ms->chainTable;
    const U32 chainMask = (1 << cParams->chainLog) - 1;
    const BYTE* const base = ms->window.base;
    const U32 target = (U32)(ip - base);
    U32 idx = ms->nextToUpdate;
    ms->nextToUpdate = target;

in ZSTD_insertAndFindFirstIndex():

    U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) {
        const ZSTD_compressionParameters* const cParams = &ms->cParams;
        return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch);
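
For orientation, the hash-chain scheme these fragments implement keeps the most recent position for each hash in `hashTable` and threads older positions through `chainTable`, indexed by `position & chainMask`. A freestanding sketch under those assumptions (simplified types, not the zstd internals):

    #include <stdint.h>

    #define CHAIN_LOG  16
    #define CHAIN_MASK ((1u << CHAIN_LOG) - 1)

    /* Simplified hash-chain insert: record `pos` as the new head for `hash`,
     * linking the previous head into the chain table. Returns the old head. */
    uint32_t hc_insert(uint32_t* hashTable, uint32_t* chainTable,
                       uint32_t hash, uint32_t pos) {
        uint32_t const head = hashTable[hash];
        chainTable[pos & CHAIN_MASK] = head;   /* link back to previous occurrence */
        hashTable[hash] = pos;                 /* newest position becomes the head */
        return head;
    }

    /* Walk at most `nbAttempts` links, newest candidate first. */
    uint32_t hc_walk(const uint32_t* chainTable, uint32_t head,
                     uint32_t lowLimit, uint32_t nbAttempts) {
        uint32_t matchIndex = head;
        for (; (matchIndex > lowLimit) && nbAttempts; nbAttempts--) {
            /* ...compare the data at matchIndex against the current position... */
            matchIndex = chainTable[matchIndex & CHAIN_MASK];
        }
        return matchIndex;
    }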

in ZSTD_HcFindBestMatch():

                    const BYTE* const ip, const BYTE* const iLimit,
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const chainTable = ms->chainTable;
    const U32 chainSize = (1 << cParams->chainLog);
    const U32 chainMask = chainSize-1;
    const BYTE* const base = ms->window.base;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const U32 curr = (U32)(ip-base);
    const U32 maxDistance = 1U << cParams->windowLog;
    const U32 lowestValid = ms->window.lowLimit;
    const U32 withinMaxDistance = (curr - lowestValid > maxDistance) ? curr - maxDistance : lowestValid;
    const U32 isDictionary = (ms->loadedDictEnd != 0);
    const U32 minChain = curr > chainSize ? curr - chainSize : 0;
    U32 nbAttempts = 1U << cParams->searchLog;
    size_t ml=4-1;
    const ZSTD_matchState_t* const dms = ms->dictMatchState;
                       ? dms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG : 0;
    const U32* entry = &dms->hashTable[ddsIdx];
    for ( ; (matchIndex>=lowLimit) & (nbAttempts>0) ; nbAttempts--) {
        const BYTE* const match = base + matchIndex;
        const BYTE* const match = dictBase + matchIndex;
        assert(match+4 <= dictEnd);
        if (MEM_read32(match) == MEM_read32(ip))   /* assumption : matchIndex <= dictLimit-4 (by table construction) */
            currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dictEnd, prefixStart) + 4;
        *offsetPtr = STORE_OFFSET(curr - matchIndex);
    const U32* const dmsChainTable = dms->chainTable;
    const U32 dmsChainSize         = (1 << dms->cParams.chainLog);
    const U32 dmsChainMask         = dmsChainSize - 1;
    const U32 dmsLowestIndex       = dms->window.dictLimit;
    const BYTE* const dmsBase      = dms->window.base;
    const BYTE* const dmsEnd       = dms->window.nextSrc;
    const U32 dmsSize              = (U32)(dmsEnd - dmsBase);
    const U32 dmsIndexDelta        = dictLimit - dmsSize;
    const U32 dmsMinChain = dmsSize > dmsChainSize ? dmsSize - dmsChainSize : 0;
    matchIndex = dms->hashTable[ZSTD_hashPtr(ip, dms->cParams.hashLog, mls)];
    for ( ; (matchIndex>=dmsLowestIndex) & (nbAttempts>0) ; nbAttempts--) {
        const BYTE* const match = dmsBase + matchIndex;
        assert(match+4 <= dmsEnd);
        if (MEM_read32(match) == MEM_read32(ip))   /* assumption : matchIndex <= dictLimit-4 (by table construction) */
            currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dmsEnd, prefixStart) + 4;
        *offsetPtr = STORE_OFFSET(curr - (matchIndex + dmsIndexDelta));

/* *********************************
*  (SIMD) Row-based matchfinder
***********************************/
/* Constants for row-based hash */
#define ZSTD_ROW_HASH_TAG_OFFSET 16     /* byte offset of hashes in the match state's tagTable from the beginning of a row */
#define ZSTD_ROW_HASH_TAG_MASK ((1u << ZSTD_ROW_HASH_TAG_BITS) - 1)
#define ZSTD_ROW_HASH_CACHE_MASK (ZSTD_ROW_HASH_CACHE_SIZE - 1)
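
Putting those constants together: each position's hash carries ZSTD_ROW_HASH_TAG_BITS of extra entropy in its low bits; the low bits become the 1-byte "tag" stored in the tagTable and the high bits select the row. A hedged sketch of the split (standalone; the tag-bit count of 8 is an assumption here):

    #include <stdint.h>

    #define ROW_HASH_TAG_BITS 8                       /* stands in for ZSTD_ROW_HASH_TAG_BITS */
    #define ROW_HASH_TAG_MASK ((1u << ROW_HASH_TAG_BITS) - 1)

    /* Split a hash into (row index, tag). rowLog is log2 of entries per row. */
    void split_hash(uint32_t hash, uint32_t rowLog,
                    uint32_t* relRow, uint8_t* tag) {
        /* high bits: which row of the hash table (each row holds 1<<rowLog U32s) */
        *relRow = (hash >> ROW_HASH_TAG_BITS) << rowLog;
        /* low bits: the 1-byte short hash stored in the tag table */
        *tag = (uint8_t)(hash & ROW_HASH_TAG_MASK);
    }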

in ZSTD_VecMask_next():

     * Starting from the LSB, returns the idx of the next non-zero bit.
    # if (defined(__GNUC__) && ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))
    if (sizeof(size_t) == 4) {
        /* and: https://stackoverflow.com/questions/2709430/count-number-of-bits-in-a-64-bit-long-big-integer */
        val = ~val & (val - 1ULL);   /* Lowest set bit mask */
        val = val - ((val >> 1) & 0x5555555555555555);
        return (U32)((((val + (val >> 4)) & 0xF0F0F0F0F0F0F0FULL) * 0x101010101010101ULL) >> 56);
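
The fragment above is the portable fallback of ZSTD_VecMask_next: `~val & (val - 1)` turns everything strictly below the lowest set bit into ones and clears the rest, so a SWAR population count of that mask yields the bit's index. A self-contained rendering of the trick (my own framing, not the exact zstd code):

    #include <assert.h>
    #include <stdint.h>

    /* Index of the lowest set bit of a nonzero 64-bit value, without intrinsics. */
    uint32_t lowest_set_bit(uint64_t val) {
        assert(val != 0);
        val = ~val & (val - 1);   /* ones strictly below the lowest set bit */
        /* SWAR popcount of `val` */
        val = val - ((val >> 1) & 0x5555555555555555ULL);
        val = (val & 0x3333333333333333ULL) + ((val >> 2) & 0x3333333333333333ULL);
        return (uint32_t)((((val + (val >> 4)) & 0x0F0F0F0F0F0F0F0FULL)
                           * 0x0101010101010101ULL) >> 56);
    }
    /* e.g. lowest_set_bit(0x8) == 3: the mask below bit 3 has popcount 3. */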

in ZSTD_rotateRight_U64() / _U32() / _U16():

    return (value >> count) | (U64)(value << ((0U - count) & 0x3F));
    return (value >> count) | (U32)(value << ((0U - count) & 0x1F));
    return (value >> count) | (U16)(value << ((0U - count) & 0x0F));
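
Note the `(0U - count) & 0x3F` (and 0x1F/0x0F) form: it rotates by `count` while keeping the left-shift amount in range even when `count` is 0, where a plain `value << (64 - count)` would shift by the full width and invoke undefined behavior. A tiny illustration:

    #include <assert.h>
    #include <stdint.h>

    uint64_t rotr64(uint64_t value, uint32_t count) {
        assert(count < 64);
        /* rotr64(x, 0) == x: the mask makes the left shift 0 instead of 64 */
        return (value >> count) | (value << ((0U - count) & 0x3F));
    }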

in ZSTD_row_nextIndex():

    FORCE_INLINE_TEMPLATE U32 ZSTD_row_nextIndex(BYTE* const tagRow, U32 const rowMask) {
        U32 const next = (*tagRow - 1) & rowMask;
        *tagRow = (BYTE)next;

in ZSTD_isAligned():

    assert((align & (align - 1)) == 0);
    return (((size_t)ptr) & (align - 1)) == 0;

in ZSTD_row_prefetch():

    /* Note: prefetching more of the hash table does not appear to be beneficial for 128-entry rows */
    assert(rowLog == 4 || rowLog == 5 || rowLog == 6);
    assert(ZSTD_isAligned(hashTable + relRow, 64));   /* prefetched hash row always 64-byte aligned */

in ZSTD_row_fillHashCache():

    FORCE_INLINE_TEMPLATE void ZSTD_row_fillHashCache(ZSTD_matchState_t* ms, const BYTE* base,
                                                      U32 idx, const BYTE* const iLimit)
    U32 const* const hashTable = ms->hashTable;
    U16 const* const tagTable = ms->tagTable;
    U32 const hashLog = ms->rowHashLog;
    U32 const maxElemsToPrefetch = (base + idx) > iLimit ? 0 : (U32)(iLimit - (base + idx) + 1);
    ms->hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK] = hash;
    DEBUGLOG(6, "ZSTD_row_fillHashCache(): [%u %u %u %u %u %u %u %u]", ms->hashCache[0], ms->hashCache[1],
             ms->hashCache[2], ms->hashCache[3], ms->hashCache[4],
             ms->hashCache[5], ms->hashCache[6], ms->hashCache[7]);

in ZSTD_row_nextCachedHash():

     * Returns the hash of base + idx, and replaces the hash in the hash cache with the byte at
     * base + idx + ZSTD_ROW_HASH_CACHE_SIZE.
                    U16 const* tagTable, BYTE const* base,
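
In other words, the cache is a small ring of ZSTD_ROW_HASH_CACHE_SIZE precomputed hashes: the caller consumes the hash for `idx`, and the function refills that slot with the hash for `idx + ZSTD_ROW_HASH_CACHE_SIZE`, so the row that hash selects can be prefetched well ahead of use. A minimal sketch of the ring discipline (prefetch elided; the hasher is a toy stand-in, not ZSTD_hashPtr):

    #include <stdint.h>

    #define CACHE_SIZE 8                    /* stands in for ZSTD_ROW_HASH_CACHE_SIZE */
    #define CACHE_MASK (CACHE_SIZE - 1)

    static uint32_t hash_position(const uint8_t* base, uint32_t idx) {
        return base[idx] * 2654435761u;     /* toy stand-in for the real hash */
    }

    /* Return the cached hash for `idx`; recycle the slot for idx + CACHE_SIZE. */
    uint32_t next_cached_hash(uint32_t* cache, const uint8_t* base, uint32_t idx) {
        uint32_t const newHash = hash_position(base, idx + CACHE_SIZE);
        /* ...prefetch the hashTable/tagTable row selected by newHash here... */
        uint32_t const hash = cache[idx & CACHE_MASK];
        cache[idx & CACHE_MASK] = newHash;
        return hash;
    }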

in ZSTD_row_update_internalImpl():

    U32* const hashTable = ms->hashTable;
    U16* const tagTable = ms->tagTable;
    U32 const hashLog = ms->rowHashLog;
    const BYTE* const base = ms->window.base;
    U32 const hash = useCache ? ZSTD_row_nextCachedHash(ms->hashCache, hashTable, tagTable, base, updateStartIdx, hashLog, rowLog, mls)
                              : (U32)ZSTD_hashPtr(base + updateStartIdx, hashLog + ZSTD_ROW_HASH_TAG_BITS, mls);
    BYTE* tagRow = (BYTE*)(tagTable + relRow);   /* Though tagTable is laid out as a table of U16, each tag is only 1 byte.
                                                    Explicit cast allows us to get exact desired position within each row */
    ((BYTE*)tagRow)[pos + ZSTD_ROW_HASH_TAG_OFFSET] = hash & ZSTD_ROW_HASH_TAG_MASK;

in ZSTD_row_update_internal():

     * Inserts the byte at ip into the appropriate position in the hash table, and updates ms->nextToUpdate.
    FORCE_INLINE_TEMPLATE void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip,
    U32 idx = ms->nextToUpdate;
    const BYTE* const base = ms->window.base;
    const U32 target = (U32)(ip - base);
    if (UNLIKELY(target - idx > kSkipThreshold)) {
        idx = target - kMaxMatchEndPositionsToUpdate;
    ms->nextToUpdate = target;
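
The two constants visible in the fragment implement update-skipping: when nextToUpdate has fallen more than kSkipThreshold positions behind (typically after a long match on repetitive data), only the last kMaxMatchEndPositionsToUpdate positions are inserted rather than the whole gap. A condensed sketch of the heuristic; the constant values here are assumptions, not taken from the source:

    /* Hypothetical values; the real ones live in ZSTD_row_update_internal(). */
    enum { kSkipThreshold = 384, kMaxMatchEndPositionsToUpdate = 96 };

    unsigned clamp_update_start(unsigned idx, unsigned target) {
        /* Inserting every position of a very long match is wasted work on
         * repetitive data; keep only the most recent positions. */
        if (target - idx > kSkipThreshold)
            idx = target - kMaxMatchEndPositionsToUpdate;
        return idx;
    }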

in ZSTD_row_update():

    void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) {
        const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6);
        const U32 rowMask = (1u << rowLog) - 1;
        const U32 mls = MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */);

in ZSTD_row_getSSEMask():

    ZSTD_row_getSSEMask(int nbChunks, const BYTE* const src, const BYTE tag, const U32 head)
    int matches[4] = {0};
    assert(nbChunks == 1 || nbChunks == 2 || nbChunks == 4);
    assert(nbChunks == 4);

/* Returns a ZSTD_VecMask (U32) that has the nth bit set to 1 if the newly-computed "tag" matches
 * the hash at the nth position in a row of the tagTable. */

in ZSTD_row_getMatchMask():

    ZSTD_row_getMatchMask(const BYTE* const tagRow, const BYTE tag, const U32 head, const U32 rowEntries)
    const BYTE* const src = tagRow + ZSTD_ROW_HASH_TAG_OFFSET;
    #else /* SW or NEON-LE */
    /* This NEON path only works for little endian - otherwise use SWAR below */
    const uint8x8_t  t4 = vsri_n_u8(t3.val[1], t3.val[0], 4);
    const uint8x16_t t3 = vsriq_n_u8(t2, t2, 4);
    const uint8x8_t  t4 = vshrn_n_u16(vreinterpretq_u16_u8(t3), 4);
    const size_t shiftAmount = ((chunkSize * 8) - chunkSize);
    int i = rowEntries - chunkSize;
    assert((sizeof(size_t) == 4) || (sizeof(size_t) == 8));
    chunk = (((chunk | x80) - x01) | chunk) & x80;
    i -= chunkSize;
    chunk = (((chunk | x80) - x01) | chunk) & x80;
    i -= chunkSize;
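
The scalar (SW) path above is a SWAR byte-equality scan: XOR the row's tags with the splatted tag so matching bytes become zero, then `(((chunk | x80) - x01) | chunk) & x80` leaves 0x80 set in exactly the nonzero bytes, so the zero (matching) bytes can be extracted into a bitmask. A standalone demonstration of the zero-byte detection step:

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Return a mask with 0x80 set in every byte of `chunk` that equals `tag`. */
    static uint64_t bytes_equal(uint64_t chunk, uint8_t tag) {
        uint64_t const x01 = 0x0101010101010101ULL;
        uint64_t const x80 = 0x8080808080808080ULL;
        chunk ^= tag * x01;                             /* matching bytes -> 0x00 */
        chunk = (((chunk | x80) - x01) | chunk) & x80;  /* 0x80 iff byte nonzero  */
        return ~chunk & x80;                            /* 0x80 iff byte matched  */
    }

    int main(void) {
        uint64_t row;
        uint8_t const tags[8] = { 7, 3, 7, 0, 255, 7, 1, 2 };
        uint8_t out[8];
        memcpy(&row, tags, sizeof row);
        { uint64_t const m = bytes_equal(row, 7);
          memcpy(out, &m, sizeof m);
          /* positions 0, 2 and 5 held tag 7; lane independence holds because
           * every byte of (chunk | x80) is >= 0x80, so no borrow crosses lanes */
          assert(out[0] == 0x80 && out[2] == 0x80 && out[5] == 0x80);
          assert(out[1] == 0 && out[3] == 0 && out[4] == 0 && out[6] == 0 && out[7] == 0);
        }
        return 0;
    }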

/* The high-level approach of the SIMD row based match finder is as follows:
 * - Figure out where to insert the new entry:
 *      - Generate a hash from a byte along with an additional 1-byte "short hash". The additional byte is our "tag".
 *      - The hashTable is effectively split into groups or "rows" of 16 or 32 entries of U32, and the
 *        hash determines which row to insert into.
 *      - Determine the correct position within the row to insert the entry into. Each row of 16 or 32
 *        entries acts as a circular buffer.
 *      - Also insert the "tag" into the equivalent row and position in the tagTable.
 *          - Note: The tagTable has 17 or 33 1-byte entries per row, due to 16 or 32 tags, and 1 "head" entry.
 * - Use SIMD to efficiently compare the tags in the tagTable to the 1-byte "short hash" and
 *   generate a bitfield of candidate positions to check.
 * - Pick the longest match. (A condensed sketch of this insert+search flow follows.)
 */
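
As a compact illustration of that flow, here is a schematic (non-zstd) row search: the hash selects the row, the tag is compared against every tag in the row (shown as a scalar loop where the real code uses SIMD/SWAR), and the new entry overwrites the slot named by the row's head counter. Simplified sketch only, not the library's API:

    #include <stdint.h>

    #define ROW_LOG  4
    #define ROW_SIZE (1u << ROW_LOG)        /* 16 entries per row */

    typedef struct {
        uint32_t entries[ROW_SIZE];         /* positions (one hashTable row) */
        uint8_t  head;                      /* circular insert position */
        uint8_t  tags[ROW_SIZE];            /* 1-byte short hashes (tagTable row) */
    } Row;

    /* Collect candidate positions whose tag matches, then insert `pos`. */
    uint32_t row_search_and_insert(Row* row, uint8_t tag, uint32_t pos,
                                   uint32_t* candidates, uint32_t maxCandidates) {
        uint32_t n = 0;
        for (uint32_t i = 0; i < ROW_SIZE && n < maxCandidates; ++i) {
            /* the real code compares all tags at once and walks a bitmask */
            if (row->tags[i] == tag)
                candidates[n++] = row->entries[i];
        }
        /* insert the new entry at the head, circular-buffer style */
        row->head = (uint8_t)((row->head - 1) & (ROW_SIZE - 1));
        row->entries[row->head] = pos;
        row->tags[row->head] = tag;
        return n;   /* caller extends/compares matches at each candidate */
    }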

in ZSTD_RowFindBestMatch():

                    const BYTE* const ip, const BYTE* const iLimit,
    U32* const hashTable = ms->hashTable;
    U16* const tagTable = ms->tagTable;
    U32* const hashCache = ms->hashCache;
    const U32 hashLog = ms->rowHashLog;
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    const BYTE* const base = ms->window.base;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const U32 curr = (U32)(ip-base);
    const U32 maxDistance = 1U << cParams->windowLog;
    const U32 lowestValid = ms->window.lowLimit;
    const U32 withinMaxDistance = (curr - lowestValid > maxDistance) ? curr - maxDistance : lowestValid;
    const U32 isDictionary = (ms->loadedDictEnd != 0);
    const U32 rowMask = rowEntries - 1;
    const U32 cappedSearchLog = MIN(cParams->searchLog, rowLog);   /* nb of searches is capped at nb entries per row */
    size_t ml=4-1;
    const ZSTD_matchState_t* const dms = ms->dictMatchState;
    BYTE* dmsTagRow = NULL;
    const U32 ddsHashLog = dms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG;
    PREFETCH_L1(&dms->hashTable[ddsIdx]);
    ddsExtraAttempts = cParams->searchLog > rowLog ? 1U << (cParams->searchLog - rowLog) : 0;
    U32* const dmsHashTable = dms->hashTable;
    U16* const dmsTagTable = dms->tagTable;
    U32 const dmsHash = (U32)ZSTD_hashPtr(ip, dms->rowHashLog + ZSTD_ROW_HASH_TAG_BITS, mls);
    dmsTagRow = (BYTE*)(dmsTagTable + dmsRelRow);
    BYTE* tagRow = (BYTE*)(tagTable + relRow);
    ZSTD_VecMask matches = ZSTD_row_getMatchMask(tagRow, (BYTE)tag, head, rowEntries);
    for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) {
    /* Speed opt: insert current byte into hashtable too. This allows us to avoid one iteration of the loop
       in ZSTD_row_update_internal() at the next search. */
    tagRow[pos + ZSTD_ROW_HASH_TAG_OFFSET] = (BYTE)tag;
    row[pos] = ms->nextToUpdate++;
    const BYTE* const match = base + matchIndex;
    const BYTE* const match = dictBase + matchIndex;
    assert(match+4 <= dictEnd);
    if (MEM_read32(match) == MEM_read32(ip))   /* assumption : matchIndex <= dictLimit-4 (by table construction) */
        currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dictEnd, prefixStart) + 4;
    *offsetPtr = STORE_OFFSET(curr - matchIndex);
    const U32 dmsLowestIndex = dms->window.dictLimit;
    const BYTE* const dmsBase = dms->window.base;
    const BYTE* const dmsEnd = dms->window.nextSrc;
    const U32 dmsSize = (U32)(dmsEnd - dmsBase);
    const U32 dmsIndexDelta = dictLimit - dmsSize;
    ZSTD_VecMask matches = ZSTD_row_getMatchMask(dmsTagRow, (BYTE)dmsTag, head, rowEntries);
    for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) {
    { const BYTE* const match = dmsBase + matchIndex;
      assert(match+4 <= dmsEnd);
      currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dmsEnd, prefixStart) + 4;
    *offsetPtr = STORE_OFFSET(curr - (matchIndex + dmsIndexDelta));

 * TODO: Move the match re-winding into searchMax. This improves compression
 * TODO: Try moving the repcode search into searchMax. After the re-winding

From the search-function generator macros:

                    const BYTE* ip, const BYTE* const iLimit, \
        assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
                    const BYTE* ip, const BYTE* const iLimit, \
        assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
                    const BYTE* ip, const BYTE* const iLimit, \
        assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
        assert(MAX(4, MIN(6, ms->cParams.searchLog)) == rowLog); \
    X(dictMode, mls, 4) \
    ZSTD_FOR_EACH_ROWLOG(X, dictMode, 4) \
    X(dictMode, 4) \

in ZSTD_searchMax():

     * @param mls The minimum search length, in the range [4, 6].
     * @param rowLog The row log (if applicable), in the range [4, 6].
                    const BYTE* ip,
                    const BYTE* iend,

/* *******************************
*  Common parser - lazy strategy
*********************************/
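
The lazy parser below repeatedly weighs a match found one position later (or a repcode) against the match already in hand, using the `4*length - log2(offset)` style gains visible in the fragments: a candidate only displaces the held match if its extra length pays for its more expensive offset. A distilled version of that comparison (an illustration of the heuristic, not the exact source):

    #include <stdint.h>

    static unsigned highbit32(uint32_t v) {   /* index of highest set bit */
        unsigned r = 0;
        while (v >>= 1) r++;
        return r;
    }

    /* Distilled preference test: the candidate must beat the held match by
     * more than the offset-cost difference plus a continuation bonus (the
     * +4 / +7 constants seen at search depths 1 and 2 in the fragments). */
    int prefer_candidate(uint32_t candLen, uint32_t candOffBase,
                         uint32_t heldLen, uint32_t heldOffBase,
                         int continuationBonus) {
        int const gainCand = (int)(candLen * 4) - (int)highbit32(candOffBase | 1);
        int const gainHeld = (int)(heldLen * 4) - (int)highbit32(heldOffBase | 1)
                           + continuationBonus;
        return (candLen >= 4) && (gainCand > gainHeld);
    }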

in ZSTD_compressBlock_lazy_generic():

    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = (searchMethod == search_rowHash) ? iend - 8 - ZSTD_ROW_HASH_CACHE_SIZE : iend - 8;
    const BYTE* const base = ms->window.base;
    const U32 prefixLowestIndex = ms->window.dictLimit;
    const BYTE* const prefixLowest = base + prefixLowestIndex;
    const U32 mls = BOUNDED(4, ms->cParams.minMatch, 6);
    const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6);
    const ZSTD_matchState_t* const dms = ms->dictMatchState;
    const U32 dictLowestIndex    = isDxS ? dms->window.dictLimit : 0;
    const BYTE* const dictBase   = isDxS ? dms->window.base : NULL;
    const BYTE* const dictLowest = isDxS ? dictBase + dictLowestIndex : NULL;
    const BYTE* const dictEnd    = isDxS ? dms->window.nextSrc : NULL;
                     prefixLowestIndex - (U32)(dictEnd - dictBase) :
    const U32 dictAndPrefixLength = (U32)((ip - prefixLowest) + (dictEnd - dictLowest));
    U32 const curr = (U32)(ip - base);
    U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, ms->cParams.windowLog);
    U32 const maxRep = curr - windowLow;
                MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */),
                ms->nextToUpdate, ilimit);
     * code alignment is perturbed. To fix the instability align the loop on 32-bytes.
    const BYTE* start=ip+1;
    const U32 repIndex = (U32)(ip - base) + 1 - offset_1;
    const BYTE* repMatch = ((dictMode == ZSTD_dictMatchState || dictMode == ZSTD_dedicatedDictSearch)
                        dictBase + (repIndex - dictIndexDelta) :
    if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */)
        const BYTE* repMatchEnd = repIndex < prefixLowestIndex ? dictEnd : iend;
        matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
        && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) {
        matchLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4;
    if (matchLength < 4) {
        ip += ((ip-anchor) >> kSearchStrength) + 1;   /* jump faster over incompressible sections */
        && (offcode) && ((offset_1>0) & (MEM_read32(ip) == MEM_read32(ip - offset_1)))) {
        size_t const mlRep = ZSTD_count(ip+4, ip+4-offset_1, iend) + 4;
        int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((mlRep >= 4) && (gain2 > gain1))
    const U32 repIndex = (U32)(ip - base) - offset_1;
    const BYTE* repMatch = repIndex < prefixLowestIndex ?
                        dictBase + (repIndex - dictIndexDelta) :
    if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */)
        const BYTE* repMatchEnd = repIndex < prefixLowestIndex ? dictEnd : iend;
        size_t const mlRep = ZSTD_count_2segments(ip+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
        int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((mlRep >= 4) && (gain2 > gain1))
    int const gain2 = (int)(ml2*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offset2)));   /* raw approx */
    int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 4);
    if ((ml2 >= 4) && (gain2 > gain1)) {
        && (offcode) && ((offset_1>0) & (MEM_read32(ip) == MEM_read32(ip - offset_1)))) {
        size_t const mlRep = ZSTD_count(ip+4, ip+4-offset_1, iend) + 4;
        int const gain2 = (int)(mlRep * 4);
        int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((mlRep >= 4) && (gain2 > gain1))
    const U32 repIndex = (U32)(ip - base) - offset_1;
    const BYTE* repMatch = repIndex < prefixLowestIndex ?
                        dictBase + (repIndex - dictIndexDelta) :
    if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */)
        const BYTE* repMatchEnd = repIndex < prefixLowestIndex ? dictEnd : iend;
        size_t const mlRep = ZSTD_count_2segments(ip+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
        int const gain2 = (int)(mlRep * 4);
        int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((mlRep >= 4) && (gain2 > gain1))
    int const gain2 = (int)(ml2*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offset2)));   /* raw approx */
    int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 7);
    if ((ml2 >= 4) && (gain2 > gain1)) {
    /* NOTE:
     * Pay attention that `start[-value]` can lead to strange undefined behavior
     * notably if `value` is unsigned, resulting in a large positive `-value`.
     */
    while ( ((start > anchor) & (start - STORED_OFFSET(offcode) > prefixLowest))
         && (start[-1] == (start-STORED_OFFSET(offcode))[-1]) )   /* only search for offset within prefix */
    { start--; matchLength++; }
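
The NOTE above is worth spelling out: with an unsigned `value`, `-value` wraps to a huge positive number, so `start[-value]` indexes far past `start` instead of before it. The catch-up loop therefore steps in pointer space (`start - STORED_OFFSET(offcode)`) before indexing. A small hypothetical illustration of the trap:

    #include <stddef.h>

    const char* catch_up(const char* start, size_t offset) {
        /* WRONG: start[-offset] means *(start + (size_t)-offset), a wild address,
         * because negating a size_t wraps to a huge positive value. */
        /* RIGHT: subtract in pointer space first, then index. */
        const char* match = start - offset;
        return (start[-1] == match[-1]) ? start - 1 : start;
    }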
    U32 const matchIndex = (U32)((size_t)(start-base) - STORED_OFFSET(offcode));
    const BYTE* match = (matchIndex < prefixLowestIndex) ? dictBase + matchIndex - dictIndexDelta : base + matchIndex;
    const BYTE* const mStart = (matchIndex < prefixLowestIndex) ? dictLowest : prefixLowest;
    while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLength++; }   /* catch up */
    { size_t const litLength = (size_t)(start - anchor);
    U32 const current2 = (U32)(ip-base);
    U32 const repIndex = current2 - offset_2;
    const BYTE* repMatch = repIndex < prefixLowestIndex ?
                        dictBase - dictIndexDelta + repIndex :
    if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex) >= 3 /* intentional overflow */)
        const BYTE* const repEnd2 = repIndex < prefixLowestIndex ? dictEnd : iend;
        matchLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd2, prefixLowest) + 4;
        && (MEM_read32(ip) == MEM_read32(ip - offset_2)) ) {
        matchLength = ZSTD_count(ip+4, ip+4-offset_2, iend) + 4;
    return (size_t)(iend - anchor);

/* Row-based matchfinder */

in ZSTD_compressBlock_lazy_extDict_generic():

    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = searchMethod == search_rowHash ? iend - 8 - ZSTD_ROW_HASH_CACHE_SIZE : iend - 8;
    const BYTE* const base = ms->window.base;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* const dictBase = ms->window.dictBase;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const dictStart = dictBase + ms->window.lowLimit;
    const U32 windowLog = ms->cParams.windowLog;
    const U32 mls = BOUNDED(4, ms->cParams.minMatch, 6);
    const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6);
                MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */),
                ms->nextToUpdate, ilimit);
     * code alignment is perturbed. To fix the instability align the loop on 32-bytes.
    const BYTE* start=ip+1;
    U32 curr = (U32)(ip-base);
    const U32 repIndex = (U32)(curr+1 - offset_1);
    const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
    const BYTE* const repMatch = repBase + repIndex;
    if ( ((U32)((dictLimit-1) - repIndex) >= 3)   /* intentional overflow */
       & (offset_1 <= curr+1 - windowLow) )   /* note: we are searching at curr+1 */
        const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
        matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repEnd, prefixStart) + 4;
    if (matchLength < 4) {
        ip += ((ip-anchor) >> kSearchStrength) + 1;   /* jump faster over incompressible sections */
    const U32 repIndex = (U32)(curr - offset_1);
    const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
    const BYTE* const repMatch = repBase + repIndex;
    if ( ((U32)((dictLimit-1) - repIndex) >= 3)   /* intentional overflow : do not test positions overlapping 2 memory segments */
       & (offset_1 <= curr - windowLow) )   /* equivalent to `curr > repIndex >= windowLow` */
        const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
        size_t const repLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4;
        int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((repLength >= 4) && (gain2 > gain1))
    int const gain2 = (int)(ml2*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offset2)));   /* raw approx */
    int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 4);
    if ((ml2 >= 4) && (gain2 > gain1)) {
    const U32 repIndex = (U32)(curr - offset_1);
    const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
    const BYTE* const repMatch = repBase + repIndex;
    if ( ((U32)((dictLimit-1) - repIndex) >= 3)   /* intentional overflow : do not test positions overlapping 2 memory segments */
       & (offset_1 <= curr - windowLow) )   /* equivalent to `curr > repIndex >= windowLow` */
        const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
        size_t const repLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4;
        int const gain2 = (int)(repLength * 4);
        int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1);
        if ((repLength >= 4) && (gain2 > gain1))
    int const gain2 = (int)(ml2*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offset2)));   /* raw approx */
    int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 7);
    if ((ml2 >= 4) && (gain2 > gain1)) {
    U32 const matchIndex = (U32)((size_t)(start-base) - STORED_OFFSET(offcode));
    const BYTE* match = (matchIndex < dictLimit) ? dictBase + matchIndex : base + matchIndex;
    const BYTE* const mStart = (matchIndex < dictLimit) ? dictStart : prefixStart;
    while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLength++; }   /* catch up */
    { size_t const litLength = (size_t)(start - anchor);
    const U32 repCurrent = (U32)(ip-base);
    const U32 repIndex = repCurrent - offset_2;
    const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
    const BYTE* const repMatch = repBase + repIndex;
    if ( ((U32)((dictLimit-1) - repIndex) >= 3)   /* intentional overflow : do not test positions overlapping 2 memory segments */
       & (offset_2 <= repCurrent - windowLow) )   /* equivalent to `curr > repIndex >= windowLow` */
        const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
        matchLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4;
    return (size_t)(iend - anchor);