[lib] s/current/curr because it collides with Linux Kernel macro

dev
Nick Terrell 2020-08-11 14:31:09 -07:00
parent 5e4efd22d4
commit f91ed5c766
10 changed files with 165 additions and 158 deletions
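Background for the rename (not part of the diff below, kept deliberately hedged): when zstd is built into the Linux kernel, the name current is already taken; the kernel defines it as a macro expanding to a call that returns the running task, so any local variable named current stops compiling. A minimal sketch of the collision, using get_current() as a stand-in for the arch-specific kernel accessor:

/* sketch only: simplified, asm-generic style kernel-side definition */
struct task_struct;
struct task_struct* get_current(void);
#define current get_current()

static unsigned index_of(const unsigned char* ip, const unsigned char* base)
{
    /* unsigned current = (unsigned)(ip - base);
     *   preprocesses to: unsigned get_current() = (unsigned)(ip - base);  -> compile error */
    unsigned curr = (unsigned)(ip - base);   /* the spelling this commit moves to */
    return curr;
}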

View File

@@ -50,6 +50,13 @@ void* ZSTD_memmove(void* destination, const void* source, size_t num);
void* ZSTD_memset(void* destination, int value, size_t num);
#endif
+ /* Define this macro because the kernel does.
+  * This will ensure we don't introduce new instances of 'current'
+  * in the code.
+  */
+ int this_variable_name_is_not_allowed();
+ #define current this_variable_name_is_not_allowed()
#endif /* ZSTD_DEPS_COMMON */
/* Need:
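The hunk above is the enforcement half of the change: zstd_deps.h now defines current itself, so any future declaration that reuses the name expands into a function declarator with an initializer and is rejected at compile time. A small illustration of the expansion (hypothetical regressed line, shown only for the failure mode):

/* with '#define current this_variable_name_is_not_allowed()' in effect:
 *
 *     U32 current = (U32)(ip - base);
 *
 * preprocesses to
 *
 *     U32 this_variable_name_is_not_allowed() = (U32)(ip - base);
 *
 * which no C compiler accepts, so new uses of 'current' cannot creep back in. */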

View File

@@ -163,9 +163,9 @@ size_t HUF_readCTable (HUF_CElt* CTable, unsigned* maxSymbolValuePtr, const void
/* Prepare base value per rank */
{ U32 n, nextRankStart = 0;
for (n=1; n<=tableLog; n++) {
- U32 current = nextRankStart;
+ U32 curr = nextRankStart;
nextRankStart += (rankVal[n] << (n-1));
- rankVal[n] = current;
+ rankVal[n] = curr;
} }
/* fill nbBits */
@@ -291,7 +291,7 @@ static U32 HUF_setMaxHeight(nodeElt* huffNode, U32 lastNonNull, U32 maxNbBits)
typedef struct {
U32 base;
- U32 current;
+ U32 curr;
} rankPos;
typedef nodeElt huffNodeTable[HUF_CTABLE_WORKSPACE_SIZE_U32];
@@ -313,11 +313,11 @@ static void HUF_sort(nodeElt* huffNode, const unsigned* count, U32 maxSymbolValu
rankPosition[r].base ++;
}
for (n=30; n>0; n--) rankPosition[n-1].base += rankPosition[n].base;
- for (n=0; n<32; n++) rankPosition[n].current = rankPosition[n].base;
+ for (n=0; n<32; n++) rankPosition[n].curr = rankPosition[n].base;
for (n=0; n<=maxSymbolValue; n++) {
U32 const c = count[n];
U32 const r = BIT_highbit32(c+1) + 1;
- U32 pos = rankPosition[r].current++;
+ U32 pos = rankPosition[r].curr++;
while ((pos > rankPosition[r].base) && (c > huffNode[pos-1].count)) {
huffNode[pos] = huffNode[pos-1];
pos--;

View File

@@ -2267,10 +2267,10 @@ static size_t ZSTD_buildSeqStore(ZSTD_CCtx* zc, const void* src, size_t srcSize)
/* limited update after a very long match */
{ const BYTE* const base = ms->window.base;
const BYTE* const istart = (const BYTE*)src;
- const U32 current = (U32)(istart-base);
+ const U32 curr = (U32)(istart-base);
if (sizeof(ptrdiff_t)==8) assert(istart - base < (ptrdiff_t)(U32)(-1)); /* ensure no overflow */
- if (current > ms->nextToUpdate + 384)
+ if (curr > ms->nextToUpdate + 384)
- ms->nextToUpdate = current - MIN(192, (U32)(current - ms->nextToUpdate - 384));
+ ms->nextToUpdate = curr - MIN(192, (U32)(curr - ms->nextToUpdate - 384));
}
/* select and store sequences */

View File

@@ -762,8 +762,8 @@ MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_matchState_t *ms)
MEM_STATIC U32 ZSTD_window_needOverflowCorrection(ZSTD_window_t const window,
void const* srcEnd)
{
- U32 const current = (U32)((BYTE const*)srcEnd - window.base);
+ U32 const curr = (U32)((BYTE const*)srcEnd - window.base);
- return current > ZSTD_CURRENT_MAX;
+ return curr > ZSTD_CURRENT_MAX;
}
/**
@@ -799,14 +799,14 @@ MEM_STATIC U32 ZSTD_window_correctOverflow(ZSTD_window_t* window, U32 cycleLog,
* windowLog <= 31 ==> 3<<29 + 1<<windowLog < 7<<29 < 1<<32.
*/
U32 const cycleMask = (1U << cycleLog) - 1;
- U32 const current = (U32)((BYTE const*)src - window->base);
+ U32 const curr = (U32)((BYTE const*)src - window->base);
- U32 const currentCycle0 = current & cycleMask;
+ U32 const currentCycle0 = curr & cycleMask;
/* Exclude zero so that newCurrent - maxDist >= 1. */
U32 const currentCycle1 = currentCycle0 == 0 ? (1U << cycleLog) : currentCycle0;
U32 const newCurrent = currentCycle1 + maxDist;
- U32 const correction = current - newCurrent;
+ U32 const correction = curr - newCurrent;
assert((maxDist & cycleMask) == 0);
- assert(current > newCurrent);
+ assert(curr > newCurrent);
/* Loose bound, should be around 1<<29 (see above) */
assert(correction > 1<<28);
@@ -981,11 +981,11 @@ MEM_STATIC U32 ZSTD_window_update(ZSTD_window_t* window,
/**
* Returns the lowest allowed match index. It may either be in the ext-dict or the prefix.
*/
- MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 current, unsigned windowLog)
+ MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog)
{
U32 const maxDistance = 1U << windowLog;
U32 const lowestValid = ms->window.lowLimit;
- U32 const withinWindow = (current - lowestValid > maxDistance) ? current - maxDistance : lowestValid;
+ U32 const withinWindow = (curr - lowestValid > maxDistance) ? curr - maxDistance : lowestValid;
U32 const isDictionary = (ms->loadedDictEnd != 0);
U32 const matchLowest = isDictionary ? lowestValid : withinWindow;
return matchLowest;
@@ -994,11 +994,11 @@ MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 current
/**
* Returns the lowest allowed match index in the prefix.
*/
- MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_matchState_t* ms, U32 current, unsigned windowLog)
+ MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog)
{
U32 const maxDistance = 1U << windowLog;
U32 const lowestValid = ms->window.dictLimit;
- U32 const withinWindow = (current - lowestValid > maxDistance) ? current - maxDistance : lowestValid;
+ U32 const withinWindow = (curr - lowestValid > maxDistance) ? curr - maxDistance : lowestValid;
U32 const isDictionary = (ms->loadedDictEnd != 0);
U32 const matchLowest = isDictionary ? lowestValid : withinWindow;
return matchLowest;

View File

@@ -31,15 +31,15 @@ void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
* is empty.
*/
for (; ip + fastHashFillStep - 1 <= iend; ip += fastHashFillStep) {
- U32 const current = (U32)(ip - base);
+ U32 const curr = (U32)(ip - base);
U32 i;
for (i = 0; i < fastHashFillStep; ++i) {
size_t const smHash = ZSTD_hashPtr(ip + i, hBitsS, mls);
size_t const lgHash = ZSTD_hashPtr(ip + i, hBitsL, 8);
if (i == 0)
- hashSmall[smHash] = current + i;
+ hashSmall[smHash] = curr + i;
if (i == 0 || hashLarge[lgHash] == 0)
- hashLarge[lgHash] = current + i;
+ hashLarge[lgHash] = curr + i;
/* Only load extra positions for ZSTD_dtlm_full */
if (dtlm == ZSTD_dtlm_fast)
break;
@@ -108,9 +108,9 @@ size_t ZSTD_compressBlock_doubleFast_generic(
/* init */
ip += (dictAndPrefixLength == 0);
if (dictMode == ZSTD_noDict) {
- U32 const current = (U32)(ip - base);
+ U32 const curr = (U32)(ip - base);
- U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, current, cParams->windowLog);
+ U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, cParams->windowLog);
- U32 const maxRep = current - windowLow;
+ U32 const maxRep = curr - windowLow;
if (offset_2 > maxRep) offsetSaved = offset_2, offset_2 = 0;
if (offset_1 > maxRep) offsetSaved = offset_1, offset_1 = 0;
}
@@ -129,17 +129,17 @@ size_t ZSTD_compressBlock_doubleFast_generic(
size_t const h = ZSTD_hashPtr(ip, hBitsS, mls);
size_t const dictHL = ZSTD_hashPtr(ip, dictHBitsL, 8);
size_t const dictHS = ZSTD_hashPtr(ip, dictHBitsS, mls);
- U32 const current = (U32)(ip-base);
+ U32 const curr = (U32)(ip-base);
U32 const matchIndexL = hashLong[h2];
U32 matchIndexS = hashSmall[h];
const BYTE* matchLong = base + matchIndexL;
const BYTE* match = base + matchIndexS;
- const U32 repIndex = current + 1 - offset_1;
+ const U32 repIndex = curr + 1 - offset_1;
const BYTE* repMatch = (dictMode == ZSTD_dictMatchState
&& repIndex < prefixLowestIndex) ?
dictBase + (repIndex - dictIndexDelta) :
base + repIndex;
- hashLong[h2] = hashSmall[h] = current; /* update hash tables */
+ hashLong[h2] = hashSmall[h] = curr; /* update hash tables */
/* check dictMatchState repcode */
if (dictMode == ZSTD_dictMatchState
@@ -177,7 +177,7 @@ size_t ZSTD_compressBlock_doubleFast_generic(
if (dictMatchL > dictStart && MEM_read64(dictMatchL) == MEM_read64(ip)) {
mLength = ZSTD_count_2segments(ip+8, dictMatchL+8, iend, dictEnd, prefixLowest) + 8;
- offset = (U32)(current - dictMatchIndexL - dictIndexDelta);
+ offset = (U32)(curr - dictMatchIndexL - dictIndexDelta);
while (((ip>anchor) & (dictMatchL>dictStart)) && (ip[-1] == dictMatchL[-1])) { ip--; dictMatchL--; mLength++; } /* catch up */
goto _match_found;
} }
@@ -209,7 +209,7 @@ _search_next_long:
size_t const dictHLNext = ZSTD_hashPtr(ip+1, dictHBitsL, 8);
U32 const matchIndexL3 = hashLong[hl3];
const BYTE* matchL3 = base + matchIndexL3;
- hashLong[hl3] = current + 1;
+ hashLong[hl3] = curr + 1;
/* check prefix long +1 match */
if (matchIndexL3 > prefixLowestIndex) {
@@ -228,7 +228,7 @@ _search_next_long:
if (dictMatchL3 > dictStart && MEM_read64(dictMatchL3) == MEM_read64(ip+1)) {
mLength = ZSTD_count_2segments(ip+1+8, dictMatchL3+8, iend, dictEnd, prefixLowest) + 8;
ip++;
- offset = (U32)(current + 1 - dictMatchIndexL3 - dictIndexDelta);
+ offset = (U32)(curr + 1 - dictMatchIndexL3 - dictIndexDelta);
while (((ip>anchor) & (dictMatchL3>dictStart)) && (ip[-1] == dictMatchL3[-1])) { ip--; dictMatchL3--; mLength++; } /* catch up */
goto _match_found;
} } }
@@ -236,7 +236,7 @@ _search_next_long:
/* if no long +1 match, explore the short match we found */
if (dictMode == ZSTD_dictMatchState && matchIndexS < prefixLowestIndex) {
mLength = ZSTD_count_2segments(ip+4, match+4, iend, dictEnd, prefixLowest) + 4;
- offset = (U32)(current - matchIndexS);
+ offset = (U32)(curr - matchIndexS);
while (((ip>anchor) & (match>dictStart)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
} else {
mLength = ZSTD_count(ip+4, match+4, iend) + 4;
@@ -260,7 +260,7 @@ _match_stored:
if (ip <= ilimit) {
/* Complementary insertion */
/* done after iLimit test, as candidates could be > iend-8 */
- { U32 const indexToInsert = current+2;
+ { U32 const indexToInsert = curr+2;
hashLong[ZSTD_hashPtr(base+indexToInsert, hBitsL, 8)] = indexToInsert;
hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] = (U32)(ip-2-base);
hashSmall[ZSTD_hashPtr(base+indexToInsert, hBitsS, mls)] = indexToInsert;
@@ -401,12 +401,12 @@ static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
const BYTE* const matchLongBase = matchLongIndex < prefixStartIndex ? dictBase : base;
const BYTE* matchLong = matchLongBase + matchLongIndex;
- const U32 current = (U32)(ip-base);
+ const U32 curr = (U32)(ip-base);
- const U32 repIndex = current + 1 - offset_1; /* offset_1 expected <= current +1 */
+ const U32 repIndex = curr + 1 - offset_1; /* offset_1 expected <= curr +1 */
const BYTE* const repBase = repIndex < prefixStartIndex ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
size_t mLength;
- hashSmall[hSmall] = hashLong[hLong] = current; /* update hash table */
+ hashSmall[hSmall] = hashLong[hLong] = curr; /* update hash table */
if ((((U32)((prefixStartIndex-1) - repIndex) >= 3) /* intentional underflow : ensure repIndex doesn't overlap dict + prefix */
& (repIndex > dictStartIndex))
@@ -421,7 +421,7 @@ static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
const BYTE* const lowMatchPtr = matchLongIndex < prefixStartIndex ? dictStart : prefixStart;
U32 offset;
mLength = ZSTD_count_2segments(ip+8, matchLong+8, iend, matchEnd, prefixStart) + 8;
- offset = current - matchLongIndex;
+ offset = curr - matchLongIndex;
while (((ip>anchor) & (matchLong>lowMatchPtr)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; } /* catch up */
offset_2 = offset_1;
offset_1 = offset;
@@ -433,19 +433,19 @@ static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
const BYTE* const match3Base = matchIndex3 < prefixStartIndex ? dictBase : base;
const BYTE* match3 = match3Base + matchIndex3;
U32 offset;
- hashLong[h3] = current + 1;
+ hashLong[h3] = curr + 1;
if ( (matchIndex3 > dictStartIndex) && (MEM_read64(match3) == MEM_read64(ip+1)) ) {
const BYTE* const matchEnd = matchIndex3 < prefixStartIndex ? dictEnd : iend;
const BYTE* const lowMatchPtr = matchIndex3 < prefixStartIndex ? dictStart : prefixStart;
mLength = ZSTD_count_2segments(ip+9, match3+8, iend, matchEnd, prefixStart) + 8;
ip++;
- offset = current+1 - matchIndex3;
+ offset = curr+1 - matchIndex3;
while (((ip>anchor) & (match3>lowMatchPtr)) && (ip[-1] == match3[-1])) { ip--; match3--; mLength++; } /* catch up */
} else {
const BYTE* const matchEnd = matchIndex < prefixStartIndex ? dictEnd : iend;
const BYTE* const lowMatchPtr = matchIndex < prefixStartIndex ? dictStart : prefixStart;
mLength = ZSTD_count_2segments(ip+4, match+4, iend, matchEnd, prefixStart) + 4;
- offset = current - matchIndex;
+ offset = curr - matchIndex;
while (((ip>anchor) & (match>lowMatchPtr)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
}
offset_2 = offset_1;
@@ -464,7 +464,7 @@ static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
if (ip <= ilimit) {
/* Complementary insertion */
/* done after iLimit test, as candidates could be > iend-8 */
- { U32 const indexToInsert = current+2;
+ { U32 const indexToInsert = curr+2;
hashLong[ZSTD_hashPtr(base+indexToInsert, hBitsL, 8)] = indexToInsert;
hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] = (U32)(ip-2-base);
hashSmall[ZSTD_hashPtr(base+indexToInsert, hBitsS, mls)] = indexToInsert;

View File

@@ -29,16 +29,16 @@ void ZSTD_fillHashTable(ZSTD_matchState_t* ms,
* Insert the other positions if their hash entry is empty.
*/
for ( ; ip + fastHashFillStep < iend + 2; ip += fastHashFillStep) {
- U32 const current = (U32)(ip - base);
+ U32 const curr = (U32)(ip - base);
size_t const hash0 = ZSTD_hashPtr(ip, hBits, mls);
- hashTable[hash0] = current;
+ hashTable[hash0] = curr;
if (dtlm == ZSTD_dtlm_fast) continue;
/* Only load extra positions for ZSTD_dtlm_full */
{ U32 p;
for (p = 1; p < fastHashFillStep; ++p) {
size_t const hash = ZSTD_hashPtr(ip + p, hBits, mls);
if (hashTable[hash] == 0) { /* not yet filled */
- hashTable[hash] = current + p;
+ hashTable[hash] = curr + p;
} } } }
}
@@ -72,9 +72,9 @@ ZSTD_compressBlock_fast_generic(
DEBUGLOG(5, "ZSTD_compressBlock_fast_generic");
ip0 += (ip0 == prefixStart);
ip1 = ip0 + 1;
- { U32 const current = (U32)(ip0 - base);
+ { U32 const curr = (U32)(ip0 - base);
- U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, current, cParams->windowLog);
+ U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, cParams->windowLog);
- U32 const maxRep = current - windowLow;
+ U32 const maxRep = curr - windowLow;
if (offset_2 > maxRep) offsetSaved = offset_2, offset_2 = 0;
if (offset_1 > maxRep) offsetSaved = offset_1, offset_1 = 0;
}
@@ -258,14 +258,14 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic(
while (ip < ilimit) { /* < instead of <=, because repcode check at (ip+1) */
size_t mLength;
size_t const h = ZSTD_hashPtr(ip, hlog, mls);
- U32 const current = (U32)(ip-base);
+ U32 const curr = (U32)(ip-base);
U32 const matchIndex = hashTable[h];
const BYTE* match = base + matchIndex;
- const U32 repIndex = current + 1 - offset_1;
+ const U32 repIndex = curr + 1 - offset_1;
const BYTE* repMatch = (repIndex < prefixStartIndex) ?
dictBase + (repIndex - dictIndexDelta) :
base + repIndex;
- hashTable[h] = current; /* update hash table */
+ hashTable[h] = curr; /* update hash table */
if ( ((U32)((prefixStartIndex-1) - repIndex) >= 3) /* intentional underflow : ensure repIndex isn't overlapping dict + prefix */
&& (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
@@ -284,7 +284,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic(
continue;
} else {
/* found a dict match */
- U32 const offset = (U32)(current-dictMatchIndex-dictIndexDelta);
+ U32 const offset = (U32)(curr-dictMatchIndex-dictIndexDelta);
mLength = ZSTD_count_2segments(ip+4, dictMatch+4, iend, dictEnd, prefixStart) + 4;
while (((ip>anchor) & (dictMatch>dictStart))
&& (ip[-1] == dictMatch[-1])) {
@@ -316,8 +316,8 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic(
if (ip <= ilimit) {
/* Fill Table */
- assert(base+current+2 > istart); /* check base overflow */
+ assert(base+curr+2 > istart); /* check base overflow */
- hashTable[ZSTD_hashPtr(base+current+2, hlog, mls)] = current+2; /* here because current+2 could be > iend-8 */
+ hashTable[ZSTD_hashPtr(base+curr+2, hlog, mls)] = curr+2; /* here because curr+2 could be > iend-8 */
hashTable[ZSTD_hashPtr(ip-2, hlog, mls)] = (U32)(ip-2-base);
/* check immediate repcode */
@@ -410,13 +410,13 @@ static size_t ZSTD_compressBlock_fast_extDict_generic(
const U32 matchIndex = hashTable[h];
const BYTE* const matchBase = matchIndex < prefixStartIndex ? dictBase : base;
const BYTE* match = matchBase + matchIndex;
- const U32 current = (U32)(ip-base);
+ const U32 curr = (U32)(ip-base);
- const U32 repIndex = current + 1 - offset_1;
+ const U32 repIndex = curr + 1 - offset_1;
const BYTE* const repBase = repIndex < prefixStartIndex ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
- hashTable[h] = current; /* update hash table */
+ hashTable[h] = curr; /* update hash table */
- DEBUGLOG(7, "offset_1 = %u , current = %u", offset_1, current);
+ DEBUGLOG(7, "offset_1 = %u , curr = %u", offset_1, curr);
- assert(offset_1 <= current +1); /* check repIndex */
+ assert(offset_1 <= curr +1); /* check repIndex */
if ( (((U32)((prefixStartIndex-1) - repIndex) >= 3) /* intentional underflow */ & (repIndex > dictStartIndex))
&& (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
@@ -435,7 +435,7 @@ static size_t ZSTD_compressBlock_fast_extDict_generic(
}
{ const BYTE* const matchEnd = matchIndex < prefixStartIndex ? dictEnd : iend;
const BYTE* const lowMatchPtr = matchIndex < prefixStartIndex ? dictStart : prefixStart;
- U32 const offset = current - matchIndex;
+ U32 const offset = curr - matchIndex;
size_t mLength = ZSTD_count_2segments(ip+4, match+4, iend, matchEnd, prefixStart) + 4;
while (((ip>anchor) & (match>lowMatchPtr)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
offset_2 = offset_1; offset_1 = offset; /* update offset history */
@@ -446,7 +446,7 @@ static size_t ZSTD_compressBlock_fast_extDict_generic(
if (ip <= ilimit) {
/* Fill Table */
- hashTable[ZSTD_hashPtr(base+current+2, hlog, mls)] = current+2;
+ hashTable[ZSTD_hashPtr(base+curr+2, hlog, mls)] = curr+2;
hashTable[ZSTD_hashPtr(ip-2, hlog, mls)] = (U32)(ip-2-base);
/* check immediate repcode */
while (ip <= ilimit) {

View File

@@ -58,11 +58,11 @@ ZSTD_updateDUBT(ZSTD_matchState_t* ms,
/** ZSTD_insertDUBT1() :
* sort one already inserted but unsorted position
- * assumption : current >= btlow == (current - btmask)
+ * assumption : curr >= btlow == (curr - btmask)
* doesn't fail */
static void
ZSTD_insertDUBT1(ZSTD_matchState_t* ms,
- U32 current, const BYTE* inputEnd,
+ U32 curr, const BYTE* inputEnd,
U32 nbCompares, U32 btLow,
const ZSTD_dictMode_e dictMode)
{
@@ -74,41 +74,41 @@ ZSTD_insertDUBT1(ZSTD_matchState_t* ms,
const BYTE* const base = ms->window.base;
const BYTE* const dictBase = ms->window.dictBase;
const U32 dictLimit = ms->window.dictLimit;
- const BYTE* const ip = (current>=dictLimit) ? base + current : dictBase + current;
+ const BYTE* const ip = (curr>=dictLimit) ? base + curr : dictBase + curr;
- const BYTE* const iend = (current>=dictLimit) ? inputEnd : dictBase + dictLimit;
+ const BYTE* const iend = (curr>=dictLimit) ? inputEnd : dictBase + dictLimit;
const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const prefixStart = base + dictLimit;
const BYTE* match;
- U32* smallerPtr = bt + 2*(current&btMask);
+ U32* smallerPtr = bt + 2*(curr&btMask);
U32* largerPtr = smallerPtr + 1;
U32 matchIndex = *smallerPtr; /* this candidate is unsorted : next sorted candidate is reached through *smallerPtr, while *largerPtr contains previous unsorted candidate (which is already saved and can be overwritten) */
U32 dummy32; /* to be nullified at the end */
U32 const windowValid = ms->window.lowLimit;
U32 const maxDistance = 1U << cParams->windowLog;
- U32 const windowLow = (current - windowValid > maxDistance) ? current - maxDistance : windowValid;
+ U32 const windowLow = (curr - windowValid > maxDistance) ? curr - maxDistance : windowValid;
DEBUGLOG(8, "ZSTD_insertDUBT1(%u) (dictLimit=%u, lowLimit=%u)",
- current, dictLimit, windowLow);
+ curr, dictLimit, windowLow);
- assert(current >= btLow);
+ assert(curr >= btLow);
assert(ip < iend); /* condition for ZSTD_count */
while (nbCompares-- && (matchIndex > windowLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask);
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
- assert(matchIndex < current);
+ assert(matchIndex < curr);
/* note : all candidates are now supposed sorted,
* but it's still possible to have nextPtr[1] == ZSTD_DUBT_UNSORTED_MARK
* when a real index has the same value as ZSTD_DUBT_UNSORTED_MARK */
if ( (dictMode != ZSTD_extDict)
|| (matchIndex+matchLength >= dictLimit) /* both in current segment*/
- || (current < dictLimit) /* both in extDict */) {
+ || (curr < dictLimit) /* both in extDict */) {
const BYTE* const mBase = ( (dictMode != ZSTD_extDict)
|| (matchIndex+matchLength >= dictLimit)) ?
base : dictBase;
assert( (matchIndex+matchLength >= dictLimit) /* might be wrong if extDict is incorrectly set to 0 */
- || (current < dictLimit) );
+ || (curr < dictLimit) );
match = mBase + matchIndex;
matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
} else {
@@ -119,7 +119,7 @@ ZSTD_insertDUBT1(ZSTD_matchState_t* ms,
}
DEBUGLOG(8, "ZSTD_insertDUBT1: comparing %u with %u : found %u common bytes ",
- current, matchIndex, (U32)matchLength);
+ curr, matchIndex, (U32)matchLength);
if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */
break; /* drop , to guarantee consistency ; miss a bit of compression, but other solutions can corrupt tree */
@@ -168,7 +168,7 @@ ZSTD_DUBT_findBetterDictMatch (
const BYTE* const base = ms->window.base;
const BYTE* const prefixStart = base + ms->window.dictLimit;
- U32 const current = (U32)(ip-base);
+ U32 const curr = (U32)(ip-base);
const BYTE* const dictBase = dms->window.base;
const BYTE* const dictEnd = dms->window.nextSrc;
U32 const dictHighLimit = (U32)(dms->window.nextSrc - dms->window.base);
@@ -195,10 +195,10 @@ ZSTD_DUBT_findBetterDictMatch (
if (matchLength > bestLength) {
U32 matchIndex = dictMatchIndex + dictIndexDelta;
- if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(current-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) ) {
+ if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) ) {
DEBUGLOG(9, "ZSTD_DUBT_findBetterDictMatch(%u) : found better match length %u -> %u and offsetCode %u -> %u (dictMatchIndex %u, matchIndex %u)",
- current, (U32)bestLength, (U32)matchLength, (U32)*offsetPtr, ZSTD_REP_MOVE + current - matchIndex, dictMatchIndex, matchIndex);
+ curr, (U32)bestLength, (U32)matchLength, (U32)*offsetPtr, ZSTD_REP_MOVE + curr - matchIndex, dictMatchIndex, matchIndex);
- bestLength = matchLength, *offsetPtr = ZSTD_REP_MOVE + current - matchIndex;
+ bestLength = matchLength, *offsetPtr = ZSTD_REP_MOVE + curr - matchIndex;
}
if (ip+matchLength == iend) { /* reached end of input : ip[matchLength] is not valid, no way to know if it's larger or smaller than match */
break; /* drop, to guarantee consistency (miss a little bit of compression) */
@@ -218,9 +218,9 @@ ZSTD_DUBT_findBetterDictMatch (
}
if (bestLength >= MINMATCH) {
- U32 const mIndex = current - ((U32)*offsetPtr - ZSTD_REP_MOVE); (void)mIndex;
+ U32 const mIndex = curr - ((U32)*offsetPtr - ZSTD_REP_MOVE); (void)mIndex;
DEBUGLOG(8, "ZSTD_DUBT_findBetterDictMatch(%u) : found match of length %u and offsetCode %u (pos %u)",
- current, (U32)bestLength, (U32)*offsetPtr, mIndex);
+ curr, (U32)bestLength, (U32)*offsetPtr, mIndex);
}
return bestLength;
@@ -241,13 +241,13 @@ ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
U32 matchIndex = hashTable[h];
const BYTE* const base = ms->window.base;
- U32 const current = (U32)(ip-base);
+ U32 const curr = (U32)(ip-base);
- U32 const windowLow = ZSTD_getLowestMatchIndex(ms, current, cParams->windowLog);
+ U32 const windowLow = ZSTD_getLowestMatchIndex(ms, curr, cParams->windowLog);
U32* const bt = ms->chainTable;
U32 const btLog = cParams->chainLog - 1;
U32 const btMask = (1 << btLog) - 1;
- U32 const btLow = (btMask >= current) ? 0 : current - btMask;
+ U32 const btLow = (btMask >= curr) ? 0 : curr - btMask;
U32 const unsortLimit = MAX(btLow, windowLow);
U32* nextCandidate = bt + 2*(matchIndex&btMask);
@@ -256,7 +256,7 @@ ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
U32 nbCandidates = nbCompares;
U32 previousCandidate = 0;
- DEBUGLOG(7, "ZSTD_DUBT_findBestMatch (%u) ", current);
+ DEBUGLOG(7, "ZSTD_DUBT_findBestMatch (%u) ", curr);
assert(ip <= iend-8); /* required for h calculation */
/* reach end of unsorted candidates list */
@@ -299,14 +299,14 @@ ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
const U32 dictLimit = ms->window.dictLimit;
const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const prefixStart = base + dictLimit;
- U32* smallerPtr = bt + 2*(current&btMask);
+ U32* smallerPtr = bt + 2*(curr&btMask);
- U32* largerPtr = bt + 2*(current&btMask) + 1;
+ U32* largerPtr = bt + 2*(curr&btMask) + 1;
- U32 matchEndIdx = current + 8 + 1;
+ U32 matchEndIdx = curr + 8 + 1;
U32 dummy32; /* to be nullified at the end */
size_t bestLength = 0;
matchIndex = hashTable[h];
- hashTable[h] = current; /* Update Hash Table */
+ hashTable[h] = curr; /* Update Hash Table */
while (nbCompares-- && (matchIndex > windowLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask);
@@ -326,8 +326,8 @@ ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
if (matchLength > bestLength) {
if (matchLength > matchEndIdx - matchIndex)
matchEndIdx = matchIndex + (U32)matchLength;
- if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(current-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) )
+ if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) )
- bestLength = matchLength, *offsetPtr = ZSTD_REP_MOVE + current - matchIndex;
+ bestLength = matchLength, *offsetPtr = ZSTD_REP_MOVE + curr - matchIndex;
if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */
if (dictMode == ZSTD_dictMatchState) {
nbCompares = 0; /* in addition to avoiding checking any
@@ -363,12 +363,12 @@ ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
mls, dictMode);
}
- assert(matchEndIdx > current+8); /* ensure nextToUpdate is increased */
+ assert(matchEndIdx > curr+8); /* ensure nextToUpdate is increased */
ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */
if (bestLength >= MINMATCH) {
- U32 const mIndex = current - ((U32)*offsetPtr - ZSTD_REP_MOVE); (void)mIndex;
+ U32 const mIndex = curr - ((U32)*offsetPtr - ZSTD_REP_MOVE); (void)mIndex;
DEBUGLOG(8, "ZSTD_DUBT_findBestMatch(%u) : found match of length %u and offsetCode %u (pos %u)",
- current, (U32)bestLength, (U32)*offsetPtr, mIndex);
+ curr, (U32)bestLength, (U32)*offsetPtr, mIndex);
}
return bestLength;
}
@@ -493,13 +493,13 @@ size_t ZSTD_HcFindBestMatch_generic (
const U32 dictLimit = ms->window.dictLimit;
const BYTE* const prefixStart = base + dictLimit;
const BYTE* const dictEnd = dictBase + dictLimit;
- const U32 current = (U32)(ip-base);
+ const U32 curr = (U32)(ip-base);
const U32 maxDistance = 1U << cParams->windowLog;
const U32 lowestValid = ms->window.lowLimit;
- const U32 withinMaxDistance = (current - lowestValid > maxDistance) ? current - maxDistance : lowestValid;
+ const U32 withinMaxDistance = (curr - lowestValid > maxDistance) ? curr - maxDistance : lowestValid;
const U32 isDictionary = (ms->loadedDictEnd != 0);
const U32 lowLimit = isDictionary ? lowestValid : withinMaxDistance;
- const U32 minChain = current > chainSize ? current - chainSize : 0;
+ const U32 minChain = curr > chainSize ? curr - chainSize : 0;
U32 nbAttempts = 1U << cParams->searchLog;
size_t ml=4-1;
@@ -523,7 +523,7 @@ size_t ZSTD_HcFindBestMatch_generic (
/* save best solution */
if (currentMl > ml) {
ml = currentMl;
- *offsetPtr = current - matchIndex + ZSTD_REP_MOVE;
+ *offsetPtr = curr - matchIndex + ZSTD_REP_MOVE;
if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */
}
@@ -555,7 +555,7 @@ size_t ZSTD_HcFindBestMatch_generic (
/* save best solution */
if (currentMl > ml) {
ml = currentMl;
- *offsetPtr = current - (matchIndex + dmsIndexDelta) + ZSTD_REP_MOVE;
+ *offsetPtr = curr - (matchIndex + dmsIndexDelta) + ZSTD_REP_MOVE;
if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */
}
@@ -667,9 +667,9 @@ ZSTD_compressBlock_lazy_generic(
/* init */
ip += (dictAndPrefixLength == 0);
if (dictMode == ZSTD_noDict) {
- U32 const current = (U32)(ip - base);
+ U32 const curr = (U32)(ip - base);
- U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, current, ms->cParams.windowLog);
+ U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, ms->cParams.windowLog);
- U32 const maxRep = current - windowLow;
+ U32 const maxRep = curr - windowLow;
if (offset_2 > maxRep) savedOffset = offset_2, offset_2 = 0;
if (offset_1 > maxRep) savedOffset = offset_1, offset_1 = 0;
}
@@ -968,11 +968,11 @@ size_t ZSTD_compressBlock_lazy_extDict_generic(
size_t matchLength=0;
size_t offset=0;
const BYTE* start=ip+1;
- U32 current = (U32)(ip-base);
+ U32 curr = (U32)(ip-base);
/* check repCode */
- { const U32 windowLow = ZSTD_getLowestMatchIndex(ms, current+1, windowLog);
+ { const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr+1, windowLog);
- const U32 repIndex = (U32)(current+1 - offset_1);
+ const U32 repIndex = (U32)(curr+1 - offset_1);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
if (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex > windowLow)) /* intentional overflow */
@@ -999,11 +999,11 @@ size_t ZSTD_compressBlock_lazy_extDict_generic(
if (depth>=1)
while (ip<ilimit) {
ip ++;
- current++;
+ curr++;
/* check repCode */
if (offset) {
- const U32 windowLow = ZSTD_getLowestMatchIndex(ms, current, windowLog);
+ const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog);
- const U32 repIndex = (U32)(current - offset_1);
+ const U32 repIndex = (U32)(curr - offset_1);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
if (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex > windowLow)) /* intentional overflow */
@@ -1030,11 +1030,11 @@ size_t ZSTD_compressBlock_lazy_extDict_generic(
/* let's find an even better one */
if ((depth==2) && (ip<ilimit)) {
ip ++;
- current++;
+ curr++;
/* check repCode */
if (offset) {
- const U32 windowLow = ZSTD_getLowestMatchIndex(ms, current, windowLog);
+ const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog);
- const U32 repIndex = (U32)(current - offset_1);
+ const U32 repIndex = (U32)(curr - offset_1);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
if (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex > windowLow)) /* intentional overflow */

View File

@@ -246,10 +246,10 @@ void ZSTD_ldm_fillHashTable(
* (after a long match, only update tables a limited amount). */
static void ZSTD_ldm_limitTableUpdate(ZSTD_matchState_t* ms, const BYTE* anchor)
{
- U32 const current = (U32)(anchor - ms->window.base);
+ U32 const curr = (U32)(anchor - ms->window.base);
- if (current > ms->nextToUpdate + 1024) {
+ if (curr > ms->nextToUpdate + 1024) {
ms->nextToUpdate =
- current - MIN(512, current - ms->nextToUpdate - 1024);
+ curr - MIN(512, curr - ms->nextToUpdate - 1024);
}
}
@@ -286,7 +286,7 @@ static size_t ZSTD_ldm_generateSequences_internal(
while (ip <= ilimit) {
size_t mLength;
- U32 const current = (U32)(ip - base);
+ U32 const curr = (U32)(ip - base);
size_t forwardMatchLength = 0, backwardMatchLength = 0;
ldmEntry_t* bestEntry = NULL;
if (ip != istart) {
@@ -365,7 +365,7 @@ static size_t ZSTD_ldm_generateSequences_internal(
/* No match found -- continue searching */
if (bestEntry == NULL) {
ZSTD_ldm_makeEntryAndInsertByTag(ldmState, rollingHash,
- hBits, current,
+ hBits, curr,
*params);
ip++;
continue;
@@ -377,11 +377,11 @@ static size_t ZSTD_ldm_generateSequences_internal(
{
/* Store the sequence:
- * ip = current - backwardMatchLength
+ * ip = curr - backwardMatchLength
* The match is at (bestEntry->offset - backwardMatchLength)
*/
U32 const matchIndex = bestEntry->offset;
- U32 const offset = current - matchIndex;
+ U32 const offset = curr - matchIndex;
rawSeq* const seq = rawSeqStore->seq + rawSeqStore->size;
/* Out of sequence storage */

View File

@ -386,32 +386,32 @@ static U32 ZSTD_insertBt1(
const BYTE* const dictEnd = dictBase + dictLimit; const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const prefixStart = base + dictLimit; const BYTE* const prefixStart = base + dictLimit;
const BYTE* match; const BYTE* match;
const U32 current = (U32)(ip-base); const U32 curr = (U32)(ip-base);
const U32 btLow = btMask >= current ? 0 : current - btMask; const U32 btLow = btMask >= curr ? 0 : curr - btMask;
U32* smallerPtr = bt + 2*(current&btMask); U32* smallerPtr = bt + 2*(curr&btMask);
U32* largerPtr = smallerPtr + 1; U32* largerPtr = smallerPtr + 1;
U32 dummy32; /* to be nullified at the end */ U32 dummy32; /* to be nullified at the end */
U32 const windowLow = ms->window.lowLimit; U32 const windowLow = ms->window.lowLimit;
U32 matchEndIdx = current+8+1; U32 matchEndIdx = curr+8+1;
size_t bestLength = 8; size_t bestLength = 8;
U32 nbCompares = 1U << cParams->searchLog; U32 nbCompares = 1U << cParams->searchLog;
#ifdef ZSTD_C_PREDICT #ifdef ZSTD_C_PREDICT
U32 predictedSmall = *(bt + 2*((current-1)&btMask) + 0); U32 predictedSmall = *(bt + 2*((curr-1)&btMask) + 0);
U32 predictedLarge = *(bt + 2*((current-1)&btMask) + 1); U32 predictedLarge = *(bt + 2*((curr-1)&btMask) + 1);
predictedSmall += (predictedSmall>0); predictedSmall += (predictedSmall>0);
predictedLarge += (predictedLarge>0); predictedLarge += (predictedLarge>0);
#endif /* ZSTD_C_PREDICT */ #endif /* ZSTD_C_PREDICT */
DEBUGLOG(8, "ZSTD_insertBt1 (%u)", current); DEBUGLOG(8, "ZSTD_insertBt1 (%u)", curr);
assert(ip <= iend-8); /* required for h calculation */ assert(ip <= iend-8); /* required for h calculation */
hashTable[h] = current; /* Update Hash Table */ hashTable[h] = curr; /* Update Hash Table */
assert(windowLow > 0); assert(windowLow > 0);
while (nbCompares-- && (matchIndex >= windowLow)) { while (nbCompares-- && (matchIndex >= windowLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask); U32* const nextPtr = bt + 2*(matchIndex & btMask);
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */ size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
assert(matchIndex < current); assert(matchIndex < curr);
#ifdef ZSTD_C_PREDICT /* note : can create issues when hlog small <= 11 */ #ifdef ZSTD_C_PREDICT /* note : can create issues when hlog small <= 11 */
const U32* predictPtr = bt + 2*((matchIndex-1) & btMask); /* written this way, as bt is a roll buffer */ const U32* predictPtr = bt + 2*((matchIndex-1) & btMask); /* written this way, as bt is a roll buffer */
@ -474,8 +474,8 @@ static U32 ZSTD_insertBt1(
*smallerPtr = *largerPtr = 0; *smallerPtr = *largerPtr = 0;
{ U32 positions = 0; { U32 positions = 0;
if (bestLength > 384) positions = MIN(192, (U32)(bestLength - 384)); /* speed optimization */ if (bestLength > 384) positions = MIN(192, (U32)(bestLength - 384)); /* speed optimization */
assert(matchEndIdx > current + 8); assert(matchEndIdx > curr + 8);
return MAX(positions, matchEndIdx - (current + 8)); return MAX(positions, matchEndIdx - (curr + 8));
} }
} }
@ -519,7 +519,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
const ZSTD_compressionParameters* const cParams = &ms->cParams; const ZSTD_compressionParameters* const cParams = &ms->cParams;
U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1); U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
const BYTE* const base = ms->window.base; const BYTE* const base = ms->window.base;
U32 const current = (U32)(ip-base); U32 const curr = (U32)(ip-base);
U32 const hashLog = cParams->hashLog; U32 const hashLog = cParams->hashLog;
U32 const minMatch = (mls==3) ? 3 : 4; U32 const minMatch = (mls==3) ? 3 : 4;
U32* const hashTable = ms->hashTable; U32* const hashTable = ms->hashTable;
@ -533,12 +533,12 @@ U32 ZSTD_insertBtAndGetAllMatches (
U32 const dictLimit = ms->window.dictLimit; U32 const dictLimit = ms->window.dictLimit;
const BYTE* const dictEnd = dictBase + dictLimit; const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const prefixStart = base + dictLimit; const BYTE* const prefixStart = base + dictLimit;
U32 const btLow = (btMask >= current) ? 0 : current - btMask; U32 const btLow = (btMask >= curr) ? 0 : curr - btMask;
U32 const windowLow = ZSTD_getLowestMatchIndex(ms, current, cParams->windowLog); U32 const windowLow = ZSTD_getLowestMatchIndex(ms, curr, cParams->windowLog);
U32 const matchLow = windowLow ? windowLow : 1; U32 const matchLow = windowLow ? windowLow : 1;
U32* smallerPtr = bt + 2*(current&btMask); U32* smallerPtr = bt + 2*(curr&btMask);
U32* largerPtr = bt + 2*(current&btMask) + 1; U32* largerPtr = bt + 2*(curr&btMask) + 1;
U32 matchEndIdx = current+8+1; /* farthest referenced position of any match => detects repetitive patterns */ U32 matchEndIdx = curr+8+1; /* farthest referenced position of any match => detects repetitive patterns */
U32 dummy32; /* to be nullified at the end */ U32 dummy32; /* to be nullified at the end */
U32 mnum = 0; U32 mnum = 0;
U32 nbCompares = 1U << cParams->searchLog; U32 nbCompares = 1U << cParams->searchLog;
@ -557,7 +557,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
U32 const dmsBtLow = dictMode == ZSTD_dictMatchState && dmsBtMask < dmsHighLimit - dmsLowLimit ? dmsHighLimit - dmsBtMask : dmsLowLimit; U32 const dmsBtLow = dictMode == ZSTD_dictMatchState && dmsBtMask < dmsHighLimit - dmsLowLimit ? dmsHighLimit - dmsBtMask : dmsLowLimit;
size_t bestLength = lengthToBeat-1; size_t bestLength = lengthToBeat-1;
DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", current); DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", curr);
/* check repCode */ /* check repCode */
assert(ll0 <= 1); /* necessarily 1 or 0 */ assert(ll0 <= 1); /* necessarily 1 or 0 */
@ -565,29 +565,29 @@ U32 ZSTD_insertBtAndGetAllMatches (
U32 repCode; U32 repCode;
for (repCode = ll0; repCode < lastR; repCode++) { for (repCode = ll0; repCode < lastR; repCode++) {
U32 const repOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode]; U32 const repOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
U32 const repIndex = current - repOffset; U32 const repIndex = curr - repOffset;
U32 repLen = 0; U32 repLen = 0;
assert(current >= dictLimit); assert(curr >= dictLimit);
if (repOffset-1 /* intentional overflow, discards 0 and -1 */ < current-dictLimit) { /* equivalent to `current > repIndex >= dictLimit` */ if (repOffset-1 /* intentional overflow, discards 0 and -1 */ < curr-dictLimit) { /* equivalent to `curr > repIndex >= dictLimit` */
/* We must validate the repcode offset because when we're using a dictionary the /* We must validate the repcode offset because when we're using a dictionary the
* valid offset range shrinks when the dictionary goes out of bounds. * valid offset range shrinks when the dictionary goes out of bounds.
*/ */
if ((repIndex >= windowLow) & (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch))) { if ((repIndex >= windowLow) & (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch))) {
repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch; repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch;
} }
} else { /* repIndex < dictLimit || repIndex >= current */ } else { /* repIndex < dictLimit || repIndex >= curr */
const BYTE* const repMatch = dictMode == ZSTD_dictMatchState ? const BYTE* const repMatch = dictMode == ZSTD_dictMatchState ?
dmsBase + repIndex - dmsIndexDelta : dmsBase + repIndex - dmsIndexDelta :
dictBase + repIndex; dictBase + repIndex;
assert(current >= windowLow); assert(curr >= windowLow);
if ( dictMode == ZSTD_extDict if ( dictMode == ZSTD_extDict
&& ( ((repOffset-1) /*intentional overflow*/ < current - windowLow) /* equivalent to `current > repIndex >= windowLow` */ && ( ((repOffset-1) /*intentional overflow*/ < curr - windowLow) /* equivalent to `curr > repIndex >= windowLow` */
& (((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */) & (((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */)
&& (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) { && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dictEnd, prefixStart) + minMatch; repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dictEnd, prefixStart) + minMatch;
} }
if (dictMode == ZSTD_dictMatchState if (dictMode == ZSTD_dictMatchState
&& ( ((repOffset-1) /*intentional overflow*/ < current - (dmsLowLimit + dmsIndexDelta)) /* equivalent to `current > repIndex >= dmsLowLimit` */ && ( ((repOffset-1) /*intentional overflow*/ < curr - (dmsLowLimit + dmsIndexDelta)) /* equivalent to `curr > repIndex >= dmsLowLimit` */
& ((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */ & ((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */
&& (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) { && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dmsEnd, prefixStart) + minMatch; repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dmsEnd, prefixStart) + minMatch;
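
The one-branch bounds checks in this hunk rely on deliberate unsigned wrap-around: (repOffset-1) < curr - dictLimit rejects repOffset == 0 (and the wrapped -1 produced by rep[0]-1), and is otherwise the same test as the commented condition curr > repIndex >= dictLimit. A minimal standalone sketch of that equivalence, with made-up values and stand-in helper names (not part of the patch):

/* Illustrative only: the wrap-around test vs. the spelled-out range check. */
#include <assert.h>
#include <stdint.h>

/* one unsigned comparison: repOffset == 0 wraps to UINT32_MAX and is rejected */
static int inPrefixFast(uint32_t curr, uint32_t dictLimit, uint32_t repOffset)
{
    return (uint32_t)(repOffset - 1) < (curr - dictLimit);
}

/* the condition the comment states: curr > repIndex >= dictLimit */
static int inPrefixNaive(uint32_t curr, uint32_t dictLimit, uint32_t repOffset)
{
    uint32_t const repIndex = curr - repOffset;
    return (repOffset > 0) && (repOffset <= curr) && (repIndex >= dictLimit);
}

int main(void)
{
    uint32_t const curr = 1000, dictLimit = 600;  /* requires curr >= dictLimit, as asserted above */
    uint32_t repOffset;
    for (repOffset = 0; repOffset < 1500; repOffset++)
        assert(inPrefixFast(curr, dictLimit, repOffset)
            == inPrefixNaive(curr, dictLimit, repOffset));
    return 0;
}
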
@ -609,7 +609,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
if ((mls == 3) /*static*/ && (bestLength < mls)) { if ((mls == 3) /*static*/ && (bestLength < mls)) {
U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, nextToUpdate3, ip); U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, nextToUpdate3, ip);
if ((matchIndex3 >= matchLow) if ((matchIndex3 >= matchLow)
& (current - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) { & (curr - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {
size_t mlen; size_t mlen;
if ((dictMode == ZSTD_noDict) /*static*/ || (dictMode == ZSTD_dictMatchState) /*static*/ || (matchIndex3 >= dictLimit)) { if ((dictMode == ZSTD_noDict) /*static*/ || (dictMode == ZSTD_dictMatchState) /*static*/ || (matchIndex3 >= dictLimit)) {
const BYTE* const match = base + matchIndex3; const BYTE* const match = base + matchIndex3;
@ -624,26 +624,26 @@ U32 ZSTD_insertBtAndGetAllMatches (
DEBUGLOG(8, "found small match with hlog3, of length %u", DEBUGLOG(8, "found small match with hlog3, of length %u",
(U32)mlen); (U32)mlen);
bestLength = mlen; bestLength = mlen;
assert(current > matchIndex3); assert(curr > matchIndex3);
assert(mnum==0); /* no prior solution */ assert(mnum==0); /* no prior solution */
matches[0].off = (current - matchIndex3) + ZSTD_REP_MOVE; matches[0].off = (curr - matchIndex3) + ZSTD_REP_MOVE;
matches[0].len = (U32)mlen; matches[0].len = (U32)mlen;
mnum = 1; mnum = 1;
if ( (mlen > sufficient_len) | if ( (mlen > sufficient_len) |
(ip+mlen == iLimit) ) { /* best possible length */ (ip+mlen == iLimit) ) { /* best possible length */
ms->nextToUpdate = current+1; /* skip insertion */ ms->nextToUpdate = curr+1; /* skip insertion */
return 1; return 1;
} } } } } }
/* no dictMatchState lookup: dicts don't have a populated HC3 table */ /* no dictMatchState lookup: dicts don't have a populated HC3 table */
} }
hashTable[h] = current; /* Update Hash Table */ hashTable[h] = curr; /* Update Hash Table */
while (nbCompares-- && (matchIndex >= matchLow)) { while (nbCompares-- && (matchIndex >= matchLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask); U32* const nextPtr = bt + 2*(matchIndex & btMask);
const BYTE* match; const BYTE* match;
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */ size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
assert(current > matchIndex); assert(curr > matchIndex);
if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) { if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) {
assert(matchIndex+matchLength >= dictLimit); /* ensure the condition is correct when !extDict */ assert(matchIndex+matchLength >= dictLimit); /* ensure the condition is correct when !extDict */
@ -660,12 +660,12 @@ U32 ZSTD_insertBtAndGetAllMatches (
if (matchLength > bestLength) { if (matchLength > bestLength) {
DEBUGLOG(8, "found match of length %u at distance %u (offCode=%u)", DEBUGLOG(8, "found match of length %u at distance %u (offCode=%u)",
(U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE); (U32)matchLength, curr - matchIndex, curr - matchIndex + ZSTD_REP_MOVE);
assert(matchEndIdx > matchIndex); assert(matchEndIdx > matchIndex);
if (matchLength > matchEndIdx - matchIndex) if (matchLength > matchEndIdx - matchIndex)
matchEndIdx = matchIndex + (U32)matchLength; matchEndIdx = matchIndex + (U32)matchLength;
bestLength = matchLength; bestLength = matchLength;
matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE; matches[mnum].off = (curr - matchIndex) + ZSTD_REP_MOVE;
matches[mnum].len = (U32)matchLength; matches[mnum].len = (U32)matchLength;
mnum++; mnum++;
if ( (matchLength > ZSTD_OPT_NUM) if ( (matchLength > ZSTD_OPT_NUM)
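
For context on the off field recorded above: the distance curr - matchIndex is stored shifted by ZSTD_REP_MOVE, keeping the smallest code values free for repeat-offset matches. Below is a minimal sketch of that convention; REP_MOVE and its value 2 are assumptions for illustration, not taken from the patch.

/* Illustrative only -- not part of the patch. REP_MOVE stands in for
 * ZSTD_REP_MOVE; its value here (2) is an assumption for this sketch. */
#include <assert.h>
#include <stdint.h>

#define REP_MOVE 2u  /* assumed count of low code values reserved for repeat offsets */

static uint32_t encodeOffCode(uint32_t distance)  /* distance >= 1 */
{
    return distance + REP_MOVE;                   /* shift past the reserved values */
}

static uint32_t decodeDistance(uint32_t offCode)
{
    assert(offCode > REP_MOVE);                   /* smaller values would name a repeat offset */
    return offCode - REP_MOVE;
}

int main(void)
{
    uint32_t const curr = 5000, matchIndex = 4000;  /* made-up window indices */
    uint32_t const offCode = encodeOffCode(curr - matchIndex);
    assert(decodeDistance(offCode) == 1000);
    return 0;
}
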
@ -708,11 +708,11 @@ U32 ZSTD_insertBtAndGetAllMatches (
if (matchLength > bestLength) { if (matchLength > bestLength) {
matchIndex = dictMatchIndex + dmsIndexDelta; matchIndex = dictMatchIndex + dmsIndexDelta;
DEBUGLOG(8, "found dms match of length %u at distance %u (offCode=%u)", DEBUGLOG(8, "found dms match of length %u at distance %u (offCode=%u)",
(U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE); (U32)matchLength, curr - matchIndex, curr - matchIndex + ZSTD_REP_MOVE);
if (matchLength > matchEndIdx - matchIndex) if (matchLength > matchEndIdx - matchIndex)
matchEndIdx = matchIndex + (U32)matchLength; matchEndIdx = matchIndex + (U32)matchLength;
bestLength = matchLength; bestLength = matchLength;
matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE; matches[mnum].off = (curr - matchIndex) + ZSTD_REP_MOVE;
matches[mnum].len = (U32)matchLength; matches[mnum].len = (U32)matchLength;
mnum++; mnum++;
if ( (matchLength > ZSTD_OPT_NUM) if ( (matchLength > ZSTD_OPT_NUM)
@ -733,7 +733,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
} }
} }
assert(matchEndIdx > current+8); assert(matchEndIdx > curr+8);
ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */ ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */
return mnum; return mnum;
} }
@ -1143,7 +1143,7 @@ size_t ZSTD_compressBlock_btultra2(
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
const void* src, size_t srcSize) const void* src, size_t srcSize)
{ {
U32 const current = (U32)((const BYTE*)src - ms->window.base); U32 const curr = (U32)((const BYTE*)src - ms->window.base);
DEBUGLOG(5, "ZSTD_compressBlock_btultra2 (srcSize=%zu)", srcSize); DEBUGLOG(5, "ZSTD_compressBlock_btultra2 (srcSize=%zu)", srcSize);
/* 2-pass strategy: /* 2-pass strategy:
@ -1158,7 +1158,7 @@ size_t ZSTD_compressBlock_btultra2(
if ( (ms->opt.litLengthSum==0) /* first block */ if ( (ms->opt.litLengthSum==0) /* first block */
&& (seqStore->sequences == seqStore->sequencesStart) /* no ldm */ && (seqStore->sequences == seqStore->sequencesStart) /* no ldm */
&& (ms->window.dictLimit == ms->window.lowLimit) /* no dictionary */ && (ms->window.dictLimit == ms->window.lowLimit) /* no dictionary */
&& (current == ms->window.dictLimit) /* start of frame, nothing already loaded nor skipped */ && (curr == ms->window.dictLimit) /* start of frame, nothing already loaded nor skipped */
&& (srcSize > ZSTD_PREDEF_THRESHOLD) && (srcSize > ZSTD_PREDEF_THRESHOLD)
) { ) {
ZSTD_initStats_ultra(ms, seqStore, rep, src, srcSize); ZSTD_initStats_ultra(ms, seqStore, rep, src, srcSize);


@ -188,9 +188,9 @@ size_t HUF_readDTableX1_wksp_bmi2(HUF_DTable* DTable, const void* src, size_t sr
int const unroll = 4; int const unroll = 4;
int const nLimit = (int)nbSymbols - unroll + 1; int const nLimit = (int)nbSymbols - unroll + 1;
for (n=0; n<(int)tableLog+1; n++) { for (n=0; n<(int)tableLog+1; n++) {
U32 const current = nextRankStart; U32 const curr = nextRankStart;
nextRankStart += wksp->rankVal[n]; nextRankStart += wksp->rankVal[n];
wksp->rankStart[n] = current; wksp->rankStart[n] = curr;
} }
for (n=0; n < nLimit; n += unroll) { for (n=0; n < nLimit; n += unroll) {
int u; int u;
@ -692,9 +692,9 @@ size_t HUF_readDTableX2_wksp(HUF_DTable* DTable,
/* Get start index of each weight */ /* Get start index of each weight */
{ U32 w, nextRankStart = 0; { U32 w, nextRankStart = 0;
for (w=1; w<maxW+1; w++) { for (w=1; w<maxW+1; w++) {
U32 current = nextRankStart; U32 curr = nextRankStart;
nextRankStart += rankStats[w]; nextRankStart += rankStats[w];
rankStart[w] = current; rankStart[w] = curr;
} }
rankStart[0] = nextRankStart; /* put all 0w symbols at the end of sorted list*/ rankStart[0] = nextRankStart; /* put all 0w symbols at the end of sorted list*/
sizeOfSort = nextRankStart; sizeOfSort = nextRankStart;
@ -717,9 +717,9 @@ size_t HUF_readDTableX2_wksp(HUF_DTable* DTable,
U32 nextRankVal = 0; U32 nextRankVal = 0;
U32 w; U32 w;
for (w=1; w<maxW+1; w++) { for (w=1; w<maxW+1; w++) {
U32 current = nextRankVal; U32 curr = nextRankVal;
nextRankVal += rankStats[w] << (w+rescale); nextRankVal += rankStats[w] << (w+rescale);
rankVal0[w] = current; rankVal0[w] = curr;
} } } }
{ U32 const minBits = tableLog+1 - maxW; { U32 const minBits = tableLog+1 - maxW;
U32 consumed; U32 consumed;
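
The rank-start loops in the three hunks above are all the same exclusive prefix sum: the renamed curr temporary captures the running total before the current rank's count is added, so each rank records where its block of symbols begins. A standalone sketch with made-up counts and stand-in array names (not part of the patch):

/* Illustrative only -- not part of the patch. rankCount/rankStart are
 * stand-in names for this sketch, not zstd identifiers. */
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t const rankCount[5] = { 0, 3, 1, 4, 2 };  /* symbols per rank (made up) */
    uint32_t rankStart[5];
    uint32_t nextRankStart = 0;
    int n;

    for (n = 0; n < 5; n++) {
        uint32_t const curr = nextRankStart;   /* running total = start offset of rank n */
        nextRankStart += rankCount[n];         /* advance past rank n's symbols */
        rankStart[n] = curr;
    }

    /* rank n occupies [rankStart[n], rankStart[n] + rankCount[n]) in the sorted order */
    assert(rankStart[0] == 0 && rankStart[1] == 0 && rankStart[2] == 3);
    assert(rankStart[3] == 4 && rankStart[4] == 8);
    return 0;
}
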