move MEM_readMINMATCH() into zstd_opt.h

zstd_opt.h is its only user.
The use case is too narrow to belong in mem.h.
Renamed to ZSTD_readMINMATCH().
dev
Yann Collet 2017-05-23 15:41:55 -07:00
parent c7fe262dc9
commit b81f19ffce
2 changed files with 19 additions and 19 deletions

mem.h

@@ -352,20 +352,6 @@ MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val)
 }
-/* function safe only for comparisons */
-MEM_STATIC U32 MEM_readMINMATCH(const void* memPtr, U32 length)
-{
-    switch (length)
-    {
-    default :
-    case 4 : return MEM_read32(memPtr);
-    case 3 : if (MEM_isLittleEndian())
-                 return MEM_read32(memPtr)<<8;
-             else
-                 return MEM_read32(memPtr)>>8;
-    }
-}
 #if defined (__cplusplus)
 }
 #endif

zstd_opt.h

@@ -202,6 +202,20 @@ MEM_STATIC void ZSTD_updatePrice(seqStore_t* seqStorePtr, U32 litLength, const B
 }
+/* function safe only for comparisons */
+MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
+{
+    switch (length)
+    {
+    default :
+    case 4 : return MEM_read32(memPtr);
+    case 3 : if (MEM_isLittleEndian())
+                 return MEM_read32(memPtr)<<8;
+             else
+                 return MEM_read32(memPtr)>>8;
+    }
+}
 /* Update hashTable3 up to ip (excluded)
    Assumption : always within prefix (i.e. not within extDict) */
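
The comment "function safe only for comparisons" is load-bearing: for length==3 the helper still loads 32 bits and shifts the fourth byte out, so the returned value is the first three bytes left in a shifted position rather than their numeric value. Two pointers therefore compare equal exactly when their first three bytes match. A minimal standalone sketch of this behavior, with local stand-ins (read32, isLittleEndian, readMINMATCH) for the MEM_* helpers:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

typedef uint32_t U32;

static int isLittleEndian(void)          /* stand-in for MEM_isLittleEndian() */
{
    const U32 one = 1;
    return (int)*(const unsigned char*)&one;
}

static U32 read32(const void* memPtr)    /* stand-in for MEM_read32() */
{
    U32 val;
    memcpy(&val, memPtr, sizeof(val));
    return val;
}

/* stand-in for ZSTD_readMINMATCH(); note it loads 4 bytes even for length==3 */
static U32 readMINMATCH(const void* memPtr, U32 length)
{
    switch (length)
    {
    default :
    case 4 : return read32(memPtr);
    case 3 : if (isLittleEndian())
                 return read32(memPtr)<<8;   /* 4th byte shifted out */
             else
                 return read32(memPtr)>>8;   /* 4th byte shifted out */
    }
}

int main(void)
{
    const char a[] = "abcX";
    const char b[] = "abcY";             /* same first 3 bytes, different 4th */
    printf("3-byte compare: %d\n", readMINMATCH(a, 3) == readMINMATCH(b, 3));  /* 1 */
    printf("4-byte compare: %d\n", readMINMATCH(a, 4) == readMINMATCH(b, 4));  /* 0 */
    return 0;
}

Since the helper always performs a 32-bit load, callers must guarantee at least four readable bytes even for a three-byte comparison; the call sites below carry exactly that assumption.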
@@ -268,7 +282,7 @@ static U32 ZSTD_insertBtAndGetAllMatches (
                 if (match[bestLength] == ip[bestLength]) currentMl = ZSTD_count(ip, match, iLimit);
             } else {
                 match = dictBase + matchIndex3;
-                if (MEM_readMINMATCH(match, MINMATCH) == MEM_readMINMATCH(ip, MINMATCH)) /* assumption : matchIndex3 <= dictLimit-4 (by table construction) */
+                if (ZSTD_readMINMATCH(match, MINMATCH) == ZSTD_readMINMATCH(ip, MINMATCH)) /* assumption : matchIndex3 <= dictLimit-4 (by table construction) */
                     currentMl = ZSTD_count_2segments(ip+MINMATCH, match+MINMATCH, iLimit, dictEnd, prefixStart) + MINMATCH;
             }
@@ -440,7 +454,7 @@ void ZSTD_compressBlock_opt_generic(ZSTD_CCtx* ctx,
         for (i=(ip == anchor); i<last_i; i++) {
             const S32 repCur = (i==ZSTD_REP_MOVE_OPT) ? (rep[0] - 1) : rep[i];
             if ( (repCur > 0) && (repCur < (S32)(ip-prefixStart))
-                && (MEM_readMINMATCH(ip, minMatch) == MEM_readMINMATCH(ip - repCur, minMatch))) {
+                && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repCur, minMatch))) {
                 mlen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repCur, iend) + minMatch;
                 if (mlen > sufficient_len || mlen >= ZSTD_OPT_NUM) {
                     best_mlen = mlen; best_off = i; cur = 0; last_pos = 1;
@@ -525,7 +539,7 @@ void ZSTD_compressBlock_opt_generic(ZSTD_CCtx* ctx,
             for (i=(opt[cur].mlen != 1); i<last_i; i++) {  /* check rep */
                 const S32 repCur = (i==ZSTD_REP_MOVE_OPT) ? (opt[cur].rep[0] - 1) : opt[cur].rep[i];
                 if ( (repCur > 0) && (repCur < (S32)(inr-prefixStart))
-                    && (MEM_readMINMATCH(inr, minMatch) == MEM_readMINMATCH(inr - repCur, minMatch))) {
+                    && (ZSTD_readMINMATCH(inr, minMatch) == ZSTD_readMINMATCH(inr - repCur, minMatch))) {
                     mlen = (U32)ZSTD_count(inr+minMatch, inr+minMatch - repCur, iend) + minMatch;
                     if (mlen > sufficient_len || cur + mlen >= ZSTD_OPT_NUM) {
@@ -699,7 +713,7 @@ void ZSTD_compressBlock_opt_extDict_generic(ZSTD_CCtx* ctx,
             const BYTE* const repMatch = repBase + repIndex;
             if ( (repCur > 0 && repCur <= (S32)current)
                && (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex>lowestIndex))  /* intentional overflow */
-               && (MEM_readMINMATCH(ip, minMatch) == MEM_readMINMATCH(repMatch, minMatch)) ) {
+               && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                 /* repcode detected we should take it */
                 const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
                 mlen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iend, repEnd, prefixStart) + minMatch;
@@ -795,7 +809,7 @@ void ZSTD_compressBlock_opt_extDict_generic(ZSTD_CCtx* ctx,
                 const BYTE* const repMatch = repBase + repIndex;
                 if ( (repCur > 0 && repCur <= (S32)(current+cur))
                   && (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex>lowestIndex))  /* intentional overflow */
-                  && (MEM_readMINMATCH(inr, minMatch) == MEM_readMINMATCH(repMatch, minMatch)) ) {
+                  && (ZSTD_readMINMATCH(inr, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                     /* repcode detected */
                     const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
                     mlen = (U32)ZSTD_count_2segments(inr+minMatch, repMatch+minMatch, iend, repEnd, prefixStart) + minMatch;
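
Every call site changed above follows the same test-then-extend pattern: one ZSTD_readMINMATCH() equality check acts as a cheap prefilter, and only candidates whose first minMatch bytes agree pay for the byte-by-byte extension (ZSTD_count() / ZSTD_count_2segments() in zstd). A hedged, self-contained sketch of that pattern under simplified assumptions; countMatch and tryMatch are illustrative stand-ins, not zstd API:

#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <stddef.h>

typedef uint8_t  BYTE;
typedef uint32_t U32;

static U32 read32(const void* p) { U32 v; memcpy(&v, p, sizeof(v)); return v; }

static U32 readMINMATCH(const void* p, U32 length)  /* as in the sketch above */
{
    const U32 one = 1;
    if (length == 3)
        return *(const unsigned char*)&one ? read32(p)<<8 : read32(p)>>8;
    return read32(p);
}

/* stand-in for ZSTD_count(): length of the common prefix of p and q */
static size_t countMatch(const BYTE* p, const BYTE* q, const BYTE* pEnd)
{
    const BYTE* const start = p;
    while ((p < pEnd) && (*p == *q)) { p++; q++; }
    return (size_t)(p - start);
}

/* Test-then-extend: 0 if the candidate fails the minMatch prefilter,
 * otherwise the full match length. Assumes minMatch is 3 or 4 and at
 * least 4 readable bytes at ip and match (readMINMATCH loads 32 bits). */
static size_t tryMatch(const BYTE* ip, const BYTE* match,
                       const BYTE* iend, U32 minMatch)
{
    if (readMINMATCH(ip, minMatch) != readMINMATCH(match, minMatch))
        return 0;   /* one 32-bit compare rejects most candidates */
    return minMatch + countMatch(ip + minMatch, match + minMatch, iend);
}

int main(void)
{
    const BYTE data[] = "abcabcdef";
    /* rep-style check: does the data at offset 3 repeat the start? */
    printf("match length: %zu\n", tryMatch(data + 3, data, data + 9, 3));  /* 3 */
    return 0;
}

The design keeps the per-candidate cost at a single 32-bit load and compare, which is why minMatch can be tested without entering the extension loop.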