/* zstd/lib/zstdhc.c */

/*
ZSTD HC - High Compression Mode of Zstandard
Copyright (C) 2015, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- Zstd source repository : https://www.zstd.net
*/
/* *******************************************************
* Compiler specifics
*********************************************************/
#ifdef _MSC_VER /* Visual Studio */
# define FORCE_INLINE static __forceinline
# include <intrin.h> /* For Visual 2005 */
# pragma warning(disable : 4127) /* disable: C4127: conditional expression is constant */
# pragma warning(disable : 4324) /* disable: C4324: padded structure */
#else
# define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
# ifdef __GNUC__
# define FORCE_INLINE static inline __attribute__((always_inline))
# else
# define FORCE_INLINE static inline
# endif
#endif
/* *************************************
* Includes
***************************************/
#include <stdlib.h> /* malloc */
#include <string.h> /* memset */
#include "zstdhc_static.h"
#include "zstd_static.h"
#include "zstd_internal.h"
#include "mem.h"
/* *************************************
* Local Constants
***************************************/
#define MINMATCH 4
#define MAXD_LOG 26
#define KB *1024
#define MB *1024*1024
#define GB *(1ULL << 30)
/* *************************************
* Local Types
***************************************/
#define BLOCKSIZE (128 KB) /* define, for static allocation */
#define WORKPLACESIZE (BLOCKSIZE*3)
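/* workSpace layout (set up in ZSTD_HC_resetCCtx_advanced) :
   [ hashTable : (1<<hashLog) U32 ][ chainTable : (1<<chainLog) U32 ][ seqStore buffers : WORKPLACESIZE bytes ] */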
struct ZSTD_HC_CCtx_s
{
const BYTE* end; /* next block here to continue on current prefix */
const BYTE* base; /* All regular indexes relative to this position */
const BYTE* dictBase; /* extDict indexes relative to this position */
U32 dictLimit; /* below that point, need extDict */
U32 lowLimit; /* below that point, no more data */
U32 nextToUpdate; /* index from which to continue dictionary update */
ZSTD_HC_parameters params;
void* workSpace;
size_t workSpaceSize;
seqStore_t seqStore; /* sequences storage ptrs */
U32* hashTable;
U32* chainTable;
};
ZSTD_HC_CCtx* ZSTD_HC_createCCtx(void)
{
return (ZSTD_HC_CCtx*) calloc(1, sizeof(ZSTD_HC_CCtx));
}
size_t ZSTD_HC_freeCCtx(ZSTD_HC_CCtx* cctx)
{
free(cctx->workSpace);
free(cctx);
return 0;
}
/** ZSTD_HC_validateParams
    corrects params values to remain within the authorized range ;
    optimizes params for srcSize, if srcSize > 0 */
void ZSTD_HC_validateParams(ZSTD_HC_parameters* params, size_t srcSize)
{
const U32 chainplus = (params->strategy == ZSTD_HC_btlazy2);
/* validate params */
if (params->windowLog > ZSTD_HC_WINDOWLOG_MAX) params->windowLog = ZSTD_HC_WINDOWLOG_MAX;
if (params->windowLog < ZSTD_HC_WINDOWLOG_MIN) params->windowLog = ZSTD_HC_WINDOWLOG_MIN;
/* correct params, to use less memory */
if (srcSize > 0)
{
U32 srcLog = ZSTD_highbit((U32)srcSize-1) + 1;
if (params->windowLog > srcLog) params->windowLog = srcLog;
}
if (params->chainLog > params->windowLog + chainplus) params->chainLog = params->windowLog+chainplus; /* <= ZSTD_HC_CHAINLOG_MAX */
if (params->chainLog < ZSTD_HC_CHAINLOG_MIN) params->chainLog = ZSTD_HC_CHAINLOG_MIN;
if (params->hashLog > ZSTD_HC_HASHLOG_MAX) params->hashLog = ZSTD_HC_HASHLOG_MAX;
if (params->hashLog < ZSTD_HC_HASHLOG_MIN) params->hashLog = ZSTD_HC_HASHLOG_MIN;
if (params->searchLog > ZSTD_HC_SEARCHLOG_MAX) params->searchLog = ZSTD_HC_SEARCHLOG_MAX;
if (params->searchLog < ZSTD_HC_SEARCHLOG_MIN) params->searchLog = ZSTD_HC_SEARCHLOG_MIN;
if (params->searchLength> ZSTD_HC_SEARCHLENGTH_MAX) params->searchLength = ZSTD_HC_SEARCHLENGTH_MAX;
if (params->searchLength< ZSTD_HC_SEARCHLENGTH_MIN) params->searchLength = ZSTD_HC_SEARCHLENGTH_MIN;
if ((U32)params->strategy>(U32)ZSTD_HC_btlazy2) params->strategy = ZSTD_HC_btlazy2;
if ((int)params->strategy<(int)ZSTD_HC_greedy) params->strategy = ZSTD_HC_greedy;
}
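/* Worked example (illustrative) : for srcSize = 100 KB, srcLog = ZSTD_highbit(102399)+1 = 17,
   so a requested windowLog of 22 is clamped down to 17 ; chainLog is then capped at windowLog
   (windowLog+1 for the btlazy2 strategy, whose binary tree uses two table slots per position). */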
static size_t ZSTD_HC_resetCCtx_advanced (ZSTD_HC_CCtx* zc,
ZSTD_HC_parameters params)
{
ZSTD_HC_validateParams(&params, 0);
/* reserve table memory */
{
const size_t tableSpace = ((1 << params.chainLog) + (1 << params.hashLog)) * sizeof(U32);
const size_t neededSpace = tableSpace + WORKPLACESIZE;
if (zc->workSpaceSize < neededSpace)
{
free(zc->workSpace);
zc->workSpaceSize = neededSpace;
zc->workSpace = malloc(neededSpace);
if (zc->workSpace == NULL) return ERROR(memory_allocation);
}
zc->hashTable = (U32*)zc->workSpace;
zc->chainTable = zc->hashTable + ((size_t)1 << params.hashLog);
zc->seqStore.buffer = (void*) (zc->chainTable + ((size_t)1 << params.chainLog));
memset(zc->hashTable, 0, tableSpace );
}
zc->nextToUpdate = 0;
zc->end = NULL;
zc->base = NULL;
zc->dictBase = NULL;
zc->dictLimit = 0;
zc->lowLimit = 0;
zc->params = params;
zc->seqStore.offsetStart = (U32*) (zc->seqStore.buffer);
zc->seqStore.offCodeStart = (BYTE*) (zc->seqStore.offsetStart + (BLOCKSIZE>>2));
zc->seqStore.litStart = zc->seqStore.offCodeStart + (BLOCKSIZE>>2);
zc->seqStore.litLengthStart = zc->seqStore.litStart + BLOCKSIZE;
zc->seqStore.matchLengthStart = zc->seqStore.litLengthStart + (BLOCKSIZE>>2);
zc->seqStore.dumpsStart = zc->seqStore.matchLengthStart + (BLOCKSIZE>>2);
return 0;
}
/* *************************************
* Inline functions and Macros
***************************************/
static const U32 prime4bytes = 2654435761U;
static U32 ZSTD_HC_hash4(U32 u, U32 h) { return (u * prime4bytes) >> (32-h) ; }
static size_t ZSTD_HC_hash4Ptr(const void* ptr, U32 h) { return ZSTD_HC_hash4(MEM_read32(ptr), h); }
static const U64 prime5bytes = 889523592379ULL;
static size_t ZSTD_HC_hash5(U64 u, U32 h) { return (size_t)((u * prime5bytes) << (64-40) >> (64-h)) ; }
static size_t ZSTD_HC_hash5Ptr(const void* p, U32 h) { return ZSTD_HC_hash5(MEM_read64(p), h); }
static const U64 prime6bytes = 227718039650203ULL;
static size_t ZSTD_HC_hash6(U64 u, U32 h) { return (size_t)((u * prime6bytes) << (64-48) >> (64-h)) ; }
static size_t ZSTD_HC_hash6Ptr(const void* p, U32 h) { return ZSTD_HC_hash6(MEM_read64(p), h); }
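/* Note : multiplicative hashes. The multiplication by a large prime mixes the input into the
   high bits, and the final right shift keeps only the top h bits as the table index.
   For the 5- and 6-byte variants, the intermediate left shift by (64-40) resp. (64-48) drops
   the part of the product influenced by the upper bytes of the 64-bit read, so the hash
   effectively depends on only 5 (resp. 6) bytes. */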
static size_t ZSTD_HC_hashPtr(const void* p, U32 hBits, U32 mls)
{
switch(mls)
{
default:
case 4: return ZSTD_HC_hash4Ptr(p, hBits);
case 5: return ZSTD_HC_hash5Ptr(p, hBits);
case 6: return ZSTD_HC_hash6Ptr(p, hBits);
}
}
#define NEXT_IN_CHAIN(d, mask) chainTable[(d) & mask]
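/* The chain table behaves as a rolling linked list : entry (pos & mask) stores the index of the
   previous position with the same hash, so following NEXT_IN_CHAIN() enumerates candidate match
   positions from most recent to oldest. The btlazy2 strategy reuses the same table as a binary
   tree (see below). */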
/* *************************************
* Binary Tree search
***************************************/
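/* The chain table is reinterpreted here as a binary search tree over suffixes :
   each position owns two U32 slots, bt[2*(pos & btMask)] rooting the sub-tree of smaller
   suffixes and bt[2*(pos & btMask)+1] the larger ones. commonLengthSmaller / commonLengthLarger
   record the match length already verified against the last node attached on each side ;
   their minimum is a guaranteed common prefix with the next candidate, so byte comparison
   can resume at ip+matchLength instead of restarting from ip. */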
/** ZSTD_HC_insertBt1 : add one position into the binary tree
    @ip : assumed <= iend-8 */
static void ZSTD_HC_insertBt1(ZSTD_HC_CCtx* zc, const BYTE* const ip, const U32 mls, const BYTE* const iend, U32 nbCompares)
{
U32* const hashTable = zc->hashTable;
const U32 hashLog = zc->params.hashLog;
const size_t h = ZSTD_HC_hashPtr(ip, hashLog, mls);
U32* const bt = zc->chainTable;
const U32 btLog = zc->params.chainLog - 1;
const U32 btMask= (1 << btLog) - 1;
U32 matchIndex = hashTable[h];
size_t commonLengthSmaller=0, commonLengthLarger=0;
const BYTE* const base = zc->base;
const U32 current = (U32)(ip-base);
const U32 btLow = btMask >= current ? 0 : current - btMask;
U32* smallerPtr = bt + 2*(current&btMask);
U32* largerPtr = bt + 2*(current&btMask) + 1;
U32 dummy32; /* to be nullified at the end */
const U32 windowSize = 1 << zc->params.windowLog;
const U32 windowLow = windowSize >= current ? 0 : current - windowSize;
hashTable[h] = (U32)(ip-base); /* Update Hash Table */
while (nbCompares-- && (matchIndex > windowLow))
{
U32* nextPtr = bt + 2*(matchIndex & btMask);
const BYTE* match = base + matchIndex;
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
if (ip+matchLength == iend) /* equal : no way to know if smaller or larger */
break; /* just drop, to guarantee consistency (misses a bit of compression; if someone knows better, please tell) */
if (match[matchLength] < ip[matchLength])
{
/* match is smaller than current */
*smallerPtr = matchIndex; /* update smaller idx */
commonLengthSmaller = matchLength; /* all smaller will now have at least this guaranteed common length */
if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop the search */
smallerPtr = nextPtr+1; /* new "smaller" => larger of match */
matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */
}
else
{
/* match is larger than current */
*largerPtr = matchIndex;
commonLengthLarger = matchLength;
if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop the search */
largerPtr = nextPtr;
matchIndex = nextPtr[0];
}
}
*smallerPtr = *largerPtr = 0;
}
FORCE_INLINE /* inlining is important to hardwire a hot branch (template emulation) */
size_t ZSTD_HC_insertBtAndFindBestMatch (
ZSTD_HC_CCtx* zc,
const BYTE* const ip, const BYTE* const iend,
size_t* offsetPtr,
U32 nbCompares, const U32 mls)
{
U32* const hashTable = zc->hashTable;
const U32 hashLog = zc->params.hashLog;
const size_t h = ZSTD_HC_hashPtr(ip, hashLog, mls);
U32* const bt = zc->chainTable;
const U32 btLog = zc->params.chainLog - 1;
const U32 btMask= (1 << btLog) - 1;
U32 matchIndex = hashTable[h];
size_t commonLengthSmaller=0, commonLengthLarger=0;
const BYTE* const base = zc->base;
const U32 current = (U32)(ip-base);
const U32 btLow = btMask >= current ? 0 : current - btMask;
const U32 windowSize = 1 << zc->params.windowLog;
const U32 windowLow = windowSize >= current ? 0 : current - windowSize;
U32* smallerPtr = bt + 2*(current&btMask);
U32* largerPtr = bt + 2*(current&btMask) + 1;
size_t bestLength = 0;
U32 dummy32; /* to be nullified at the end */
hashTable[h] = (U32)(ip-base); /* Update Hash Table */
while (nbCompares-- && (matchIndex > windowLow))
{
U32* nextPtr = bt + 2*(matchIndex & btMask);
const BYTE* match = base + matchIndex;
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
if (matchLength > bestLength)
{
if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit(current-matchIndex+1) - ZSTD_highbit((U32)offsetPtr[0]+1)) )
bestLength = matchLength, *offsetPtr = current - matchIndex;
if (ip+matchLength == iend) /* equal : no way to know if smaller or larger */
break; /* drop, next to null, to guarantee consistency (is there a way to do better ?) */
}
if (match[matchLength] < ip[matchLength])
{
/* match is smaller than current */
*smallerPtr = matchIndex; /* update smaller idx */
commonLengthSmaller = matchLength; /* all smaller will now have at least this guaranteed common length */
if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop the search */
smallerPtr = nextPtr+1; /* new "smaller" => larger of match */
matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */
}
else
{
/* match is larger than current */
*largerPtr = matchIndex;
commonLengthLarger = matchLength;
if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop the search */
largerPtr = nextPtr;
matchIndex = nextPtr[0];
}
}
*smallerPtr = *largerPtr = 0;
zc->nextToUpdate = current+1; /* current has been inserted */
if (bestLength < MINMATCH) return 0;
return bestLength;
}
static void ZSTD_HC_updateTree(ZSTD_HC_CCtx* zc, const BYTE* const ip, const BYTE* const iend, const U32 nbCompares, const U32 mls)
{
const BYTE* const base = zc->base;
const U32 target = (U32)(ip - base);
U32 idx = zc->nextToUpdate;
//size_t dummy;
for( ; idx < target ; idx++)
ZSTD_HC_insertBt1(zc, base+idx, mls, iend, nbCompares);
//ZSTD_HC_insertBtAndFindBestMatch(zc, base+idx, iend, &dummy, nbCompares, mls);
zc->nextToUpdate = target;
}
/** Tree updater, providing best match */
FORCE_INLINE /* inlining is important to hardwire a hot branch (template emulation) */
size_t ZSTD_HC_BtFindBestMatch (
ZSTD_HC_CCtx* zc,
const BYTE* const ip, const BYTE* const iLimit,
size_t* offsetPtr,
const U32 maxNbAttempts, const U32 mls)
{
ZSTD_HC_updateTree(zc, ip, iLimit, maxNbAttempts, mls);
return ZSTD_HC_insertBtAndFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, mls);
}
FORCE_INLINE size_t ZSTD_HC_BtFindBestMatch_selectMLS (
ZSTD_HC_CCtx* zc, /* Index table will be updated */
const BYTE* ip, const BYTE* const iLimit,
size_t* offsetPtr,
const U32 maxNbAttempts, const U32 matchLengthSearch)
{
switch(matchLengthSearch)
{
default :
case 4 : return ZSTD_HC_BtFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 4);
case 5 : return ZSTD_HC_BtFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 5);
case 6 : return ZSTD_HC_BtFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 6);
}
}
size_t ZSTD_HC_compressBlock_btLazy2(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
seqStore_t* seqStorePtr = &(ctx->seqStore);
const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* anchor = istart;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
size_t offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
const U32 maxSearches = 1 << ctx->params.searchLog;
const U32 mls = ctx->params.searchLength;
/* init */
ZSTD_resetSeqStore(seqStorePtr);
if (((ip-ctx->base) - ctx->dictLimit) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
/* Match Loop */
while (ip <= ilimit)
{
size_t matchLength;
size_t offset=999999;
const BYTE* start;
/* try to find a first match */
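/* offset_1 / offset_2 hold the two most recently used match offsets ("repcodes").
   A match found at distance offset_2 is stored by passing 0 as the offset to ZSTD_storeSeq,
   signalling a repeat of a previous offset, which is cheaper to encode than an explicit one. */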
if (MEM_read32(ip) == MEM_read32(ip - offset_2))
{
/* repcode : we take it*/
size_t offtmp = offset_2;
size_t litLength = ip - anchor;
matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
offset_2 = offset_1;
offset_1 = offtmp;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
ip += matchLength+MINMATCH;
anchor = ip;
continue;
}
offset_2 = offset_1;
matchLength = ZSTD_HC_BtFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
if (!matchLength) { ip++; continue; }
/* let's try to find a better solution */
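/* Lazy evaluation : before emitting the match found at ip, probe ip+1 (and ip+2) for a better one.
   The gain expressions below are a crude cost model : matchLength is weighted (x3 or x4) against
   ZSTD_highbit(offset+1), an estimate of the bits needed to encode the offset, and a small
   constant bias favours the match already selected. */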
start = ip;
while (ip<ilimit)
{
ip ++;
if (MEM_read32(ip) == MEM_read32(ip - offset_1))
{
size_t ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_1, iend) + MINMATCH;
int gain2 = (int)(ml2 * 3);
int gain1 = (int)(matchLength*3 - ZSTD_highbit((U32)offset+1) + 1);
if (gain2 > gain1)
matchLength = ml2, offset = 0, start = ip;
}
{
size_t offset2=999999;
size_t ml2 = ZSTD_HC_BtFindBestMatch_selectMLS(ctx, ip, iend, &offset2, maxSearches, mls);
int gain2 = (int)(ml2*4 - ZSTD_highbit((U32)offset2+1)); /* raw approx */
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 4);
if (gain2 > gain1)
{
matchLength = ml2, offset = offset2, start = ip;
continue; /* search a better one */
}
}
/* let's find an even better one */
if (ip<ilimit)
{
ip ++;
if (MEM_read32(ip) == MEM_read32(ip - offset_1))
{
size_t ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_1, iend) + MINMATCH;
int gain2 = (int)(ml2 * 4);
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 1);
if (gain2 > gain1)
matchLength = ml2, offset = 0, start = ip;
}
{
size_t offset2=999999;
size_t ml2 = ZSTD_HC_BtFindBestMatch_selectMLS(ctx, ip, iend, &offset2, maxSearches, mls);
int gain2 = (int)(ml2*4 - ZSTD_highbit((U32)offset2+1)); /* raw approx */
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 7);
if (gain2 > gain1)
{
matchLength = ml2, offset = offset2, start = ip;
continue;
}
}
}
break; /* nothing found : store previous solution */
}
/* store sequence */
{
size_t litLength = start - anchor;
if (offset) offset_1 = offset;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
ip = start + matchLength;
anchor = ip;
}
}
/* Last Literals */
{
size_t lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
}
/* Final compression stage */
return ZSTD_compressSequences((BYTE*)dst, maxDstSize,
seqStorePtr, srcSize);
}
/* ***********************
* Hash Chain
*************************/
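/* Hash chain match finder : hashTable[h] holds the most recent position whose first bytes hash
   to h ; chainTable links every indexed position to the previous one sharing the same hash.
   The search walks this chain, newest candidate first, for at most maxNbAttempts candidates
   within the current window. */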
/* Update chains up to ip (excluded) */
static U32 ZSTD_HC_insertAndFindFirstIndex (ZSTD_HC_CCtx* zc, const BYTE* ip, U32 mls)
{
U32* const hashTable = zc->hashTable;
const U32 hashLog = zc->params.hashLog;
U32* const chainTable = zc->chainTable;
const U32 chainMask = (1 << zc->params.chainLog) - 1;
const BYTE* const base = zc->base;
const U32 target = (U32)(ip - base);
U32 idx = zc->nextToUpdate;
while(idx < target)
{
size_t h = ZSTD_HC_hashPtr(base+idx, hashLog, mls);
NEXT_IN_CHAIN(idx, chainMask) = hashTable[h];
hashTable[h] = idx;
idx++;
}
zc->nextToUpdate = target;
return hashTable[ZSTD_HC_hashPtr(ip, hashLog, mls)];
}
FORCE_INLINE /* inlining is important to hardwire a hot branch (template emulation) */
size_t ZSTD_HC_insertAndFindBestMatch (
ZSTD_HC_CCtx* zc, /* Index table will be updated */
const BYTE* const ip, const BYTE* const iLimit,
size_t* offsetPtr,
const U32 maxNbAttempts, const U32 matchLengthSearch)
{
U32* const chainTable = zc->chainTable;
const U32 chainSize = (1 << zc->params.chainLog);
const U32 chainMask = chainSize-1;
const BYTE* const base = zc->base;
const BYTE* const dictBase = zc->dictBase;
const U32 dictLimit = zc->dictLimit;
const U32 maxDistance = (1 << zc->params.windowLog);
const U32 lowLimit = (zc->lowLimit + maxDistance > (U32)(ip-base)) ? zc->lowLimit : (U32)(ip - base) - (maxDistance - 1);
U32 matchIndex;
const BYTE* match;
int nbAttempts=maxNbAttempts;
size_t ml=0;
/* HC4 match finder */
matchIndex = ZSTD_HC_insertAndFindFirstIndex (zc, ip, matchLengthSearch);
while ((matchIndex>lowLimit) && (nbAttempts))
{
nbAttempts--;
if (matchIndex >= dictLimit)
{
match = base + matchIndex;
if ( (match[ml] == ip[ml])
&& (MEM_read32(match) == MEM_read32(ip)) ) /* ensures minimum match of 4 */
{
const size_t mlt = ZSTD_count(ip+MINMATCH, match+MINMATCH, iLimit) + MINMATCH;
if (mlt > ml)
//if (((int)(4*mlt) - (int)ZSTD_highbit((U32)(ip-match)+1)) > ((int)(4*ml) - (int)ZSTD_highbit((U32)((*offsetPtr)+1))))
{
ml = mlt; *offsetPtr = ip-match;
if (ip+ml >= iLimit) break;
}
}
}
else
{
match = dictBase + matchIndex;
if (MEM_read32(match) == MEM_read32(ip))
{
size_t mlt;
const BYTE* vLimit = ip + (dictLimit - matchIndex);
if (vLimit > iLimit) vLimit = iLimit;
mlt = ZSTD_count(ip+MINMATCH, match+MINMATCH, vLimit) + MINMATCH;
if ((ip+mlt == vLimit) && (vLimit < iLimit))
mlt += ZSTD_count(ip+mlt, base+dictLimit, iLimit);
if (mlt > ml) { ml = mlt; *offsetPtr = (ip-base) - matchIndex; }
}
}
if (base + matchIndex <= ip - chainSize) break;
matchIndex = NEXT_IN_CHAIN(matchIndex, chainMask);
}
return ml;
}
FORCE_INLINE size_t ZSTD_HC_insertAndFindBestMatch_selectMLS (
ZSTD_HC_CCtx* zc, /* Index table will be updated */
const BYTE* ip, const BYTE* const iLimit,
size_t* offsetPtr,
const U32 maxNbAttempts, const U32 matchLengthSearch)
{
switch(matchLengthSearch)
{
default :
case 4 : return ZSTD_HC_insertAndFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 4);
case 5 : return ZSTD_HC_insertAndFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 5);
case 6 : return ZSTD_HC_insertAndFindBestMatch(zc, ip, iLimit, offsetPtr, maxNbAttempts, 6);
}
}
size_t ZSTD_HC_compressBlock_lazydeep(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
seqStore_t* seqStorePtr = &(ctx->seqStore);
const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* anchor = istart;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
size_t offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
const U32 maxSearches = 1 << ctx->params.searchLog;
const U32 mls = ctx->params.searchLength;
/* init */
ZSTD_resetSeqStore(seqStorePtr);
if (((ip-ctx->base) - ctx->dictLimit) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
/* Match Loop */
while (ip <= ilimit)
{
size_t matchLength;
size_t offset=999999;
const BYTE* start;
/* try to find a first match */
if (MEM_read32(ip) == MEM_read32(ip - offset_2))
{
/* repcode : we take it*/
size_t offtmp = offset_2;
size_t litLength = ip - anchor;
matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
offset_2 = offset_1;
offset_1 = offtmp;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
ip += matchLength+MINMATCH;
anchor = ip;
continue;
}
offset_2 = offset_1;
matchLength = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
if (!matchLength) { ip++; continue; }
/* let's try to find a better solution */
start = ip;
while (ip<ilimit)
{
ip ++;
if (MEM_read32(ip) == MEM_read32(ip - offset_1))
{
size_t ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_1, iend) + MINMATCH;
int gain2 = (int)(ml2 * 3);
int gain1 = (int)(matchLength*3 - ZSTD_highbit((U32)offset+1) + 1);
if (gain2 > gain1)
matchLength = ml2, offset = 0, start = ip;
}
{
size_t offset2=999999;
size_t ml2 = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset2, maxSearches, mls);
int gain2 = (int)(ml2*4 - ZSTD_highbit((U32)offset2+1)); /* raw approx */
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 4);
if (gain2 > gain1)
{
matchLength = ml2, offset = offset2, start = ip;
continue; /* search a better one */
}
}
/* let's find an even better one */
if (ip<ilimit)
{
ip ++;
if (MEM_read32(ip) == MEM_read32(ip - offset_1))
{
size_t ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_1, iend) + MINMATCH;
int gain2 = (int)(ml2 * 4);
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 1);
if (gain2 > gain1)
matchLength = ml2, offset = 0, start = ip;
}
{
size_t offset2=999999;
size_t ml2 = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset2, maxSearches, mls);
int gain2 = (int)(ml2*4 - ZSTD_highbit((U32)offset2+1)); /* raw approx */
int gain1 = (int)(matchLength*4 - ZSTD_highbit((U32)offset+1) + 7);
if (gain2 > gain1)
{
matchLength = ml2, offset = offset2, start = ip;
continue;
}
}
}
break; /* nothing found : store previous solution */
}
/* store sequence */
{
size_t litLength = start - anchor;
if (offset) offset_1 = offset;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
ip = start + matchLength;
anchor = ip;
}
}
/* Last Literals */
{
size_t lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
}
/* Final compression stage */
return ZSTD_compressSequences((BYTE*)dst, maxDstSize,
seqStorePtr, srcSize);
}
size_t ZSTD_HC_compressBlock_lazy(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
seqStore_t* seqStorePtr = &(ctx->seqStore);
const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* anchor = istart;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
size_t offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
const U32 maxSearches = 1 << ctx->params.searchLog;
const U32 mls = ctx->params.searchLength;
/* init */
ZSTD_resetSeqStore(seqStorePtr);
if (((ip-ctx->base) - ctx->dictLimit) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
/* Match Loop */
while (ip <= ilimit)
{
size_t matchLength;
size_t offset=0;
const BYTE* start;
/* try to find a first match */
if (MEM_read32(ip) == MEM_read32(ip - offset_2))
{
/* repcode : we take it*/
size_t offtmp = offset_2;
size_t litLength = ip - anchor;
matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
offset_2 = offset_1;
offset_1 = offtmp;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
ip += matchLength+MINMATCH;
anchor = ip;
continue;
}
offset_2 = offset_1;
matchLength = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
if (!matchLength) { ip++; continue; }
/* let's try to find a better solution */
start = ip;
while (ip<ilimit)
{
ip ++;
if (MEM_read32(ip) == MEM_read32(ip - offset_1))
{
size_t ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_1, iend) + MINMATCH;
int gain2 = (int)(ml2 * 3);
int gain1 = (int)(matchLength*3 - ZSTD_highbit((U32)offset+1) + 1);
if (gain2 > gain1)
{
matchLength = ml2, offset = 0, start = ip;
}
}
{
size_t offset2=999999;
size_t ml2 = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset2, maxSearches, mls);
int gain2 = (int)(ml2*3 - ZSTD_highbit((U32)offset2+1)); /* raw approx */
int gain1 = (int)(matchLength*3 - ZSTD_highbit((U32)offset+1) + 3);
if (gain2 > gain1)
{
matchLength = ml2, offset = offset2, start = ip;
continue; /* search a better one */
}
}
break; /* nothing found : store previous one */
}
/* store sequence */
{
size_t litLength = start - anchor;
if (offset) offset_1 = offset;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
ip = start + matchLength;
anchor = ip;
}
}
/* Last Literals */
{
size_t lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
}
/* Final compression stage */
return ZSTD_compressSequences((BYTE*)dst, maxDstSize,
seqStorePtr, srcSize);
}
size_t ZSTD_HC_compressBlock_greedy(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
seqStore_t* seqStorePtr = &(ctx->seqStore);
const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* anchor = istart;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
size_t offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
const U32 maxSearches = 1 << ctx->params.searchLog;
const U32 mls = ctx->params.searchLength;
/* init */
ZSTD_resetSeqStore(seqStorePtr);
if (((ip-ctx->base) - ctx->dictLimit) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
/* Match Loop */
while (ip < ilimit)
{
/* repcode */
if (MEM_read32(ip) == MEM_read32(ip - offset_2))
{
/* store sequence */
size_t matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
size_t litLength = ip-anchor;
size_t offset = offset_2;
offset_2 = offset_1;
offset_1 = offset;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
ip += matchLength+MINMATCH;
anchor = ip;
continue;
}
offset_2 = offset_1; /* failed once : necessarily offset_1 now */
/* repcode at ip+1 */
if (MEM_read32(ip+1) == MEM_read32(ip+1 - offset_1))
{
size_t matchLength = ZSTD_count(ip+1+MINMATCH, ip+1+MINMATCH-offset_1, iend);
size_t litLength = ip+1-anchor;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
ip += 1+matchLength+MINMATCH;
anchor = ip;
continue;
}
/* search */
{
size_t offset=999999;
size_t matchLength = ZSTD_HC_insertAndFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
if (!matchLength) { ip++; continue; }
/* store sequence */
{
size_t litLength = ip-anchor;
offset_1 = offset;
ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset_1, matchLength-MINMATCH);
ip += matchLength;
anchor = ip;
}
}
}
/* Last Literals */
{
size_t lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
}
/* Final compression stage */
return ZSTD_compressSequences((BYTE*)dst, maxDstSize,
seqStorePtr, srcSize);
}
typedef size_t (*ZSTD_HC_blockCompressor) (ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize);
static ZSTD_HC_blockCompressor ZSTD_HC_selectBlockCompressor(ZSTD_HC_strategy strat)
{
switch(strat)
{
default :
case ZSTD_HC_greedy:
return ZSTD_HC_compressBlock_greedy;
case ZSTD_HC_lazy:
return ZSTD_HC_compressBlock_lazy;
case ZSTD_HC_lazydeep:
return ZSTD_HC_compressBlock_lazydeep;
case ZSTD_HC_btlazy2:
return ZSTD_HC_compressBlock_btLazy2;
}
}
size_t ZSTD_HC_compressBlock(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
ZSTD_HC_blockCompressor blockCompressor = ZSTD_HC_selectBlockCompressor(ctx->params.strategy);
return blockCompressor(ctx, dst, maxDstSize, src, srcSize);
}
static size_t ZSTD_HC_compress_generic (ZSTD_HC_CCtx* ctxPtr,
void* dst, size_t maxDstSize,
const void* src, size_t srcSize)
{
size_t blockSize = BLOCKSIZE;
size_t remaining = srcSize;
const BYTE* ip = (const BYTE*)src;
BYTE* const ostart = (BYTE*)dst;
BYTE* op = ostart;
const ZSTD_HC_blockCompressor blockCompressor = ZSTD_HC_selectBlockCompressor(ctxPtr->params.strategy);
while (remaining)
2015-10-22 07:31:46 -07:00
{
2015-11-04 09:19:39 -08:00
size_t cSize;
if (maxDstSize < 3 + MIN_CBLOCK_SIZE) return ERROR(dstSize_tooSmall); /* not enough space to store compressed block */
if (remaining < blockSize) blockSize = remaining;
cSize = blockCompressor(ctxPtr, op+3, maxDstSize-3, ip, blockSize);
if (ZSTD_isError(cSize)) return cSize;
if (cSize == 0)
{
cSize = ZSTD_noCompressBlock(op, maxDstSize, ip, blockSize); /* block is not compressible */
}
else
{
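/* 3-byte block header : 24-bit compressed size stored big-endian,
   block type (bt_compressed) packed into the 2 high bits of op[0] */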
op[0] = (BYTE)(cSize>>16);
op[1] = (BYTE)(cSize>>8);
op[2] = (BYTE)cSize;
op[0] += (BYTE)(bt_compressed << 6); /* is a compressed block */
cSize += 3;
}
remaining -= blockSize;
maxDstSize -= cSize;
ip += blockSize;
op += cSize;
}
return op-ostart;
}
size_t ZSTD_HC_compressContinue (ZSTD_HC_CCtx* ctxPtr,
void* dst, size_t dstSize,
const void* src, size_t srcSize)
{
const BYTE* const ip = (const BYTE*) src;
/* Check if blocks follow each other */
if (ip != ctxPtr->end)
2015-10-22 07:31:46 -07:00
{
if (ctxPtr->end != NULL)
ZSTD_HC_resetCCtx_advanced(ctxPtr, ctxPtr->params); /* just reset, but no need to re-alloc */
ctxPtr->base = ip;
}
ctxPtr->end = ip + srcSize;
return ZSTD_HC_compress_generic (ctxPtr, dst, dstSize, src, srcSize);
}
size_t ZSTD_HC_compressBegin_advanced(ZSTD_HC_CCtx* ctx,
void* dst, size_t maxDstSize,
const ZSTD_HC_parameters params)
{
size_t errorCode;
if (maxDstSize < 4) return ERROR(dstSize_tooSmall);
errorCode = ZSTD_HC_resetCCtx_advanced(ctx, params);
if (ZSTD_isError(errorCode)) return errorCode;
MEM_writeLE32(dst, ZSTD_magicNumber); /* Write Header */
return 4;
}
size_t ZSTD_HC_compressBegin(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, int compressionLevel)
{
if (compressionLevel<=0) compressionLevel = 1;
if (compressionLevel > ZSTD_HC_MAX_CLEVEL) compressionLevel = ZSTD_HC_MAX_CLEVEL;
return ZSTD_HC_compressBegin_advanced(ctx, dst, maxDstSize, ZSTD_HC_defaultParameters[compressionLevel]);
}
size_t ZSTD_HC_compressEnd(ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize)
{
BYTE* op = (BYTE*)dst;
/* Sanity check */
(void)ctx;
if (maxDstSize < 3) return ERROR(dstSize_tooSmall);
/* End of frame */
op[0] = (BYTE)(bt_end << 6);
op[1] = 0;
op[2] = 0;
return 3;
}
size_t ZSTD_HC_compress_advanced (ZSTD_HC_CCtx* ctx,
void* dst, size_t maxDstSize,
const void* src, size_t srcSize,
ZSTD_HC_parameters params)
{
BYTE* const ostart = (BYTE*)dst;
BYTE* op = ostart;
size_t oSize;
/* correct params, to use less memory */
U32 srcLog = ZSTD_highbit((U32)srcSize-1) + 1;
if (params.windowLog > srcLog) params.windowLog = srcLog;
if (params.chainLog > srcLog) params.chainLog = srcLog;
/* Header */
oSize = ZSTD_HC_compressBegin_advanced(ctx, dst, maxDstSize, params);
if(ZSTD_isError(oSize)) return oSize;
op += oSize;
maxDstSize -= oSize;
/* body (compression) */
ctx->base = (const BYTE*)src;
oSize = ZSTD_HC_compress_generic (ctx, op, maxDstSize, src, srcSize);
if(ZSTD_isError(oSize)) return oSize;
op += oSize;
maxDstSize -= oSize;
/* Close frame */
oSize = ZSTD_HC_compressEnd(ctx, op, maxDstSize);
if(ZSTD_isError(oSize)) return oSize;
op += oSize;
return (op - ostart);
}
size_t ZSTD_HC_compressCCtx (ZSTD_HC_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize, int compressionLevel)
{
if (compressionLevel<=1) return ZSTD_compress(dst, maxDstSize, src, srcSize); /* fast mode */
if (compressionLevel > ZSTD_HC_MAX_CLEVEL) compressionLevel = ZSTD_HC_MAX_CLEVEL;
return ZSTD_HC_compress_advanced(ctx, dst, maxDstSize, src, srcSize, ZSTD_HC_defaultParameters[compressionLevel]);
}
size_t ZSTD_HC_compress(void* dst, size_t maxDstSize, const void* src, size_t srcSize, int compressionLevel)
{
size_t result;
ZSTD_HC_CCtx ctxBody;
memset(&ctxBody, 0, sizeof(ctxBody));
result = ZSTD_HC_compressCCtx(&ctxBody, dst, maxDstSize, src, srcSize, compressionLevel);
free(ctxBody.workSpace);
return result;
}
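/* Illustrative usage sketch : not part of the library build ; the helper name, buffer names and
   the compression level below are hypothetical, chosen only to show the intended call sequence. */
#if 0
static size_t example_compress(void* dst, size_t dstCapacity, const void* src, size_t srcSize)
{
    ZSTD_HC_CCtx* const cctx = ZSTD_HC_createCCtx();
    size_t cSize;
    if (cctx == NULL) return ERROR(memory_allocation);
    cSize = ZSTD_HC_compressCCtx(cctx, dst, dstCapacity, src, srcSize, 9);
    ZSTD_HC_freeCCtx(cctx);
    return cSize;   /* may be an error code : caller should check with ZSTD_isError() */
}
#endif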