6 #elif defined(_MSC_VER) 10 #ifdef NEED_COMPAT_LZ4 54 # define LZ4_HEAPMODE 0 61 #define ACCELERATION_DEFAULT 1 80 #ifndef LZ4_FORCE_MEMORY_ACCESS 81 # if defined(__GNUC__) && \ 82 ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \ 83 || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) ) 84 # define LZ4_FORCE_MEMORY_ACCESS 2 85 # elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) 86 # define LZ4_FORCE_MEMORY_ACCESS 1 94 #if defined(_MSC_VER) && defined(_WIN32_WCE) 95 # define LZ4_FORCE_SW_BITCOUNT 107 #ifndef LZ4_SRC_INCLUDED 108 # define LZ4_SRC_INCLUDED 1 111 #ifndef LZ4_STATIC_LINKING_ONLY 112 #define LZ4_STATIC_LINKING_ONLY 115 #ifndef LZ4_DISABLE_DEPRECATE_WARNINGS 116 #define LZ4_DISABLE_DEPRECATE_WARNINGS 119 #define LZ4_STATIC_LINKING_ONLY 129 # pragma warning(disable : 4127) 130 # pragma warning(disable : 4293) 133 #ifndef LZ4_FORCE_INLINE 135 # define LZ4_FORCE_INLINE static __forceinline 137 # if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L 139 # define LZ4_FORCE_INLINE static inline __attribute__((always_inline)) 141 # define LZ4_FORCE_INLINE static inline 144 # define LZ4_FORCE_INLINE static 163 #if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && !defined(__clang__) 164 # define LZ4_FORCE_O2_GCC_PPC64LE __attribute__((optimize("O2"))) 165 # define LZ4_FORCE_O2_INLINE_GCC_PPC64LE __attribute__((optimize("O2"))) LZ4_FORCE_INLINE 167 # define LZ4_FORCE_O2_GCC_PPC64LE 168 # define LZ4_FORCE_O2_INLINE_GCC_PPC64LE static 171 #if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__) 172 # define expect(expr,value) (__builtin_expect ((expr),(value)) ) 174 # define expect(expr,value) (expr) 178 #define likely(expr) expect((expr) != 0, 1) 181 #define unlikely(expr) expect((expr) != 0, 0) 189 #define ALLOC(s) malloc(s) 190 #define 
ALLOC_AND_ZERO(s) calloc(1,s) 191 #define FREEMEM(p) free(p) 193 #define MEM_INIT(p,v,s) memset((p),(v),(s)) 201 #define WILDCOPYLENGTH 8 202 #define LASTLITERALS 5 204 #define MATCH_SAFEGUARD_DISTANCE ((2*WILDCOPYLENGTH) - MINMATCH) 205 #define FASTLOOP_SAFE_DISTANCE 64 206 static const int LZ4_minLength = (MFLIMIT+1);
212 #define LZ4_DISTANCE_ABSOLUTE_MAX 65535 213 #if (LZ4_DISTANCE_MAX > LZ4_DISTANCE_ABSOLUTE_MAX) 214 # error "LZ4_DISTANCE_MAX is too big : must be <= 65535" 218 #define ML_MASK ((1U<<ML_BITS)-1) 219 #define RUN_BITS (8-ML_BITS) 220 #define RUN_MASK ((1U<<RUN_BITS)-1) 226 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1) 230 # define assert(condition) ((void)0) 234 #define LZ4_STATIC_ASSERT(c) { enum { LZ4_static_assert = 1/(int)(!!(c)) }; } 236 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2) 238 static int g_debuglog_enable = 1;
239 # define DEBUGLOG(l, ...) { \ 240 if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \ 241 fprintf(stderr, __FILE__ ": "); \ 242 fprintf(stderr, __VA_ARGS__); \ 243 fprintf(stderr, " \n"); \ 246 # define DEBUGLOG(l, ...) {} 253 #if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) 262 typedef unsigned char BYTE;
263 typedef unsigned short U16;
264 typedef unsigned int U32;
265 typedef signed int S32;
266 typedef unsigned long long U64;
267 typedef size_t uptrval;
270 #if defined(__x86_64__) 273 typedef size_t reg_t;
280 } limitedOutput_directive;
286 static unsigned LZ4_isLittleEndian(
void)
288 const union { U32 u; BYTE c[4]; } one = { 1 };
293 #if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2) 296 static U16 LZ4_read16(
const void* memPtr) {
return *(
const U16*) memPtr; }
297 static U32 LZ4_read32(
const void* memPtr) {
return *(
const U32*) memPtr; }
298 static reg_t LZ4_read_ARCH(
const void* memPtr) {
return *(
const reg_t*) memPtr; }
300 static void LZ4_write16(
void* memPtr, U16 value) { *(U16*)memPtr = value; }
301 static void LZ4_write32(
void* memPtr, U32 value) { *(U32*)memPtr = value; }
303 #elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1) 307 typedef union { U16 u16; U32 u32; reg_t uArch; } __attribute__((packed)) unalign;
309 static U16 LZ4_read16(
const void* ptr) {
return ((
const unalign*)ptr)->u16; }
310 static U32 LZ4_read32(
const void* ptr) {
return ((
const unalign*)ptr)->u32; }
311 static reg_t LZ4_read_ARCH(
const void* ptr) {
return ((
const unalign*)ptr)->uArch; }
313 static void LZ4_write16(
void* memPtr, U16 value) { ((unalign*)memPtr)->u16 = value; }
314 static void LZ4_write32(
void* memPtr, U32 value) { ((unalign*)memPtr)->u32 = value; }
318 static U16 LZ4_read16(
const void* memPtr)
320 U16 val; memcpy(&val, memPtr,
sizeof(val));
return val;
323 static U32 LZ4_read32(
const void* memPtr)
325 U32 val; memcpy(&val, memPtr,
sizeof(val));
return val;
328 static reg_t LZ4_read_ARCH(
const void* memPtr)
330 reg_t val; memcpy(&val, memPtr,
sizeof(val));
return val;
333 static void LZ4_write16(
void* memPtr, U16 value)
335 memcpy(memPtr, &value,
sizeof(value));
338 static void LZ4_write32(
void* memPtr, U32 value)
340 memcpy(memPtr, &value,
sizeof(value));
346 static U16 LZ4_readLE16(
const void* memPtr)
348 if (LZ4_isLittleEndian()) {
349 return LZ4_read16(memPtr);
351 const BYTE* p = (
const BYTE*)memPtr;
352 return (U16)((U16)p[0] + (p[1]<<8));
356 static void LZ4_writeLE16(
void* memPtr, U16 value)
358 if (LZ4_isLittleEndian()) {
359 LZ4_write16(memPtr, value);
361 BYTE* p = (BYTE*)memPtr;
363 p[1] = (BYTE)(value>>8);
368 LZ4_FORCE_O2_INLINE_GCC_PPC64LE
369 void LZ4_wildCopy8(
void* dstPtr,
const void* srcPtr,
void* dstEnd)
371 BYTE* d = (BYTE*)dstPtr;
372 const BYTE* s = (
const BYTE*)srcPtr;
373 BYTE*
const e = (BYTE*)dstEnd;
375 do { memcpy(d,s,8); d+=8; s+=8; }
while (d<e);
/* Helpers for copying matches with offset < 8 (overlapping source/dest):
 * inc32table[offset] — how far to advance srcPtr after the first 4 bytes,
 * dec64table[offset] — correction subtracted from srcPtr before the next
 * 8-byte copy (see LZ4_memcpy_using_offset_base). */
static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
static const int      dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3};
382 #ifndef LZ4_FAST_DEC_LOOP 383 # if defined(__i386__) || defined(__x86_64__) 384 # define LZ4_FAST_DEC_LOOP 1 385 # elif defined(__aarch64__) && !defined(__clang__) 389 # define LZ4_FAST_DEC_LOOP 1 391 # define LZ4_FAST_DEC_LOOP 0 395 #if LZ4_FAST_DEC_LOOP 397 LZ4_FORCE_O2_INLINE_GCC_PPC64LE
void 398 LZ4_memcpy_using_offset_base(BYTE* dstPtr,
const BYTE* srcPtr, BYTE* dstEnd,
const size_t offset)
401 dstPtr[0] = srcPtr[0];
402 dstPtr[1] = srcPtr[1];
403 dstPtr[2] = srcPtr[2];
404 dstPtr[3] = srcPtr[3];
405 srcPtr += inc32table[offset];
406 memcpy(dstPtr+4, srcPtr, 4);
407 srcPtr -= dec64table[offset];
410 memcpy(dstPtr, srcPtr, 8);
415 LZ4_wildCopy8(dstPtr, srcPtr, dstEnd);
421 LZ4_FORCE_O2_INLINE_GCC_PPC64LE
void 422 LZ4_wildCopy32(
void* dstPtr,
const void* srcPtr,
void* dstEnd)
424 BYTE* d = (BYTE*)dstPtr;
425 const BYTE* s = (
const BYTE*)srcPtr;
426 BYTE*
const e = (BYTE*)dstEnd;
428 do { memcpy(d,s,16); memcpy(d+16,s+16,16); d+=32; s+=32; }
while (d<e);
434 LZ4_FORCE_O2_INLINE_GCC_PPC64LE
void 435 LZ4_memcpy_using_offset(BYTE* dstPtr,
const BYTE* srcPtr, BYTE* dstEnd,
const size_t offset)
439 assert(dstEnd >= dstPtr + MINMATCH);
440 LZ4_write32(dstPtr, 0);
444 memset(v, *srcPtr, 8);
447 memcpy(v, srcPtr, 2);
448 memcpy(&v[2], srcPtr, 2);
449 memcpy(&v[4], &v[0], 4);
452 memcpy(v, srcPtr, 4);
453 memcpy(&v[4], srcPtr, 4);
456 LZ4_memcpy_using_offset_base(dstPtr, srcPtr, dstEnd, offset);
460 memcpy(dstPtr, v, 8);
462 while (dstPtr < dstEnd) {
463 memcpy(dstPtr, v, 8);
473 static unsigned LZ4_NbCommonBytes (reg_t val)
475 if (LZ4_isLittleEndian()) {
476 if (
sizeof(val)==8) {
477 # if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) 479 _BitScanForward64( &r, (U64)val );
481 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 482 return (
unsigned)__builtin_ctzll((U64)val) >> 3;
484 static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2,
485 0, 3, 1, 3, 1, 4, 2, 7,
486 0, 2, 3, 6, 1, 5, 3, 5,
487 1, 3, 4, 4, 2, 5, 6, 7,
488 7, 0, 1, 2, 3, 3, 4, 6,
489 2, 6, 5, 5, 3, 4, 5, 6,
490 7, 1, 2, 4, 6, 4, 4, 5,
491 7, 2, 6, 5, 7, 6, 7, 7 };
492 return DeBruijnBytePos[((U64)((val & -(
long long)val) * 0x0218A392CDABBD3FULL)) >> 58];
495 # if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) 497 _BitScanForward( &r, (U32)val );
499 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 500 return (
unsigned)__builtin_ctz((U32)val) >> 3;
502 static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0,
503 3, 2, 2, 1, 3, 2, 0, 1,
504 3, 3, 1, 2, 2, 2, 2, 0,
505 3, 1, 2, 0, 1, 0, 1, 1 };
506 return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> 27];
510 if (
sizeof(val)==8) {
511 # if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) 513 _BitScanReverse64( &r, val );
514 return (
unsigned)(r>>3);
515 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 516 return (
unsigned)__builtin_clzll((U64)val) >> 3;
518 static const U32 by32 =
sizeof(val)*4;
522 if (!(val>>by32)) { r=4; }
else { r=0; val>>=by32; }
523 if (!(val>>16)) { r+=2; val>>=8; }
else { val>>=24; }
528 # if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) 530 _BitScanReverse( &r, (
unsigned long)val );
531 return (
unsigned)(r>>3);
532 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 533 return (
unsigned)__builtin_clz((U32)val) >> 3;
536 if (!(val>>16)) { r=2; val>>=8; }
else { r=0; val>>=24; }
544 #define STEPSIZE sizeof(reg_t) 546 unsigned LZ4_count(
const BYTE* pIn,
const BYTE* pMatch,
const BYTE* pInLimit)
548 const BYTE*
const pStart = pIn;
550 if (
likely(pIn < pInLimit-(STEPSIZE-1))) {
551 reg_t
const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
553 pIn+=STEPSIZE; pMatch+=STEPSIZE;
555 return LZ4_NbCommonBytes(diff);
558 while (
likely(pIn < pInLimit-(STEPSIZE-1))) {
559 reg_t
const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
560 if (!diff) { pIn+=STEPSIZE; pMatch+=STEPSIZE;
continue; }
561 pIn += LZ4_NbCommonBytes(diff);
562 return (
unsigned)(pIn - pStart);
565 if ((STEPSIZE==8) && (pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMatch+=4; }
566 if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; }
567 if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
568 return (
unsigned)(pIn - pStart);
572 #ifndef LZ4_COMMONDEFS_ONLY 576 static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
577 static const U32 LZ4_skipTrigger = 6;
583 typedef enum { clearedTable = 0, byPtr, byU32, byU16 } tableType_t;
608 typedef enum { noDict = 0, withPrefix64k, usingExtDict, usingDictCtx } dict_directive;
609 typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive;
624 #if defined (__cplusplus) 630 int LZ4_decompress_safe_forceExtDict(
const char*
source,
char*
dest,
632 const void* dictStart,
size_t dictSize);
634 #if defined (__cplusplus) 641 static U32 LZ4_hash4(U32 sequence, tableType_t
const tableType)
643 if (tableType == byU16)
644 return ((sequence * 2654435761U) >> ((MINMATCH*8)-(
LZ4_HASHLOG+1)));
646 return ((sequence * 2654435761U) >> ((MINMATCH*8)-
LZ4_HASHLOG));
649 static U32 LZ4_hash5(U64 sequence, tableType_t
const tableType)
652 if (LZ4_isLittleEndian()) {
653 const U64 prime5bytes = 889523592379ULL;
654 return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
656 const U64 prime8bytes = 11400714785074694791ULL;
657 return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog));
661 LZ4_FORCE_INLINE U32 LZ4_hashPosition(
const void*
const p, tableType_t
const tableType)
663 if ((
sizeof(reg_t)==8) && (tableType != byU16))
return LZ4_hash5(LZ4_read_ARCH(p), tableType);
664 return LZ4_hash4(LZ4_read32(p), tableType);
667 static void LZ4_clearHash(U32 h,
void* tableBase, tableType_t
const tableType)
672 case clearedTable: { assert(0);
return; }
673 case byPtr: {
const BYTE** hashTable = (
const BYTE**)tableBase; hashTable[h] = NULL;
return; }
674 case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = 0;
return; }
675 case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = 0;
return; }
679 static void LZ4_putIndexOnHash(U32 idx, U32 h,
void* tableBase, tableType_t
const tableType)
685 case byPtr: { assert(0);
return; }
686 case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = idx;
return; }
687 case byU16: { U16* hashTable = (U16*) tableBase; assert(idx < 65536); hashTable[h] = (U16)idx;
return; }
691 static void LZ4_putPositionOnHash(
const BYTE* p, U32 h,
692 void* tableBase, tableType_t
const tableType,
697 case clearedTable: { assert(0);
return; }
698 case byPtr: {
const BYTE** hashTable = (
const BYTE**)tableBase; hashTable[h] = p;
return; }
699 case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase);
return; }
700 case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase);
return; }
704 LZ4_FORCE_INLINE
void LZ4_putPosition(
const BYTE* p,
void* tableBase, tableType_t tableType,
const BYTE* srcBase)
706 U32
const h = LZ4_hashPosition(p, tableType);
707 LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase);
716 static U32 LZ4_getIndexOnHash(U32 h,
const void* tableBase, tableType_t tableType)
719 if (tableType == byU32) {
720 const U32*
const hashTable = (
const U32*) tableBase;
724 if (tableType == byU16) {
725 const U16*
const hashTable = (
const U16*) tableBase;
732 static const BYTE* LZ4_getPositionOnHash(U32 h,
const void* tableBase, tableType_t tableType,
const BYTE* srcBase)
734 if (tableType == byPtr) {
const BYTE*
const* hashTable = (
const BYTE*
const*) tableBase;
return hashTable[h]; }
735 if (tableType == byU32) {
const U32*
const hashTable = (
const U32*) tableBase;
return hashTable[h] + srcBase; }
736 {
const U16*
const hashTable = (
const U16*) tableBase;
return hashTable[h] + srcBase; }
739 LZ4_FORCE_INLINE
const BYTE*
740 LZ4_getPosition(
const BYTE* p,
741 const void* tableBase, tableType_t tableType,
744 U32
const h = LZ4_hashPosition(p, tableType);
745 return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
748 LZ4_FORCE_INLINE
void 751 const tableType_t tableType) {
756 DEBUGLOG(5,
"LZ4_prepareTable: Full reset for %p", cctx);
766 assert(inputSize >= 0);
768 || ((tableType == byU16) && cctx->
currentOffset + (
unsigned)inputSize >= 0xFFFFU)
770 || tableType == byPtr
771 || inputSize >= 4 KB)
773 DEBUGLOG(4,
"LZ4_prepareTable: Resetting table in %p", cctx);
778 DEBUGLOG(4,
"LZ4_prepareTable: Re-use hash table (no reset)");
787 DEBUGLOG(5,
"LZ4_prepareTable: adding 64KB to currentOffset");
799 LZ4_FORCE_INLINE
int LZ4_compress_generic(
806 const limitedOutput_directive outputDirective,
807 const tableType_t tableType,
808 const dict_directive dictDirective,
809 const dictIssue_directive dictIssue,
810 const int acceleration)
813 const BYTE* ip = (
const BYTE*) source;
816 const BYTE* base = (
const BYTE*) source - startIndex;
817 const BYTE* lowLimit;
820 const BYTE*
const dictionary =
824 const U32 dictDelta = (dictDirective == usingDictCtx) ? startIndex - dictCtx->
currentOffset : 0;
826 int const maybe_extMem = (dictDirective == usingExtDict) || (dictDirective == usingDictCtx);
827 U32
const prefixIdxLimit = startIndex - dictSize;
828 const BYTE*
const dictEnd = dictionary + dictSize;
829 const BYTE* anchor = (
const BYTE*) source;
831 const BYTE*
const mflimitPlusOne = iend - MFLIMIT + 1;
832 const BYTE*
const matchlimit = iend - LASTLITERALS;
836 const BYTE* dictBase = (dictDirective == usingDictCtx) ?
838 dictionary + dictSize - startIndex;
840 BYTE* op = (BYTE*)
dest;
846 DEBUGLOG(5,
"LZ4_compress_generic: srcSize=%i, tableType=%u", inputSize, tableType);
849 if (outputDirective == fillOutput && maxOutputSize < 1) {
return 0; }
851 if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) {
return 0; }
852 if (tableType==byPtr) assert(dictDirective==noDict);
853 assert(acceleration >= 1);
855 lowLimit = (
const BYTE*)source - (dictDirective == withPrefix64k ? dictSize : 0);
858 if (dictDirective == usingDictCtx) {
869 if (inputSize<LZ4_minLength)
goto _last_literals;
872 LZ4_putPosition(ip, cctx->
hashTable, tableType, base);
873 ip++; forwardH = LZ4_hashPosition(ip, tableType);
879 const BYTE* filledIp;
882 if (tableType == byPtr) {
883 const BYTE* forwardIp = ip;
885 int searchMatchNb = acceleration << LZ4_skipTrigger;
887 U32
const h = forwardH;
890 step = (searchMatchNb++ >> LZ4_skipTrigger);
892 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
893 assert(ip < mflimitPlusOne);
895 match = LZ4_getPositionOnHash(h, cctx->
hashTable, tableType, base);
896 forwardH = LZ4_hashPosition(forwardIp, tableType);
897 LZ4_putPositionOnHash(ip, h, cctx->
hashTable, tableType, base);
899 }
while ( (match+LZ4_DISTANCE_MAX < ip)
900 || (LZ4_read32(match) != LZ4_read32(ip)) );
904 const BYTE* forwardIp = ip;
906 int searchMatchNb = acceleration << LZ4_skipTrigger;
908 U32
const h = forwardH;
909 U32
const current = (U32)(forwardIp - base);
910 U32 matchIndex = LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
911 assert(matchIndex <= current);
912 assert(forwardIp - base < (ptrdiff_t)(2 GB - 1));
915 step = (searchMatchNb++ >> LZ4_skipTrigger);
917 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
918 assert(ip < mflimitPlusOne);
920 if (dictDirective == usingDictCtx) {
921 if (matchIndex < startIndex) {
923 assert(tableType == byU32);
924 matchIndex = LZ4_getIndexOnHash(h, dictCtx->
hashTable, byU32);
925 match = dictBase + matchIndex;
926 matchIndex += dictDelta;
927 lowLimit = dictionary;
929 match = base + matchIndex;
930 lowLimit = (
const BYTE*)source;
932 }
else if (dictDirective==usingExtDict) {
933 if (matchIndex < startIndex) {
934 DEBUGLOG(7,
"extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex);
935 assert(startIndex - matchIndex >= MINMATCH);
936 match = dictBase + matchIndex;
937 lowLimit = dictionary;
939 match = base + matchIndex;
940 lowLimit = (
const BYTE*)source;
943 match = base + matchIndex;
945 forwardH = LZ4_hashPosition(forwardIp, tableType);
946 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
948 DEBUGLOG(7,
"candidate at pos=%u (offset=%u \n", matchIndex, current - matchIndex);
949 if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) {
continue; }
950 assert(matchIndex < current);
951 if ( ((tableType != byU16) || (LZ4_DISTANCE_MAX < LZ4_DISTANCE_ABSOLUTE_MAX))
952 && (matchIndex+LZ4_DISTANCE_MAX < current)) {
955 assert((current - matchIndex) <= LZ4_DISTANCE_MAX);
957 if (LZ4_read32(match) == LZ4_read32(ip)) {
958 if (maybe_extMem) offset = current - matchIndex;
967 while (((ip>anchor) & (match > lowLimit)) && (
unlikely(ip[-1]==match[-1]))) { ip--; match--; }
970 {
unsigned const litLength = (unsigned)(ip - anchor);
972 if ((outputDirective == limitedOutput) &&
973 (
unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)) ) {
976 if ((outputDirective == fillOutput) &&
977 (
unlikely(op + (litLength+240)/255 + litLength + 2 + 1 + MFLIMIT - MINMATCH > olimit))) {
981 if (litLength >= RUN_MASK) {
982 int len = (int)(litLength - RUN_MASK);
983 *token = (RUN_MASK<<ML_BITS);
984 for(; len >= 255 ; len-=255) *op++ = 255;
987 else *token = (BYTE)(litLength<<ML_BITS);
990 LZ4_wildCopy8(op, anchor, op+litLength);
992 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
993 (
int)(anchor-(
const BYTE*)source), litLength, (
int)(ip-(
const BYTE*)source));
1005 if ((outputDirective == fillOutput) &&
1006 (op + 2 + 1 + MFLIMIT - MINMATCH > olimit)) {
1009 goto _last_literals;
1014 DEBUGLOG(6,
" with offset=%u (ext if > %i)", offset, (
int)(ip - (
const BYTE*)source));
1015 assert(offset <= LZ4_DISTANCE_MAX && offset > 0);
1016 LZ4_writeLE16(op, (U16)offset); op+=2;
1018 DEBUGLOG(6,
" with offset=%u (same segment)", (U32)(ip - match));
1019 assert(ip-match <= LZ4_DISTANCE_MAX);
1020 LZ4_writeLE16(op, (U16)(ip - match)); op+=2;
1024 {
unsigned matchCode;
1026 if ( (dictDirective==usingExtDict || dictDirective==usingDictCtx)
1027 && (lowLimit==dictionary) ) {
1028 const BYTE* limit = ip + (dictEnd-
match);
1029 assert(dictEnd > match);
1030 if (limit > matchlimit) limit = matchlimit;
1031 matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, limit);
1032 ip += (size_t)matchCode + MINMATCH;
1034 unsigned const more = LZ4_count(limit, (
const BYTE*)source, matchlimit);
1038 DEBUGLOG(6,
" with matchLength=%u starting in extDict", matchCode+MINMATCH);
1040 matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, matchlimit);
1041 ip += (size_t)matchCode + MINMATCH;
1042 DEBUGLOG(6,
" with matchLength=%u", matchCode+MINMATCH);
1045 if ((outputDirective) &&
1046 (
unlikely(op + (1 + LASTLITERALS) + (matchCode+240)/255 > olimit)) ) {
1047 if (outputDirective == fillOutput) {
1049 U32 newMatchCode = 15 - 1 + ((U32)(olimit - op) - 1 - LASTLITERALS) * 255;
1050 ip -= matchCode - newMatchCode;
1051 assert(newMatchCode < matchCode);
1052 matchCode = newMatchCode;
1060 DEBUGLOG(5,
"Clearing %u positions", (U32)(filledIp - ip));
1061 for (ptr = ip; ptr <= filledIp; ++ptr) {
1062 U32
const h = LZ4_hashPosition(ptr, tableType);
1063 LZ4_clearHash(h, cctx->
hashTable, tableType);
1067 assert(outputDirective == limitedOutput);
1071 if (matchCode >= ML_MASK) {
1073 matchCode -= ML_MASK;
1074 LZ4_write32(op, 0xFFFFFFFF);
1075 while (matchCode >= 4*255) {
1077 LZ4_write32(op, 0xFFFFFFFF);
1080 op += matchCode / 255;
1081 *op++ = (BYTE)(matchCode % 255);
1083 *token += (BYTE)(matchCode);
1086 assert(!(outputDirective == fillOutput && op + 1 + LASTLITERALS > olimit));
1091 if (ip >= mflimitPlusOne)
break;
1094 LZ4_putPosition(ip-2, cctx->
hashTable, tableType, base);
1097 if (tableType == byPtr) {
1099 match = LZ4_getPosition(ip, cctx->
hashTable, tableType, base);
1100 LZ4_putPosition(ip, cctx->
hashTable, tableType, base);
1101 if ( (match+LZ4_DISTANCE_MAX >= ip)
1102 && (LZ4_read32(match) == LZ4_read32(ip)) )
1103 { token=op++; *token=0;
goto _next_match; }
1107 U32
const h = LZ4_hashPosition(ip, tableType);
1108 U32
const current = (U32)(ip-base);
1109 U32 matchIndex = LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
1110 assert(matchIndex < current);
1111 if (dictDirective == usingDictCtx) {
1112 if (matchIndex < startIndex) {
1114 matchIndex = LZ4_getIndexOnHash(h, dictCtx->
hashTable, byU32);
1115 match = dictBase + matchIndex;
1116 lowLimit = dictionary;
1117 matchIndex += dictDelta;
1119 match = base + matchIndex;
1120 lowLimit = (
const BYTE*)source;
1122 }
else if (dictDirective==usingExtDict) {
1123 if (matchIndex < startIndex) {
1124 match = dictBase + matchIndex;
1125 lowLimit = dictionary;
1127 match = base + matchIndex;
1128 lowLimit = (
const BYTE*)source;
1131 match = base + matchIndex;
1133 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
1134 assert(matchIndex < current);
1135 if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1)
1136 && (((tableType==byU16) && (LZ4_DISTANCE_MAX == LZ4_DISTANCE_ABSOLUTE_MAX)) ? 1 : (matchIndex+LZ4_DISTANCE_MAX >= current))
1137 && (LZ4_read32(match) == LZ4_read32(ip)) ) {
1140 if (maybe_extMem) offset = current - matchIndex;
1141 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
1142 (
int)(anchor-(
const BYTE*)source), 0, (
int)(ip-(
const BYTE*)source));
1148 forwardH = LZ4_hashPosition(++ip, tableType);
1154 {
size_t lastRun = (size_t)(iend - anchor);
1155 if ( (outputDirective) &&
1156 (op + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > olimit)) {
1157 if (outputDirective == fillOutput) {
1159 assert(olimit >= op);
1160 lastRun = (size_t)(olimit-op) - 1;
1161 lastRun -= (lastRun+240)/255;
1163 assert(outputDirective == limitedOutput);
1167 if (lastRun >= RUN_MASK) {
1168 size_t accumulator = lastRun - RUN_MASK;
1169 *op++ = RUN_MASK << ML_BITS;
1170 for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
1171 *op++ = (BYTE) accumulator;
1173 *op++ = (BYTE)(lastRun<<ML_BITS);
1175 memcpy(op, anchor, lastRun);
1176 ip = anchor + lastRun;
1180 if (outputDirective == fillOutput) {
1181 *inputConsumed = (int) (((
const char*)ip)-
source);
1183 DEBUGLOG(5,
"LZ4_compress_generic: compressed %i bytes into %i bytes", inputSize, (
int)(((
char*)op) - dest));
1184 result = (int)(((
char*)op) -
dest);
1190 int LZ4_compress_fast_extState(
void* state,
const char* source,
char* dest,
int inputSize,
int maxOutputSize,
int acceleration)
1193 assert(ctx != NULL);
1194 if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
1196 if (inputSize < LZ4_64Klimit) {
1197 return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
1199 const tableType_t tableType = ((
sizeof(
void*)==4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
1200 return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
1203 if (inputSize < LZ4_64Klimit) {
1204 return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
1206 const tableType_t tableType = ((
sizeof(
void*)==4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
1207 return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, noDict, noDictIssue, acceleration);
1221 int LZ4_compress_fast_extState_fastReset(
void* state,
const char* src,
char*
dst,
int srcSize,
int dstCapacity,
int acceleration)
1224 if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
1227 if (srcSize < LZ4_64Klimit) {
1228 const tableType_t tableType = byU16;
1229 LZ4_prepareTable(ctx, srcSize, tableType);
1231 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, dictSmall, acceleration);
1233 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
1236 const tableType_t tableType = ((
sizeof(
void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
1237 LZ4_prepareTable(ctx, srcSize, tableType);
1238 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
1241 if (srcSize < LZ4_64Klimit) {
1242 const tableType_t tableType = byU16;
1243 LZ4_prepareTable(ctx, srcSize, tableType);
1245 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, dictSmall, acceleration);
1247 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
1250 const tableType_t tableType = ((
sizeof(
void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
1251 LZ4_prepareTable(ctx, srcSize, tableType);
1252 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
1258 int LZ4_compress_fast(
const char* source,
char* dest,
int inputSize,
int maxOutputSize,
int acceleration)
1263 if (ctxPtr == NULL)
return 0;
1285 int LZ4_compress_fast_force(
const char* src,
char* dst,
int srcSize,
int dstCapacity,
int acceleration)
1290 if (srcSize < LZ4_64Klimit) {
1291 return LZ4_compress_generic(&ctx.
internal_donotuse, src, dst, srcSize, NULL, dstCapacity, limitedOutput, byU16, noDict, noDictIssue, acceleration);
1293 tableType_t
const addrMode = (
sizeof(
void*) > 4) ? byU32 : byPtr;
1294 return LZ4_compress_generic(&ctx.
internal_donotuse, src, dst, srcSize, NULL, dstCapacity, limitedOutput, addrMode, noDict, noDictIssue, acceleration);
1302 static int LZ4_compress_destSize_extState (
LZ4_stream_t* state,
const char* src,
char* dst,
int* srcSizePtr,
int targetDstSize)
1305 assert(s != NULL); (void)s;
1310 if (*srcSizePtr < LZ4_64Klimit) {
1311 return LZ4_compress_generic(&state->
internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, byU16, noDict, noDictIssue, 1);
1313 tableType_t
const addrMode = ((
sizeof(
void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
1314 return LZ4_compress_generic(&state->
internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, addrMode, noDict, noDictIssue, 1);
1323 if (ctx == NULL)
return 0;
1329 int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
1347 DEBUGLOG(4,
"LZ4_createStream %p", lz4s);
1348 if (lz4s == NULL)
return NULL;
1356 static size_t LZ4_stream_t_alignment(
void)
1359 return sizeof(t_a) -
sizeof(t_a.t);
1365 DEBUGLOG(5,
"LZ4_initStream");
1366 if (buffer == NULL) {
return NULL; }
1371 if (((
size_t)buffer) & (LZ4_stream_t_alignment() - 1)) {
return NULL; }
1381 DEBUGLOG(5,
"LZ4_resetStream (ctx:%p)", LZ4_stream);
1391 if (!LZ4_stream)
return 0;
1392 DEBUGLOG(5,
"LZ4_freeStream %p", LZ4_stream);
1393 FREEMEM(LZ4_stream);
1398 #define HASH_UNIT sizeof(reg_t) 1402 const tableType_t tableType = byU32;
1403 const BYTE* p = (
const BYTE*)dictionary;
1404 const BYTE*
const dictEnd = p + dictSize;
1407 DEBUGLOG(4,
"LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict);
1423 if (dictSize < (
int)HASH_UNIT) {
1427 if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB;
1430 dict->
dictSize = (U32)(dictEnd - p);
1433 while (p <= dictEnd-HASH_UNIT) {
1434 LZ4_putPosition(p, dict->
hashTable, tableType, base);
1445 DEBUGLOG(4,
"LZ4_attach_dictionary (%p, %p, size %u)",
1446 workingStream, dictionaryStream,
1447 dictCtx != NULL ? dictCtx->
dictSize : 0);
1455 if (dictCtx != NULL) {
1477 assert(nextSize >= 0);
1478 if (LZ4_dict->
currentOffset + (
unsigned)nextSize > 0x80000000) {
1483 DEBUGLOG(4,
"LZ4_renormDictT");
1496 const char* source,
char* dest,
1497 int inputSize,
int maxOutputSize,
1500 const tableType_t tableType = byU32;
1504 DEBUGLOG(5,
"LZ4_compress_fast_continue (inputSize=%i)", inputSize);
1506 if (streamPtr->
dirty) {
return 0; }
1507 LZ4_renormDictT(streamPtr, inputSize);
1508 if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
1512 && (dictEnd != (
const BYTE*)source) ) {
1513 DEBUGLOG(5,
"LZ4_compress_fast_continue: dictSize(%u) at addr:%p is too small", streamPtr->
dictSize, streamPtr->
dictionary);
1516 dictEnd = (
const BYTE*)source;
1520 {
const BYTE* sourceEnd = (
const BYTE*) source + inputSize;
1521 if ((sourceEnd > streamPtr->
dictionary) && (sourceEnd < dictEnd)) {
1522 streamPtr->
dictSize = (U32)(dictEnd - sourceEnd);
1530 if (dictEnd == (
const BYTE*)
source) {
1532 return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, dictSmall, acceleration);
1534 return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, noDictIssue, acceleration);
1546 if (inputSize > 4 KB) {
1552 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
1554 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingDictCtx, noDictIssue, acceleration);
1558 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, dictSmall, acceleration);
1560 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
1564 streamPtr->
dictSize = (U32)inputSize;
1571 int LZ4_compress_forceExtDict (
LZ4_stream_t* LZ4_dict,
const char* source,
char* dest,
int srcSize)
1576 LZ4_renormDictT(streamPtr, srcSize);
1579 result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, dictSmall, 1);
1581 result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);
1585 streamPtr->
dictSize = (U32)srcSize;
1603 if ((U32)dictSize > 64 KB) { dictSize = 64 KB; }
1606 memmove(safeBuffer, previousDictEnd - dictSize, dictSize);
1620 typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive;
1621 typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive;
1624 #define MIN(a,b) ( (a) < (b) ? (a) : (b) ) 1634 typedef enum { loop_error = -2, initial_error = -1, ok = 0 } variable_length_error;
1635 LZ4_FORCE_INLINE
unsigned 1636 read_variable_length(
const BYTE**ip,
const BYTE* lencheck,
int loop_check,
int initial_check, variable_length_error* error)
1638 unsigned length = 0;
1640 if (initial_check &&
unlikely((*ip) >= lencheck)) {
1641 *error = initial_error;
1648 if (loop_check &&
unlikely((*ip) >= lencheck)) {
1649 *error = loop_error;
1663 LZ4_FORCE_INLINE
int 1664 LZ4_decompress_generic(
1665 const char*
const src,
1670 endCondition_directive endOnInput,
1671 earlyEnd_directive partialDecoding,
1672 dict_directive dict,
1673 const BYTE*
const lowPrefix,
1674 const BYTE*
const dictStart,
1675 const size_t dictSize
1678 if (src == NULL) {
return -1; }
1680 {
const BYTE* ip = (
const BYTE*) src;
1681 const BYTE*
const iend = ip +
srcSize;
1683 BYTE* op = (BYTE*) dst;
1687 const BYTE*
const dictEnd = (dictStart == NULL) ? NULL : dictStart + dictSize;
1689 const int safeDecode = (endOnInput==endOnInputSize);
1690 const int checkOffset = ((safeDecode) && (dictSize < (
int)(64 KB)));
1694 const BYTE*
const shortiend = iend - (endOnInput ? 14 : 8) - 2 ;
1695 const BYTE*
const shortoend = oend - (endOnInput ? 14 : 8) - 18 ;
1703 DEBUGLOG(5,
"LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize, outputSize);
1706 assert(lowPrefix <= op);
1707 if ((endOnInput) && (
unlikely(outputSize==0))) {
1709 if (partialDecoding)
return 0;
1710 return ((srcSize==1) && (*ip==0)) ? 0 : -1;
1712 if ((!endOnInput) && (
unlikely(outputSize==0))) {
return (*ip==0 ? 1 : -1); }
1713 if ((endOnInput) &&
unlikely(srcSize==0)) {
return -1; }
1716 #if LZ4_FAST_DEC_LOOP 1717 if ((oend - op) < FASTLOOP_SAFE_DISTANCE) {
1718 DEBUGLOG(6,
"skip fast decode loop");
1725 assert(oend - op >= FASTLOOP_SAFE_DISTANCE);
1726 if (endOnInput) { assert(ip < iend); }
1728 length = token >> ML_BITS;
1730 assert(!endOnInput || ip <= iend);
1733 if (length == RUN_MASK) {
1734 variable_length_error error = ok;
1735 length += read_variable_length(&ip, iend-RUN_MASK, endOnInput, endOnInput, &error);
1736 if (error == initial_error) {
goto _output_error; }
1737 if ((safeDecode) &&
unlikely((uptrval)(op)+length<(uptrval)(op))) {
goto _output_error; }
1738 if ((safeDecode) &&
unlikely((uptrval)(ip)+length<(uptrval)(ip))) {
goto _output_error; }
1742 LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH);
1744 if ((cpy>oend-32) || (ip+length>iend-32)) {
goto safe_literal_copy; }
1745 LZ4_wildCopy32(op, ip, cpy);
1747 if (cpy>oend-8) {
goto safe_literal_copy; }
1748 LZ4_wildCopy8(op, ip, cpy);
1751 ip += length; op = cpy;
1755 DEBUGLOG(7,
"copy %u bytes in a 16-bytes stripe", (
unsigned)length);
1757 if (ip > iend-(16 + 1)) {
goto safe_literal_copy; }
1764 if (length > 8) { memcpy(op+8, ip+8, 8); }
1766 ip += length; op = cpy;
1770 offset = LZ4_readLE16(ip); ip+=2;
1771 match = op - offset;
1772 assert(match <= op);
1775 length = token & ML_MASK;
1777 if (length == ML_MASK) {
1778 variable_length_error error = ok;
1779 if ((checkOffset) && (
unlikely(match + dictSize < lowPrefix))) {
goto _output_error; }
1780 length += read_variable_length(&ip, iend - LASTLITERALS + 1, endOnInput, 0, &error);
1781 if (error != ok) {
goto _output_error; }
1782 if ((safeDecode) &&
unlikely((uptrval)(op)+length<(uptrval)op)) {
goto _output_error; }
1784 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) {
1785 goto safe_match_copy;
1789 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) {
1790 goto safe_match_copy;
1794 if ((dict == withPrefix64k) || (match >= lowPrefix)) {
1796 assert(match >= lowPrefix);
1797 assert(match <= op);
1798 assert(op + 18 <= oend);
1800 memcpy(op, match, 8);
1801 memcpy(op+8, match+8, 8);
1802 memcpy(op+16, match+16, 2);
1807 if ((checkOffset) && (
unlikely(match + dictSize < lowPrefix))) {
goto _output_error; }
1809 if ((dict==usingExtDict) && (match < lowPrefix)) {
1810 if (
unlikely(op+length > oend-LASTLITERALS)) {
1811 if (partialDecoding) {
1812 length = MIN(length, (
size_t)(oend-op));
1817 if (length <= (
size_t)(lowPrefix-match)) {
1819 memmove(op, dictEnd - (lowPrefix-match), length);
1823 size_t const copySize = (size_t)(lowPrefix - match);
1824 size_t const restSize = length - copySize;
1825 memcpy(op, dictEnd - copySize, copySize);
1827 if (restSize > (
size_t)(op - lowPrefix)) {
1828 BYTE*
const endOfMatch = op + restSize;
1829 const BYTE* copyFrom = lowPrefix;
1830 while (op < endOfMatch) { *op++ = *copyFrom++; }
1832 memcpy(op, lowPrefix, restSize);
1841 assert((op <= oend) && (oend-op >= 32));
1843 LZ4_memcpy_using_offset(op, match, cpy, offset);
1845 LZ4_wildCopy32(op, match, cpy);
1856 length = token >> ML_BITS;
1858 assert(!endOnInput || ip <= iend);
1869 if ( (endOnInput ? length != RUN_MASK : length <= 8)
1871 &&
likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) {
1873 memcpy(op, ip, endOnInput ? 16 : 8);
1874 op += length; ip += length;
1878 length = token & ML_MASK;
1879 offset = LZ4_readLE16(ip); ip += 2;
1880 match = op - offset;
1881 assert(match <= op);
1884 if ( (length != ML_MASK)
1886 && (dict==withPrefix64k || match >= lowPrefix) ) {
1888 memcpy(op + 0, match + 0, 8);
1889 memcpy(op + 8, match + 8, 8);
1890 memcpy(op +16, match +16, 2);
1891 op += length + MINMATCH;
1902 if (length == RUN_MASK) {
1903 variable_length_error error = ok;
1904 length += read_variable_length(&ip, iend-RUN_MASK, endOnInput, endOnInput, &error);
1905 if (error == initial_error) {
goto _output_error; }
1906 if ((safeDecode) &&
unlikely((uptrval)(op)+length<(uptrval)(op))) {
goto _output_error; }
1907 if ((safeDecode) &&
unlikely((uptrval)(ip)+length<(uptrval)(ip))) {
goto _output_error; }
1912 #if LZ4_FAST_DEC_LOOP 1915 LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH);
1916 if ( ((endOnInput) && ((cpy>oend-MFLIMIT) || (ip+length>iend-(2+1+LASTLITERALS))) )
1917 || ((!endOnInput) && (cpy>oend-WILDCOPYLENGTH)) )
1924 if (partialDecoding) {
1932 if ((ip+length>iend-(2+1+LASTLITERALS)) && (ip+length != iend)) {
goto _output_error; }
1933 assert(ip+length <= iend);
1940 length = (size_t)(oend-op);
1942 assert(ip+length <= iend);
1947 if ((!endOnInput) && (cpy != oend)) {
goto _output_error; }
1951 if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) {
goto _output_error; }
1953 memmove(op, ip, length);
1960 if (!partialDecoding || (cpy == oend) || (ip == iend)) {
1964 LZ4_wildCopy8(op, ip, cpy);
1965 ip += length; op = cpy;
1969 offset = LZ4_readLE16(ip); ip+=2;
1970 match = op - offset;
1973 length = token & ML_MASK;
1976 if (length == ML_MASK) {
1977 variable_length_error error = ok;
1978 length += read_variable_length(&ip, iend - LASTLITERALS + 1, endOnInput, 0, &error);
1979 if (error != ok)
goto _output_error;
1980 if ((safeDecode) &&
unlikely((uptrval)(op)+length<(uptrval)op))
goto _output_error;
1984 #if LZ4_FAST_DEC_LOOP 1987 if ((checkOffset) && (
unlikely(match + dictSize < lowPrefix)))
goto _output_error;
1989 if ((dict==usingExtDict) && (match < lowPrefix)) {
1990 if (
unlikely(op+length > oend-LASTLITERALS)) {
1991 if (partialDecoding) length = MIN(length, (
size_t)(oend-op));
1992 else goto _output_error;
1995 if (length <= (
size_t)(lowPrefix-match)) {
1997 memmove(op, dictEnd - (lowPrefix-match), length);
2001 size_t const copySize = (size_t)(lowPrefix - match);
2002 size_t const restSize = length - copySize;
2003 memcpy(op, dictEnd - copySize, copySize);
2005 if (restSize > (
size_t)(op - lowPrefix)) {
2006 BYTE*
const endOfMatch = op + restSize;
2007 const BYTE* copyFrom = lowPrefix;
2008 while (op < endOfMatch) *op++ = *copyFrom++;
2010 memcpy(op, lowPrefix, restSize);
2015 assert(match >= lowPrefix);
2022 if (partialDecoding && (cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
2023 size_t const mlen = MIN(length, (
size_t)(oend-op));
2024 const BYTE*
const matchEnd = match + mlen;
2025 BYTE*
const copyEnd = op + mlen;
2026 if (matchEnd > op) {
2027 while (op < copyEnd) { *op++ = *match++; }
2029 memcpy(op, match, mlen);
2032 if (op == oend) {
break; }
2042 match += inc32table[offset];
2043 memcpy(op+4, match, 4);
2044 match -= dec64table[offset];
2046 memcpy(op, match, 8);
2051 if (
unlikely(cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
2052 BYTE*
const oCopyLimit = oend - (WILDCOPYLENGTH-1);
2053 if (cpy > oend-LASTLITERALS) {
goto _output_error; }
2054 if (op < oCopyLimit) {
2055 LZ4_wildCopy8(op, match, oCopyLimit);
2056 match += oCopyLimit - op;
2059 while (op < cpy) { *op++ = *match++; }
2061 memcpy(op, match, 8);
2062 if (length > 16) { LZ4_wildCopy8(op+8, match+8, cpy); }
2069 return (
int) (((
char*)op)-
dst);
2071 return (
int) (((
const char*)ip)-src);
2076 return (
int) (-(((
const char*)ip)-src))-1;
2083 LZ4_FORCE_O2_GCC_PPC64LE
2086 return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize,
2087 endOnInputSize, decode_full_block, noDict,
2088 (BYTE*)dest, NULL, 0);
2091 LZ4_FORCE_O2_GCC_PPC64LE
2094 dstCapacity = MIN(targetOutputSize, dstCapacity);
2095 return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity,
2096 endOnInputSize, partial_decode,
2097 noDict, (BYTE*)dst, NULL, 0);
2100 LZ4_FORCE_O2_GCC_PPC64LE
2103 return LZ4_decompress_generic(source, dest, 0, originalSize,
2104 endOnOutputSize, decode_full_block, withPrefix64k,
2105 (BYTE*)dest - 64 KB, NULL, 0);
2110 LZ4_FORCE_O2_GCC_PPC64LE
2111 int LZ4_decompress_safe_withPrefix64k(
const char* source,
char* dest,
int compressedSize,
int maxOutputSize)
2113 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
2114 endOnInputSize, decode_full_block, withPrefix64k,
2115 (BYTE*)dest - 64 KB, NULL, 0);
2119 int LZ4_decompress_fast_withPrefix64k(
const char* source,
char* dest,
int originalSize)
2126 LZ4_FORCE_O2_GCC_PPC64LE
2127 static int LZ4_decompress_safe_withSmallPrefix(
const char* source,
char* dest,
int compressedSize,
int maxOutputSize,
2130 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
2131 endOnInputSize, decode_full_block, noDict,
2132 (BYTE*)dest-prefixSize, NULL, 0);
2135 LZ4_FORCE_O2_GCC_PPC64LE
2136 int LZ4_decompress_safe_forceExtDict(
const char* source,
char* dest,
2137 int compressedSize,
int maxOutputSize,
2138 const void* dictStart,
size_t dictSize)
2140 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
2141 endOnInputSize, decode_full_block, usingExtDict,
2142 (BYTE*)dest, (
const BYTE*)dictStart, dictSize);
2145 LZ4_FORCE_O2_GCC_PPC64LE
2146 static int LZ4_decompress_fast_extDict(
const char* source,
char* dest,
int originalSize,
2147 const void* dictStart,
size_t dictSize)
2149 return LZ4_decompress_generic(source, dest, 0, originalSize,
2150 endOnOutputSize, decode_full_block, usingExtDict,
2151 (BYTE*)dest, (
const BYTE*)dictStart, dictSize);
2159 int LZ4_decompress_safe_doubleDict(
const char* source,
char* dest,
int compressedSize,
int maxOutputSize,
2160 size_t prefixSize,
const void* dictStart,
size_t dictSize)
2162 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
2163 endOnInputSize, decode_full_block, usingExtDict,
2164 (BYTE*)dest-prefixSize, (
const BYTE*)dictStart, dictSize);
2168 int LZ4_decompress_fast_doubleDict(
const char* source,
char* dest,
int originalSize,
2169 size_t prefixSize,
const void* dictStart,
size_t dictSize)
2171 return LZ4_decompress_generic(source, dest, 0, originalSize,
2172 endOnOutputSize, decode_full_block, usingExtDict,
2173 (BYTE*)dest-prefixSize, (
const BYTE*)dictStart, dictSize);
2187 if (LZ4_stream == NULL) {
return 0; }
2188 FREEMEM(LZ4_stream);
2202 lz4sd->
prefixEnd = (
const BYTE*) dictionary + dictSize;
2221 if (maxBlockSize < 0)
return 0;
2222 if (maxBlockSize > LZ4_MAX_INPUT_SIZE)
return 0;
2223 if (maxBlockSize < 16) maxBlockSize = 16;
2234 LZ4_FORCE_O2_GCC_PPC64LE
2244 if (result <= 0)
return result;
2246 lz4sd->
prefixEnd = (BYTE*)dest + result;
2250 result = LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize);
2252 result = LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize,
2255 result = LZ4_decompress_safe_doubleDict(source, dest, compressedSize, maxOutputSize,
2257 if (result <= 0)
return result;
2264 result = LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize,
2266 if (result <= 0)
return result;
2268 lz4sd->
prefixEnd = (BYTE*)dest + result;
2274 LZ4_FORCE_O2_GCC_PPC64LE
2279 assert(originalSize >= 0);
2284 if (result <= 0)
return result;
2286 lz4sd->
prefixEnd = (BYTE*)dest + originalSize;
2291 result = LZ4_decompress_fast_doubleDict(source, dest, originalSize,
2293 if (result <= 0)
return result;
2299 result = LZ4_decompress_fast_extDict(source, dest, originalSize,
2301 if (result <= 0)
return result;
2303 lz4sd->
prefixEnd = (BYTE*)dest + originalSize;
2317 int LZ4_decompress_safe_usingDict(
const char* source,
char* dest,
int compressedSize,
int maxOutputSize,
const char* dictStart,
int dictSize)
2321 if (dictStart+dictSize == dest) {
2322 if (dictSize >= 64 KB - 1) {
2323 return LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize);
2325 assert(dictSize >= 0);
2326 return LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize, (
size_t)dictSize);
2328 assert(dictSize >= 0);
2329 return LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize, dictStart, (
size_t)dictSize);
2334 if (dictSize==0 || dictStart+dictSize == dest)
2336 assert(dictSize >= 0);
2337 return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart, (
size_t)dictSize);
2345 int LZ4_compress_limitedOutput(
const char* source,
char* dest,
int inputSize,
int maxOutputSize)
2349 int LZ4_compress(
const char* src,
char* dest,
int srcSize)
2353 int LZ4_compress_limitedOutput_withState (
void* state,
const char* src,
char* dst,
int srcSize,
int dstSize)
2357 int LZ4_compress_withState (
void* state,
const char* src,
char* dst,
int srcSize)
2361 int LZ4_compress_limitedOutput_continue (
LZ4_stream_t* LZ4_stream,
const char* src,
char* dst,
int srcSize,
int dstCapacity)
2365 int LZ4_compress_continue (
LZ4_stream_t* LZ4_stream,
const char* source,
char* dest,
int inputSize)
2376 int LZ4_uncompress (
const char* source,
char* dest,
int outputSize)
2380 int LZ4_uncompress_unknownOutputSize (
const char* source,
char* dest,
int isize,
int maxOutputSize)
2389 int LZ4_resetStreamState(
void* state,
char*
inputBuffer)
2396 void* LZ4_create (
char* inputBuffer)
2402 char* LZ4_slideInputBuffer (
void* state)
2405 return (
char *)(uptrval)((
LZ4_stream_t*)state)->internal_donotuse.dictionary;
#define LZ4_HASH_SIZE_U32
LZ4LIB_API int LZ4_compress_fast(const char *src, char *dst, int srcSize, int dstCapacity, int acceleration)
LZ4LIB_API int LZ4_decompress_fast_usingDict(const char *src, char *dst, int originalSize, const char *dictStart, int dictSize)
const unsigned char * prefixEnd
LZ4LIB_API int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize)
#define LZ4_STREAMDECODESIZE
LZ4LIB_API int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *src, char *dst, int srcSize, int dstCapacity)
#define LZ4_COMPRESSBOUND(isize)
const unsigned char * dictionary
LZ4LIB_API int LZ4_decompress_fast(const char *src, char *dst, int originalSize)
LZ4LIB_API int LZ4_decompress_safe_usingDict(const char *src, char *dst, int srcSize, int dstCapcity, const char *dictStart, int dictSize)
LZ4LIB_API int LZ4_compress_fast_continue(LZ4_stream_t *streamPtr, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration)
static bool match(const WIN32_FIND_DATA *find, LPCTSTR ext)
unsigned int hashTable[LZ4_HASH_SIZE_U32]
#define LZ4_VERSION_STRING
const char char int inputSize
const LZ4_stream_t_internal * dictCtx
LZ4LIB_API int LZ4_decompress_safe_partial(const char *src, char *dst, int srcSize, int targetOutputSize, int dstCapacity)
LZ4LIB_API const char * LZ4_versionString(void)
library version string; useful to check dll version
#define LZ4_HASHTABLESIZE
char int int maxOutputSize
LZ4LIB_API int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize)
LZ4_stream_t_internal internal_donotuse
LZ4LIB_API void LZ4_resetStream(LZ4_stream_t *streamPtr)
LZ4LIB_API int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream)
LZ4LIB_API int LZ4_compressBound(int inputSize)
LZ4LIB_API LZ4_streamDecode_t * LZ4_createStreamDecode(void)
LZ4LIB_API int LZ4_versionNumber(void)
library version number; useful to check dll version
LZ4LIB_API int LZ4_compress_fast_extState(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration)
LZ4LIB_API LZ4_stream_t * LZ4_createStream(void)
unsigned __int32 uint32_t
LZ4LIB_API int LZ4_compress_default(const char *src, char *dst, int srcSize, int dstCapacity)
unsigned int currentOffset
#define LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize)
unsigned __int64 uint64_t
LZ4LIB_API int LZ4_freeStream(LZ4_stream_t *streamPtr)
LZ4LIB_API int LZ4_sizeofState(void)
LZ4LIB_API LZ4_stream_t * LZ4_initStream(void *buffer, size_t size)
LZ4LIB_API int LZ4_loadDict(LZ4_stream_t *streamPtr, const char *dictionary, int dictSize)
LZ4LIB_API int LZ4_saveDict(LZ4_stream_t *streamPtr, char *safeBuffer, int maxDictSize)
LZ4LIB_API int LZ4_decompress_safe(const char *src, char *dst, int compressedSize, int dstCapacity)
LZ4_streamDecode_t_internal internal_donotuse
Wrapper structure for dynamically allocated memory.
LZ4LIB_API void LZ4_resetStream_fast(LZ4_stream_t *streamPtr)
LZ4LIB_API int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *src, char *dst, int originalSize)
unsigned __int16 uint16_t
const unsigned char * externalDict
#define LZ4_MAX_INPUT_SIZE
#define LZ4_VERSION_NUMBER
LZ4LIB_API int LZ4_decoderRingBufferSize(int maxBlockSize)