Diffstat (limited to 'lib/lz4.c')
-rw-r--r-- | lib/lz4.c | 132 |
1 file changed, 56 insertions, 76 deletions
@@ -41,7 +41,9 @@
  * Select how default compression functions will allocate memory for their hash table,
  * in memory stack (0:default, fastest), or in memory heap (1:requires malloc()).
  */
-#define HEAPMODE 0
+#ifndef HEAPMODE
+# define HEAPMODE 0
+#endif

 /*
  * ACCELERATION_DEFAULT :
@@ -109,8 +111,7 @@
 # endif
 #endif  /* _MSC_VER */

-/* LZ4_GCC_VERSION is defined into lz4.h */
-#if (LZ4_GCC_VERSION >= 302) || (__INTEL_COMPILER >= 800) || defined(__clang__)
+#if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__)
 # define expect(expr,value)    (__builtin_expect ((expr),(value)) )
 #else
 # define expect(expr,value)    (expr)
@@ -299,7 +300,7 @@ static unsigned LZ4_NbCommonBytes (register size_t val)
     unsigned long r = 0;
     _BitScanForward64( &r, (U64)val );
     return (int)(r>>3);
-# elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
+# elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
     return (__builtin_ctzll((U64)val) >> 3);
 # else
     static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7 };
@@ -310,7 +311,7 @@ static unsigned LZ4_NbCommonBytes (register size_t val)
     unsigned long r;
     _BitScanForward( &r, (U32)val );
     return (int)(r>>3);
-# elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
+# elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
     return (__builtin_ctz((U32)val) >> 3);
 # else
     static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1, 3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1 };
@@ -323,7 +324,7 @@ static unsigned LZ4_NbCommonBytes (register size_t val)
     unsigned long r = 0;
     _BitScanReverse64( &r, val );
     return (unsigned)(r>>3);
-# elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
+# elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
     return (__builtin_clzll((U64)val) >> 3);
 # else
     unsigned r;
@@ -337,7 +338,7 @@ static unsigned LZ4_NbCommonBytes (register size_t val)
     unsigned long r = 0;
     _BitScanReverse( &r, (unsigned long)val );
     return (unsigned)(r>>3);
-# elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
+# elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
     return (__builtin_clz((U32)val) >> 3);
 # else
     unsigned r;
@@ -371,10 +372,6 @@ static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLi
 /*-************************************
 *  Local Constants
 **************************************/
-#define LZ4_HASHLOG (LZ4_MEMORY_USAGE-2)
-#define HASHTABLESIZE (1 << LZ4_MEMORY_USAGE)
-#define HASH_SIZE_U32 (1 << LZ4_HASHLOG)   /* required as macro for static allocation */
-
 static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
 static const U32 LZ4_skipTrigger = 6;  /* Increase this value ==> compression run slower on incompressible data */

@@ -382,15 +379,6 @@ static const U32 LZ4_skipTrigger = 6; /* Increase this value ==> compression ru
 /*-************************************
 *  Local Structures and types
 **************************************/
-typedef struct {
-    U32 hashTable[HASH_SIZE_U32];
-    U32 currentOffset;
-    U32 initCheck;
-    const BYTE* dictionary;
-    BYTE* bufferStart;   /* obsolete, used for slideInputBuffer */
-    U32 dictSize;
-} LZ4_stream_t_internal;
-
 typedef enum { notLimited = 0, limitedOutput = 1 } limitedOutput_directive;
 typedef enum { byPtr, byU32, byU16 } tableType_t;
@@ -470,7 +458,7 @@ FORCE_INLINE const BYTE* LZ4_getPosition(const BYTE* p, void* tableBase, tableTy
 /** LZ4_compress_generic() :
     inlined, to ensure branches are decided at compilation time */
 FORCE_INLINE int LZ4_compress_generic(
-                 void* const ctx,
+                 LZ4_stream_t_internal* const dictPtr,
                  const char* const source,
                  char* const dest,
                  const int inputSize,
@@ -481,8 +469,6 @@ FORCE_INLINE int LZ4_compress_generic(
                  const dictIssue_directive dictIssue,
                  const U32 acceleration)
 {
-    LZ4_stream_t_internal* const dictPtr = (LZ4_stream_t_internal*)ctx;
-
     const BYTE* ip = (const BYTE*) source;
     const BYTE* base;
     const BYTE* lowLimit;
@@ -523,7 +509,7 @@ FORCE_INLINE int LZ4_compress_generic(
     if (inputSize<LZ4_minLength) goto _last_literals;   /* Input too small, no compression (all literals) */

     /* First Byte */
-    LZ4_putPosition(ip, ctx, tableType, base);
+    LZ4_putPosition(ip, dictPtr->hashTable, tableType, base);
     ip++; forwardH = LZ4_hashPosition(ip, tableType);

     /* Main Loop */
@@ -543,7 +529,7 @@ FORCE_INLINE int LZ4_compress_generic(

             if (unlikely(forwardIp > mflimit)) goto _last_literals;

-            match = LZ4_getPositionOnHash(h, ctx, tableType, base);
+            match = LZ4_getPositionOnHash(h, dictPtr->hashTable, tableType, base);
             if (dict==usingExtDict) {
                 if (match < (const BYTE*)source) {
                     refDelta = dictDelta;
@@ -553,7 +539,7 @@ FORCE_INLINE int LZ4_compress_generic(
                     lowLimit = (const BYTE*)source;
             }   }
             forwardH = LZ4_hashPosition(forwardIp, tableType);
-            LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
+            LZ4_putPositionOnHash(ip, h, dictPtr->hashTable, tableType, base);

         } while ( ((dictIssue==dictSmall) ? (match < lowRefLimit) : 0)
                 || ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
@@ -626,10 +612,10 @@ _next_match:
         if (ip > mflimit) break;

         /* Fill table */
-        LZ4_putPosition(ip-2, ctx, tableType, base);
+        LZ4_putPosition(ip-2, dictPtr->hashTable, tableType, base);

         /* Test next position */
-        match = LZ4_getPosition(ip, ctx, tableType, base);
+        match = LZ4_getPosition(ip, dictPtr->hashTable, tableType, base);
         if (dict==usingExtDict) {
             if (match < (const BYTE*)source) {
                 refDelta = dictDelta;
@@ -638,7 +624,7 @@ _next_match:
                 refDelta = 0;
                 lowLimit = (const BYTE*)source;
         }   }
-        LZ4_putPosition(ip, ctx, tableType, base);
+        LZ4_putPosition(ip, dictPtr->hashTable, tableType, base);
         if ( ((dictIssue==dictSmall) ? (match>=lowRefLimit) : 1)
             && (match+MAX_DISTANCE>=ip)
             && (LZ4_read32(match+refDelta)==LZ4_read32(ip)) )
@@ -673,19 +659,20 @@ _last_literals:

 int LZ4_compress_fast_extState(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
 {
+    LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse;
     LZ4_resetStream((LZ4_stream_t*)state);
     if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;

     if (maxOutputSize >= LZ4_compressBound(inputSize)) {
         if (inputSize < LZ4_64Klimit)
-            return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
+            return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
         else
-            return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+            return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
     } else {
         if (inputSize < LZ4_64Klimit)
-            return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
+            return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
         else
-            return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+            return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
     }
 }
@@ -722,9 +709,9 @@ int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int m
     LZ4_resetStream(&ctx);

     if (inputSize < LZ4_64Klimit)
-        return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
+        return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
     else
-        return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+        return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
 }
@@ -733,7 +720,7 @@ int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int m
 ********************************/

 static int LZ4_compress_destSize_generic(
-                       void* const ctx,
+                       LZ4_stream_t_internal* const ctx,
                        const char* const src,
                        char* const dst,
                        int* const srcSizePtr,
@@ -765,7 +752,7 @@ static int LZ4_compress_destSize_generic(

     /* First Byte */
     *srcSizePtr = 0;
-    LZ4_putPosition(ip, ctx, tableType, base);
+    LZ4_putPosition(ip, ctx->hashTable, tableType, base);
     ip++; forwardH = LZ4_hashPosition(ip, tableType);

     /* Main Loop */
@@ -786,9 +773,9 @@ static int LZ4_compress_destSize_generic(

             if (unlikely(forwardIp > mflimit)) goto _last_literals;

-            match = LZ4_getPositionOnHash(h, ctx, tableType, base);
+            match = LZ4_getPositionOnHash(h, ctx->hashTable, tableType, base);
             forwardH = LZ4_hashPosition(forwardIp, tableType);
-            LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
+            LZ4_putPositionOnHash(ip, h, ctx->hashTable, tableType, base);

         } while ( ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
             || (LZ4_read32(match) != LZ4_read32(ip)) );
@@ -847,11 +834,11 @@ _next_match:
         if (op > oMaxSeq) break;

         /* Fill table */
-        LZ4_putPosition(ip-2, ctx, tableType, base);
+        LZ4_putPosition(ip-2, ctx->hashTable, tableType, base);

         /* Test next position */
-        match = LZ4_getPosition(ip, ctx, tableType, base);
-        LZ4_putPosition(ip, ctx, tableType, base);
+        match = LZ4_getPosition(ip, ctx->hashTable, tableType, base);
+        LZ4_putPosition(ip, ctx->hashTable, tableType, base);
         if ( (match+MAX_DISTANCE>=ip)
             && (LZ4_read32(match)==LZ4_read32(ip)) )
         { token=op++; *token=0; goto _next_match; }
@@ -888,17 +875,17 @@ _last_literals:
 }

-static int LZ4_compress_destSize_extState (void* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
+static int LZ4_compress_destSize_extState (LZ4_stream_t* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
 {
-    LZ4_resetStream((LZ4_stream_t*)state);
+    LZ4_resetStream(state);

     if (targetDstSize >= LZ4_compressBound(*srcSizePtr)) {  /* compression success is guaranteed */
         return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1);
     } else {
         if (*srcSizePtr < LZ4_64Klimit)
-            return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, byU16);
+            return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, byU16);
         else
-            return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);
+            return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);
     }
 }
@@ -906,10 +893,10 @@ static int LZ4_compress_destSize_extState (void* state, const char* src, char* d
 int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targetDstSize)
 {
 #if (HEAPMODE)
-    void* ctx = ALLOCATOR(1, sizeof(LZ4_stream_t));   /* malloc-calloc always properly aligned */
+    LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOCATOR(1, sizeof(LZ4_stream_t));   /* malloc-calloc always properly aligned */
 #else
     LZ4_stream_t ctxBody;
-    void* ctx = &ctxBody;
+    LZ4_stream_t* ctx = &ctxBody;
 #endif

     int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
@@ -949,7 +936,7 @@ int LZ4_freeStream (LZ4_stream_t* LZ4_stream)
 #define HASH_UNIT sizeof(size_t)
 int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize)
 {
-    LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
+    LZ4_stream_t_internal* dict = &LZ4_dict->internal_donotuse;
     const BYTE* p = (const BYTE*)dictionary;
     const BYTE* const dictEnd = p + dictSize;
     const BYTE* base;
@@ -987,7 +974,7 @@ static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)
         U32 const delta = LZ4_dict->currentOffset - 64 KB;
         const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize;
         int i;
-        for (i=0; i<HASH_SIZE_U32; i++) {
+        for (i=0; i<LZ4_HASH_SIZE_U32; i++) {
             if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0;
             else LZ4_dict->hashTable[i] -= delta;
         }
@@ -1000,7 +987,7 @@ static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)

 int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
 {
-    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_stream;
+    LZ4_stream_t_internal* streamPtr = &LZ4_stream->internal_donotuse;
     const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;
     const BYTE* smallest = (const BYTE*) source;
@@ -1023,9 +1010,9 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
     if (dictEnd == (const BYTE*)source) {
         int result;
         if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
-            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration);
+            result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration);
         else
-            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration);
+            result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration);
         streamPtr->dictSize += (U32)inputSize;
         streamPtr->currentOffset += (U32)inputSize;
         return result;
@@ -1034,9 +1021,9 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
     /* external dictionary mode */
     {   int result;
         if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
-            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration);
+            result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration);
         else
-            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration);
+            result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration);
         streamPtr->dictionary = (const BYTE*)source;
         streamPtr->dictSize = (U32)inputSize;
         streamPtr->currentOffset += (U32)inputSize;
@@ -1048,15 +1035,15 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
 /* Hidden debug function, to force external dictionary mode */
 int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int inputSize)
 {
-    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_dict;
+    LZ4_stream_t_internal* streamPtr = &LZ4_dict->internal_donotuse;
     int result;
     const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;

     const BYTE* smallest = dictEnd;
     if (smallest > (const BYTE*) source) smallest = (const BYTE*) source;
-    LZ4_renormDictT((LZ4_stream_t_internal*)LZ4_dict, smallest);
+    LZ4_renormDictT(streamPtr, smallest);

-    result = LZ4_compress_generic(LZ4_dict, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);
+    result = LZ4_compress_generic(streamPtr, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);

     streamPtr->dictionary = (const BYTE*)source;
     streamPtr->dictSize = (U32)inputSize;
@@ -1075,7 +1062,7 @@ int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char*
 */
 int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize)
 {
-    LZ4_stream_t_internal* const dict = (LZ4_stream_t_internal*) LZ4_dict;
+    LZ4_stream_t_internal* const dict = &LZ4_dict->internal_donotuse;
     const BYTE* const previousDictEnd = dict->dictionary + dict->dictSize;

     if ((U32)dictSize > 64 KB) dictSize = 64 KB;   /* useless to define a dictionary > 64 KB */
@@ -1280,13 +1267,6 @@ int LZ4_decompress_fast(const char* source, char* dest, int originalSize)

 /*===== streaming decompression functions =====*/

-typedef struct {
-    const BYTE* externalDict;
-    size_t extDictSize;
-    const BYTE* prefixEnd;
-    size_t prefixSize;
-} LZ4_streamDecode_t_internal;
-
 /*
  * If you prefer dynamic allocation methods,
  * LZ4_createStreamDecode()
@@ -1313,7 +1293,7 @@ int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream)
 */
 int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize)
 {
-    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+    LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
     lz4sd->prefixSize = (size_t) dictSize;
     lz4sd->prefixEnd = (const BYTE*) dictionary + dictSize;
     lz4sd->externalDict = NULL;
@@ -1330,7 +1310,7 @@ int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dicti
 */
 int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int compressedSize, int maxOutputSize)
 {
-    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+    LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
     int result;

     if (lz4sd->prefixEnd == (BYTE*)dest) {
@@ -1356,7 +1336,7 @@ int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const ch

 int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int originalSize)
 {
-    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+    LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
     int result;

     if (lz4sd->prefixEnd == (BYTE*)dest) {
@@ -1442,29 +1422,29 @@ int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize,

 int LZ4_sizeofStreamState() { return LZ4_STREAMSIZE; }

-static void LZ4_init(LZ4_stream_t_internal* lz4ds, BYTE* base)
+static void LZ4_init(LZ4_stream_t* lz4ds, BYTE* base)
 {
-    MEM_INIT(lz4ds, 0, LZ4_STREAMSIZE);
-    lz4ds->bufferStart = base;
+    MEM_INIT(lz4ds, 0, sizeof(LZ4_stream_t));
+    lz4ds->internal_donotuse.bufferStart = base;
 }

 int LZ4_resetStreamState(void* state, char* inputBuffer)
 {
     if ((((size_t)state) & 3) != 0) return 1;   /* Error : pointer is not aligned on 4-bytes boundary */
-    LZ4_init((LZ4_stream_t_internal*)state, (BYTE*)inputBuffer);
+    LZ4_init((LZ4_stream_t*)state, (BYTE*)inputBuffer);
     return 0;
 }

 void* LZ4_create (char* inputBuffer)
 {
-    void* lz4ds = ALLOCATOR(8, LZ4_STREAMSIZE_U64);
-    LZ4_init ((LZ4_stream_t_internal*)lz4ds, (BYTE*)inputBuffer);
+    LZ4_stream_t* lz4ds = (LZ4_stream_t*)ALLOCATOR(8, sizeof(LZ4_stream_t));
+    LZ4_init (lz4ds, (BYTE*)inputBuffer);
     return lz4ds;
 }

 char* LZ4_slideInputBuffer (void* LZ4_Data)
 {
-    LZ4_stream_t_internal* ctx = (LZ4_stream_t_internal*)LZ4_Data;
+    LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)LZ4_Data)->internal_donotuse;
     int dictSize = LZ4_saveDict((LZ4_stream_t*)LZ4_Data, (char*)ctx->bufferStart, 64 KB);
     return (char*)(ctx->bufferStart + dictSize);
 }
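Note: the public entry points are unchanged by this patch; the compressor now reaches its hash table only through the internal_donotuse member of LZ4_stream_t, and HEAPMODE can be overridden at build time (e.g. cc -DHEAPMODE=1 -c lz4.c). As a minimal illustrative sketch only (not part of the commit; the helper name below is made up), caller-managed state still goes through the extState path:

    #include "lz4.h"

    /* Sketch: compress one buffer with caller-provided state, mirroring the
     * HEAPMODE==0 (stack allocation) path. LZ4_compress_fast_extState resets
     * the state itself, as the hunk at line 673 above shows. Returns the
     * number of bytes written to dst, or 0 if dstCapacity is too small. */
    int compress_with_own_state(const char* src, int srcSize, char* dst, int dstCapacity)
    {
        LZ4_stream_t state;
        return LZ4_compress_fast_extState(&state, src, dst, srcSize, dstCapacity, 1);
    }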