author     Nick Terrell <terrelln@fb.com>    2016-11-11 21:00:02 (GMT)
committer  Nick Terrell <terrelln@fb.com>    2016-11-11 21:00:02 (GMT)
commit     85aeb0e4bb9c0b8dd6f6caa00ac2d9c7a4452660 (patch)
tree       70d0ae59b9ca98746ff4ff53cbbb8f8e98f4635d /lib/lz4.c
parent     dbfdd5131cfbcfb4e68312e36658912b144563f2 (diff)
Expose internal types to remove strict aliasing
Diffstat (limited to 'lib/lz4.c')
-rw-r--r--  lib/lz4.c  117
1 file changed, 48 insertions, 69 deletions
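
The commit message is terse, so a short sketch of what it changes may help. Before this patch, LZ4_compress_generic() and the streaming entry points received an opaque void*/LZ4_stream_t* and immediately cast it to the private LZ4_stream_t_internal*, so the same storage was read through two unrelated struct types, which C's strict-aliasing rule leaves undefined. The diff below instead routes every access through an internal_donotuse union member of the now-public internal type. What follows is a minimal, self-contained sketch of that layout and of the before/after access pattern; the constant values and the u64 padding member are illustrative stand-ins, not the exact definitions from lz4.h.

#include <stdint.h>
#include <string.h>

/* Illustrative stand-ins for the lz4 typedefs/constants; the real
 * definitions live in lz4.h after this commit and may differ. */
typedef uint8_t  BYTE;
typedef uint32_t U32;
#define LZ4_MEMORY_USAGE    14
#define LZ4_HASHLOG         (LZ4_MEMORY_USAGE - 2)
#define LZ4_HASH_SIZE_U32   (1 << LZ4_HASHLOG)
#define LZ4_STREAMSIZE_U64  ((1 << (LZ4_MEMORY_USAGE - 3)) + 4)  /* assumed padding size */

/* The formerly private state, now visible to the public header. */
typedef struct {
    U32 hashTable[LZ4_HASH_SIZE_U32];
    U32 currentOffset;
    U32 initCheck;
    const BYTE* dictionary;
    BYTE* bufferStart;            /* obsolete, used for slideInputBuffer */
    U32 dictSize;
} LZ4_stream_t_internal;

/* The public type becomes a union: its size and alignment stay fixed,
 * but the typed view is now reached by member access, not by cast. */
typedef union {
    unsigned long long table[LZ4_STREAMSIZE_U64];
    LZ4_stream_t_internal internal_donotuse;
} LZ4_stream_t;

static void reset_state(LZ4_stream_t* state)
{
    /* Old pattern (what the patch removes): reinterpreting the opaque
     * object through an unrelated struct pointer violates strict
     * aliasing, and the optimizer may assume the two never alias:
     *
     *     LZ4_stream_t_internal* ctx = (LZ4_stream_t_internal*)state;
     *
     * New pattern (what the patch introduces): reach the internal
     * state through the union member, which is well-defined. */
    LZ4_stream_t_internal* ctx = &state->internal_donotuse;
    memset(ctx, 0, sizeof(*ctx));
    ctx->initCheck = 0;
}

int main(void)
{
    LZ4_stream_t s;
    reset_state(&s);
    return 0;
}

Keeping the opaque u64 array in the union preserves the public type's size across versions, while the internal view gives LZ4_compress_generic() a properly typed pointer; this is why every cast in the hunks below turns into an &x->internal_donotuse access.
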
diff --git a/lib/lz4.c b/lib/lz4.c
index f86e6f8..04183e6 100644
--- a/lib/lz4.c
+++ b/lib/lz4.c
@@ -371,10 +371,6 @@ static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLi
/*-************************************
* Local Constants
**************************************/
-#define LZ4_HASHLOG (LZ4_MEMORY_USAGE-2)
-#define HASHTABLESIZE (1 << LZ4_MEMORY_USAGE)
-#define HASH_SIZE_U32 (1 << LZ4_HASHLOG) /* required as macro for static allocation */
-
static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
static const U32 LZ4_skipTrigger = 6; /* Increase this value ==> compression run slower on incompressible data */
@@ -382,15 +378,6 @@ static const U32 LZ4_skipTrigger = 6; /* Increase this value ==> compression ru
/*-************************************
* Local Structures and types
**************************************/
-typedef struct {
- U32 hashTable[HASH_SIZE_U32];
- U32 currentOffset;
- U32 initCheck;
- const BYTE* dictionary;
- BYTE* bufferStart; /* obsolete, used for slideInputBuffer */
- U32 dictSize;
-} LZ4_stream_t_internal;
-
typedef enum { notLimited = 0, limitedOutput = 1 } limitedOutput_directive;
typedef enum { byPtr, byU32, byU16 } tableType_t;
@@ -470,7 +457,7 @@ FORCE_INLINE const BYTE* LZ4_getPosition(const BYTE* p, void* tableBase, tableTy
/** LZ4_compress_generic() :
inlined, to ensure branches are decided at compilation time */
FORCE_INLINE int LZ4_compress_generic(
- void* const ctx,
+ LZ4_stream_t_internal* const dictPtr,
const char* const source,
char* const dest,
const int inputSize,
@@ -481,8 +468,6 @@ FORCE_INLINE int LZ4_compress_generic(
const dictIssue_directive dictIssue,
const U32 acceleration)
{
- LZ4_stream_t_internal* const dictPtr = (LZ4_stream_t_internal*)ctx;
-
const BYTE* ip = (const BYTE*) source;
const BYTE* base;
const BYTE* lowLimit;
@@ -523,7 +508,7 @@ FORCE_INLINE int LZ4_compress_generic(
if (inputSize<LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */
/* First Byte */
- LZ4_putPosition(ip, ctx, tableType, base);
+ LZ4_putPosition(ip, dictPtr->hashTable, tableType, base);
ip++; forwardH = LZ4_hashPosition(ip, tableType);
/* Main Loop */
@@ -543,7 +528,7 @@ FORCE_INLINE int LZ4_compress_generic(
if (unlikely(forwardIp > mflimit)) goto _last_literals;
- match = LZ4_getPositionOnHash(h, ctx, tableType, base);
+ match = LZ4_getPositionOnHash(h, dictPtr->hashTable, tableType, base);
if (dict==usingExtDict) {
if (match < (const BYTE*)source) {
refDelta = dictDelta;
@@ -553,7 +538,7 @@ FORCE_INLINE int LZ4_compress_generic(
lowLimit = (const BYTE*)source;
} }
forwardH = LZ4_hashPosition(forwardIp, tableType);
- LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
+ LZ4_putPositionOnHash(ip, h, dictPtr->hashTable, tableType, base);
} while ( ((dictIssue==dictSmall) ? (match < lowRefLimit) : 0)
|| ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
@@ -626,10 +611,10 @@ _next_match:
if (ip > mflimit) break;
/* Fill table */
- LZ4_putPosition(ip-2, ctx, tableType, base);
+ LZ4_putPosition(ip-2, dictPtr->hashTable, tableType, base);
/* Test next position */
- match = LZ4_getPosition(ip, ctx, tableType, base);
+ match = LZ4_getPosition(ip, dictPtr->hashTable, tableType, base);
if (dict==usingExtDict) {
if (match < (const BYTE*)source) {
refDelta = dictDelta;
@@ -638,7 +623,7 @@ _next_match:
refDelta = 0;
lowLimit = (const BYTE*)source;
} }
- LZ4_putPosition(ip, ctx, tableType, base);
+ LZ4_putPosition(ip, dictPtr->hashTable, tableType, base);
if ( ((dictIssue==dictSmall) ? (match>=lowRefLimit) : 1)
&& (match+MAX_DISTANCE>=ip)
&& (LZ4_read32(match+refDelta)==LZ4_read32(ip)) )
@@ -673,19 +658,20 @@ _last_literals:
int LZ4_compress_fast_extState(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
+ LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse;
LZ4_resetStream((LZ4_stream_t*)state);
if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
if (maxOutputSize >= LZ4_compressBound(inputSize)) {
if (inputSize < LZ4_64Klimit)
- return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
else
- return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
} else {
if (inputSize < LZ4_64Klimit)
- return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
else
- return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
}
}
@@ -722,9 +708,9 @@ int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int m
LZ4_resetStream(&ctx);
if (inputSize < LZ4_64Klimit)
- return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
else
- return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
+ return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
}
@@ -733,7 +719,7 @@ int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int m
********************************/
static int LZ4_compress_destSize_generic(
- void* const ctx,
+ LZ4_stream_t_internal* const ctx,
const char* const src,
char* const dst,
int* const srcSizePtr,
@@ -765,7 +751,7 @@ static int LZ4_compress_destSize_generic(
/* First Byte */
*srcSizePtr = 0;
- LZ4_putPosition(ip, ctx, tableType, base);
+ LZ4_putPosition(ip, ctx->hashTable, tableType, base);
ip++; forwardH = LZ4_hashPosition(ip, tableType);
/* Main Loop */
@@ -786,9 +772,9 @@ static int LZ4_compress_destSize_generic(
if (unlikely(forwardIp > mflimit)) goto _last_literals;
- match = LZ4_getPositionOnHash(h, ctx, tableType, base);
+ match = LZ4_getPositionOnHash(h, ctx->hashTable, tableType, base);
forwardH = LZ4_hashPosition(forwardIp, tableType);
- LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
+ LZ4_putPositionOnHash(ip, h, ctx->hashTable, tableType, base);
} while ( ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
|| (LZ4_read32(match) != LZ4_read32(ip)) );
@@ -847,11 +833,11 @@ _next_match:
if (op > oMaxSeq) break;
/* Fill table */
- LZ4_putPosition(ip-2, ctx, tableType, base);
+ LZ4_putPosition(ip-2, ctx->hashTable, tableType, base);
/* Test next position */
- match = LZ4_getPosition(ip, ctx, tableType, base);
- LZ4_putPosition(ip, ctx, tableType, base);
+ match = LZ4_getPosition(ip, ctx->hashTable, tableType, base);
+ LZ4_putPosition(ip, ctx->hashTable, tableType, base);
if ( (match+MAX_DISTANCE>=ip)
&& (LZ4_read32(match)==LZ4_read32(ip)) )
{ token=op++; *token=0; goto _next_match; }
@@ -888,17 +874,17 @@ _last_literals:
}
-static int LZ4_compress_destSize_extState (void* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
+static int LZ4_compress_destSize_extState (LZ4_stream_t* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
{
- LZ4_resetStream((LZ4_stream_t*)state);
+ LZ4_resetStream(state);
if (targetDstSize >= LZ4_compressBound(*srcSizePtr)) { /* compression success is guaranteed */
return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1);
} else {
if (*srcSizePtr < LZ4_64Klimit)
- return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, byU16);
+ return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, byU16);
else
- return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);
+ return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);
}
}
@@ -906,10 +892,10 @@ static int LZ4_compress_destSize_extState (void* state, const char* src, char* d
int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targetDstSize)
{
#if (HEAPMODE)
- void* ctx = ALLOCATOR(1, sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */
+ LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOCATOR(1, sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */
#else
LZ4_stream_t ctxBody;
- void* ctx = &ctxBody;
+ LZ4_stream_t* ctx = &ctxBody;
#endif
int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
@@ -949,7 +935,7 @@ int LZ4_freeStream (LZ4_stream_t* LZ4_stream)
#define HASH_UNIT sizeof(size_t)
int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize)
{
- LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
+ LZ4_stream_t_internal* dict = &LZ4_dict->internal_donotuse;
const BYTE* p = (const BYTE*)dictionary;
const BYTE* const dictEnd = p + dictSize;
const BYTE* base;
@@ -987,7 +973,7 @@ static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)
U32 const delta = LZ4_dict->currentOffset - 64 KB;
const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize;
int i;
- for (i=0; i<HASH_SIZE_U32; i++) {
+ for (i=0; i<LZ4_HASH_SIZE_U32; i++) {
if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0;
else LZ4_dict->hashTable[i] -= delta;
}
@@ -1000,7 +986,7 @@ static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)
int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
- LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_stream;
+ LZ4_stream_t_internal* streamPtr = &LZ4_stream->internal_donotuse;
const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;
const BYTE* smallest = (const BYTE*) source;
@@ -1023,9 +1009,9 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
if (dictEnd == (const BYTE*)source) {
int result;
if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
- result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration);
+ result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration);
else
- result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration);
+ result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration);
streamPtr->dictSize += (U32)inputSize;
streamPtr->currentOffset += (U32)inputSize;
return result;
@@ -1034,9 +1020,9 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
/* external dictionary mode */
{ int result;
if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
- result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration);
+ result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration);
else
- result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration);
+ result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration);
streamPtr->dictionary = (const BYTE*)source;
streamPtr->dictSize = (U32)inputSize;
streamPtr->currentOffset += (U32)inputSize;
@@ -1048,15 +1034,15 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch
/* Hidden debug function, to force external dictionary mode */
int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int inputSize)
{
- LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_dict;
+ LZ4_stream_t_internal* streamPtr = &LZ4_dict->internal_donotuse;
int result;
const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;
const BYTE* smallest = dictEnd;
if (smallest > (const BYTE*) source) smallest = (const BYTE*) source;
- LZ4_renormDictT((LZ4_stream_t_internal*)LZ4_dict, smallest);
+ LZ4_renormDictT(streamPtr, smallest);
- result = LZ4_compress_generic(LZ4_dict, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);
+ result = LZ4_compress_generic(streamPtr, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);
streamPtr->dictionary = (const BYTE*)source;
streamPtr->dictSize = (U32)inputSize;
@@ -1075,7 +1061,7 @@ int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char*
*/
int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize)
{
- LZ4_stream_t_internal* const dict = (LZ4_stream_t_internal*) LZ4_dict;
+ LZ4_stream_t_internal* const dict = &LZ4_dict->internal_donotuse;
const BYTE* const previousDictEnd = dict->dictionary + dict->dictSize;
if ((U32)dictSize > 64 KB) dictSize = 64 KB; /* useless to define a dictionary > 64 KB */
@@ -1280,13 +1266,6 @@ int LZ4_decompress_fast(const char* source, char* dest, int originalSize)
/*===== streaming decompression functions =====*/
-typedef struct {
- const BYTE* externalDict;
- size_t extDictSize;
- const BYTE* prefixEnd;
- size_t prefixSize;
-} LZ4_streamDecode_t_internal;
-
/*
* If you prefer dynamic allocation methods,
* LZ4_createStreamDecode()
@@ -1313,7 +1292,7 @@ int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream)
*/
int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize)
{
- LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+ LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
lz4sd->prefixSize = (size_t) dictSize;
lz4sd->prefixEnd = (const BYTE*) dictionary + dictSize;
lz4sd->externalDict = NULL;
@@ -1330,7 +1309,7 @@ int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dicti
*/
int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int compressedSize, int maxOutputSize)
{
- LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+ LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
int result;
if (lz4sd->prefixEnd == (BYTE*)dest) {
@@ -1356,7 +1335,7 @@ int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const ch
int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int originalSize)
{
- LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
+ LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
int result;
if (lz4sd->prefixEnd == (BYTE*)dest) {
@@ -1442,29 +1421,29 @@ int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize,
int LZ4_sizeofStreamState() { return LZ4_STREAMSIZE; }
-static void LZ4_init(LZ4_stream_t_internal* lz4ds, BYTE* base)
+static void LZ4_init(LZ4_stream_t* lz4ds, BYTE* base)
{
- MEM_INIT(lz4ds, 0, LZ4_STREAMSIZE);
- lz4ds->bufferStart = base;
+ MEM_INIT(lz4ds, 0, sizeof(LZ4_stream_t));
+ lz4ds->internal_donotuse.bufferStart = base;
}
int LZ4_resetStreamState(void* state, char* inputBuffer)
{
if ((((size_t)state) & 3) != 0) return 1; /* Error : pointer is not aligned on 4-bytes boundary */
- LZ4_init((LZ4_stream_t_internal*)state, (BYTE*)inputBuffer);
+ LZ4_init((LZ4_stream_t*)state, (BYTE*)inputBuffer);
return 0;
}
void* LZ4_create (char* inputBuffer)
{
- void* lz4ds = ALLOCATOR(8, LZ4_STREAMSIZE_U64);
- LZ4_init ((LZ4_stream_t_internal*)lz4ds, (BYTE*)inputBuffer);
+ LZ4_stream_t* lz4ds = (LZ4_stream_t*)ALLOCATOR(8, sizeof(LZ4_stream_t));
+ LZ4_init (lz4ds, (BYTE*)inputBuffer);
return lz4ds;
}
char* LZ4_slideInputBuffer (void* LZ4_Data)
{
- LZ4_stream_t_internal* ctx = (LZ4_stream_t_internal*)LZ4_Data;
+ LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)LZ4_Data)->internal_donotuse;
int dictSize = LZ4_saveDict((LZ4_stream_t*)LZ4_Data, (char*)ctx->bufferStart, 64 KB);
return (char*)(ctx->bufferStart + dictSize);
}