From 84004b90159468e4d4984fcd6ee1dbb5ea53c982 Mon Sep 17 00:00:00 2001
From: "yann.collet.73@gmail.com"
Date: Sat, 28 Jul 2012 13:32:30 +0000
Subject: Added : function LZ4_compress_limitedOutput()

Removed : functions with explicit *ctx management (LZ4_compressCtx & LZ4_compress64kCtx). Functions are still present in the .c
Changed : LZ4_compressBound() now a macro

git-svn-id: https://lz4.googlecode.com/svn/trunk@71 650e7d94-2a16-8b24-b05c-7c0b3f6821cd
---
 lz4.c | 69 ++++++++++++++++++++++++++++++++-----------------------------------
 lz4.h | 51 +++++++++++++++++++++++--------------------------
 2 files changed, 57 insertions(+), 63 deletions(-)

diff --git a/lz4.c b/lz4.c
index b2156cb..0ae0c7e 100644
--- a/lz4.c
+++ b/lz4.c
@@ -41,20 +41,13 @@
 // Default value is 14, for 16KB, which nicely fits into Intel x86 L1 cache
 #define MEMORY_USAGE 14
 
-// NOTCOMPRESSIBLE_CONFIRMATION :
+// NOTCOMPRESSIBLE_DETECTIONLEVEL :
 // Decreasing this value will make the algorithm skip faster data segments considered "incompressible"
 // This may decrease compression ratio dramatically, but will be faster on incompressible data
 // Increasing this value will make the algorithm search more before declaring a segment "incompressible"
 // This could improve compression a bit, but will be slower on incompressible data
 // The default value (6) is recommended
-#define NOTCOMPRESSIBLE_CONFIRMATION 6
-
-// LZ4_COMPRESSMIN :
-// Compression function will *fail* if it is not successful at compressing input by at least LZ4_COMPRESSMIN bytes
-// Since the compression function stops working prematurely, it results in a speed gain
-// The output however is unusable. Compression function result will be zero.
-// Default : 0 = disabled
-#define LZ4_COMPRESSMIN 0
+#define NOTCOMPRESSIBLE_DETECTIONLEVEL 6
 
 // BIG_ENDIAN_NATIVE_BUT_INCOMPATIBLE :
 // This will provide a small boost to performance for big endian cpu, but the resulting compressed stream will be incompatible with little-endian CPU.
@@ -185,7 +178,7 @@ typedef struct _U64_S { U64 v; } U64_S;
 #define HASHTABLESIZE (1 << HASH_LOG)
 #define HASH_MASK (HASHTABLESIZE - 1)
 
-#define SKIPSTRENGTH (NOTCOMPRESSIBLE_CONFIRMATION>2?NOTCOMPRESSIBLE_CONFIRMATION:2)
+#define SKIPSTRENGTH (NOTCOMPRESSIBLE_DETECTIONLEVEL>2?NOTCOMPRESSIBLE_DETECTIONLEVEL:2)
 #define STACKLIMIT 13
 #define HEAPMODE (HASH_LOG>STACKLIMIT)  // Defines if memory is allocated into the stack (local variable), or into the heap (malloc()).
 #define COPYLENGTH 8
@@ -257,7 +250,7 @@ struct refTables
 //****************************
 #if LZ4_ARCH64
 
-inline static int LZ4_NbCommonBytes (register U64 val)
+inline int LZ4_NbCommonBytes (register U64 val)
 {
 #if defined(LZ4_BIG_ENDIAN)
     #if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
@@ -289,7 +282,7 @@ inline static int LZ4_NbCommonBytes (register U64 val)
 
 #else
 
-inline static int LZ4_NbCommonBytes (register U32 val)
+inline int LZ4_NbCommonBytes (register U32 val)
 {
 #if defined(LZ4_BIG_ENDIAN)
     #if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
@@ -321,25 +314,16 @@ inline static int LZ4_NbCommonBytes (register U32 val)
 #endif
 
-//****************************
-// Public functions
-//****************************
-
-int LZ4_compressBound(int isize)
-{
-    return (isize + (isize/255) + 16);
-}
-
-
 //******************************
 // Compression functions
 //******************************
 
-int LZ4_compressCtx(void** ctx,
+inline int LZ4_compressCtx(void** ctx,
                  const char* source,
                  char* dest,
-                 int isize)
+                 int isize,
+                 int maxOutputSize)
 {
 #if HEAPMODE
     struct refTables *srt = (struct refTables *) (*ctx);
@@ -356,6 +340,7 @@ int LZ4_compressCtx(void** ctx,
 #define matchlimit (iend - LASTLITERALS)
 
     BYTE* op = (BYTE*) dest;
+    BYTE* const oend = op + maxOutputSize;
 
     int len, length;
     const int skipStrength = SKIPSTRENGTH;
@@ -410,6 +395,7 @@ int LZ4_compressCtx(void** ctx,
         // Encode Literal length
         length = ip - anchor;
         token = op++;
+        if unlikely(op + length + (2 + 1 + LASTLITERALS) + (length>>8) >= oend) return 0;   // Check output limit
         if (length>=(int)RUN_MASK) { *token=(RUN_MASK<<ML_BITS); len = length-RUN_MASK; for(; len > 254 ; len-=255) *op++ = 255; *op++ = (BYTE)len; }
         else *token = (length<<ML_BITS);
 
-        if ((LZ4_COMPRESSMIN>0) && (((op - (BYTE*)dest) + lastRun + 1 + ((lastRun-15)/255)) > isize - LZ4_COMPRESSMIN)) return 0;
+        if (op + lastRun + 1 + ((lastRun-15)/255) >= oend) return 0;
         if (lastRun>=(int)RUN_MASK) { *op++=(RUN_MASK<<ML_BITS); lastRun-=RUN_MASK; for(; lastRun > 254 ; lastRun-=255) *op++ = 255; *op++ = (BYTE) lastRun; }
         else *op++ = (lastRun<<ML_BITS);
 
 #define LZ4_HASH64K_FUNCTION(i) (((i) * 2654435761U) >> ((MINMATCH*8)-HASHLOG64K))
 #define LZ4_HASH64K_VALUE(p)    LZ4_HASH64K_FUNCTION(A32(p))
-int LZ4_compress64kCtx(void** ctx,
+inline int LZ4_compress64kCtx(void** ctx,
                  const char* source,
                  char* dest,
-                 int isize)
+                 int isize,
+                 int maxOutputSize)
 {
 #if HEAPMODE
     struct refTables *srt = (struct refTables *) (*ctx);
@@ -499,6 +486,7 @@
 #define matchlimit (iend - LASTLITERALS)
 
     BYTE* op = (BYTE*) dest;
+    BYTE* const oend = op + maxOutputSize;
 
     int len, length;
     const int skipStrength = SKIPSTRENGTH;
@@ -552,6 +540,7 @@
         // Encode Literal length
         length = ip - anchor;
         token = op++;
+        if unlikely(op + length + (2 + 1 + LASTLITERALS) + (length>>8) >= oend) return 0;   // Check output limit
         if (length>=(int)RUN_MASK) { *token=(RUN_MASK<<ML_BITS); len = length-RUN_MASK; for(; len > 254 ; len-=255) *op++ = 255; *op++ = (BYTE)len; }
         else *token = (length<<ML_BITS);
 
-        if ((LZ4_COMPRESSMIN>0) && (((op - (BYTE*)dest) + lastRun + 1 + ((lastRun-15)/255)) > isize - LZ4_COMPRESSMIN)) return 0;
+        if (op + lastRun + 1 + ((lastRun-15)/255) >= oend) return 0;
         if (lastRun>=(int)RUN_MASK) { *op++=(RUN_MASK<<ML_BITS); lastRun-=RUN_MASK; for(; lastRun > 254 ; lastRun-=255) *op++ = 255; *op++ = (BYTE) lastRun; }
         else *op++ = (lastRun<<ML_BITS);
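
For illustration, here is a minimal caller-side sketch of how the new LZ4_compress_limitedOutput() and the LZ4_compressBound() macro might be used together. The public prototype is not visible in the hunks above (the lz4.h part of the patch is cut off), so the argument order (source, dest, inputSize, maxOutputSize) and the "returns 0 when the output does not fit" behaviour are assumed from the internal functions modified in lz4.c:

/*
 * Hypothetical usage sketch, not part of the patch.
 * Assumes: int LZ4_compress_limitedOutput(const char* source, char* dest,
 *          int isize, int maxOutputSize), returning 0 on output overflow,
 * and LZ4_compressBound(isize) as the worst-case size macro.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char* src = "repetitive input, repetitive input, repetitive input, repetitive input";
    int isize = (int) strlen(src);

    /* Worst case, per the removed LZ4_compressBound() body (now a macro): isize + isize/255 + 16 */
    char* dst = (char*) malloc(LZ4_compressBound(isize));
    if (dst == NULL) return 1;

    /* Only accept a result strictly smaller than the input; 0 means "did not fit". */
    int budget = isize - 1;
    int csize  = LZ4_compress_limitedOutput(src, dst, isize, budget);

    if (csize == 0)
        printf("input did not compress into %d bytes\n", budget);
    else
        printf("compressed %d -> %d bytes\n", isize, csize);

    free(dst);
    return 0;
}

Returning 0 instead of overrunning dest is what the added oend checks above implement: the encoder gives up as soon as the next literal run or the last literals could cross the caller-supplied output limit, which replaces the old global LZ4_COMPRESSMIN mechanism.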