summaryrefslogtreecommitdiffstats
path: root/lib/lz4.c
diff options
context:
space:
mode:
authorYann Collet <yann.collet.73@gmail.com>2014-12-19 08:51:32 (GMT)
committerYann Collet <yann.collet.73@gmail.com>2014-12-19 08:51:32 (GMT)
commite25b51de7b51101e04ceea194dd557fcc23c03ca (patch)
treee6b6f4e9d0363f2ac6b9fd8e6563b068bb2fa3f7 /lib/lz4.c
parenta5358e602a526b844d6872c0b46087044651feb3 (diff)
parentf68eead36c0eb8f6f4e5c912dfe204c52bb7b7c5 (diff)
downloadlz4-e25b51de7b51101e04ceea194dd557fcc23c03ca.zip
lz4-e25b51de7b51101e04ceea194dd557fcc23c03ca.tar.gz
lz4-e25b51de7b51101e04ceea194dd557fcc23c03ca.tar.bz2
Merge pull request #47 from Cyan4973/devr127r126
Dev
Diffstat (limited to 'lib/lz4.c')
-rw-r--r--lib/lz4.c42
1 file changed, 22 insertions, 20 deletions
diff --git a/lib/lz4.c b/lib/lz4.c
index 2ed686b..ed928ce 100644
--- a/lib/lz4.c
+++ b/lib/lz4.c
@@ -112,12 +112,16 @@
# pragma warning(disable : 4127) /* disable: C4127: conditional expression is constant */
# pragma warning(disable : 4293) /* disable: C4293: too large shift (32-bits) */
#else
-# ifdef __GNUC__
-# define FORCE_INLINE static inline __attribute__((always_inline))
+# if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */
+# ifdef __GNUC__
+# define FORCE_INLINE static inline __attribute__((always_inline))
+# else
+# define FORCE_INLINE static inline
+# endif
# else
-# define FORCE_INLINE static inline
-# endif
-#endif
+# define FORCE_INLINE static
+# endif /* __STDC_VERSION__ */
+#endif /* _MSC_VER */
#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
@@ -264,6 +268,7 @@ static void LZ4_copy4(void* dstPtr, const void* srcPtr)
static void LZ4_copy8(void* dstPtr, const void* srcPtr)
{
+#if GCC_VERSION!=409 /* disabled on GCC 4.9, as it generates invalid opcode (crash) */
if (LZ4_UNALIGNED_ACCESS)
{
if (LZ4_64bits())
@@ -273,6 +278,7 @@ static void LZ4_copy8(void* dstPtr, const void* srcPtr)
((U32*)dstPtr)[1] = ((U32*)srcPtr)[1];
return;
}
+#endif
memcpy(dstPtr, srcPtr, 8);
}
@@ -410,13 +416,10 @@ static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLi
**************************************/
#define LZ4_HASHLOG (LZ4_MEMORY_USAGE-2)
#define HASHTABLESIZE (1 << LZ4_MEMORY_USAGE)
-#define HASH_SIZE_U32 (1 << LZ4_HASHLOG)
-
-#define LZ4_64KLIMIT ((64 KB) + (MFLIMIT-1))
-#define SKIPSTRENGTH 6 /* Increasing this value will make the compression run slower on incompressible data */
+#define HASH_SIZE_U32 (1 << LZ4_HASHLOG) /* required as macro for static allocation */
-#define MAXD_LOG 16
-#define MAX_DISTANCE ((1 << MAXD_LOG) - 1)
+static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
+static const U32 LZ4_skipTrigger = 6; /* Increase this value ==> compression run slower on incompressible data */
/**************************************
@@ -520,7 +523,6 @@ static int LZ4_compress_generic(
BYTE* op = (BYTE*) dest;
BYTE* const olimit = op + maxOutputSize;
- const int skipStrength = SKIPSTRENGTH;
U32 forwardH;
size_t refDelta=0;
@@ -542,8 +544,8 @@ static int LZ4_compress_generic(
lowLimit = (const BYTE*)source;
break;
}
- if ((tableType == byU16) && (inputSize>=(int)LZ4_64KLIMIT)) return 0; /* Size too large (not within 64K limit) */
- if (inputSize<LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */
+ if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0; /* Size too large (not within 64K limit) */
+ if (inputSize<LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */
/* First Byte */
LZ4_putPosition(ip, ctx, tableType, base);
@@ -557,14 +559,14 @@ static int LZ4_compress_generic(
{
const BYTE* forwardIp = ip;
unsigned step=1;
- unsigned searchMatchNb = (1U << skipStrength);
+ unsigned searchMatchNb = (1U << LZ4_skipTrigger);
/* Find a match */
do {
U32 h = forwardH;
ip = forwardIp;
forwardIp += step;
- step = searchMatchNb++ >> skipStrength;
+ step = searchMatchNb++ >> LZ4_skipTrigger;
if (unlikely(forwardIp > mflimit)) goto _last_literals;
@@ -714,7 +716,7 @@ int LZ4_compress(const char* source, char* dest, int inputSize)
#endif
int result;
- if (inputSize < (int)LZ4_64KLIMIT)
+ if (inputSize < LZ4_64Klimit)
result = LZ4_compress_generic((void*)ctx, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue);
else
result = LZ4_compress_generic((void*)ctx, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue);
@@ -734,7 +736,7 @@ int LZ4_compress_limitedOutput(const char* source, char* dest, int inputSize, in
#endif
int result;
- if (inputSize < (int)LZ4_64KLIMIT)
+ if (inputSize < LZ4_64Klimit)
result = LZ4_compress_generic((void*)ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue);
else
result = LZ4_compress_generic((void*)ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue);
@@ -1332,7 +1334,7 @@ int LZ4_compress_withState (void* state, const char* source, char* dest, int inp
if (((size_t)(state)&3) != 0) return 0; /* Error : state is not aligned on 4-bytes boundary */
MEM_INIT(state, 0, LZ4_STREAMSIZE);
- if (inputSize < (int)LZ4_64KLIMIT)
+ if (inputSize < LZ4_64Klimit)
return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue);
else
return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue);
@@ -1343,7 +1345,7 @@ int LZ4_compress_limitedOutput_withState (void* state, const char* source, char*
if (((size_t)(state)&3) != 0) return 0; /* Error : state is not aligned on 4-bytes boundary */
MEM_INIT(state, 0, LZ4_STREAMSIZE);
- if (inputSize < (int)LZ4_64KLIMIT)
+ if (inputSize < LZ4_64Klimit)
return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue);
else
return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue);