Diffstat (limited to 'lib/lz4/lz4defs.h')
-rw-r--r--  lib/lz4/lz4defs.h  66
1 file changed, 64 insertions(+), 2 deletions(-)
diff --git a/lib/lz4/lz4defs.h b/lib/lz4/lz4defs.h
index 43ac31d63f3..abcecdc2d0f 100644
--- a/lib/lz4/lz4defs.h
+++ b/lib/lz4/lz4defs.h
@@ -22,23 +22,40 @@
* Architecture-specific macros
*/
#define BYTE u8
+typedef struct _U16_S { u16 v; } U16_S;
+typedef struct _U32_S { u32 v; } U32_S;
+typedef struct _U64_S { u64 v; } U64_S;
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) \
|| defined(CONFIG_ARM) && __LINUX_ARM_ARCH__ >= 6 \
&& defined(ARM_EFFICIENT_UNALIGNED_ACCESS)
-typedef struct _U32_S { u32 v; } U32_S;
-typedef struct _U64_S { u64 v; } U64_S;
+#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)
#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))
+#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
+ do { \
+ A16(p) = v; \
+ p += 2; \
+ } while (0)
#else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
+#define A64(x) get_unaligned((u64 *)&(((U16_S *)(x))->v))
+#define A32(x) get_unaligned((u32 *)&(((U16_S *)(x))->v))
+#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))
+
#define PUT4(s, d) \
put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
#define PUT8(s, d) \
put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)
+
+#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
+ do { \
+ put_unaligned(v, (u16 *)(p)); \
+ p += 2; \
+ } while (0)
#endif
#define COPYLENGTH 8
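
For illustration (not part of the patch): the A16/A32/A64 macros above read and write through a cast to a small wrapper struct when the architecture handles unaligned accesses efficiently, and fall back to get_unaligned()/put_unaligned() otherwise. A minimal userspace sketch of the same idiom, assuming a GCC/Clang toolchain and a target that tolerates unaligned loads; load32 is an illustrative helper standing in for the kernel's get_unaligned():

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint32_t u32;
typedef struct _U32_S { u32 v; } U32_S;

/* Fast path: reinterpret the byte pointer and read ->v in a single load.
 * Only valid where the CPU (and compiler) accept unaligned accesses. */
#define A32(x) (((U32_S *)(x))->v)

/* Portable fallback: memcpy lets the compiler emit whatever byte-wise or
 * unaligned-load sequence the target needs (roughly what get_unaligned()
 * provides in the kernel). */
static u32 load32(const void *p)
{
        u32 v;

        memcpy(&v, p, sizeof(v));
        return v;
}

int main(void)
{
        unsigned char buf[8] = { 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 };

        /* buf + 1 is deliberately misaligned for a 4-byte access. */
        printf("direct: 0x%08x\n", A32(buf + 1));
        printf("memcpy: 0x%08x\n", load32(buf + 1));
        return 0;
}

Both lines print the same value; the difference is only how the unaligned load is performed.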
@@ -46,6 +63,29 @@ typedef struct _U64_S { u64 v; } U64_S;
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
+#define MEMORY_USAGE 14
+#define MINMATCH 4
+#define SKIPSTRENGTH 6
+#define LASTLITERALS 5
+#define MFLIMIT (COPYLENGTH + MINMATCH)
+#define MINLENGTH (MFLIMIT + 1)
+#define MAXD_LOG 16
+#define MAXD (1 << MAXD_LOG)
+#define MAXD_MASK (u32)(MAXD - 1)
+#define MAX_DISTANCE (MAXD - 1)
+#define HASH_LOG (MAXD_LOG - 1)
+#define HASHTABLESIZE (1 << HASH_LOG)
+#define MAX_NB_ATTEMPTS 256
+#define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
+#define LZ4_64KLIMIT ((1<<16) + (MFLIMIT - 1))
+#define HASHLOG64K ((MEMORY_USAGE - 2) + 1)
+#define HASH64KTABLESIZE (1U << HASHLOG64K)
+#define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
+ ((MINMATCH * 8) - (MEMORY_USAGE-2)))
+#define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
+ ((MINMATCH * 8) - HASHLOG64K))
+#define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
+ ((MINMATCH * 8) - HASH_LOG))
#if LZ4_ARCH64 /* 64-bit */
#define STEPSIZE 8
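
The LZ4_HASH_VALUE, LZ4_HASH64K_VALUE and HASH_VALUE macros added above all use the same multiplicative (Fibonacci) hash: read MINMATCH bytes, multiply by 2654435761 (2^32 divided by the golden ratio), and keep the top bits as a table index. An illustrative userspace sketch of the LZ4_HASH_VALUE variant, with memcpy standing in for A32() and the constants copied from the hunk; lz4_hash and HASH_BITS are names introduced here, not taken from the patch:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MINMATCH        4
#define MEMORY_USAGE    14
#define HASH_BITS       (MEMORY_USAGE - 2)      /* 12-bit index, as in LZ4_HASH_VALUE */

static uint32_t lz4_hash(const unsigned char *p)
{
        uint32_t seq;

        memcpy(&seq, p, sizeof(seq));           /* stand-in for A32(p) */
        return (seq * 2654435761U) >> ((MINMATCH * 8) - HASH_BITS);
}

int main(void)
{
        const unsigned char *data = (const unsigned char *)"abcdabcdabcd";

        /* Identical 4-byte sequences land in the same slot, which is how
         * the compressor finds earlier occurrences to match against. */
        printf("slot(\"abcd\"@0) = %u\n", lz4_hash(data));
        printf("slot(\"abcd\"@4) = %u\n", lz4_hash(data + 4));
        return 0;
}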
@@ -65,6 +105,13 @@ typedef struct _U64_S { u64 v; } U64_S;
LZ4_WILDCOPY(s, d, e); \
} \
} while (0)
+#define HTYPE u32
+
+#ifdef __BIG_ENDIAN
+#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
+#else
+#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
+#endif
#else /* 32-bit */
#define STEPSIZE 4
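
LZ4_NBCOMMONBYTES, added for both word sizes, turns "how many leading bytes do two words share" into a single count-zero-bits instruction on the XOR of the two words: trailing zeros on little-endian, leading zeros on big-endian, divided by 8. A small sketch of the 64-bit little-endian case, assuming a GCC/Clang builtin and a nonzero diff (the caller only invokes it once a difference has been found); nb_common_bytes is an illustrative helper:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Little-endian variant of the 64-bit LZ4_NBCOMMONBYTES above: the first
 * differing byte occupies the lowest set bits of the XOR, so the count of
 * trailing zero bits divided by 8 is the number of matching leading bytes. */
static unsigned nb_common_bytes(uint64_t diff)
{
        return (unsigned)(__builtin_ctzll(diff) >> 3);
}

int main(void)
{
        const char *a = "abcdefgh";
        const char *b = "abcdeXgh";
        uint64_t wa, wb;

        memcpy(&wa, a, 8);
        memcpy(&wb, b, 8);

        /* XOR leaves zero bytes wherever a and b agree; prints 5 here. */
        printf("common bytes: %u\n", nb_common_bytes(wa ^ wb));
        return 0;
}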
@@ -83,6 +130,14 @@ typedef struct _U64_S { u64 v; } U64_S;
} while (0)
#define LZ4_SECURECOPY LZ4_WILDCOPY
+#define HTYPE const u8*
+
+#ifdef __BIG_ENDIAN
+#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
+#else
+#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
+#endif
+
#endif
#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
@@ -92,3 +147,10 @@ typedef struct _U64_S { u64 v; } U64_S;
do { \
LZ4_COPYPACKET(s, d); \
} while (d < e)
+
+#define LZ4_BLINDCOPY(s, d, l) \
+ do { \
+ u8 *e = (d) + l; \
+ LZ4_WILDCOPY(s, d, e); \
+ d = e; \
+ } while (0)
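
LZ4_BLINDCOPY builds on LZ4_WILDCOPY, which copies whole packets and may write past the requested end by up to one packet minus one byte; that overrun is why the LASTLITERALS/MFLIMIT margins defined earlier in this patch exist. A simplified standalone sketch of the idea, using fixed 8-byte packets, memcpy instead of PUT8, and slack reserved by the caller; wildcopy8 is an illustrative helper, not a macro from the patch:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Copy in 8-byte packets; the last packet may run up to 7 bytes past 'len'.
 * This is only safe because the caller guarantees slack bytes after the
 * real buffers, which is what MFLIMIT/LASTLITERALS enforce in LZ4. */
static void wildcopy8(uint8_t *dst, const uint8_t *src, size_t len)
{
        uint8_t *end = dst + len;

        do {
                memcpy(dst, src, 8);
                dst += 8;
                src += 8;
        } while (dst < end);
}

int main(void)
{
        /* 8 bytes of slack after the 10 payload bytes absorbs the overrun. */
        uint8_t src[10 + 8] = "0123456789";
        uint8_t dst[10 + 8] = { 0 };

        wildcopy8(dst, src, 10);
        printf("%.10s\n", dst);
        return 0;
}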