/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

/*
 * Detect 64-bit mode
 */
#if (defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) \
	|| defined(__ppc64__) || defined(__LP64__))
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif
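
/*
 * LZ4_ARCH64 selects the wider code paths below: 8-byte copy steps and
 * 64-bit bit-scan builtins instead of the 4-byte/32-bit variants.
 */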

/*
 * Architecture-specific macros
 */
#define BYTE u8
typedef struct _U16_S { u16 v; } U16_S;
typedef struct _U32_S { u32 v; } U32_S;
typedef struct _U64_S { u64 v; } U64_S;
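
/*
 * A16/A32/A64 read or write potentially unaligned words. Where the
 * kernel reports efficient unaligned access (or on ARMv6+ with
 * ARM_EFFICIENT_UNALIGNED_ACCESS), they dereference through the wrapper
 * structs directly; otherwise they go through the get_unaligned() and
 * put_unaligned() helpers.
 */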
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) \
	|| (defined(CONFIG_ARM) && __LINUX_ARM_ARCH__ >= 6 \
	&& defined(ARM_EFFICIENT_UNALIGNED_ACCESS))

#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))
/* LZ4 stores offsets little-endian regardless of host byte order */
#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		put_unaligned_le16(v, (u16 *)(p));	\
		p += 2;	\
	} while (0)
#else /* !CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */

#define A64(x) get_unaligned((u64 *)&(((U64_S *)(x))->v))
#define A32(x) get_unaligned((u32 *)&(((U32_S *)(x))->v))
#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))

#define PUT4(s, d) \
	put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
#define PUT8(s, d) \
	put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)

#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		put_unaligned_le16(v, (u16 *)(p));	\
		p += 2;	\
	} while (0)
#endif
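
/*
 * Format constants: each LZ4 sequence starts with a one-byte token
 * whose high RUN_BITS bits hold the literal run length and low ML_BITS
 * bits the match length; a saturated field (RUN_MASK or ML_MASK) means
 * additional length bytes follow.
 */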

#define COPYLENGTH 8
#define ML_BITS 4
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE 14
#define MINMATCH 4
#define SKIPSTRENGTH 6
#define LASTLITERALS 5
#define MFLIMIT (COPYLENGTH + MINMATCH)
#define MINLENGTH (MFLIMIT + 1)
#define MAXD_LOG 16
#define MAXD (1 << MAXD_LOG)
#define MAXD_MASK (u32)(MAXD - 1)
#define MAX_DISTANCE (MAXD - 1)
#define HASH_LOG (MAXD_LOG - 1)
#define HASHTABLESIZE (1 << HASH_LOG)
#define MAX_NB_ATTEMPTS 256
#define OPTIMAL_ML (int)((ML_MASK - 1) + MINMATCH)
#define LZ4_64KLIMIT ((1 << 16) + (MFLIMIT - 1))
#define HASHLOG64K ((MEMORY_USAGE - 2) + 1)
#define HASH64KTABLESIZE (1U << HASHLOG64K)
#define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - (MEMORY_USAGE - 2)))
#define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASH_LOG))
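
/*
 * The three hash macros above use multiplicative hashing: read 4 bytes,
 * multiply by the golden-ratio prime 2654435761U, and keep only as many
 * top bits as the corresponding table needs for its index.
 */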

#if LZ4_ARCH64	/* 64-bit */
#define STEPSIZE 8

#define LZ4_COPYSTEP(s, d)	\
	do {	\
		PUT8(s, d);	\
		d += 8;	\
		s += 8;	\
	} while (0)

#define LZ4_COPYPACKET(s, d)	LZ4_COPYSTEP(s, d)

#define LZ4_SECURECOPY(s, d, e)	\
	do {	\
		if (d < e) {	\
			LZ4_WILDCOPY(s, d, e);	\
		}	\
	} while (0)
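/*
 * Unlike LZ4_WILDCOPY, LZ4_SECURECOPY above copies nothing once d has
 * reached e (WILDCOPY's do-while always copies at least one packet).
 * The 32-bit build simply aliases it to LZ4_WILDCOPY.
 */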
#define HTYPE u32	/* hash table entries hold 32-bit offsets */

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif

#else	/* 32-bit */
#define STEPSIZE 4

#define LZ4_COPYSTEP(s, d)	\
	do {	\
		PUT4(s, d);	\
		d += 4;	\
		s += 4;	\
	} while (0)

#define LZ4_COPYPACKET(s, d)	\
	do {	\
		LZ4_COPYSTEP(s, d);	\
		LZ4_COPYSTEP(s, d);	\
	} while (0)

#define LZ4_SECURECOPY	LZ4_WILDCOPY
#define HTYPE const u8*	/* hash table entries hold direct pointers */

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif
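
/*
 * LZ4_NBCOMMONBYTES(val) takes the XOR of two input words and returns
 * how many of their low-address bytes are equal: count trailing zero
 * bits on little-endian (leading zeros on big-endian) and divide by 8.
 */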

#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
	(d = s - get_unaligned_le16(p))
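/*
 * LZ4_READ_LITTLEENDIAN_16 above converts the little-endian 16-bit
 * match offset at p into a match source pointer d, counting back from s.
 */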

#define LZ4_WILDCOPY(s, d, e)	\
	do {	\
		LZ4_COPYPACKET(s, d);	\
	} while (d < e)
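/*
 * LZ4_WILDCOPY copies whole packets until d reaches e, so the final
 * packet may write up to COPYLENGTH - 1 bytes past e; the LASTLITERALS
 * and MFLIMIT margins above exist to keep that overrun inside the
 * buffers.
 */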

#define LZ4_BLINDCOPY(s, d, l)	\
	do {	\
		u8 *e = (d) + l;	\
		LZ4_WILDCOPY(s, d, e);	\
		d = e;	\
	} while (0)
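/*
 * LZ4_BLINDCOPY advances d by exactly l bytes, but because it is built
 * on LZ4_WILDCOPY it may still write past d + l; callers must guarantee
 * that slack.
 */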