/* ******************************************************************
   mem.h
   low-level memory access routines
   Copyright (C) 2013-2015, Yann Collet.

   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are
   met:

       * Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
       * Redistributions in binary form must reproduce the above
   copyright notice, this list of conditions and the following disclaimer
   in the documentation and/or other materials provided with the
   distribution.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   You can contact the author at :
   - FSE source repository : https://github.com/Cyan4973/FiniteStateEntropy
   - Public forum : https://groups.google.com/forum/#!forum/lz4c
****************************************************************** */
---|
#ifndef MEM_H_MODULE
#define MEM_H_MODULE

#if defined (__cplusplus)
extern "C" {
#endif

/*-****************************************
*  Dependencies
******************************************/
#include <stddef.h>    /* size_t, ptrdiff_t */
#include <string.h>    /* memcpy */
#if defined(_MSC_VER)  /* Visual Studio */
#  include <stdlib.h>  /* _byteswap_ulong */
#endif


/*-****************************************
*  Compiler specifics
******************************************/
#if defined(_MSC_VER)
#  include <intrin.h>  /* _byteswap_ */
#endif
#if defined(__GNUC__)
#  define MEM_STATIC static __attribute__((unused))
#elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#  define MEM_STATIC static inline
#elif defined(_MSC_VER)
#  define MEM_STATIC static __inline
#else
#  define MEM_STATIC static  /* this version may generate warnings for unused static functions; disable the relevant warning */
#endif
---|


/*-**************************************************************
*  Basic Types
*****************************************************************/
#if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
# include <stdint.h>
  typedef  uint8_t BYTE;
  typedef uint16_t U16;
  typedef  int16_t S16;
  typedef uint32_t U32;
  typedef  int32_t S32;
  typedef uint64_t U64;
  typedef  int64_t S64;
#else
  typedef unsigned char       BYTE;
  typedef unsigned short      U16;
  typedef   signed short      S16;
  typedef unsigned int        U32;
  typedef   signed int        S32;
  typedef unsigned long long  U64;
  typedef   signed long long  S64;
#endif


---|
/*-**************************************************************
*  Memory I/O
*****************************************************************/
/* MEM_FORCE_MEMORY_ACCESS :
 * By default, access to unaligned memory is controlled by `memcpy()`, which is safe and portable.
 * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
 * The switch below allows selecting a different access method for improved performance.
 * Method 0 (default) : use `memcpy()`. Safe and portable.
 * Method 1 : `__packed` statement. It relies on a compiler extension (i.e. not portable).
 *            This method is safe if your compiler supports it, and *generally* as fast or faster than `memcpy`.
 * Method 2 : direct access. This method is portable but violates the C standard.
 *            It can generate buggy code on targets depending on alignment.
 *            In some circumstances, it's the only known way to get the best performance (e.g. GCC + ARMv6).
 * See http://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html for details.
 * Prefer these methods in priority order (0 > 1 > 2).
 */
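/* Example (illustrative sketch, not part of the original selection logic below) :
 * since MEM_FORCE_MEMORY_ACCESS is only defined when not already set, a method can
 * be forced externally. Assuming a gcc-style compiler driver and a placeholder
 * translation unit name `myfile.c` :
 *
 *     cc -DMEM_FORCE_MEMORY_ACCESS=1 -O2 -c myfile.c
 *
 * or, equivalently, from source before including this header :
 *
 *     #define MEM_FORCE_MEMORY_ACCESS 2
 *     #include "mem.h"
 */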
---|
#ifndef MEM_FORCE_MEMORY_ACCESS   /* can be defined externally, on command line for example */
#  if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
#    define MEM_FORCE_MEMORY_ACCESS 2
#  elif defined(__INTEL_COMPILER) || \
     (defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__) ))
#    define MEM_FORCE_MEMORY_ACCESS 1
#  endif
#endif

MEM_STATIC unsigned MEM_32bits(void) { return sizeof(size_t)==4; }
MEM_STATIC unsigned MEM_64bits(void) { return sizeof(size_t)==8; }

MEM_STATIC unsigned MEM_isLittleEndian(void)
{
    const union { U32 u; BYTE c[4]; } one = { 1 };   /* don't use static : performance detrimental */
    return one.c[0];
}

---|
#if defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==2)

/* violates the C standard, by lying on structure alignment.
   Use only if there is no other way to reach best performance on the target platform */
MEM_STATIC U16 MEM_read16(const void* memPtr) { return *(const U16*) memPtr; }
MEM_STATIC U32 MEM_read32(const void* memPtr) { return *(const U32*) memPtr; }
MEM_STATIC U64 MEM_read64(const void* memPtr) { return *(const U64*) memPtr; }
MEM_STATIC size_t MEM_readST(const void* memPtr) { return *(const size_t*) memPtr; }
---|

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { *(U64*)memPtr = value; }

#elif defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==1)

/* __packed attributes are safer, but compiler-specific, hence potentially problematic on other compilers */
/* currently only defined for gcc and icc */
typedef union { U16 u16; U32 u32; U64 u64; size_t st; } __attribute__((packed)) unalign;

MEM_STATIC U16 MEM_read16(const void* ptr) { return ((const unalign*)ptr)->u16; }
MEM_STATIC U32 MEM_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
MEM_STATIC U64 MEM_read64(const void* ptr) { return ((const unalign*)ptr)->u64; }
MEM_STATIC size_t MEM_readST(const void* ptr) { return ((const unalign*)ptr)->st; }

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { ((unalign*)memPtr)->u16 = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { ((unalign*)memPtr)->u32 = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { ((unalign*)memPtr)->u64 = value; }

#else
---|

/* default method, safe and standard.
   can sometimes prove slower */

MEM_STATIC U16 MEM_read16(const void* memPtr)
{
    U16 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U32 MEM_read32(const void* memPtr)
{
    U32 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U64 MEM_read64(const void* memPtr)
{
    U64 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC size_t MEM_readST(const void* memPtr)
{
    size_t val; memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC void MEM_write16(void* memPtr, U16 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write32(void* memPtr, U32 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write64(void* memPtr, U64 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

#endif /* MEM_FORCE_MEMORY_ACCESS */
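
/* Usage sketch (illustrative only) : whichever access method is selected above,
 * MEM_read32()/MEM_write32() behave the same way; they move raw bytes in native
 * byte order to/from a possibly unaligned address :
 *
 *     BYTE buf[7] = { 0, 1, 2, 3, 4, 5, 6 };
 *     U32  v = MEM_read32(buf + 1);    // unaligned read of bytes 1..4
 *     MEM_write32(buf + 2, v);         // unaligned write of bytes 2..5
 */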
---|

MEM_STATIC U32 MEM_swap32(U32 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_ulong(in);
#elif defined (__GNUC__)
    return __builtin_bswap32(in);
#else
    return  ((in << 24) & 0xff000000 ) |
            ((in <<  8) & 0x00ff0000 ) |
            ((in >>  8) & 0x0000ff00 ) |
            ((in >> 24) & 0x000000ff );
#endif
}

MEM_STATIC U64 MEM_swap64(U64 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_uint64(in);
#elif defined (__GNUC__)
    return __builtin_bswap64(in);
#else
    return  ((in << 56) & 0xff00000000000000ULL) |
            ((in << 40) & 0x00ff000000000000ULL) |
            ((in << 24) & 0x0000ff0000000000ULL) |
            ((in <<  8) & 0x000000ff00000000ULL) |
            ((in >>  8) & 0x00000000ff000000ULL) |
            ((in >> 24) & 0x0000000000ff0000ULL) |
            ((in >> 40) & 0x000000000000ff00ULL) |
            ((in >> 56) & 0x00000000000000ffULL);
#endif
}

MEM_STATIC size_t MEM_swapST(size_t in)
{
    if (MEM_32bits())
        return (size_t)MEM_swap32((U32)in);
    else
        return (size_t)MEM_swap64((U64)in);
}
---|

/*=== Little endian r/w ===*/

MEM_STATIC U16 MEM_readLE16(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read16(memPtr);
    else {
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)(p[0] + (p[1]<<8));
    }
}

MEM_STATIC void MEM_writeLE16(void* memPtr, U16 val)
{
    if (MEM_isLittleEndian()) {
        MEM_write16(memPtr, val);
    } else {
        BYTE* p = (BYTE*)memPtr;
        p[0] = (BYTE)val;
        p[1] = (BYTE)(val>>8);
    }
}

MEM_STATIC U32 MEM_readLE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read32(memPtr);
    else
        return MEM_swap32(MEM_read32(memPtr));
}

MEM_STATIC void MEM_writeLE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, val32);
    else
        MEM_write32(memPtr, MEM_swap32(val32));
}

MEM_STATIC U64 MEM_readLE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read64(memPtr);
    else
        return MEM_swap64(MEM_read64(memPtr));
}

MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, val64);
    else
        MEM_write64(memPtr, MEM_swap64(val64));
}

MEM_STATIC size_t MEM_readLEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readLE32(memPtr);
    else
        return (size_t)MEM_readLE64(memPtr);
}

MEM_STATIC void MEM_writeLEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeLE32(memPtr, (U32)val);
    else
        MEM_writeLE64(memPtr, (U64)val);
}
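
/* Usage sketch (illustrative only) : the LE variants read and write a fixed
 * little-endian byte layout whatever the host endianness, which makes them
 * suitable for serializing fields of a portable format. `contentSize` below is
 * a hypothetical caller-provided U32 :
 *
 *     BYTE header[8];
 *     MEM_writeLE32(header,     0x12345678U);   // arbitrary example value
 *     MEM_writeLE32(header + 4, contentSize);
 *     U32 first = MEM_readLE32(header);         // reads back 0x12345678 on any CPU
 */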
---|

/*=== Big endian r/w ===*/

MEM_STATIC U32 MEM_readBE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap32(MEM_read32(memPtr));
    else
        return MEM_read32(memPtr);
}

MEM_STATIC void MEM_writeBE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, MEM_swap32(val32));
    else
        MEM_write32(memPtr, val32);
}

MEM_STATIC U64 MEM_readBE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap64(MEM_read64(memPtr));
    else
        return MEM_read64(memPtr);
}

MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, MEM_swap64(val64));
    else
        MEM_write64(memPtr, val64);
}

MEM_STATIC size_t MEM_readBEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readBE32(memPtr);
    else
        return (size_t)MEM_readBE64(memPtr);
}

MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeBE32(memPtr, (U32)val);
    else
        MEM_writeBE64(memPtr, (U64)val);
}
---|


/* function safe only for comparisons :
   the returned value keeps the bytes in a host-dependent order,
   so use it solely to compare two locations read with the same length */
MEM_STATIC U32 MEM_readMINMATCH(const void* memPtr, U32 length)
{
    switch (length)
    {
    default :
    case 4 : return MEM_read32(memPtr);
    case 3 : if (MEM_isLittleEndian())
                return MEM_read32(memPtr)<<8;
             else
                return MEM_read32(memPtr)>>8;
    }
}
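
/* Usage sketch (illustrative only) : equality of two MEM_readMINMATCH() results
 * taken with the same `length` means the corresponding 3 or 4 leading bytes are
 * identical. `ip`, `match` and `foundMatch` are hypothetical caller variables :
 *
 *     if (MEM_readMINMATCH(ip, 3) == MEM_readMINMATCH(match, 3))
 *         foundMatch = 1;   // first 3 bytes at ip and match are identical
 */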
---|

#if defined (__cplusplus)
}
#endif

#endif /* MEM_H_MODULE */
---|