extlzham 0.0.1.PROTOTYPE3-x86-mingw32
- checksums.yaml +7 -0
- data/LICENSE.md +27 -0
- data/README.md +74 -0
- data/Rakefile +152 -0
- data/contrib/lzham/LICENSE +22 -0
- data/contrib/lzham/README.md +209 -0
- data/contrib/lzham/include/lzham.h +781 -0
- data/contrib/lzham/lzhamcomp/lzham_comp.h +38 -0
- data/contrib/lzham/lzhamcomp/lzham_lzbase.cpp +244 -0
- data/contrib/lzham/lzhamcomp/lzham_lzbase.h +45 -0
- data/contrib/lzham/lzhamcomp/lzham_lzcomp.cpp +608 -0
- data/contrib/lzham/lzhamcomp/lzham_lzcomp_internal.cpp +1966 -0
- data/contrib/lzham/lzhamcomp/lzham_lzcomp_internal.h +472 -0
- data/contrib/lzham/lzhamcomp/lzham_lzcomp_state.cpp +1413 -0
- data/contrib/lzham/lzhamcomp/lzham_match_accel.cpp +562 -0
- data/contrib/lzham/lzhamcomp/lzham_match_accel.h +146 -0
- data/contrib/lzham/lzhamcomp/lzham_null_threading.h +97 -0
- data/contrib/lzham/lzhamcomp/lzham_pthreads_threading.cpp +229 -0
- data/contrib/lzham/lzhamcomp/lzham_pthreads_threading.h +520 -0
- data/contrib/lzham/lzhamcomp/lzham_threading.h +12 -0
- data/contrib/lzham/lzhamcomp/lzham_win32_threading.cpp +220 -0
- data/contrib/lzham/lzhamcomp/lzham_win32_threading.h +368 -0
- data/contrib/lzham/lzhamdecomp/lzham_assert.cpp +66 -0
- data/contrib/lzham/lzhamdecomp/lzham_assert.h +40 -0
- data/contrib/lzham/lzhamdecomp/lzham_checksum.cpp +73 -0
- data/contrib/lzham/lzhamdecomp/lzham_checksum.h +13 -0
- data/contrib/lzham/lzhamdecomp/lzham_config.h +23 -0
- data/contrib/lzham/lzhamdecomp/lzham_core.h +264 -0
- data/contrib/lzham/lzhamdecomp/lzham_decomp.h +37 -0
- data/contrib/lzham/lzhamdecomp/lzham_helpers.h +54 -0
- data/contrib/lzham/lzhamdecomp/lzham_huffman_codes.cpp +262 -0
- data/contrib/lzham/lzhamdecomp/lzham_huffman_codes.h +14 -0
- data/contrib/lzham/lzhamdecomp/lzham_lzdecomp.cpp +1527 -0
- data/contrib/lzham/lzhamdecomp/lzham_lzdecompbase.cpp +131 -0
- data/contrib/lzham/lzhamdecomp/lzham_lzdecompbase.h +89 -0
- data/contrib/lzham/lzhamdecomp/lzham_math.h +142 -0
- data/contrib/lzham/lzhamdecomp/lzham_mem.cpp +284 -0
- data/contrib/lzham/lzhamdecomp/lzham_mem.h +112 -0
- data/contrib/lzham/lzhamdecomp/lzham_platform.cpp +157 -0
- data/contrib/lzham/lzhamdecomp/lzham_platform.h +284 -0
- data/contrib/lzham/lzhamdecomp/lzham_prefix_coding.cpp +351 -0
- data/contrib/lzham/lzhamdecomp/lzham_prefix_coding.h +146 -0
- data/contrib/lzham/lzhamdecomp/lzham_symbol_codec.cpp +1484 -0
- data/contrib/lzham/lzhamdecomp/lzham_symbol_codec.h +556 -0
- data/contrib/lzham/lzhamdecomp/lzham_timer.cpp +147 -0
- data/contrib/lzham/lzhamdecomp/lzham_timer.h +99 -0
- data/contrib/lzham/lzhamdecomp/lzham_traits.h +141 -0
- data/contrib/lzham/lzhamdecomp/lzham_types.h +97 -0
- data/contrib/lzham/lzhamdecomp/lzham_utils.h +58 -0
- data/contrib/lzham/lzhamdecomp/lzham_vector.cpp +75 -0
- data/contrib/lzham/lzhamdecomp/lzham_vector.h +588 -0
- data/contrib/lzham/lzhamlib/lzham_lib.cpp +179 -0
- data/examples/basic.rb +48 -0
- data/ext/constants.c +64 -0
- data/ext/decoder.c +313 -0
- data/ext/depend +5 -0
- data/ext/encoder.c +372 -0
- data/ext/error.c +80 -0
- data/ext/extconf.rb +29 -0
- data/ext/extlzham.c +34 -0
- data/ext/extlzham.h +62 -0
- data/gemstub.rb +22 -0
- data/lib/2.0/extlzham.so +0 -0
- data/lib/2.1/extlzham.so +0 -0
- data/lib/2.2/extlzham.so +0 -0
- data/lib/extlzham.rb +158 -0
- data/lib/extlzham/version.rb +5 -0
- data/test/test_extlzham.rb +35 -0
- metadata +156 -0
data/contrib/lzham/lzhamdecomp/lzham_mem.h
@@ -0,0 +1,112 @@
// File: lzham_mem.h
// See Copyright Notice and license at the end of include/lzham.h
#pragma once

namespace lzham
{
   void lzham_mem_init();

   void* lzham_malloc(size_t size, size_t* pActual_size = NULL);
   void* lzham_realloc(void* p, size_t size, size_t* pActual_size = NULL, bool movable = true);
   void lzham_free(void* p);
   size_t lzham_msize(void* p);

   template<typename T>
   inline T* lzham_new()
   {
      T* p = static_cast<T*>(lzham_malloc(sizeof(T)));
      if (!p) return NULL;
      if (LZHAM_IS_SCALAR_TYPE(T))
         return p;
      return helpers::construct(p);
   }

   template<typename T, typename A>
   inline T* lzham_new(const A& init0)
   {
      T* p = static_cast<T*>(lzham_malloc(sizeof(T)));
      if (!p) return NULL;
      return new (static_cast<void*>(p)) T(init0);
   }

   template<typename T, typename A, typename B>
   inline T* lzham_new(const A& init0, const B& init1)
   {
      T* p = static_cast<T*>(lzham_malloc(sizeof(T)));
      if (!p) return NULL;
      return new (static_cast<void*>(p)) T(init0, init1);
   }

   template<typename T, typename A, typename B, typename C>
   inline T* lzham_new(const A& init0, const B& init1, const C& init2)
   {
      T* p = static_cast<T*>(lzham_malloc(sizeof(T)));
      if (!p) return NULL;
      return new (static_cast<void*>(p)) T(init0, init1, init2);
   }

   template<typename T, typename A, typename B, typename C, typename D>
   inline T* lzham_new(const A& init0, const B& init1, const C& init2, const D& init3)
   {
      T* p = static_cast<T*>(lzham_malloc(sizeof(T)));
      if (!p) return NULL;
      return new (static_cast<void*>(p)) T(init0, init1, init2, init3);
   }

   template<typename T>
   inline T* lzham_new_array(uint32 num)
   {
      if (!num) num = 1;

      uint8* q = static_cast<uint8*>(lzham_malloc(LZHAM_MIN_ALLOC_ALIGNMENT + sizeof(T) * num));
      if (!q)
         return NULL;

      T* p = reinterpret_cast<T*>(q + LZHAM_MIN_ALLOC_ALIGNMENT);

      reinterpret_cast<uint32*>(p)[-1] = num;
      reinterpret_cast<uint32*>(p)[-2] = ~num;

      if (!LZHAM_IS_SCALAR_TYPE(T))
      {
         helpers::construct_array(p, num);
      }
      return p;
   }

   template<typename T>
   inline void lzham_delete(T* p)
   {
      if (p)
      {
         if (!LZHAM_IS_SCALAR_TYPE(T))
         {
            helpers::destruct(p);
         }
         lzham_free(p);
      }
   }

   template<typename T>
   inline void lzham_delete_array(T* p)
   {
      if (p)
      {
         const uint32 num = reinterpret_cast<uint32*>(p)[-1];
         const uint32 num_check = reinterpret_cast<uint32*>(p)[-2];
         LZHAM_ASSERT(num && (num == ~num_check));
         if (num == ~num_check)
         {
            if (!LZHAM_IS_SCALAR_TYPE(T))
            {
               helpers::destruct_array(p, num);
            }

            lzham_free(reinterpret_cast<uint8*>(p) - LZHAM_MIN_ALLOC_ALIGNMENT);
         }
      }
   }

   void lzham_print_mem_stats();

} // namespace lzham
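The over-allocation trick in lzham_new_array()/lzham_delete_array() above is worth spelling out: the allocation is padded by LZHAM_MIN_ALLOC_ALIGNMENT, and the element count plus its bitwise complement are written into the two uint32 slots just below the returned pointer, so the matching delete can recover the count and detect a corrupted or mismatched pointer. Below is a minimal standalone sketch of the same header scheme, using plain malloc/free and a fixed 16-byte pad instead of the lzham allocator; demo_new_array/demo_delete_array and kHeaderBytes are illustrative names, not part of LZHAM.

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <new>

// Hypothetical stand-in for LZHAM_MIN_ALLOC_ALIGNMENT.
static const size_t kHeaderBytes = 16;

template <typename T>
T* demo_new_array(uint32_t num)
{
    if (!num) num = 1;
    uint8_t* q = static_cast<uint8_t*>(std::malloc(kHeaderBytes + sizeof(T) * num));
    if (!q) return nullptr;

    T* p = reinterpret_cast<T*>(q + kHeaderBytes);
    // Stash the element count and its complement just before the array,
    // as lzham_new_array does.
    reinterpret_cast<uint32_t*>(p)[-1] = num;
    reinterpret_cast<uint32_t*>(p)[-2] = ~num;

    for (uint32_t i = 0; i < num; ++i)
        new (static_cast<void*>(p + i)) T();   // construct_array equivalent
    return p;
}

template <typename T>
void demo_delete_array(T* p)
{
    if (!p) return;
    const uint32_t num       = reinterpret_cast<uint32_t*>(p)[-1];
    const uint32_t num_check = reinterpret_cast<uint32_t*>(p)[-2];
    assert(num && num == ~num_check);          // mirrors LZHAM_ASSERT(num && (num == ~num_check))
    if (num != ~num_check) return;

    for (uint32_t i = 0; i < num; ++i)
        p[i].~T();                             // destruct_array equivalent
    std::free(reinterpret_cast<uint8_t*>(p) - kHeaderBytes);
}

struct Item { int v; Item() : v(42) {} };

int main()
{
    Item* a = demo_new_array<Item>(8);
    std::printf("a[3].v = %d\n", a[3].v);
    demo_delete_array(a);
    return 0;
}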
data/contrib/lzham/lzhamdecomp/lzham_platform.cpp
@@ -0,0 +1,157 @@
// File: platform.cpp
// See Copyright Notice and license at the end of include/lzham.h
#include "lzham_core.h"
#include "lzham_timer.h"
#include <assert.h>

#if LZHAM_PLATFORM_X360
#include <xbdm.h>
#endif

#define LZHAM_FORCE_DEBUGGER_PRESENT 1

#ifndef _MSC_VER
int sprintf_s(char *buffer, size_t sizeOfBuffer, const char *format, ...)
{
   if (!sizeOfBuffer)
      return 0;

   va_list args;
   va_start(args, format);
   int c = vsnprintf(buffer, sizeOfBuffer, format, args);
   va_end(args);

   buffer[sizeOfBuffer - 1] = '\0';

   if (c < 0)
      return static_cast<int>(sizeOfBuffer - 1);

   return LZHAM_MIN(c, (int)sizeOfBuffer - 1);
}
int vsprintf_s(char *buffer, size_t sizeOfBuffer, const char *format, va_list args)
{
   if (!sizeOfBuffer)
      return 0;

   int c = vsnprintf(buffer, sizeOfBuffer, format, args);

   buffer[sizeOfBuffer - 1] = '\0';

   if (c < 0)
      return static_cast<int>(sizeOfBuffer - 1);

   return LZHAM_MIN(c, (int)sizeOfBuffer - 1);
}
#endif // __GNUC__

bool lzham_is_debugger_present(void)
{
#if LZHAM_PLATFORM_X360
   return DmIsDebuggerPresent() != 0;
#elif LZHAM_USE_WIN32_API
   return IsDebuggerPresent() != 0;
#elif LZHAM_FORCE_DEBUGGER_PRESENT
   return true;
#else
   return false;
#endif
}

void lzham_debug_break(void)
{
#if LZHAM_USE_WIN32_API
   DebugBreak();
#elif (TARGET_OS_MAC == 1) && (TARGET_IPHONE_SIMULATOR == 0) && (TARGET_OS_IPHONE == 0)
   __asm {int 3}
#else
   assert(0);
#endif
}

void lzham_output_debug_string(const char* p)
{
   LZHAM_NOTE_UNUSED(p);
#if LZHAM_USE_WIN32_API
   OutputDebugStringA(p);
#else
   fputs(p, stderr);
#endif
}

#if LZHAM_BUFFERED_PRINTF
// This stuff was a quick hack only intended for debugging/development.
namespace lzham
{
   struct buffered_str
   {
      enum { cBufSize = 256 };
      char m_buf[cBufSize];
   };

   static lzham::vector<buffered_str> g_buffered_strings;
   static volatile long g_buffered_string_locked;

   static void lock_buffered_strings()
   {
      while (atomic_exchange32(&g_buffered_string_locked, 1) == 1)
      {
         lzham_yield_processor();
         lzham_yield_processor();
         lzham_yield_processor();
         lzham_yield_processor();
      }

      LZHAM_MEMORY_IMPORT_BARRIER
   }

   static void unlock_buffered_strings()
   {
      LZHAM_MEMORY_EXPORT_BARRIER

      atomic_exchange32(&g_buffered_string_locked, 0);
   }

} // namespace lzham

void lzham_buffered_printf(const char *format, ...)
{
   format;

   char buf[lzham::buffered_str::cBufSize];

   va_list args;
   va_start(args, format);
   vsnprintf_s(buf, sizeof(buf), sizeof(buf), format, args);
   va_end(args);

   buf[sizeof(buf) - 1] = '\0';

   lzham::lock_buffered_strings();

   if (!lzham::g_buffered_strings.capacity())
   {
      lzham::g_buffered_strings.try_reserve(2048);
   }

   if (lzham::g_buffered_strings.try_resize(lzham::g_buffered_strings.size() + 1))
   {
      memcpy(lzham::g_buffered_strings.back().m_buf, buf, sizeof(buf));
   }

   lzham::unlock_buffered_strings();
}

void lzham_flush_buffered_printf()
{
   lzham::lock_buffered_strings();

   for (lzham::uint i = 0; i < lzham::g_buffered_strings.size(); i++)
   {
      printf("%s", lzham::g_buffered_strings[i].m_buf);
   }

   lzham::g_buffered_strings.try_resize(0);

   lzham::unlock_buffered_strings();
}
#endif
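lock_buffered_strings()/unlock_buffered_strings() above form a bare test-and-set spinlock: spin on atomic_exchange32() until the previous value was 0, issue a few processor-pause hints per iteration, and bracket the critical section with the import/export barrier macros. The same shape in portable C++11 is sketched below, with std::atomic standing in for atomic_exchange32 and the barrier macros; lock_strings/unlock_strings and yield_processor are illustrative names, not LZHAM's code.

#include <atomic>
#include <cstdio>
#include <thread>
#include <vector>

#if defined(__i386__) || defined(__x86_64__)
#include <immintrin.h>   // _mm_pause
#endif

static std::atomic<int> g_locked(0);

static void yield_processor()
{
#if defined(__i386__) || defined(__x86_64__)
    _mm_pause();                  // same hint as the "pause" in lzham_yield_processor
#else
    std::this_thread::yield();
#endif
}

static void lock_strings()
{
    // exchange(1) returns the previous value; 1 means another thread holds the lock.
    // memory_order_acquire plays the role of LZHAM_MEMORY_IMPORT_BARRIER.
    while (g_locked.exchange(1, std::memory_order_acquire) == 1)
    {
        yield_processor();
        yield_processor();
        yield_processor();
        yield_processor();
    }
}

static void unlock_strings()
{
    // memory_order_release plays the role of LZHAM_MEMORY_EXPORT_BARRIER.
    g_locked.store(0, std::memory_order_release);
}

int main()
{
    long counter = 0;
    std::vector<std::thread> workers;
    for (int t = 0; t < 4; ++t)
        workers.emplace_back([&] {
            for (int i = 0; i < 100000; ++i)
            {
                lock_strings();
                ++counter;        // protected by the spinlock
                unlock_strings();
            }
        });
    for (auto& w : workers) w.join();
    std::printf("counter = %ld (expected 400000)\n", counter);
    return 0;
}

On x86/x64 the acquire exchange and release store add no extra fence instructions, which matches the "barriers shouldn't be necessary on x86/x64" note in lzham_platform.h below.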
data/contrib/lzham/lzhamdecomp/lzham_platform.h
@@ -0,0 +1,284 @@
// File: lzham_platform.h
// See Copyright Notice and license at the end of include/lzham.h
#pragma once

bool lzham_is_debugger_present(void);
void lzham_debug_break(void);
void lzham_output_debug_string(const char* p);

// actually in lzham_assert.cpp
void lzham_assert(const char* pExp, const char* pFile, unsigned line);
void lzham_fail(const char* pExp, const char* pFile, unsigned line);

#ifdef WIN32
   #define LZHAM_BREAKPOINT DebuggerBreak();
   #define LZHAM_BUILTIN_EXPECT(c, v) c
#elif defined(__GNUC__)
   #define LZHAM_BREAKPOINT asm("int $3");
   #define LZHAM_BUILTIN_EXPECT(c, v) __builtin_expect(c, v)
#else
   #define LZHAM_BREAKPOINT
   #define LZHAM_BUILTIN_EXPECT(c, v) c
#endif

#if defined(__GNUC__) && LZHAM_PLATFORM_PC
extern __inline__ __attribute__((__always_inline__,__gnu_inline__)) void lzham_yield_processor()
{
   __asm__ __volatile__("pause");
}
#elif LZHAM_PLATFORM_X360
#define lzham_yield_processor() \
   YieldProcessor(); \
   __asm { or r0, r0, r0 } \
   YieldProcessor(); \
   __asm { or r1, r1, r1 } \
   YieldProcessor(); \
   __asm { or r0, r0, r0 } \
   YieldProcessor(); \
   __asm { or r1, r1, r1 } \
   YieldProcessor(); \
   __asm { or r0, r0, r0 } \
   YieldProcessor(); \
   __asm { or r1, r1, r1 } \
   YieldProcessor(); \
   __asm { or r0, r0, r0 } \
   YieldProcessor(); \
   __asm { or r1, r1, r1 }
#else
LZHAM_FORCE_INLINE void lzham_yield_processor()
{
#if LZHAM_USE_MSVC_INTRINSICS
   #if LZHAM_PLATFORM_PC_X64
      _mm_pause();
   #else
      YieldProcessor();
   #endif
#else
   // No implementation
#endif
}
#endif

#ifndef _MSC_VER
int sprintf_s(char *buffer, size_t sizeOfBuffer, const char *format, ...);
int vsprintf_s(char *buffer, size_t sizeOfBuffer, const char *format, va_list args);
#endif

#if LZHAM_PLATFORM_X360
   #define LZHAM_MEMORY_EXPORT_BARRIER MemoryBarrier();
#else
   // Barriers shouldn't be necessary on x86/x64.
   // TODO: Should use __sync_synchronize() on other platforms that support GCC.
   #define LZHAM_MEMORY_EXPORT_BARRIER
#endif

#if LZHAM_PLATFORM_X360
   #define LZHAM_MEMORY_IMPORT_BARRIER MemoryBarrier();
#else
   // Barriers shouldn't be necessary on x86/x64.
   // TODO: Should use __sync_synchronize() on other platforms that support GCC.
   #define LZHAM_MEMORY_IMPORT_BARRIER
#endif

// Note: It's very important that LZHAM_READ_BIG_ENDIAN_UINT32() is fast on the target platform.
// This is used to read every DWORD from the input stream.

#if LZHAM_USE_UNALIGNED_INT_LOADS
   #if LZHAM_BIG_ENDIAN_CPU
      #define LZHAM_READ_BIG_ENDIAN_UINT32(p) *reinterpret_cast<const uint32*>(p)
   #else
      #if defined(LZHAM_USE_MSVC_INTRINSICS)
         #define LZHAM_READ_BIG_ENDIAN_UINT32(p) _byteswap_ulong(*reinterpret_cast<const uint32*>(p))
      #elif defined(__GNUC__)
         #define LZHAM_READ_BIG_ENDIAN_UINT32(p) __builtin_bswap32(*reinterpret_cast<const uint32*>(p))
      #else
         #define LZHAM_READ_BIG_ENDIAN_UINT32(p) utils::swap32(*reinterpret_cast<const uint32*>(p))
      #endif
   #endif
#else
   #define LZHAM_READ_BIG_ENDIAN_UINT32(p) ((reinterpret_cast<const uint8*>(p)[0] << 24) | (reinterpret_cast<const uint8*>(p)[1] << 16) | (reinterpret_cast<const uint8*>(p)[2] << 8) | (reinterpret_cast<const uint8*>(p)[3]))
#endif

#if LZHAM_USE_WIN32_ATOMIC_FUNCTIONS
extern "C" __int64 _InterlockedCompareExchange64(__int64 volatile * Destination, __int64 Exchange, __int64 Comperand);
   #if defined(_MSC_VER)
      #pragma intrinsic(_InterlockedCompareExchange64)
   #endif
#endif // LZHAM_USE_WIN32_ATOMIC_FUNCTIONS

namespace lzham
{
#if LZHAM_USE_WIN32_ATOMIC_FUNCTIONS
   typedef LONG atomic32_t;
   typedef LONGLONG atomic64_t;

   // Returns the original value.
   inline atomic32_t atomic_compare_exchange32(atomic32_t volatile *pDest, atomic32_t exchange, atomic32_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedCompareExchange(pDest, exchange, comparand);
   }

   // Returns the original value.
   inline atomic64_t atomic_compare_exchange64(atomic64_t volatile *pDest, atomic64_t exchange, atomic64_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 7) == 0);
      return _InterlockedCompareExchange64(pDest, exchange, comparand);
   }

   // Returns the resulting incremented value.
   inline atomic32_t atomic_increment32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedIncrement(pDest);
   }

   // Returns the resulting decremented value.
   inline atomic32_t atomic_decrement32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedDecrement(pDest);
   }

   // Returns the original value.
   inline atomic32_t atomic_exchange32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedExchange(pDest, val);
   }

   // Returns the resulting value.
   inline atomic32_t atomic_add32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedExchangeAdd(pDest, val) + val;
   }

   // Returns the original value.
   inline atomic32_t atomic_exchange_add(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return InterlockedExchangeAdd(pDest, val);
   }
#elif LZHAM_USE_GCC_ATOMIC_BUILTINS
   typedef long atomic32_t;
   typedef long long atomic64_t;

   // Returns the original value.
   inline atomic32_t atomic_compare_exchange32(atomic32_t volatile *pDest, atomic32_t exchange, atomic32_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_val_compare_and_swap(pDest, comparand, exchange);
   }

   // Returns the original value.
   inline atomic64_t atomic_compare_exchange64(atomic64_t volatile *pDest, atomic64_t exchange, atomic64_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 7) == 0);
      return __sync_val_compare_and_swap(pDest, comparand, exchange);
   }

   // Returns the resulting incremented value.
   inline atomic32_t atomic_increment32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_add_and_fetch(pDest, 1);
   }

   // Returns the resulting decremented value.
   inline atomic32_t atomic_decrement32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_sub_and_fetch(pDest, 1);
   }

   // Returns the original value.
   inline atomic32_t atomic_exchange32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_lock_test_and_set(pDest, val);
   }

   // Returns the resulting value.
   inline atomic32_t atomic_add32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_add_and_fetch(pDest, val);
   }

   // Returns the original value.
   inline atomic32_t atomic_exchange_add(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return __sync_fetch_and_add(pDest, val);
   }
#else
   #define LZHAM_NO_ATOMICS 1

   // Atomic ops not supported - but try to do something reasonable. Assumes no threading at all.
   typedef long atomic32_t;
   typedef long long atomic64_t;

   inline atomic32_t atomic_compare_exchange32(atomic32_t volatile *pDest, atomic32_t exchange, atomic32_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      atomic32_t cur = *pDest;
      if (cur == comparand)
         *pDest = exchange;
      return cur;
   }

   inline atomic64_t atomic_compare_exchange64(atomic64_t volatile *pDest, atomic64_t exchange, atomic64_t comparand)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 7) == 0);
      atomic64_t cur = *pDest;
      if (cur == comparand)
         *pDest = exchange;
      return cur;
   }

   inline atomic32_t atomic_increment32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return (*pDest += 1);
   }

   inline atomic32_t atomic_decrement32(atomic32_t volatile *pDest)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return (*pDest -= 1);
   }

   inline atomic32_t atomic_exchange32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      atomic32_t cur = *pDest;
      *pDest = val;
      return cur;
   }

   inline atomic32_t atomic_add32(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      return (*pDest += val);
   }

   inline atomic32_t atomic_exchange_add(atomic32_t volatile *pDest, atomic32_t val)
   {
      LZHAM_ASSERT((reinterpret_cast<ptr_bits_t>(pDest) & 3) == 0);
      atomic32_t cur = *pDest;
      *pDest += val;
      return cur;
   }

#endif

#if LZHAM_BUFFERED_PRINTF
   void lzham_buffered_printf(const char *format, ...);
   void lzham_flush_buffered_printf();
#else
   inline void lzham_buffered_printf(const char *format, ...) { (void)format; }
   inline void lzham_flush_buffered_printf() { }
#endif

} // namespace lzham
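LZHAM_READ_BIG_ENDIAN_UINT32() above is flagged as the decoder's hot path: with unaligned loads enabled it performs a native 32-bit load plus a byteswap on little-endian CPUs, otherwise it assembles the value byte by byte. The small self-check below compares the portable byte-wise form against the __builtin_bswap32 fast path; read_big_endian_u32 is an illustrative stand-in, and the fast-path branch assumes GCC/Clang on a little-endian target.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Portable form: same shape as the non-unaligned-load fallback of the macro.
static inline uint32_t read_big_endian_u32(const void* pv)
{
    const uint8_t* p = static_cast<const uint8_t*>(pv);
    return (uint32_t(p[0]) << 24) | (uint32_t(p[1]) << 16) |
           (uint32_t(p[2]) <<  8) |  uint32_t(p[3]);
}

int main()
{
    const uint8_t buf[4] = { 0x12, 0x34, 0x56, 0x78 };

    uint32_t portable = read_big_endian_u32(buf);

#if defined(__GNUC__) && defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
    // Fast path on little-endian GCC/Clang: 32-bit load + bswap, mirroring the
    // __builtin_bswap32 branch of the macro. memcpy sidesteps alignment/aliasing issues.
    uint32_t raw;
    std::memcpy(&raw, buf, 4);
    uint32_t fast = __builtin_bswap32(raw);
    std::printf("fast     = 0x%08X\n", fast);
#endif

    std::printf("portable = 0x%08X (expected 0x12345678)\n", portable);
    return 0;
}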