asmjit 0.2.0 → 0.2.2

Sign up to get free protection for your applications and to get access to all the features.
Files changed (204) hide show
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/Rakefile +5 -3
  4. data/asmjit.gemspec +1 -3
  5. data/ext/asmjit/asmjit/.editorconfig +10 -0
  6. data/ext/asmjit/asmjit/.github/FUNDING.yml +1 -0
  7. data/ext/asmjit/asmjit/.github/workflows/build-config.json +47 -0
  8. data/ext/asmjit/asmjit/.github/workflows/build.yml +156 -0
  9. data/ext/asmjit/asmjit/.gitignore +6 -0
  10. data/ext/asmjit/asmjit/CMakeLists.txt +611 -0
  11. data/ext/asmjit/asmjit/LICENSE.md +17 -0
  12. data/ext/asmjit/asmjit/README.md +69 -0
  13. data/ext/asmjit/asmjit/src/asmjit/a64.h +62 -0
  14. data/ext/asmjit/asmjit/src/asmjit/arm/a64archtraits_p.h +81 -0
  15. data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.cpp +5115 -0
  16. data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.h +72 -0
  17. data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.cpp +51 -0
  18. data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.h +57 -0
  19. data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.cpp +60 -0
  20. data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.h +247 -0
  21. data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper.cpp +464 -0
  22. data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper_p.h +50 -0
  23. data/ext/asmjit/asmjit/src/asmjit/arm/a64emitter.h +1228 -0
  24. data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter.cpp +298 -0
  25. data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter_p.h +59 -0
  26. data/ext/asmjit/asmjit/src/asmjit/arm/a64func.cpp +189 -0
  27. data/ext/asmjit/asmjit/src/asmjit/arm/a64func_p.h +33 -0
  28. data/ext/asmjit/asmjit/src/asmjit/arm/a64globals.h +1894 -0
  29. data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi.cpp +278 -0
  30. data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi_p.h +41 -0
  31. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.cpp +1957 -0
  32. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.h +74 -0
  33. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb_p.h +876 -0
  34. data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.cpp +85 -0
  35. data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.h +312 -0
  36. data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass.cpp +852 -0
  37. data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass_p.h +105 -0
  38. data/ext/asmjit/asmjit/src/asmjit/arm/a64utils.h +179 -0
  39. data/ext/asmjit/asmjit/src/asmjit/arm/armformatter.cpp +143 -0
  40. data/ext/asmjit/asmjit/src/asmjit/arm/armformatter_p.h +44 -0
  41. data/ext/asmjit/asmjit/src/asmjit/arm/armglobals.h +21 -0
  42. data/ext/asmjit/asmjit/src/asmjit/arm/armoperand.h +621 -0
  43. data/ext/asmjit/asmjit/src/asmjit/arm.h +62 -0
  44. data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-begin.h +17 -0
  45. data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-end.h +9 -0
  46. data/ext/asmjit/asmjit/src/asmjit/asmjit.h +33 -0
  47. data/ext/asmjit/asmjit/src/asmjit/core/api-build_p.h +55 -0
  48. data/ext/asmjit/asmjit/src/asmjit/core/api-config.h +613 -0
  49. data/ext/asmjit/asmjit/src/asmjit/core/archcommons.h +229 -0
  50. data/ext/asmjit/asmjit/src/asmjit/core/archtraits.cpp +160 -0
  51. data/ext/asmjit/asmjit/src/asmjit/core/archtraits.h +290 -0
  52. data/ext/asmjit/asmjit/src/asmjit/core/assembler.cpp +406 -0
  53. data/ext/asmjit/asmjit/src/asmjit/core/assembler.h +129 -0
  54. data/ext/asmjit/asmjit/src/asmjit/core/builder.cpp +889 -0
  55. data/ext/asmjit/asmjit/src/asmjit/core/builder.h +1391 -0
  56. data/ext/asmjit/asmjit/src/asmjit/core/codebuffer.h +113 -0
  57. data/ext/asmjit/asmjit/src/asmjit/core/codeholder.cpp +1149 -0
  58. data/ext/asmjit/asmjit/src/asmjit/core/codeholder.h +1035 -0
  59. data/ext/asmjit/asmjit/src/asmjit/core/codewriter.cpp +175 -0
  60. data/ext/asmjit/asmjit/src/asmjit/core/codewriter_p.h +179 -0
  61. data/ext/asmjit/asmjit/src/asmjit/core/compiler.cpp +582 -0
  62. data/ext/asmjit/asmjit/src/asmjit/core/compiler.h +737 -0
  63. data/ext/asmjit/asmjit/src/asmjit/core/compilerdefs.h +173 -0
  64. data/ext/asmjit/asmjit/src/asmjit/core/constpool.cpp +363 -0
  65. data/ext/asmjit/asmjit/src/asmjit/core/constpool.h +250 -0
  66. data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.cpp +1162 -0
  67. data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.h +813 -0
  68. data/ext/asmjit/asmjit/src/asmjit/core/emithelper.cpp +323 -0
  69. data/ext/asmjit/asmjit/src/asmjit/core/emithelper_p.h +58 -0
  70. data/ext/asmjit/asmjit/src/asmjit/core/emitter.cpp +333 -0
  71. data/ext/asmjit/asmjit/src/asmjit/core/emitter.h +741 -0
  72. data/ext/asmjit/asmjit/src/asmjit/core/emitterutils.cpp +129 -0
  73. data/ext/asmjit/asmjit/src/asmjit/core/emitterutils_p.h +89 -0
  74. data/ext/asmjit/asmjit/src/asmjit/core/environment.cpp +46 -0
  75. data/ext/asmjit/asmjit/src/asmjit/core/environment.h +508 -0
  76. data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.cpp +14 -0
  77. data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.h +228 -0
  78. data/ext/asmjit/asmjit/src/asmjit/core/formatter.cpp +584 -0
  79. data/ext/asmjit/asmjit/src/asmjit/core/formatter.h +247 -0
  80. data/ext/asmjit/asmjit/src/asmjit/core/formatter_p.h +34 -0
  81. data/ext/asmjit/asmjit/src/asmjit/core/func.cpp +286 -0
  82. data/ext/asmjit/asmjit/src/asmjit/core/func.h +1445 -0
  83. data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext.cpp +293 -0
  84. data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext_p.h +199 -0
  85. data/ext/asmjit/asmjit/src/asmjit/core/globals.cpp +133 -0
  86. data/ext/asmjit/asmjit/src/asmjit/core/globals.h +393 -0
  87. data/ext/asmjit/asmjit/src/asmjit/core/inst.cpp +113 -0
  88. data/ext/asmjit/asmjit/src/asmjit/core/inst.h +772 -0
  89. data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.cpp +1242 -0
  90. data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.h +261 -0
  91. data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.cpp +80 -0
  92. data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.h +89 -0
  93. data/ext/asmjit/asmjit/src/asmjit/core/logger.cpp +69 -0
  94. data/ext/asmjit/asmjit/src/asmjit/core/logger.h +198 -0
  95. data/ext/asmjit/asmjit/src/asmjit/core/misc_p.h +33 -0
  96. data/ext/asmjit/asmjit/src/asmjit/core/operand.cpp +132 -0
  97. data/ext/asmjit/asmjit/src/asmjit/core/operand.h +1611 -0
  98. data/ext/asmjit/asmjit/src/asmjit/core/osutils.cpp +84 -0
  99. data/ext/asmjit/asmjit/src/asmjit/core/osutils.h +61 -0
  100. data/ext/asmjit/asmjit/src/asmjit/core/osutils_p.h +68 -0
  101. data/ext/asmjit/asmjit/src/asmjit/core/raassignment_p.h +418 -0
  102. data/ext/asmjit/asmjit/src/asmjit/core/rabuilders_p.h +612 -0
  103. data/ext/asmjit/asmjit/src/asmjit/core/radefs_p.h +1204 -0
  104. data/ext/asmjit/asmjit/src/asmjit/core/ralocal.cpp +1166 -0
  105. data/ext/asmjit/asmjit/src/asmjit/core/ralocal_p.h +254 -0
  106. data/ext/asmjit/asmjit/src/asmjit/core/rapass.cpp +1969 -0
  107. data/ext/asmjit/asmjit/src/asmjit/core/rapass_p.h +1183 -0
  108. data/ext/asmjit/asmjit/src/asmjit/core/rastack.cpp +184 -0
  109. data/ext/asmjit/asmjit/src/asmjit/core/rastack_p.h +171 -0
  110. data/ext/asmjit/asmjit/src/asmjit/core/string.cpp +559 -0
  111. data/ext/asmjit/asmjit/src/asmjit/core/string.h +372 -0
  112. data/ext/asmjit/asmjit/src/asmjit/core/support.cpp +494 -0
  113. data/ext/asmjit/asmjit/src/asmjit/core/support.h +1773 -0
  114. data/ext/asmjit/asmjit/src/asmjit/core/target.cpp +14 -0
  115. data/ext/asmjit/asmjit/src/asmjit/core/target.h +53 -0
  116. data/ext/asmjit/asmjit/src/asmjit/core/type.cpp +74 -0
  117. data/ext/asmjit/asmjit/src/asmjit/core/type.h +419 -0
  118. data/ext/asmjit/asmjit/src/asmjit/core/virtmem.cpp +722 -0
  119. data/ext/asmjit/asmjit/src/asmjit/core/virtmem.h +242 -0
  120. data/ext/asmjit/asmjit/src/asmjit/core/zone.cpp +353 -0
  121. data/ext/asmjit/asmjit/src/asmjit/core/zone.h +615 -0
  122. data/ext/asmjit/asmjit/src/asmjit/core/zonehash.cpp +309 -0
  123. data/ext/asmjit/asmjit/src/asmjit/core/zonehash.h +186 -0
  124. data/ext/asmjit/asmjit/src/asmjit/core/zonelist.cpp +163 -0
  125. data/ext/asmjit/asmjit/src/asmjit/core/zonelist.h +209 -0
  126. data/ext/asmjit/asmjit/src/asmjit/core/zonestack.cpp +176 -0
  127. data/ext/asmjit/asmjit/src/asmjit/core/zonestack.h +239 -0
  128. data/ext/asmjit/asmjit/src/asmjit/core/zonestring.h +120 -0
  129. data/ext/asmjit/asmjit/src/asmjit/core/zonetree.cpp +99 -0
  130. data/ext/asmjit/asmjit/src/asmjit/core/zonetree.h +380 -0
  131. data/ext/asmjit/asmjit/src/asmjit/core/zonevector.cpp +356 -0
  132. data/ext/asmjit/asmjit/src/asmjit/core/zonevector.h +690 -0
  133. data/ext/asmjit/asmjit/src/asmjit/core.h +1861 -0
  134. data/ext/asmjit/asmjit/src/asmjit/x86/x86archtraits_p.h +148 -0
  135. data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.cpp +5110 -0
  136. data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.h +685 -0
  137. data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.cpp +52 -0
  138. data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.h +351 -0
  139. data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.cpp +61 -0
  140. data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.h +721 -0
  141. data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper.cpp +619 -0
  142. data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper_p.h +60 -0
  143. data/ext/asmjit/asmjit/src/asmjit/x86/x86emitter.h +4315 -0
  144. data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter.cpp +944 -0
  145. data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter_p.h +58 -0
  146. data/ext/asmjit/asmjit/src/asmjit/x86/x86func.cpp +503 -0
  147. data/ext/asmjit/asmjit/src/asmjit/x86/x86func_p.h +33 -0
  148. data/ext/asmjit/asmjit/src/asmjit/x86/x86globals.h +2169 -0
  149. data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi.cpp +1732 -0
  150. data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi_p.h +41 -0
  151. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.cpp +4427 -0
  152. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.h +563 -0
  153. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb_p.h +311 -0
  154. data/ext/asmjit/asmjit/src/asmjit/x86/x86opcode_p.h +436 -0
  155. data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.cpp +231 -0
  156. data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.h +1085 -0
  157. data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass.cpp +1509 -0
  158. data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass_p.h +94 -0
  159. data/ext/asmjit/asmjit/src/asmjit/x86.h +93 -0
  160. data/ext/asmjit/asmjit/src/asmjit.natvis +245 -0
  161. data/ext/asmjit/asmjit/test/asmjit_test_assembler.cpp +84 -0
  162. data/ext/asmjit/asmjit/test/asmjit_test_assembler.h +85 -0
  163. data/ext/asmjit/asmjit/test/asmjit_test_assembler_a64.cpp +4006 -0
  164. data/ext/asmjit/asmjit/test/asmjit_test_assembler_x64.cpp +17833 -0
  165. data/ext/asmjit/asmjit/test/asmjit_test_assembler_x86.cpp +8300 -0
  166. data/ext/asmjit/asmjit/test/asmjit_test_compiler.cpp +253 -0
  167. data/ext/asmjit/asmjit/test/asmjit_test_compiler.h +73 -0
  168. data/ext/asmjit/asmjit/test/asmjit_test_compiler_a64.cpp +690 -0
  169. data/ext/asmjit/asmjit/test/asmjit_test_compiler_x86.cpp +4317 -0
  170. data/ext/asmjit/asmjit/test/asmjit_test_emitters.cpp +197 -0
  171. data/ext/asmjit/asmjit/test/asmjit_test_instinfo.cpp +181 -0
  172. data/ext/asmjit/asmjit/test/asmjit_test_misc.h +257 -0
  173. data/ext/asmjit/asmjit/test/asmjit_test_perf.cpp +62 -0
  174. data/ext/asmjit/asmjit/test/asmjit_test_perf.h +61 -0
  175. data/ext/asmjit/asmjit/test/asmjit_test_perf_a64.cpp +699 -0
  176. data/ext/asmjit/asmjit/test/asmjit_test_perf_x86.cpp +5032 -0
  177. data/ext/asmjit/asmjit/test/asmjit_test_unit.cpp +172 -0
  178. data/ext/asmjit/asmjit/test/asmjit_test_x86_sections.cpp +172 -0
  179. data/ext/asmjit/asmjit/test/asmjitutils.h +38 -0
  180. data/ext/asmjit/asmjit/test/broken.cpp +312 -0
  181. data/ext/asmjit/asmjit/test/broken.h +148 -0
  182. data/ext/asmjit/asmjit/test/cmdline.h +61 -0
  183. data/ext/asmjit/asmjit/test/performancetimer.h +41 -0
  184. data/ext/asmjit/asmjit/tools/configure-makefiles.sh +13 -0
  185. data/ext/asmjit/asmjit/tools/configure-ninja.sh +13 -0
  186. data/ext/asmjit/asmjit/tools/configure-sanitizers.sh +13 -0
  187. data/ext/asmjit/asmjit/tools/configure-vs2019-x64.bat +2 -0
  188. data/ext/asmjit/asmjit/tools/configure-vs2019-x86.bat +2 -0
  189. data/ext/asmjit/asmjit/tools/configure-vs2022-x64.bat +2 -0
  190. data/ext/asmjit/asmjit/tools/configure-vs2022-x86.bat +2 -0
  191. data/ext/asmjit/asmjit/tools/configure-xcode.sh +8 -0
  192. data/ext/asmjit/asmjit/tools/enumgen.js +417 -0
  193. data/ext/asmjit/asmjit/tools/enumgen.sh +3 -0
  194. data/ext/asmjit/asmjit/tools/tablegen-arm.js +365 -0
  195. data/ext/asmjit/asmjit/tools/tablegen-arm.sh +3 -0
  196. data/ext/asmjit/asmjit/tools/tablegen-x86.js +2638 -0
  197. data/ext/asmjit/asmjit/tools/tablegen-x86.sh +3 -0
  198. data/ext/asmjit/asmjit/tools/tablegen.js +947 -0
  199. data/ext/asmjit/asmjit/tools/tablegen.sh +4 -0
  200. data/ext/asmjit/asmjit.cc +167 -30
  201. data/ext/asmjit/extconf.rb +9 -9
  202. data/lib/asmjit/version.rb +1 -1
  203. data/lib/asmjit.rb +14 -4
  204. metadata +198 -17
@@ -0,0 +1,1204 @@
1
+ // This file is part of AsmJit project <https://asmjit.com>
2
+ //
3
+ // See asmjit.h or LICENSE.md for license and copyright information
4
+ // SPDX-License-Identifier: Zlib
5
+
6
+ #ifndef ASMJIT_CORE_RADEFS_P_H_INCLUDED
7
+ #define ASMJIT_CORE_RADEFS_P_H_INCLUDED
8
+
9
+ #include "../core/api-config.h"
10
+ #include "../core/archtraits.h"
11
+ #include "../core/compilerdefs.h"
12
+ #include "../core/logger.h"
13
+ #include "../core/operand.h"
14
+ #include "../core/support.h"
15
+ #include "../core/type.h"
16
+ #include "../core/zone.h"
17
+ #include "../core/zonevector.h"
18
+
19
+ ASMJIT_BEGIN_NAMESPACE
20
+
21
+ //! \cond INTERNAL
22
+ //! \addtogroup asmjit_ra
23
+ //! \{
24
+
25
// Logging helpers used throughout the register allocator. Both macros expand
// to a single no-op statement when logging support is compiled out.
#ifndef ASMJIT_NO_LOGGING
// Formats a message through a `logger` local visible at the expansion site,
// only when that logger is non-null.
# define ASMJIT_RA_LOG_FORMAT(...) \
  do { \
    if (logger) { \
      logger->logf(__VA_ARGS__); \
    } \
  } while (0)
// Executes an arbitrary block of statements only when `logger` is non-null.
# define ASMJIT_RA_LOG_COMPLEX(...) \
  do { \
    if (logger) { \
      __VA_ARGS__ \
    } \
  } while (0)
#else
# define ASMJIT_RA_LOG_FORMAT(...) ((void)0)
# define ASMJIT_RA_LOG_COMPLEX(...) ((void)0)
#endif
41
+
42
+ class BaseRAPass;
43
+ class RABlock;
44
+ class BaseNode;
45
+ struct RAStackSlot;
46
+
47
+ typedef ZoneVector<RABlock*> RABlocks;
48
+ typedef ZoneVector<RAWorkReg*> RAWorkRegs;
49
+
50
+ //! Maximum number of consecutive registers aggregated from all supported backends.
51
+ static constexpr uint32_t kMaxConsecutiveRegs = 4;
52
+
53
+ //! Provides architecture constraints used by register allocator.
54
+ class RAConstraints {
55
+ public:
56
+ //! \name Members
57
+ //! \{
58
+
59
+ Support::Array<RegMask, Globals::kNumVirtGroups> _availableRegs {};
60
+
61
+ //! \}
62
+
63
+ ASMJIT_NOINLINE Error init(Arch arch) noexcept {
64
+ switch (arch) {
65
+ case Arch::kX86:
66
+ case Arch::kX64: {
67
+ uint32_t registerCount = arch == Arch::kX86 ? 8 : 16;
68
+ _availableRegs[RegGroup::kGp] = Support::lsbMask<RegMask>(registerCount) & ~Support::bitMask(4u);
69
+ _availableRegs[RegGroup::kVec] = Support::lsbMask<RegMask>(registerCount);
70
+ _availableRegs[RegGroup::kExtraVirt2] = Support::lsbMask<RegMask>(8);
71
+ _availableRegs[RegGroup::kExtraVirt3] = Support::lsbMask<RegMask>(8);
72
+ return kErrorOk;
73
+ }
74
+
75
+ case Arch::kAArch64: {
76
+ _availableRegs[RegGroup::kGp] = 0xFFFFFFFFu & ~Support::bitMask(18, 31u);
77
+ _availableRegs[RegGroup::kVec] = 0xFFFFFFFFu;
78
+ _availableRegs[RegGroup::kExtraVirt2] = 0;
79
+ _availableRegs[RegGroup::kExtraVirt3] = 0;
80
+ return kErrorOk;
81
+ }
82
+
83
+ default:
84
+ return DebugUtils::errored(kErrorInvalidArch);
85
+ }
86
+ }
87
+
88
+ inline RegMask availableRegs(RegGroup group) const noexcept { return _availableRegs[group]; }
89
+ };
90
+
91
+ enum class RAStrategyType : uint8_t {
92
+ kSimple = 0,
93
+ kComplex = 1
94
+ };
95
+ ASMJIT_DEFINE_ENUM_COMPARE(RAStrategyType)
96
+
97
+ enum class RAStrategyFlags : uint8_t {
98
+ kNone = 0
99
+ };
100
+ ASMJIT_DEFINE_ENUM_FLAGS(RAStrategyFlags)
101
+
102
+ //! Register allocation strategy.
103
+ //!
104
+ //! The idea is to select the best register allocation strategy for each virtual register group based on the
105
+ //! complexity of the code.
106
+ struct RAStrategy {
107
+ //! \name Members
108
+ //! \{
109
+
110
+ RAStrategyType _type = RAStrategyType::kSimple;
111
+ RAStrategyFlags _flags = RAStrategyFlags::kNone;
112
+
113
+ //! \}
114
+
115
+ //! \name Accessors
116
+ //! \{
117
+
118
+ inline void reset() noexcept {
119
+ _type = RAStrategyType::kSimple;
120
+ _flags = RAStrategyFlags::kNone;
121
+ }
122
+
123
+ inline RAStrategyType type() const noexcept { return _type; }
124
+ inline void setType(RAStrategyType type) noexcept { _type = type; }
125
+
126
+ inline bool isSimple() const noexcept { return _type == RAStrategyType::kSimple; }
127
+ inline bool isComplex() const noexcept { return _type >= RAStrategyType::kComplex; }
128
+
129
+ inline RAStrategyFlags flags() const noexcept { return _flags; }
130
+ inline bool hasFlag(RAStrategyFlags flag) const noexcept { return Support::test(_flags, flag); }
131
+ inline void addFlags(RAStrategyFlags flags) noexcept { _flags |= flags; }
132
+
133
+ //! \}
134
+ };
135
+
136
+ //! Count of virtual or physical registers per group.
137
+ //!
138
+ //! \note This class uses 8-bit integers to represent counters, it's only used in places where this is sufficient,
139
+ //! for example total count of machine's physical registers, count of virtual registers per instruction, etc...
140
+ //! There is also `RALiveCount`, which uses 32-bit integers and is indeed much safer.
141
+ struct RARegCount {
142
+ //! \name Members
143
+ //! \{
144
+
145
+ union {
146
+ uint8_t _regs[4];
147
+ uint32_t _packed;
148
+ };
149
+
150
+ //! \}
151
+
152
+ //! \name Construction & Destruction
153
+ //! \{
154
+
155
+ //! Resets all counters to zero.
156
+ inline void reset() noexcept { _packed = 0; }
157
+
158
+ //! \}
159
+
160
+ //! \name Overloaded Operators
161
+ //! \{
162
+
163
+ inline uint8_t& operator[](RegGroup group) noexcept {
164
+ ASMJIT_ASSERT(group <= RegGroup::kMaxVirt);
165
+ return _regs[size_t(group)];
166
+ }
167
+
168
+ inline const uint8_t& operator[](RegGroup group) const noexcept {
169
+ ASMJIT_ASSERT(group <= RegGroup::kMaxVirt);
170
+ return _regs[size_t(group)];
171
+ }
172
+
173
+ inline bool operator==(const RARegCount& other) const noexcept { return _packed == other._packed; }
174
+ inline bool operator!=(const RARegCount& other) const noexcept { return _packed != other._packed; }
175
+
176
+ //! \}
177
+
178
+ //! \name Accessors
179
+ //! \{
180
+
181
+ //! Returns the count of registers by the given register `group`.
182
+ inline uint32_t get(RegGroup group) const noexcept {
183
+ ASMJIT_ASSERT(group <= RegGroup::kMaxVirt);
184
+
185
+ uint32_t shift = Support::byteShiftOfDWordStruct(uint32_t(group));
186
+ return (_packed >> shift) & uint32_t(0xFF);
187
+ }
188
+
189
+ //! Sets the register count by a register `group`.
190
+ inline void set(RegGroup group, uint32_t n) noexcept {
191
+ ASMJIT_ASSERT(group <= RegGroup::kMaxVirt);
192
+ ASMJIT_ASSERT(n <= 0xFF);
193
+
194
+ uint32_t shift = Support::byteShiftOfDWordStruct(uint32_t(group));
195
+ _packed = (_packed & ~uint32_t(0xFF << shift)) + (n << shift);
196
+ }
197
+
198
+ //! Adds the register count by a register `group`.
199
+ inline void add(RegGroup group, uint32_t n = 1) noexcept {
200
+ ASMJIT_ASSERT(group <= RegGroup::kMaxVirt);
201
+ ASMJIT_ASSERT(0xFF - uint32_t(_regs[size_t(group)]) >= n);
202
+
203
+ uint32_t shift = Support::byteShiftOfDWordStruct(uint32_t(group));
204
+ _packed += n << shift;
205
+ }
206
+
207
+ //! \}
208
+ };
209
+
210
+ //! Provides mapping that can be used to fast index architecture register groups.
211
+ struct RARegIndex : public RARegCount {
212
+ //! Build register indexes based on the given `count` of registers.
213
+ ASMJIT_FORCE_INLINE void buildIndexes(const RARegCount& count) noexcept {
214
+ uint32_t x = uint32_t(count._regs[0]);
215
+ uint32_t y = uint32_t(count._regs[1]) + x;
216
+ uint32_t z = uint32_t(count._regs[2]) + y;
217
+
218
+ ASMJIT_ASSERT(y <= 0xFF);
219
+ ASMJIT_ASSERT(z <= 0xFF);
220
+ _packed = Support::bytepack32_4x8(0, x, y, z);
221
+ }
222
+ };
223
+
224
+ //! Registers mask.
225
+ struct RARegMask {
226
+ //! \name Members
227
+ //! \{
228
+
229
+ Support::Array<RegMask, Globals::kNumVirtGroups> _masks;
230
+
231
+ //! \}
232
+
233
+ //! \name Construction & Destruction
234
+ //! \{
235
+
236
+ inline void init(const RARegMask& other) noexcept { _masks = other._masks; }
237
+ //! Reset all register masks to zero.
238
+ inline void reset() noexcept { _masks.fill(0); }
239
+
240
+ //! \}
241
+
242
+ //! \name Overloaded Operators
243
+ //! \{
244
+
245
+ inline bool operator==(const RARegMask& other) const noexcept { return _masks == other._masks; }
246
+ inline bool operator!=(const RARegMask& other) const noexcept { return _masks != other._masks; }
247
+
248
+ template<typename Index>
249
+ inline uint32_t& operator[](const Index& index) noexcept { return _masks[index]; }
250
+
251
+ template<typename Index>
252
+ inline const uint32_t& operator[](const Index& index) const noexcept { return _masks[index]; }
253
+
254
+ //! \}
255
+
256
+ //! \name Utilities
257
+ //! \{
258
+
259
+ //! Tests whether all register masks are zero (empty).
260
+ inline bool empty() const noexcept {
261
+ return _masks.aggregate<Support::Or>() == 0;
262
+ }
263
+
264
+ inline bool has(RegGroup group, RegMask mask = 0xFFFFFFFFu) const noexcept {
265
+ return (_masks[group] & mask) != 0;
266
+ }
267
+
268
+ template<class Operator>
269
+ inline void op(const RARegMask& other) noexcept {
270
+ _masks.combine<Operator>(other._masks);
271
+ }
272
+
273
+ template<class Operator>
274
+ inline void op(RegGroup group, RegMask mask) noexcept {
275
+ _masks[group] = Operator::op(_masks[group], mask);
276
+ }
277
+
278
+ inline void clear(RegGroup group, RegMask mask) noexcept {
279
+ _masks[group] = _masks[group] & ~mask;
280
+ }
281
+
282
+ //! \}
283
+ };
284
+
285
+ //! Information associated with each instruction, propagated to blocks, loops, and the whole function. This
286
+ //! information can be used to do minor decisions before the register allocator tries to do its job. For
287
+ //! example to use fast register allocation inside a block or loop it cannot have clobbered and/or fixed
288
+ //! registers, etc...
289
+ class RARegsStats {
290
+ public:
291
+ //! \name Constants
292
+ //! \{
293
+
294
+ enum Index : uint32_t {
295
+ kIndexUsed = 0,
296
+ kIndexFixed = 8,
297
+ kIndexClobbered = 16
298
+ };
299
+
300
+ enum Mask : uint32_t {
301
+ kMaskUsed = 0xFFu << kIndexUsed,
302
+ kMaskFixed = 0xFFu << kIndexFixed,
303
+ kMaskClobbered = 0xFFu << kIndexClobbered
304
+ };
305
+
306
+ //! \}
307
+
308
+ //! \name Members
309
+ //! \{
310
+
311
+ uint32_t _packed = 0;
312
+
313
+ //! \}
314
+
315
+ //! \name Accessors
316
+ //! \{
317
+
318
+ inline void reset() noexcept { _packed = 0; }
319
+ inline void combineWith(const RARegsStats& other) noexcept { _packed |= other._packed; }
320
+
321
+ inline bool hasUsed() const noexcept { return (_packed & kMaskUsed) != 0u; }
322
+ inline bool hasUsed(RegGroup group) const noexcept { return (_packed & Support::bitMask(kIndexUsed + uint32_t(group))) != 0u; }
323
+ inline void makeUsed(RegGroup group) noexcept { _packed |= Support::bitMask(kIndexUsed + uint32_t(group)); }
324
+
325
+ inline bool hasFixed() const noexcept { return (_packed & kMaskFixed) != 0u; }
326
+ inline bool hasFixed(RegGroup group) const noexcept { return (_packed & Support::bitMask(kIndexFixed + uint32_t(group))) != 0u; }
327
+ inline void makeFixed(RegGroup group) noexcept { _packed |= Support::bitMask(kIndexFixed + uint32_t(group)); }
328
+
329
+ inline bool hasClobbered() const noexcept { return (_packed & kMaskClobbered) != 0u; }
330
+ inline bool hasClobbered(RegGroup group) const noexcept { return (_packed & Support::bitMask(kIndexClobbered + uint32_t(group))) != 0u; }
331
+ inline void makeClobbered(RegGroup group) noexcept { _packed |= Support::bitMask(kIndexClobbered + uint32_t(group)); }
332
+
333
+ //! \}
334
+ };
335
+
336
+ //! Count of live registers, per group.
337
+ class RALiveCount {
338
+ public:
339
+ //! \name Members
340
+ //! \{
341
+
342
+ Support::Array<uint32_t, Globals::kNumVirtGroups> n {};
343
+
344
+ //! \}
345
+
346
+ //! \name Construction & Destruction
347
+ //! \{
348
+
349
+ inline RALiveCount() noexcept = default;
350
+ inline RALiveCount(const RALiveCount& other) noexcept = default;
351
+
352
+ inline void init(const RALiveCount& other) noexcept { n = other.n; }
353
+ inline void reset() noexcept { n.fill(0); }
354
+
355
+ //! \}
356
+
357
+ //! \name Overloaded Operators
358
+ //! \{
359
+
360
+ inline RALiveCount& operator=(const RALiveCount& other) noexcept = default;
361
+
362
+ inline uint32_t& operator[](RegGroup group) noexcept { return n[group]; }
363
+ inline const uint32_t& operator[](RegGroup group) const noexcept { return n[group]; }
364
+
365
+ //! \}
366
+
367
+ //! \name Utilities
368
+ //! \{
369
+
370
+ template<class Operator>
371
+ inline void op(const RALiveCount& other) noexcept { n.combine<Operator>(other.n); }
372
+
373
+ //! \}
374
+ };
375
+
376
//! A half-open interval `[a, b)` describing a range where a register is live.
struct RALiveInterval {
  //! \name Constants
  //! \{

  enum : uint32_t {
    //! Not-a-position marker.
    kNaN = 0,
    //! End position of an interval that is still open.
    kInf = 0xFFFFFFFFu
  };

  //! \}

  //! \name Members
  //! \{

  //! Start (inclusive) and end (exclusive) positions.
  uint32_t a, b;

  //! \}

  //! \name Construction & Destruction
  //! \{

  inline RALiveInterval() noexcept : a(0), b(0) {}
  inline RALiveInterval(uint32_t a, uint32_t b) noexcept : a(a), b(b) {}
  inline RALiveInterval(const RALiveInterval& other) noexcept : a(other.a), b(other.b) {}

  //! Initializes the interval to `[start, end)`.
  inline void init(uint32_t start, uint32_t end) noexcept {
    a = start;
    b = end;
  }
  //! Initializes the interval by copying `other`.
  inline void init(const RALiveInterval& other) noexcept { init(other.a, other.b); }
  //! Resets the interval to the empty `[0, 0)` range.
  inline void reset() noexcept { init(0, 0); }

  //! \}

  //! \name Overloaded Operators
  //! \{

  inline RALiveInterval& operator=(const RALiveInterval& other) = default;

  //! \}

  //! \name Accessors
  //! \{

  //! Tests whether the interval is non-empty (`a < b`).
  inline bool isValid() const noexcept { return a < b; }
  //! Returns the number of positions covered by the interval.
  inline uint32_t width() const noexcept { return b - a; }

  //! \}
};
425
+
426
+ //! Live span with payload of type `T`.
427
+ template<typename T>
428
+ class RALiveSpan : public RALiveInterval, public T {
429
+ public:
430
+ //! \name Types
431
+ //! \{
432
+
433
+ typedef T DataType;
434
+
435
+ //! \}
436
+
437
+ //! \name Construction & Destruction
438
+ //! \{
439
+
440
+ inline RALiveSpan() noexcept : RALiveInterval(), T() {}
441
+ inline RALiveSpan(const RALiveSpan<T>& other) noexcept : RALiveInterval(other), T() {}
442
+ inline RALiveSpan(const RALiveInterval& interval, const T& data) noexcept : RALiveInterval(interval), T(data) {}
443
+ inline RALiveSpan(uint32_t a, uint32_t b) noexcept : RALiveInterval(a, b), T() {}
444
+ inline RALiveSpan(uint32_t a, uint32_t b, const T& data) noexcept : RALiveInterval(a, b), T(data) {}
445
+
446
+ inline void init(const RALiveSpan<T>& other) noexcept {
447
+ RALiveInterval::init(static_cast<const RALiveInterval&>(other));
448
+ T::init(static_cast<const T&>(other));
449
+ }
450
+
451
+ inline void init(const RALiveSpan<T>& span, const T& data) noexcept {
452
+ RALiveInterval::init(static_cast<const RALiveInterval&>(span));
453
+ T::init(data);
454
+ }
455
+
456
+ inline void init(const RALiveInterval& interval, const T& data) noexcept {
457
+ RALiveInterval::init(interval);
458
+ T::init(data);
459
+ }
460
+
461
+ //! \}
462
+
463
+ //! \name Overloaded Operators
464
+ //! \{
465
+
466
+ inline RALiveSpan& operator=(const RALiveSpan& other) {
467
+ init(other);
468
+ return *this;
469
+ }
470
+
471
+ //! \}
472
+ };
473
+
474
+ //! Vector of `RALiveSpan<T>` with additional convenience API.
475
+ template<typename T>
476
+ class RALiveSpans {
477
+ public:
478
+ ASMJIT_NONCOPYABLE(RALiveSpans)
479
+
480
+ typedef typename T::DataType DataType;
481
+ ZoneVector<T> _data;
482
+
483
+ //! \name Construction & Destruction
484
+ //! \{
485
+
486
+ inline RALiveSpans() noexcept : _data() {}
487
+
488
+ inline void reset() noexcept { _data.reset(); }
489
+ inline void release(ZoneAllocator* allocator) noexcept { _data.release(allocator); }
490
+
491
+ //! \}
492
+
493
+ //! \name Accessors
494
+ //! \{
495
+
496
+ inline bool empty() const noexcept { return _data.empty(); }
497
+ inline uint32_t size() const noexcept { return _data.size(); }
498
+
499
+ inline T* data() noexcept { return _data.data(); }
500
+ inline const T* data() const noexcept { return _data.data(); }
501
+
502
+ inline bool isOpen() const noexcept {
503
+ uint32_t size = _data.size();
504
+ return size > 0 && _data[size - 1].b == RALiveInterval::kInf;
505
+ }
506
+
507
+ //! \}
508
+
509
+ //! \name Utilities
510
+ //! \{
511
+
512
+ inline void swap(RALiveSpans<T>& other) noexcept { _data.swap(other._data); }
513
+
514
+ //! Open the current live span.
515
+ ASMJIT_FORCE_INLINE Error openAt(ZoneAllocator* allocator, uint32_t start, uint32_t end) noexcept {
516
+ bool wasOpen;
517
+ return openAt(allocator, start, end, wasOpen);
518
+ }
519
+
520
+ ASMJIT_FORCE_INLINE Error openAt(ZoneAllocator* allocator, uint32_t start, uint32_t end, bool& wasOpen) noexcept {
521
+ uint32_t size = _data.size();
522
+ wasOpen = false;
523
+
524
+ if (size > 0) {
525
+ T& last = _data[size - 1];
526
+ if (last.b >= start) {
527
+ wasOpen = last.b > start;
528
+ last.b = end;
529
+ return kErrorOk;
530
+ }
531
+ }
532
+
533
+ return _data.append(allocator, T(start, end));
534
+ }
535
+
536
+ ASMJIT_FORCE_INLINE void closeAt(uint32_t end) noexcept {
537
+ ASMJIT_ASSERT(!empty());
538
+
539
+ uint32_t size = _data.size();
540
+ _data[size - 1].b = end;
541
+ }
542
+
543
+ //! Returns the sum of width of all spans.
544
+ //!
545
+ //! \note Don't overuse, this iterates over all spans so it's O(N). It should be only called once and then cached.
546
+ inline uint32_t width() const noexcept {
547
+ uint32_t width = 0;
548
+ for (const T& span : _data)
549
+ width += span.width();
550
+ return width;
551
+ }
552
+
553
+ inline T& operator[](uint32_t index) noexcept { return _data[index]; }
554
+ inline const T& operator[](uint32_t index) const noexcept { return _data[index]; }
555
+
556
+ inline bool intersects(const RALiveSpans<T>& other) const noexcept {
557
+ return intersects(*this, other);
558
+ }
559
+
560
//! Replaces the content of these spans with the union of the sorted span lists `x` and `y`, provided they do not
//! overlap; `yData` is the payload attached to every span copied from `y`.
//!
//! This is a merge-sort style linear merge. If an overlap between an `x` span and a `y` span is detected the merge
//! is aborted and the special value `0xFFFFFFFFu` is returned instead of a regular \ref Error code.
//! NOTE(review): callers presumably test for this sentinel explicitly to fall back to another strategy - confirm
//! against call sites before changing it.
ASMJIT_FORCE_INLINE Error nonOverlappingUnionOf(ZoneAllocator* allocator, const RALiveSpans<T>& x, const RALiveSpans<T>& y, const DataType& yData) noexcept {
  // Reserve the worst-case size upfront so `dstPtr` can be advanced without bounds checks below.
  uint32_t finalSize = x.size() + y.size();
  ASMJIT_PROPAGATE(_data.reserve(allocator, finalSize));

  T* dstPtr = _data.data();
  const T* xSpan = x.data();
  const T* ySpan = y.data();

  const T* xEnd = xSpan + x.size();
  const T* yEnd = ySpan + y.size();

  // Loop until we have intersection or either `xSpan == xEnd` or `ySpan == yEnd`, which means that there is no
  // intersection. We advance either `xSpan` or `ySpan` depending on their ranges.
  if (xSpan != xEnd && ySpan != yEnd) {
    uint32_t xa, ya;
    xa = xSpan->a;
    for (;;) {
      // Emit every `y` span that ends at or before the start of the current `x` span.
      while (ySpan->b <= xa) {
        dstPtr->init(*ySpan, yData);
        dstPtr++;
        if (++ySpan == yEnd)
          goto Done;
      }

      // Emit every `x` span that ends at or before the start of the current `y` span.
      ya = ySpan->a;
      while (xSpan->b <= ya) {
        *dstPtr++ = *xSpan;
        if (++xSpan == xEnd)
          goto Done;
      }

      // We know that `xSpan->b > ySpan->a`, so check if `ySpan->b > xSpan->a`.
      xa = xSpan->a;
      if (ySpan->b > xa)
        return 0xFFFFFFFFu; // Overlap detected - sentinel, not a regular error code.
    }
  }

Done:
  // One of the inputs is exhausted - append the remainder of the other (at most one of these loops does work).
  while (xSpan != xEnd) {
    *dstPtr++ = *xSpan++;
  }

  while (ySpan != yEnd) {
    dstPtr->init(*ySpan, yData);
    dstPtr++;
    ySpan++;
  }

  _data._setEndPtr(dstPtr);
  return kErrorOk;
}
612
+
613
//! Tests whether the sorted span lists `x` and `y` overlap anywhere.
//!
//! Linear two-pointer sweep, O(x.size() + y.size()); no allocation.
static ASMJIT_FORCE_INLINE bool intersects(const RALiveSpans<T>& x, const RALiveSpans<T>& y) noexcept {
  const T* xSpan = x.data();
  const T* ySpan = y.data();

  const T* xEnd = xSpan + x.size();
  const T* yEnd = ySpan + y.size();

  // Loop until we have intersection or either `xSpan == xEnd` or `ySpan == yEnd`, which means that there is no
  // intersection. We advance either `xSpan` or `ySpan` depending on their end positions.
  if (xSpan == xEnd || ySpan == yEnd)
    return false;

  uint32_t xa, ya;
  xa = xSpan->a;

  for (;;) {
    // Skip every `y` span that ends at or before the start of the current `x` span.
    while (ySpan->b <= xa)
      if (++ySpan == yEnd)
        return false;

    // Skip every `x` span that ends at or before the start of the current `y` span.
    ya = ySpan->a;
    while (xSpan->b <= ya)
      if (++xSpan == xEnd)
        return false;

    // We know that `xSpan->b > ySpan->a`, so check if `ySpan->b > xSpan->a`.
    xa = xSpan->a;
    if (ySpan->b > xa)
      return true;
  }
}
644
+
645
+ //! \}
646
+ };
647
+
648
//! Statistics about a register liveness.
class RALiveStats {
public:
  //! Cumulative width of all live spans (see \ref RALiveSpans::width()).
  uint32_t _width {0};
  //! Use frequency of the register.
  float _freq {0.0f};
  //! Allocation priority of the register.
  float _priority {0.0f};

  //! \name Accessors
  //! \{

  //! Returns the cumulative live width.
  inline uint32_t width() const noexcept { return _width; }
  //! Returns the use frequency.
  inline float freq() const noexcept { return _freq; }
  //! Returns the allocation priority.
  inline float priority() const noexcept { return _priority; }

  //! \}
};
664
+
665
+ struct LiveRegData {
666
+ uint32_t id;
667
+
668
+ inline explicit LiveRegData(uint32_t id = BaseReg::kIdBad) noexcept : id(id) {}
669
+ inline LiveRegData(const LiveRegData& other) noexcept : id(other.id) {}
670
+
671
+ inline void init(const LiveRegData& other) noexcept { id = other.id; }
672
+
673
+ inline bool operator==(const LiveRegData& other) const noexcept { return id == other.id; }
674
+ inline bool operator!=(const LiveRegData& other) const noexcept { return id != other.id; }
675
+ };
676
+
677
//! Live span of a physical register, carrying \ref LiveRegData as payload.
typedef RALiveSpan<LiveRegData> LiveRegSpan;
//! Sorted list of \ref LiveRegSpan used to track liveness of a physical register.
typedef RALiveSpans<LiveRegSpan> LiveRegSpans;
679
+
680
//! Flags used by \ref RATiedReg.
//!
//! Register access information is encoded in 4 flags in total:
//!
//! - `kRead` - Register is Read (ReadWrite if combined with `kWrite`).
//! - `kWrite` - Register is Written (ReadWrite if combined with `kRead`).
//! - `kUse` - Encoded as Read or ReadWrite.
//! - `kOut` - Encoded as WriteOnly.
//!
//! Let's describe all of these on the following X86 instructions:
//!
//! - ADD x{R|W|Use}, x{R|Use} -> {x:R|W|Use }
//! - LEA x{ W|Out}, [x{R|Use} + x{R|Out}] -> {x:R|W|Use|Out }
//! - ADD x{R|W|Use}, y{R|Use} -> {x:R|W|Use y:R|Use}
//! - LEA x{ W|Out}, [x{R|Use} + y{R|Out}] -> {x:R|W|Use|Out y:R|Use}
//!
//! It should be obvious from the example above how these flags get created. Each operand contains READ/WRITE
//! information, which is then merged to RATiedReg's flags. However, we also need to represent the possibility
//! to view the operation as two independent operations - USE and OUT, because the register allocator first
//! allocates USE registers, and then assigns OUT registers independently of USE registers.
enum class RATiedFlags : uint32_t {
  //! No flags.
  kNone = 0,

  // Access Flags
  // ------------

  // NOTE: The numeric values of kRead/kWrite/kRW intentionally mirror OpRWFlags (see the static_asserts that
  // follow this enum), so the low access bits can be copied between the two enums without translation.

  //! Register is read.
  kRead = uint32_t(OpRWFlags::kRead),
  //! Register is written.
  kWrite = uint32_t(OpRWFlags::kWrite),
  //! Register both read and written.
  kRW = uint32_t(OpRWFlags::kRW),

  // Use / Out Flags
  // ---------------

  //! Register has a USE slot (read/rw).
  kUse = 0x00000004u,
  //! Register has an OUT slot (write-only).
  kOut = 0x00000008u,
  //! Register in USE slot can be patched to memory.
  kUseRM = 0x00000010u,
  //! Register in OUT slot can be patched to memory.
  kOutRM = 0x00000020u,

  //! Register has a fixed USE slot.
  kUseFixed = 0x00000040u,
  //! Register has a fixed OUT slot.
  kOutFixed = 0x00000080u,
  //! Register USE slot has been allocated.
  kUseDone = 0x00000100u,
  //! Register OUT slot has been allocated.
  kOutDone = 0x00000200u,

  // Consecutive Flags / Data
  // ------------------------

  //! USE slot is part of a consecutive register sequence.
  kUseConsecutive = 0x00000400u,
  //! OUT slot is part of a consecutive register sequence.
  kOutConsecutive = 0x00000800u,
  //! Register is the lead of a consecutive register sequence.
  kLeadConsecutive = 0x00001000u,
  //! Mask of a 2-bit field storing the position within a consecutive sequence (see \ref RATiedReg helpers).
  kConsecutiveData = 0x00006000u,

  // Liveness Flags
  // --------------

  //! Register must be duplicated (function call only).
  kDuplicate = 0x00010000u,
  //! Last occurrence of this VirtReg in basic block.
  kLast = 0x00020000u,
  //! Kill this VirtReg after use.
  kKill = 0x00040000u,

  // X86 Specific Flags
  // ------------------

  // Architecture specific flags are used during RATiedReg building to ensure that architecture-specific constraints
  // are handled properly. These flags are not really needed after RATiedReg[] is built and copied to `RAInst`.

  //! This RATiedReg references GPB-LO or GPB-HI.
  kX86_Gpb = 0x01000000u,

  // Instruction Flags (Never used by RATiedReg)
  // -------------------------------------------

  //! Instruction is transformable to another instruction if necessary.
  //!
  //! This is flag that is only used by \ref RAInst to inform register allocator that the instruction has some
  //! constraints that can only be solved by transforming the instruction into another instruction, most likely
  //! by changing its InstId.
  kInst_IsTransformable = 0x80000000u
};
ASMJIT_DEFINE_ENUM_FLAGS(RATiedFlags)
773
+
774
// These assertions guarantee the binary compatibility between RATiedFlags access bits and OpRWFlags, which the
// kRead/kWrite/kRW initializers above rely on (they cast OpRWFlags values directly).
static_assert(uint32_t(RATiedFlags::kRead ) == 0x1, "RATiedFlags::kRead must be 0x1");
static_assert(uint32_t(RATiedFlags::kWrite) == 0x2, "RATiedFlags::kWrite must be 0x2");
static_assert(uint32_t(RATiedFlags::kRW ) == 0x3, "RATiedFlags::kRW must be 0x3");
777
+
778
+ //! Tied register merges one ore more register operand into a single entity. It contains information about its access
779
+ //! (Read|Write) and allocation slots (Use|Out) that are used by the register allocator and liveness analysis.
780
+ struct RATiedReg {
781
+ //! \name Members
782
+ //! \{
783
+
784
+ //! WorkReg id.
785
+ uint32_t _workId;
786
+ //! WorkReg id that is an immediate consecutive parent of this register, or Globals::kInvalidId if it has no parent.
787
+ uint32_t _consecutiveParent;
788
+ //! Allocation flags.
789
+ RATiedFlags _flags;
790
+
791
+ union {
792
+ struct {
793
+ //! How many times the VirtReg is referenced in all operands.
794
+ uint8_t _refCount;
795
+ //! Size of a memory operand in case that it's use instead of the register.
796
+ uint8_t _rmSize;
797
+ //! Physical register for use operation (ReadOnly / ReadWrite).
798
+ uint8_t _useId;
799
+ //! Physical register for out operation (WriteOnly).
800
+ uint8_t _outId;
801
+ };
802
+ //! Packed data.
803
+ uint32_t _packed;
804
+ };
805
+
806
+ //! Registers where inputs {R|X} can be allocated to.
807
+ RegMask _useRegMask;
808
+ //! Registers where outputs {W} can be allocated to.
809
+ RegMask _outRegMask;
810
+ //! Indexes used to rewrite USE regs.
811
+ uint32_t _useRewriteMask;
812
+ //! Indexes used to rewrite OUT regs.
813
+ uint32_t _outRewriteMask;
814
+
815
+ //! \}
816
+
817
+ //! \name Statics
818
+ //! \{
819
+
820
+ static inline RATiedFlags consecutiveDataToFlags(uint32_t offset) noexcept {
821
+ ASMJIT_ASSERT(offset < 4);
822
+ constexpr uint32_t kOffsetShift = Support::ConstCTZ<uint32_t(RATiedFlags::kConsecutiveData)>::value;
823
+ return (RATiedFlags)(offset << kOffsetShift);
824
+ }
825
+
826
+ static inline uint32_t consecutiveDataFromFlags(RATiedFlags flags) noexcept {
827
+ constexpr uint32_t kOffsetShift = Support::ConstCTZ<uint32_t(RATiedFlags::kConsecutiveData)>::value;
828
+ return uint32_t(flags & RATiedFlags::kConsecutiveData) >> kOffsetShift;
829
+ }
830
+
831
+ //! \}
832
+
833
+ //! \name Construction & Destruction
834
+ //! \{
835
+
836
+ inline void init(uint32_t workId, RATiedFlags flags, RegMask useRegMask, uint32_t useId, uint32_t useRewriteMask, RegMask outRegMask, uint32_t outId, uint32_t outRewriteMask, uint32_t rmSize = 0, uint32_t consecutiveParent = Globals::kInvalidId) noexcept {
837
+ _workId = workId;
838
+ _consecutiveParent = consecutiveParent;
839
+ _flags = flags;
840
+ _refCount = 1;
841
+ _rmSize = uint8_t(rmSize);
842
+ _useId = uint8_t(useId);
843
+ _outId = uint8_t(outId);
844
+ _useRegMask = useRegMask;
845
+ _outRegMask = outRegMask;
846
+ _useRewriteMask = useRewriteMask;
847
+ _outRewriteMask = outRewriteMask;
848
+ }
849
+
850
+ //! \}
851
+
852
+ //! \name Accessors
853
+ //! \{
854
+
855
+ //! Returns the associated WorkReg id.
856
+ inline uint32_t workId() const noexcept { return _workId; }
857
+
858
+ inline bool hasConsecutiveParent() const noexcept { return _consecutiveParent != Globals::kInvalidId; }
859
+ inline uint32_t consecutiveParent() const noexcept { return _consecutiveParent; }
860
+ inline uint32_t consecutiveData() const noexcept { return consecutiveDataFromFlags(_flags); }
861
+
862
+ //! Returns TiedReg flags.
863
+ inline RATiedFlags flags() const noexcept { return _flags; }
864
+ //! Checks if the given `flag` is set.
865
+ inline bool hasFlag(RATiedFlags flag) const noexcept { return Support::test(_flags, flag); }
866
+ //! Adds tied register flags.
867
+ inline void addFlags(RATiedFlags flags) noexcept { _flags |= flags; }
868
+
869
+ //! Tests whether the register is read (writes `true` also if it's Read/Write).
870
+ inline bool isRead() const noexcept { return hasFlag(RATiedFlags::kRead); }
871
+ //! Tests whether the register is written (writes `true` also if it's Read/Write).
872
+ inline bool isWrite() const noexcept { return hasFlag(RATiedFlags::kWrite); }
873
+ //! Tests whether the register is read only.
874
+ inline bool isReadOnly() const noexcept { return (_flags & RATiedFlags::kRW) == RATiedFlags::kRead; }
875
+ //! Tests whether the register is write only.
876
+ inline bool isWriteOnly() const noexcept { return (_flags & RATiedFlags::kRW) == RATiedFlags::kWrite; }
877
+ //! Tests whether the register is read and written.
878
+ inline bool isReadWrite() const noexcept { return (_flags & RATiedFlags::kRW) == RATiedFlags::kRW; }
879
+
880
+ //! Tests whether the tied register has use operand (Read/ReadWrite).
881
+ inline bool isUse() const noexcept { return hasFlag(RATiedFlags::kUse); }
882
+ //! Tests whether the tied register has out operand (Write).
883
+ inline bool isOut() const noexcept { return hasFlag(RATiedFlags::kOut); }
884
+
885
+ //! Tests whether the tied register has \ref RATiedFlags::kLeadConsecutive flag set.
886
+ inline bool isLeadConsecutive() const noexcept { return hasFlag(RATiedFlags::kLeadConsecutive); }
887
+ //! Tests whether the tied register has \ref RATiedFlags::kUseConsecutive flag set.
888
+ inline bool isUseConsecutive() const noexcept { return hasFlag(RATiedFlags::kUseConsecutive); }
889
+ //! Tests whether the tied register has \ref RATiedFlags::kOutConsecutive flag set.
890
+ inline bool isOutConsecutive() const noexcept { return hasFlag(RATiedFlags::kOutConsecutive); }
891
+
892
+ //! Tests whether the tied register has any consecutive flag.
893
+ inline bool hasAnyConsecutiveFlag() const noexcept { return hasFlag(RATiedFlags::kLeadConsecutive | RATiedFlags::kUseConsecutive | RATiedFlags::kOutConsecutive); }
894
+
895
+ //! Tests whether the USE slot can be patched to memory operand.
896
+ inline bool hasUseRM() const noexcept { return hasFlag(RATiedFlags::kUseRM); }
897
+ //! Tests whether the OUT slot can be patched to memory operand.
898
+ inline bool hasOutRM() const noexcept { return hasFlag(RATiedFlags::kOutRM); }
899
+
900
+ inline uint32_t rmSize() const noexcept { return _rmSize; }
901
+
902
+ inline void makeReadOnly() noexcept {
903
+ _flags = (_flags & ~(RATiedFlags::kOut | RATiedFlags::kWrite)) | RATiedFlags::kUse;
904
+ _useRewriteMask |= _outRewriteMask;
905
+ _outRewriteMask = 0;
906
+ }
907
+
908
+ inline void makeWriteOnly() noexcept {
909
+ _flags = (_flags & ~(RATiedFlags::kUse | RATiedFlags::kRead)) | RATiedFlags::kOut;
910
+ _outRewriteMask |= _useRewriteMask;
911
+ _useRewriteMask = 0;
912
+ }
913
+
914
+ //! Tests whether the register would duplicate.
915
+ inline bool isDuplicate() const noexcept { return hasFlag(RATiedFlags::kDuplicate); }
916
+
917
+ //! Tests whether the register (and the instruction it's part of) appears last in the basic block.
918
+ inline bool isLast() const noexcept { return hasFlag(RATiedFlags::kLast); }
919
+ //! Tests whether the register should be killed after USEd and/or OUTed.
920
+ inline bool isKill() const noexcept { return hasFlag(RATiedFlags::kKill); }
921
+
922
+ //! Tests whether the register is OUT or KILL (used internally by local register allocator).
923
+ inline bool isOutOrKill() const noexcept { return hasFlag(RATiedFlags::kOut | RATiedFlags::kKill); }
924
+
925
+ //! Returns a register mask that describes allocable USE registers (Read/ReadWrite access).
926
+ inline RegMask useRegMask() const noexcept { return _useRegMask; }
927
+ //! Returns a register mask that describes allocable OUT registers (WriteOnly access).
928
+ inline RegMask outRegMask() const noexcept { return _outRegMask; }
929
+
930
+ inline uint32_t refCount() const noexcept { return _refCount; }
931
+ inline void addRefCount(uint32_t n = 1) noexcept { _refCount = uint8_t(_refCount + n); }
932
+
933
+ //! Tests whether the register must be allocated to a fixed physical register before it's used.
934
+ inline bool hasUseId() const noexcept { return _useId != BaseReg::kIdBad; }
935
+ //! Tests whether the register must be allocated to a fixed physical register before it's written.
936
+ inline bool hasOutId() const noexcept { return _outId != BaseReg::kIdBad; }
937
+
938
+ //! Returns a physical register id used for 'use' operation.
939
+ inline uint32_t useId() const noexcept { return _useId; }
940
+ //! Returns a physical register id used for 'out' operation.
941
+ inline uint32_t outId() const noexcept { return _outId; }
942
+
943
+ inline uint32_t useRewriteMask() const noexcept { return _useRewriteMask; }
944
+ inline uint32_t outRewriteMask() const noexcept { return _outRewriteMask; }
945
+
946
+ //! Sets a physical register used for 'use' operation.
947
+ inline void setUseId(uint32_t index) noexcept { _useId = uint8_t(index); }
948
+ //! Sets a physical register used for 'out' operation.
949
+ inline void setOutId(uint32_t index) noexcept { _outId = uint8_t(index); }
950
+
951
+ inline bool isUseDone() const noexcept { return hasFlag(RATiedFlags::kUseDone); }
952
+ inline bool isOutDone() const noexcept { return hasFlag(RATiedFlags::kUseDone); }
953
+
954
+ inline void markUseDone() noexcept { addFlags(RATiedFlags::kUseDone); }
955
+ inline void markOutDone() noexcept { addFlags(RATiedFlags::kUseDone); }
956
+
957
+ //! \}
958
+ };
959
+
960
//! Flags used by \ref RAWorkReg.
enum class RAWorkRegFlags : uint32_t {
  //! No flags.
  kNone = 0,

  //! This register has already been allocated.
  kAllocated = 0x00000001u,
  //! Has been coalesced to another WorkReg.
  kCoalesced = 0x00000002u,

  //! Set when this register is used as a LEAD consecutive register at least once.
  kLeadConsecutive = 0x00000004u,
  //! Used to mark consecutive registers during processing.
  kProcessedConsecutive = 0x00000008u,

  //! Stack slot has to be allocated.
  kStackUsed = 0x00000010u,
  //! Stack allocation is preferred.
  kStackPreferred = 0x00000020u,
  //! Marked for stack argument reassignment.
  kStackArgToStack = 0x00000040u
};
// Enables the bitwise operators (|, &, ~, ...) for this enum class.
ASMJIT_DEFINE_ENUM_FLAGS(RAWorkRegFlags)
983
+
984
+ //! Work register provides additional data of \ref VirtReg that is used by register allocator.
985
+ //!
986
+ //! In general when a virtual register is found by register allocator it maps it to \ref RAWorkReg
987
+ //! and then only works with it. The reason for such mapping is that users can create many virtual
988
+ //! registers, which are not used inside a register allocation scope (which is currently always a
989
+ //! function). So register allocator basically scans the function for virtual registers and maps
990
+ //! them into WorkRegs, which receive a temporary ID (workId), which starts from zero. This WorkId
991
+ //! is then used in bit-arrays and other mappings.
992
+ class RAWorkReg {
993
+ public:
994
+ ASMJIT_NONCOPYABLE(RAWorkReg)
995
+
996
+ //! \name Constants
997
+ //! \{
998
+
999
+ enum : uint32_t {
1000
+ kIdNone = 0xFFFFFFFFu
1001
+ };
1002
+
1003
+ enum : uint32_t {
1004
+ kNoArgIndex = 0xFFu
1005
+ };
1006
+
1007
+ //! \}
1008
+
1009
+ //! \name Members
1010
+ //! \{
1011
+
1012
+ //! RAPass specific ID used during analysis and allocation.
1013
+ uint32_t _workId = 0;
1014
+ //! Copy of ID used by \ref VirtReg.
1015
+ uint32_t _virtId = 0;
1016
+
1017
+ //! Permanent association with \ref VirtReg.
1018
+ VirtReg* _virtReg = nullptr;
1019
+ //! Temporary association with \ref RATiedReg.
1020
+ RATiedReg* _tiedReg = nullptr;
1021
+ //! Stack slot associated with the register.
1022
+ RAStackSlot* _stackSlot = nullptr;
1023
+
1024
+ //! Copy of a signature used by \ref VirtReg.
1025
+ OperandSignature _signature {};
1026
+ //! RAPass specific flags used during analysis and allocation.
1027
+ RAWorkRegFlags _flags = RAWorkRegFlags::kNone;
1028
+
1029
+ //! Constains all USE ids collected from all instructions.
1030
+ //!
1031
+ //! If this mask is non-zero and not a power of two, it means that the register is used multiple times in
1032
+ //! instructions where it requires to have a different use ID. This means that in general it's not possible
1033
+ //! to keep this register in a single home.
1034
+ RegMask _useIdMask = 0;
1035
+ //! Preferred mask of registers (if non-zero) to allocate this register to.
1036
+ //!
1037
+ //! If this mask is zero it means that either there is no intersection of preferred registers collected from all
1038
+ //! TiedRegs or there is no preference at all (the register can be allocated to any register all the time).
1039
+ RegMask _preferredMask = 0xFFFFFFFFu;
1040
+ //! Consecutive mask, which was collected from all instructions where this register was used as a lead consecutive
1041
+ //! register.
1042
+ RegMask _consecutiveMask = 0xFFFFFFFFu;
1043
+ //! IDs of all physical registers that are clobbered during the lifetime of this WorkReg.
1044
+ //!
1045
+ //! This mask should be updated by `RAPass::buildLiveness()`, because it's global and should
1046
+ //! be updated after unreachable code has been removed.
1047
+ RegMask _clobberSurvivalMask = 0;
1048
+ //! IDs of all physical registers this WorkReg has been allocated to.
1049
+ RegMask _allocatedMask = 0;
1050
+
1051
+ //! A byte-mask where each bit represents one valid byte of the register.
1052
+ uint64_t _regByteMask = 0;
1053
+
1054
+ //! Argument index (or `kNoArgIndex` if none).
1055
+ uint8_t _argIndex = kNoArgIndex;
1056
+ //! Argument value index in the pack (0 by default).
1057
+ uint8_t _argValueIndex = 0;
1058
+ //! Global home register ID (if any, assigned by RA).
1059
+ uint8_t _homeRegId = BaseReg::kIdBad;
1060
+ //! Global hint register ID (provided by RA or user).
1061
+ uint8_t _hintRegId = BaseReg::kIdBad;
1062
+
1063
+ //! Live spans of the `VirtReg`.
1064
+ LiveRegSpans _liveSpans {};
1065
+ //! Live statistics.
1066
+ RALiveStats _liveStats {};
1067
+
1068
+ //! All nodes that read/write this VirtReg/WorkReg.
1069
+ ZoneVector<BaseNode*> _refs {};
1070
+ //! All nodes that write to this VirtReg/WorkReg.
1071
+ ZoneVector<BaseNode*> _writes {};
1072
+
1073
+ //! Contains work IDs of all immediate consecutive registers of this register.
1074
+ //!
1075
+ //! \note This bit array only contains immediate consecutives. This means that if this is a register that is
1076
+ //! followed by 3 more registers, then it would still have only a single immediate. The rest registers would
1077
+ //! have immediate consecutive registers as well, except the last one.
1078
+ ZoneBitVector _immediateConsecutives {};
1079
+
1080
+ //! \}
1081
+
1082
+ //! \name Construction & Destruction
1083
+ //! \{
1084
+
1085
+ inline RAWorkReg(VirtReg* vReg, uint32_t workId) noexcept
1086
+ : _workId(workId),
1087
+ _virtId(vReg->id()),
1088
+ _virtReg(vReg),
1089
+ _signature(vReg->signature()) {}
1090
+
1091
+ //! \}
1092
+
1093
+ //! \name Accessors
1094
+ //! \{
1095
+
1096
+ inline uint32_t workId() const noexcept { return _workId; }
1097
+ inline uint32_t virtId() const noexcept { return _virtId; }
1098
+
1099
+ inline const char* name() const noexcept { return _virtReg->name(); }
1100
+ inline uint32_t nameSize() const noexcept { return _virtReg->nameSize(); }
1101
+
1102
+ inline TypeId typeId() const noexcept { return _virtReg->typeId(); }
1103
+
1104
+ inline RAWorkRegFlags flags() const noexcept { return _flags; }
1105
+ inline bool hasFlag(RAWorkRegFlags flag) const noexcept { return Support::test(_flags, flag); }
1106
+ inline void addFlags(RAWorkRegFlags flags) noexcept { _flags |= flags; }
1107
+
1108
+ inline bool isAllocated() const noexcept { return hasFlag(RAWorkRegFlags::kAllocated); }
1109
+ inline void markAllocated() noexcept { addFlags(RAWorkRegFlags::kAllocated); }
1110
+
1111
+ inline bool isLeadConsecutive() const noexcept { return hasFlag(RAWorkRegFlags::kLeadConsecutive); }
1112
+ inline void markLeadConsecutive() noexcept { addFlags(RAWorkRegFlags::kLeadConsecutive); }
1113
+
1114
+ inline bool isProcessedConsecutive() const noexcept { return hasFlag(RAWorkRegFlags::kProcessedConsecutive); }
1115
+ inline void markProcessedConsecutive() noexcept { addFlags(RAWorkRegFlags::kProcessedConsecutive); }
1116
+
1117
+ inline bool isStackUsed() const noexcept { return hasFlag(RAWorkRegFlags::kStackUsed); }
1118
+ inline void markStackUsed() noexcept { addFlags(RAWorkRegFlags::kStackUsed); }
1119
+
1120
+ inline bool isStackPreferred() const noexcept { return hasFlag(RAWorkRegFlags::kStackPreferred); }
1121
+ inline void markStackPreferred() noexcept { addFlags(RAWorkRegFlags::kStackPreferred); }
1122
+
1123
+ //! Tests whether this RAWorkReg has been coalesced with another one (cannot be used anymore).
1124
+ inline bool isCoalesced() const noexcept { return hasFlag(RAWorkRegFlags::kCoalesced); }
1125
+
1126
+ inline OperandSignature signature() const noexcept { return _signature; }
1127
+ inline RegType type() const noexcept { return _signature.regType(); }
1128
+ inline RegGroup group() const noexcept { return _signature.regGroup(); }
1129
+
1130
+ inline VirtReg* virtReg() const noexcept { return _virtReg; }
1131
+
1132
+ inline bool hasTiedReg() const noexcept { return _tiedReg != nullptr; }
1133
+ inline RATiedReg* tiedReg() const noexcept { return _tiedReg; }
1134
+ inline void setTiedReg(RATiedReg* tiedReg) noexcept { _tiedReg = tiedReg; }
1135
+ inline void resetTiedReg() noexcept { _tiedReg = nullptr; }
1136
+
1137
+ inline bool hasStackSlot() const noexcept { return _stackSlot != nullptr; }
1138
+ inline RAStackSlot* stackSlot() const noexcept { return _stackSlot; }
1139
+
1140
+ inline LiveRegSpans& liveSpans() noexcept { return _liveSpans; }
1141
+ inline const LiveRegSpans& liveSpans() const noexcept { return _liveSpans; }
1142
+
1143
+ inline RALiveStats& liveStats() noexcept { return _liveStats; }
1144
+ inline const RALiveStats& liveStats() const noexcept { return _liveStats; }
1145
+
1146
+ inline bool hasArgIndex() const noexcept { return _argIndex != kNoArgIndex; }
1147
+ inline uint32_t argIndex() const noexcept { return _argIndex; }
1148
+ inline uint32_t argValueIndex() const noexcept { return _argValueIndex; }
1149
+
1150
+ inline void setArgIndex(uint32_t argIndex, uint32_t valueIndex) noexcept {
1151
+ _argIndex = uint8_t(argIndex);
1152
+ _argValueIndex = uint8_t(valueIndex);
1153
+ }
1154
+
1155
+ inline bool hasHomeRegId() const noexcept { return _homeRegId != BaseReg::kIdBad; }
1156
+ inline uint32_t homeRegId() const noexcept { return _homeRegId; }
1157
+ inline void setHomeRegId(uint32_t physId) noexcept { _homeRegId = uint8_t(physId); }
1158
+
1159
+ inline bool hasHintRegId() const noexcept { return _hintRegId != BaseReg::kIdBad; }
1160
+ inline uint32_t hintRegId() const noexcept { return _hintRegId; }
1161
+ inline void setHintRegId(uint32_t physId) noexcept { _hintRegId = uint8_t(physId); }
1162
+
1163
+ inline RegMask useIdMask() const noexcept { return _useIdMask; }
1164
+ inline bool hasUseIdMask() const noexcept { return _useIdMask != 0u; }
1165
+ inline bool hasMultipleUseIds() const noexcept { return _useIdMask != 0u && !Support::isPowerOf2(_useIdMask); }
1166
+ inline void addUseIdMask(RegMask mask) noexcept { _useIdMask |= mask; }
1167
+
1168
+ inline RegMask preferredMask() const noexcept { return _preferredMask; }
1169
+ inline bool hasPrereffedMask() const noexcept { return _preferredMask != 0xFFFFFFFFu; }
1170
+ inline void restrictPreferredMask(RegMask mask) noexcept { _preferredMask &= mask; }
1171
+
1172
+ inline RegMask consecutiveMask() const noexcept { return _consecutiveMask; }
1173
+ inline bool hasConsecutiveMask() const noexcept { return _consecutiveMask != 0xFFFFFFFFu; }
1174
+ inline void restrictConsecutiveMask(RegMask mask) noexcept { _consecutiveMask &= mask; }
1175
+
1176
+ inline RegMask clobberSurvivalMask() const noexcept { return _clobberSurvivalMask; }
1177
+ inline void addClobberSurvivalMask(RegMask mask) noexcept { _clobberSurvivalMask |= mask; }
1178
+
1179
+ inline RegMask allocatedMask() const noexcept { return _allocatedMask; }
1180
+ inline void addAllocatedMask(RegMask mask) noexcept { _allocatedMask |= mask; }
1181
+
1182
+ inline uint64_t regByteMask() const noexcept { return _regByteMask; }
1183
+ inline void setRegByteMask(uint64_t mask) noexcept { _regByteMask = mask; }
1184
+
1185
+ inline bool hasImmediateConsecutives() const noexcept { return !_immediateConsecutives.empty(); }
1186
+ inline const ZoneBitVector& immediateConsecutives() const noexcept { return _immediateConsecutives; }
1187
+
1188
+ inline Error addImmediateConsecutive(ZoneAllocator* allocator, uint32_t workId) noexcept {
1189
+ if (_immediateConsecutives.size() <= workId)
1190
+ ASMJIT_PROPAGATE(_immediateConsecutives.resize(allocator, workId + 1));
1191
+
1192
+ _immediateConsecutives.setBit(workId, true);
1193
+ return kErrorOk;
1194
+ }
1195
+
1196
+ //! \}
1197
+ };
1198
+
1199
+ //! \}
1200
+ //! \endcond
1201
+
1202
+ ASMJIT_END_NAMESPACE
1203
+
1204
+ #endif // ASMJIT_CORE_RADEFS_P_H_INCLUDED