asmjit 0.2.0 → 0.2.2

Files changed (204)
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/Rakefile +5 -3
  4. data/asmjit.gemspec +1 -3
  5. data/ext/asmjit/asmjit/.editorconfig +10 -0
  6. data/ext/asmjit/asmjit/.github/FUNDING.yml +1 -0
  7. data/ext/asmjit/asmjit/.github/workflows/build-config.json +47 -0
  8. data/ext/asmjit/asmjit/.github/workflows/build.yml +156 -0
  9. data/ext/asmjit/asmjit/.gitignore +6 -0
  10. data/ext/asmjit/asmjit/CMakeLists.txt +611 -0
  11. data/ext/asmjit/asmjit/LICENSE.md +17 -0
  12. data/ext/asmjit/asmjit/README.md +69 -0
  13. data/ext/asmjit/asmjit/src/asmjit/a64.h +62 -0
  14. data/ext/asmjit/asmjit/src/asmjit/arm/a64archtraits_p.h +81 -0
  15. data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.cpp +5115 -0
  16. data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.h +72 -0
  17. data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.cpp +51 -0
  18. data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.h +57 -0
  19. data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.cpp +60 -0
  20. data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.h +247 -0
  21. data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper.cpp +464 -0
  22. data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper_p.h +50 -0
  23. data/ext/asmjit/asmjit/src/asmjit/arm/a64emitter.h +1228 -0
  24. data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter.cpp +298 -0
  25. data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter_p.h +59 -0
  26. data/ext/asmjit/asmjit/src/asmjit/arm/a64func.cpp +189 -0
  27. data/ext/asmjit/asmjit/src/asmjit/arm/a64func_p.h +33 -0
  28. data/ext/asmjit/asmjit/src/asmjit/arm/a64globals.h +1894 -0
  29. data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi.cpp +278 -0
  30. data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi_p.h +41 -0
  31. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.cpp +1957 -0
  32. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.h +74 -0
  33. data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb_p.h +876 -0
  34. data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.cpp +85 -0
  35. data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.h +312 -0
  36. data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass.cpp +852 -0
  37. data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass_p.h +105 -0
  38. data/ext/asmjit/asmjit/src/asmjit/arm/a64utils.h +179 -0
  39. data/ext/asmjit/asmjit/src/asmjit/arm/armformatter.cpp +143 -0
  40. data/ext/asmjit/asmjit/src/asmjit/arm/armformatter_p.h +44 -0
  41. data/ext/asmjit/asmjit/src/asmjit/arm/armglobals.h +21 -0
  42. data/ext/asmjit/asmjit/src/asmjit/arm/armoperand.h +621 -0
  43. data/ext/asmjit/asmjit/src/asmjit/arm.h +62 -0
  44. data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-begin.h +17 -0
  45. data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-end.h +9 -0
  46. data/ext/asmjit/asmjit/src/asmjit/asmjit.h +33 -0
  47. data/ext/asmjit/asmjit/src/asmjit/core/api-build_p.h +55 -0
  48. data/ext/asmjit/asmjit/src/asmjit/core/api-config.h +613 -0
  49. data/ext/asmjit/asmjit/src/asmjit/core/archcommons.h +229 -0
  50. data/ext/asmjit/asmjit/src/asmjit/core/archtraits.cpp +160 -0
  51. data/ext/asmjit/asmjit/src/asmjit/core/archtraits.h +290 -0
  52. data/ext/asmjit/asmjit/src/asmjit/core/assembler.cpp +406 -0
  53. data/ext/asmjit/asmjit/src/asmjit/core/assembler.h +129 -0
  54. data/ext/asmjit/asmjit/src/asmjit/core/builder.cpp +889 -0
  55. data/ext/asmjit/asmjit/src/asmjit/core/builder.h +1391 -0
  56. data/ext/asmjit/asmjit/src/asmjit/core/codebuffer.h +113 -0
  57. data/ext/asmjit/asmjit/src/asmjit/core/codeholder.cpp +1149 -0
  58. data/ext/asmjit/asmjit/src/asmjit/core/codeholder.h +1035 -0
  59. data/ext/asmjit/asmjit/src/asmjit/core/codewriter.cpp +175 -0
  60. data/ext/asmjit/asmjit/src/asmjit/core/codewriter_p.h +179 -0
  61. data/ext/asmjit/asmjit/src/asmjit/core/compiler.cpp +582 -0
  62. data/ext/asmjit/asmjit/src/asmjit/core/compiler.h +737 -0
  63. data/ext/asmjit/asmjit/src/asmjit/core/compilerdefs.h +173 -0
  64. data/ext/asmjit/asmjit/src/asmjit/core/constpool.cpp +363 -0
  65. data/ext/asmjit/asmjit/src/asmjit/core/constpool.h +250 -0
  66. data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.cpp +1162 -0
  67. data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.h +813 -0
  68. data/ext/asmjit/asmjit/src/asmjit/core/emithelper.cpp +323 -0
  69. data/ext/asmjit/asmjit/src/asmjit/core/emithelper_p.h +58 -0
  70. data/ext/asmjit/asmjit/src/asmjit/core/emitter.cpp +333 -0
  71. data/ext/asmjit/asmjit/src/asmjit/core/emitter.h +741 -0
  72. data/ext/asmjit/asmjit/src/asmjit/core/emitterutils.cpp +129 -0
  73. data/ext/asmjit/asmjit/src/asmjit/core/emitterutils_p.h +89 -0
  74. data/ext/asmjit/asmjit/src/asmjit/core/environment.cpp +46 -0
  75. data/ext/asmjit/asmjit/src/asmjit/core/environment.h +508 -0
  76. data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.cpp +14 -0
  77. data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.h +228 -0
  78. data/ext/asmjit/asmjit/src/asmjit/core/formatter.cpp +584 -0
  79. data/ext/asmjit/asmjit/src/asmjit/core/formatter.h +247 -0
  80. data/ext/asmjit/asmjit/src/asmjit/core/formatter_p.h +34 -0
  81. data/ext/asmjit/asmjit/src/asmjit/core/func.cpp +286 -0
  82. data/ext/asmjit/asmjit/src/asmjit/core/func.h +1445 -0
  83. data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext.cpp +293 -0
  84. data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext_p.h +199 -0
  85. data/ext/asmjit/asmjit/src/asmjit/core/globals.cpp +133 -0
  86. data/ext/asmjit/asmjit/src/asmjit/core/globals.h +393 -0
  87. data/ext/asmjit/asmjit/src/asmjit/core/inst.cpp +113 -0
  88. data/ext/asmjit/asmjit/src/asmjit/core/inst.h +772 -0
  89. data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.cpp +1242 -0
  90. data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.h +261 -0
  91. data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.cpp +80 -0
  92. data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.h +89 -0
  93. data/ext/asmjit/asmjit/src/asmjit/core/logger.cpp +69 -0
  94. data/ext/asmjit/asmjit/src/asmjit/core/logger.h +198 -0
  95. data/ext/asmjit/asmjit/src/asmjit/core/misc_p.h +33 -0
  96. data/ext/asmjit/asmjit/src/asmjit/core/operand.cpp +132 -0
  97. data/ext/asmjit/asmjit/src/asmjit/core/operand.h +1611 -0
  98. data/ext/asmjit/asmjit/src/asmjit/core/osutils.cpp +84 -0
  99. data/ext/asmjit/asmjit/src/asmjit/core/osutils.h +61 -0
  100. data/ext/asmjit/asmjit/src/asmjit/core/osutils_p.h +68 -0
  101. data/ext/asmjit/asmjit/src/asmjit/core/raassignment_p.h +418 -0
  102. data/ext/asmjit/asmjit/src/asmjit/core/rabuilders_p.h +612 -0
  103. data/ext/asmjit/asmjit/src/asmjit/core/radefs_p.h +1204 -0
  104. data/ext/asmjit/asmjit/src/asmjit/core/ralocal.cpp +1166 -0
  105. data/ext/asmjit/asmjit/src/asmjit/core/ralocal_p.h +254 -0
  106. data/ext/asmjit/asmjit/src/asmjit/core/rapass.cpp +1969 -0
  107. data/ext/asmjit/asmjit/src/asmjit/core/rapass_p.h +1183 -0
  108. data/ext/asmjit/asmjit/src/asmjit/core/rastack.cpp +184 -0
  109. data/ext/asmjit/asmjit/src/asmjit/core/rastack_p.h +171 -0
  110. data/ext/asmjit/asmjit/src/asmjit/core/string.cpp +559 -0
  111. data/ext/asmjit/asmjit/src/asmjit/core/string.h +372 -0
  112. data/ext/asmjit/asmjit/src/asmjit/core/support.cpp +494 -0
  113. data/ext/asmjit/asmjit/src/asmjit/core/support.h +1773 -0
  114. data/ext/asmjit/asmjit/src/asmjit/core/target.cpp +14 -0
  115. data/ext/asmjit/asmjit/src/asmjit/core/target.h +53 -0
  116. data/ext/asmjit/asmjit/src/asmjit/core/type.cpp +74 -0
  117. data/ext/asmjit/asmjit/src/asmjit/core/type.h +419 -0
  118. data/ext/asmjit/asmjit/src/asmjit/core/virtmem.cpp +722 -0
  119. data/ext/asmjit/asmjit/src/asmjit/core/virtmem.h +242 -0
  120. data/ext/asmjit/asmjit/src/asmjit/core/zone.cpp +353 -0
  121. data/ext/asmjit/asmjit/src/asmjit/core/zone.h +615 -0
  122. data/ext/asmjit/asmjit/src/asmjit/core/zonehash.cpp +309 -0
  123. data/ext/asmjit/asmjit/src/asmjit/core/zonehash.h +186 -0
  124. data/ext/asmjit/asmjit/src/asmjit/core/zonelist.cpp +163 -0
  125. data/ext/asmjit/asmjit/src/asmjit/core/zonelist.h +209 -0
  126. data/ext/asmjit/asmjit/src/asmjit/core/zonestack.cpp +176 -0
  127. data/ext/asmjit/asmjit/src/asmjit/core/zonestack.h +239 -0
  128. data/ext/asmjit/asmjit/src/asmjit/core/zonestring.h +120 -0
  129. data/ext/asmjit/asmjit/src/asmjit/core/zonetree.cpp +99 -0
  130. data/ext/asmjit/asmjit/src/asmjit/core/zonetree.h +380 -0
  131. data/ext/asmjit/asmjit/src/asmjit/core/zonevector.cpp +356 -0
  132. data/ext/asmjit/asmjit/src/asmjit/core/zonevector.h +690 -0
  133. data/ext/asmjit/asmjit/src/asmjit/core.h +1861 -0
  134. data/ext/asmjit/asmjit/src/asmjit/x86/x86archtraits_p.h +148 -0
  135. data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.cpp +5110 -0
  136. data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.h +685 -0
  137. data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.cpp +52 -0
  138. data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.h +351 -0
  139. data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.cpp +61 -0
  140. data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.h +721 -0
  141. data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper.cpp +619 -0
  142. data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper_p.h +60 -0
  143. data/ext/asmjit/asmjit/src/asmjit/x86/x86emitter.h +4315 -0
  144. data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter.cpp +944 -0
  145. data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter_p.h +58 -0
  146. data/ext/asmjit/asmjit/src/asmjit/x86/x86func.cpp +503 -0
  147. data/ext/asmjit/asmjit/src/asmjit/x86/x86func_p.h +33 -0
  148. data/ext/asmjit/asmjit/src/asmjit/x86/x86globals.h +2169 -0
  149. data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi.cpp +1732 -0
  150. data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi_p.h +41 -0
  151. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.cpp +4427 -0
  152. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.h +563 -0
  153. data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb_p.h +311 -0
  154. data/ext/asmjit/asmjit/src/asmjit/x86/x86opcode_p.h +436 -0
  155. data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.cpp +231 -0
  156. data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.h +1085 -0
  157. data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass.cpp +1509 -0
  158. data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass_p.h +94 -0
  159. data/ext/asmjit/asmjit/src/asmjit/x86.h +93 -0
  160. data/ext/asmjit/asmjit/src/asmjit.natvis +245 -0
  161. data/ext/asmjit/asmjit/test/asmjit_test_assembler.cpp +84 -0
  162. data/ext/asmjit/asmjit/test/asmjit_test_assembler.h +85 -0
  163. data/ext/asmjit/asmjit/test/asmjit_test_assembler_a64.cpp +4006 -0
  164. data/ext/asmjit/asmjit/test/asmjit_test_assembler_x64.cpp +17833 -0
  165. data/ext/asmjit/asmjit/test/asmjit_test_assembler_x86.cpp +8300 -0
  166. data/ext/asmjit/asmjit/test/asmjit_test_compiler.cpp +253 -0
  167. data/ext/asmjit/asmjit/test/asmjit_test_compiler.h +73 -0
  168. data/ext/asmjit/asmjit/test/asmjit_test_compiler_a64.cpp +690 -0
  169. data/ext/asmjit/asmjit/test/asmjit_test_compiler_x86.cpp +4317 -0
  170. data/ext/asmjit/asmjit/test/asmjit_test_emitters.cpp +197 -0
  171. data/ext/asmjit/asmjit/test/asmjit_test_instinfo.cpp +181 -0
  172. data/ext/asmjit/asmjit/test/asmjit_test_misc.h +257 -0
  173. data/ext/asmjit/asmjit/test/asmjit_test_perf.cpp +62 -0
  174. data/ext/asmjit/asmjit/test/asmjit_test_perf.h +61 -0
  175. data/ext/asmjit/asmjit/test/asmjit_test_perf_a64.cpp +699 -0
  176. data/ext/asmjit/asmjit/test/asmjit_test_perf_x86.cpp +5032 -0
  177. data/ext/asmjit/asmjit/test/asmjit_test_unit.cpp +172 -0
  178. data/ext/asmjit/asmjit/test/asmjit_test_x86_sections.cpp +172 -0
  179. data/ext/asmjit/asmjit/test/asmjitutils.h +38 -0
  180. data/ext/asmjit/asmjit/test/broken.cpp +312 -0
  181. data/ext/asmjit/asmjit/test/broken.h +148 -0
  182. data/ext/asmjit/asmjit/test/cmdline.h +61 -0
  183. data/ext/asmjit/asmjit/test/performancetimer.h +41 -0
  184. data/ext/asmjit/asmjit/tools/configure-makefiles.sh +13 -0
  185. data/ext/asmjit/asmjit/tools/configure-ninja.sh +13 -0
  186. data/ext/asmjit/asmjit/tools/configure-sanitizers.sh +13 -0
  187. data/ext/asmjit/asmjit/tools/configure-vs2019-x64.bat +2 -0
  188. data/ext/asmjit/asmjit/tools/configure-vs2019-x86.bat +2 -0
  189. data/ext/asmjit/asmjit/tools/configure-vs2022-x64.bat +2 -0
  190. data/ext/asmjit/asmjit/tools/configure-vs2022-x86.bat +2 -0
  191. data/ext/asmjit/asmjit/tools/configure-xcode.sh +8 -0
  192. data/ext/asmjit/asmjit/tools/enumgen.js +417 -0
  193. data/ext/asmjit/asmjit/tools/enumgen.sh +3 -0
  194. data/ext/asmjit/asmjit/tools/tablegen-arm.js +365 -0
  195. data/ext/asmjit/asmjit/tools/tablegen-arm.sh +3 -0
  196. data/ext/asmjit/asmjit/tools/tablegen-x86.js +2638 -0
  197. data/ext/asmjit/asmjit/tools/tablegen-x86.sh +3 -0
  198. data/ext/asmjit/asmjit/tools/tablegen.js +947 -0
  199. data/ext/asmjit/asmjit/tools/tablegen.sh +4 -0
  200. data/ext/asmjit/asmjit.cc +167 -30
  201. data/ext/asmjit/extconf.rb +9 -9
  202. data/lib/asmjit/version.rb +1 -1
  203. data/lib/asmjit.rb +14 -4
  204. metadata +198 -17
data/ext/asmjit/asmjit/src/asmjit/core/rapass_p.h
@@ -0,0 +1,1183 @@
1
+ // This file is part of AsmJit project <https://asmjit.com>
2
+ //
3
+ // See asmjit.h or LICENSE.md for license and copyright information
4
+ // SPDX-License-Identifier: Zlib
5
+
6
+ #ifndef ASMJIT_CORE_RAPASS_P_H_INCLUDED
7
+ #define ASMJIT_CORE_RAPASS_P_H_INCLUDED
8
+
9
+ #include "../core/api-config.h"
10
+ #ifndef ASMJIT_NO_COMPILER
11
+
12
+ #include "../core/compiler.h"
13
+ #include "../core/emithelper_p.h"
14
+ #include "../core/raassignment_p.h"
15
+ #include "../core/radefs_p.h"
16
+ #include "../core/rastack_p.h"
17
+ #include "../core/support.h"
18
+
19
+ ASMJIT_BEGIN_NAMESPACE
20
+
21
+ //! \cond INTERNAL
22
+ //! \addtogroup asmjit_ra
23
+ //! \{
24
+
25
+ //! Flags used by \ref RABlock.
26
+ enum class RABlockFlags : uint32_t {
27
+ //! No flags.
28
+ kNone = 0,
29
+
30
+ //! Block has been constructed from nodes.
31
+ kIsConstructed = 0x00000001u,
32
+ //! Block is reachable (set by `buildCFGViews()`).
33
+ kIsReachable = 0x00000002u,
34
+ //! Block is a target (has an associated label or multiple labels).
35
+ kIsTargetable = 0x00000004u,
36
+ //! Block has been allocated.
37
+ kIsAllocated = 0x00000008u,
38
+ //! Block is a function-exit.
39
+ kIsFuncExit = 0x00000010u,
40
+
41
+ //! Block has a terminator (jump, conditional jump, ret).
42
+ kHasTerminator = 0x00000100u,
43
+ //! Block naturally flows to the next block.
44
+ kHasConsecutive = 0x00000200u,
45
+ //! Block has a jump to a jump-table at the end.
46
+ kHasJumpTable = 0x00000400u,
47
+ //! Block contains fixed registers (precolored).
48
+ kHasFixedRegs = 0x00000800u,
49
+ //! Block contains function calls.
50
+ kHasFuncCalls = 0x00001000u
51
+ };
52
+ ASMJIT_DEFINE_ENUM_FLAGS(RABlockFlags)
53
+
54
+ //! Basic block used by register allocator pass.
55
+ class RABlock {
56
+ public:
57
+ ASMJIT_NONCOPYABLE(RABlock)
58
+
59
+ typedef RAAssignment::PhysToWorkMap PhysToWorkMap;
60
+ typedef RAAssignment::WorkToPhysMap WorkToPhysMap;
61
+
62
+ //! \name Constants
63
+ //! \{
64
+
65
+ enum : uint32_t {
66
+ //! Unassigned block id.
67
+ kUnassignedId = 0xFFFFFFFFu
68
+ };
69
+
70
+ enum LiveType : uint32_t {
71
+ kLiveIn = 0,
72
+ kLiveOut = 1,
73
+ kLiveGen = 2,
74
+ kLiveKill = 3,
75
+ kLiveCount = 4
76
+ };
77
+
78
+ //! \}
79
+
80
+ //! \name Members
81
+ //! \{
82
+
83
+ //! Register allocator pass.
84
+ BaseRAPass* _ra;
85
+
86
+ //! Block id (indexed from zero).
87
+ uint32_t _blockId = kUnassignedId;
88
+ //! Block flags, see `Flags`.
89
+ RABlockFlags _flags = RABlockFlags::kNone;
90
+
91
+ //! First `BaseNode` of this block (inclusive).
92
+ BaseNode* _first = nullptr;
93
+ //! Last `BaseNode` of this block (inclusive).
94
+ BaseNode* _last = nullptr;
95
+
96
+ //! Initial position of this block (inclusive).
97
+ uint32_t _firstPosition = 0;
98
+ //! End position of this block (exclusive).
99
+ uint32_t _endPosition = 0;
100
+
101
+ //! Weight of this block (default 0, each loop adds one).
102
+ uint32_t _weight = 0;
103
+ //! Post-order view order, used during POV construction.
104
+ uint32_t _povOrder = 0;
105
+
106
+ //! Basic statistics about registers.
107
+ RARegsStats _regsStats = RARegsStats();
108
+ //! Maximum live-count per register group.
109
+ RALiveCount _maxLiveCount = RALiveCount();
110
+
111
+ //! Timestamp (used by block visitors).
112
+ mutable uint64_t _timestamp = 0;
113
+ //! Immediate dominator of this block.
114
+ RABlock* _idom = nullptr;
115
+
116
+ //! Block predecessors.
117
+ RABlocks _predecessors {};
118
+ //! Block successors.
119
+ RABlocks _successors {};
120
+
121
+ //! Liveness in/out/use/kill.
122
+ ZoneBitVector _liveBits[kLiveCount] {};
123
+
124
+ //! Shared assignment id, or `Globals::kInvalidId` if this block doesn't have a shared assignment.
125
+ //! See \ref RASharedAssignment for more details.
126
+ uint32_t _sharedAssignmentId = Globals::kInvalidId;
127
+ //! Scratch registers that cannot be allocated upon block entry.
128
+ RegMask _entryScratchGpRegs = 0;
129
+ //! Scratch registers used at exit, by a terminator instruction.
130
+ RegMask _exitScratchGpRegs = 0;
131
+
132
+ //! Register assignment on entry.
133
+ PhysToWorkMap* _entryPhysToWorkMap = nullptr;
134
+
135
+ //! \}
136
+
137
+ //! \name Construction & Destruction
138
+ //! \{
139
+
140
+ inline RABlock(BaseRAPass* ra) noexcept
141
+ : _ra(ra) {}
142
+
143
+ //! \}
144
+
145
+ //! \name Accessors
146
+ //! \{
147
+
148
+ inline BaseRAPass* pass() const noexcept { return _ra; }
149
+ inline ZoneAllocator* allocator() const noexcept;
150
+
151
+ inline uint32_t blockId() const noexcept { return _blockId; }
152
+ inline RABlockFlags flags() const noexcept { return _flags; }
153
+
154
+ inline bool hasFlag(RABlockFlags flag) const noexcept { return Support::test(_flags, flag); }
155
+ inline void addFlags(RABlockFlags flags) noexcept { _flags |= flags; }
156
+
157
+ inline bool isAssigned() const noexcept { return _blockId != kUnassignedId; }
158
+
159
+ inline bool isConstructed() const noexcept { return hasFlag(RABlockFlags::kIsConstructed); }
160
+ inline bool isReachable() const noexcept { return hasFlag(RABlockFlags::kIsReachable); }
161
+ inline bool isTargetable() const noexcept { return hasFlag(RABlockFlags::kIsTargetable); }
162
+ inline bool isAllocated() const noexcept { return hasFlag(RABlockFlags::kIsAllocated); }
163
+ inline bool isFuncExit() const noexcept { return hasFlag(RABlockFlags::kIsFuncExit); }
164
+ inline bool hasTerminator() const noexcept { return hasFlag(RABlockFlags::kHasTerminator); }
165
+ inline bool hasConsecutive() const noexcept { return hasFlag(RABlockFlags::kHasConsecutive); }
166
+ inline bool hasJumpTable() const noexcept { return hasFlag(RABlockFlags::kHasJumpTable); }
167
+
168
+ inline void makeConstructed(const RARegsStats& regStats) noexcept {
169
+ _flags |= RABlockFlags::kIsConstructed;
170
+ _regsStats.combineWith(regStats);
171
+ }
172
+
173
+ inline void makeReachable() noexcept { _flags |= RABlockFlags::kIsReachable; }
174
+ inline void makeTargetable() noexcept { _flags |= RABlockFlags::kIsTargetable; }
175
+ inline void makeAllocated() noexcept { _flags |= RABlockFlags::kIsAllocated; }
176
+
177
+ inline const RARegsStats& regsStats() const noexcept { return _regsStats; }
178
+
179
+ inline bool hasPredecessors() const noexcept { return !_predecessors.empty(); }
180
+ inline bool hasSuccessors() const noexcept { return !_successors.empty(); }
181
+
182
+ inline bool hasSuccessor(RABlock* block) noexcept {
183
+ if (block->_predecessors.size() < _successors.size())
184
+ return block->_predecessors.contains(this);
185
+ else
186
+ return _successors.contains(block);
187
+ }
188
+
189
+ inline const RABlocks& predecessors() const noexcept { return _predecessors; }
190
+ inline const RABlocks& successors() const noexcept { return _successors; }
191
+
192
+ inline BaseNode* first() const noexcept { return _first; }
193
+ inline BaseNode* last() const noexcept { return _last; }
194
+
195
+ inline void setFirst(BaseNode* node) noexcept { _first = node; }
196
+ inline void setLast(BaseNode* node) noexcept { _last = node; }
197
+
198
+ inline uint32_t firstPosition() const noexcept { return _firstPosition; }
199
+ inline void setFirstPosition(uint32_t position) noexcept { _firstPosition = position; }
200
+
201
+ inline uint32_t endPosition() const noexcept { return _endPosition; }
202
+ inline void setEndPosition(uint32_t position) noexcept { _endPosition = position; }
203
+
204
+ inline uint32_t povOrder() const noexcept { return _povOrder; }
205
+
206
+ inline RegMask entryScratchGpRegs() const noexcept;
207
+ inline RegMask exitScratchGpRegs() const noexcept { return _exitScratchGpRegs; }
208
+
209
+ inline void addEntryScratchGpRegs(RegMask regMask) noexcept { _entryScratchGpRegs |= regMask; }
210
+ inline void addExitScratchGpRegs(RegMask regMask) noexcept { _exitScratchGpRegs |= regMask; }
211
+
212
+ inline bool hasSharedAssignmentId() const noexcept { return _sharedAssignmentId != Globals::kInvalidId; }
213
+ inline uint32_t sharedAssignmentId() const noexcept { return _sharedAssignmentId; }
214
+ inline void setSharedAssignmentId(uint32_t id) noexcept { _sharedAssignmentId = id; }
215
+
216
+ inline uint64_t timestamp() const noexcept { return _timestamp; }
217
+ inline bool hasTimestamp(uint64_t ts) const noexcept { return _timestamp == ts; }
218
+ inline void setTimestamp(uint64_t ts) const noexcept { _timestamp = ts; }
219
+ inline void resetTimestamp() const noexcept { _timestamp = 0; }
220
+
221
+ inline RABlock* consecutive() const noexcept { return hasConsecutive() ? _successors[0] : nullptr; }
222
+
223
+ inline RABlock* iDom() noexcept { return _idom; }
224
+ inline const RABlock* iDom() const noexcept { return _idom; }
225
+ inline void setIDom(RABlock* block) noexcept { _idom = block; }
226
+
227
+ inline ZoneBitVector& liveIn() noexcept { return _liveBits[kLiveIn]; }
228
+ inline const ZoneBitVector& liveIn() const noexcept { return _liveBits[kLiveIn]; }
229
+
230
+ inline ZoneBitVector& liveOut() noexcept { return _liveBits[kLiveOut]; }
231
+ inline const ZoneBitVector& liveOut() const noexcept { return _liveBits[kLiveOut]; }
232
+
233
+ inline ZoneBitVector& gen() noexcept { return _liveBits[kLiveGen]; }
234
+ inline const ZoneBitVector& gen() const noexcept { return _liveBits[kLiveGen]; }
235
+
236
+ inline ZoneBitVector& kill() noexcept { return _liveBits[kLiveKill]; }
237
+ inline const ZoneBitVector& kill() const noexcept { return _liveBits[kLiveKill]; }
238
+
239
+ inline Error resizeLiveBits(uint32_t size) noexcept {
240
+ ASMJIT_PROPAGATE(_liveBits[kLiveIn ].resize(allocator(), size));
241
+ ASMJIT_PROPAGATE(_liveBits[kLiveOut ].resize(allocator(), size));
242
+ ASMJIT_PROPAGATE(_liveBits[kLiveGen ].resize(allocator(), size));
243
+ ASMJIT_PROPAGATE(_liveBits[kLiveKill].resize(allocator(), size));
244
+ return kErrorOk;
245
+ }
246
+
247
+ inline bool hasEntryAssignment() const noexcept { return _entryPhysToWorkMap != nullptr; }
248
+ inline PhysToWorkMap* entryPhysToWorkMap() const noexcept { return _entryPhysToWorkMap; }
249
+ inline void setEntryAssignment(PhysToWorkMap* physToWorkMap) noexcept { _entryPhysToWorkMap = physToWorkMap; }
250
+
251
+ //! \}
252
+
253
+ //! \name Utilities
254
+ //! \{
255
+
256
+ //! Adds a successor to this block and a predecessor to `successor`, making the connection on both sides.
257
+ //!
258
+ //! This API must be used to manage successors and predecessors; never manage them manually.
259
+ Error appendSuccessor(RABlock* successor) noexcept;
260
+
261
+ //! Similar to `appendSuccessor()`, but prepends instead of appending.
262
+ //!
263
+ //! This function is used to add a natural flow (always first) to the block.
264
+ Error prependSuccessor(RABlock* successor) noexcept;
265
+
266
+ //! \}
267
+ };
268
+
269
+ //! Register allocator's data associated with each `InstNode`.
270
+ class RAInst {
271
+ public:
272
+ ASMJIT_NONCOPYABLE(RAInst)
273
+
274
+ //! \name Members
275
+ //! \{
276
+
277
+ //! Parent block.
278
+ RABlock* _block;
279
+ //! Instruction RW flags.
280
+ InstRWFlags _instRWFlags;
281
+ //! Aggregated RATiedFlags from all operands & instruction specific flags.
282
+ RATiedFlags _flags;
283
+ //! Total count of RATiedReg's.
284
+ uint32_t _tiedTotal;
285
+ //! Index of RATiedReg's per register group.
286
+ RARegIndex _tiedIndex;
287
+ //! Count of RATiedReg's per register group.
288
+ RARegCount _tiedCount;
289
+ //! Number of live, and thus interfering VirtReg's at this point.
290
+ RALiveCount _liveCount;
291
+ //! Fixed physical registers used.
292
+ RARegMask _usedRegs;
293
+ //! Clobbered registers (by a function call).
294
+ RARegMask _clobberedRegs;
295
+ //! Tied registers.
296
+ RATiedReg _tiedRegs[1];
297
+
298
+ //! \}
299
+
300
+ //! \name Construction & Destruction
301
+ //! \{
302
+
303
+ inline RAInst(RABlock* block, InstRWFlags instRWFlags, RATiedFlags tiedFlags, uint32_t tiedTotal, const RARegMask& clobberedRegs) noexcept {
304
+ _block = block;
305
+ _instRWFlags = instRWFlags;
306
+ _flags = tiedFlags;
307
+ _tiedTotal = tiedTotal;
308
+ _tiedIndex.reset();
309
+ _tiedCount.reset();
310
+ _liveCount.reset();
311
+ _usedRegs.reset();
312
+ _clobberedRegs = clobberedRegs;
313
+ }
314
+
315
+ //! \}
316
+
317
+ //! \name Accessors
318
+ //! \{
319
+
320
+ //! Returns instruction RW flags.
321
+ inline InstRWFlags instRWFlags() const noexcept { return _instRWFlags; };
322
+ //! Tests whether the given `flag` is present in instruction RW flags.
323
+ inline bool hasInstRWFlag(InstRWFlags flag) const noexcept { return Support::test(_instRWFlags, flag); }
324
+ //! Adds `flags` to instruction RW flags.
325
+ inline void addInstRWFlags(InstRWFlags flags) noexcept { _instRWFlags |= flags; }
326
+
327
+ //! Returns the instruction flags.
328
+ inline RATiedFlags flags() const noexcept { return _flags; }
329
+ //! Tests whether the instruction has flag `flag`.
330
+ inline bool hasFlag(RATiedFlags flag) const noexcept { return Support::test(_flags, flag); }
331
+ //! Replaces the existing instruction flags with `flags`.
332
+ inline void setFlags(RATiedFlags flags) noexcept { _flags = flags; }
333
+ //! Adds instruction `flags` to this RAInst.
334
+ inline void addFlags(RATiedFlags flags) noexcept { _flags |= flags; }
335
+ //! Clears instruction `flags` from this RAInst.
336
+ inline void clearFlags(RATiedFlags flags) noexcept { _flags &= ~flags; }
337
+
338
+ //! Tests whether this instruction can be transformed to another instruction if necessary.
339
+ inline bool isTransformable() const noexcept { return hasFlag(RATiedFlags::kInst_IsTransformable); }
340
+
341
+ //! Returns the associated block with this RAInst.
342
+ inline RABlock* block() const noexcept { return _block; }
343
+
344
+ //! Returns tied registers (all).
345
+ inline RATiedReg* tiedRegs() const noexcept { return const_cast<RATiedReg*>(_tiedRegs); }
346
+ //! Returns tied registers for a given `group`.
347
+ inline RATiedReg* tiedRegs(RegGroup group) const noexcept { return const_cast<RATiedReg*>(_tiedRegs) + _tiedIndex.get(group); }
348
+
349
+ //! Returns count of all tied registers.
350
+ inline uint32_t tiedCount() const noexcept { return _tiedTotal; }
351
+ //! Returns count of tied registers of a given `group`.
352
+ inline uint32_t tiedCount(RegGroup group) const noexcept { return _tiedCount[group]; }
353
+
354
+ //! Returns `RATiedReg` at the given `index`.
355
+ inline RATiedReg* tiedAt(uint32_t index) const noexcept {
356
+ ASMJIT_ASSERT(index < _tiedTotal);
357
+ return tiedRegs() + index;
358
+ }
359
+
360
+ //! Returns `RATiedReg` at the given `index` of the given register `group`.
361
+ inline RATiedReg* tiedOf(RegGroup group, uint32_t index) const noexcept {
362
+ ASMJIT_ASSERT(index < _tiedCount.get(group));
363
+ return tiedRegs(group) + index;
364
+ }
365
+
366
+ inline void setTiedAt(uint32_t index, RATiedReg& tied) noexcept {
367
+ ASMJIT_ASSERT(index < _tiedTotal);
368
+ _tiedRegs[index] = tied;
369
+ }
370
+
371
+ //! \name Static Functions
372
+ //! \{
373
+
374
+ static inline size_t sizeOf(uint32_t tiedRegCount) noexcept {
375
+ return sizeof(RAInst) - sizeof(RATiedReg) + tiedRegCount * sizeof(RATiedReg);
376
+ }
377
+
378
+ //! \}
379
+ };
380
+
381
+ //! A helper class that is used to build an array of RATiedReg items that are then copied to `RAInst`.
382
+ class RAInstBuilder {
383
+ public:
384
+ ASMJIT_NONCOPYABLE(RAInstBuilder)
385
+
386
+ //! \name Members
387
+ //! \{
388
+
389
+ //! Instruction RW flags.
390
+ InstRWFlags _instRWFlags;
391
+
392
+ //! Flags combined from all RATiedReg's.
393
+ RATiedFlags _aggregatedFlags;
394
+ //! Flags that will be cleared before storing the aggregated flags to `RAInst`.
395
+ RATiedFlags _forbiddenFlags;
396
+ RARegCount _count;
397
+ RARegsStats _stats;
398
+
399
+ RARegMask _used;
400
+ RARegMask _clobbered;
401
+
402
+ //! Current tied register in `_tiedRegs`.
403
+ RATiedReg* _cur;
404
+ //! Array of temporary tied registers.
405
+ RATiedReg _tiedRegs[128];
406
+
407
+ //! \}
408
+
409
+ //! \name Construction & Destruction
410
+ //! \{
411
+
412
+ inline RAInstBuilder() noexcept { reset(); }
413
+
414
+ inline void init() noexcept { reset(); }
415
+ inline void reset() noexcept {
416
+ _instRWFlags = InstRWFlags::kNone;
417
+ _aggregatedFlags = RATiedFlags::kNone;
418
+ _forbiddenFlags = RATiedFlags::kNone;
419
+ _count.reset();
420
+ _stats.reset();
421
+ _used.reset();
422
+ _clobbered.reset();
423
+ _cur = _tiedRegs;
424
+ }
425
+
426
+ //! \}
427
+
428
+ //! \name Accessors
429
+ //! \{
430
+
431
+ inline InstRWFlags instRWFlags() const noexcept { return _instRWFlags; }
432
+ inline bool hasInstRWFlag(InstRWFlags flag) const noexcept { return Support::test(_instRWFlags, flag); }
433
+ inline void addInstRWFlags(InstRWFlags flags) noexcept { _instRWFlags |= flags; }
434
+ inline void clearInstRWFlags(InstRWFlags flags) noexcept { _instRWFlags &= ~flags; }
435
+
436
+ inline RATiedFlags aggregatedFlags() const noexcept { return _aggregatedFlags; }
437
+ inline void addAggregatedFlags(RATiedFlags flags) noexcept { _aggregatedFlags |= flags; }
438
+
439
+ inline RATiedFlags forbiddenFlags() const noexcept { return _forbiddenFlags; }
440
+ inline void addForbiddenFlags(RATiedFlags flags) noexcept { _forbiddenFlags |= flags; }
441
+
442
+ //! Returns the number of tied registers added to the builder.
443
+ inline uint32_t tiedRegCount() const noexcept { return uint32_t((size_t)(_cur - _tiedRegs)); }
444
+
445
+ inline RATiedReg* begin() noexcept { return _tiedRegs; }
446
+ inline RATiedReg* end() noexcept { return _cur; }
447
+
448
+ inline const RATiedReg* begin() const noexcept { return _tiedRegs; }
449
+ inline const RATiedReg* end() const noexcept { return _cur; }
450
+
451
+ //! Returns `RATiedReg` at the given `index`.
452
+ inline RATiedReg* operator[](uint32_t index) noexcept {
453
+ ASMJIT_ASSERT(index < tiedRegCount());
454
+ return &_tiedRegs[index];
455
+ }
456
+
457
+ //! Returns `RATiedReg` at the given `index` (const).
458
+ inline const RATiedReg* operator[](uint32_t index) const noexcept {
459
+ ASMJIT_ASSERT(index < tiedRegCount());
460
+ return &_tiedRegs[index];
461
+ }
462
+
463
+ //! \}
464
+
465
+ //! \name Utilities
466
+ //! \{
467
+
468
+ Error add(
469
+ RAWorkReg* workReg,
470
+ RATiedFlags flags,
471
+ RegMask useRegMask, uint32_t useId, uint32_t useRewriteMask,
472
+ RegMask outRegMask, uint32_t outId, uint32_t outRewriteMask,
473
+ uint32_t rmSize = 0,
474
+ uint32_t consecutiveParent = Globals::kInvalidId) noexcept {
475
+
476
+ RegGroup group = workReg->group();
477
+ RATiedReg* tiedReg = workReg->tiedReg();
478
+
479
+ if (useId != BaseReg::kIdBad) {
480
+ _stats.makeFixed(group);
481
+ _used[group] |= Support::bitMask(useId);
482
+ flags |= RATiedFlags::kUseFixed;
483
+ }
484
+
485
+ if (outId != BaseReg::kIdBad) {
486
+ _clobbered[group] |= Support::bitMask(outId);
487
+ flags |= RATiedFlags::kOutFixed;
488
+ }
489
+
490
+ _aggregatedFlags |= flags;
491
+ _stats.makeUsed(group);
492
+
493
+ if (!tiedReg) {
494
+ // Could happen when the builder is not reset properly after each instruction.
495
+ ASMJIT_ASSERT(tiedRegCount() < ASMJIT_ARRAY_SIZE(_tiedRegs));
496
+
497
+ tiedReg = _cur++;
498
+ tiedReg->init(workReg->workId(), flags, useRegMask, useId, useRewriteMask, outRegMask, outId, outRewriteMask, rmSize, consecutiveParent);
499
+ workReg->setTiedReg(tiedReg);
500
+
501
+ _count.add(group);
502
+ return kErrorOk;
503
+ }
504
+ else {
505
+ if (consecutiveParent != tiedReg->consecutiveParent()) {
506
+ if (tiedReg->consecutiveParent() != Globals::kInvalidId)
507
+ return DebugUtils::errored(kErrorInvalidState);
508
+ tiedReg->_consecutiveParent = consecutiveParent;
509
+ }
510
+
511
+ if (useId != BaseReg::kIdBad) {
512
+ if (ASMJIT_UNLIKELY(tiedReg->hasUseId()))
513
+ return DebugUtils::errored(kErrorOverlappedRegs);
514
+ tiedReg->setUseId(useId);
515
+ }
516
+
517
+ if (outId != BaseReg::kIdBad) {
518
+ if (ASMJIT_UNLIKELY(tiedReg->hasOutId()))
519
+ return DebugUtils::errored(kErrorOverlappedRegs);
520
+ tiedReg->setOutId(outId);
521
+ }
522
+
523
+ tiedReg->addRefCount();
524
+ tiedReg->addFlags(flags);
525
+ tiedReg->_useRegMask &= useRegMask;
526
+ tiedReg->_useRewriteMask |= useRewriteMask;
527
+ tiedReg->_outRegMask &= outRegMask;
528
+ tiedReg->_outRewriteMask |= outRewriteMask;
529
+ tiedReg->_rmSize = uint8_t(Support::max<uint32_t>(tiedReg->rmSize(), rmSize));
530
+ return kErrorOk;
531
+ }
532
+ }
533
+
534
+ Error addCallArg(RAWorkReg* workReg, uint32_t useId) noexcept {
535
+ ASMJIT_ASSERT(useId != BaseReg::kIdBad);
536
+
537
+ RATiedFlags flags = RATiedFlags::kUse | RATiedFlags::kRead | RATiedFlags::kUseFixed;
538
+ RegGroup group = workReg->group();
539
+ RegMask allocable = Support::bitMask(useId);
540
+
541
+ _aggregatedFlags |= flags;
542
+ _used[group] |= allocable;
543
+ _stats.makeFixed(group);
544
+ _stats.makeUsed(group);
545
+
546
+ RATiedReg* tiedReg = workReg->tiedReg();
547
+ if (!tiedReg) {
548
+ // Could happen when the builder is not reset properly after each instruction.
549
+ ASMJIT_ASSERT(tiedRegCount() < ASMJIT_ARRAY_SIZE(_tiedRegs));
550
+
551
+ tiedReg = _cur++;
552
+ tiedReg->init(workReg->workId(), flags, allocable, useId, 0, allocable, BaseReg::kIdBad, 0);
553
+ workReg->setTiedReg(tiedReg);
554
+
555
+ _count.add(group);
556
+ return kErrorOk;
557
+ }
558
+ else {
559
+ if (tiedReg->hasUseId()) {
560
+ flags |= RATiedFlags::kDuplicate;
561
+ tiedReg->_useRegMask |= allocable;
562
+ }
563
+ else {
564
+ tiedReg->setUseId(useId);
565
+ tiedReg->_useRegMask &= allocable;
566
+ }
567
+
568
+ tiedReg->addRefCount();
569
+ tiedReg->addFlags(flags);
570
+ return kErrorOk;
571
+ }
572
+ }
573
+
574
+ Error addCallRet(RAWorkReg* workReg, uint32_t outId) noexcept {
575
+ ASMJIT_ASSERT(outId != BaseReg::kIdBad);
576
+
577
+ RATiedFlags flags = RATiedFlags::kOut | RATiedFlags::kWrite | RATiedFlags::kOutFixed;
578
+ RegGroup group = workReg->group();
579
+ RegMask outRegs = Support::bitMask(outId);
580
+
581
+ _aggregatedFlags |= flags;
582
+ _used[group] |= outRegs;
583
+ _stats.makeFixed(group);
584
+ _stats.makeUsed(group);
585
+
586
+ RATiedReg* tiedReg = workReg->tiedReg();
587
+ if (!tiedReg) {
588
+ // Could happen when the builder is not reset properly after each instruction.
589
+ ASMJIT_ASSERT(tiedRegCount() < ASMJIT_ARRAY_SIZE(_tiedRegs));
590
+
591
+ tiedReg = _cur++;
592
+ tiedReg->init(workReg->workId(), flags, Support::allOnes<RegMask>(), BaseReg::kIdBad, 0, outRegs, outId, 0);
593
+ workReg->setTiedReg(tiedReg);
594
+
595
+ _count.add(group);
596
+ return kErrorOk;
597
+ }
598
+ else {
599
+ if (tiedReg->hasOutId())
600
+ return DebugUtils::errored(kErrorOverlappedRegs);
601
+
602
+ tiedReg->addRefCount();
603
+ tiedReg->addFlags(flags);
604
+ tiedReg->setOutId(outId);
605
+ return kErrorOk;
606
+ }
607
+ }
608
+
609
+ //! \}
610
+ };
611
+
612
+ //! Intersection of multiple register assignments.
613
+ //!
614
+ //! See \ref RAAssignment for more information about register assignments.
615
+ class RASharedAssignment {
616
+ public:
617
+ typedef RAAssignment::PhysToWorkMap PhysToWorkMap;
618
+ typedef RAAssignment::WorkToPhysMap WorkToPhysMap;
619
+
620
+ //! \name Members
621
+ //! \{
622
+
623
+ //! Bit-mask of registers that cannot be used upon a block entry, for each block that has this shared assignment.
624
+ //! Scratch registers can come from ISA limits (like the jecxz/loop instructions on x86) or because the registers are
625
+ //! used by a jump/branch instruction that uses registers to perform an indirect jump.
626
+ RegMask _entryScratchGpRegs = 0;
627
+ //! Union of all live-in registers.
628
+ ZoneBitVector _liveIn {};
629
+ //! Register assignment (PhysToWork).
630
+ PhysToWorkMap* _physToWorkMap = nullptr;
631
+
632
+ //! \}
633
+
634
+ //! \name Accessors
635
+ //! \{
636
+
637
+ inline bool empty() const noexcept { return _physToWorkMap == nullptr; }
638
+
639
+ inline RegMask entryScratchGpRegs() const noexcept { return _entryScratchGpRegs; }
640
+ inline void addEntryScratchGpRegs(RegMask mask) noexcept { _entryScratchGpRegs |= mask; }
641
+
642
+ inline const ZoneBitVector& liveIn() const noexcept { return _liveIn; }
643
+
644
+ inline PhysToWorkMap* physToWorkMap() const noexcept { return _physToWorkMap; }
645
+ inline void assignPhysToWorkMap(PhysToWorkMap* physToWorkMap) noexcept { _physToWorkMap = physToWorkMap; }
646
+
647
+ //! \}
648
+ };
649
+
650
+ //! Register allocation pass used by `BaseCompiler`.
651
+ class BaseRAPass : public FuncPass {
652
+ public:
653
+ ASMJIT_NONCOPYABLE(BaseRAPass)
654
+ typedef FuncPass Base;
655
+
656
+ enum : uint32_t {
657
+ kCallArgWeight = 80
658
+ };
659
+
660
+ typedef RAAssignment::PhysToWorkMap PhysToWorkMap;
661
+ typedef RAAssignment::WorkToPhysMap WorkToPhysMap;
662
+
663
+ //! \name Members
664
+ //! \{
665
+
666
+ //! Allocator that uses zone passed to `runOnFunction()`.
667
+ ZoneAllocator _allocator {};
668
+ //! Emit helper.
669
+ BaseEmitHelper* _iEmitHelper = nullptr;
670
+
671
+ //! Logger, disabled if null.
672
+ Logger* _logger = nullptr;
673
+ //! Format options, copied from Logger, or zeroed if there is no logger.
674
+ FormatOptions _formatOptions {};
675
+ //! Diagnostic options, copied from Emitter, or zeroed if there is no logger.
676
+ DiagnosticOptions _diagnosticOptions {};
677
+
678
+ //! Function being processed.
679
+ FuncNode* _func = nullptr;
680
+ //! Stop node.
681
+ BaseNode* _stop = nullptr;
682
+ //! Node that is used to insert extra code after the function body.
683
+ BaseNode* _extraBlock = nullptr;
684
+
685
+ //! Blocks (first block is the entry, always exists).
686
+ RABlocks _blocks {};
687
+ //! Function exit blocks (usually one, but can contain more).
688
+ RABlocks _exits {};
689
+ //! Post order view (POV).
690
+ RABlocks _pov {};
691
+
692
+ //! Number of instruction nodes.
693
+ uint32_t _instructionCount = 0;
694
+ //! Number of created blocks (internal).
695
+ uint32_t _createdBlockCount = 0;
696
+
697
+ //! Shared assignment blocks.
698
+ ZoneVector<RASharedAssignment> _sharedAssignments {};
699
+
700
+ //! Timestamp generator (incremental).
701
+ mutable uint64_t _lastTimestamp = 0;
702
+
703
+ //! Architecture traits.
704
+ const ArchTraits* _archTraits = nullptr;
705
+ //! Index to physical registers in `RAAssignment::PhysToWorkMap`.
706
+ RARegIndex _physRegIndex = RARegIndex();
707
+ //! Count of physical registers in `RAAssignment::PhysToWorkMap`.
708
+ RARegCount _physRegCount = RARegCount();
709
+ //! Total number of physical registers.
710
+ uint32_t _physRegTotal = 0;
711
+ //! Indexes of possible scratch registers that can be selected if necessary.
712
+ Support::Array<uint8_t, 2> _scratchRegIndexes {};
713
+
714
+ //! Registers available for allocation.
715
+ RARegMask _availableRegs = RARegMask();
716
+ //! Count of physical registers per group.
717
+ RARegCount _availableRegCount = RARegCount();
718
+ //! Registers clobbered by the function.
719
+ RARegMask _clobberedRegs = RARegMask();
720
+
721
+ //! Work registers (registers used by the function).
722
+ RAWorkRegs _workRegs;
723
+ //! Work registers per register group.
724
+ Support::Array<RAWorkRegs, Globals::kNumVirtGroups> _workRegsOfGroup;
725
+
726
+ //! Register allocation strategy per register group.
727
+ Support::Array<RAStrategy, Globals::kNumVirtGroups> _strategy;
728
+ //! Global max live-count (from all blocks) per register group.
729
+ RALiveCount _globalMaxLiveCount = RALiveCount();
730
+ //! Global live spans per register group.
731
+ Support::Array<LiveRegSpans*, Globals::kNumVirtGroups> _globalLiveSpans {};
732
+ //! Temporary stack slot.
733
+ Operand _temporaryMem = Operand();
734
+
735
+ //! Stack pointer.
736
+ BaseReg _sp = BaseReg();
737
+ //! Frame pointer.
738
+ BaseReg _fp = BaseReg();
739
+ //! Stack manager.
740
+ RAStackAllocator _stackAllocator {};
741
+ //! Function arguments assignment.
742
+ FuncArgsAssignment _argsAssignment {};
743
+ //! Some StackArgs have to be assigned to StackSlots.
744
+ uint32_t _numStackArgsToStackSlots = 0;
745
+
746
+ //! Maximum name-size computed from all WorkRegs.
747
+ uint32_t _maxWorkRegNameSize = 0;
748
+ //! Temporary string builder used to format comments.
749
+ StringTmp<80> _tmpString;
750
+
751
+ //! \}
752
+
753
+ //! \name Construction & Destruction
754
+ //! \{
755
+
756
+ BaseRAPass() noexcept;
757
+ virtual ~BaseRAPass() noexcept;
758
+
759
+ //! \}
760
+
761
+ //! \name Accessors
762
+ //! \{
763
+
764
+ //! Returns \ref Logger passed to \ref runOnFunction().
765
+ inline Logger* logger() const noexcept { return _logger; }
766
+
767
+ //! Returns either a valid logger if the given `option` is set and logging is enabled, or nullptr.
768
+ inline Logger* getLoggerIf(DiagnosticOptions option) const noexcept { return Support::test(_diagnosticOptions, option) ? _logger : nullptr; }
769
+
770
+ //! Returns whether the diagnostic `option` is enabled.
771
+ //!
772
+ //! \note Returns false if there is no logger (as diagnostics without logging make no sense).
773
+ inline bool hasDiagnosticOption(DiagnosticOptions option) const noexcept { return Support::test(_diagnosticOptions, option); }
774
+
775
+ //! Returns \ref Zone passed to \ref runOnFunction().
776
+ inline Zone* zone() const noexcept { return _allocator.zone(); }
777
+ //! Returns \ref ZoneAllocator used by the register allocator.
778
+ inline ZoneAllocator* allocator() const noexcept { return const_cast<ZoneAllocator*>(&_allocator); }
779
+
780
+ inline const ZoneVector<RASharedAssignment>& sharedAssignments() const { return _sharedAssignments; }
781
+ inline uint32_t sharedAssignmentCount() const noexcept { return _sharedAssignments.size(); }
782
+
783
+ //! Returns the current function node.
784
+ inline FuncNode* func() const noexcept { return _func; }
785
+ //! Returns the stop node of the current function.
786
+ inline BaseNode* stop() const noexcept { return _stop; }
787
+
788
+ //! Returns an extra block used by the current function being processed.
789
+ inline BaseNode* extraBlock() const noexcept { return _extraBlock; }
790
+ //! Sets an extra block, see `extraBlock()`.
791
+ inline void setExtraBlock(BaseNode* node) noexcept { _extraBlock = node; }
792
+
793
+ inline uint32_t endPosition() const noexcept { return _instructionCount * 2; }
794
+
795
+ inline const RARegMask& availableRegs() const noexcept { return _availableRegs; }
796
+ inline const RARegMask& cloberredRegs() const noexcept { return _clobberedRegs; }
797
+
798
+ //! \}
799
+
800
+ //! \name Utilities
801
+ //! \{
802
+
803
+ inline void makeUnavailable(RegGroup group, uint32_t regId) noexcept {
804
+ _availableRegs[group] &= ~Support::bitMask(regId);
805
+ _availableRegCount[group]--;
806
+ }
807
+
808
+ //! Runs the register allocator for the given `func`.
809
+ Error runOnFunction(Zone* zone, Logger* logger, FuncNode* func) override;
810
+
811
+ //! Performs all allocation steps sequentially, called by `runOnFunction()`.
812
+ Error onPerformAllSteps() noexcept;
813
+
814
+ //! \}
815
+
816
+ //! \name Events
817
+ //! \{
818
+
819
+ //! Called by \ref runOnFunction() before the register allocation to initialize
820
+ //! architecture-specific data and constraints.
821
+ virtual void onInit() noexcept = 0;
822
+
823
+ //! Called by \ref runOnFunction() after register allocation to clean everything
824
+ //! up. Called even if the register allocation failed.
825
+ virtual void onDone() noexcept = 0;
826
+
827
+ //! \}
828
+
829
+ //! \name CFG - Basic-Block Management
830
+ //! \{
831
+
832
+ //! Returns the function's entry block.
833
+ inline RABlock* entryBlock() noexcept {
834
+ ASMJIT_ASSERT(!_blocks.empty());
835
+ return _blocks[0];
836
+ }
837
+
838
+ //! \overload
839
+ inline const RABlock* entryBlock() const noexcept {
840
+ ASMJIT_ASSERT(!_blocks.empty());
841
+ return _blocks[0];
842
+ }
843
+
844
+ //! Returns all basic blocks of this function.
845
+ inline RABlocks& blocks() noexcept { return _blocks; }
846
+ //! \overload
847
+ inline const RABlocks& blocks() const noexcept { return _blocks; }
848
+
849
+ //! Returns the count of basic blocks (returns size of `_blocks` array).
850
+ inline uint32_t blockCount() const noexcept { return _blocks.size(); }
851
+ //! Returns the count of reachable basic blocks (returns size of `_pov` array).
852
+ inline uint32_t reachableBlockCount() const noexcept { return _pov.size(); }
853
+
854
+ //! Tests whether the CFG has dangling blocks - these were created by `newBlock()`, but not added to CFG through
855
+ //! `addBlock()`. If `true` is returned and the CFG is constructed it means that something is missing and it's
856
+ //! incomplete.
857
+ //!
858
+ //! \note This is only used to check if the number of created blocks matches the number of added blocks.
859
+ inline bool hasDanglingBlocks() const noexcept { return _createdBlockCount != blockCount(); }
860
+
861
+ //! Gets the next timestamp to be used to mark CFG blocks.
862
+ inline uint64_t nextTimestamp() const noexcept { return ++_lastTimestamp; }
863
+
864
+ //! Creates a new `RABlock` instance.
865
+ //!
866
+ //! \note New blocks don't have ID assigned until they are added to the block array by calling `addBlock()`.
867
+ RABlock* newBlock(BaseNode* initialNode = nullptr) noexcept;
868
+
869
+ //! Tries to find a neighboring LabelNode (without going through code) that is already connected with `RABlock`.
870
+ //! If no label is found then a new RABlock is created and assigned to all possible labels in a backward direction.
871
+ RABlock* newBlockOrExistingAt(LabelNode* cbLabel, BaseNode** stoppedAt = nullptr) noexcept;
872
+
873
+ //! Adds the given `block` to the block list and assigns it a unique block id.
874
+ Error addBlock(RABlock* block) noexcept;
875
+
876
+ inline Error addExitBlock(RABlock* block) noexcept {
877
+ block->addFlags(RABlockFlags::kIsFuncExit);
878
+ return _exits.append(allocator(), block);
879
+ }
880
+
881
+ ASMJIT_FORCE_INLINE RAInst* newRAInst(RABlock* block, InstRWFlags instRWFlags, RATiedFlags flags, uint32_t tiedRegCount, const RARegMask& clobberedRegs) noexcept {
882
+ void* p = zone()->alloc(RAInst::sizeOf(tiedRegCount));
883
+ if (ASMJIT_UNLIKELY(!p))
884
+ return nullptr;
885
+ return new(p) RAInst(block, instRWFlags, flags, tiedRegCount, clobberedRegs);
886
+ }
887
+
888
+ ASMJIT_FORCE_INLINE Error assignRAInst(BaseNode* node, RABlock* block, RAInstBuilder& ib) noexcept {
889
+ uint32_t tiedRegCount = ib.tiedRegCount();
890
+ RAInst* raInst = newRAInst(block, ib.instRWFlags(), ib.aggregatedFlags(), tiedRegCount, ib._clobbered);
891
+
892
+ if (ASMJIT_UNLIKELY(!raInst))
893
+ return DebugUtils::errored(kErrorOutOfMemory);
894
+
895
+ RARegIndex index;
896
+ RATiedFlags flagsFilter = ~ib.forbiddenFlags();
897
+
898
+ index.buildIndexes(ib._count);
899
+ raInst->_tiedIndex = index;
900
+ raInst->_tiedCount = ib._count;
901
+
902
+ for (uint32_t i = 0; i < tiedRegCount; i++) {
903
+ RATiedReg* tiedReg = ib[i];
904
+ RAWorkReg* workReg = workRegById(tiedReg->workId());
905
+
906
+ workReg->resetTiedReg();
907
+ RegGroup group = workReg->group();
908
+
909
+ if (tiedReg->hasUseId()) {
910
+ block->addFlags(RABlockFlags::kHasFixedRegs);
911
+ raInst->_usedRegs[group] |= Support::bitMask(tiedReg->useId());
912
+ }
913
+
914
+ if (tiedReg->hasOutId()) {
915
+ block->addFlags(RABlockFlags::kHasFixedRegs);
916
+ }
917
+
918
+ RATiedReg& dst = raInst->_tiedRegs[index[group]++];
919
+ dst = *tiedReg;
920
+ dst._flags &= flagsFilter;
921
+
922
+ if (!tiedReg->isDuplicate())
923
+ dst._useRegMask &= ~ib._used[group];
924
+ }
925
+
926
+ node->setPassData<RAInst>(raInst);
927
+ return kErrorOk;
928
+ }
929
+
930
+ //! \}
931
+
932
+ //! \name CFG - Build CFG
933
+ //! \{
934
+
935
+ //! Traverse the whole function and do the following:
936
+ //!
937
+ //! 1. Construct CFG (represented by `RABlock`) by populating `_blocks` and `_exits`. Blocks describe the control
938
+ //! flow of the function and contain some additional information that is used by the register allocator.
939
+ //!
940
+ //! 2. Remove unreachable code immediately. This is not strictly necessary for BaseCompiler itself as the register
941
+ //! allocator cannot reach such nodes, but keeping instructions that use virtual registers would fail during
942
+ //! instruction encoding phase (Assembler).
943
+ //!
944
+ //! 3. `RAInst` is created for each `InstNode` or compatible. It contains information that is essential for further
945
+ //! analysis and register allocation.
946
+ //!
947
+ //! Use `RACFGBuilderT` template that provides the necessary boilerplate.
948
+ virtual Error buildCFG() noexcept = 0;
949
+
950
+ //! Called after the CFG is built.
951
+ Error initSharedAssignments(const ZoneVector<uint32_t>& sharedAssignmentsMap) noexcept;
952
+
953
+ //! \}
954
+
955
+ //! \name CFG - Views Order
956
+ //! \{
957
+
958
+ //! Constructs CFG views (only POV at the moment).
959
+ Error buildCFGViews() noexcept;
960
+
961
+ //! \}
962
+
963
+ //! \name CFG - Dominators
964
+ //! \{
965
+
966
+ // Terminology:
967
+ // - A node `X` dominates a node `Z` if any path from the entry point to `Z` has to go through `X`.
968
+ // - A node `Z` post-dominates a node `X` if any path from `X` to the end of the graph has to go through `Z`.
969
+
970
+ //! Constructs a dominator-tree from CFG.
971
+ Error buildCFGDominators() noexcept;
972
+
973
+ bool _strictlyDominates(const RABlock* a, const RABlock* b) const noexcept;
974
+ const RABlock* _nearestCommonDominator(const RABlock* a, const RABlock* b) const noexcept;
975
+
976
+ //! Tests whether the basic block `a` dominates `b` - non-strict, returns true when `a == b`.
977
+ inline bool dominates(const RABlock* a, const RABlock* b) const noexcept { return a == b ? true : _strictlyDominates(a, b); }
978
+ //! Tests whether the basic block `a` dominates `b` - strict dominance check, returns false when `a == b`.
979
+ inline bool strictlyDominates(const RABlock* a, const RABlock* b) const noexcept { return a == b ? false : _strictlyDominates(a, b); }
980
+
981
+ //! Returns a nearest common dominator of `a` and `b`.
982
+ inline RABlock* nearestCommonDominator(RABlock* a, RABlock* b) const noexcept { return const_cast<RABlock*>(_nearestCommonDominator(a, b)); }
983
+ //! Returns a nearest common dominator of `a` and `b` (const).
984
+ inline const RABlock* nearestCommonDominator(const RABlock* a, const RABlock* b) const noexcept { return _nearestCommonDominator(a, b); }
985
+
986
+ //! \}
987
+
988
+ //! \name CFG - Utilities
989
+ //! \{
990
+
991
+ Error removeUnreachableCode() noexcept;
992
+
993
+ //! Returns `node` or some node after it that is ideal for beginning a new block. This function is mostly used after
994
+ //! a conditional or unconditional jump to select the successor node. In some cases the next node could be a label,
995
+ //! which means it could have assigned some block already.
996
+ BaseNode* findSuccessorStartingAt(BaseNode* node) noexcept;
997
+
998
+ //! Returns `true` if the `node` can flow to `target` without reaching code or data. It's used to eliminate jumps
999
+ //! to labels that are right next to them.
1000
+ bool isNextTo(BaseNode* node, BaseNode* target) noexcept;
1001
+
1002
+ //! \}
1003
+
1004
+ //! \name Virtual Register Management
1005
+ //! \{
1006
+
1007
+ //! Returns a native size of the general-purpose register of the target architecture.
1008
+ inline uint32_t registerSize() const noexcept { return _sp.size(); }
1009
+ inline uint32_t availableRegCount(RegGroup group) const noexcept { return _availableRegCount[group]; }
1010
+
1011
+ inline RAWorkReg* workRegById(uint32_t workId) const noexcept { return _workRegs[workId]; }
1012
+
1013
+ inline RAWorkRegs& workRegs() noexcept { return _workRegs; }
1014
+ inline RAWorkRegs& workRegs(RegGroup group) noexcept { return _workRegsOfGroup[group]; }
1015
+
1016
+ inline const RAWorkRegs& workRegs() const noexcept { return _workRegs; }
1017
+ inline const RAWorkRegs& workRegs(RegGroup group) const noexcept { return _workRegsOfGroup[group]; }
1018
+
1019
+ inline uint32_t workRegCount() const noexcept { return _workRegs.size(); }
1020
+ inline uint32_t workRegCount(RegGroup group) const noexcept { return _workRegsOfGroup[group].size(); }
1021
+
1022
+ inline void _buildPhysIndex() noexcept {
1023
+ _physRegIndex.buildIndexes(_physRegCount);
1024
+ _physRegTotal = uint32_t(_physRegIndex[RegGroup::kMaxVirt]) +
1025
+ uint32_t(_physRegCount[RegGroup::kMaxVirt]) ;
1026
+ }
1027
+ inline uint32_t physRegIndex(RegGroup group) const noexcept { return _physRegIndex[group]; }
1028
+ inline uint32_t physRegTotal() const noexcept { return _physRegTotal; }
1029
+
1030
+ Error _asWorkReg(VirtReg* vReg, RAWorkReg** out) noexcept;
1031
+
1032
+ //! Creates `RAWorkReg` data for the given `vReg`. The function does nothing
1033
+ //! if `vReg` already contains a link to `RAWorkReg`. Called by `constructBlocks()`.
1034
+ inline Error asWorkReg(VirtReg* vReg, RAWorkReg** out) noexcept {
1035
+ *out = vReg->workReg();
1036
+ return *out ? kErrorOk : _asWorkReg(vReg, out);
1037
+ }
1038
+
1039
+ ASMJIT_FORCE_INLINE Error virtIndexAsWorkReg(uint32_t vIndex, RAWorkReg** out) noexcept {
1040
+ const ZoneVector<VirtReg*>& virtRegs = cc()->virtRegs();
1041
+ if (ASMJIT_UNLIKELY(vIndex >= virtRegs.size()))
1042
+ return DebugUtils::errored(kErrorInvalidVirtId);
1043
+ return asWorkReg(virtRegs[vIndex], out);
1044
+ }
1045
+
1046
+ inline RAStackSlot* getOrCreateStackSlot(RAWorkReg* workReg) noexcept {
1047
+ RAStackSlot* slot = workReg->stackSlot();
1048
+
1049
+ if (slot)
1050
+ return slot;
1051
+
1052
+ slot = _stackAllocator.newSlot(_sp.id(), workReg->virtReg()->virtSize(), workReg->virtReg()->alignment(), RAStackSlot::kFlagRegHome);
1053
+ workReg->_stackSlot = slot;
1054
+ workReg->markStackUsed();
1055
+ return slot;
1056
+ }
1057
+
1058
+ inline BaseMem workRegAsMem(RAWorkReg* workReg) noexcept {
1059
+ getOrCreateStackSlot(workReg);
1060
+ return BaseMem(OperandSignature::fromOpType(OperandType::kMem) |
1061
+ OperandSignature::fromMemBaseType(_sp.type()) |
1062
+ OperandSignature::fromBits(OperandSignature::kMemRegHomeFlag),
1063
+ workReg->virtId(), 0, 0);
1064
+ }
1065
+
1066
+ WorkToPhysMap* newWorkToPhysMap() noexcept;
1067
+ PhysToWorkMap* newPhysToWorkMap() noexcept;
1068
+
1069
+ inline PhysToWorkMap* clonePhysToWorkMap(const PhysToWorkMap* map) noexcept {
1070
+ size_t size = PhysToWorkMap::sizeOf(_physRegTotal);
1071
+ return static_cast<PhysToWorkMap*>(zone()->dupAligned(map, size, sizeof(uint32_t)));
1072
+ }
1073
+
1074
+ //! \name Liveness Analysis & Statistics
1075
+ //! \{
1076
+
1077
+ //! 1. Calculates GEN/KILL/IN/OUT of each block.
1078
+ //! 2. Calculates live spans and basic statistics of each work register.
1079
+ Error buildLiveness() noexcept;
1080
+
1081
+ //! Assigns argIndex to WorkRegs. Must be called after the liveness analysis
1082
+ //! finishes as it checks whether the argument is live upon entry.
1083
+ Error assignArgIndexToWorkRegs() noexcept;
1084
+
1085
+ //! \}
1086
+
1087
+ //! \name Register Allocation - Global
1088
+ //! \{
1089
+
1090
+ //! Runs a global register allocator.
1091
+ Error runGlobalAllocator() noexcept;
1092
+
1093
+ //! Initializes data structures used for global live spans.
1094
+ Error initGlobalLiveSpans() noexcept;
1095
+
1096
+ Error binPack(RegGroup group) noexcept;
1097
+
1098
+ //! \}
1099
+
1100
+ //! \name Register Allocation - Local
1101
+ //! \{
1102
+
1103
+ //! Runs a local register allocator.
1104
+ Error runLocalAllocator() noexcept;
1105
+ Error setBlockEntryAssignment(RABlock* block, const RABlock* fromBlock, const RAAssignment& fromAssignment) noexcept;
1106
+ Error setSharedAssignment(uint32_t sharedAssignmentId, const RAAssignment& fromAssignment) noexcept;
1107
+
1108
+ //! Called after the RA assignment has been assigned to a block.
1109
+ //!
1110
+ //! This cannot change the assignment, but can examine it.
1111
+ Error blockEntryAssigned(const PhysToWorkMap* physToWorkMap) noexcept;
1112
+
1113
+ //! \}
1114
+
1115
+ //! \name Register Allocation Utilities
1116
+ //! \{
1117
+
1118
+ Error useTemporaryMem(BaseMem& out, uint32_t size, uint32_t alignment) noexcept;
1119
+
1120
+ //! \}
1121
+
1122
+ //! \name Function Prolog & Epilog
1123
+ //! \{
1124
+
1125
+ virtual Error updateStackFrame() noexcept;
1126
+ Error _markStackArgsToKeep() noexcept;
1127
+ Error _updateStackArgs() noexcept;
1128
+ Error insertPrologEpilog() noexcept;
1129
+
1130
+ //! \}
1131
+
1132
+ //! \name Instruction Rewriter
1133
+ //! \{
1134
+
1135
+ Error rewrite() noexcept;
1136
+ virtual Error _rewrite(BaseNode* first, BaseNode* stop) noexcept = 0;
1137
+
1138
+ //! \}
1139
+
1140
+ #ifndef ASMJIT_NO_LOGGING
1141
+ //! \name Logging
1142
+ //! \{
1143
+
1144
+ Error annotateCode() noexcept;
1145
+
1146
+ Error _dumpBlockIds(String& sb, const RABlocks& blocks) noexcept;
1147
+ Error _dumpBlockLiveness(String& sb, const RABlock* block) noexcept;
1148
+ Error _dumpLiveSpans(String& sb) noexcept;
1149
+
1150
+ //! \}
1151
+ #endif
1152
+
1153
+ //! \name Emit
1154
+ //! \{
1155
+
1156
+ virtual Error emitMove(uint32_t workId, uint32_t dstPhysId, uint32_t srcPhysId) noexcept = 0;
1157
+ virtual Error emitSwap(uint32_t aWorkId, uint32_t aPhysId, uint32_t bWorkId, uint32_t bPhysId) noexcept = 0;
1158
+
1159
+ virtual Error emitLoad(uint32_t workId, uint32_t dstPhysId) noexcept = 0;
1160
+ virtual Error emitSave(uint32_t workId, uint32_t srcPhysId) noexcept = 0;
1161
+
1162
+ virtual Error emitJump(const Label& label) noexcept = 0;
1163
+ virtual Error emitPreCall(InvokeNode* invokeNode) noexcept = 0;
1164
+
1165
+ //! \}
1166
+ };
1167
+
1168
+ inline ZoneAllocator* RABlock::allocator() const noexcept { return _ra->allocator(); }
1169
+
1170
+ inline RegMask RABlock::entryScratchGpRegs() const noexcept {
1171
+ RegMask regs = _entryScratchGpRegs;
1172
+ if (hasSharedAssignmentId())
1173
+ regs = _ra->_sharedAssignments[_sharedAssignmentId].entryScratchGpRegs();
1174
+ return regs;
1175
+ }
1176
+
1177
+ //! \}
1178
+ //! \endcond
1179
+
1180
+ ASMJIT_END_NAMESPACE
1181
+
1182
+ #endif // !ASMJIT_NO_COMPILER
1183
+ #endif // ASMJIT_CORE_RAPASS_P_H_INCLUDED
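
The `RAInst::sizeOf()` helper and `BaseRAPass::newRAInst()` in the header above use a single-allocation layout: the fixed `RAInst` header and a variable number of trailing `RATiedReg` entries live in one block obtained from the zone allocator. The following standalone sketch shows the same pattern with plain `malloc`; the names `Tied`, `Inst`, and `makeInst` are illustrative only and are not part of asmjit's API.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <new>

struct Tied { uint32_t workId; };

struct Inst {
  uint32_t tiedTotal;
  Tied tiedRegs[1]; // Trailing array; the real capacity is chosen at allocation time.

  // Mirrors RAInst::sizeOf(): header size minus one Tied, plus `n` Tied entries.
  static std::size_t sizeOf(uint32_t n) noexcept {
    return sizeof(Inst) - sizeof(Tied) + std::size_t(n) * sizeof(Tied);
  }
};

// Mirrors the shape of BaseRAPass::newRAInst(), except it allocates with malloc
// instead of a Zone and fills the tied entries with placeholder data.
static Inst* makeInst(uint32_t n) noexcept {
  void* p = std::malloc(Inst::sizeOf(n));
  if (!p)
    return nullptr;
  Inst* inst = new (p) Inst();
  inst->tiedTotal = n;
  for (uint32_t i = 0; i < n; i++)
    inst->tiedRegs[i] = Tied{i};
  return inst;
}

int main() {
  Inst* inst = makeInst(3);
  if (!inst)
    return 1;
  for (uint32_t i = 0; i < inst->tiedTotal; i++)
    std::printf("tied[%u].workId = %u\n", i, inst->tiedRegs[i].workId);
  std::free(inst);
  return 0;
}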
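
For context, the `buildLiveness()` step documented above ("Calculates GEN/KILL/IN/OUT of each block") corresponds to the standard backward liveness data-flow equations, iterated over the CFG until a fixed point is reached (the four per-block sets are kept in `RABlock::_liveBits`):

$$\mathrm{liveOut}(B) \;=\; \bigcup_{S \,\in\, \mathrm{succ}(B)} \mathrm{liveIn}(S), \qquad \mathrm{liveIn}(B) \;=\; \mathrm{gen}(B) \,\cup\, \bigl(\mathrm{liveOut}(B) \setminus \mathrm{kill}(B)\bigr)$$

The equations themselves are textbook material; the traversal order asmjit actually uses (for example, iterating the post-order view stored in `_pov`) is an implementation detail of `buildLiveness()` and is not visible in this header.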