asmjit 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/asmjit.gemspec +1 -1
- data/ext/asmjit/asmjit/.editorconfig +10 -0
- data/ext/asmjit/asmjit/.github/FUNDING.yml +1 -0
- data/ext/asmjit/asmjit/.github/workflows/build-config.json +47 -0
- data/ext/asmjit/asmjit/.github/workflows/build.yml +156 -0
- data/ext/asmjit/asmjit/.gitignore +6 -0
- data/ext/asmjit/asmjit/CMakeLists.txt +611 -0
- data/ext/asmjit/asmjit/LICENSE.md +17 -0
- data/ext/asmjit/asmjit/README.md +69 -0
- data/ext/asmjit/asmjit/src/asmjit/a64.h +62 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64archtraits_p.h +81 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.cpp +5115 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64assembler.h +72 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.cpp +51 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64builder.h +57 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.cpp +60 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64compiler.h +247 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper.cpp +464 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64emithelper_p.h +50 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64emitter.h +1228 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter.cpp +298 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64formatter_p.h +59 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64func.cpp +189 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64func_p.h +33 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64globals.h +1894 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi.cpp +278 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64instapi_p.h +41 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.cpp +1957 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb.h +74 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64instdb_p.h +876 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.cpp +85 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64operand.h +312 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass.cpp +852 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64rapass_p.h +105 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/a64utils.h +179 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/armformatter.cpp +143 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/armformatter_p.h +44 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/armglobals.h +21 -0
- data/ext/asmjit/asmjit/src/asmjit/arm/armoperand.h +621 -0
- data/ext/asmjit/asmjit/src/asmjit/arm.h +62 -0
- data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-begin.h +17 -0
- data/ext/asmjit/asmjit/src/asmjit/asmjit-scope-end.h +9 -0
- data/ext/asmjit/asmjit/src/asmjit/asmjit.h +33 -0
- data/ext/asmjit/asmjit/src/asmjit/core/api-build_p.h +55 -0
- data/ext/asmjit/asmjit/src/asmjit/core/api-config.h +613 -0
- data/ext/asmjit/asmjit/src/asmjit/core/archcommons.h +229 -0
- data/ext/asmjit/asmjit/src/asmjit/core/archtraits.cpp +160 -0
- data/ext/asmjit/asmjit/src/asmjit/core/archtraits.h +290 -0
- data/ext/asmjit/asmjit/src/asmjit/core/assembler.cpp +406 -0
- data/ext/asmjit/asmjit/src/asmjit/core/assembler.h +129 -0
- data/ext/asmjit/asmjit/src/asmjit/core/builder.cpp +889 -0
- data/ext/asmjit/asmjit/src/asmjit/core/builder.h +1391 -0
- data/ext/asmjit/asmjit/src/asmjit/core/codebuffer.h +113 -0
- data/ext/asmjit/asmjit/src/asmjit/core/codeholder.cpp +1149 -0
- data/ext/asmjit/asmjit/src/asmjit/core/codeholder.h +1035 -0
- data/ext/asmjit/asmjit/src/asmjit/core/codewriter.cpp +175 -0
- data/ext/asmjit/asmjit/src/asmjit/core/codewriter_p.h +179 -0
- data/ext/asmjit/asmjit/src/asmjit/core/compiler.cpp +582 -0
- data/ext/asmjit/asmjit/src/asmjit/core/compiler.h +737 -0
- data/ext/asmjit/asmjit/src/asmjit/core/compilerdefs.h +173 -0
- data/ext/asmjit/asmjit/src/asmjit/core/constpool.cpp +363 -0
- data/ext/asmjit/asmjit/src/asmjit/core/constpool.h +250 -0
- data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.cpp +1162 -0
- data/ext/asmjit/asmjit/src/asmjit/core/cpuinfo.h +813 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emithelper.cpp +323 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emithelper_p.h +58 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emitter.cpp +333 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emitter.h +741 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emitterutils.cpp +129 -0
- data/ext/asmjit/asmjit/src/asmjit/core/emitterutils_p.h +89 -0
- data/ext/asmjit/asmjit/src/asmjit/core/environment.cpp +46 -0
- data/ext/asmjit/asmjit/src/asmjit/core/environment.h +508 -0
- data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.cpp +14 -0
- data/ext/asmjit/asmjit/src/asmjit/core/errorhandler.h +228 -0
- data/ext/asmjit/asmjit/src/asmjit/core/formatter.cpp +584 -0
- data/ext/asmjit/asmjit/src/asmjit/core/formatter.h +247 -0
- data/ext/asmjit/asmjit/src/asmjit/core/formatter_p.h +34 -0
- data/ext/asmjit/asmjit/src/asmjit/core/func.cpp +286 -0
- data/ext/asmjit/asmjit/src/asmjit/core/func.h +1445 -0
- data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext.cpp +293 -0
- data/ext/asmjit/asmjit/src/asmjit/core/funcargscontext_p.h +199 -0
- data/ext/asmjit/asmjit/src/asmjit/core/globals.cpp +133 -0
- data/ext/asmjit/asmjit/src/asmjit/core/globals.h +393 -0
- data/ext/asmjit/asmjit/src/asmjit/core/inst.cpp +113 -0
- data/ext/asmjit/asmjit/src/asmjit/core/inst.h +772 -0
- data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.cpp +1242 -0
- data/ext/asmjit/asmjit/src/asmjit/core/jitallocator.h +261 -0
- data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.cpp +80 -0
- data/ext/asmjit/asmjit/src/asmjit/core/jitruntime.h +89 -0
- data/ext/asmjit/asmjit/src/asmjit/core/logger.cpp +69 -0
- data/ext/asmjit/asmjit/src/asmjit/core/logger.h +198 -0
- data/ext/asmjit/asmjit/src/asmjit/core/misc_p.h +33 -0
- data/ext/asmjit/asmjit/src/asmjit/core/operand.cpp +132 -0
- data/ext/asmjit/asmjit/src/asmjit/core/operand.h +1611 -0
- data/ext/asmjit/asmjit/src/asmjit/core/osutils.cpp +84 -0
- data/ext/asmjit/asmjit/src/asmjit/core/osutils.h +61 -0
- data/ext/asmjit/asmjit/src/asmjit/core/osutils_p.h +68 -0
- data/ext/asmjit/asmjit/src/asmjit/core/raassignment_p.h +418 -0
- data/ext/asmjit/asmjit/src/asmjit/core/rabuilders_p.h +612 -0
- data/ext/asmjit/asmjit/src/asmjit/core/radefs_p.h +1204 -0
- data/ext/asmjit/asmjit/src/asmjit/core/ralocal.cpp +1166 -0
- data/ext/asmjit/asmjit/src/asmjit/core/ralocal_p.h +254 -0
- data/ext/asmjit/asmjit/src/asmjit/core/rapass.cpp +1969 -0
- data/ext/asmjit/asmjit/src/asmjit/core/rapass_p.h +1183 -0
- data/ext/asmjit/asmjit/src/asmjit/core/rastack.cpp +184 -0
- data/ext/asmjit/asmjit/src/asmjit/core/rastack_p.h +171 -0
- data/ext/asmjit/asmjit/src/asmjit/core/string.cpp +559 -0
- data/ext/asmjit/asmjit/src/asmjit/core/string.h +372 -0
- data/ext/asmjit/asmjit/src/asmjit/core/support.cpp +494 -0
- data/ext/asmjit/asmjit/src/asmjit/core/support.h +1773 -0
- data/ext/asmjit/asmjit/src/asmjit/core/target.cpp +14 -0
- data/ext/asmjit/asmjit/src/asmjit/core/target.h +53 -0
- data/ext/asmjit/asmjit/src/asmjit/core/type.cpp +74 -0
- data/ext/asmjit/asmjit/src/asmjit/core/type.h +419 -0
- data/ext/asmjit/asmjit/src/asmjit/core/virtmem.cpp +722 -0
- data/ext/asmjit/asmjit/src/asmjit/core/virtmem.h +242 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zone.cpp +353 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zone.h +615 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonehash.cpp +309 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonehash.h +186 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonelist.cpp +163 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonelist.h +209 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonestack.cpp +176 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonestack.h +239 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonestring.h +120 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonetree.cpp +99 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonetree.h +380 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonevector.cpp +356 -0
- data/ext/asmjit/asmjit/src/asmjit/core/zonevector.h +690 -0
- data/ext/asmjit/asmjit/src/asmjit/core.h +1861 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86archtraits_p.h +148 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.cpp +5110 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86assembler.h +685 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.cpp +52 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86builder.h +351 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.cpp +61 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86compiler.h +721 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper.cpp +619 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86emithelper_p.h +60 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86emitter.h +4315 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter.cpp +944 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86formatter_p.h +58 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86func.cpp +503 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86func_p.h +33 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86globals.h +2169 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi.cpp +1732 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86instapi_p.h +41 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.cpp +4427 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb.h +563 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86instdb_p.h +311 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86opcode_p.h +436 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.cpp +231 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86operand.h +1085 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass.cpp +1509 -0
- data/ext/asmjit/asmjit/src/asmjit/x86/x86rapass_p.h +94 -0
- data/ext/asmjit/asmjit/src/asmjit/x86.h +93 -0
- data/ext/asmjit/asmjit/src/asmjit.natvis +245 -0
- data/ext/asmjit/asmjit/test/asmjit_test_assembler.cpp +84 -0
- data/ext/asmjit/asmjit/test/asmjit_test_assembler.h +85 -0
- data/ext/asmjit/asmjit/test/asmjit_test_assembler_a64.cpp +4006 -0
- data/ext/asmjit/asmjit/test/asmjit_test_assembler_x64.cpp +17833 -0
- data/ext/asmjit/asmjit/test/asmjit_test_assembler_x86.cpp +8300 -0
- data/ext/asmjit/asmjit/test/asmjit_test_compiler.cpp +253 -0
- data/ext/asmjit/asmjit/test/asmjit_test_compiler.h +73 -0
- data/ext/asmjit/asmjit/test/asmjit_test_compiler_a64.cpp +690 -0
- data/ext/asmjit/asmjit/test/asmjit_test_compiler_x86.cpp +4317 -0
- data/ext/asmjit/asmjit/test/asmjit_test_emitters.cpp +197 -0
- data/ext/asmjit/asmjit/test/asmjit_test_instinfo.cpp +181 -0
- data/ext/asmjit/asmjit/test/asmjit_test_misc.h +257 -0
- data/ext/asmjit/asmjit/test/asmjit_test_perf.cpp +62 -0
- data/ext/asmjit/asmjit/test/asmjit_test_perf.h +61 -0
- data/ext/asmjit/asmjit/test/asmjit_test_perf_a64.cpp +699 -0
- data/ext/asmjit/asmjit/test/asmjit_test_perf_x86.cpp +5032 -0
- data/ext/asmjit/asmjit/test/asmjit_test_unit.cpp +172 -0
- data/ext/asmjit/asmjit/test/asmjit_test_x86_sections.cpp +172 -0
- data/ext/asmjit/asmjit/test/asmjitutils.h +38 -0
- data/ext/asmjit/asmjit/test/broken.cpp +312 -0
- data/ext/asmjit/asmjit/test/broken.h +148 -0
- data/ext/asmjit/asmjit/test/cmdline.h +61 -0
- data/ext/asmjit/asmjit/test/performancetimer.h +41 -0
- data/ext/asmjit/asmjit/tools/configure-makefiles.sh +13 -0
- data/ext/asmjit/asmjit/tools/configure-ninja.sh +13 -0
- data/ext/asmjit/asmjit/tools/configure-sanitizers.sh +13 -0
- data/ext/asmjit/asmjit/tools/configure-vs2019-x64.bat +2 -0
- data/ext/asmjit/asmjit/tools/configure-vs2019-x86.bat +2 -0
- data/ext/asmjit/asmjit/tools/configure-vs2022-x64.bat +2 -0
- data/ext/asmjit/asmjit/tools/configure-vs2022-x86.bat +2 -0
- data/ext/asmjit/asmjit/tools/configure-xcode.sh +8 -0
- data/ext/asmjit/asmjit/tools/enumgen.js +417 -0
- data/ext/asmjit/asmjit/tools/enumgen.sh +3 -0
- data/ext/asmjit/asmjit/tools/tablegen-arm.js +365 -0
- data/ext/asmjit/asmjit/tools/tablegen-arm.sh +3 -0
- data/ext/asmjit/asmjit/tools/tablegen-x86.js +2638 -0
- data/ext/asmjit/asmjit/tools/tablegen-x86.sh +3 -0
- data/ext/asmjit/asmjit/tools/tablegen.js +947 -0
- data/ext/asmjit/asmjit/tools/tablegen.sh +4 -0
- data/ext/asmjit/asmjit.cc +18 -0
- data/lib/asmjit/version.rb +1 -1
- metadata +197 -2
|
@@ -0,0 +1,852 @@
|
|
|
1
|
+
// This file is part of AsmJit project <https://asmjit.com>
|
|
2
|
+
//
|
|
3
|
+
// See asmjit.h or LICENSE.md for license and copyright information
|
|
4
|
+
// SPDX-License-Identifier: Zlib
|
|
5
|
+
|
|
6
|
+
#include "../core/api-build_p.h"
|
|
7
|
+
#if !defined(ASMJIT_NO_AARCH64) && !defined(ASMJIT_NO_COMPILER)
|
|
8
|
+
|
|
9
|
+
#include "../core/cpuinfo.h"
|
|
10
|
+
#include "../core/support.h"
|
|
11
|
+
#include "../core/type.h"
|
|
12
|
+
#include "../arm/a64assembler.h"
|
|
13
|
+
#include "../arm/a64compiler.h"
|
|
14
|
+
#include "../arm/a64emithelper_p.h"
|
|
15
|
+
#include "../arm/a64instapi_p.h"
|
|
16
|
+
#include "../arm/a64instdb_p.h"
|
|
17
|
+
#include "../arm/a64rapass_p.h"
|
|
18
|
+
|
|
19
|
+
ASMJIT_BEGIN_SUB_NAMESPACE(a64)
|
|
20
|
+
|
|
21
|
+
// a64::ARMRAPass - Helpers
|
|
22
|
+
// ========================
|
|
23
|
+
|
|
24
|
+
// TODO: [ARM] These should be shared with all backends.
|
|
25
|
+
// Returns a byte-mask covering a register of `size` bytes, or zero when the
// size exceeds 8 bytes (vector registers have no scalar immediate mask).
ASMJIT_MAYBE_UNUSED
static inline uint64_t raImmMaskFromSize(uint32_t size) noexcept {
  ASMJIT_ASSERT(size > 0 && size < 256);
  // Indexed by log2(size); entries for 16..256 are intentionally zero.
  static const uint64_t byteMaskBySizeLog2[] = {
    0x00000000000000FFu, // size == 1
    0x000000000000FFFFu, // size == 2
    0x00000000FFFFFFFFu, // size == 4
    0xFFFFFFFFFFFFFFFFu, // size == 8
    0x0000000000000000u, // size == 16
    0x0000000000000000u, // size == 32
    0x0000000000000000u, // size == 64
    0x0000000000000000u, // size == 128
    0x0000000000000000u  // size == 256
  };
  return byteMaskBySizeLog2[Support::ctz(size)];
}
|
|
41
|
+
|
|
42
|
+
// Filters applied to the allocable-register mask of a consecutive-lead
// register, indexed by the lead count. A lead of N registers needs N-1
// higher register ids available after it, so the top ids are masked off.
static const RegMask raConsecutiveLeadCountToRegMaskFilter[5] = {
  0xFFFFFFFFu, // [0] No consecutive.
  0x00000000u, // [1] Invalid, never used.
  0x7FFFFFFFu, // [2] 2 consecutive registers.
  0x3FFFFFFFu, // [3] 3 consecutive registers.
  0x1FFFFFFFu  // [4] 4 consecutive registers.
};
|
|
49
|
+
|
|
50
|
+
// Translates an operand's read/write flags into tied-register Use/Out flags.
// The lookup table is indexed by the low two RW bits (kRead | kWrite).
static inline RATiedFlags raUseOutFlagsFromRWFlags(OpRWFlags rwFlags) noexcept {
  static constexpr RATiedFlags rwToTiedFlags[] = {
    RATiedFlags::kNone,                      // (none)
    RATiedFlags::kRead  | RATiedFlags::kUse, // kRead
    RATiedFlags::kWrite | RATiedFlags::kOut, // kWrite
    RATiedFlags::kRW    | RATiedFlags::kUse  // kRW
  };

  return rwToTiedFlags[uint32_t(rwFlags & OpRWFlags::kRW)];
}
|
|
60
|
+
|
|
61
|
+
// Tied flags for a plain register operand come straight from its RW bits.
static inline RATiedFlags raRegRwFlags(OpRWFlags flags) noexcept {
  return raUseOutFlagsFromRWFlags(flags & OpRWFlags::kRW);
}
|
|
64
|
+
|
|
65
|
+
// Tied flags for a memory BASE register: its RW bits live in the kMemBaseRW
// bit-field of the operand flags, so shift them down before translating.
static inline RATiedFlags raMemBaseRwFlags(OpRWFlags flags) noexcept {
  constexpr uint32_t kBaseRwShift = Support::ConstCTZ<uint32_t(OpRWFlags::kMemBaseRW)>::value;
  return raUseOutFlagsFromRWFlags(OpRWFlags(uint32_t(flags) >> kBaseRwShift) & OpRWFlags::kRW);
}
|
|
69
|
+
|
|
70
|
+
// Tied flags for a memory INDEX register: mirrors raMemBaseRwFlags(), but
// extracts the RW bits from the kMemIndexRW bit-field.
static inline RATiedFlags raMemIndexRwFlags(OpRWFlags flags) noexcept {
  constexpr uint32_t kIndexRwShift = Support::ConstCTZ<uint32_t(OpRWFlags::kMemIndexRW)>::value;
  return raUseOutFlagsFromRWFlags(OpRWFlags(uint32_t(flags) >> kIndexRwShift) & OpRWFlags::kRW);
}
|
|
74
|
+
// a64::RACFGBuilder
|
|
75
|
+
// =================
|
|
76
|
+
|
|
77
|
+
// Builds the control-flow graph and per-instruction RA metadata for AArch64.
// The CRTP base drives node iteration; this class supplies the arch-specific
// callbacks it invokes.
class RACFGBuilder : public RACFGBuilderT<RACFGBuilder> {
public:
  // Target architecture, cached from the compiler at construction time.
  Arch _arch;

  inline RACFGBuilder(ARMRAPass* pass) noexcept
    : RACFGBuilderT<RACFGBuilder>(pass),
      _arch(pass->cc()->arch()) {}

  // Returns the a64 Compiler that owns the nodes being processed.
  inline Compiler* cc() const noexcept { return static_cast<Compiler*>(_cc); }

  // Collects RA constraints of a single instruction and classifies its control flow.
  Error onInst(InstNode* inst, InstControlFlow& controlType, RAInstBuilder& ib) noexcept;

  // Function-call handling: argument/return materialization and register tie-up.
  Error onBeforeInvoke(InvokeNode* invokeNode) noexcept;
  Error onInvoke(InvokeNode* invokeNode, RAInstBuilder& ib) noexcept;

  // Helpers that materialize call arguments (immediate -> register,
  // immediate -> stack slot, register -> stack slot).
  Error moveImmToRegArg(InvokeNode* invokeNode, const FuncValue& arg, const Imm& imm_, BaseReg* out) noexcept;
  Error moveImmToStackArg(InvokeNode* invokeNode, const FuncValue& arg, const Imm& imm_) noexcept;
  Error moveRegToStackArg(InvokeNode* invokeNode, const FuncValue& arg, const BaseReg& reg) noexcept;

  // Function-return handling.
  Error onBeforeRet(FuncRetNode* funcRet) noexcept;
  Error onRet(FuncRetNode* funcRet, RAInstBuilder& ib) noexcept;
};
|
|
99
|
+
|
|
100
|
+
// a64::RACFGBuilder - OnInst
|
|
101
|
+
// ==========================
|
|
102
|
+
|
|
103
|
+
// TODO: [ARM] This is just a workaround...
|
|
104
|
+
// Classifies an instruction id into the control-flow category the CFG
// builder cares about (jump / branch / call / return / regular).
static InstControlFlow getControlFlowType(InstId instId) noexcept {
  uint32_t realId = BaseInst::extractRealId(instId);

  switch (realId) {
    // An unconditional `b`/`br` is a jump; with any other condition code
    // embedded in the id it is a conditional branch.
    case Inst::kIdB:
    case Inst::kIdBr:
      return BaseInst::extractARMCondCode(instId) == CondCode::kAL
        ? InstControlFlow::kJump
        : InstControlFlow::kBranch;

    // Branch-and-link variants are calls.
    case Inst::kIdBl:
    case Inst::kIdBlr:
      return InstControlFlow::kCall;

    // Compare-and-branch / test-bit-and-branch are always conditional.
    case Inst::kIdCbz:
    case Inst::kIdCbnz:
    case Inst::kIdTbz:
    case Inst::kIdTbnz:
      return InstControlFlow::kBranch;

    case Inst::kIdRet:
      return InstControlFlow::kReturn;

    default:
      return InstControlFlow::kRegular;
  }
}
|
|
126
|
+
|
|
127
|
+
// Collects the register-allocation constraints of a single instruction:
// queries its RW info, ties every virtual-register operand (including memory
// base/index registers) into `ib`, and classifies the control flow into
// `controlType`. Instructions with an undefined id are passed through as-is.
Error RACFGBuilder::onInst(InstNode* inst, InstControlFlow& controlType, RAInstBuilder& ib) noexcept {
  InstRWInfo rwInfo;

  if (Inst::isDefinedId(inst->realId())) {
    InstId instId = inst->id();
    uint32_t opCount = inst->opCount();
    const Operand* opArray = inst->operands();
    ASMJIT_PROPAGATE(InstInternal::queryRWInfo(_arch, inst->baseInst(), opArray, opCount, &rwInfo));

    const InstDB::InstInfo& instInfo = InstDB::infoById(instId);
    uint32_t singleRegOps = 0;

    ib.addInstRWFlags(rwInfo.instFlags());

    if (opCount) {
      // 0xFFFFFFFF means "no consecutive-register lead seen yet".
      uint32_t consecutiveOffset = 0xFFFFFFFFu;
      uint32_t consecutiveParent = Globals::kInvalidId;

      for (uint32_t i = 0; i < opCount; i++) {
        const Operand& op = opArray[i];
        const OpRWInfo& opRwInfo = rwInfo.operand(i);

        if (op.isReg()) {
          // Register Operand
          // ----------------
          const Reg& reg = op.as<Reg>();

          RATiedFlags flags = raRegRwFlags(opRwInfo.opFlags());
          uint32_t vIndex = Operand::virtIdToIndex(reg.id());

          // Only virtual registers are tied; physical ids fall through.
          if (vIndex < Operand::kVirtIdCount) {
            RAWorkReg* workReg;
            ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(vIndex, &workReg));

            // Use RW instead of Write in case that not the whole register is overwritten. This is important for
            // liveness as we cannot kill a register that will be used.
            if ((flags & RATiedFlags::kRW) == RATiedFlags::kWrite) {
              if (workReg->regByteMask() & ~(opRwInfo.writeByteMask() | opRwInfo.extendByteMask())) {
                // Not write-only operation.
                flags = (flags & ~RATiedFlags::kOut) | (RATiedFlags::kRead | RATiedFlags::kUse);
              }
            }

            RegGroup group = workReg->group();

            RegMask useRegs = _pass->_availableRegs[group];
            RegMask outRegs = useRegs;

            uint32_t useId = BaseReg::kIdBad;
            uint32_t outId = BaseReg::kIdBad;

            uint32_t useRewriteMask = 0;
            uint32_t outRewriteMask = 0;

            if (opRwInfo.consecutiveLeadCount()) {
              // There must be a single consecutive register lead, otherwise the RW data is invalid.
              if (consecutiveOffset != 0xFFFFFFFFu)
                return DebugUtils::errored(kErrorInvalidState);

              // A consecutive lead register cannot be used as a consecutive +1/+2/+3 register, the registers must be distinct.
              if (RATiedReg::consecutiveDataFromFlags(flags) != 0)
                return DebugUtils::errored(kErrorNotConsecutiveRegs);

              flags |= RATiedFlags::kLeadConsecutive | RATiedReg::consecutiveDataToFlags(opRwInfo.consecutiveLeadCount() - 1);
              consecutiveOffset = 0;

              // Restrict the lead's allocable set so enough consecutive ids
              // remain above whichever register is chosen.
              RegMask filter = raConsecutiveLeadCountToRegMaskFilter[opRwInfo.consecutiveLeadCount()];
              if (Support::test(flags, RATiedFlags::kUse)) {
                flags |= RATiedFlags::kUseConsecutive;
                useRegs &= filter;
              }
              else {
                flags |= RATiedFlags::kOutConsecutive;
                outRegs &= filter;
              }
            }

            if (Support::test(flags, RATiedFlags::kUse)) {
              useRewriteMask = Support::bitMask(inst->getRewriteIndex(&reg._baseId));
              if (opRwInfo.hasOpFlag(OpRWFlags::kRegPhysId)) {
                useId = opRwInfo.physId();
                flags |= RATiedFlags::kUseFixed;
              }
              else if (opRwInfo.hasOpFlag(OpRWFlags::kConsecutive)) {
                // A consecutive follower (+1/+2/+3) is only valid after a lead.
                if (consecutiveOffset == 0xFFFFFFFFu)
                  return DebugUtils::errored(kErrorInvalidState);
                flags |= RATiedFlags::kUseConsecutive | RATiedReg::consecutiveDataToFlags(++consecutiveOffset);
              }
            }
            else {
              outRewriteMask = Support::bitMask(inst->getRewriteIndex(&reg._baseId));
              if (opRwInfo.hasOpFlag(OpRWFlags::kRegPhysId)) {
                outId = opRwInfo.physId();
                flags |= RATiedFlags::kOutFixed;
              }
              else if (opRwInfo.hasOpFlag(OpRWFlags::kConsecutive)) {
                if (consecutiveOffset == 0xFFFFFFFFu)
                  return DebugUtils::errored(kErrorInvalidState);
                flags |= RATiedFlags::kOutConsecutive | RATiedReg::consecutiveDataToFlags(++consecutiveOffset);
              }
            }

            // Special cases regarding element access.
            // NOTE(review): `reg` is cast to Vec unconditionally here;
            // presumably hasElementIndex() is false for non-vector
            // registers - confirm.
            if (reg.as<Vec>().hasElementIndex()) {
              // Only the first 0..15 registers can be used if the register uses
              // element accessor that accesses half-words (h[0..7] elements).
              if (instInfo.hasFlag(InstDB::kInstFlagVH0_15) && reg.as<Vec>().elementType() == Vec::kElementTypeH) {
                // NOTE(review): this masks the fixed physical id (useId/outId)
                // rather than the allocable sets (useRegs/outRegs); it looks
                // like the allocable sets were intended - verify against
                // upstream before relying on this path.
                if (Support::test(flags, RATiedFlags::kUse))
                  useId &= 0x0000FFFFu;
                else
                  outId &= 0x0000FFFFu;
              }
            }

            ASMJIT_PROPAGATE(ib.add(workReg, flags, useRegs, useId, useRewriteMask, outRegs, outId, outRewriteMask, opRwInfo.rmSize(), consecutiveParent));
            // Tracks whether every operand processed so far was a register.
            if (singleRegOps == i)
              singleRegOps++;

            if (Support::test(flags, RATiedFlags::kLeadConsecutive | RATiedFlags::kUseConsecutive | RATiedFlags::kOutConsecutive))
              consecutiveParent = workReg->workId();
          }
        }
        else if (op.isMem()) {
          // Memory Operand
          // --------------
          const Mem& mem = op.as<Mem>();

          if (mem.isRegHome()) {
            // The base is a virtual register's home slot - ensure it has one.
            RAWorkReg* workReg;
            ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(Operand::virtIdToIndex(mem.baseId()), &workReg));
            _pass->getOrCreateStackSlot(workReg);
          }
          else if (mem.hasBaseReg()) {
            uint32_t vIndex = Operand::virtIdToIndex(mem.baseId());
            if (vIndex < Operand::kVirtIdCount) {
              RAWorkReg* workReg;
              ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(vIndex, &workReg));

              RATiedFlags flags = raMemBaseRwFlags(opRwInfo.opFlags());
              RegGroup group = workReg->group();
              RegMask allocable = _pass->_availableRegs[group];

              // Base registers have never fixed id on ARM.
              const uint32_t useId = BaseReg::kIdBad;
              const uint32_t outId = BaseReg::kIdBad;

              uint32_t useRewriteMask = 0;
              uint32_t outRewriteMask = 0;

              if (Support::test(flags, RATiedFlags::kUse))
                useRewriteMask = Support::bitMask(inst->getRewriteIndex(&mem._baseId));
              else
                outRewriteMask = Support::bitMask(inst->getRewriteIndex(&mem._baseId));

              ASMJIT_PROPAGATE(ib.add(workReg, flags, allocable, useId, useRewriteMask, allocable, outId, outRewriteMask));
            }
          }

          if (mem.hasIndexReg()) {
            uint32_t vIndex = Operand::virtIdToIndex(mem.indexId());
            if (vIndex < Operand::kVirtIdCount) {
              RAWorkReg* workReg;
              ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(vIndex, &workReg));

              RATiedFlags flags = raMemIndexRwFlags(opRwInfo.opFlags());
              RegGroup group = workReg->group();
              RegMask allocable = _pass->_availableRegs[group];

              // Index registers have never fixed id on ARM.
              const uint32_t useId = BaseReg::kIdBad;
              const uint32_t outId = BaseReg::kIdBad;

              uint32_t useRewriteMask = 0;
              uint32_t outRewriteMask = 0;

              if (Support::test(flags, RATiedFlags::kUse))
                useRewriteMask = Support::bitMask(inst->getRewriteIndex(&mem._data[Operand::kDataMemIndexId]));
              else
                outRewriteMask = Support::bitMask(inst->getRewriteIndex(&mem._data[Operand::kDataMemIndexId]));

              // NOTE(review): `flags` computed above is not passed to ib.add()
              // here; the index register is always tied as Use|Read - confirm
              // that is intentional.
              ASMJIT_PROPAGATE(ib.add(workReg, RATiedFlags::kUse | RATiedFlags::kRead, allocable, useId, useRewriteMask, allocable, outId, outRewriteMask));
            }
          }
        }
      }
    }

    controlType = getControlFlowType(instId);
  }

  return kErrorOk;
}
|
|
319
|
+
|
|
320
|
+
// a64::RACFGBuilder - OnInvoke
|
|
321
|
+
// ============================
|
|
322
|
+
|
|
323
|
+
// Runs before an InvokeNode is processed: materializes arguments that need
// explicit moves (immediates into registers/stack, registers into stack
// slots), inserting that code before the invoke node, validates return-value
// register groups, and marks the block/function as containing calls.
Error RACFGBuilder::onBeforeInvoke(InvokeNode* invokeNode) noexcept {
  const FuncDetail& fd = invokeNode->detail();
  uint32_t argCount = invokeNode->argCount();

  // Argument-materialization code is emitted before the invoke node.
  cc()->_setCursor(invokeNode->prev());

  for (uint32_t argIndex = 0; argIndex < argCount; argIndex++) {
    const FuncValuePack& argPack = fd.argPack(argIndex);
    for (uint32_t valueIndex = 0; valueIndex < Globals::kMaxValuePack; valueIndex++) {
      if (!argPack[valueIndex])
        break;

      const FuncValue& arg = argPack[valueIndex];
      const Operand& op = invokeNode->arg(argIndex, valueIndex);

      if (op.isNone())
        continue;

      if (op.isReg()) {
        const Reg& reg = op.as<Reg>();
        RAWorkReg* workReg;
        ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(Operand::virtIdToIndex(reg.id()), &workReg));

        if (arg.isReg()) {
          RegGroup regGroup = workReg->group();
          RegGroup argGroup = Reg::groupOf(arg.regType());

          // Cross-group assignment (e.g. GP virtual to vector argument) would
          // need a conversion, which is not implemented.
          if (regGroup != argGroup) {
            // TODO: [ARM] Conversion is not supported.
            return DebugUtils::errored(kErrorInvalidAssignment);
          }
        }
        else {
          // Register value assigned to a stack argument slot.
          ASMJIT_PROPAGATE(moveRegToStackArg(invokeNode, arg, reg));
        }
      }
      else if (op.isImm()) {
        if (arg.isReg()) {
          // Immediate passed in a register: load it into a fresh virtual
          // register and replace the operand in the invoke node.
          BaseReg reg;
          ASMJIT_PROPAGATE(moveImmToRegArg(invokeNode, arg, op.as<Imm>(), &reg));
          invokeNode->_args[argIndex][valueIndex] = reg;
        }
        else {
          ASMJIT_PROPAGATE(moveImmToStackArg(invokeNode, arg, op.as<Imm>()));
        }
      }
    }
  }

  // Anything emitted from now on follows the invoke node.
  cc()->_setCursor(invokeNode);

  if (fd.hasRet()) {
    for (uint32_t valueIndex = 0; valueIndex < Globals::kMaxValuePack; valueIndex++) {
      const FuncValue& ret = fd.ret(valueIndex);
      if (!ret)
        break;

      const Operand& op = invokeNode->ret(valueIndex);
      if (op.isReg()) {
        const Reg& reg = op.as<Reg>();
        RAWorkReg* workReg;
        ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(Operand::virtIdToIndex(reg.id()), &workReg));

        if (ret.isReg()) {
          RegGroup regGroup = workReg->group();
          RegGroup retGroup = Reg::groupOf(ret.regType());

          // Return value must land in a register of the same group.
          if (regGroup != retGroup) {
            // TODO: [ARM] Conversion is not supported.
            return DebugUtils::errored(kErrorInvalidAssignment);
          }
        }
      }
    }
  }

  // This block has function call(s).
  _curBlock->addFlags(RABlockFlags::kHasFuncCalls);
  _pass->func()->frame().addAttributes(FuncAttributes::kHasFuncCalls);
  _pass->func()->frame().updateCallStackSize(fd.argStackSize());

  return kErrorOk;
}
|
|
406
|
+
|
|
407
|
+
// Ties call arguments and return values to their ABI-assigned physical
// registers via `ib`, and records the registers the call clobbers
// (all non-preserved registers of each group).
Error RACFGBuilder::onInvoke(InvokeNode* invokeNode, RAInstBuilder& ib) noexcept {
  uint32_t argCount = invokeNode->argCount();
  const FuncDetail& fd = invokeNode->detail();

  for (uint32_t argIndex = 0; argIndex < argCount; argIndex++) {
    const FuncValuePack& argPack = fd.argPack(argIndex);
    for (uint32_t valueIndex = 0; valueIndex < Globals::kMaxValuePack; valueIndex++) {
      if (!argPack[valueIndex])
        continue;

      const FuncValue& arg = argPack[valueIndex];
      const Operand& op = invokeNode->arg(argIndex, valueIndex);

      if (op.isNone())
        continue;

      if (op.isReg()) {
        const Reg& reg = op.as<Reg>();
        RAWorkReg* workReg;
        ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(Operand::virtIdToIndex(reg.id()), &workReg));

        if (arg.isIndirect()) {
          // Indirect arguments must live in a general-purpose register.
          RegGroup regGroup = workReg->group();
          if (regGroup != RegGroup::kGp)
            return DebugUtils::errored(kErrorInvalidState);
          ASMJIT_PROPAGATE(ib.addCallArg(workReg, arg.regId()));
        }
        else if (arg.isReg()) {
          RegGroup regGroup = workReg->group();
          RegGroup argGroup = Reg::groupOf(arg.regType());

          // Cross-group mismatches were already rejected in onBeforeInvoke();
          // only same-group assignments are tied here.
          if (regGroup == argGroup) {
            ASMJIT_PROPAGATE(ib.addCallArg(workReg, arg.regId()));
          }
        }
      }
    }
  }

  for (uint32_t retIndex = 0; retIndex < Globals::kMaxValuePack; retIndex++) {
    const FuncValue& ret = fd.ret(retIndex);
    if (!ret)
      break;

    const Operand& op = invokeNode->ret(retIndex);
    if (op.isReg()) {
      const Reg& reg = op.as<Reg>();
      RAWorkReg* workReg;
      ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(Operand::virtIdToIndex(reg.id()), &workReg));

      if (ret.isReg()) {
        RegGroup regGroup = workReg->group();
        RegGroup retGroup = Reg::groupOf(ret.regType());

        if (regGroup == retGroup) {
          ASMJIT_PROPAGATE(ib.addCallRet(workReg, ret.regId()));
        }
      }
      else {
        // Return values assigned to anything but a register are not supported.
        return DebugUtils::errored(kErrorInvalidAssignment);
      }
    }
  }

  // Setup clobbered registers.
  ib._clobbered[0] = Support::lsbMask<RegMask>(_pass->_physRegCount[RegGroup(0)]) & ~fd.preservedRegs(RegGroup(0));
  ib._clobbered[1] = Support::lsbMask<RegMask>(_pass->_physRegCount[RegGroup(1)]) & ~fd.preservedRegs(RegGroup(1));
  ib._clobbered[2] = Support::lsbMask<RegMask>(_pass->_physRegCount[RegGroup(2)]) & ~fd.preservedRegs(RegGroup(2));
  ib._clobbered[3] = Support::lsbMask<RegMask>(_pass->_physRegCount[RegGroup(3)]) & ~fd.preservedRegs(RegGroup(3));

  return kErrorOk;
}
|
|
479
|
+
|
|
480
|
+
// a64::RACFGBuilder - MoveImmToRegArg
|
|
481
|
+
// ===================================
|
|
482
|
+
|
|
483
|
+
// Materializes the immediate `imm_` into a newly allocated virtual register
// that can be passed as the call argument described by `arg`, storing the
// register in `out`. Only integral argument types are accepted; any other
// type id yields kErrorInvalidAssignment.
Error RACFGBuilder::moveImmToRegArg(InvokeNode* invokeNode, const FuncValue& arg, const Imm& imm_, BaseReg* out) noexcept {
  DebugUtils::unused(invokeNode);
  ASMJIT_ASSERT(arg.isReg());

  // Operate on a local copy so the caller's immediate is left untouched.
  Imm fixedImm(imm_);

  // Normalize the immediate to 64 bits according to the argument's type.
  switch (arg.typeId()) {
    case TypeId::kInt8  : fixedImm.signExtend8Bits(); break;
    case TypeId::kUInt8 : fixedImm.zeroExtend8Bits(); break;
    case TypeId::kInt16 : fixedImm.signExtend16Bits(); break;
    case TypeId::kUInt16: fixedImm.zeroExtend16Bits(); break;
    case TypeId::kInt32 : fixedImm.signExtend32Bits(); break;
    case TypeId::kUInt32: fixedImm.zeroExtend32Bits(); break;

    case TypeId::kInt64 :
    case TypeId::kUInt64:
      // Already 64-bit, nothing to extend.
      break;

    default:
      // Non-integral immediates cannot be moved into a GP register argument.
      return DebugUtils::errored(kErrorInvalidAssignment);
  }

  // All integral arguments are materialized in a 64-bit GP register.
  TypeId regTypeId = TypeId::kUInt64;

  ASMJIT_PROPAGATE(cc()->_newReg(out, regTypeId, nullptr));
  // Bias the allocator towards keeping call arguments in registers.
  cc()->virtRegById(out->id())->setWeight(BaseRAPass::kCallArgWeight);

  return cc()->mov(out->as<Gp>(), fixedImm);
}
|
|
508
|
+
|
|
509
|
+
// a64::RACFGBuilder - MoveImmToStackArg
|
|
510
|
+
// =====================================
|
|
511
|
+
|
|
512
|
+
// Passes the immediate `imm_` as a stack argument: the value is first loaded
// into a temporary virtual register and that register is then stored to the
// argument's stack slot.
Error RACFGBuilder::moveImmToStackArg(InvokeNode* invokeNode, const FuncValue& arg, const Imm& imm_) noexcept {
  BaseReg tmpReg;

  ASMJIT_PROPAGATE(moveImmToRegArg(invokeNode, arg, imm_, &tmpReg));
  return moveRegToStackArg(invokeNode, arg, tmpReg);
}
|
|
520
|
+
|
|
521
|
+
// a64::RACFGBuilder - MoveRegToStackArg
|
|
522
|
+
// =====================================
|
|
523
|
+
|
|
524
|
+
// Stores the register `reg` to the stack slot of the call argument described
// by `arg` (addressed relative to the stack pointer). Only GP and vector
// registers can be stored; anything else is an invalid state.
Error RACFGBuilder::moveRegToStackArg(InvokeNode* invokeNode, const FuncValue& arg, const BaseReg& reg) noexcept {
  DebugUtils::unused(invokeNode);

  Mem dstPtr = ptr(_pass->_sp.as<Gp>(), arg.stackOffset());

  if (reg.isGp()) {
    return cc()->str(reg.as<Gp>(), dstPtr);
  }

  if (reg.isVec()) {
    return cc()->str(reg.as<Vec>(), dstPtr);
  }

  return DebugUtils::errored(kErrorInvalidState);
}
|
|
536
|
+
|
|
537
|
+
// a64::RACFGBuilder - OnReg
|
|
538
|
+
// =========================
|
|
539
|
+
|
|
540
|
+
// Called before a FuncRetNode is processed. AArch64 requires no preparation
// here, so this is a no-op that always succeeds.
Error RACFGBuilder::onBeforeRet(FuncRetNode* funcRet) noexcept {
  DebugUtils::unused(funcRet);
  return kErrorOk;
}
|
|
544
|
+
|
|
545
|
+
// Processes a FuncRetNode: for each return operand, ties the virtual register
// to the physical register required by the function's calling convention.
// Non-register operands (and non-register return definitions) are invalid.
Error RACFGBuilder::onRet(FuncRetNode* funcRet, RAInstBuilder& ib) noexcept {
  const FuncDetail& funcDetail = _pass->func()->detail();
  const Operand* operands = funcRet->operands();
  uint32_t operandCount = funcRet->opCount();

  for (uint32_t opIndex = 0; opIndex < operandCount; opIndex++) {
    const Operand& op = operands[opIndex];
    if (op.isNone())
      continue;

    // The function signature must define this return value as a register.
    const FuncValue& ret = funcDetail.ret(opIndex);
    if (ASMJIT_UNLIKELY(!ret.isReg()))
      return DebugUtils::errored(kErrorInvalidAssignment);

    // Only register operands are accepted as return values.
    if (!op.isReg())
      return DebugUtils::errored(kErrorInvalidAssignment);

    const Reg& reg = op.as<Reg>();
    uint32_t vIndex = Operand::virtIdToIndex(reg.id());

    if (vIndex < Operand::kVirtIdCount) {
      RAWorkReg* workReg;
      ASMJIT_PROPAGATE(_pass->virtIndexAsWorkReg(vIndex, &workReg));

      RegGroup group = workReg->group();
      RegMask allocable = _pass->_availableRegs[group];
      // Tie the work register as a USE|READ at the convention's return id.
      ASMJIT_PROPAGATE(ib.add(workReg, RATiedFlags::kUse | RATiedFlags::kRead, allocable, ret.regId(), 0, 0, BaseReg::kIdBad, 0));
    }
  }

  return kErrorOk;
}
|
|
579
|
+
|
|
580
|
+
// a64::ARMRAPass - Construction & Destruction
|
|
581
|
+
// ===========================================
|
|
582
|
+
|
|
583
|
+
// Constructs the AArch64 register-allocation pass and points the generic
// emit-helper interface at the architecture-specific helper instance.
ARMRAPass::ARMRAPass() noexcept
  : BaseRAPass() { _iEmitHelper = &_emitHelper; }
ARMRAPass::~ARMRAPass() noexcept {}
|
|
586
|
+
|
|
587
|
+
// a64::ARMRAPass - OnInit / OnDone
|
|
588
|
+
// ================================
|
|
589
|
+
|
|
590
|
+
void ARMRAPass::onInit() noexcept {
|
|
591
|
+
Arch arch = cc()->arch();
|
|
592
|
+
|
|
593
|
+
_emitHelper._emitter = _cb;
|
|
594
|
+
|
|
595
|
+
_archTraits = &ArchTraits::byArch(arch);
|
|
596
|
+
_physRegCount.set(RegGroup::kGp, 32);
|
|
597
|
+
_physRegCount.set(RegGroup::kVec, 32);
|
|
598
|
+
_physRegCount.set(RegGroup::kExtraVirt2, 0);
|
|
599
|
+
_physRegCount.set(RegGroup::kExtraVirt3, 0);
|
|
600
|
+
_buildPhysIndex();
|
|
601
|
+
|
|
602
|
+
_availableRegCount = _physRegCount;
|
|
603
|
+
_availableRegs[RegGroup::kGp] = Support::lsbMask<uint32_t>(_physRegCount.get(RegGroup::kGp));
|
|
604
|
+
_availableRegs[RegGroup::kVec] = Support::lsbMask<uint32_t>(_physRegCount.get(RegGroup::kVec));
|
|
605
|
+
_availableRegs[RegGroup::kExtraVirt3] = Support::lsbMask<uint32_t>(_physRegCount.get(RegGroup::kExtraVirt2));
|
|
606
|
+
_availableRegs[RegGroup::kExtraVirt3] = Support::lsbMask<uint32_t>(_physRegCount.get(RegGroup::kExtraVirt3));
|
|
607
|
+
|
|
608
|
+
_scratchRegIndexes[0] = uint8_t(27);
|
|
609
|
+
_scratchRegIndexes[1] = uint8_t(28);
|
|
610
|
+
|
|
611
|
+
// The architecture specific setup makes implicitly all registers available. So
|
|
612
|
+
// make unavailable all registers that are special and cannot be used in general.
|
|
613
|
+
bool hasFP = _func->frame().hasPreservedFP();
|
|
614
|
+
|
|
615
|
+
if (hasFP)
|
|
616
|
+
makeUnavailable(RegGroup::kGp, Gp::kIdFp);
|
|
617
|
+
|
|
618
|
+
makeUnavailable(RegGroup::kGp, Gp::kIdSp);
|
|
619
|
+
makeUnavailable(RegGroup::kGp, Gp::kIdOs); // OS-specific use, usually TLS.
|
|
620
|
+
|
|
621
|
+
_sp = sp;
|
|
622
|
+
_fp = x29;
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
// Nothing to clean up after the RA pass finishes on AArch64.
void ARMRAPass::onDone() noexcept {}
|
|
626
|
+
|
|
627
|
+
// a64::ARMRAPass - BuildCFG
|
|
628
|
+
// =========================
|
|
629
|
+
|
|
630
|
+
// Builds the control-flow graph of the function being compiled by delegating
// to the architecture-specific CFG builder.
Error ARMRAPass::buildCFG() noexcept {
  RACFGBuilder cfgBuilder(this);
  return cfgBuilder.run();
}
|
|
633
|
+
|
|
634
|
+
// a64::ARMRAPass - Rewrite
|
|
635
|
+
// ========================
|
|
636
|
+
|
|
637
|
+
// Rewrites the node range [first, stop) after register allocation:
//   1. replaces virtual register ids with the allocated physical ids,
//   2. removes/patches FuncRet nodes (jump to the exit label when needed),
//   3. turns reg-home memory operands into [base + offset] stack addresses,
//   4. expands the `loadAddressOf()` ADR construct into MOV or ADD/SUB forms.
ASMJIT_FAVOR_SPEED Error ARMRAPass::_rewrite(BaseNode* first, BaseNode* stop) noexcept {
  uint32_t virtCount = cc()->_vRegArray.size();

  BaseNode* node = first;
  while (node != stop) {
    // Capture the successor now - `node` may be removed below.
    BaseNode* next = node->next();
    if (node->isInst()) {
      InstNode* inst = node->as<InstNode>();
      RAInst* raInst = node->passData<RAInst>();

      Operand* operands = inst->operands();
      uint32_t opCount = inst->opCount();

      uint32_t i;

      // Rewrite virtual registers into physical registers.
      if (raInst) {
        // If the instruction contains pass data (raInst) then it was a subject
        // for register allocation and must be rewritten to use physical regs.
        RATiedReg* tiedRegs = raInst->tiedRegs();
        uint32_t tiedCount = raInst->tiedCount();

        for (i = 0; i < tiedCount; i++) {
          RATiedReg* tiedReg = &tiedRegs[i];

          // Each set bit in the rewrite mask is an operand-id slot to patch.
          Support::BitWordIterator<uint32_t> useIt(tiedReg->useRewriteMask());
          uint32_t useId = tiedReg->useId();
          while (useIt.hasNext())
            inst->rewriteIdAtIndex(useIt.next(), useId);

          Support::BitWordIterator<uint32_t> outIt(tiedReg->outRewriteMask());
          uint32_t outId = tiedReg->outId();
          while (outIt.hasNext())
            inst->rewriteIdAtIndex(outIt.next(), outId);
        }

        // This data is allocated by Zone passed to `runOnFunction()`, which
        // will be reset after the RA pass finishes. So reset this data to
        // prevent having a dead pointer after the RA pass is complete.
        node->resetPassData();

        if (ASMJIT_UNLIKELY(node->type() != NodeType::kInst)) {
          // FuncRet terminates the flow, it must either be removed if the exit
          // label is next to it (optimization) or patched to an architecture
          // dependent jump instruction that jumps to the function's exit before
          // the epilog.
          if (node->type() == NodeType::kFuncRet) {
            RABlock* block = raInst->block();
            if (!isNextTo(node, _func->exitNode())) {
              cc()->_setCursor(node->prev());
              ASMJIT_PROPAGATE(emitJump(_func->exitNode()->label()));
            }

            // The FuncRet node itself is removed; its block now ends at the
            // previous node (or the jump emitted above).
            BaseNode* prev = node->prev();
            cc()->removeNode(node);
            block->setLast(prev);
          }
        }
      }

      // Rewrite stack slot addresses.
      for (i = 0; i < opCount; i++) {
        Operand& op = operands[i];
        if (op.isMem()) {
          BaseMem& mem = op.as<BaseMem>();
          if (mem.isRegHome()) {
            uint32_t virtIndex = Operand::virtIdToIndex(mem.baseId());
            if (ASMJIT_UNLIKELY(virtIndex >= virtCount))
              return DebugUtils::errored(kErrorInvalidVirtId);

            VirtReg* virtReg = cc()->virtRegByIndex(virtIndex);
            RAWorkReg* workReg = virtReg->workReg();
            ASMJIT_ASSERT(workReg != nullptr);

            RAStackSlot* slot = workReg->stackSlot();
            int32_t offset = slot->offset();

            // Replace the virtual base with the slot's base register and
            // fold the slot offset into the memory operand.
            mem._setBase(_sp.type(), slot->baseRegId());
            mem.clearRegHome();
            mem.addOffsetLo32(offset);
          }
        }
      }

      // Rewrite `loadAddressOf()` construct.
      if (inst->realId() == Inst::kIdAdr && inst->opCount() == 2 && inst->op(1).isMem()) {
        BaseMem mem = inst->op(1).as<BaseMem>();
        int64_t offset = mem.offset();

        if (!mem.hasBaseOrIndex()) {
          // Absolute address: turn ADR into MOV with the raw offset.
          inst->setId(Inst::kIdMov);
          inst->setOp(1, Imm(offset));
        }
        else {
          if (mem.hasIndex())
            return DebugUtils::errored(kErrorInvalidAddressIndex);

          GpX dst(inst->op(0).as<Gp>().id());
          GpX base(mem.baseId());

          // base +/- |offset| depending on the sign of the offset.
          InstId arithInstId = offset < 0 ? Inst::kIdSub : Inst::kIdAdd;
          uint64_t absOffset = offset < 0 ? Support::neg(uint64_t(offset)) : uint64_t(offset);

          inst->setId(arithInstId);
          inst->setOpCount(3);
          inst->setOp(1, base);
          inst->setOp(2, Imm(absOffset));

          // Use two operations if the offset cannot be encoded with ADD/SUB.
          // (AArch64 ADD/SUB immediates are 12 bits, optionally shifted left
          // by 12 - hence the 0xFFF / 0xFFF000 masks below.)
          if (absOffset > 0xFFFu && (absOffset & ~uint64_t(0xFFF000u)) != 0) {
            if (absOffset <= 0xFFFFFFu) {
              // Split into low-12 (emitted before `inst`) and high-12 parts.
              cc()->_setCursor(inst->prev());
              ASMJIT_PROPAGATE(cc()->emit(arithInstId, dst, base, Imm(absOffset & 0xFFFu)));

              inst->setOp(1, dst);
              inst->setOp(2, Imm(absOffset & 0xFFF000u));
            }
            else {
              // Too large for two immediates - materialize the offset in the
              // destination register first, then add/sub it to/from the base.
              cc()->_setCursor(inst->prev());
              ASMJIT_PROPAGATE(cc()->emit(Inst::kIdMov, inst->op(0), Imm(absOffset)));

              inst->setOp(1, base);
              inst->setOp(2, dst);
            }
          }
        }
      }
    }

    node = next;
  }

  return kErrorOk;
}
|
|
771
|
+
|
|
772
|
+
// a64::ARMRAPass - Prolog & Epilog
|
|
773
|
+
// ================================
|
|
774
|
+
|
|
775
|
+
// Updates the function's stack frame before prolog/epilog insertion. If the
// function calls other functions the link register is marked dirty so that
// it is preserved across the call; the rest is handled by the base class.
Error ARMRAPass::updateStackFrame() noexcept {
  FuncFrame& frame = _func->frame();

  if (frame.hasFuncCalls()) {
    frame.addDirtyRegs(RegGroup::kGp, Support::bitMask(Gp::kIdLr));
  }

  return BaseRAPass::updateStackFrame();
}
|
|
781
|
+
|
|
782
|
+
// a64::ARMRAPass - OnEmit
|
|
783
|
+
// =======================
|
|
784
|
+
|
|
785
|
+
// Emits a register-to-register move of the work register `workId` from
// physical register `srcPhysId` to `dstPhysId`, annotating the instruction
// when RA annotation diagnostics are enabled.
Error ARMRAPass::emitMove(uint32_t workId, uint32_t dstPhysId, uint32_t srcPhysId) noexcept {
  RAWorkReg* workReg = workRegById(workId);
  BaseReg dstReg(workReg->signature(), dstPhysId);
  BaseReg srcReg(workReg->signature(), srcPhysId);

  const char* annotation = nullptr;

#ifndef ASMJIT_NO_LOGGING
  if (hasDiagnosticOption(DiagnosticOptions::kRAAnnotate)) {
    _tmpString.assignFormat("<MOVE> %s", workRegById(workId)->name());
    annotation = _tmpString.data();
  }
#endif

  return _emitHelper.emitRegMove(dstReg, srcReg, workReg->typeId(), annotation);
}
|
|
801
|
+
|
|
802
|
+
// Register swap is not implemented for AArch64 - this always fails with
// kErrorInvalidState (the allocator must resolve cycles via moves instead).
Error ARMRAPass::emitSwap(uint32_t aWorkId, uint32_t aPhysId, uint32_t bWorkId, uint32_t bPhysId) noexcept {
  DebugUtils::unused(aWorkId, aPhysId, bWorkId, bPhysId);
  return DebugUtils::errored(kErrorInvalidState);
}
|
|
806
|
+
|
|
807
|
+
// Emits a load of the work register `workId` from its home stack slot into
// physical register `dstPhysId`, annotating the instruction when RA
// annotation diagnostics are enabled.
Error ARMRAPass::emitLoad(uint32_t workId, uint32_t dstPhysId) noexcept {
  RAWorkReg* workReg = workRegById(workId);
  BaseReg dstReg(workReg->signature(), dstPhysId);
  BaseMem srcMem(workRegAsMem(workReg));

  const char* annotation = nullptr;

#ifndef ASMJIT_NO_LOGGING
  if (hasDiagnosticOption(DiagnosticOptions::kRAAnnotate)) {
    _tmpString.assignFormat("<LOAD> %s", workRegById(workId)->name());
    annotation = _tmpString.data();
  }
#endif

  return _emitHelper.emitRegMove(dstReg, srcMem, workReg->typeId(), annotation);
}
|
|
823
|
+
|
|
824
|
+
// Emits a store of the work register `workId` from physical register
// `srcPhysId` to its home stack slot, annotating the instruction when RA
// annotation diagnostics are enabled.
Error ARMRAPass::emitSave(uint32_t workId, uint32_t srcPhysId) noexcept {
  RAWorkReg* workReg = workRegById(workId);
  BaseMem dstMem(workRegAsMem(workReg));
  BaseReg srcReg(workReg->signature(), srcPhysId);

  const char* annotation = nullptr;

#ifndef ASMJIT_NO_LOGGING
  if (hasDiagnosticOption(DiagnosticOptions::kRAAnnotate)) {
    _tmpString.assignFormat("<SAVE> %s", workRegById(workId)->name());
    annotation = _tmpString.data();
  }
#endif

  return _emitHelper.emitRegMove(dstMem, srcReg, workReg->typeId(), annotation);
}
|
|
840
|
+
|
|
841
|
+
// Emits an unconditional branch to `label` (used by `_rewrite()` to patch
// FuncRet nodes that are not adjacent to the function's exit label).
Error ARMRAPass::emitJump(const Label& label) noexcept {
  return cc()->b(label);
}
|
|
844
|
+
|
|
845
|
+
// No instructions need to be emitted before a function call on AArch64, so
// this is a no-op that always succeeds.
Error ARMRAPass::emitPreCall(InvokeNode* invokeNode) noexcept {
  DebugUtils::unused(invokeNode);
  return kErrorOk;
}
|
|
849
|
+
|
|
850
|
+
ASMJIT_END_SUB_NAMESPACE
|
|
851
|
+
|
|
852
|
+
#endif // !ASMJIT_NO_AARCH64 && !ASMJIT_NO_COMPILER
|