crabstone 3.0.3
- checksums.yaml +7 -0
- data/CHANGES.md +61 -0
- data/LICENSE +25 -0
- data/MANIFEST +312 -0
- data/README.md +103 -0
- data/Rakefile +27 -0
- data/bin/genconst +66 -0
- data/bin/genreg +99 -0
- data/crabstone.gemspec +27 -0
- data/examples/hello_world.rb +43 -0
- data/lib/arch/arm.rb +128 -0
- data/lib/arch/arm64.rb +167 -0
- data/lib/arch/arm64_const.rb +1055 -0
- data/lib/arch/arm64_registers.rb +295 -0
- data/lib/arch/arm_const.rb +777 -0
- data/lib/arch/arm_registers.rb +149 -0
- data/lib/arch/mips.rb +78 -0
- data/lib/arch/mips_const.rb +850 -0
- data/lib/arch/mips_registers.rb +208 -0
- data/lib/arch/ppc.rb +90 -0
- data/lib/arch/ppc_const.rb +1181 -0
- data/lib/arch/ppc_registers.rb +209 -0
- data/lib/arch/sparc.rb +79 -0
- data/lib/arch/sparc_const.rb +461 -0
- data/lib/arch/sparc_registers.rb +121 -0
- data/lib/arch/systemz.rb +79 -0
- data/lib/arch/sysz_const.rb +779 -0
- data/lib/arch/sysz_registers.rb +66 -0
- data/lib/arch/x86.rb +107 -0
- data/lib/arch/x86_const.rb +1698 -0
- data/lib/arch/x86_registers.rb +265 -0
- data/lib/arch/xcore.rb +78 -0
- data/lib/arch/xcore_const.rb +185 -0
- data/lib/arch/xcore_registers.rb +57 -0
- data/lib/crabstone.rb +564 -0
- data/test/MC/AArch64/basic-a64-instructions.s.cs +2014 -0
- data/test/MC/AArch64/gicv3-regs.s.cs +111 -0
- data/test/MC/AArch64/neon-2velem.s.cs +113 -0
- data/test/MC/AArch64/neon-3vdiff.s.cs +143 -0
- data/test/MC/AArch64/neon-aba-abd.s.cs +28 -0
- data/test/MC/AArch64/neon-across.s.cs +40 -0
- data/test/MC/AArch64/neon-add-pairwise.s.cs +11 -0
- data/test/MC/AArch64/neon-add-sub-instructions.s.cs +21 -0
- data/test/MC/AArch64/neon-bitwise-instructions.s.cs +17 -0
- data/test/MC/AArch64/neon-compare-instructions.s.cs +136 -0
- data/test/MC/AArch64/neon-crypto.s.cs +15 -0
- data/test/MC/AArch64/neon-extract.s.cs +3 -0
- data/test/MC/AArch64/neon-facge-facgt.s.cs +13 -0
- data/test/MC/AArch64/neon-frsqrt-frecp.s.cs +7 -0
- data/test/MC/AArch64/neon-halving-add-sub.s.cs +25 -0
- data/test/MC/AArch64/neon-max-min-pairwise.s.cs +37 -0
- data/test/MC/AArch64/neon-max-min.s.cs +37 -0
- data/test/MC/AArch64/neon-mla-mls-instructions.s.cs +19 -0
- data/test/MC/AArch64/neon-mov.s.cs +74 -0
- data/test/MC/AArch64/neon-mul-div-instructions.s.cs +24 -0
- data/test/MC/AArch64/neon-perm.s.cs +43 -0
- data/test/MC/AArch64/neon-rounding-halving-add.s.cs +13 -0
- data/test/MC/AArch64/neon-rounding-shift.s.cs +15 -0
- data/test/MC/AArch64/neon-saturating-add-sub.s.cs +29 -0
- data/test/MC/AArch64/neon-saturating-rounding-shift.s.cs +15 -0
- data/test/MC/AArch64/neon-saturating-shift.s.cs +15 -0
- data/test/MC/AArch64/neon-scalar-abs.s.cs +8 -0
- data/test/MC/AArch64/neon-scalar-add-sub.s.cs +3 -0
- data/test/MC/AArch64/neon-scalar-by-elem-mla.s.cs +13 -0
- data/test/MC/AArch64/neon-scalar-by-elem-mul.s.cs +13 -0
- data/test/MC/AArch64/neon-scalar-by-elem-saturating-mla.s.cs +15 -0
- data/test/MC/AArch64/neon-scalar-by-elem-saturating-mul.s.cs +18 -0
- data/test/MC/AArch64/neon-scalar-compare.s.cs +12 -0
- data/test/MC/AArch64/neon-scalar-cvt.s.cs +34 -0
- data/test/MC/AArch64/neon-scalar-dup.s.cs +23 -0
- data/test/MC/AArch64/neon-scalar-extract-narrow.s.cs +10 -0
- data/test/MC/AArch64/neon-scalar-fp-compare.s.cs +21 -0
- data/test/MC/AArch64/neon-scalar-mul.s.cs +13 -0
- data/test/MC/AArch64/neon-scalar-neg.s.cs +6 -0
- data/test/MC/AArch64/neon-scalar-recip.s.cs +11 -0
- data/test/MC/AArch64/neon-scalar-reduce-pairwise.s.cs +3 -0
- data/test/MC/AArch64/neon-scalar-rounding-shift.s.cs +3 -0
- data/test/MC/AArch64/neon-scalar-saturating-add-sub.s.cs +25 -0
- data/test/MC/AArch64/neon-scalar-saturating-rounding-shift.s.cs +9 -0
- data/test/MC/AArch64/neon-scalar-saturating-shift.s.cs +9 -0
- data/test/MC/AArch64/neon-scalar-shift-imm.s.cs +42 -0
- data/test/MC/AArch64/neon-scalar-shift.s.cs +3 -0
- data/test/MC/AArch64/neon-shift-left-long.s.cs +13 -0
- data/test/MC/AArch64/neon-shift.s.cs +22 -0
- data/test/MC/AArch64/neon-simd-copy.s.cs +42 -0
- data/test/MC/AArch64/neon-simd-ldst-multi-elem.s.cs +197 -0
- data/test/MC/AArch64/neon-simd-ldst-one-elem.s.cs +129 -0
- data/test/MC/AArch64/neon-simd-misc.s.cs +213 -0
- data/test/MC/AArch64/neon-simd-post-ldst-multi-elem.s.cs +107 -0
- data/test/MC/AArch64/neon-simd-shift.s.cs +151 -0
- data/test/MC/AArch64/neon-tbl.s.cs +21 -0
- data/test/MC/AArch64/trace-regs.s.cs +383 -0
- data/test/MC/ARM/arm-aliases.s.cs +7 -0
- data/test/MC/ARM/arm-arithmetic-aliases.s.cs +50 -0
- data/test/MC/ARM/arm-it-block.s.cs +2 -0
- data/test/MC/ARM/arm-memory-instructions.s.cs +138 -0
- data/test/MC/ARM/arm-shift-encoding.s.cs +50 -0
- data/test/MC/ARM/arm-thumb-trustzone.s.cs +3 -0
- data/test/MC/ARM/arm-trustzone.s.cs +3 -0
- data/test/MC/ARM/arm_addrmode2.s.cs +15 -0
- data/test/MC/ARM/arm_addrmode3.s.cs +9 -0
- data/test/MC/ARM/arm_instructions.s.cs +25 -0
- data/test/MC/ARM/basic-arm-instructions-v8.s.cs +10 -0
- data/test/MC/ARM/basic-arm-instructions.s.cs +997 -0
- data/test/MC/ARM/basic-thumb-instructions.s.cs +130 -0
- data/test/MC/ARM/basic-thumb2-instructions-v8.s.cs +1 -0
- data/test/MC/ARM/basic-thumb2-instructions.s.cs +1242 -0
- data/test/MC/ARM/crc32-thumb.s.cs +7 -0
- data/test/MC/ARM/crc32.s.cs +7 -0
- data/test/MC/ARM/dot-req.s.cs +3 -0
- data/test/MC/ARM/fp-armv8.s.cs +52 -0
- data/test/MC/ARM/idiv-thumb.s.cs +3 -0
- data/test/MC/ARM/idiv.s.cs +3 -0
- data/test/MC/ARM/load-store-acquire-release-v8-thumb.s.cs +15 -0
- data/test/MC/ARM/load-store-acquire-release-v8.s.cs +15 -0
- data/test/MC/ARM/mode-switch.s.cs +7 -0
- data/test/MC/ARM/neon-abs-encoding.s.cs +15 -0
- data/test/MC/ARM/neon-absdiff-encoding.s.cs +39 -0
- data/test/MC/ARM/neon-add-encoding.s.cs +119 -0
- data/test/MC/ARM/neon-bitcount-encoding.s.cs +15 -0
- data/test/MC/ARM/neon-bitwise-encoding.s.cs +126 -0
- data/test/MC/ARM/neon-cmp-encoding.s.cs +88 -0
- data/test/MC/ARM/neon-convert-encoding.s.cs +27 -0
- data/test/MC/ARM/neon-crypto.s.cs +16 -0
- data/test/MC/ARM/neon-dup-encoding.s.cs +13 -0
- data/test/MC/ARM/neon-minmax-encoding.s.cs +57 -0
- data/test/MC/ARM/neon-mov-encoding.s.cs +76 -0
- data/test/MC/ARM/neon-mul-accum-encoding.s.cs +39 -0
- data/test/MC/ARM/neon-mul-encoding.s.cs +72 -0
- data/test/MC/ARM/neon-neg-encoding.s.cs +15 -0
- data/test/MC/ARM/neon-pairwise-encoding.s.cs +47 -0
- data/test/MC/ARM/neon-reciprocal-encoding.s.cs +13 -0
- data/test/MC/ARM/neon-reverse-encoding.s.cs +13 -0
- data/test/MC/ARM/neon-satshift-encoding.s.cs +75 -0
- data/test/MC/ARM/neon-shift-encoding.s.cs +238 -0
- data/test/MC/ARM/neon-shiftaccum-encoding.s.cs +97 -0
- data/test/MC/ARM/neon-shuffle-encoding.s.cs +59 -0
- data/test/MC/ARM/neon-sub-encoding.s.cs +82 -0
- data/test/MC/ARM/neon-table-encoding.s.cs +9 -0
- data/test/MC/ARM/neon-v8.s.cs +38 -0
- data/test/MC/ARM/neon-vld-encoding.s.cs +213 -0
- data/test/MC/ARM/neon-vst-encoding.s.cs +120 -0
- data/test/MC/ARM/neon-vswp.s.cs +3 -0
- data/test/MC/ARM/neont2-abs-encoding.s.cs +15 -0
- data/test/MC/ARM/neont2-absdiff-encoding.s.cs +39 -0
- data/test/MC/ARM/neont2-add-encoding.s.cs +65 -0
- data/test/MC/ARM/neont2-bitcount-encoding.s.cs +15 -0
- data/test/MC/ARM/neont2-bitwise-encoding.s.cs +15 -0
- data/test/MC/ARM/neont2-cmp-encoding.s.cs +17 -0
- data/test/MC/ARM/neont2-convert-encoding.s.cs +19 -0
- data/test/MC/ARM/neont2-dup-encoding.s.cs +19 -0
- data/test/MC/ARM/neont2-minmax-encoding.s.cs +57 -0
- data/test/MC/ARM/neont2-mov-encoding.s.cs +58 -0
- data/test/MC/ARM/neont2-mul-accum-encoding.s.cs +41 -0
- data/test/MC/ARM/neont2-mul-encoding.s.cs +31 -0
- data/test/MC/ARM/neont2-neg-encoding.s.cs +15 -0
- data/test/MC/ARM/neont2-pairwise-encoding.s.cs +43 -0
- data/test/MC/ARM/neont2-reciprocal-encoding.s.cs +13 -0
- data/test/MC/ARM/neont2-reverse-encoding.s.cs +13 -0
- data/test/MC/ARM/neont2-satshift-encoding.s.cs +75 -0
- data/test/MC/ARM/neont2-shift-encoding.s.cs +80 -0
- data/test/MC/ARM/neont2-shiftaccum-encoding.s.cs +97 -0
- data/test/MC/ARM/neont2-shuffle-encoding.s.cs +23 -0
- data/test/MC/ARM/neont2-sub-encoding.s.cs +23 -0
- data/test/MC/ARM/neont2-table-encoding.s.cs +9 -0
- data/test/MC/ARM/neont2-vld-encoding.s.cs +51 -0
- data/test/MC/ARM/neont2-vst-encoding.s.cs +48 -0
- data/test/MC/ARM/simple-fp-encoding.s.cs +157 -0
- data/test/MC/ARM/thumb-fp-armv8.s.cs +51 -0
- data/test/MC/ARM/thumb-hints.s.cs +12 -0
- data/test/MC/ARM/thumb-neon-crypto.s.cs +16 -0
- data/test/MC/ARM/thumb-neon-v8.s.cs +38 -0
- data/test/MC/ARM/thumb-shift-encoding.s.cs +19 -0
- data/test/MC/ARM/thumb.s.cs +19 -0
- data/test/MC/ARM/thumb2-b.w-encodingT4.s.cs +2 -0
- data/test/MC/ARM/thumb2-branches.s.cs +85 -0
- data/test/MC/ARM/thumb2-mclass.s.cs +41 -0
- data/test/MC/ARM/thumb2-narrow-dp.ll.cs +379 -0
- data/test/MC/ARM/thumb2-pldw.s.cs +2 -0
- data/test/MC/ARM/vfp4-thumb.s.cs +13 -0
- data/test/MC/ARM/vfp4.s.cs +13 -0
- data/test/MC/ARM/vpush-vpop-thumb.s.cs +9 -0
- data/test/MC/ARM/vpush-vpop.s.cs +9 -0
- data/test/MC/Mips/hilo-addressing.s.cs +4 -0
- data/test/MC/Mips/micromips-alu-instructions-EB.s.cs +33 -0
- data/test/MC/Mips/micromips-alu-instructions.s.cs +33 -0
- data/test/MC/Mips/micromips-branch-instructions-EB.s.cs +11 -0
- data/test/MC/Mips/micromips-branch-instructions.s.cs +11 -0
- data/test/MC/Mips/micromips-expansions.s.cs +20 -0
- data/test/MC/Mips/micromips-jump-instructions-EB.s.cs +5 -0
- data/test/MC/Mips/micromips-jump-instructions.s.cs +6 -0
- data/test/MC/Mips/micromips-loadstore-instructions-EB.s.cs +9 -0
- data/test/MC/Mips/micromips-loadstore-instructions.s.cs +9 -0
- data/test/MC/Mips/micromips-loadstore-unaligned-EB.s.cs +5 -0
- data/test/MC/Mips/micromips-loadstore-unaligned.s.cs +5 -0
- data/test/MC/Mips/micromips-movcond-instructions-EB.s.cs +5 -0
- data/test/MC/Mips/micromips-movcond-instructions.s.cs +5 -0
- data/test/MC/Mips/micromips-multiply-instructions-EB.s.cs +5 -0
- data/test/MC/Mips/micromips-multiply-instructions.s.cs +5 -0
- data/test/MC/Mips/micromips-shift-instructions-EB.s.cs +9 -0
- data/test/MC/Mips/micromips-shift-instructions.s.cs +9 -0
- data/test/MC/Mips/micromips-trap-instructions-EB.s.cs +13 -0
- data/test/MC/Mips/micromips-trap-instructions.s.cs +13 -0
- data/test/MC/Mips/mips-alu-instructions.s.cs +53 -0
- data/test/MC/Mips/mips-control-instructions-64.s.cs +33 -0
- data/test/MC/Mips/mips-control-instructions.s.cs +33 -0
- data/test/MC/Mips/mips-coprocessor-encodings.s.cs +17 -0
- data/test/MC/Mips/mips-dsp-instructions.s.cs +43 -0
- data/test/MC/Mips/mips-expansions.s.cs +20 -0
- data/test/MC/Mips/mips-fpu-instructions.s.cs +93 -0
- data/test/MC/Mips/mips-jump-instructions.s.cs +1 -0
- data/test/MC/Mips/mips-memory-instructions.s.cs +17 -0
- data/test/MC/Mips/mips-register-names.s.cs +33 -0
- data/test/MC/Mips/mips64-alu-instructions.s.cs +47 -0
- data/test/MC/Mips/mips64-instructions.s.cs +3 -0
- data/test/MC/Mips/mips64-register-names.s.cs +33 -0
- data/test/MC/Mips/mips_directives.s.cs +12 -0
- data/test/MC/Mips/nabi-regs.s.cs +12 -0
- data/test/MC/Mips/set-at-directive.s.cs +6 -0
- data/test/MC/Mips/test_2r.s.cs +16 -0
- data/test/MC/Mips/test_2rf.s.cs +33 -0
- data/test/MC/Mips/test_3r.s.cs +243 -0
- data/test/MC/Mips/test_3rf.s.cs +83 -0
- data/test/MC/Mips/test_bit.s.cs +49 -0
- data/test/MC/Mips/test_cbranch.s.cs +11 -0
- data/test/MC/Mips/test_ctrlregs.s.cs +33 -0
- data/test/MC/Mips/test_elm.s.cs +16 -0
- data/test/MC/Mips/test_elm_insert.s.cs +4 -0
- data/test/MC/Mips/test_elm_insve.s.cs +5 -0
- data/test/MC/Mips/test_i10.s.cs +5 -0
- data/test/MC/Mips/test_i5.s.cs +45 -0
- data/test/MC/Mips/test_i8.s.cs +11 -0
- data/test/MC/Mips/test_lsa.s.cs +5 -0
- data/test/MC/Mips/test_mi10.s.cs +24 -0
- data/test/MC/Mips/test_vec.s.cs +8 -0
- data/test/MC/PowerPC/ppc64-encoding-bookII.s.cs +25 -0
- data/test/MC/PowerPC/ppc64-encoding-bookIII.s.cs +35 -0
- data/test/MC/PowerPC/ppc64-encoding-ext.s.cs +535 -0
- data/test/MC/PowerPC/ppc64-encoding-fp.s.cs +110 -0
- data/test/MC/PowerPC/ppc64-encoding-vmx.s.cs +170 -0
- data/test/MC/PowerPC/ppc64-encoding.s.cs +202 -0
- data/test/MC/PowerPC/ppc64-operands.s.cs +32 -0
- data/test/MC/README +6 -0
- data/test/MC/Sparc/sparc-alu-instructions.s.cs +47 -0
- data/test/MC/Sparc/sparc-atomic-instructions.s.cs +7 -0
- data/test/MC/Sparc/sparc-ctrl-instructions.s.cs +11 -0
- data/test/MC/Sparc/sparc-fp-instructions.s.cs +59 -0
- data/test/MC/Sparc/sparc-mem-instructions.s.cs +25 -0
- data/test/MC/Sparc/sparc-vis.s.cs +2 -0
- data/test/MC/Sparc/sparc64-alu-instructions.s.cs +13 -0
- data/test/MC/Sparc/sparc64-ctrl-instructions.s.cs +102 -0
- data/test/MC/Sparc/sparcv8-instructions.s.cs +7 -0
- data/test/MC/Sparc/sparcv9-instructions.s.cs +1 -0
- data/test/MC/SystemZ/insn-good-z196.s.cs +589 -0
- data/test/MC/SystemZ/insn-good.s.cs +2265 -0
- data/test/MC/SystemZ/regs-good.s.cs +45 -0
- data/test/MC/X86/3DNow.s.cs +29 -0
- data/test/MC/X86/address-size.s.cs +5 -0
- data/test/MC/X86/avx512-encodings.s.cs +12 -0
- data/test/MC/X86/intel-syntax-encoding.s.cs +30 -0
- data/test/MC/X86/x86-32-avx.s.cs +833 -0
- data/test/MC/X86/x86-32-fma3.s.cs +169 -0
- data/test/MC/X86/x86-32-ms-inline-asm.s.cs +27 -0
- data/test/MC/X86/x86_64-avx-clmul-encoding.s.cs +11 -0
- data/test/MC/X86/x86_64-avx-encoding.s.cs +1058 -0
- data/test/MC/X86/x86_64-bmi-encoding.s.cs +51 -0
- data/test/MC/X86/x86_64-encoding.s.cs +59 -0
- data/test/MC/X86/x86_64-fma3-encoding.s.cs +169 -0
- data/test/MC/X86/x86_64-fma4-encoding.s.cs +98 -0
- data/test/MC/X86/x86_64-hle-encoding.s.cs +3 -0
- data/test/MC/X86/x86_64-imm-widths.s.cs +27 -0
- data/test/MC/X86/x86_64-rand-encoding.s.cs +13 -0
- data/test/MC/X86/x86_64-rtm-encoding.s.cs +4 -0
- data/test/MC/X86/x86_64-sse4a.s.cs +1 -0
- data/test/MC/X86/x86_64-tbm-encoding.s.cs +40 -0
- data/test/MC/X86/x86_64-xop-encoding.s.cs +152 -0
- data/test/README +6 -0
- data/test/test.rb +205 -0
- data/test/test.rb.SPEC +235 -0
- data/test/test_arm.rb +202 -0
- data/test/test_arm.rb.SPEC +275 -0
- data/test/test_arm64.rb +150 -0
- data/test/test_arm64.rb.SPEC +116 -0
- data/test/test_detail.rb +228 -0
- data/test/test_detail.rb.SPEC +322 -0
- data/test/test_exhaustive.rb +80 -0
- data/test/test_mips.rb +118 -0
- data/test/test_mips.rb.SPEC +91 -0
- data/test/test_ppc.rb +137 -0
- data/test/test_ppc.rb.SPEC +84 -0
- data/test/test_sanity.rb +83 -0
- data/test/test_skipdata.rb +111 -0
- data/test/test_skipdata.rb.SPEC +58 -0
- data/test/test_sparc.rb +113 -0
- data/test/test_sparc.rb.SPEC +116 -0
- data/test/test_sysz.rb +111 -0
- data/test/test_sysz.rb.SPEC +61 -0
- data/test/test_x86.rb +189 -0
- data/test/test_x86.rb.SPEC +579 -0
- data/test/test_xcore.rb +100 -0
- data/test/test_xcore.rb.SPEC +75 -0
- metadata +393 -0
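Each of the .s.cs corpora below pairs a raw instruction encoding with the disassembly Capstone is expected to produce, one vector per line in the form "bytes = mnemonic operands", with a leading "# CS_ARCH_*, CS_MODE_*" comment naming the architecture and mode. A minimal sketch of driving one such vector through crabstone, assuming the Disassembler API used by the bundled examples/hello_world.rb (the byte values are taken from the first hunk below):

  require 'crabstone'
  include Crabstone

  # Bytes for "vrecpe.u32 d16, d16" from neon-reciprocal-encoding.s.cs
  code = "\x20\x04\xfb\xf3".b   # force binary encoding before handing off to FFI

  cs = Disassembler.new(ARCH_ARM, MODE_ARM)
  cs.disasm(code, 0x1000).each do |insn|
    printf("0x%x:\t%s\t%s\n", insn.address, insn.mnemonic, insn.op_str)
  end
  cs.close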
data/test/MC/ARM/neon-reciprocal-encoding.s.cs
@@ -0,0 +1,13 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0x20,0x04,0xfb,0xf3 = vrecpe.u32 d16, d16
+0x60,0x04,0xfb,0xf3 = vrecpe.u32 q8, q8
+0x20,0x05,0xfb,0xf3 = vrecpe.f32 d16, d16
+0x60,0x05,0xfb,0xf3 = vrecpe.f32 q8, q8
+0xb1,0x0f,0x40,0xf2 = vrecps.f32 d16, d16, d17
+0xf2,0x0f,0x40,0xf2 = vrecps.f32 q8, q8, q9
+0xa0,0x04,0xfb,0xf3 = vrsqrte.u32 d16, d16
+0xe0,0x04,0xfb,0xf3 = vrsqrte.u32 q8, q8
+0xa0,0x05,0xfb,0xf3 = vrsqrte.f32 d16, d16
+0xe0,0x05,0xfb,0xf3 = vrsqrte.f32 q8, q8
+0xb1,0x0f,0x60,0xf2 = vrsqrts.f32 d16, d16, d17
+0xf2,0x0f,0x60,0xf2 = vrsqrts.f32 q8, q8, q9
data/test/MC/ARM/neon-reverse-encoding.s.cs
@@ -0,0 +1,13 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0x20,0x00,0xf0,0xf3 = vrev64.8 d16, d16
+0x20,0x00,0xf4,0xf3 = vrev64.16 d16, d16
+0x20,0x00,0xf8,0xf3 = vrev64.32 d16, d16
+0x60,0x00,0xf0,0xf3 = vrev64.8 q8, q8
+0x60,0x00,0xf4,0xf3 = vrev64.16 q8, q8
+0x60,0x00,0xf8,0xf3 = vrev64.32 q8, q8
+0xa0,0x00,0xf0,0xf3 = vrev32.8 d16, d16
+0xa0,0x00,0xf4,0xf3 = vrev32.16 d16, d16
+0xe0,0x00,0xf0,0xf3 = vrev32.8 q8, q8
+0xe0,0x00,0xf4,0xf3 = vrev32.16 q8, q8
+0x20,0x01,0xf0,0xf3 = vrev16.8 d16, d16
+0x60,0x01,0xf0,0xf3 = vrev16.8 q8, q8
data/test/MC/ARM/neon-satshift-encoding.s.cs
@@ -0,0 +1,75 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0xb0,0x04,0x41,0xf2 = vqshl.s8 d16, d16, d17
+0xb0,0x04,0x51,0xf2 = vqshl.s16 d16, d16, d17
+0xb0,0x04,0x61,0xf2 = vqshl.s32 d16, d16, d17
+0xb0,0x04,0x71,0xf2 = vqshl.s64 d16, d16, d17
+0xb0,0x04,0x41,0xf3 = vqshl.u8 d16, d16, d17
+0xb0,0x04,0x51,0xf3 = vqshl.u16 d16, d16, d17
+0xb0,0x04,0x61,0xf3 = vqshl.u32 d16, d16, d17
+0xb0,0x04,0x71,0xf3 = vqshl.u64 d16, d16, d17
+0xf0,0x04,0x42,0xf2 = vqshl.s8 q8, q8, q9
+0xf0,0x04,0x52,0xf2 = vqshl.s16 q8, q8, q9
+0xf0,0x04,0x62,0xf2 = vqshl.s32 q8, q8, q9
+0xf0,0x04,0x72,0xf2 = vqshl.s64 q8, q8, q9
+0xf0,0x04,0x42,0xf3 = vqshl.u8 q8, q8, q9
+0xf0,0x04,0x52,0xf3 = vqshl.u16 q8, q8, q9
+0xf0,0x04,0x62,0xf3 = vqshl.u32 q8, q8, q9
+0xf0,0x04,0x72,0xf3 = vqshl.u64 q8, q8, q9
+0x30,0x07,0xcf,0xf2 = vqshl.s8 d16, d16, #7
+0x30,0x07,0xdf,0xf2 = vqshl.s16 d16, d16, #15
+0x30,0x07,0xff,0xf2 = vqshl.s32 d16, d16, #31
+0xb0,0x07,0xff,0xf2 = vqshl.s64 d16, d16, #63
+0x30,0x07,0xcf,0xf3 = vqshl.u8 d16, d16, #7
+0x30,0x07,0xdf,0xf3 = vqshl.u16 d16, d16, #15
+0x30,0x07,0xff,0xf3 = vqshl.u32 d16, d16, #31
+0xb0,0x07,0xff,0xf3 = vqshl.u64 d16, d16, #63
+0x30,0x06,0xcf,0xf3 = vqshlu.s8 d16, d16, #7
+0x30,0x06,0xdf,0xf3 = vqshlu.s16 d16, d16, #15
+0x30,0x06,0xff,0xf3 = vqshlu.s32 d16, d16, #31
+0xb0,0x06,0xff,0xf3 = vqshlu.s64 d16, d16, #63
+0x70,0x07,0xcf,0xf2 = vqshl.s8 q8, q8, #7
+0x70,0x07,0xdf,0xf2 = vqshl.s16 q8, q8, #15
+0x70,0x07,0xff,0xf2 = vqshl.s32 q8, q8, #31
+0xf0,0x07,0xff,0xf2 = vqshl.s64 q8, q8, #63
+0x70,0x07,0xcf,0xf3 = vqshl.u8 q8, q8, #7
+0x70,0x07,0xdf,0xf3 = vqshl.u16 q8, q8, #15
+0x70,0x07,0xff,0xf3 = vqshl.u32 q8, q8, #31
+0xf0,0x07,0xff,0xf3 = vqshl.u64 q8, q8, #63
+0x70,0x06,0xcf,0xf3 = vqshlu.s8 q8, q8, #7
+0x70,0x06,0xdf,0xf3 = vqshlu.s16 q8, q8, #15
+0x70,0x06,0xff,0xf3 = vqshlu.s32 q8, q8, #31
+0xf0,0x06,0xff,0xf3 = vqshlu.s64 q8, q8, #63
+0xb0,0x05,0x41,0xf2 = vqrshl.s8 d16, d16, d17
+0xb0,0x05,0x51,0xf2 = vqrshl.s16 d16, d16, d17
+0xb0,0x05,0x61,0xf2 = vqrshl.s32 d16, d16, d17
+0xb0,0x05,0x71,0xf2 = vqrshl.s64 d16, d16, d17
+0xb0,0x05,0x41,0xf3 = vqrshl.u8 d16, d16, d17
+0xb0,0x05,0x51,0xf3 = vqrshl.u16 d16, d16, d17
+0xb0,0x05,0x61,0xf3 = vqrshl.u32 d16, d16, d17
+0xb0,0x05,0x71,0xf3 = vqrshl.u64 d16, d16, d17
+0xf0,0x05,0x42,0xf2 = vqrshl.s8 q8, q8, q9
+0xf0,0x05,0x52,0xf2 = vqrshl.s16 q8, q8, q9
+0xf0,0x05,0x62,0xf2 = vqrshl.s32 q8, q8, q9
+0xf0,0x05,0x72,0xf2 = vqrshl.s64 q8, q8, q9
+0xf0,0x05,0x42,0xf3 = vqrshl.u8 q8, q8, q9
+0xf0,0x05,0x52,0xf3 = vqrshl.u16 q8, q8, q9
+0xf0,0x05,0x62,0xf3 = vqrshl.u32 q8, q8, q9
+0xf0,0x05,0x72,0xf3 = vqrshl.u64 q8, q8, q9
+0x30,0x09,0xc8,0xf2 = vqshrn.s16 d16, q8, #8
+0x30,0x09,0xd0,0xf2 = vqshrn.s32 d16, q8, #16
+0x30,0x09,0xe0,0xf2 = vqshrn.s64 d16, q8, #32
+0x30,0x09,0xc8,0xf3 = vqshrn.u16 d16, q8, #8
+0x30,0x09,0xd0,0xf3 = vqshrn.u32 d16, q8, #16
+0x30,0x09,0xe0,0xf3 = vqshrn.u64 d16, q8, #32
+0x30,0x08,0xc8,0xf3 = vqshrun.s16 d16, q8, #8
+0x30,0x08,0xd0,0xf3 = vqshrun.s32 d16, q8, #16
+0x30,0x08,0xe0,0xf3 = vqshrun.s64 d16, q8, #32
+0x70,0x09,0xc8,0xf2 = vqrshrn.s16 d16, q8, #8
+0x70,0x09,0xd0,0xf2 = vqrshrn.s32 d16, q8, #16
+0x70,0x09,0xe0,0xf2 = vqrshrn.s64 d16, q8, #32
+0x70,0x09,0xc8,0xf3 = vqrshrn.u16 d16, q8, #8
+0x70,0x09,0xd0,0xf3 = vqrshrn.u32 d16, q8, #16
+0x70,0x09,0xe0,0xf3 = vqrshrn.u64 d16, q8, #32
+0x70,0x08,0xc8,0xf3 = vqrshrun.s16 d16, q8, #8
+0x70,0x08,0xd0,0xf3 = vqrshrun.s32 d16, q8, #16
+0x70,0x08,0xe0,0xf3 = vqrshrun.s64 d16, q8, #32
data/test/MC/ARM/neon-shift-encoding.s.cs
@@ -0,0 +1,238 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0xa1,0x04,0x40,0xf3 = vshl.u8 d16, d17, d16
+0xa1,0x04,0x50,0xf3 = vshl.u16 d16, d17, d16
+0xa1,0x04,0x60,0xf3 = vshl.u32 d16, d17, d16
+0xa1,0x04,0x70,0xf3 = vshl.u64 d16, d17, d16
+0x30,0x05,0xcf,0xf2 = vshl.i8 d16, d16, #7
+0x30,0x05,0xdf,0xf2 = vshl.i16 d16, d16, #15
+0x30,0x05,0xff,0xf2 = vshl.i32 d16, d16, #31
+0xb0,0x05,0xff,0xf2 = vshl.i64 d16, d16, #63
+0xe2,0x04,0x40,0xf3 = vshl.u8 q8, q9, q8
+0xe2,0x04,0x50,0xf3 = vshl.u16 q8, q9, q8
+0xe2,0x04,0x60,0xf3 = vshl.u32 q8, q9, q8
+0xe2,0x04,0x70,0xf3 = vshl.u64 q8, q9, q8
+0x70,0x05,0xcf,0xf2 = vshl.i8 q8, q8, #7
+0x70,0x05,0xdf,0xf2 = vshl.i16 q8, q8, #15
+0x70,0x05,0xff,0xf2 = vshl.i32 q8, q8, #31
+0xf0,0x05,0xff,0xf2 = vshl.i64 q8, q8, #63
+0x30,0x00,0xc9,0xf3 = vshr.u8 d16, d16, #7
+0x30,0x00,0xd1,0xf3 = vshr.u16 d16, d16, #15
+0x30,0x00,0xe1,0xf3 = vshr.u32 d16, d16, #31
+0xb0,0x00,0xc1,0xf3 = vshr.u64 d16, d16, #63
+0x70,0x00,0xc9,0xf3 = vshr.u8 q8, q8, #7
+0x70,0x00,0xd1,0xf3 = vshr.u16 q8, q8, #15
+0x70,0x00,0xe1,0xf3 = vshr.u32 q8, q8, #31
+0xf0,0x00,0xc1,0xf3 = vshr.u64 q8, q8, #63
+0x30,0x00,0xc9,0xf2 = vshr.s8 d16, d16, #7
+0x30,0x00,0xd1,0xf2 = vshr.s16 d16, d16, #15
+0x30,0x00,0xe1,0xf2 = vshr.s32 d16, d16, #31
+0xb0,0x00,0xc1,0xf2 = vshr.s64 d16, d16, #63
+0x70,0x00,0xc9,0xf2 = vshr.s8 q8, q8, #7
+0x70,0x00,0xd1,0xf2 = vshr.s16 q8, q8, #15
+0x70,0x00,0xe1,0xf2 = vshr.s32 q8, q8, #31
+0xf0,0x00,0xc1,0xf2 = vshr.s64 q8, q8, #63
+0x30,0x00,0xc9,0xf3 = vshr.u8 d16, d16, #7
+0x30,0x00,0xd1,0xf3 = vshr.u16 d16, d16, #15
+0x30,0x00,0xe1,0xf3 = vshr.u32 d16, d16, #31
+0xb0,0x00,0xc1,0xf3 = vshr.u64 d16, d16, #63
+0x70,0x00,0xc9,0xf3 = vshr.u8 q8, q8, #7
+0x70,0x00,0xd1,0xf3 = vshr.u16 q8, q8, #15
+0x70,0x00,0xe1,0xf3 = vshr.u32 q8, q8, #31
+0xf0,0x00,0xc1,0xf3 = vshr.u64 q8, q8, #63
+0x30,0x00,0xc9,0xf2 = vshr.s8 d16, d16, #7
+0x30,0x00,0xd1,0xf2 = vshr.s16 d16, d16, #15
+0x30,0x00,0xe1,0xf2 = vshr.s32 d16, d16, #31
+0xb0,0x00,0xc1,0xf2 = vshr.s64 d16, d16, #63
+0x70,0x00,0xc9,0xf2 = vshr.s8 q8, q8, #7
+0x70,0x00,0xd1,0xf2 = vshr.s16 q8, q8, #15
+0x70,0x00,0xe1,0xf2 = vshr.s32 q8, q8, #31
+0xf0,0x00,0xc1,0xf2 = vshr.s64 q8, q8, #63
+0x16,0x01,0xc9,0xf2 = vsra.s8 d16, d6, #7
+0x32,0xa1,0xd1,0xf2 = vsra.s16 d26, d18, #15
+0x1a,0xb1,0xa1,0xf2 = vsra.s32 d11, d10, #31
+0xb3,0xc1,0x81,0xf2 = vsra.s64 d12, d19, #63
+0x70,0x21,0x89,0xf2 = vsra.s8 q1, q8, #7
+0x5e,0x41,0x91,0xf2 = vsra.s16 q2, q7, #15
+0x5c,0x61,0xa1,0xf2 = vsra.s32 q3, q6, #31
+0xda,0x81,0x81,0xf2 = vsra.s64 q4, q5, #63
+0x30,0x01,0xc9,0xf2 = vsra.s8 d16, d16, #7
+0x1f,0xf1,0x91,0xf2 = vsra.s16 d15, d15, #15
+0x1e,0xe1,0xa1,0xf2 = vsra.s32 d14, d14, #31
+0x9d,0xd1,0x81,0xf2 = vsra.s64 d13, d13, #63
+0x58,0x81,0x89,0xf2 = vsra.s8 q4, q4, #7
+0x5a,0xa1,0x91,0xf2 = vsra.s16 q5, q5, #15
+0x5c,0xc1,0xa1,0xf2 = vsra.s32 q6, q6, #31
+0xde,0xe1,0x81,0xf2 = vsra.s64 q7, q7, #63
+0x16,0x01,0xc9,0xf3 = vsra.u8 d16, d6, #7
+0x32,0xa1,0xd1,0xf3 = vsra.u16 d26, d18, #15
+0x1a,0xb1,0xa1,0xf3 = vsra.u32 d11, d10, #31
+0xb3,0xc1,0x81,0xf3 = vsra.u64 d12, d19, #63
+0x70,0x21,0x89,0xf3 = vsra.u8 q1, q8, #7
+0x5e,0x41,0x91,0xf3 = vsra.u16 q2, q7, #15
+0x5c,0x61,0xa1,0xf3 = vsra.u32 q3, q6, #31
+0xda,0x81,0x81,0xf3 = vsra.u64 q4, q5, #63
+0x30,0x01,0xc9,0xf3 = vsra.u8 d16, d16, #7
+0x1f,0xf1,0x91,0xf3 = vsra.u16 d15, d15, #15
+0x1e,0xe1,0xa1,0xf3 = vsra.u32 d14, d14, #31
+0x9d,0xd1,0x81,0xf3 = vsra.u64 d13, d13, #63
+0x58,0x81,0x89,0xf3 = vsra.u8 q4, q4, #7
+0x5a,0xa1,0x91,0xf3 = vsra.u16 q5, q5, #15
+0x5c,0xc1,0xa1,0xf3 = vsra.u32 q6, q6, #31
+0xde,0xe1,0x81,0xf3 = vsra.u64 q7, q7, #63
+0x16,0x04,0xc9,0xf3 = vsri.8 d16, d6, #7
+0x32,0xa4,0xd1,0xf3 = vsri.16 d26, d18, #15
+0x1a,0xb4,0xa1,0xf3 = vsri.32 d11, d10, #31
+0xb3,0xc4,0x81,0xf3 = vsri.64 d12, d19, #63
+0x70,0x24,0x89,0xf3 = vsri.8 q1, q8, #7
+0x5e,0x44,0x91,0xf3 = vsri.16 q2, q7, #15
+0x5c,0x64,0xa1,0xf3 = vsri.32 q3, q6, #31
+0xda,0x84,0x81,0xf3 = vsri.64 q4, q5, #63
+0x30,0x04,0xc9,0xf3 = vsri.8 d16, d16, #7
+0x1f,0xf4,0x91,0xf3 = vsri.16 d15, d15, #15
+0x1e,0xe4,0xa1,0xf3 = vsri.32 d14, d14, #31
+0x9d,0xd4,0x81,0xf3 = vsri.64 d13, d13, #63
+0x58,0x84,0x89,0xf3 = vsri.8 q4, q4, #7
+0x5a,0xa4,0x91,0xf3 = vsri.16 q5, q5, #15
+0x5c,0xc4,0xa1,0xf3 = vsri.32 q6, q6, #31
+0xde,0xe4,0x81,0xf3 = vsri.64 q7, q7, #63
+0x16,0x05,0xcf,0xf3 = vsli.8 d16, d6, #7
+0x32,0xa5,0xdf,0xf3 = vsli.16 d26, d18, #15
+0x1a,0xb5,0xbf,0xf3 = vsli.32 d11, d10, #31
+0xb3,0xc5,0xbf,0xf3 = vsli.64 d12, d19, #63
+0x70,0x25,0x8f,0xf3 = vsli.8 q1, q8, #7
+0x5e,0x45,0x9f,0xf3 = vsli.16 q2, q7, #15
+0x5c,0x65,0xbf,0xf3 = vsli.32 q3, q6, #31
+0xda,0x85,0xbf,0xf3 = vsli.64 q4, q5, #63
+0x30,0x05,0xcf,0xf3 = vsli.8 d16, d16, #7
+0x1f,0xf5,0x9f,0xf3 = vsli.16 d15, d15, #15
+0x1e,0xe5,0xbf,0xf3 = vsli.32 d14, d14, #31
+0x9d,0xd5,0xbf,0xf3 = vsli.64 d13, d13, #63
+0x58,0x85,0x8f,0xf3 = vsli.8 q4, q4, #7
+0x5a,0xa5,0x9f,0xf3 = vsli.16 q5, q5, #15
+0x5c,0xc5,0xbf,0xf3 = vsli.32 q6, q6, #31
+0xde,0xe5,0xbf,0xf3 = vsli.64 q7, q7, #63
+0x30,0x0a,0xcf,0xf2 = vshll.s8 q8, d16, #7
+0x30,0x0a,0xdf,0xf2 = vshll.s16 q8, d16, #15
+0x30,0x0a,0xff,0xf2 = vshll.s32 q8, d16, #31
+0x30,0x0a,0xcf,0xf3 = vshll.u8 q8, d16, #7
+0x30,0x0a,0xdf,0xf3 = vshll.u16 q8, d16, #15
+0x30,0x0a,0xff,0xf3 = vshll.u32 q8, d16, #31
+0x20,0x03,0xf2,0xf3 = vshll.i8 q8, d16, #8
+0x20,0x03,0xf6,0xf3 = vshll.i16 q8, d16, #16
+0x20,0x03,0xfa,0xf3 = vshll.i32 q8, d16, #32
+0x30,0x08,0xc8,0xf2 = vshrn.i16 d16, q8, #8
+0x30,0x08,0xd0,0xf2 = vshrn.i32 d16, q8, #16
+0x30,0x08,0xe0,0xf2 = vshrn.i64 d16, q8, #32
+0xa1,0x05,0x40,0xf2 = vrshl.s8 d16, d17, d16
+0xa1,0x05,0x50,0xf2 = vrshl.s16 d16, d17, d16
+0xa1,0x05,0x60,0xf2 = vrshl.s32 d16, d17, d16
+0xa1,0x05,0x70,0xf2 = vrshl.s64 d16, d17, d16
+0xa1,0x05,0x40,0xf3 = vrshl.u8 d16, d17, d16
+0xa1,0x05,0x50,0xf3 = vrshl.u16 d16, d17, d16
+0xa1,0x05,0x60,0xf3 = vrshl.u32 d16, d17, d16
+0xa1,0x05,0x70,0xf3 = vrshl.u64 d16, d17, d16
+0xe2,0x05,0x40,0xf2 = vrshl.s8 q8, q9, q8
+0xe2,0x05,0x50,0xf2 = vrshl.s16 q8, q9, q8
+0xe2,0x05,0x60,0xf2 = vrshl.s32 q8, q9, q8
+0xe2,0x05,0x70,0xf2 = vrshl.s64 q8, q9, q8
+0xe2,0x05,0x40,0xf3 = vrshl.u8 q8, q9, q8
+0xe2,0x05,0x50,0xf3 = vrshl.u16 q8, q9, q8
+0xe2,0x05,0x60,0xf3 = vrshl.u32 q8, q9, q8
+0xe2,0x05,0x70,0xf3 = vrshl.u64 q8, q9, q8
+0x30,0x02,0xc8,0xf2 = vrshr.s8 d16, d16, #8
+0x30,0x02,0xd0,0xf2 = vrshr.s16 d16, d16, #16
+0x30,0x02,0xe0,0xf2 = vrshr.s32 d16, d16, #32
+0xb0,0x02,0xc0,0xf2 = vrshr.s64 d16, d16, #64
+0x30,0x02,0xc8,0xf3 = vrshr.u8 d16, d16, #8
+0x30,0x02,0xd0,0xf3 = vrshr.u16 d16, d16, #16
+0x30,0x02,0xe0,0xf3 = vrshr.u32 d16, d16, #32
+0xb0,0x02,0xc0,0xf3 = vrshr.u64 d16, d16, #64
+0x70,0x02,0xc8,0xf2 = vrshr.s8 q8, q8, #8
+0x70,0x02,0xd0,0xf2 = vrshr.s16 q8, q8, #16
+0x70,0x02,0xe0,0xf2 = vrshr.s32 q8, q8, #32
+0xf0,0x02,0xc0,0xf2 = vrshr.s64 q8, q8, #64
+0x70,0x02,0xc8,0xf3 = vrshr.u8 q8, q8, #8
+0x70,0x02,0xd0,0xf3 = vrshr.u16 q8, q8, #16
+0x70,0x02,0xe0,0xf3 = vrshr.u32 q8, q8, #32
+0xf0,0x02,0xc0,0xf3 = vrshr.u64 q8, q8, #64
+0x70,0x08,0xc8,0xf2 = vrshrn.i16 d16, q8, #8
+0x70,0x08,0xd0,0xf2 = vrshrn.i32 d16, q8, #16
+0x70,0x08,0xe0,0xf2 = vrshrn.i64 d16, q8, #32
+0x70,0x09,0xcc,0xf2 = vqrshrn.s16 d16, q8, #4
+0x70,0x09,0xd3,0xf2 = vqrshrn.s32 d16, q8, #13
+0x70,0x09,0xf3,0xf2 = vqrshrn.s64 d16, q8, #13
+0x70,0x09,0xcc,0xf3 = vqrshrn.u16 d16, q8, #4
+0x70,0x09,0xd3,0xf3 = vqrshrn.u32 d16, q8, #13
+0x70,0x09,0xf3,0xf3 = vqrshrn.u64 d16, q8, #13
+0x48,0x84,0x0a,0xf2 = vshl.s8 q4, q4, q5
+0x48,0x84,0x1a,0xf2 = vshl.s16 q4, q4, q5
+0x48,0x84,0x2a,0xf2 = vshl.s32 q4, q4, q5
+0x48,0x84,0x3a,0xf2 = vshl.s64 q4, q4, q5
+0x48,0x84,0x0a,0xf3 = vshl.u8 q4, q4, q5
+0x48,0x84,0x1a,0xf3 = vshl.u16 q4, q4, q5
+0x48,0x84,0x2a,0xf3 = vshl.u32 q4, q4, q5
+0x48,0x84,0x3a,0xf3 = vshl.u64 q4, q4, q5
+0x04,0x44,0x05,0xf2 = vshl.s8 d4, d4, d5
+0x04,0x44,0x15,0xf2 = vshl.s16 d4, d4, d5
+0x04,0x44,0x25,0xf2 = vshl.s32 d4, d4, d5
+0x04,0x44,0x35,0xf2 = vshl.s64 d4, d4, d5
+0x04,0x44,0x05,0xf3 = vshl.u8 d4, d4, d5
+0x04,0x44,0x15,0xf3 = vshl.u16 d4, d4, d5
+0x04,0x44,0x25,0xf3 = vshl.u32 d4, d4, d5
+0x04,0x44,0x35,0xf3 = vshl.u64 d4, d4, d5
+0x58,0x85,0x8a,0xf2 = vshl.i8 q4, q4, #2
+0x58,0x85,0x9e,0xf2 = vshl.i16 q4, q4, #14
+0x58,0x85,0xbb,0xf2 = vshl.i32 q4, q4, #27
+0xd8,0x85,0xa3,0xf2 = vshl.i64 q4, q4, #35
+0x14,0x45,0x8e,0xf2 = vshl.i8 d4, d4, #6
+0x14,0x45,0x9a,0xf2 = vshl.i16 d4, d4, #10
+0x14,0x45,0xb1,0xf2 = vshl.i32 d4, d4, #17
+0x94,0x45,0xab,0xf2 = vshl.i64 d4, d4, #43
+0x0b,0xb5,0x04,0xf2 = vrshl.s8 d11, d11, d4
+0x0c,0xc5,0x15,0xf2 = vrshl.s16 d12, d12, d5
+0x0d,0xd5,0x26,0xf2 = vrshl.s32 d13, d13, d6
+0x0e,0xe5,0x37,0xf2 = vrshl.s64 d14, d14, d7
+0x0f,0xf5,0x08,0xf3 = vrshl.u8 d15, d15, d8
+0x20,0x05,0x59,0xf3 = vrshl.u16 d16, d16, d9
+0x21,0x15,0x6a,0xf3 = vrshl.u32 d17, d17, d10
+0x22,0x25,0x7b,0xf3 = vrshl.u64 d18, d18, d11
+0xc2,0x25,0x00,0xf2 = vrshl.s8 q1, q1, q8
+0xc4,0x45,0x1e,0xf2 = vrshl.s16 q2, q2, q15
+0xc6,0x65,0x2c,0xf2 = vrshl.s32 q3, q3, q14
+0xc8,0x85,0x3a,0xf2 = vrshl.s64 q4, q4, q13
+0xca,0xa5,0x08,0xf3 = vrshl.u8 q5, q5, q12
+0xcc,0xc5,0x16,0xf3 = vrshl.u16 q6, q6, q11
+0xce,0xe5,0x24,0xf3 = vrshl.u32 q7, q7, q10
+0xe0,0x05,0x72,0xf3 = vrshl.u64 q8, q8, q9
+0x1f,0xf0,0x88,0xf2 = vshr.s8 d15, d15, #8
+0x1c,0xc0,0x90,0xf2 = vshr.s16 d12, d12, #16
+0x1d,0xd0,0xa0,0xf2 = vshr.s32 d13, d13, #32
+0x9e,0xe0,0x80,0xf2 = vshr.s64 d14, d14, #64
+0x30,0x00,0xc8,0xf3 = vshr.u8 d16, d16, #8
+0x31,0x10,0xd0,0xf3 = vshr.u16 d17, d17, #16
+0x16,0x60,0xa0,0xf3 = vshr.u32 d6, d6, #32
+0x9a,0xa0,0x80,0xf3 = vshr.u64 d10, d10, #64
+0x52,0x20,0x88,0xf2 = vshr.s8 q1, q1, #8
+0x54,0x40,0x90,0xf2 = vshr.s16 q2, q2, #16
+0x56,0x60,0xa0,0xf2 = vshr.s32 q3, q3, #32
+0xd8,0x80,0x80,0xf2 = vshr.s64 q4, q4, #64
+0x5a,0xa0,0x88,0xf3 = vshr.u8 q5, q5, #8
+0x5c,0xc0,0x90,0xf3 = vshr.u16 q6, q6, #16
+0x5e,0xe0,0xa0,0xf3 = vshr.u32 q7, q7, #32
+0xf0,0x00,0xc0,0xf3 = vshr.u64 q8, q8, #64
+0x1f,0xf2,0x88,0xf2 = vrshr.s8 d15, d15, #8
+0x1c,0xc2,0x90,0xf2 = vrshr.s16 d12, d12, #16
+0x1d,0xd2,0xa0,0xf2 = vrshr.s32 d13, d13, #32
+0x9e,0xe2,0x80,0xf2 = vrshr.s64 d14, d14, #64
+0x30,0x02,0xc8,0xf3 = vrshr.u8 d16, d16, #8
+0x31,0x12,0xd0,0xf3 = vrshr.u16 d17, d17, #16
+0x16,0x62,0xa0,0xf3 = vrshr.u32 d6, d6, #32
+0x9a,0xa2,0x80,0xf3 = vrshr.u64 d10, d10, #64
+0x52,0x22,0x88,0xf2 = vrshr.s8 q1, q1, #8
+0x54,0x42,0x90,0xf2 = vrshr.s16 q2, q2, #16
+0x56,0x62,0xa0,0xf2 = vrshr.s32 q3, q3, #32
+0xd8,0x82,0x80,0xf2 = vrshr.s64 q4, q4, #64
+0x5a,0xa2,0x88,0xf3 = vrshr.u8 q5, q5, #8
+0x5c,0xc2,0x90,0xf3 = vrshr.u16 q6, q6, #16
+0x5e,0xe2,0xa0,0xf3 = vrshr.u32 q7, q7, #32
+0xf0,0x02,0xc0,0xf3 = vrshr.u64 q8, q8, #64
data/test/MC/ARM/neon-shiftaccum-encoding.s.cs
@@ -0,0 +1,97 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0x30,0x11,0xc8,0xf2 = vsra.s8 d17, d16, #8
+0x1e,0xf1,0x90,0xf2 = vsra.s16 d15, d14, #16
+0x1c,0xd1,0xa0,0xf2 = vsra.s32 d13, d12, #32
+0x9a,0xb1,0x80,0xf2 = vsra.s64 d11, d10, #64
+0x54,0xe1,0x88,0xf2 = vsra.s8 q7, q2, #8
+0x5c,0x61,0x90,0xf2 = vsra.s16 q3, q6, #16
+0x5a,0x21,0xe0,0xf2 = vsra.s32 q9, q5, #32
+0xd8,0x01,0xc0,0xf2 = vsra.s64 q8, q4, #64
+0x30,0x11,0xc8,0xf3 = vsra.u8 d17, d16, #8
+0x1e,0xb1,0x95,0xf3 = vsra.u16 d11, d14, #11
+0x1f,0xc1,0xaa,0xf3 = vsra.u32 d12, d15, #22
+0xb0,0xd1,0x8a,0xf3 = vsra.u64 d13, d16, #54
+0x5e,0x21,0x88,0xf3 = vsra.u8 q1, q7, #8
+0x5e,0x41,0x9a,0xf3 = vsra.u16 q2, q7, #6
+0x5c,0x61,0xab,0xf3 = vsra.u32 q3, q6, #21
+0xda,0x81,0xa7,0xf3 = vsra.u64 q4, q5, #25
+0x30,0x01,0xc8,0xf2 = vsra.s8 d16, d16, #8
+0x1e,0xe1,0x90,0xf2 = vsra.s16 d14, d14, #16
+0x1c,0xc1,0xa0,0xf2 = vsra.s32 d12, d12, #32
+0x9a,0xa1,0x80,0xf2 = vsra.s64 d10, d10, #64
+0x54,0x41,0x88,0xf2 = vsra.s8 q2, q2, #8
+0x5c,0xc1,0x90,0xf2 = vsra.s16 q6, q6, #16
+0x5a,0xa1,0xa0,0xf2 = vsra.s32 q5, q5, #32
+0xd8,0x81,0x80,0xf2 = vsra.s64 q4, q4, #64
+0x30,0x01,0xc8,0xf3 = vsra.u8 d16, d16, #8
+0x1e,0xe1,0x95,0xf3 = vsra.u16 d14, d14, #11
+0x1f,0xf1,0xaa,0xf3 = vsra.u32 d15, d15, #22
+0xb0,0x01,0xca,0xf3 = vsra.u64 d16, d16, #54
+0x5e,0xe1,0x88,0xf3 = vsra.u8 q7, q7, #8
+0x5e,0xe1,0x9a,0xf3 = vsra.u16 q7, q7, #6
+0x5c,0xc1,0xab,0xf3 = vsra.u32 q6, q6, #21
+0xda,0xa1,0xa7,0xf3 = vsra.u64 q5, q5, #25
+0x3a,0x53,0x88,0xf2 = vrsra.s8 d5, d26, #8
+0x39,0x63,0x90,0xf2 = vrsra.s16 d6, d25, #16
+0x38,0x73,0xa0,0xf2 = vrsra.s32 d7, d24, #32
+0xb7,0xe3,0x80,0xf2 = vrsra.s64 d14, d23, #64
+0x36,0xf3,0x88,0xf3 = vrsra.u8 d15, d22, #8
+0x35,0x03,0xd0,0xf3 = vrsra.u16 d16, d21, #16
+0x34,0x13,0xe0,0xf3 = vrsra.u32 d17, d20, #32
+0xb3,0x23,0xc0,0xf3 = vrsra.u64 d18, d19, #64
+0x54,0x23,0x88,0xf2 = vrsra.s8 q1, q2, #8
+0x56,0x43,0x90,0xf2 = vrsra.s16 q2, q3, #16
+0x58,0x63,0xa0,0xf2 = vrsra.s32 q3, q4, #32
+0xda,0x83,0x80,0xf2 = vrsra.s64 q4, q5, #64
+0x5c,0xa3,0x88,0xf3 = vrsra.u8 q5, q6, #8
+0x5e,0xc3,0x90,0xf3 = vrsra.u16 q6, q7, #16
+0x70,0xe3,0xa0,0xf3 = vrsra.u32 q7, q8, #32
+0xf2,0x03,0xc0,0xf3 = vrsra.u64 q8, q9, #64
+0x3a,0xa3,0xc8,0xf2 = vrsra.s8 d26, d26, #8
+0x39,0x93,0xd0,0xf2 = vrsra.s16 d25, d25, #16
+0x38,0x83,0xe0,0xf2 = vrsra.s32 d24, d24, #32
+0xb7,0x73,0xc0,0xf2 = vrsra.s64 d23, d23, #64
+0x36,0x63,0xc8,0xf3 = vrsra.u8 d22, d22, #8
+0x35,0x53,0xd0,0xf3 = vrsra.u16 d21, d21, #16
+0x34,0x43,0xe0,0xf3 = vrsra.u32 d20, d20, #32
+0xb3,0x33,0xc0,0xf3 = vrsra.u64 d19, d19, #64
+0x54,0x43,0x88,0xf2 = vrsra.s8 q2, q2, #8
+0x56,0x63,0x90,0xf2 = vrsra.s16 q3, q3, #16
+0x58,0x83,0xa0,0xf2 = vrsra.s32 q4, q4, #32
+0xda,0xa3,0x80,0xf2 = vrsra.s64 q5, q5, #64
+0x5c,0xc3,0x88,0xf3 = vrsra.u8 q6, q6, #8
+0x5e,0xe3,0x90,0xf3 = vrsra.u16 q7, q7, #16
+0x70,0x03,0xe0,0xf3 = vrsra.u32 q8, q8, #32
+0xf2,0x23,0xc0,0xf3 = vrsra.u64 q9, q9, #64
+0x1c,0xb5,0x8f,0xf3 = vsli.8 d11, d12, #7
+0x1d,0xc5,0x9f,0xf3 = vsli.16 d12, d13, #15
+0x1e,0xd5,0xbf,0xf3 = vsli.32 d13, d14, #31
+0x9f,0xe5,0xbf,0xf3 = vsli.64 d14, d15, #63
+0x70,0x25,0x8f,0xf3 = vsli.8 q1, q8, #7
+0x5e,0x45,0x9f,0xf3 = vsli.16 q2, q7, #15
+0x58,0x65,0xbf,0xf3 = vsli.32 q3, q4, #31
+0xda,0x85,0xbf,0xf3 = vsli.64 q4, q5, #63
+0x1b,0xc4,0xc8,0xf3 = vsri.8 d28, d11, #8
+0x1c,0xa4,0xd0,0xf3 = vsri.16 d26, d12, #16
+0x1d,0x84,0xe0,0xf3 = vsri.32 d24, d13, #32
+0x9e,0x54,0xc0,0xf3 = vsri.64 d21, d14, #64
+0x70,0x24,0x88,0xf3 = vsri.8 q1, q8, #8
+0x54,0xa4,0x90,0xf3 = vsri.16 q5, q2, #16
+0x58,0xe4,0xa0,0xf3 = vsri.32 q7, q4, #32
+0xdc,0x24,0xc0,0xf3 = vsri.64 q9, q6, #64
+0x1c,0xc5,0x8f,0xf3 = vsli.8 d12, d12, #7
+0x1d,0xd5,0x9f,0xf3 = vsli.16 d13, d13, #15
+0x1e,0xe5,0xbf,0xf3 = vsli.32 d14, d14, #31
+0x9f,0xf5,0xbf,0xf3 = vsli.64 d15, d15, #63
+0x70,0x05,0xcf,0xf3 = vsli.8 q8, q8, #7
+0x5e,0xe5,0x9f,0xf3 = vsli.16 q7, q7, #15
+0x58,0x85,0xbf,0xf3 = vsli.32 q4, q4, #31
+0xda,0xa5,0xbf,0xf3 = vsli.64 q5, q5, #63
+0x1b,0xb4,0x88,0xf3 = vsri.8 d11, d11, #8
+0x1c,0xc4,0x90,0xf3 = vsri.16 d12, d12, #16
+0x1d,0xd4,0xa0,0xf3 = vsri.32 d13, d13, #32
+0x9e,0xe4,0x80,0xf3 = vsri.64 d14, d14, #64
+0x70,0x04,0xc8,0xf3 = vsri.8 q8, q8, #8
+0x54,0x44,0x90,0xf3 = vsri.16 q2, q2, #16
+0x58,0x84,0xa0,0xf3 = vsri.32 q4, q4, #32
+0xdc,0xc4,0x80,0xf3 = vsri.64 q6, q6, #64
data/test/MC/ARM/neon-shuffle-encoding.s.cs
@@ -0,0 +1,59 @@
+# CS_ARCH_ARM, CS_MODE_ARM, None
+0xa0,0x03,0xf1,0xf2 = vext.8 d16, d17, d16, #3
+0xa0,0x05,0xf1,0xf2 = vext.8 d16, d17, d16, #5
+0xe0,0x03,0xf2,0xf2 = vext.8 q8, q9, q8, #3
+0xe0,0x07,0xf2,0xf2 = vext.8 q8, q9, q8, #7
+0xa0,0x06,0xf1,0xf2 = vext.16 d16, d17, d16, #3
+0xe0,0x0c,0xf2,0xf2 = vext.32 q8, q9, q8, #3
+0xe0,0x08,0xf2,0xf2 = vext.64 q8, q9, q8, #1
+0xa0,0x13,0xf1,0xf2 = vext.8 d17, d17, d16, #3
+0x0b,0x75,0xb7,0xf2 = vext.8 d7, d7, d11, #5
+0x60,0x63,0xb6,0xf2 = vext.8 q3, q3, q8, #3
+0xc8,0x27,0xf2,0xf2 = vext.8 q9, q9, q4, #7
+0x2a,0x16,0xb1,0xf2 = vext.16 d1, d1, d26, #3
+0x60,0xac,0xba,0xf2 = vext.32 q5, q5, q8, #3
+0x60,0xa8,0xba,0xf2 = vext.64 q5, q5, q8, #1
+0xa0,0x10,0xf2,0xf3 = vtrn.8 d17, d16
+0xa0,0x10,0xf6,0xf3 = vtrn.16 d17, d16
+0xa0,0x10,0xfa,0xf3 = vtrn.32 d17, d16
+0xe0,0x20,0xf2,0xf3 = vtrn.8 q9, q8
+0xe0,0x20,0xf6,0xf3 = vtrn.16 q9, q8
+0xe0,0x20,0xfa,0xf3 = vtrn.32 q9, q8
+0x20,0x11,0xf2,0xf3 = vuzp.8 d17, d16
+0x20,0x11,0xf6,0xf3 = vuzp.16 d17, d16
+0x60,0x21,0xf2,0xf3 = vuzp.8 q9, q8
+0x60,0x21,0xf6,0xf3 = vuzp.16 q9, q8
+0x60,0x21,0xfa,0xf3 = vuzp.32 q9, q8
+0xa0,0x11,0xf2,0xf3 = vzip.8 d17, d16
+0xa0,0x11,0xf6,0xf3 = vzip.16 d17, d16
+0xe0,0x21,0xf2,0xf3 = vzip.8 q9, q8
+0xe0,0x21,0xf6,0xf3 = vzip.16 q9, q8
+0xe0,0x21,0xfa,0xf3 = vzip.32 q9, q8
+0x83,0x20,0xba,0xf3 = vtrn.32 d2, d3
+0x83,0x20,0xba,0xf3 = vtrn.32 d2, d3
+0x89,0x30,0xb2,0xf3 = vtrn.8 d3, d9
+0x89,0x30,0xb2,0xf3 = vtrn.8 d3, d9
+0x89,0x30,0xb2,0xf3 = vtrn.8 d3, d9
+0x89,0x30,0xb2,0xf3 = vtrn.8 d3, d9
+0x89,0x30,0xb6,0xf3 = vtrn.16 d3, d9
+0x89,0x30,0xb6,0xf3 = vtrn.16 d3, d9
+0x89,0x30,0xb6,0xf3 = vtrn.16 d3, d9
+0x89,0x30,0xb6,0xf3 = vtrn.16 d3, d9
+0x89,0x30,0xba,0xf3 = vtrn.32 d3, d9
+0x89,0x30,0xba,0xf3 = vtrn.32 d3, d9
+0x89,0x30,0xba,0xf3 = vtrn.32 d3, d9
+0x89,0x30,0xba,0xf3 = vtrn.32 d3, d9
+0x89,0x30,0xba,0xf3 = vtrn.32 d3, d9
+0xcc,0xc0,0xf2,0xf3 = vtrn.8 q14, q6
+0xcc,0xc0,0xf2,0xf3 = vtrn.8 q14, q6
+0xcc,0xc0,0xf2,0xf3 = vtrn.8 q14, q6
+0xcc,0xc0,0xf2,0xf3 = vtrn.8 q14, q6
+0xcc,0xc0,0xf6,0xf3 = vtrn.16 q14, q6
+0xcc,0xc0,0xf6,0xf3 = vtrn.16 q14, q6
+0xcc,0xc0,0xf6,0xf3 = vtrn.16 q14, q6
+0xcc,0xc0,0xf6,0xf3 = vtrn.16 q14, q6
+0xcc,0xc0,0xfa,0xf3 = vtrn.32 q14, q6
+0xcc,0xc0,0xfa,0xf3 = vtrn.32 q14, q6
+0xcc,0xc0,0xfa,0xf3 = vtrn.32 q14, q6
+0xcc,0xc0,0xfa,0xf3 = vtrn.32 q14, q6
+0xcc,0xc0,0xfa,0xf3 = vtrn.32 q14, q6