crabstone 3.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (302)
  1. checksums.yaml +7 -0
  2. data/CHANGES.md +61 -0
  3. data/LICENSE +25 -0
  4. data/MANIFEST +312 -0
  5. data/README.md +103 -0
  6. data/Rakefile +27 -0
  7. data/bin/genconst +66 -0
  8. data/bin/genreg +99 -0
  9. data/crabstone.gemspec +27 -0
  10. data/examples/hello_world.rb +43 -0
  11. data/lib/arch/arm.rb +128 -0
  12. data/lib/arch/arm64.rb +167 -0
  13. data/lib/arch/arm64_const.rb +1055 -0
  14. data/lib/arch/arm64_registers.rb +295 -0
  15. data/lib/arch/arm_const.rb +777 -0
  16. data/lib/arch/arm_registers.rb +149 -0
  17. data/lib/arch/mips.rb +78 -0
  18. data/lib/arch/mips_const.rb +850 -0
  19. data/lib/arch/mips_registers.rb +208 -0
  20. data/lib/arch/ppc.rb +90 -0
  21. data/lib/arch/ppc_const.rb +1181 -0
  22. data/lib/arch/ppc_registers.rb +209 -0
  23. data/lib/arch/sparc.rb +79 -0
  24. data/lib/arch/sparc_const.rb +461 -0
  25. data/lib/arch/sparc_registers.rb +121 -0
  26. data/lib/arch/systemz.rb +79 -0
  27. data/lib/arch/sysz_const.rb +779 -0
  28. data/lib/arch/sysz_registers.rb +66 -0
  29. data/lib/arch/x86.rb +107 -0
  30. data/lib/arch/x86_const.rb +1698 -0
  31. data/lib/arch/x86_registers.rb +265 -0
  32. data/lib/arch/xcore.rb +78 -0
  33. data/lib/arch/xcore_const.rb +185 -0
  34. data/lib/arch/xcore_registers.rb +57 -0
  35. data/lib/crabstone.rb +564 -0
  36. data/test/MC/AArch64/basic-a64-instructions.s.cs +2014 -0
  37. data/test/MC/AArch64/gicv3-regs.s.cs +111 -0
  38. data/test/MC/AArch64/neon-2velem.s.cs +113 -0
  39. data/test/MC/AArch64/neon-3vdiff.s.cs +143 -0
  40. data/test/MC/AArch64/neon-aba-abd.s.cs +28 -0
  41. data/test/MC/AArch64/neon-across.s.cs +40 -0
  42. data/test/MC/AArch64/neon-add-pairwise.s.cs +11 -0
  43. data/test/MC/AArch64/neon-add-sub-instructions.s.cs +21 -0
  44. data/test/MC/AArch64/neon-bitwise-instructions.s.cs +17 -0
  45. data/test/MC/AArch64/neon-compare-instructions.s.cs +136 -0
  46. data/test/MC/AArch64/neon-crypto.s.cs +15 -0
  47. data/test/MC/AArch64/neon-extract.s.cs +3 -0
  48. data/test/MC/AArch64/neon-facge-facgt.s.cs +13 -0
  49. data/test/MC/AArch64/neon-frsqrt-frecp.s.cs +7 -0
  50. data/test/MC/AArch64/neon-halving-add-sub.s.cs +25 -0
  51. data/test/MC/AArch64/neon-max-min-pairwise.s.cs +37 -0
  52. data/test/MC/AArch64/neon-max-min.s.cs +37 -0
  53. data/test/MC/AArch64/neon-mla-mls-instructions.s.cs +19 -0
  54. data/test/MC/AArch64/neon-mov.s.cs +74 -0
  55. data/test/MC/AArch64/neon-mul-div-instructions.s.cs +24 -0
  56. data/test/MC/AArch64/neon-perm.s.cs +43 -0
  57. data/test/MC/AArch64/neon-rounding-halving-add.s.cs +13 -0
  58. data/test/MC/AArch64/neon-rounding-shift.s.cs +15 -0
  59. data/test/MC/AArch64/neon-saturating-add-sub.s.cs +29 -0
  60. data/test/MC/AArch64/neon-saturating-rounding-shift.s.cs +15 -0
  61. data/test/MC/AArch64/neon-saturating-shift.s.cs +15 -0
  62. data/test/MC/AArch64/neon-scalar-abs.s.cs +8 -0
  63. data/test/MC/AArch64/neon-scalar-add-sub.s.cs +3 -0
  64. data/test/MC/AArch64/neon-scalar-by-elem-mla.s.cs +13 -0
  65. data/test/MC/AArch64/neon-scalar-by-elem-mul.s.cs +13 -0
  66. data/test/MC/AArch64/neon-scalar-by-elem-saturating-mla.s.cs +15 -0
  67. data/test/MC/AArch64/neon-scalar-by-elem-saturating-mul.s.cs +18 -0
  68. data/test/MC/AArch64/neon-scalar-compare.s.cs +12 -0
  69. data/test/MC/AArch64/neon-scalar-cvt.s.cs +34 -0
  70. data/test/MC/AArch64/neon-scalar-dup.s.cs +23 -0
  71. data/test/MC/AArch64/neon-scalar-extract-narrow.s.cs +10 -0
  72. data/test/MC/AArch64/neon-scalar-fp-compare.s.cs +21 -0
  73. data/test/MC/AArch64/neon-scalar-mul.s.cs +13 -0
  74. data/test/MC/AArch64/neon-scalar-neg.s.cs +6 -0
  75. data/test/MC/AArch64/neon-scalar-recip.s.cs +11 -0
  76. data/test/MC/AArch64/neon-scalar-reduce-pairwise.s.cs +3 -0
  77. data/test/MC/AArch64/neon-scalar-rounding-shift.s.cs +3 -0
  78. data/test/MC/AArch64/neon-scalar-saturating-add-sub.s.cs +25 -0
  79. data/test/MC/AArch64/neon-scalar-saturating-rounding-shift.s.cs +9 -0
  80. data/test/MC/AArch64/neon-scalar-saturating-shift.s.cs +9 -0
  81. data/test/MC/AArch64/neon-scalar-shift-imm.s.cs +42 -0
  82. data/test/MC/AArch64/neon-scalar-shift.s.cs +3 -0
  83. data/test/MC/AArch64/neon-shift-left-long.s.cs +13 -0
  84. data/test/MC/AArch64/neon-shift.s.cs +22 -0
  85. data/test/MC/AArch64/neon-simd-copy.s.cs +42 -0
  86. data/test/MC/AArch64/neon-simd-ldst-multi-elem.s.cs +197 -0
  87. data/test/MC/AArch64/neon-simd-ldst-one-elem.s.cs +129 -0
  88. data/test/MC/AArch64/neon-simd-misc.s.cs +213 -0
  89. data/test/MC/AArch64/neon-simd-post-ldst-multi-elem.s.cs +107 -0
  90. data/test/MC/AArch64/neon-simd-shift.s.cs +151 -0
  91. data/test/MC/AArch64/neon-tbl.s.cs +21 -0
  92. data/test/MC/AArch64/trace-regs.s.cs +383 -0
  93. data/test/MC/ARM/arm-aliases.s.cs +7 -0
  94. data/test/MC/ARM/arm-arithmetic-aliases.s.cs +50 -0
  95. data/test/MC/ARM/arm-it-block.s.cs +2 -0
  96. data/test/MC/ARM/arm-memory-instructions.s.cs +138 -0
  97. data/test/MC/ARM/arm-shift-encoding.s.cs +50 -0
  98. data/test/MC/ARM/arm-thumb-trustzone.s.cs +3 -0
  99. data/test/MC/ARM/arm-trustzone.s.cs +3 -0
  100. data/test/MC/ARM/arm_addrmode2.s.cs +15 -0
  101. data/test/MC/ARM/arm_addrmode3.s.cs +9 -0
  102. data/test/MC/ARM/arm_instructions.s.cs +25 -0
  103. data/test/MC/ARM/basic-arm-instructions-v8.s.cs +10 -0
  104. data/test/MC/ARM/basic-arm-instructions.s.cs +997 -0
  105. data/test/MC/ARM/basic-thumb-instructions.s.cs +130 -0
  106. data/test/MC/ARM/basic-thumb2-instructions-v8.s.cs +1 -0
  107. data/test/MC/ARM/basic-thumb2-instructions.s.cs +1242 -0
  108. data/test/MC/ARM/crc32-thumb.s.cs +7 -0
  109. data/test/MC/ARM/crc32.s.cs +7 -0
  110. data/test/MC/ARM/dot-req.s.cs +3 -0
  111. data/test/MC/ARM/fp-armv8.s.cs +52 -0
  112. data/test/MC/ARM/idiv-thumb.s.cs +3 -0
  113. data/test/MC/ARM/idiv.s.cs +3 -0
  114. data/test/MC/ARM/load-store-acquire-release-v8-thumb.s.cs +15 -0
  115. data/test/MC/ARM/load-store-acquire-release-v8.s.cs +15 -0
  116. data/test/MC/ARM/mode-switch.s.cs +7 -0
  117. data/test/MC/ARM/neon-abs-encoding.s.cs +15 -0
  118. data/test/MC/ARM/neon-absdiff-encoding.s.cs +39 -0
  119. data/test/MC/ARM/neon-add-encoding.s.cs +119 -0
  120. data/test/MC/ARM/neon-bitcount-encoding.s.cs +15 -0
  121. data/test/MC/ARM/neon-bitwise-encoding.s.cs +126 -0
  122. data/test/MC/ARM/neon-cmp-encoding.s.cs +88 -0
  123. data/test/MC/ARM/neon-convert-encoding.s.cs +27 -0
  124. data/test/MC/ARM/neon-crypto.s.cs +16 -0
  125. data/test/MC/ARM/neon-dup-encoding.s.cs +13 -0
  126. data/test/MC/ARM/neon-minmax-encoding.s.cs +57 -0
  127. data/test/MC/ARM/neon-mov-encoding.s.cs +76 -0
  128. data/test/MC/ARM/neon-mul-accum-encoding.s.cs +39 -0
  129. data/test/MC/ARM/neon-mul-encoding.s.cs +72 -0
  130. data/test/MC/ARM/neon-neg-encoding.s.cs +15 -0
  131. data/test/MC/ARM/neon-pairwise-encoding.s.cs +47 -0
  132. data/test/MC/ARM/neon-reciprocal-encoding.s.cs +13 -0
  133. data/test/MC/ARM/neon-reverse-encoding.s.cs +13 -0
  134. data/test/MC/ARM/neon-satshift-encoding.s.cs +75 -0
  135. data/test/MC/ARM/neon-shift-encoding.s.cs +238 -0
  136. data/test/MC/ARM/neon-shiftaccum-encoding.s.cs +97 -0
  137. data/test/MC/ARM/neon-shuffle-encoding.s.cs +59 -0
  138. data/test/MC/ARM/neon-sub-encoding.s.cs +82 -0
  139. data/test/MC/ARM/neon-table-encoding.s.cs +9 -0
  140. data/test/MC/ARM/neon-v8.s.cs +38 -0
  141. data/test/MC/ARM/neon-vld-encoding.s.cs +213 -0
  142. data/test/MC/ARM/neon-vst-encoding.s.cs +120 -0
  143. data/test/MC/ARM/neon-vswp.s.cs +3 -0
  144. data/test/MC/ARM/neont2-abs-encoding.s.cs +15 -0
  145. data/test/MC/ARM/neont2-absdiff-encoding.s.cs +39 -0
  146. data/test/MC/ARM/neont2-add-encoding.s.cs +65 -0
  147. data/test/MC/ARM/neont2-bitcount-encoding.s.cs +15 -0
  148. data/test/MC/ARM/neont2-bitwise-encoding.s.cs +15 -0
  149. data/test/MC/ARM/neont2-cmp-encoding.s.cs +17 -0
  150. data/test/MC/ARM/neont2-convert-encoding.s.cs +19 -0
  151. data/test/MC/ARM/neont2-dup-encoding.s.cs +19 -0
  152. data/test/MC/ARM/neont2-minmax-encoding.s.cs +57 -0
  153. data/test/MC/ARM/neont2-mov-encoding.s.cs +58 -0
  154. data/test/MC/ARM/neont2-mul-accum-encoding.s.cs +41 -0
  155. data/test/MC/ARM/neont2-mul-encoding.s.cs +31 -0
  156. data/test/MC/ARM/neont2-neg-encoding.s.cs +15 -0
  157. data/test/MC/ARM/neont2-pairwise-encoding.s.cs +43 -0
  158. data/test/MC/ARM/neont2-reciprocal-encoding.s.cs +13 -0
  159. data/test/MC/ARM/neont2-reverse-encoding.s.cs +13 -0
  160. data/test/MC/ARM/neont2-satshift-encoding.s.cs +75 -0
  161. data/test/MC/ARM/neont2-shift-encoding.s.cs +80 -0
  162. data/test/MC/ARM/neont2-shiftaccum-encoding.s.cs +97 -0
  163. data/test/MC/ARM/neont2-shuffle-encoding.s.cs +23 -0
  164. data/test/MC/ARM/neont2-sub-encoding.s.cs +23 -0
  165. data/test/MC/ARM/neont2-table-encoding.s.cs +9 -0
  166. data/test/MC/ARM/neont2-vld-encoding.s.cs +51 -0
  167. data/test/MC/ARM/neont2-vst-encoding.s.cs +48 -0
  168. data/test/MC/ARM/simple-fp-encoding.s.cs +157 -0
  169. data/test/MC/ARM/thumb-fp-armv8.s.cs +51 -0
  170. data/test/MC/ARM/thumb-hints.s.cs +12 -0
  171. data/test/MC/ARM/thumb-neon-crypto.s.cs +16 -0
  172. data/test/MC/ARM/thumb-neon-v8.s.cs +38 -0
  173. data/test/MC/ARM/thumb-shift-encoding.s.cs +19 -0
  174. data/test/MC/ARM/thumb.s.cs +19 -0
  175. data/test/MC/ARM/thumb2-b.w-encodingT4.s.cs +2 -0
  176. data/test/MC/ARM/thumb2-branches.s.cs +85 -0
  177. data/test/MC/ARM/thumb2-mclass.s.cs +41 -0
  178. data/test/MC/ARM/thumb2-narrow-dp.ll.cs +379 -0
  179. data/test/MC/ARM/thumb2-pldw.s.cs +2 -0
  180. data/test/MC/ARM/vfp4-thumb.s.cs +13 -0
  181. data/test/MC/ARM/vfp4.s.cs +13 -0
  182. data/test/MC/ARM/vpush-vpop-thumb.s.cs +9 -0
  183. data/test/MC/ARM/vpush-vpop.s.cs +9 -0
  184. data/test/MC/Mips/hilo-addressing.s.cs +4 -0
  185. data/test/MC/Mips/micromips-alu-instructions-EB.s.cs +33 -0
  186. data/test/MC/Mips/micromips-alu-instructions.s.cs +33 -0
  187. data/test/MC/Mips/micromips-branch-instructions-EB.s.cs +11 -0
  188. data/test/MC/Mips/micromips-branch-instructions.s.cs +11 -0
  189. data/test/MC/Mips/micromips-expansions.s.cs +20 -0
  190. data/test/MC/Mips/micromips-jump-instructions-EB.s.cs +5 -0
  191. data/test/MC/Mips/micromips-jump-instructions.s.cs +6 -0
  192. data/test/MC/Mips/micromips-loadstore-instructions-EB.s.cs +9 -0
  193. data/test/MC/Mips/micromips-loadstore-instructions.s.cs +9 -0
  194. data/test/MC/Mips/micromips-loadstore-unaligned-EB.s.cs +5 -0
  195. data/test/MC/Mips/micromips-loadstore-unaligned.s.cs +5 -0
  196. data/test/MC/Mips/micromips-movcond-instructions-EB.s.cs +5 -0
  197. data/test/MC/Mips/micromips-movcond-instructions.s.cs +5 -0
  198. data/test/MC/Mips/micromips-multiply-instructions-EB.s.cs +5 -0
  199. data/test/MC/Mips/micromips-multiply-instructions.s.cs +5 -0
  200. data/test/MC/Mips/micromips-shift-instructions-EB.s.cs +9 -0
  201. data/test/MC/Mips/micromips-shift-instructions.s.cs +9 -0
  202. data/test/MC/Mips/micromips-trap-instructions-EB.s.cs +13 -0
  203. data/test/MC/Mips/micromips-trap-instructions.s.cs +13 -0
  204. data/test/MC/Mips/mips-alu-instructions.s.cs +53 -0
  205. data/test/MC/Mips/mips-control-instructions-64.s.cs +33 -0
  206. data/test/MC/Mips/mips-control-instructions.s.cs +33 -0
  207. data/test/MC/Mips/mips-coprocessor-encodings.s.cs +17 -0
  208. data/test/MC/Mips/mips-dsp-instructions.s.cs +43 -0
  209. data/test/MC/Mips/mips-expansions.s.cs +20 -0
  210. data/test/MC/Mips/mips-fpu-instructions.s.cs +93 -0
  211. data/test/MC/Mips/mips-jump-instructions.s.cs +1 -0
  212. data/test/MC/Mips/mips-memory-instructions.s.cs +17 -0
  213. data/test/MC/Mips/mips-register-names.s.cs +33 -0
  214. data/test/MC/Mips/mips64-alu-instructions.s.cs +47 -0
  215. data/test/MC/Mips/mips64-instructions.s.cs +3 -0
  216. data/test/MC/Mips/mips64-register-names.s.cs +33 -0
  217. data/test/MC/Mips/mips_directives.s.cs +12 -0
  218. data/test/MC/Mips/nabi-regs.s.cs +12 -0
  219. data/test/MC/Mips/set-at-directive.s.cs +6 -0
  220. data/test/MC/Mips/test_2r.s.cs +16 -0
  221. data/test/MC/Mips/test_2rf.s.cs +33 -0
  222. data/test/MC/Mips/test_3r.s.cs +243 -0
  223. data/test/MC/Mips/test_3rf.s.cs +83 -0
  224. data/test/MC/Mips/test_bit.s.cs +49 -0
  225. data/test/MC/Mips/test_cbranch.s.cs +11 -0
  226. data/test/MC/Mips/test_ctrlregs.s.cs +33 -0
  227. data/test/MC/Mips/test_elm.s.cs +16 -0
  228. data/test/MC/Mips/test_elm_insert.s.cs +4 -0
  229. data/test/MC/Mips/test_elm_insve.s.cs +5 -0
  230. data/test/MC/Mips/test_i10.s.cs +5 -0
  231. data/test/MC/Mips/test_i5.s.cs +45 -0
  232. data/test/MC/Mips/test_i8.s.cs +11 -0
  233. data/test/MC/Mips/test_lsa.s.cs +5 -0
  234. data/test/MC/Mips/test_mi10.s.cs +24 -0
  235. data/test/MC/Mips/test_vec.s.cs +8 -0
  236. data/test/MC/PowerPC/ppc64-encoding-bookII.s.cs +25 -0
  237. data/test/MC/PowerPC/ppc64-encoding-bookIII.s.cs +35 -0
  238. data/test/MC/PowerPC/ppc64-encoding-ext.s.cs +535 -0
  239. data/test/MC/PowerPC/ppc64-encoding-fp.s.cs +110 -0
  240. data/test/MC/PowerPC/ppc64-encoding-vmx.s.cs +170 -0
  241. data/test/MC/PowerPC/ppc64-encoding.s.cs +202 -0
  242. data/test/MC/PowerPC/ppc64-operands.s.cs +32 -0
  243. data/test/MC/README +6 -0
  244. data/test/MC/Sparc/sparc-alu-instructions.s.cs +47 -0
  245. data/test/MC/Sparc/sparc-atomic-instructions.s.cs +7 -0
  246. data/test/MC/Sparc/sparc-ctrl-instructions.s.cs +11 -0
  247. data/test/MC/Sparc/sparc-fp-instructions.s.cs +59 -0
  248. data/test/MC/Sparc/sparc-mem-instructions.s.cs +25 -0
  249. data/test/MC/Sparc/sparc-vis.s.cs +2 -0
  250. data/test/MC/Sparc/sparc64-alu-instructions.s.cs +13 -0
  251. data/test/MC/Sparc/sparc64-ctrl-instructions.s.cs +102 -0
  252. data/test/MC/Sparc/sparcv8-instructions.s.cs +7 -0
  253. data/test/MC/Sparc/sparcv9-instructions.s.cs +1 -0
  254. data/test/MC/SystemZ/insn-good-z196.s.cs +589 -0
  255. data/test/MC/SystemZ/insn-good.s.cs +2265 -0
  256. data/test/MC/SystemZ/regs-good.s.cs +45 -0
  257. data/test/MC/X86/3DNow.s.cs +29 -0
  258. data/test/MC/X86/address-size.s.cs +5 -0
  259. data/test/MC/X86/avx512-encodings.s.cs +12 -0
  260. data/test/MC/X86/intel-syntax-encoding.s.cs +30 -0
  261. data/test/MC/X86/x86-32-avx.s.cs +833 -0
  262. data/test/MC/X86/x86-32-fma3.s.cs +169 -0
  263. data/test/MC/X86/x86-32-ms-inline-asm.s.cs +27 -0
  264. data/test/MC/X86/x86_64-avx-clmul-encoding.s.cs +11 -0
  265. data/test/MC/X86/x86_64-avx-encoding.s.cs +1058 -0
  266. data/test/MC/X86/x86_64-bmi-encoding.s.cs +51 -0
  267. data/test/MC/X86/x86_64-encoding.s.cs +59 -0
  268. data/test/MC/X86/x86_64-fma3-encoding.s.cs +169 -0
  269. data/test/MC/X86/x86_64-fma4-encoding.s.cs +98 -0
  270. data/test/MC/X86/x86_64-hle-encoding.s.cs +3 -0
  271. data/test/MC/X86/x86_64-imm-widths.s.cs +27 -0
  272. data/test/MC/X86/x86_64-rand-encoding.s.cs +13 -0
  273. data/test/MC/X86/x86_64-rtm-encoding.s.cs +4 -0
  274. data/test/MC/X86/x86_64-sse4a.s.cs +1 -0
  275. data/test/MC/X86/x86_64-tbm-encoding.s.cs +40 -0
  276. data/test/MC/X86/x86_64-xop-encoding.s.cs +152 -0
  277. data/test/README +6 -0
  278. data/test/test.rb +205 -0
  279. data/test/test.rb.SPEC +235 -0
  280. data/test/test_arm.rb +202 -0
  281. data/test/test_arm.rb.SPEC +275 -0
  282. data/test/test_arm64.rb +150 -0
  283. data/test/test_arm64.rb.SPEC +116 -0
  284. data/test/test_detail.rb +228 -0
  285. data/test/test_detail.rb.SPEC +322 -0
  286. data/test/test_exhaustive.rb +80 -0
  287. data/test/test_mips.rb +118 -0
  288. data/test/test_mips.rb.SPEC +91 -0
  289. data/test/test_ppc.rb +137 -0
  290. data/test/test_ppc.rb.SPEC +84 -0
  291. data/test/test_sanity.rb +83 -0
  292. data/test/test_skipdata.rb +111 -0
  293. data/test/test_skipdata.rb.SPEC +58 -0
  294. data/test/test_sparc.rb +113 -0
  295. data/test/test_sparc.rb.SPEC +116 -0
  296. data/test/test_sysz.rb +111 -0
  297. data/test/test_sysz.rb.SPEC +61 -0
  298. data/test/test_x86.rb +189 -0
  299. data/test/test_x86.rb.SPEC +579 -0
  300. data/test/test_xcore.rb +100 -0
  301. data/test/test_xcore.rb.SPEC +75 -0
  302. metadata +393 -0
@@ -0,0 +1,45 @@
1
+ # CS_ARCH_SYSZ, 0, None
2
+ 0x18,0x01 = lr %r0, %r1
3
+ 0x18,0x23 = lr %r2, %r3
4
+ 0x18,0x45 = lr %r4, %r5
5
+ 0x18,0x67 = lr %r6, %r7
6
+ 0x18,0x89 = lr %r8, %r9
7
+ 0x18,0xab = lr %r10, %r11
8
+ 0x18,0xcd = lr %r12, %r13
9
+ 0x18,0xef = lr %r14, %r15
10
+ 0xb9,0x04,0x00,0x01 = lgr %r0, %r1
11
+ 0xb9,0x04,0x00,0x23 = lgr %r2, %r3
12
+ 0xb9,0x04,0x00,0x45 = lgr %r4, %r5
13
+ 0xb9,0x04,0x00,0x67 = lgr %r6, %r7
14
+ 0xb9,0x04,0x00,0x89 = lgr %r8, %r9
15
+ 0xb9,0x04,0x00,0xab = lgr %r10, %r11
16
+ 0xb9,0x04,0x00,0xcd = lgr %r12, %r13
17
+ 0xb9,0x04,0x00,0xef = lgr %r14, %r15
18
+ 0xb9,0x97,0x00,0x00 = dlr %r0, %r0
19
+ 0xb9,0x97,0x00,0x20 = dlr %r2, %r0
20
+ 0xb9,0x97,0x00,0x40 = dlr %r4, %r0
21
+ 0xb9,0x97,0x00,0x60 = dlr %r6, %r0
22
+ 0xb9,0x97,0x00,0x80 = dlr %r8, %r0
23
+ 0xb9,0x97,0x00,0xa0 = dlr %r10, %r0
24
+ 0xb9,0x97,0x00,0xc0 = dlr %r12, %r0
25
+ 0xb9,0x97,0x00,0xe0 = dlr %r14, %r0
26
+ 0x38,0x01 = ler %f0, %f1
27
+ 0x38,0x23 = ler %f2, %f3
28
+ 0x38,0x45 = ler %f4, %f5
29
+ 0x38,0x67 = ler %f6, %f7
30
+ 0x38,0x89 = ler %f8, %f9
31
+ 0x38,0xab = ler %f10, %f11
32
+ 0x38,0xcd = ler %f12, %f13
33
+ 0x38,0xef = ler %f14, %f15
34
+ 0x28,0x01 = ldr %f0, %f1
35
+ 0x28,0x23 = ldr %f2, %f3
36
+ 0x28,0x45 = ldr %f4, %f5
37
+ 0x28,0x67 = ldr %f6, %f7
38
+ 0x28,0x89 = ldr %f8, %f9
39
+ 0x28,0xab = ldr %f10, %f11
40
+ 0x28,0xcd = ldr %f12, %f13
41
+ 0x28,0xef = ldr %f14, %f15
42
+ 0xb3,0x65,0x00,0x01 = lxr %f0, %f1
43
+ 0xb3,0x65,0x00,0x45 = lxr %f4, %f5
44
+ 0xb3,0x65,0x00,0x89 = lxr %f8, %f9
45
+ 0xb3,0x65,0x00,0xcd = lxr %f12, %f13
@@ -0,0 +1,29 @@
1
+ # CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
2
+ 0x0f,0x0f,0xca,0xbf = pavgusb %mm2, %mm1
3
+ 0x67,0x0f,0x0f,0x5c,0x16,0x09,0xbf = pavgusb 9(%esi,%edx), %mm3
4
+ 0x0f,0x0f,0xca,0x1d = pf2id %mm2, %mm1
5
+ 0x67,0x0f,0x0f,0x5c,0x16,0x09,0x1d = pf2id 9(%esi,%edx), %mm3
6
+ 0x0f,0x0f,0xca,0xae = pfacc %mm2, %mm1
7
+ 0x0f,0x0f,0xca,0x9e = pfadd %mm2, %mm1
8
+ 0x0f,0x0f,0xca,0xb0 = pfcmpeq %mm2, %mm1
9
+ 0x0f,0x0f,0xca,0x90 = pfcmpge %mm2, %mm1
10
+ 0x0f,0x0f,0xca,0xa0 = pfcmpgt %mm2, %mm1
11
+ 0x0f,0x0f,0xca,0xa4 = pfmax %mm2, %mm1
12
+ 0x0f,0x0f,0xca,0x94 = pfmin %mm2, %mm1
13
+ 0x0f,0x0f,0xca,0xb4 = pfmul %mm2, %mm1
14
+ 0x0f,0x0f,0xca,0x96 = pfrcp %mm2, %mm1
15
+ 0x0f,0x0f,0xca,0xa6 = pfrcpit1 %mm2, %mm1
16
+ 0x0f,0x0f,0xca,0xb6 = pfrcpit2 %mm2, %mm1
17
+ 0x0f,0x0f,0xca,0xa7 = pfrsqit1 %mm2, %mm1
18
+ 0x0f,0x0f,0xca,0x97 = pfrsqrt %mm2, %mm1
19
+ 0x0f,0x0f,0xca,0x9a = pfsub %mm2, %mm1
20
+ 0x0f,0x0f,0xca,0xaa = pfsubr %mm2, %mm1
21
+ 0x0f,0x0f,0xca,0x0d = pi2fd %mm2, %mm1
22
+ 0x0f,0x0f,0xca,0xb7 = pmulhrw %mm2, %mm1
23
+ 0x0f,0x0e = femms
24
+ 0x0f,0x0d,0x00 = prefetch (%eax)
25
+ 0x0f,0x0f,0xca,0x1c = pf2iw %mm2, %mm1
26
+ 0x0f,0x0f,0xca,0x0c = pi2fw %mm2, %mm1
27
+ 0x0f,0x0f,0xca,0x8a = pfnacc %mm2, %mm1
28
+ 0x0f,0x0f,0xca,0x8e = pfpnacc %mm2, %mm1
29
+ 0x0f,0x0f,0xca,0xbb = pswapd %mm2, %mm1
@@ -0,0 +1,5 @@
1
+ # CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
2
+ 0x67,0xc6,0x06,0x00 = movb $0x0, (%esi)
3
+ 0xc6,0x06,0x00 = movb $0x0, (%rsi)
4
+ 0x67,0xc6,0x06,0x00 = movb $0x0, (%si)
5
+ 0xc6,0x06,0x00 = movb $0x0, (%esi)
@@ -0,0 +1,12 @@
1
+ # CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
2
+ 0x62,0xa3,0x55,0x48,0x38,0xcd,0x01 = vinserti32x4 $1, %xmm21, %zmm5, %zmm17
3
+ 0x62,0xe3,0x1d,0x40,0x38,0x4f,0x10,0x01 = vinserti32x4 $1, 256(%rdi), %zmm28, %zmm17
4
+ 0x62,0x33,0x7d,0x48,0x39,0xc9,0x01 = vextracti32x4 $1, %zmm9, %xmm17
5
+ 0x62,0x33,0xfd,0x48,0x3b,0xc9,0x01 = vextracti64x4 $1, %zmm9, %ymm17
6
+ 0x62,0x73,0xfd,0x48,0x3b,0x4f,0x10,0x01 = vextracti64x4 $1, %zmm9, 512(%rdi)
7
+ 0x62,0xb1,0x35,0x40,0x72,0xe1,0x02 = vpsrad $2, %zmm17, %zmm25
8
+ 0x62,0xf1,0x35,0x40,0x72,0x64,0xb7,0x08,0x02 = vpsrad $2, 512(%rdi, %rsi, 4), %zmm25
9
+ 0x62,0x21,0x1d,0x48,0xe2,0xc9 = vpsrad %xmm17, %zmm12, %zmm25
10
+ 0x62,0x61,0x1d,0x48,0xe2,0x4c,0xb7,0x20 = vpsrad 512(%rdi, %rsi, 4), %zmm12, %zmm25
11
+ 0x62,0xf2,0x7d,0xc9,0x58,0xc8 = vpbroadcastd %xmm0, %zmm1 {%k1} {z}
12
+ 0x62,0xf1,0xfe,0x4b,0x6f,0xc8 = vmovdqu64 %zmm0, %zmm1 {%k3}
@@ -0,0 +1,30 @@
1
+ # CS_ARCH_X86, CS_MODE_64, None
2
+ 0x66,0x83,0xf0,0x0c = xor ax, 12
3
+ 0x83,0xf0,0x0c = xor eax, 12
4
+ 0x48,0x83,0xf0,0x0c = xor rax, 12
5
+ 0x66,0x83,0xc8,0x0c = or ax, 12
6
+ 0x83,0xc8,0x0c = or eax, 12
7
+ 0x48,0x83,0xc8,0x0c = or rax, 12
8
+ 0x66,0x83,0xf8,0x0c = cmp ax, 12
9
+ 0x83,0xf8,0x0c = cmp eax, 12
10
+ 0x48,0x83,0xf8,0x0c = cmp rax, 12
11
+ 0x48,0x89,0x44,0x24,0xf0 = mov QWORD PTR [RSP - 16], RAX
12
+ 0x66,0x83,0xc0,0xf4 = add ax, -12
13
+ 0x83,0xc0,0xf4 = add eax, -12
14
+ 0x48,0x83,0xc0,0xf4 = add rax, -12
15
+ 0x66,0x83,0xd0,0xf4 = adc ax, -12
16
+ 0x83,0xd0,0xf4 = adc eax, -12
17
+ 0x48,0x83,0xd0,0xf4 = adc rax, -12
18
+ 0x66,0x83,0xd8,0xf4 = sbb ax, -12
19
+ 0x83,0xd8,0xf4 = sbb eax, -12
20
+ 0x48,0x83,0xd8,0xf4 = sbb rax, -12
21
+ 0x66,0x83,0xf8,0xf4 = cmp ax, -12
22
+ 0x83,0xf8,0xf4 = cmp eax, -12
23
+ 0x48,0x83,0xf8,0xf4 = cmp rax, -12
24
+ 0xf2,0x0f,0x10,0x2c,0x25,0xf8,0xff,0xff,0xff = movsd XMM5, QWORD PTR [-8]
25
+ 0xd1,0xe7 = shl EDI, 1
26
+ 0x0f,0xc2,0xd1,0x01 = cmpltps XMM2, XMM1
27
+ 0xc3 = ret
28
+ 0xcb = retf
29
+ 0xc2,0x08,0x00 = ret 8
30
+ 0xca,0x08,0x00 = retf 8
@@ -0,0 +1,833 @@
1
+ # CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
2
+ 0xc5,0xca,0x58,0xd4 = vaddss %xmm4, %xmm6, %xmm2
3
+ 0xc5,0xca,0x59,0xd4 = vmulss %xmm4, %xmm6, %xmm2
4
+ 0xc5,0xca,0x5c,0xd4 = vsubss %xmm4, %xmm6, %xmm2
5
+ 0xc5,0xca,0x5e,0xd4 = vdivss %xmm4, %xmm6, %xmm2
6
+ 0xc5,0xcb,0x58,0xd4 = vaddsd %xmm4, %xmm6, %xmm2
7
+ 0xc5,0xcb,0x59,0xd4 = vmulsd %xmm4, %xmm6, %xmm2
8
+ 0xc5,0xcb,0x5c,0xd4 = vsubsd %xmm4, %xmm6, %xmm2
9
+ 0xc5,0xcb,0x5e,0xd4 = vdivsd %xmm4, %xmm6, %xmm2
10
+ 0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
11
+ 0xc5,0xea,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
12
+ 0xc5,0xea,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
13
+ 0xc5,0xea,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivss 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
14
+ 0xc5,0xeb,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
15
+ 0xc5,0xeb,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
16
+ 0xc5,0xeb,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
17
+ 0xc5,0xeb,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivsd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
18
+ 0xc5,0xc8,0x58,0xd4 = vaddps %xmm4, %xmm6, %xmm2
19
+ 0xc5,0xc8,0x5c,0xd4 = vsubps %xmm4, %xmm6, %xmm2
20
+ 0xc5,0xc8,0x59,0xd4 = vmulps %xmm4, %xmm6, %xmm2
21
+ 0xc5,0xc8,0x5e,0xd4 = vdivps %xmm4, %xmm6, %xmm2
22
+ 0xc5,0xc9,0x58,0xd4 = vaddpd %xmm4, %xmm6, %xmm2
23
+ 0xc5,0xc9,0x5c,0xd4 = vsubpd %xmm4, %xmm6, %xmm2
24
+ 0xc5,0xc9,0x59,0xd4 = vmulpd %xmm4, %xmm6, %xmm2
25
+ 0xc5,0xc9,0x5e,0xd4 = vdivpd %xmm4, %xmm6, %xmm2
26
+ 0xc5,0xe8,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
27
+ 0xc5,0xe8,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
28
+ 0xc5,0xe8,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
29
+ 0xc5,0xe8,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivps 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
30
+ 0xc5,0xe9,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
31
+ 0xc5,0xe9,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
32
+ 0xc5,0xe9,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
33
+ 0xc5,0xe9,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivpd 3735928559(%ebx,%ecx,8), %xmm2, %xmm5
34
+ 0xc5,0xda,0x5f,0xf2 = vmaxss %xmm2, %xmm4, %xmm6
35
+ 0xc5,0xdb,0x5f,0xf2 = vmaxsd %xmm2, %xmm4, %xmm6
36
+ 0xc5,0xda,0x5d,0xf2 = vminss %xmm2, %xmm4, %xmm6
37
+ 0xc5,0xdb,0x5d,0xf2 = vminsd %xmm2, %xmm4, %xmm6
38
+ 0xc5,0xea,0x5f,0x6c,0xcb,0xfc = vmaxss -4(%ebx,%ecx,8), %xmm2, %xmm5
39
+ 0xc5,0xeb,0x5f,0x6c,0xcb,0xfc = vmaxsd -4(%ebx,%ecx,8), %xmm2, %xmm5
40
+ 0xc5,0xea,0x5d,0x6c,0xcb,0xfc = vminss -4(%ebx,%ecx,8), %xmm2, %xmm5
41
+ 0xc5,0xeb,0x5d,0x6c,0xcb,0xfc = vminsd -4(%ebx,%ecx,8), %xmm2, %xmm5
42
+ 0xc5,0xd8,0x5f,0xf2 = vmaxps %xmm2, %xmm4, %xmm6
43
+ 0xc5,0xd9,0x5f,0xf2 = vmaxpd %xmm2, %xmm4, %xmm6
44
+ 0xc5,0xd8,0x5d,0xf2 = vminps %xmm2, %xmm4, %xmm6
45
+ 0xc5,0xd9,0x5d,0xf2 = vminpd %xmm2, %xmm4, %xmm6
46
+ 0xc5,0xe8,0x5f,0x6c,0xcb,0xfc = vmaxps -4(%ebx,%ecx,8), %xmm2, %xmm5
47
+ 0xc5,0xe9,0x5f,0x6c,0xcb,0xfc = vmaxpd -4(%ebx,%ecx,8), %xmm2, %xmm5
48
+ 0xc5,0xe8,0x5d,0x6c,0xcb,0xfc = vminps -4(%ebx,%ecx,8), %xmm2, %xmm5
49
+ 0xc5,0xe9,0x5d,0x6c,0xcb,0xfc = vminpd -4(%ebx,%ecx,8), %xmm2, %xmm5
50
+ 0xc5,0xd8,0x54,0xf2 = vandps %xmm2, %xmm4, %xmm6
51
+ 0xc5,0xd9,0x54,0xf2 = vandpd %xmm2, %xmm4, %xmm6
52
+ 0xc5,0xe8,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %xmm2, %xmm5
53
+ 0xc5,0xe9,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %xmm2, %xmm5
54
+ 0xc5,0xd8,0x56,0xf2 = vorps %xmm2, %xmm4, %xmm6
55
+ 0xc5,0xd9,0x56,0xf2 = vorpd %xmm2, %xmm4, %xmm6
56
+ 0xc5,0xe8,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %xmm2, %xmm5
57
+ 0xc5,0xe9,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
58
+ 0xc5,0xd8,0x57,0xf2 = vxorps %xmm2, %xmm4, %xmm6
59
+ 0xc5,0xd9,0x57,0xf2 = vxorpd %xmm2, %xmm4, %xmm6
60
+ 0xc5,0xe8,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %xmm2, %xmm5
61
+ 0xc5,0xe9,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %xmm2, %xmm5
62
+ 0xc5,0xd8,0x55,0xf2 = vandnps %xmm2, %xmm4, %xmm6
63
+ 0xc5,0xd9,0x55,0xf2 = vandnpd %xmm2, %xmm4, %xmm6
64
+ 0xc5,0xe8,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %xmm2, %xmm5
65
+ 0xc5,0xe9,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %xmm2, %xmm5
66
+ 0xc5,0xfa,0x10,0x6c,0xcb,0xfc = vmovss -4(%ebx,%ecx,8), %xmm5
67
+ 0xc5,0xea,0x10,0xec = vmovss %xmm4, %xmm2, %xmm5
68
+ 0xc5,0xfb,0x10,0x6c,0xcb,0xfc = vmovsd -4(%ebx,%ecx,8), %xmm5
69
+ 0xc5,0xeb,0x10,0xec = vmovsd %xmm4, %xmm2, %xmm5
70
+ 0xc5,0xe8,0x15,0xe1 = vunpckhps %xmm1, %xmm2, %xmm4
71
+ 0xc5,0xe9,0x15,0xe1 = vunpckhpd %xmm1, %xmm2, %xmm4
72
+ 0xc5,0xe8,0x14,0xe1 = vunpcklps %xmm1, %xmm2, %xmm4
73
+ 0xc5,0xe9,0x14,0xe1 = vunpcklpd %xmm1, %xmm2, %xmm4
74
+ 0xc5,0xe8,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx,%ecx,8), %xmm2, %xmm5
75
+ 0xc5,0xe9,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx,%ecx,8), %xmm2, %xmm5
76
+ 0xc5,0xe8,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx,%ecx,8), %xmm2, %xmm5
77
+ 0xc5,0xe9,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx,%ecx,8), %xmm2, %xmm5
78
+ 0xc5,0xc8,0xc2,0xc8,0x00 = vcmpps $0, %xmm0, %xmm6, %xmm1
79
+ 0xc5,0xc8,0xc2,0x08,0x00 = vcmpps $0, (%eax), %xmm6, %xmm1
80
+ 0xc5,0xc8,0xc2,0xc8,0x07 = vcmpps $7, %xmm0, %xmm6, %xmm1
81
+ 0xc5,0xc9,0xc2,0xc8,0x00 = vcmppd $0, %xmm0, %xmm6, %xmm1
82
+ 0xc5,0xc9,0xc2,0x08,0x00 = vcmppd $0, (%eax), %xmm6, %xmm1
83
+ 0xc5,0xc9,0xc2,0xc8,0x07 = vcmppd $7, %xmm0, %xmm6, %xmm1
84
+ 0xc5,0xe8,0xc6,0xd9,0x08 = vshufps $8, %xmm1, %xmm2, %xmm3
85
+ 0xc5,0xe8,0xc6,0x5c,0xcb,0xfc,0x08 = vshufps $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
86
+ 0xc5,0xe9,0xc6,0xd9,0x08 = vshufpd $8, %xmm1, %xmm2, %xmm3
87
+ 0xc5,0xe9,0xc6,0x5c,0xcb,0xfc,0x08 = vshufpd $8, -4(%ebx,%ecx,8), %xmm2, %xmm3
88
+ 0xc5,0xe8,0xc2,0xd9,0x00 = vcmpeqps %xmm1, %xmm2, %xmm3
89
+ 0xc5,0xe8,0xc2,0xd9,0x02 = vcmpleps %xmm1, %xmm2, %xmm3
90
+ 0xc5,0xe8,0xc2,0xd9,0x01 = vcmpltps %xmm1, %xmm2, %xmm3
91
+ 0xc5,0xe8,0xc2,0xd9,0x04 = vcmpneqps %xmm1, %xmm2, %xmm3
92
+ 0xc5,0xe8,0xc2,0xd9,0x06 = vcmpnleps %xmm1, %xmm2, %xmm3
93
+ 0xc5,0xe8,0xc2,0xd9,0x05 = vcmpnltps %xmm1, %xmm2, %xmm3
94
+ 0xc5,0xe8,0xc2,0xd9,0x07 = vcmpordps %xmm1, %xmm2, %xmm3
95
+ 0xc5,0xe8,0xc2,0xd9,0x03 = vcmpunordps %xmm1, %xmm2, %xmm3
96
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %xmm2, %xmm3
97
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %xmm2, %xmm3
98
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %xmm2, %xmm3
99
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %xmm2, %xmm3
100
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %xmm2, %xmm3
101
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %xmm2, %xmm3
102
+ 0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %xmm6, %xmm2
103
+ 0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %xmm2, %xmm3
104
+ 0xc5,0xe9,0xc2,0xd9,0x00 = vcmpeqpd %xmm1, %xmm2, %xmm3
105
+ 0xc5,0xe9,0xc2,0xd9,0x02 = vcmplepd %xmm1, %xmm2, %xmm3
106
+ 0xc5,0xe9,0xc2,0xd9,0x01 = vcmpltpd %xmm1, %xmm2, %xmm3
107
+ 0xc5,0xe9,0xc2,0xd9,0x04 = vcmpneqpd %xmm1, %xmm2, %xmm3
108
+ 0xc5,0xe9,0xc2,0xd9,0x06 = vcmpnlepd %xmm1, %xmm2, %xmm3
109
+ 0xc5,0xe9,0xc2,0xd9,0x05 = vcmpnltpd %xmm1, %xmm2, %xmm3
110
+ 0xc5,0xe9,0xc2,0xd9,0x07 = vcmpordpd %xmm1, %xmm2, %xmm3
111
+ 0xc5,0xe9,0xc2,0xd9,0x03 = vcmpunordpd %xmm1, %xmm2, %xmm3
112
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
113
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %xmm2, %xmm3
114
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
115
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %xmm2, %xmm3
116
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %xmm2, %xmm3
117
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %xmm2, %xmm3
118
+ 0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %xmm6, %xmm2
119
+ 0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %xmm2, %xmm3
120
+ 0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
121
+ 0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
122
+ 0xc5,0xfc,0x50,0xc2 = vmovmskps %ymm2, %eax
123
+ 0xc5,0xfd,0x50,0xc2 = vmovmskpd %ymm2, %eax
124
+ 0xc5,0xea,0xc2,0xd9,0x00 = vcmpeqss %xmm1, %xmm2, %xmm3
125
+ 0xc5,0xea,0xc2,0xd9,0x02 = vcmpless %xmm1, %xmm2, %xmm3
126
+ 0xc5,0xea,0xc2,0xd9,0x01 = vcmpltss %xmm1, %xmm2, %xmm3
127
+ 0xc5,0xea,0xc2,0xd9,0x04 = vcmpneqss %xmm1, %xmm2, %xmm3
128
+ 0xc5,0xea,0xc2,0xd9,0x06 = vcmpnless %xmm1, %xmm2, %xmm3
129
+ 0xc5,0xea,0xc2,0xd9,0x05 = vcmpnltss %xmm1, %xmm2, %xmm3
130
+ 0xc5,0xea,0xc2,0xd9,0x07 = vcmpordss %xmm1, %xmm2, %xmm3
131
+ 0xc5,0xea,0xc2,0xd9,0x03 = vcmpunordss %xmm1, %xmm2, %xmm3
132
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqss -4(%ebx,%ecx,8), %xmm2, %xmm3
133
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpless -4(%ebx,%ecx,8), %xmm2, %xmm3
134
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltss -4(%ebx,%ecx,8), %xmm2, %xmm3
135
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqss -4(%ebx,%ecx,8), %xmm2, %xmm3
136
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnless -4(%ebx,%ecx,8), %xmm2, %xmm3
137
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltss -4(%ebx,%ecx,8), %xmm2, %xmm3
138
+ 0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%ebx,%ecx,8), %xmm6, %xmm2
139
+ 0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordss -4(%ebx,%ecx,8), %xmm2, %xmm3
140
+ 0xc5,0xeb,0xc2,0xd9,0x00 = vcmpeqsd %xmm1, %xmm2, %xmm3
141
+ 0xc5,0xeb,0xc2,0xd9,0x02 = vcmplesd %xmm1, %xmm2, %xmm3
142
+ 0xc5,0xeb,0xc2,0xd9,0x01 = vcmpltsd %xmm1, %xmm2, %xmm3
143
+ 0xc5,0xeb,0xc2,0xd9,0x04 = vcmpneqsd %xmm1, %xmm2, %xmm3
144
+ 0xc5,0xeb,0xc2,0xd9,0x06 = vcmpnlesd %xmm1, %xmm2, %xmm3
145
+ 0xc5,0xeb,0xc2,0xd9,0x05 = vcmpnltsd %xmm1, %xmm2, %xmm3
146
+ 0xc5,0xeb,0xc2,0xd9,0x07 = vcmpordsd %xmm1, %xmm2, %xmm3
147
+ 0xc5,0xeb,0xc2,0xd9,0x03 = vcmpunordsd %xmm1, %xmm2, %xmm3
148
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
149
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplesd -4(%ebx,%ecx,8), %xmm2, %xmm3
150
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
151
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqsd -4(%ebx,%ecx,8), %xmm2, %xmm3
152
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlesd -4(%ebx,%ecx,8), %xmm2, %xmm3
153
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltsd -4(%ebx,%ecx,8), %xmm2, %xmm3
154
+ 0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%ebx,%ecx,8), %xmm6, %xmm2
155
+ 0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordsd -4(%ebx,%ecx,8), %xmm2, %xmm3
156
+ 0xc5,0xf8,0x2e,0xd1 = vucomiss %xmm1, %xmm2
157
+ 0xc5,0xf8,0x2e,0x10 = vucomiss (%eax), %xmm2
158
+ 0xc5,0xf8,0x2f,0xd1 = vcomiss %xmm1, %xmm2
159
+ 0xc5,0xf8,0x2f,0x10 = vcomiss (%eax), %xmm2
160
+ 0xc5,0xf9,0x2e,0xd1 = vucomisd %xmm1, %xmm2
161
+ 0xc5,0xf9,0x2e,0x10 = vucomisd (%eax), %xmm2
162
+ 0xc5,0xf9,0x2f,0xd1 = vcomisd %xmm1, %xmm2
163
+ 0xc5,0xf9,0x2f,0x10 = vcomisd (%eax), %xmm2
164
+ 0xc5,0xfa,0x2c,0xc1 = vcvttss2si %xmm1, %eax
165
+ 0xc5,0xfa,0x2c,0x01 = vcvttss2si (%ecx), %eax
166
+ 0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
167
+ 0xc5,0xf2,0x2a,0x10 = vcvtsi2ss (%eax), %xmm1, %xmm2
168
+ 0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
169
+ 0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
170
+ 0xc5,0xfb,0x2c,0xc1 = vcvttsd2si %xmm1, %eax
171
+ 0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%ecx), %eax
172
+ 0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
173
+ 0xc5,0xf3,0x2a,0x10 = vcvtsi2sd (%eax), %xmm1, %xmm2
174
+ 0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
175
+ 0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
176
+ 0xc5,0xf8,0x28,0x10 = vmovaps (%eax), %xmm2
177
+ 0xc5,0xf8,0x28,0xd1 = vmovaps %xmm1, %xmm2
178
+ 0xc5,0xf8,0x29,0x08 = vmovaps %xmm1, (%eax)
179
+ 0xc5,0xf9,0x28,0x10 = vmovapd (%eax), %xmm2
180
+ 0xc5,0xf9,0x28,0xd1 = vmovapd %xmm1, %xmm2
181
+ 0xc5,0xf9,0x29,0x08 = vmovapd %xmm1, (%eax)
182
+ 0xc5,0xf8,0x10,0x10 = vmovups (%eax), %xmm2
183
+ 0xc5,0xf8,0x10,0xd1 = vmovups %xmm1, %xmm2
184
+ 0xc5,0xf8,0x11,0x08 = vmovups %xmm1, (%eax)
185
+ 0xc5,0xf9,0x10,0x10 = vmovupd (%eax), %xmm2
186
+ 0xc5,0xf9,0x10,0xd1 = vmovupd %xmm1, %xmm2
187
+ 0xc5,0xf9,0x11,0x08 = vmovupd %xmm1, (%eax)
188
+ 0xc5,0xf8,0x13,0x08 = vmovlps %xmm1, (%eax)
189
+ 0xc5,0xe8,0x12,0x18 = vmovlps (%eax), %xmm2, %xmm3
190
+ 0xc5,0xf9,0x13,0x08 = vmovlpd %xmm1, (%eax)
191
+ 0xc5,0xe9,0x12,0x18 = vmovlpd (%eax), %xmm2, %xmm3
192
+ 0xc5,0xf8,0x17,0x08 = vmovhps %xmm1, (%eax)
193
+ 0xc5,0xe8,0x16,0x18 = vmovhps (%eax), %xmm2, %xmm3
194
+ 0xc5,0xf9,0x17,0x08 = vmovhpd %xmm1, (%eax)
195
+ 0xc5,0xe9,0x16,0x18 = vmovhpd (%eax), %xmm2, %xmm3
196
+ 0xc5,0xe8,0x16,0xd9 = vmovlhps %xmm1, %xmm2, %xmm3
197
+ 0xc5,0xe8,0x12,0xd9 = vmovhlps %xmm1, %xmm2, %xmm3
198
+ 0xc5,0xfa,0x2d,0xc1 = vcvtss2si %xmm1, %eax
199
+ 0xc5,0xfa,0x2d,0x18 = vcvtss2si (%eax), %ebx
200
+ 0xc5,0xfa,0x2d,0xc1 = vcvtss2sil %xmm1, %eax
201
+ 0xc5,0xfa,0x2d,0x18 = vcvtss2sil (%eax), %ebx
202
+ 0xc5,0xf8,0x5b,0xf5 = vcvtdq2ps %xmm5, %xmm6
203
+ 0xc5,0xf8,0x5b,0x30 = vcvtdq2ps (%eax), %xmm6
204
+ 0xc5,0xdb,0x5a,0xf2 = vcvtsd2ss %xmm2, %xmm4, %xmm6
205
+ 0xc5,0xdb,0x5a,0x30 = vcvtsd2ss (%eax), %xmm4, %xmm6
206
+ 0xc5,0xf9,0x5b,0xda = vcvtps2dq %xmm2, %xmm3
207
+ 0xc5,0xf9,0x5b,0x18 = vcvtps2dq (%eax), %xmm3
208
+ 0xc5,0xda,0x5a,0xf2 = vcvtss2sd %xmm2, %xmm4, %xmm6
209
+ 0xc5,0xda,0x5a,0x30 = vcvtss2sd (%eax), %xmm4, %xmm6
210
+ 0xc5,0xf8,0x5b,0xf4 = vcvtdq2ps %xmm4, %xmm6
211
+ 0xc5,0xf8,0x5b,0x21 = vcvtdq2ps (%ecx), %xmm4
212
+ 0xc5,0xfa,0x5b,0xda = vcvttps2dq %xmm2, %xmm3
213
+ 0xc5,0xfa,0x5b,0x18 = vcvttps2dq (%eax), %xmm3
214
+ 0xc5,0xf8,0x5a,0xda = vcvtps2pd %xmm2, %xmm3
215
+ 0xc5,0xf8,0x5a,0x18 = vcvtps2pd (%eax), %xmm3
216
+ 0xc5,0xf9,0x5a,0xda = vcvtpd2ps %xmm2, %xmm3
217
+ 0xc5,0xf9,0x51,0xd1 = vsqrtpd %xmm1, %xmm2
218
+ 0xc5,0xf9,0x51,0x10 = vsqrtpd (%eax), %xmm2
219
+ 0xc5,0xf8,0x51,0xd1 = vsqrtps %xmm1, %xmm2
220
+ 0xc5,0xf8,0x51,0x10 = vsqrtps (%eax), %xmm2
221
+ 0xc5,0xeb,0x51,0xd9 = vsqrtsd %xmm1, %xmm2, %xmm3
222
+ 0xc5,0xeb,0x51,0x18 = vsqrtsd (%eax), %xmm2, %xmm3
223
+ 0xc5,0xea,0x51,0xd9 = vsqrtss %xmm1, %xmm2, %xmm3
224
+ 0xc5,0xea,0x51,0x18 = vsqrtss (%eax), %xmm2, %xmm3
225
+ 0xc5,0xf8,0x52,0xd1 = vrsqrtps %xmm1, %xmm2
226
+ 0xc5,0xf8,0x52,0x10 = vrsqrtps (%eax), %xmm2
227
+ 0xc5,0xea,0x52,0xd9 = vrsqrtss %xmm1, %xmm2, %xmm3
228
+ 0xc5,0xea,0x52,0x18 = vrsqrtss (%eax), %xmm2, %xmm3
229
+ 0xc5,0xf8,0x53,0xd1 = vrcpps %xmm1, %xmm2
230
+ 0xc5,0xf8,0x53,0x10 = vrcpps (%eax), %xmm2
231
+ 0xc5,0xea,0x53,0xd9 = vrcpss %xmm1, %xmm2, %xmm3
232
+ 0xc5,0xea,0x53,0x18 = vrcpss (%eax), %xmm2, %xmm3
233
+ 0xc5,0xf9,0xe7,0x08 = vmovntdq %xmm1, (%eax)
234
+ 0xc5,0xf9,0x2b,0x08 = vmovntpd %xmm1, (%eax)
235
+ 0xc5,0xf8,0x2b,0x08 = vmovntps %xmm1, (%eax)
236
+ 0xc5,0xf8,0xae,0x10 = vldmxcsr (%eax)
237
+ 0xc5,0xf8,0xae,0x18 = vstmxcsr (%eax)
238
+ 0xc5,0xf8,0xae,0x15,0xef,0xbe,0xad,0xde = vldmxcsr 0xdeadbeef
239
+ 0xc5,0xf8,0xae,0x1d,0xef,0xbe,0xad,0xde = vstmxcsr 0xdeadbeef
240
+ 0xc5,0xe9,0xf8,0xd9 = vpsubb %xmm1, %xmm2, %xmm3
241
+ 0xc5,0xe9,0xf8,0x18 = vpsubb (%eax), %xmm2, %xmm3
242
+ 0xc5,0xe9,0xf9,0xd9 = vpsubw %xmm1, %xmm2, %xmm3
243
+ 0xc5,0xe9,0xf9,0x18 = vpsubw (%eax), %xmm2, %xmm3
244
+ 0xc5,0xe9,0xfa,0xd9 = vpsubd %xmm1, %xmm2, %xmm3
245
+ 0xc5,0xe9,0xfa,0x18 = vpsubd (%eax), %xmm2, %xmm3
246
+ 0xc5,0xe9,0xfb,0xd9 = vpsubq %xmm1, %xmm2, %xmm3
247
+ 0xc5,0xe9,0xfb,0x18 = vpsubq (%eax), %xmm2, %xmm3
248
+ 0xc5,0xe9,0xe8,0xd9 = vpsubsb %xmm1, %xmm2, %xmm3
249
+ 0xc5,0xe9,0xe8,0x18 = vpsubsb (%eax), %xmm2, %xmm3
250
+ 0xc5,0xe9,0xe9,0xd9 = vpsubsw %xmm1, %xmm2, %xmm3
251
+ 0xc5,0xe9,0xe9,0x18 = vpsubsw (%eax), %xmm2, %xmm3
252
+ 0xc5,0xe9,0xd8,0xd9 = vpsubusb %xmm1, %xmm2, %xmm3
253
+ 0xc5,0xe9,0xd8,0x18 = vpsubusb (%eax), %xmm2, %xmm3
254
+ 0xc5,0xe9,0xd9,0xd9 = vpsubusw %xmm1, %xmm2, %xmm3
255
+ 0xc5,0xe9,0xd9,0x18 = vpsubusw (%eax), %xmm2, %xmm3
256
+ 0xc5,0xe9,0xfc,0xd9 = vpaddb %xmm1, %xmm2, %xmm3
257
+ 0xc5,0xe9,0xfc,0x18 = vpaddb (%eax), %xmm2, %xmm3
258
+ 0xc5,0xe9,0xfd,0xd9 = vpaddw %xmm1, %xmm2, %xmm3
259
+ 0xc5,0xe9,0xfd,0x18 = vpaddw (%eax), %xmm2, %xmm3
260
+ 0xc5,0xe9,0xfe,0xd9 = vpaddd %xmm1, %xmm2, %xmm3
261
+ 0xc5,0xe9,0xfe,0x18 = vpaddd (%eax), %xmm2, %xmm3
262
+ 0xc5,0xe9,0xd4,0xd9 = vpaddq %xmm1, %xmm2, %xmm3
263
+ 0xc5,0xe9,0xd4,0x18 = vpaddq (%eax), %xmm2, %xmm3
264
+ 0xc5,0xe9,0xec,0xd9 = vpaddsb %xmm1, %xmm2, %xmm3
265
+ 0xc5,0xe9,0xec,0x18 = vpaddsb (%eax), %xmm2, %xmm3
266
+ 0xc5,0xe9,0xed,0xd9 = vpaddsw %xmm1, %xmm2, %xmm3
267
+ 0xc5,0xe9,0xed,0x18 = vpaddsw (%eax), %xmm2, %xmm3
268
+ 0xc5,0xe9,0xdc,0xd9 = vpaddusb %xmm1, %xmm2, %xmm3
269
+ 0xc5,0xe9,0xdc,0x18 = vpaddusb (%eax), %xmm2, %xmm3
270
+ 0xc5,0xe9,0xdd,0xd9 = vpaddusw %xmm1, %xmm2, %xmm3
271
+ 0xc5,0xe9,0xdd,0x18 = vpaddusw (%eax), %xmm2, %xmm3
272
+ 0xc5,0xe9,0xe4,0xd9 = vpmulhuw %xmm1, %xmm2, %xmm3
273
+ 0xc5,0xe9,0xe4,0x18 = vpmulhuw (%eax), %xmm2, %xmm3
274
+ 0xc5,0xe9,0xe5,0xd9 = vpmulhw %xmm1, %xmm2, %xmm3
275
+ 0xc5,0xe9,0xe5,0x18 = vpmulhw (%eax), %xmm2, %xmm3
276
+ 0xc5,0xe9,0xd5,0xd9 = vpmullw %xmm1, %xmm2, %xmm3
277
+ 0xc5,0xe9,0xd5,0x18 = vpmullw (%eax), %xmm2, %xmm3
278
+ 0xc5,0xe9,0xf4,0xd9 = vpmuludq %xmm1, %xmm2, %xmm3
279
+ 0xc5,0xe9,0xf4,0x18 = vpmuludq (%eax), %xmm2, %xmm3
280
+ 0xc5,0xe9,0xe0,0xd9 = vpavgb %xmm1, %xmm2, %xmm3
281
+ 0xc5,0xe9,0xe0,0x18 = vpavgb (%eax), %xmm2, %xmm3
282
+ 0xc5,0xe9,0xe3,0xd9 = vpavgw %xmm1, %xmm2, %xmm3
283
+ 0xc5,0xe9,0xe3,0x18 = vpavgw (%eax), %xmm2, %xmm3
284
+ 0xc5,0xe9,0xea,0xd9 = vpminsw %xmm1, %xmm2, %xmm3
285
+ 0xc5,0xe9,0xea,0x18 = vpminsw (%eax), %xmm2, %xmm3
286
+ 0xc5,0xe9,0xda,0xd9 = vpminub %xmm1, %xmm2, %xmm3
287
+ 0xc5,0xe9,0xda,0x18 = vpminub (%eax), %xmm2, %xmm3
288
+ 0xc5,0xe9,0xee,0xd9 = vpmaxsw %xmm1, %xmm2, %xmm3
289
+ 0xc5,0xe9,0xee,0x18 = vpmaxsw (%eax), %xmm2, %xmm3
290
+ 0xc5,0xe9,0xde,0xd9 = vpmaxub %xmm1, %xmm2, %xmm3
291
+ 0xc5,0xe9,0xde,0x18 = vpmaxub (%eax), %xmm2, %xmm3
292
+ 0xc5,0xe9,0xf6,0xd9 = vpsadbw %xmm1, %xmm2, %xmm3
293
+ 0xc5,0xe9,0xf6,0x18 = vpsadbw (%eax), %xmm2, %xmm3
294
+ 0xc5,0xe9,0xf1,0xd9 = vpsllw %xmm1, %xmm2, %xmm3
295
+ 0xc5,0xe9,0xf1,0x18 = vpsllw (%eax), %xmm2, %xmm3
296
+ 0xc5,0xe9,0xf2,0xd9 = vpslld %xmm1, %xmm2, %xmm3
297
+ 0xc5,0xe9,0xf2,0x18 = vpslld (%eax), %xmm2, %xmm3
298
+ 0xc5,0xe9,0xf3,0xd9 = vpsllq %xmm1, %xmm2, %xmm3
299
+ 0xc5,0xe9,0xf3,0x18 = vpsllq (%eax), %xmm2, %xmm3
300
+ 0xc5,0xe9,0xe1,0xd9 = vpsraw %xmm1, %xmm2, %xmm3
301
+ 0xc5,0xe9,0xe1,0x18 = vpsraw (%eax), %xmm2, %xmm3
302
+ 0xc5,0xe9,0xe2,0xd9 = vpsrad %xmm1, %xmm2, %xmm3
303
+ 0xc5,0xe9,0xe2,0x18 = vpsrad (%eax), %xmm2, %xmm3
304
+ 0xc5,0xe9,0xd1,0xd9 = vpsrlw %xmm1, %xmm2, %xmm3
305
+ 0xc5,0xe9,0xd1,0x18 = vpsrlw (%eax), %xmm2, %xmm3
306
+ 0xc5,0xe9,0xd2,0xd9 = vpsrld %xmm1, %xmm2, %xmm3
307
+ 0xc5,0xe9,0xd2,0x18 = vpsrld (%eax), %xmm2, %xmm3
308
+ 0xc5,0xe9,0xd3,0xd9 = vpsrlq %xmm1, %xmm2, %xmm3
309
+ 0xc5,0xe9,0xd3,0x18 = vpsrlq (%eax), %xmm2, %xmm3
310
+ 0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
311
+ 0xc5,0xe1,0x73,0xfa,0x0a = vpslldq $10, %xmm2, %xmm3
312
+ 0xc5,0xe1,0x73,0xf2,0x0a = vpsllq $10, %xmm2, %xmm3
313
+ 0xc5,0xe1,0x71,0xf2,0x0a = vpsllw $10, %xmm2, %xmm3
314
+ 0xc5,0xe1,0x72,0xe2,0x0a = vpsrad $10, %xmm2, %xmm3
315
+ 0xc5,0xe1,0x71,0xe2,0x0a = vpsraw $10, %xmm2, %xmm3
316
+ 0xc5,0xe1,0x72,0xd2,0x0a = vpsrld $10, %xmm2, %xmm3
317
+ 0xc5,0xe1,0x73,0xda,0x0a = vpsrldq $10, %xmm2, %xmm3
318
+ 0xc5,0xe1,0x73,0xd2,0x0a = vpsrlq $10, %xmm2, %xmm3
319
+ 0xc5,0xe1,0x71,0xd2,0x0a = vpsrlw $10, %xmm2, %xmm3
320
+ 0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
321
+ 0xc5,0xe9,0xdb,0xd9 = vpand %xmm1, %xmm2, %xmm3
322
+ 0xc5,0xe9,0xdb,0x18 = vpand (%eax), %xmm2, %xmm3
323
+ 0xc5,0xe9,0xeb,0xd9 = vpor %xmm1, %xmm2, %xmm3
324
+ 0xc5,0xe9,0xeb,0x18 = vpor (%eax), %xmm2, %xmm3
325
+ 0xc5,0xe9,0xef,0xd9 = vpxor %xmm1, %xmm2, %xmm3
326
+ 0xc5,0xe9,0xef,0x18 = vpxor (%eax), %xmm2, %xmm3
327
+ 0xc5,0xe9,0xdf,0xd9 = vpandn %xmm1, %xmm2, %xmm3
328
+ 0xc5,0xe9,0xdf,0x18 = vpandn (%eax), %xmm2, %xmm3
329
+ 0xc5,0xe9,0x74,0xd9 = vpcmpeqb %xmm1, %xmm2, %xmm3
330
+ 0xc5,0xe9,0x74,0x18 = vpcmpeqb (%eax), %xmm2, %xmm3
331
+ 0xc5,0xe9,0x75,0xd9 = vpcmpeqw %xmm1, %xmm2, %xmm3
332
+ 0xc5,0xe9,0x75,0x18 = vpcmpeqw (%eax), %xmm2, %xmm3
333
+ 0xc5,0xe9,0x76,0xd9 = vpcmpeqd %xmm1, %xmm2, %xmm3
334
+ 0xc5,0xe9,0x76,0x18 = vpcmpeqd (%eax), %xmm2, %xmm3
335
+ 0xc5,0xe9,0x64,0xd9 = vpcmpgtb %xmm1, %xmm2, %xmm3
336
+ 0xc5,0xe9,0x64,0x18 = vpcmpgtb (%eax), %xmm2, %xmm3
337
+ 0xc5,0xe9,0x65,0xd9 = vpcmpgtw %xmm1, %xmm2, %xmm3
338
+ 0xc5,0xe9,0x65,0x18 = vpcmpgtw (%eax), %xmm2, %xmm3
339
+ 0xc5,0xe9,0x66,0xd9 = vpcmpgtd %xmm1, %xmm2, %xmm3
340
+ 0xc5,0xe9,0x66,0x18 = vpcmpgtd (%eax), %xmm2, %xmm3
341
+ 0xc5,0xe9,0x63,0xd9 = vpacksswb %xmm1, %xmm2, %xmm3
342
+ 0xc5,0xe9,0x63,0x18 = vpacksswb (%eax), %xmm2, %xmm3
343
+ 0xc5,0xe9,0x6b,0xd9 = vpackssdw %xmm1, %xmm2, %xmm3
344
+ 0xc5,0xe9,0x6b,0x18 = vpackssdw (%eax), %xmm2, %xmm3
345
+ 0xc5,0xe9,0x67,0xd9 = vpackuswb %xmm1, %xmm2, %xmm3
346
+ 0xc5,0xe9,0x67,0x18 = vpackuswb (%eax), %xmm2, %xmm3
347
+ 0xc5,0xf9,0x70,0xda,0x04 = vpshufd $4, %xmm2, %xmm3
348
+ 0xc5,0xf9,0x70,0x18,0x04 = vpshufd $4, (%eax), %xmm3
349
+ 0xc5,0xfa,0x70,0xda,0x04 = vpshufhw $4, %xmm2, %xmm3
350
+ 0xc5,0xfa,0x70,0x18,0x04 = vpshufhw $4, (%eax), %xmm3
351
+ 0xc5,0xfb,0x70,0xda,0x04 = vpshuflw $4, %xmm2, %xmm3
352
+ 0xc5,0xfb,0x70,0x18,0x04 = vpshuflw $4, (%eax), %xmm3
353
+ 0xc5,0xe9,0x60,0xd9 = vpunpcklbw %xmm1, %xmm2, %xmm3
354
+ 0xc5,0xe9,0x60,0x18 = vpunpcklbw (%eax), %xmm2, %xmm3
355
+ 0xc5,0xe9,0x61,0xd9 = vpunpcklwd %xmm1, %xmm2, %xmm3
356
+ 0xc5,0xe9,0x61,0x18 = vpunpcklwd (%eax), %xmm2, %xmm3
357
+ 0xc5,0xe9,0x62,0xd9 = vpunpckldq %xmm1, %xmm2, %xmm3
358
+ 0xc5,0xe9,0x62,0x18 = vpunpckldq (%eax), %xmm2, %xmm3
359
+ 0xc5,0xe9,0x6c,0xd9 = vpunpcklqdq %xmm1, %xmm2, %xmm3
360
+ 0xc5,0xe9,0x6c,0x18 = vpunpcklqdq (%eax), %xmm2, %xmm3
361
+ 0xc5,0xe9,0x68,0xd9 = vpunpckhbw %xmm1, %xmm2, %xmm3
362
+ 0xc5,0xe9,0x68,0x18 = vpunpckhbw (%eax), %xmm2, %xmm3
363
+ 0xc5,0xe9,0x69,0xd9 = vpunpckhwd %xmm1, %xmm2, %xmm3
364
+ 0xc5,0xe9,0x69,0x18 = vpunpckhwd (%eax), %xmm2, %xmm3
365
+ 0xc5,0xe9,0x6a,0xd9 = vpunpckhdq %xmm1, %xmm2, %xmm3
366
+ 0xc5,0xe9,0x6a,0x18 = vpunpckhdq (%eax), %xmm2, %xmm3
367
+ 0xc5,0xe9,0x6d,0xd9 = vpunpckhqdq %xmm1, %xmm2, %xmm3
368
+ 0xc5,0xe9,0x6d,0x18 = vpunpckhqdq (%eax), %xmm2, %xmm3
369
+ 0xc5,0xe9,0xc4,0xd8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm3
370
+ 0xc5,0xe9,0xc4,0x18,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm3
371
+ 0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
372
+ 0xc5,0xf9,0xd7,0xc1 = vpmovmskb %xmm1, %eax
373
+ 0xc5,0xf9,0xf7,0xd1 = vmaskmovdqu %xmm1, %xmm2
374
+ 0xc5,0xf9,0x7e,0xc8 = vmovd %xmm1, %eax
375
+ 0xc5,0xf9,0x7e,0x08 = vmovd %xmm1, (%eax)
376
+ 0xc5,0xf9,0x6e,0xc8 = vmovd %eax, %xmm1
377
+ 0xc5,0xf9,0x6e,0x08 = vmovd (%eax), %xmm1
378
+ 0xc5,0xf9,0xd6,0x08 = vmovq %xmm1, (%eax)
379
+ 0xc5,0xfa,0x7e,0xd1 = vmovq %xmm1, %xmm2
380
+ 0xc5,0xfa,0x7e,0x08 = vmovq (%eax), %xmm1
381
+ 0xc5,0xfb,0xe6,0xd1 = vcvtpd2dq %xmm1, %xmm2
382
+ 0xc5,0xfa,0xe6,0xd1 = vcvtdq2pd %xmm1, %xmm2
383
+ 0xc5,0xfa,0xe6,0x10 = vcvtdq2pd (%eax), %xmm2
384
+ 0xc5,0xfa,0x16,0xd1 = vmovshdup %xmm1, %xmm2
385
+ 0xc5,0xfa,0x16,0x10 = vmovshdup (%eax), %xmm2
386
+ 0xc5,0xfa,0x12,0xd1 = vmovsldup %xmm1, %xmm2
387
+ 0xc5,0xfa,0x12,0x10 = vmovsldup (%eax), %xmm2
388
+ 0xc5,0xfb,0x12,0xd1 = vmovddup %xmm1, %xmm2
389
+ 0xc5,0xfb,0x12,0x10 = vmovddup (%eax), %xmm2
390
+ 0xc5,0xeb,0xd0,0xd9 = vaddsubps %xmm1, %xmm2, %xmm3
391
+ 0xc5,0xf3,0xd0,0x10 = vaddsubps (%eax), %xmm1, %xmm2
392
+ 0xc5,0xe9,0xd0,0xd9 = vaddsubpd %xmm1, %xmm2, %xmm3
393
+ 0xc5,0xf1,0xd0,0x10 = vaddsubpd (%eax), %xmm1, %xmm2
394
+ 0xc5,0xeb,0x7c,0xd9 = vhaddps %xmm1, %xmm2, %xmm3
395
+ 0xc5,0xeb,0x7c,0x18 = vhaddps (%eax), %xmm2, %xmm3
396
+ 0xc5,0xe9,0x7c,0xd9 = vhaddpd %xmm1, %xmm2, %xmm3
397
+ 0xc5,0xe9,0x7c,0x18 = vhaddpd (%eax), %xmm2, %xmm3
398
+ 0xc5,0xeb,0x7d,0xd9 = vhsubps %xmm1, %xmm2, %xmm3
399
+ 0xc5,0xeb,0x7d,0x18 = vhsubps (%eax), %xmm2, %xmm3
400
+ 0xc5,0xe9,0x7d,0xd9 = vhsubpd %xmm1, %xmm2, %xmm3
401
+ 0xc5,0xe9,0x7d,0x18 = vhsubpd (%eax), %xmm2, %xmm3
402
+ 0xc4,0xe2,0x79,0x1c,0xd1 = vpabsb %xmm1, %xmm2
403
+ 0xc4,0xe2,0x79,0x1c,0x10 = vpabsb (%eax), %xmm2
404
+ 0xc4,0xe2,0x79,0x1d,0xd1 = vpabsw %xmm1, %xmm2
405
+ 0xc4,0xe2,0x79,0x1d,0x10 = vpabsw (%eax), %xmm2
406
+ 0xc4,0xe2,0x79,0x1e,0xd1 = vpabsd %xmm1, %xmm2
407
+ 0xc4,0xe2,0x79,0x1e,0x10 = vpabsd (%eax), %xmm2
408
+ 0xc4,0xe2,0x69,0x01,0xd9 = vphaddw %xmm1, %xmm2, %xmm3
409
+ 0xc4,0xe2,0x69,0x01,0x18 = vphaddw (%eax), %xmm2, %xmm3
410
+ 0xc4,0xe2,0x69,0x02,0xd9 = vphaddd %xmm1, %xmm2, %xmm3
411
+ 0xc4,0xe2,0x69,0x02,0x18 = vphaddd (%eax), %xmm2, %xmm3
412
+ 0xc4,0xe2,0x69,0x03,0xd9 = vphaddsw %xmm1, %xmm2, %xmm3
413
+ 0xc4,0xe2,0x69,0x03,0x18 = vphaddsw (%eax), %xmm2, %xmm3
414
+ 0xc4,0xe2,0x69,0x05,0xd9 = vphsubw %xmm1, %xmm2, %xmm3
415
+ 0xc4,0xe2,0x69,0x05,0x18 = vphsubw (%eax), %xmm2, %xmm3
416
+ 0xc4,0xe2,0x69,0x06,0xd9 = vphsubd %xmm1, %xmm2, %xmm3
417
+ 0xc4,0xe2,0x69,0x06,0x18 = vphsubd (%eax), %xmm2, %xmm3
418
+ 0xc4,0xe2,0x69,0x07,0xd9 = vphsubsw %xmm1, %xmm2, %xmm3
419
+ 0xc4,0xe2,0x69,0x07,0x18 = vphsubsw (%eax), %xmm2, %xmm3
420
+ 0xc4,0xe2,0x69,0x04,0xd9 = vpmaddubsw %xmm1, %xmm2, %xmm3
421
+ 0xc4,0xe2,0x69,0x04,0x18 = vpmaddubsw (%eax), %xmm2, %xmm3
422
+ 0xc4,0xe2,0x69,0x00,0xd9 = vpshufb %xmm1, %xmm2, %xmm3
423
+ 0xc4,0xe2,0x69,0x00,0x18 = vpshufb (%eax), %xmm2, %xmm3
424
+ 0xc4,0xe2,0x69,0x08,0xd9 = vpsignb %xmm1, %xmm2, %xmm3
425
+ 0xc4,0xe2,0x69,0x08,0x18 = vpsignb (%eax), %xmm2, %xmm3
426
+ 0xc4,0xe2,0x69,0x09,0xd9 = vpsignw %xmm1, %xmm2, %xmm3
427
+ 0xc4,0xe2,0x69,0x09,0x18 = vpsignw (%eax), %xmm2, %xmm3
428
+ 0xc4,0xe2,0x69,0x0a,0xd9 = vpsignd %xmm1, %xmm2, %xmm3
429
+ 0xc4,0xe2,0x69,0x0a,0x18 = vpsignd (%eax), %xmm2, %xmm3
430
+ 0xc4,0xe2,0x69,0x0b,0xd9 = vpmulhrsw %xmm1, %xmm2, %xmm3
431
+ 0xc4,0xe2,0x69,0x0b,0x18 = vpmulhrsw (%eax), %xmm2, %xmm3
432
+ 0xc4,0xe3,0x69,0x0f,0xd9,0x07 = vpalignr $7, %xmm1, %xmm2, %xmm3
433
+ 0xc4,0xe3,0x69,0x0f,0x18,0x07 = vpalignr $7, (%eax), %xmm2, %xmm3
434
+ 0xc4,0xe3,0x69,0x0b,0xd9,0x07 = vroundsd $7, %xmm1, %xmm2, %xmm3
435
+ 0xc4,0xe3,0x69,0x0b,0x18,0x07 = vroundsd $7, (%eax), %xmm2, %xmm3
436
+ 0xc4,0xe3,0x69,0x0a,0xd9,0x07 = vroundss $7, %xmm1, %xmm2, %xmm3
437
+ 0xc4,0xe3,0x69,0x0a,0x18,0x07 = vroundss $7, (%eax), %xmm2, %xmm3
438
+ 0xc4,0xe3,0x79,0x09,0xda,0x07 = vroundpd $7, %xmm2, %xmm3
439
+ 0xc4,0xe3,0x79,0x09,0x18,0x07 = vroundpd $7, (%eax), %xmm3
440
+ 0xc4,0xe3,0x79,0x08,0xda,0x07 = vroundps $7, %xmm2, %xmm3
441
+ 0xc4,0xe3,0x79,0x08,0x18,0x07 = vroundps $7, (%eax), %xmm3
442
+ 0xc4,0xe2,0x79,0x41,0xda = vphminposuw %xmm2, %xmm3
443
+ 0xc4,0xe2,0x79,0x41,0x10 = vphminposuw (%eax), %xmm2
444
+ 0xc4,0xe2,0x61,0x2b,0xca = vpackusdw %xmm2, %xmm3, %xmm1
445
+ 0xc4,0xe2,0x69,0x2b,0x18 = vpackusdw (%eax), %xmm2, %xmm3
446
+ 0xc4,0xe2,0x61,0x29,0xca = vpcmpeqq %xmm2, %xmm3, %xmm1
447
+ 0xc4,0xe2,0x69,0x29,0x18 = vpcmpeqq (%eax), %xmm2, %xmm3
448
+ 0xc4,0xe2,0x61,0x38,0xca = vpminsb %xmm2, %xmm3, %xmm1
449
+ 0xc4,0xe2,0x69,0x38,0x18 = vpminsb (%eax), %xmm2, %xmm3
450
+ 0xc4,0xe2,0x61,0x39,0xca = vpminsd %xmm2, %xmm3, %xmm1
451
+ 0xc4,0xe2,0x69,0x39,0x18 = vpminsd (%eax), %xmm2, %xmm3
452
+ 0xc4,0xe2,0x61,0x3b,0xca = vpminud %xmm2, %xmm3, %xmm1
453
+ 0xc4,0xe2,0x69,0x3b,0x18 = vpminud (%eax), %xmm2, %xmm3
454
+ 0xc4,0xe2,0x61,0x3a,0xca = vpminuw %xmm2, %xmm3, %xmm1
455
+ 0xc4,0xe2,0x69,0x3a,0x18 = vpminuw (%eax), %xmm2, %xmm3
456
+ 0xc4,0xe2,0x61,0x3c,0xca = vpmaxsb %xmm2, %xmm3, %xmm1
457
+ 0xc4,0xe2,0x69,0x3c,0x18 = vpmaxsb (%eax), %xmm2, %xmm3
458
+ 0xc4,0xe2,0x61,0x3d,0xca = vpmaxsd %xmm2, %xmm3, %xmm1
459
+ 0xc4,0xe2,0x69,0x3d,0x18 = vpmaxsd (%eax), %xmm2, %xmm3
460
+ 0xc4,0xe2,0x61,0x3f,0xca = vpmaxud %xmm2, %xmm3, %xmm1
461
+ 0xc4,0xe2,0x69,0x3f,0x18 = vpmaxud (%eax), %xmm2, %xmm3
462
+ 0xc4,0xe2,0x61,0x3e,0xca = vpmaxuw %xmm2, %xmm3, %xmm1
463
+ 0xc4,0xe2,0x69,0x3e,0x18 = vpmaxuw (%eax), %xmm2, %xmm3
464
+ 0xc4,0xe2,0x61,0x28,0xca = vpmuldq %xmm2, %xmm3, %xmm1
465
+ 0xc4,0xe2,0x69,0x28,0x18 = vpmuldq (%eax), %xmm2, %xmm3
466
+ 0xc4,0xe2,0x51,0x40,0xca = vpmulld %xmm2, %xmm5, %xmm1
467
+ 0xc4,0xe2,0x51,0x40,0x18 = vpmulld (%eax), %xmm5, %xmm3
468
+ 0xc4,0xe3,0x51,0x0c,0xca,0x03 = vblendps $3, %xmm2, %xmm5, %xmm1
469
+ 0xc4,0xe3,0x51,0x0c,0x08,0x03 = vblendps $3, (%eax), %xmm5, %xmm1
470
+ 0xc4,0xe3,0x51,0x0d,0xca,0x03 = vblendpd $3, %xmm2, %xmm5, %xmm1
471
+ 0xc4,0xe3,0x51,0x0d,0x08,0x03 = vblendpd $3, (%eax), %xmm5, %xmm1
472
+ 0xc4,0xe3,0x51,0x0e,0xca,0x03 = vpblendw $3, %xmm2, %xmm5, %xmm1
473
+ 0xc4,0xe3,0x51,0x0e,0x08,0x03 = vpblendw $3, (%eax), %xmm5, %xmm1
474
+ 0xc4,0xe3,0x51,0x42,0xca,0x03 = vmpsadbw $3, %xmm2, %xmm5, %xmm1
475
+ 0xc4,0xe3,0x51,0x42,0x08,0x03 = vmpsadbw $3, (%eax), %xmm5, %xmm1
476
+ 0xc4,0xe3,0x51,0x40,0xca,0x03 = vdpps $3, %xmm2, %xmm5, %xmm1
477
+ 0xc4,0xe3,0x51,0x40,0x08,0x03 = vdpps $3, (%eax), %xmm5, %xmm1
478
+ 0xc4,0xe3,0x51,0x41,0xca,0x03 = vdppd $3, %xmm2, %xmm5, %xmm1
479
+ 0xc4,0xe3,0x51,0x41,0x08,0x03 = vdppd $3, (%eax), %xmm5, %xmm1
480
+ 0xc4,0xe3,0x71,0x4b,0xdd,0x20 = vblendvpd %xmm2, %xmm5, %xmm1, %xmm3
481
+ 0xc4,0xe3,0x71,0x4b,0x18,0x20 = vblendvpd %xmm2, (%eax), %xmm1, %xmm3
482
+ 0xc4,0xe3,0x71,0x4a,0xdd,0x20 = vblendvps %xmm2, %xmm5, %xmm1, %xmm3
483
+ 0xc4,0xe3,0x71,0x4a,0x18,0x20 = vblendvps %xmm2, (%eax), %xmm1, %xmm3
484
+ 0xc4,0xe3,0x71,0x4c,0xdd,0x20 = vpblendvb %xmm2, %xmm5, %xmm1, %xmm3
485
+ 0xc4,0xe3,0x71,0x4c,0x18,0x20 = vpblendvb %xmm2, (%eax), %xmm1, %xmm3
486
+ 0xc4,0xe2,0x79,0x20,0xea = vpmovsxbw %xmm2, %xmm5
487
+ 0xc4,0xe2,0x79,0x20,0x10 = vpmovsxbw (%eax), %xmm2
488
+ 0xc4,0xe2,0x79,0x23,0xea = vpmovsxwd %xmm2, %xmm5
489
+ 0xc4,0xe2,0x79,0x23,0x10 = vpmovsxwd (%eax), %xmm2
490
+ 0xc4,0xe2,0x79,0x25,0xea = vpmovsxdq %xmm2, %xmm5
491
+ 0xc4,0xe2,0x79,0x25,0x10 = vpmovsxdq (%eax), %xmm2
492
+ 0xc4,0xe2,0x79,0x30,0xea = vpmovzxbw %xmm2, %xmm5
493
+ 0xc4,0xe2,0x79,0x30,0x10 = vpmovzxbw (%eax), %xmm2
494
+ 0xc4,0xe2,0x79,0x33,0xea = vpmovzxwd %xmm2, %xmm5
495
+ 0xc4,0xe2,0x79,0x33,0x10 = vpmovzxwd (%eax), %xmm2
496
+ 0xc4,0xe2,0x79,0x35,0xea = vpmovzxdq %xmm2, %xmm5
497
+ 0xc4,0xe2,0x79,0x35,0x10 = vpmovzxdq (%eax), %xmm2
498
+ 0xc4,0xe2,0x79,0x22,0xea = vpmovsxbq %xmm2, %xmm5
499
+ 0xc4,0xe2,0x79,0x22,0x10 = vpmovsxbq (%eax), %xmm2
500
+ 0xc4,0xe2,0x79,0x32,0xea = vpmovzxbq %xmm2, %xmm5
501
+ 0xc4,0xe2,0x79,0x32,0x10 = vpmovzxbq (%eax), %xmm2
502
+ 0xc4,0xe2,0x79,0x21,0xea = vpmovsxbd %xmm2, %xmm5
503
+ 0xc4,0xe2,0x79,0x21,0x10 = vpmovsxbd (%eax), %xmm2
504
+ 0xc4,0xe2,0x79,0x24,0xea = vpmovsxwq %xmm2, %xmm5
505
+ 0xc4,0xe2,0x79,0x24,0x10 = vpmovsxwq (%eax), %xmm2
506
+ 0xc4,0xe2,0x79,0x31,0xea = vpmovzxbd %xmm2, %xmm5
507
+ 0xc4,0xe2,0x79,0x31,0x10 = vpmovzxbd (%eax), %xmm2
508
+ 0xc4,0xe2,0x79,0x34,0xea = vpmovzxwq %xmm2, %xmm5
509
+ 0xc4,0xe2,0x79,0x34,0x10 = vpmovzxwq (%eax), %xmm2
510
+ 0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
511
+ 0xc4,0xe3,0x79,0x15,0x10,0x07 = vpextrw $7, %xmm2, (%eax)
512
+ 0xc4,0xe3,0x79,0x16,0xd0,0x07 = vpextrd $7, %xmm2, %eax
513
+ 0xc4,0xe3,0x79,0x16,0x10,0x07 = vpextrd $7, %xmm2, (%eax)
514
+ 0xc4,0xe3,0x79,0x14,0xd0,0x07 = vpextrb $7, %xmm2, %eax
515
+ 0xc4,0xe3,0x79,0x14,0x10,0x07 = vpextrb $7, %xmm2, (%eax)
516
+ 0xc4,0xe3,0x79,0x17,0x10,0x07 = vextractps $7, %xmm2, (%eax)
517
+ 0xc4,0xe3,0x79,0x17,0xd0,0x07 = vextractps $7, %xmm2, %eax
518
+ 0xc5,0xe9,0xc4,0xe8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm5
519
+ 0xc5,0xe9,0xc4,0x28,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm5
520
+ 0xc4,0xe3,0x69,0x20,0xe8,0x07 = vpinsrb $7, %eax, %xmm2, %xmm5
521
+ 0xc4,0xe3,0x69,0x20,0x28,0x07 = vpinsrb $7, (%eax), %xmm2, %xmm5
522
+ 0xc4,0xe3,0x69,0x22,0xe8,0x07 = vpinsrd $7, %eax, %xmm2, %xmm5
523
+ 0xc4,0xe3,0x69,0x22,0x28,0x07 = vpinsrd $7, (%eax), %xmm2, %xmm5
524
+ 0xc4,0xe3,0x51,0x21,0xca,0x07 = vinsertps $7, %xmm2, %xmm5, %xmm1
525
+ 0xc4,0xe3,0x51,0x21,0x08,0x07 = vinsertps $7, (%eax), %xmm5, %xmm1
526
+ 0xc4,0xe2,0x79,0x17,0xea = vptest %xmm2, %xmm5
527
+ 0xc4,0xe2,0x79,0x17,0x10 = vptest (%eax), %xmm2
528
+ 0xc4,0xe2,0x79,0x2a,0x10 = vmovntdqa (%eax), %xmm2
529
+ 0xc4,0xe2,0x51,0x37,0xca = vpcmpgtq %xmm2, %xmm5, %xmm1
530
+ 0xc4,0xe2,0x51,0x37,0x18 = vpcmpgtq (%eax), %xmm5, %xmm3
531
+ 0xc4,0xe3,0x79,0x62,0xea,0x07 = vpcmpistrm $7, %xmm2, %xmm5
532
+ 0xc4,0xe3,0x79,0x62,0x28,0x07 = vpcmpistrm $7, (%eax), %xmm5
533
+ 0xc4,0xe3,0x79,0x60,0xea,0x07 = vpcmpestrm $7, %xmm2, %xmm5
534
+ 0xc4,0xe3,0x79,0x60,0x28,0x07 = vpcmpestrm $7, (%eax), %xmm5
535
+ 0xc4,0xe3,0x79,0x63,0xea,0x07 = vpcmpistri $7, %xmm2, %xmm5
536
+ 0xc4,0xe3,0x79,0x63,0x28,0x07 = vpcmpistri $7, (%eax), %xmm5
537
+ 0xc4,0xe3,0x79,0x61,0xea,0x07 = vpcmpestri $7, %xmm2, %xmm5
538
+ 0xc4,0xe3,0x79,0x61,0x28,0x07 = vpcmpestri $7, (%eax), %xmm5
539
+ 0xc4,0xe2,0x79,0xdb,0xea = vaesimc %xmm2, %xmm5
540
+ 0xc4,0xe2,0x79,0xdb,0x10 = vaesimc (%eax), %xmm2
541
+ 0xc4,0xe2,0x51,0xdc,0xca = vaesenc %xmm2, %xmm5, %xmm1
542
+ 0xc4,0xe2,0x51,0xdc,0x18 = vaesenc (%eax), %xmm5, %xmm3
543
+ 0xc4,0xe2,0x51,0xdd,0xca = vaesenclast %xmm2, %xmm5, %xmm1
544
+ 0xc4,0xe2,0x51,0xdd,0x18 = vaesenclast (%eax), %xmm5, %xmm3
545
+ 0xc4,0xe2,0x51,0xde,0xca = vaesdec %xmm2, %xmm5, %xmm1
546
+ 0xc4,0xe2,0x51,0xde,0x18 = vaesdec (%eax), %xmm5, %xmm3
547
+ 0xc4,0xe2,0x51,0xdf,0xca = vaesdeclast %xmm2, %xmm5, %xmm1
548
+ 0xc4,0xe2,0x51,0xdf,0x18 = vaesdeclast (%eax), %xmm5, %xmm3
549
+ 0xc4,0xe3,0x79,0xdf,0xea,0x07 = vaeskeygenassist $7, %xmm2, %xmm5
550
+ 0xc4,0xe3,0x79,0xdf,0x28,0x07 = vaeskeygenassist $7, (%eax), %xmm5
551
+ 0xc5,0xe8,0xc2,0xd9,0x08 = vcmpeq_uqps %xmm1, %xmm2, %xmm3
552
+ 0xc5,0xe8,0xc2,0xd9,0x09 = vcmpngeps %xmm1, %xmm2, %xmm3
553
+ 0xc5,0xe8,0xc2,0xd9,0x0a = vcmpngtps %xmm1, %xmm2, %xmm3
554
+ 0xc5,0xe8,0xc2,0xd9,0x0b = vcmpfalseps %xmm1, %xmm2, %xmm3
555
+ 0xc5,0xe8,0xc2,0xd9,0x0c = vcmpneq_oqps %xmm1, %xmm2, %xmm3
556
+ 0xc5,0xe8,0xc2,0xd9,0x0d = vcmpgeps %xmm1, %xmm2, %xmm3
557
+ 0xc5,0xe8,0xc2,0xd9,0x0e = vcmpgtps %xmm1, %xmm2, %xmm3
558
+ 0xc5,0xe8,0xc2,0xd9,0x0f = vcmptrueps %xmm1, %xmm2, %xmm3
559
+ 0xc5,0xe8,0xc2,0xd9,0x10 = vcmpeq_osps %xmm1, %xmm2, %xmm3
560
+ 0xc5,0xe8,0xc2,0xd9,0x11 = vcmplt_oqps %xmm1, %xmm2, %xmm3
561
+ 0xc5,0xe8,0xc2,0xd9,0x12 = vcmple_oqps %xmm1, %xmm2, %xmm3
562
+ 0xc5,0xe8,0xc2,0xd9,0x13 = vcmpunord_sps %xmm1, %xmm2, %xmm3
563
+ 0xc5,0xe8,0xc2,0xd9,0x14 = vcmpneq_usps %xmm1, %xmm2, %xmm3
564
+ 0xc5,0xe8,0xc2,0xd9,0x15 = vcmpnlt_uqps %xmm1, %xmm2, %xmm3
565
+ 0xc5,0xe8,0xc2,0xd9,0x16 = vcmpnle_uqps %xmm1, %xmm2, %xmm3
566
+ 0xc5,0xe8,0xc2,0xd9,0x17 = vcmpord_sps %xmm1, %xmm2, %xmm3
567
+ 0xc5,0xe8,0xc2,0xd9,0x18 = vcmpeq_usps %xmm1, %xmm2, %xmm3
568
+ 0xc5,0xe8,0xc2,0xd9,0x19 = vcmpnge_uqps %xmm1, %xmm2, %xmm3
569
+ 0xc5,0xe8,0xc2,0xd9,0x1a = vcmpngt_uqps %xmm1, %xmm2, %xmm3
570
+ 0xc5,0xe8,0xc2,0xd9,0x1b = vcmpfalse_osps %xmm1, %xmm2, %xmm3
571
+ 0xc5,0xe8,0xc2,0xd9,0x1c = vcmpneq_osps %xmm1, %xmm2, %xmm3
572
+ 0xc5,0xe8,0xc2,0xd9,0x1d = vcmpge_oqps %xmm1, %xmm2, %xmm3
573
+ 0xc5,0xe8,0xc2,0xd9,0x1e = vcmpgt_oqps %xmm1, %xmm2, %xmm3
574
+ 0xc5,0xe8,0xc2,0xd9,0x1f = vcmptrue_usps %xmm1, %xmm2, %xmm3
575
+ 0xc5,0xfc,0x28,0x10 = vmovaps (%eax), %ymm2
576
+ 0xc5,0xfc,0x28,0xd1 = vmovaps %ymm1, %ymm2
577
+ 0xc5,0xfc,0x29,0x08 = vmovaps %ymm1, (%eax)
578
+ 0xc5,0xfd,0x28,0x10 = vmovapd (%eax), %ymm2
579
+ 0xc5,0xfd,0x28,0xd1 = vmovapd %ymm1, %ymm2
580
+ 0xc5,0xfd,0x29,0x08 = vmovapd %ymm1, (%eax)
581
+ 0xc5,0xfc,0x10,0x10 = vmovups (%eax), %ymm2
582
+ 0xc5,0xfc,0x10,0xd1 = vmovups %ymm1, %ymm2
583
+ 0xc5,0xfc,0x11,0x08 = vmovups %ymm1, (%eax)
584
+ 0xc5,0xfd,0x10,0x10 = vmovupd (%eax), %ymm2
585
+ 0xc5,0xfd,0x10,0xd1 = vmovupd %ymm1, %ymm2
586
+ 0xc5,0xfd,0x11,0x08 = vmovupd %ymm1, (%eax)
587
+ 0xc5,0xec,0x15,0xe1 = vunpckhps %ymm1, %ymm2, %ymm4
588
+ 0xc5,0xed,0x15,0xe1 = vunpckhpd %ymm1, %ymm2, %ymm4
589
+ 0xc5,0xec,0x14,0xe1 = vunpcklps %ymm1, %ymm2, %ymm4
590
+ 0xc5,0xed,0x14,0xe1 = vunpcklpd %ymm1, %ymm2, %ymm4
591
+ 0xc5,0xec,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx,%ecx,8), %ymm2, %ymm5
592
+ 0xc5,0xed,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx,%ecx,8), %ymm2, %ymm5
593
+ 0xc5,0xec,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx,%ecx,8), %ymm2, %ymm5
594
+ 0xc5,0xed,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx,%ecx,8), %ymm2, %ymm5
595
+ 0xc5,0xfd,0xe7,0x08 = vmovntdq %ymm1, (%eax)
596
+ 0xc5,0xfd,0x2b,0x08 = vmovntpd %ymm1, (%eax)
597
+ 0xc5,0xfc,0x2b,0x08 = vmovntps %ymm1, (%eax)
598
+ 0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
599
+ 0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
600
+ 0xc5,0xdc,0x5f,0xf2 = vmaxps %ymm2, %ymm4, %ymm6
601
+ 0xc5,0xdd,0x5f,0xf2 = vmaxpd %ymm2, %ymm4, %ymm6
602
+ 0xc5,0xdc,0x5d,0xf2 = vminps %ymm2, %ymm4, %ymm6
603
+ 0xc5,0xdd,0x5d,0xf2 = vminpd %ymm2, %ymm4, %ymm6
604
+ 0xc5,0xdc,0x5c,0xf2 = vsubps %ymm2, %ymm4, %ymm6
605
+ 0xc5,0xdd,0x5c,0xf2 = vsubpd %ymm2, %ymm4, %ymm6
606
+ 0xc5,0xdc,0x5e,0xf2 = vdivps %ymm2, %ymm4, %ymm6
607
+ 0xc5,0xdd,0x5e,0xf2 = vdivpd %ymm2, %ymm4, %ymm6
608
+ 0xc5,0xdc,0x58,0xf2 = vaddps %ymm2, %ymm4, %ymm6
609
+ 0xc5,0xdd,0x58,0xf2 = vaddpd %ymm2, %ymm4, %ymm6
610
+ 0xc5,0xdc,0x59,0xf2 = vmulps %ymm2, %ymm4, %ymm6
611
+ 0xc5,0xdd,0x59,0xf2 = vmulpd %ymm2, %ymm4, %ymm6
612
+ 0xc5,0xdc,0x5f,0x30 = vmaxps (%eax), %ymm4, %ymm6
613
+ 0xc5,0xdd,0x5f,0x30 = vmaxpd (%eax), %ymm4, %ymm6
614
+ 0xc5,0xdc,0x5d,0x30 = vminps (%eax), %ymm4, %ymm6
615
+ 0xc5,0xdd,0x5d,0x30 = vminpd (%eax), %ymm4, %ymm6
616
+ 0xc5,0xdc,0x5c,0x30 = vsubps (%eax), %ymm4, %ymm6
617
+ 0xc5,0xdd,0x5c,0x30 = vsubpd (%eax), %ymm4, %ymm6
618
+ 0xc5,0xdc,0x5e,0x30 = vdivps (%eax), %ymm4, %ymm6
619
+ 0xc5,0xdd,0x5e,0x30 = vdivpd (%eax), %ymm4, %ymm6
620
+ 0xc5,0xdc,0x58,0x30 = vaddps (%eax), %ymm4, %ymm6
621
+ 0xc5,0xdd,0x58,0x30 = vaddpd (%eax), %ymm4, %ymm6
622
+ 0xc5,0xdc,0x59,0x30 = vmulps (%eax), %ymm4, %ymm6
623
+ 0xc5,0xdd,0x59,0x30 = vmulpd (%eax), %ymm4, %ymm6
624
+ 0xc5,0xfd,0x51,0xd1 = vsqrtpd %ymm1, %ymm2
625
+ 0xc5,0xfd,0x51,0x10 = vsqrtpd (%eax), %ymm2
626
+ 0xc5,0xfc,0x51,0xd1 = vsqrtps %ymm1, %ymm2
627
+ 0xc5,0xfc,0x51,0x10 = vsqrtps (%eax), %ymm2
628
+ 0xc5,0xfc,0x52,0xd1 = vrsqrtps %ymm1, %ymm2
629
+ 0xc5,0xfc,0x52,0x10 = vrsqrtps (%eax), %ymm2
630
+ 0xc5,0xfc,0x53,0xd1 = vrcpps %ymm1, %ymm2
631
+ 0xc5,0xfc,0x53,0x10 = vrcpps (%eax), %ymm2
632
+ 0xc5,0xdc,0x54,0xf2 = vandps %ymm2, %ymm4, %ymm6
633
+ 0xc5,0xdd,0x54,0xf2 = vandpd %ymm2, %ymm4, %ymm6
634
+ 0xc5,0xec,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx,%ecx,8), %ymm2, %ymm5
635
+ 0xc5,0xed,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx,%ecx,8), %ymm2, %ymm5
636
+ 0xc5,0xdc,0x56,0xf2 = vorps %ymm2, %ymm4, %ymm6
637
+ 0xc5,0xdd,0x56,0xf2 = vorpd %ymm2, %ymm4, %ymm6
638
+ 0xc5,0xec,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx,%ecx,8), %ymm2, %ymm5
639
+ 0xc5,0xed,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
640
+ 0xc5,0xdc,0x57,0xf2 = vxorps %ymm2, %ymm4, %ymm6
641
+ 0xc5,0xdd,0x57,0xf2 = vxorpd %ymm2, %ymm4, %ymm6
642
+ 0xc5,0xec,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx,%ecx,8), %ymm2, %ymm5
643
+ 0xc5,0xed,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx,%ecx,8), %ymm2, %ymm5
644
+ 0xc5,0xdc,0x55,0xf2 = vandnps %ymm2, %ymm4, %ymm6
645
+ 0xc5,0xdd,0x55,0xf2 = vandnpd %ymm2, %ymm4, %ymm6
646
+ 0xc5,0xec,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx,%ecx,8), %ymm2, %ymm5
647
+ 0xc5,0xed,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx,%ecx,8), %ymm2, %ymm5
648
+ 0xc5,0xfc,0x5a,0xd3 = vcvtps2pd %xmm3, %ymm2
649
+ 0xc5,0xfc,0x5a,0x10 = vcvtps2pd (%eax), %ymm2
650
+ 0xc5,0xfe,0xe6,0xd3 = vcvtdq2pd %xmm3, %ymm2
651
+ 0xc5,0xfe,0xe6,0x10 = vcvtdq2pd (%eax), %ymm2
652
+ 0xc5,0xfc,0x5b,0xea = vcvtdq2ps %ymm2, %ymm5
653
+ 0xc5,0xfc,0x5b,0x10 = vcvtdq2ps (%eax), %ymm2
654
+ 0xc5,0xfd,0x5b,0xea = vcvtps2dq %ymm2, %ymm5
655
+ 0xc5,0xfd,0x5b,0x28 = vcvtps2dq (%eax), %ymm5
656
+ 0xc5,0xfe,0x5b,0xea = vcvttps2dq %ymm2, %ymm5
657
+ 0xc5,0xfe,0x5b,0x28 = vcvttps2dq (%eax), %ymm5
658
+ 0xc5,0xf9,0xe6,0xe9 = vcvttpd2dq %xmm1, %xmm5
659
+ 0xc5,0xfd,0xe6,0xea = vcvttpd2dq %ymm2, %xmm5
660
+ 0xc5,0xf9,0xe6,0xe9 = vcvttpd2dqx %xmm1, %xmm5
661
+ 0xc5,0xf9,0xe6,0x08 = vcvttpd2dqx (%eax), %xmm1
662
+ 0xc5,0xfd,0xe6,0xca = vcvttpd2dqy %ymm2, %xmm1
663
+ 0xc5,0xfd,0xe6,0x08 = vcvttpd2dqy (%eax), %xmm1
664
+ 0xc5,0xfd,0x5a,0xea = vcvtpd2ps %ymm2, %xmm5
665
+ 0xc5,0xf9,0x5a,0xe9 = vcvtpd2psx %xmm1, %xmm5
666
+ 0xc5,0xf9,0x5a,0x08 = vcvtpd2psx (%eax), %xmm1
667
+ 0xc5,0xfd,0x5a,0xca = vcvtpd2psy %ymm2, %xmm1
668
+ 0xc5,0xfd,0x5a,0x08 = vcvtpd2psy (%eax), %xmm1
669
+ 0xc5,0xff,0xe6,0xea = vcvtpd2dq %ymm2, %xmm5
670
+ 0xc5,0xff,0xe6,0xca = vcvtpd2dqy %ymm2, %xmm1
671
+ 0xc5,0xff,0xe6,0x08 = vcvtpd2dqy (%eax), %xmm1
672
+ 0xc5,0xfb,0xe6,0xe9 = vcvtpd2dqx %xmm1, %xmm5
673
+ 0xc5,0xfb,0xe6,0x08 = vcvtpd2dqx (%eax), %xmm1
674
+ 0xc5,0xec,0xc2,0xd9,0x00 = vcmpeqps %ymm1, %ymm2, %ymm3
675
+ 0xc5,0xec,0xc2,0xd9,0x02 = vcmpleps %ymm1, %ymm2, %ymm3
676
+ 0xc5,0xec,0xc2,0xd9,0x01 = vcmpltps %ymm1, %ymm2, %ymm3
677
+ 0xc5,0xec,0xc2,0xd9,0x04 = vcmpneqps %ymm1, %ymm2, %ymm3
678
+ 0xc5,0xec,0xc2,0xd9,0x06 = vcmpnleps %ymm1, %ymm2, %ymm3
679
+ 0xc5,0xec,0xc2,0xd9,0x05 = vcmpnltps %ymm1, %ymm2, %ymm3
680
+ 0xc5,0xec,0xc2,0xd9,0x07 = vcmpordps %ymm1, %ymm2, %ymm3
681
+ 0xc5,0xec,0xc2,0xd9,0x03 = vcmpunordps %ymm1, %ymm2, %ymm3
682
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx,%ecx,8), %ymm2, %ymm3
683
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx,%ecx,8), %ymm2, %ymm3
684
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx,%ecx,8), %ymm2, %ymm3
685
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx,%ecx,8), %ymm2, %ymm3
686
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx,%ecx,8), %ymm2, %ymm3
687
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx,%ecx,8), %ymm2, %ymm3
688
+ 0xc5,0xcc,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx,%ecx,8), %ymm6, %ymm2
689
+ 0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx,%ecx,8), %ymm2, %ymm3
690
+ 0xc5,0xed,0xc2,0xd9,0x00 = vcmpeqpd %ymm1, %ymm2, %ymm3
691
+ 0xc5,0xed,0xc2,0xd9,0x02 = vcmplepd %ymm1, %ymm2, %ymm3
692
+ 0xc5,0xed,0xc2,0xd9,0x01 = vcmpltpd %ymm1, %ymm2, %ymm3
693
+ 0xc5,0xed,0xc2,0xd9,0x04 = vcmpneqpd %ymm1, %ymm2, %ymm3
694
+ 0xc5,0xed,0xc2,0xd9,0x06 = vcmpnlepd %ymm1, %ymm2, %ymm3
695
+ 0xc5,0xed,0xc2,0xd9,0x05 = vcmpnltpd %ymm1, %ymm2, %ymm3
696
+ 0xc5,0xed,0xc2,0xd9,0x07 = vcmpordpd %ymm1, %ymm2, %ymm3
697
+ 0xc5,0xed,0xc2,0xd9,0x03 = vcmpunordpd %ymm1, %ymm2, %ymm3
698
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
699
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx,%ecx,8), %ymm2, %ymm3
700
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
701
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx,%ecx,8), %ymm2, %ymm3
702
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx,%ecx,8), %ymm2, %ymm3
703
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx,%ecx,8), %ymm2, %ymm3
704
+ 0xc5,0xcd,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx,%ecx,8), %ymm6, %ymm2
705
+ 0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx,%ecx,8), %ymm2, %ymm3
706
+ 0xc5,0xec,0xc2,0xd9,0x08 = vcmpeq_uqps %ymm1, %ymm2, %ymm3
707
+ 0xc5,0xec,0xc2,0xd9,0x09 = vcmpngeps %ymm1, %ymm2, %ymm3
708
+ 0xc5,0xec,0xc2,0xd9,0x0a = vcmpngtps %ymm1, %ymm2, %ymm3
709
+ 0xc5,0xec,0xc2,0xd9,0x0b = vcmpfalseps %ymm1, %ymm2, %ymm3
710
+ 0xc5,0xec,0xc2,0xd9,0x0c = vcmpneq_oqps %ymm1, %ymm2, %ymm3
711
+ 0xc5,0xec,0xc2,0xd9,0x0d = vcmpgeps %ymm1, %ymm2, %ymm3
712
+ 0xc5,0xec,0xc2,0xd9,0x0e = vcmpgtps %ymm1, %ymm2, %ymm3
713
+ 0xc5,0xec,0xc2,0xd9,0x0f = vcmptrueps %ymm1, %ymm2, %ymm3
714
+ 0xc5,0xec,0xc2,0xd9,0x10 = vcmpeq_osps %ymm1, %ymm2, %ymm3
715
+ 0xc5,0xec,0xc2,0xd9,0x11 = vcmplt_oqps %ymm1, %ymm2, %ymm3
716
+ 0xc5,0xec,0xc2,0xd9,0x12 = vcmple_oqps %ymm1, %ymm2, %ymm3
717
+ 0xc5,0xec,0xc2,0xd9,0x13 = vcmpunord_sps %ymm1, %ymm2, %ymm3
718
+ 0xc5,0xec,0xc2,0xd9,0x14 = vcmpneq_usps %ymm1, %ymm2, %ymm3
719
+ 0xc5,0xec,0xc2,0xd9,0x15 = vcmpnlt_uqps %ymm1, %ymm2, %ymm3
720
+ 0xc5,0xec,0xc2,0xd9,0x16 = vcmpnle_uqps %ymm1, %ymm2, %ymm3
721
+ 0xc5,0xec,0xc2,0xd9,0x17 = vcmpord_sps %ymm1, %ymm2, %ymm3
722
+ 0xc5,0xec,0xc2,0xd9,0x18 = vcmpeq_usps %ymm1, %ymm2, %ymm3
723
+ 0xc5,0xec,0xc2,0xd9,0x19 = vcmpnge_uqps %ymm1, %ymm2, %ymm3
724
+ 0xc5,0xec,0xc2,0xd9,0x1a = vcmpngt_uqps %ymm1, %ymm2, %ymm3
725
+ 0xc5,0xec,0xc2,0xd9,0x1b = vcmpfalse_osps %ymm1, %ymm2, %ymm3
726
+ 0xc5,0xec,0xc2,0xd9,0x1c = vcmpneq_osps %ymm1, %ymm2, %ymm3
727
+ 0xc5,0xec,0xc2,0xd9,0x1d = vcmpge_oqps %ymm1, %ymm2, %ymm3
728
+ 0xc5,0xec,0xc2,0xd9,0x1e = vcmpgt_oqps %ymm1, %ymm2, %ymm3
729
+ 0xc5,0xec,0xc2,0xd9,0x1f = vcmptrue_usps %ymm1, %ymm2, %ymm3
730
+ 0xc5,0xef,0xd0,0xd9 = vaddsubps %ymm1, %ymm2, %ymm3
731
+ 0xc5,0xf7,0xd0,0x10 = vaddsubps (%eax), %ymm1, %ymm2
732
+ 0xc5,0xed,0xd0,0xd9 = vaddsubpd %ymm1, %ymm2, %ymm3
733
+ 0xc5,0xf5,0xd0,0x10 = vaddsubpd (%eax), %ymm1, %ymm2
734
+ 0xc5,0xef,0x7c,0xd9 = vhaddps %ymm1, %ymm2, %ymm3
735
+ 0xc5,0xef,0x7c,0x18 = vhaddps (%eax), %ymm2, %ymm3
736
+ 0xc5,0xed,0x7c,0xd9 = vhaddpd %ymm1, %ymm2, %ymm3
737
+ 0xc5,0xed,0x7c,0x18 = vhaddpd (%eax), %ymm2, %ymm3
738
+ 0xc5,0xef,0x7d,0xd9 = vhsubps %ymm1, %ymm2, %ymm3
739
+ 0xc5,0xef,0x7d,0x18 = vhsubps (%eax), %ymm2, %ymm3
740
+ 0xc5,0xed,0x7d,0xd9 = vhsubpd %ymm1, %ymm2, %ymm3
741
+ 0xc5,0xed,0x7d,0x18 = vhsubpd (%eax), %ymm2, %ymm3
742
+ 0xc4,0xe3,0x55,0x0c,0xca,0x03 = vblendps $3, %ymm2, %ymm5, %ymm1
743
+ 0xc4,0xe3,0x55,0x0c,0x08,0x03 = vblendps $3, (%eax), %ymm5, %ymm1
744
+ 0xc4,0xe3,0x55,0x0d,0xca,0x03 = vblendpd $3, %ymm2, %ymm5, %ymm1
745
+ 0xc4,0xe3,0x55,0x0d,0x08,0x03 = vblendpd $3, (%eax), %ymm5, %ymm1
746
+ 0xc4,0xe3,0x55,0x40,0xca,0x03 = vdpps $3, %ymm2, %ymm5, %ymm1
747
+ 0xc4,0xe3,0x55,0x40,0x08,0x03 = vdpps $3, (%eax), %ymm5, %ymm1
748
+ 0xc4,0xe2,0x7d,0x1a,0x10 = vbroadcastf128 (%eax), %ymm2
749
+ 0xc4,0xe2,0x7d,0x19,0x10 = vbroadcastsd (%eax), %ymm2
750
+ 0xc4,0xe2,0x79,0x18,0x10 = vbroadcastss (%eax), %xmm2
751
+ 0xc4,0xe2,0x7d,0x18,0x10 = vbroadcastss (%eax), %ymm2
752
+ 0xc4,0xe3,0x6d,0x18,0xea,0x07 = vinsertf128 $7, %xmm2, %ymm2, %ymm5
753
+ 0xc4,0xe3,0x6d,0x18,0x28,0x07 = vinsertf128 $7, (%eax), %ymm2, %ymm5
754
+ 0xc4,0xe3,0x7d,0x19,0xd2,0x07 = vextractf128 $7, %ymm2, %xmm2
755
+ 0xc4,0xe3,0x7d,0x19,0x10,0x07 = vextractf128 $7, %ymm2, (%eax)
756
+ 0xc4,0xe2,0x51,0x2f,0x10 = vmaskmovpd %xmm2, %xmm5, (%eax)
757
+ 0xc4,0xe2,0x55,0x2f,0x10 = vmaskmovpd %ymm2, %ymm5, (%eax)
758
+ 0xc4,0xe2,0x69,0x2d,0x28 = vmaskmovpd (%eax), %xmm2, %xmm5
759
+ 0xc4,0xe2,0x6d,0x2d,0x28 = vmaskmovpd (%eax), %ymm2, %ymm5
760
+ 0xc4,0xe2,0x51,0x2e,0x10 = vmaskmovps %xmm2, %xmm5, (%eax)
761
+ 0xc4,0xe2,0x55,0x2e,0x10 = vmaskmovps %ymm2, %ymm5, (%eax)
762
+ 0xc4,0xe2,0x69,0x2c,0x28 = vmaskmovps (%eax), %xmm2, %xmm5
763
+ 0xc4,0xe2,0x6d,0x2c,0x28 = vmaskmovps (%eax), %ymm2, %ymm5
764
+ 0xc4,0xe3,0x79,0x04,0xe9,0x07 = vpermilps $7, %xmm1, %xmm5
765
+ 0xc4,0xe3,0x7d,0x04,0xcd,0x07 = vpermilps $7, %ymm5, %ymm1
766
+ 0xc4,0xe3,0x79,0x04,0x28,0x07 = vpermilps $7, (%eax), %xmm5
767
+ 0xc4,0xe3,0x7d,0x04,0x28,0x07 = vpermilps $7, (%eax), %ymm5
768
+ 0xc4,0xe2,0x51,0x0c,0xc9 = vpermilps %xmm1, %xmm5, %xmm1
769
+ 0xc4,0xe2,0x55,0x0c,0xc9 = vpermilps %ymm1, %ymm5, %ymm1
770
+ 0xc4,0xe2,0x51,0x0c,0x18 = vpermilps (%eax), %xmm5, %xmm3
771
+ 0xc4,0xe2,0x55,0x0c,0x08 = vpermilps (%eax), %ymm5, %ymm1
772
+ 0xc4,0xe3,0x79,0x05,0xe9,0x07 = vpermilpd $7, %xmm1, %xmm5
773
+ 0xc4,0xe3,0x7d,0x05,0xcd,0x07 = vpermilpd $7, %ymm5, %ymm1
774
+ 0xc4,0xe3,0x79,0x05,0x28,0x07 = vpermilpd $7, (%eax), %xmm5
775
+ 0xc4,0xe3,0x7d,0x05,0x28,0x07 = vpermilpd $7, (%eax), %ymm5
776
+ 0xc4,0xe2,0x51,0x0d,0xc9 = vpermilpd %xmm1, %xmm5, %xmm1
777
+ 0xc4,0xe2,0x55,0x0d,0xc9 = vpermilpd %ymm1, %ymm5, %ymm1
778
+ 0xc4,0xe2,0x51,0x0d,0x18 = vpermilpd (%eax), %xmm5, %xmm3
779
+ 0xc4,0xe2,0x55,0x0d,0x08 = vpermilpd (%eax), %ymm5, %ymm1
780
+ 0xc4,0xe3,0x55,0x06,0xca,0x07 = vperm2f128 $7, %ymm2, %ymm5, %ymm1
781
+ 0xc4,0xe3,0x55,0x06,0x08,0x07 = vperm2f128 $7, (%eax), %ymm5, %ymm1
782
+ 0xc5,0xfc,0x77 = vzeroall
783
+ 0xc5,0xf8,0x77 = vzeroupper
784
+ 0xc5,0xfb,0x2d,0xcc = vcvtsd2sil %xmm4, %ecx
785
+ 0xc5,0xfb,0x2d,0x09 = vcvtsd2sil (%ecx), %ecx
786
+ 0xc5,0xfb,0x2d,0xcc = vcvtsd2si %xmm4, %ecx
787
+ 0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%ecx), %ecx
788
+ 0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sdl (%ebp), %xmm0, %xmm7
789
+ 0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sdl (%esp), %xmm0, %xmm7
790
+ 0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sd (%ebp), %xmm0, %xmm7
791
+ 0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sd (%esp), %xmm0, %xmm7
792
+ 0xc5,0xff,0xf0,0x10 = vlddqu (%eax), %ymm2
793
+ 0xc5,0xff,0x12,0xea = vmovddup %ymm2, %ymm5
794
+ 0xc5,0xff,0x12,0x10 = vmovddup (%eax), %ymm2
795
+ 0xc5,0xfd,0x6f,0xea = vmovdqa %ymm2, %ymm5
796
+ 0xc5,0xfd,0x7f,0x10 = vmovdqa %ymm2, (%eax)
797
+ 0xc5,0xfd,0x6f,0x10 = vmovdqa (%eax), %ymm2
798
+ 0xc5,0xfe,0x6f,0xea = vmovdqu %ymm2, %ymm5
799
+ 0xc5,0xfe,0x7f,0x10 = vmovdqu %ymm2, (%eax)
800
+ 0xc5,0xfe,0x6f,0x10 = vmovdqu (%eax), %ymm2
801
+ 0xc5,0xfe,0x16,0xea = vmovshdup %ymm2, %ymm5
802
+ 0xc5,0xfe,0x16,0x10 = vmovshdup (%eax), %ymm2
803
+ 0xc5,0xfe,0x12,0xea = vmovsldup %ymm2, %ymm5
804
+ 0xc5,0xfe,0x12,0x10 = vmovsldup (%eax), %ymm2
805
+ 0xc4,0xe2,0x7d,0x17,0xea = vptest %ymm2, %ymm5
806
+ 0xc4,0xe2,0x7d,0x17,0x10 = vptest (%eax), %ymm2
807
+ 0xc4,0xe3,0x7d,0x09,0xcd,0x07 = vroundpd $7, %ymm5, %ymm1
808
+ 0xc4,0xe3,0x7d,0x09,0x28,0x07 = vroundpd $7, (%eax), %ymm5
809
+ 0xc4,0xe3,0x7d,0x08,0xcd,0x07 = vroundps $7, %ymm5, %ymm1
810
+ 0xc4,0xe3,0x7d,0x08,0x28,0x07 = vroundps $7, (%eax), %ymm5
811
+ 0xc5,0xd5,0xc6,0xca,0x07 = vshufpd $7, %ymm2, %ymm5, %ymm1
812
+ 0xc5,0xd5,0xc6,0x08,0x07 = vshufpd $7, (%eax), %ymm5, %ymm1
813
+ 0xc5,0xd4,0xc6,0xca,0x07 = vshufps $7, %ymm2, %ymm5, %ymm1
814
+ 0xc5,0xd4,0xc6,0x08,0x07 = vshufps $7, (%eax), %ymm5, %ymm1
815
+ 0xc4,0xe2,0x79,0x0f,0xea = vtestpd %xmm2, %xmm5
816
+ 0xc4,0xe2,0x7d,0x0f,0xea = vtestpd %ymm2, %ymm5
817
+ 0xc4,0xe2,0x79,0x0f,0x10 = vtestpd (%eax), %xmm2
818
+ 0xc4,0xe2,0x7d,0x0f,0x10 = vtestpd (%eax), %ymm2
819
+ 0xc4,0xe2,0x79,0x0e,0xea = vtestps %xmm2, %xmm5
820
+ 0xc4,0xe2,0x7d,0x0e,0xea = vtestps %ymm2, %ymm5
821
+ 0xc4,0xe2,0x79,0x0e,0x10 = vtestps (%eax), %xmm2
822
+ 0xc4,0xe2,0x7d,0x0e,0x10 = vtestps (%eax), %ymm2
823
+ 0xc4,0xe3,0x75,0x4b,0x94,0x20,0xad,0xde,0x00,0x00,0x00 = vblendvpd %ymm0, 0xdead(%eax,%eiz), %ymm1, %ymm2
824
+ 0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulhqhqdq %xmm2, %xmm5, %xmm1
825
+ 0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulhqhqdq (%eax), %xmm5, %xmm3
826
+ 0xc4,0xe3,0x51,0x44,0xca,0x01 = vpclmulhqlqdq %xmm2, %xmm5, %xmm1
827
+ 0xc4,0xe3,0x51,0x44,0x18,0x01 = vpclmulhqlqdq (%eax), %xmm5, %xmm3
828
+ 0xc4,0xe3,0x51,0x44,0xca,0x10 = vpclmullqhqdq %xmm2, %xmm5, %xmm1
829
+ 0xc4,0xe3,0x51,0x44,0x18,0x10 = vpclmullqhqdq (%eax), %xmm5, %xmm3
830
+ 0xc4,0xe3,0x51,0x44,0xca,0x00 = vpclmullqlqdq %xmm2, %xmm5, %xmm1
831
+ 0xc4,0xe3,0x51,0x44,0x18,0x00 = vpclmullqlqdq (%eax), %xmm5, %xmm3
832
+ 0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulqdq $17, %xmm2, %xmm5, %xmm1
833
+ 0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulqdq $17, (%eax), %xmm5, %xmm3