ruby_nacl 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Changelog +0 -0
- data/README.md +49 -0
- data/ext/ruby_nacl/NaCl/MACROS +56 -0
- data/ext/ruby_nacl/NaCl/OPERATIONS +11 -0
- data/ext/ruby_nacl/NaCl/PROTOTYPES.c +26 -0
- data/ext/ruby_nacl/NaCl/PROTOTYPES.cpp +17 -0
- data/ext/ruby_nacl/NaCl/README +1 -0
- data/ext/ruby_nacl/NaCl/commandline/nacl-sha256.c +64 -0
- data/ext/ruby_nacl/NaCl/commandline/nacl-sha512.c +64 -0
- data/ext/ruby_nacl/NaCl/cpucycles/alpha.c +80 -0
- data/ext/ruby_nacl/NaCl/cpucycles/alpha.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64cpuinfo.c +16 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64cpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64cpuspeed.c +25 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64cpuspeed.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64tscfreq.c +18 -0
- data/ext/ruby_nacl/NaCl/cpucycles/amd64tscfreq.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/celllinux.c +83 -0
- data/ext/ruby_nacl/NaCl/cpucycles/celllinux.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/cortex.c +73 -0
- data/ext/ruby_nacl/NaCl/cpucycles/cortex.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/dev4ns.c +62 -0
- data/ext/ruby_nacl/NaCl/cpucycles/dev4ns.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/do +105 -0
- data/ext/ruby_nacl/NaCl/cpucycles/gettimeofday.c +32 -0
- data/ext/ruby_nacl/NaCl/cpucycles/gettimeofday.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/hppapstat.c +26 -0
- data/ext/ruby_nacl/NaCl/cpucycles/hppapstat.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/ia64cpuinfo.c +15 -0
- data/ext/ruby_nacl/NaCl/cpucycles/ia64cpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/mips.c +65 -0
- data/ext/ruby_nacl/NaCl/cpucycles/mips.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/monotonic.c +34 -0
- data/ext/ruby_nacl/NaCl/cpucycles/monotonic.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/monotoniccpuinfo.c +33 -0
- data/ext/ruby_nacl/NaCl/cpucycles/monotoniccpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/osfreq.c +65 -0
- data/ext/ruby_nacl/NaCl/cpucycles/powerpccpuinfo.c +95 -0
- data/ext/ruby_nacl/NaCl/cpucycles/powerpccpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/powerpcmacos.c +42 -0
- data/ext/ruby_nacl/NaCl/cpucycles/powerpcmacos.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sgi.c +38 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sgi.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sparc32cpuinfo.c +16 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sparc32cpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sparccpuinfo.c +15 -0
- data/ext/ruby_nacl/NaCl/cpucycles/sparccpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/test.c +77 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86cpuinfo.c +15 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86cpuinfo.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86cpuspeed.c +24 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86cpuspeed.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86estimate.c +59 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86estimate.h +27 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86tscfreq.c +17 -0
- data/ext/ruby_nacl/NaCl/cpucycles/x86tscfreq.h +27 -0
- data/ext/ruby_nacl/NaCl/cpuid/cbytes.c +16 -0
- data/ext/ruby_nacl/NaCl/cpuid/cpuid.c +41 -0
- data/ext/ruby_nacl/NaCl/cpuid/do +37 -0
- data/ext/ruby_nacl/NaCl/cpuid/unknown.c +7 -0
- data/ext/ruby_nacl/NaCl/cpuid/x86.c +41 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha256/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha256/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha256/ref/hmac.c +83 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha256/ref/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha256/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/ref/hmac.c +86 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/ref/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/hmacsha512256/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/measure.c +69 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/try.c +119 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/wrapper-auth.cpp +11 -0
- data/ext/ruby_nacl/NaCl/crypto_auth/wrapper-verify.cpp +14 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/ref/after.c +22 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/ref/api.h +6 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/ref/before.c +17 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/ref/box.c +27 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/ref/keypair.c +12 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_box/curve25519xsalsa20poly1305/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_box/measure.c +137 -0
- data/ext/ruby_nacl/NaCl/crypto_box/try.c +195 -0
- data/ext/ruby_nacl/NaCl/crypto_box/wrapper-box.cpp +24 -0
- data/ext/ruby_nacl/NaCl/crypto_box/wrapper-keypair.cpp +12 -0
- data/ext/ruby_nacl/NaCl/crypto_box/wrapper-open.cpp +27 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref/core.c +135 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref2/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref2/core.c +108 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/ref2/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/hsalsa20/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_core/measure.c +18 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa20/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa20/ref/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa20/ref/core.c +134 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa20/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa20/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa2012/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa2012/ref/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa2012/ref/core.c +134 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa2012/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa2012/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa208/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa208/ref/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa208/ref/core.c +134 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa208/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_core/salsa208/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_core/try.c +116 -0
- data/ext/ruby_nacl/NaCl/crypto_core/wrapper-empty.cpp +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/measure.c +66 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha256/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha256/ref/api.h +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha256/ref/hash.c +69 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha256/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha256/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/ref/api.h +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/ref/hash.c +71 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/sha512/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/try.c +77 -0
- data/ext/ruby_nacl/NaCl/crypto_hash/wrapper-hash.cpp +10 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/measure.c +18 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/inplace/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/inplace/blocks.c +228 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/inplace/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/ref/blocks.c +212 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha256/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/inplace/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/inplace/blocks.c +256 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/inplace/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/ref/blocks.c +239 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/sha512/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/try.c +79 -0
- data/ext/ruby_nacl/NaCl/crypto_hashblocks/wrapper-empty.cpp +0 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/measure.c +69 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/53/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/53/auth.c +1616 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/53/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/amd64/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/amd64/auth.s +2787 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/amd64/constants.s +85 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/amd64/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/ref/auth.c +104 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/ref/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/x86/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/x86/auth.s +2779 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/x86/constants.s +85 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/poly1305/x86/verify.c +9 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/try.c +119 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/wrapper-auth.cpp +11 -0
- data/ext/ruby_nacl/NaCl/crypto_onetimeauth/wrapper-verify.cpp +14 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/base.c +8 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/const.s +114 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/fromdouble.s +195 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/init.s +13 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/mainloop.s +3990 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/mult.s +410 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/smult.c +91 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/square.s +298 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/athlon/todouble.s +144 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/donna_c64/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/donna_c64/base.c +8 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/donna_c64/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/donna_c64/smult.c +477 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/ref/base.c +16 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/ref/smult.c +265 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/curve25519/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/measure.c +61 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/try.c +126 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/wrapper-base.cpp +11 -0
- data/ext/ruby_nacl/NaCl/crypto_scalarmult/wrapper-mult.cpp +12 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/measure.c +75 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/try.c +129 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/wrapper-box.cpp +19 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/wrapper-open.cpp +22 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/xsalsa20poly1305/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/xsalsa20poly1305/ref/api.h +4 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/xsalsa20poly1305/ref/box.c +35 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/xsalsa20poly1305/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_secretbox/xsalsa20poly1305/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/api.h +3 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/fe25519.c +345 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/fe25519.h +54 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/ge25519.c +227 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/ge25519.h +34 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/sc25519.c +146 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/sc25519.h +51 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/ref/sign.c +103 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/edwards25519sha512batch/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/measure.c +83 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/try.c +86 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/wrapper-keypair.cpp +12 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/wrapper-sign-open.cpp +24 -0
- data/ext/ruby_nacl/NaCl/crypto_sign/wrapper-sign.cpp +23 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/afternm.s +12308 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/api.h +3 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/beforenm.s +13694 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/stream.c +14 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/xor.c +15 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/core2/xor_afternm.s +12407 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/afternm.c +158 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/api.h +3 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/beforenm.c +59 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/common.c +64 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/common.h +788 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/consts.c +14 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/consts.h +28 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/int128.c +128 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/int128.h +47 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/stream.c +28 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/types.h +10 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/portable/xor_afternm.c +180 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/aes128ctr/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/measure.c +73 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/amd64_xmm6/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/amd64_xmm6/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/amd64_xmm6/stream.s +4823 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/ref/stream.c +49 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/ref/xor.c +52 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/x86_xmm5/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/x86_xmm5/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa20/x86_xmm5/stream.s +5078 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/amd64_xmm6/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/amd64_xmm6/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/amd64_xmm6/stream.s +4823 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/ref/stream.c +49 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/ref/xor.c +52 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/x86_xmm5/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/x86_xmm5/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/x86_xmm5/stream.s +5078 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/amd64_xmm6/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/amd64_xmm6/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/amd64_xmm6/stream.s +4823 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/ref/stream.c +49 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/ref/xor.c +52 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/x86_xmm5/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/x86_xmm5/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/salsa208/x86_xmm5/stream.s +5078 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/try.c +124 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/wrapper-stream.cpp +12 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/wrapper-xor.cpp +17 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/ref/api.h +2 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/ref/implementors +1 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/ref/stream.c +22 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/ref/xor.c +23 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/selected +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream/xsalsa20/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_stream.h +18 -0
- data/ext/ruby_nacl/NaCl/crypto_stream_aes128ctr.h +33 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/16/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/16/ref/api.h +1 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/16/ref/verify.c +24 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/16/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/32/checksum +1 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/32/ref/api.h +1 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/32/ref/verify.c +40 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/32/used +0 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/measure.c +18 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/try.c +75 -0
- data/ext/ruby_nacl/NaCl/crypto_verify/wrapper-empty.cpp +0 -0
- data/ext/ruby_nacl/NaCl/curvecp/LIBS +31 -0
- data/ext/ruby_nacl/NaCl/curvecp/README +10 -0
- data/ext/ruby_nacl/NaCl/curvecp/SOURCES +36 -0
- data/ext/ruby_nacl/NaCl/curvecp/TARGETS +5 -0
- data/ext/ruby_nacl/NaCl/curvecp/blocking.c +12 -0
- data/ext/ruby_nacl/NaCl/curvecp/blocking.h +7 -0
- data/ext/ruby_nacl/NaCl/curvecp/byte.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/byte_copy.c +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/byte_isequal.c +10 -0
- data/ext/ruby_nacl/NaCl/curvecp/byte_zero.c +7 -0
- data/ext/ruby_nacl/NaCl/curvecp/crypto_block.c +35 -0
- data/ext/ruby_nacl/NaCl/curvecp/crypto_block.h +4 -0
- data/ext/ruby_nacl/NaCl/curvecp/curvecpclient.c +476 -0
- data/ext/ruby_nacl/NaCl/curvecp/curvecpmakekey.c +57 -0
- data/ext/ruby_nacl/NaCl/curvecp/curvecpmessage.c +654 -0
- data/ext/ruby_nacl/NaCl/curvecp/curvecpprintkey.c +46 -0
- data/ext/ruby_nacl/NaCl/curvecp/curvecpserver.c +497 -0
- data/ext/ruby_nacl/NaCl/curvecp/die.c +42 -0
- data/ext/ruby_nacl/NaCl/curvecp/die.h +16 -0
- data/ext/ruby_nacl/NaCl/curvecp/e.c +106 -0
- data/ext/ruby_nacl/NaCl/curvecp/e.h +438 -0
- data/ext/ruby_nacl/NaCl/curvecp/hexparse.c +25 -0
- data/ext/ruby_nacl/NaCl/curvecp/hexparse.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/load.c +33 -0
- data/ext/ruby_nacl/NaCl/curvecp/load.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/nameparse.c +19 -0
- data/ext/ruby_nacl/NaCl/curvecp/nameparse.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/nanoseconds.c +27 -0
- data/ext/ruby_nacl/NaCl/curvecp/nanoseconds.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/open.h +10 -0
- data/ext/ruby_nacl/NaCl/curvecp/open_cwd.c +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/open_lock.c +19 -0
- data/ext/ruby_nacl/NaCl/curvecp/open_pipe.c +15 -0
- data/ext/ruby_nacl/NaCl/curvecp/open_read.c +17 -0
- data/ext/ruby_nacl/NaCl/curvecp/open_write.c +17 -0
- data/ext/ruby_nacl/NaCl/curvecp/portparse.c +14 -0
- data/ext/ruby_nacl/NaCl/curvecp/portparse.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/randommod.c +14 -0
- data/ext/ruby_nacl/NaCl/curvecp/randommod.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/safenonce.c +74 -0
- data/ext/ruby_nacl/NaCl/curvecp/safenonce.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/savesync.c +24 -0
- data/ext/ruby_nacl/NaCl/curvecp/savesync.h +6 -0
- data/ext/ruby_nacl/NaCl/curvecp/socket.h +9 -0
- data/ext/ruby_nacl/NaCl/curvecp/socket_bind.c +15 -0
- data/ext/ruby_nacl/NaCl/curvecp/socket_recv.c +23 -0
- data/ext/ruby_nacl/NaCl/curvecp/socket_send.c +19 -0
- data/ext/ruby_nacl/NaCl/curvecp/socket_udp.c +36 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint16_pack.c +7 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint16_pack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint16_unpack.c +9 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint16_unpack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint32_pack.c +9 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint32_pack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint32_unpack.c +11 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint32_unpack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint64_pack.c +13 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint64_pack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint64_unpack.c +15 -0
- data/ext/ruby_nacl/NaCl/curvecp/uint64_unpack.h +8 -0
- data/ext/ruby_nacl/NaCl/curvecp/writeall.c +27 -0
- data/ext/ruby_nacl/NaCl/curvecp/writeall.h +6 -0
- data/ext/ruby_nacl/NaCl/do +468 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_int16.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_int32.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_int64.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_int8.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_uint16.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_uint32.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_uint64.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/crypto_uint8.c +3 -0
- data/ext/ruby_nacl/NaCl/inttypes/do +47 -0
- data/ext/ruby_nacl/NaCl/inttypes/signed.h +17 -0
- data/ext/ruby_nacl/NaCl/inttypes/unsigned.h +17 -0
- data/ext/ruby_nacl/NaCl/measure-anything.c +225 -0
- data/ext/ruby_nacl/NaCl/okcompilers/abiname.c +45 -0
- data/ext/ruby_nacl/NaCl/okcompilers/archivers +2 -0
- data/ext/ruby_nacl/NaCl/okcompilers/c +8 -0
- data/ext/ruby_nacl/NaCl/okcompilers/cpp +8 -0
- data/ext/ruby_nacl/NaCl/okcompilers/do +196 -0
- data/ext/ruby_nacl/NaCl/okcompilers/lib.c +29 -0
- data/ext/ruby_nacl/NaCl/okcompilers/lib.cpp +19 -0
- data/ext/ruby_nacl/NaCl/okcompilers/main.c +25 -0
- data/ext/ruby_nacl/NaCl/okcompilers/main.cpp +22 -0
- data/ext/ruby_nacl/NaCl/randombytes/devurandom.c +34 -0
- data/ext/ruby_nacl/NaCl/randombytes/devurandom.h +24 -0
- data/ext/ruby_nacl/NaCl/randombytes/do +43 -0
- data/ext/ruby_nacl/NaCl/randombytes/test.c +15 -0
- data/ext/ruby_nacl/NaCl/tests/auth.c +19 -0
- data/ext/ruby_nacl/NaCl/tests/auth.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/auth2.c +34 -0
- data/ext/ruby_nacl/NaCl/tests/auth2.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/auth3.c +34 -0
- data/ext/ruby_nacl/NaCl/tests/auth3.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/auth4.cpp +44 -0
- data/ext/ruby_nacl/NaCl/tests/auth4.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/auth5.c +36 -0
- data/ext/ruby_nacl/NaCl/tests/auth5.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/auth6.cpp +46 -0
- data/ext/ruby_nacl/NaCl/tests/auth6.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/box.c +63 -0
- data/ext/ruby_nacl/NaCl/tests/box.out +19 -0
- data/ext/ruby_nacl/NaCl/tests/box2.c +64 -0
- data/ext/ruby_nacl/NaCl/tests/box2.out +17 -0
- data/ext/ruby_nacl/NaCl/tests/box3.cpp +60 -0
- data/ext/ruby_nacl/NaCl/tests/box3.out +19 -0
- data/ext/ruby_nacl/NaCl/tests/box4.cpp +66 -0
- data/ext/ruby_nacl/NaCl/tests/box4.out +17 -0
- data/ext/ruby_nacl/NaCl/tests/box5.cpp +30 -0
- data/ext/ruby_nacl/NaCl/tests/box5.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/box6.cpp +43 -0
- data/ext/ruby_nacl/NaCl/tests/box6.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/box7.c +36 -0
- data/ext/ruby_nacl/NaCl/tests/box7.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/box8.c +41 -0
- data/ext/ruby_nacl/NaCl/tests/box8.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/core1.c +30 -0
- data/ext/ruby_nacl/NaCl/tests/core1.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/core2.c +33 -0
- data/ext/ruby_nacl/NaCl/tests/core2.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/core3.c +41 -0
- data/ext/ruby_nacl/NaCl/tests/core3.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/core4.c +33 -0
- data/ext/ruby_nacl/NaCl/tests/core4.out +8 -0
- data/ext/ruby_nacl/NaCl/tests/core5.c +32 -0
- data/ext/ruby_nacl/NaCl/tests/core5.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/core6.c +47 -0
- data/ext/ruby_nacl/NaCl/tests/core6.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/hash.c +14 -0
- data/ext/ruby_nacl/NaCl/tests/hash.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/hash2.cpp +18 -0
- data/ext/ruby_nacl/NaCl/tests/hash2.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/hash3.c +14 -0
- data/ext/ruby_nacl/NaCl/tests/hash3.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/hash4.cpp +18 -0
- data/ext/ruby_nacl/NaCl/tests/hash4.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth.c +42 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth.out +2 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth2.c +40 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth2.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth5.cpp +46 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth5.out +2 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth6.cpp +50 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth6.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth7.c +36 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth7.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth8.cpp +46 -0
- data/ext/ruby_nacl/NaCl/tests/onetimeauth8.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult.c +23 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult2.c +23 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult2.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult3.cpp +31 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult3.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult4.cpp +31 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult4.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult5.c +30 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult5.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult6.c +30 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult6.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult7.cpp +32 -0
- data/ext/ruby_nacl/NaCl/tests/scalarmult7.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox.c +56 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox.out +19 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox2.c +57 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox2.out +17 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox3.cpp +52 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox3.out +19 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox4.cpp +54 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox4.out +17 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox5.cpp +29 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox5.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox6.cpp +42 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox6.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox7.c +32 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox7.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox8.c +37 -0
- data/ext/ruby_nacl/NaCl/tests/secretbox8.out +0 -0
- data/ext/ruby_nacl/NaCl/tests/stream.c +29 -0
- data/ext/ruby_nacl/NaCl/tests/stream.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/stream2.c +27 -0
- data/ext/ruby_nacl/NaCl/tests/stream2.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/stream3.c +28 -0
- data/ext/ruby_nacl/NaCl/tests/stream3.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/stream4.c +53 -0
- data/ext/ruby_nacl/NaCl/tests/stream4.out +17 -0
- data/ext/ruby_nacl/NaCl/tests/stream5.cpp +29 -0
- data/ext/ruby_nacl/NaCl/tests/stream5.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/stream6.cpp +27 -0
- data/ext/ruby_nacl/NaCl/tests/stream6.out +1 -0
- data/ext/ruby_nacl/NaCl/tests/stream7.cpp +30 -0
- data/ext/ruby_nacl/NaCl/tests/stream7.out +4 -0
- data/ext/ruby_nacl/NaCl/tests/stream8.cpp +56 -0
- data/ext/ruby_nacl/NaCl/tests/stream8.out +17 -0
- data/ext/ruby_nacl/NaCl/try-anything.c +173 -0
- data/ext/ruby_nacl/NaCl/version +1 -0
- data/ext/ruby_nacl/extconf.rb +18 -0
- data/ext/ruby_nacl/ruby_nacl.cpp +147 -0
- data/ext/ruby_nacl/ruby_nacl.h +49 -0
- metadata +554 -0
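
Most of this release is the vendored NaCl library, including qhasm-generated assembly implementations of the crypto_stream interface (the salsa20/salsa2012/salsa208 stream.s files above; the salsa2012 amd64 one is shown in the diff below). For orientation, such an implementation is reached through NaCl's documented crypto_stream C API. The sketch below is illustrative only and is not part of the gem; it assumes a standard NaCl build whose generated crypto_stream_salsa2012.h header and link library are available.

    /* Illustrative caller for the salsa2012 stream cipher vendored in this gem.
     * Not part of the package; assumes a standard NaCl build on the include
     * and link paths, which provides crypto_stream_salsa2012.h. */
    #include <stdio.h>
    #include "crypto_stream_salsa2012.h"

    int main(void)
    {
      unsigned char k[crypto_stream_salsa2012_KEYBYTES] = {0};   /* 32-byte key   */
      unsigned char n[crypto_stream_salsa2012_NONCEBYTES] = {0}; /* 8-byte nonce  */
      unsigned char m[16] = "attack at dawn!";
      unsigned char c[16];

      /* c = m XOR salsa2012 keystream(k, n); the same call decrypts,
       * since XORing the keystream in twice restores the plaintext. */
      crypto_stream_salsa2012_xor(c, m, sizeof m, n, k);

      for (unsigned i = 0; i < sizeof c; i++) printf("%02x", c[i]);
      printf("\n");
      return 0;
    }

The key and nonce sizes match what the assembly below actually reads: a 32-byte key (loads at k+0 through k+28) and an 8-byte nonce (loads at iv+0 and iv+4).
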
data/ext/ruby_nacl/NaCl/crypto_stream/salsa2012/amd64_xmm6/stream.s
@@ -0,0 +1,4823 @@
|
|
|
1
|
+
|
|
2
|
+
# qhasm: int64 r11_caller
|
|
3
|
+
|
|
4
|
+
# qhasm: int64 r12_caller
|
|
5
|
+
|
|
6
|
+
# qhasm: int64 r13_caller
|
|
7
|
+
|
|
8
|
+
# qhasm: int64 r14_caller
|
|
9
|
+
|
|
10
|
+
# qhasm: int64 r15_caller
|
|
11
|
+
|
|
12
|
+
# qhasm: int64 rbx_caller
|
|
13
|
+
|
|
14
|
+
# qhasm: int64 rbp_caller
|
|
15
|
+
|
|
16
|
+
# qhasm: caller r11_caller
|
|
17
|
+
|
|
18
|
+
# qhasm: caller r12_caller
|
|
19
|
+
|
|
20
|
+
# qhasm: caller r13_caller
|
|
21
|
+
|
|
22
|
+
# qhasm: caller r14_caller
|
|
23
|
+
|
|
24
|
+
# qhasm: caller r15_caller
|
|
25
|
+
|
|
26
|
+
# qhasm: caller rbx_caller
|
|
27
|
+
|
|
28
|
+
# qhasm: caller rbp_caller
|
|
29
|
+
|
|
30
|
+
# qhasm: stack64 r11_stack
|
|
31
|
+
|
|
32
|
+
# qhasm: stack64 r12_stack
|
|
33
|
+
|
|
34
|
+
# qhasm: stack64 r13_stack
|
|
35
|
+
|
|
36
|
+
# qhasm: stack64 r14_stack
|
|
37
|
+
|
|
38
|
+
# qhasm: stack64 r15_stack
|
|
39
|
+
|
|
40
|
+
# qhasm: stack64 rbx_stack
|
|
41
|
+
|
|
42
|
+
# qhasm: stack64 rbp_stack
|
|
43
|
+
|
|
44
|
+
# qhasm: int64 a
|
|
45
|
+
|
|
46
|
+
# qhasm: int64 arg1
|
|
47
|
+
|
|
48
|
+
# qhasm: int64 arg2
|
|
49
|
+
|
|
50
|
+
# qhasm: int64 arg3
|
|
51
|
+
|
|
52
|
+
# qhasm: int64 arg4
|
|
53
|
+
|
|
54
|
+
# qhasm: int64 arg5
|
|
55
|
+
|
|
56
|
+
# qhasm: input arg1
|
|
57
|
+
|
|
58
|
+
# qhasm: input arg2
|
|
59
|
+
|
|
60
|
+
# qhasm: input arg3
|
|
61
|
+
|
|
62
|
+
# qhasm: input arg4
|
|
63
|
+
|
|
64
|
+
# qhasm: input arg5
|
|
65
|
+
|
|
66
|
+
# qhasm: int64 k
|
|
67
|
+
|
|
68
|
+
# qhasm: int64 kbits
|
|
69
|
+
|
|
70
|
+
# qhasm: int64 iv
|
|
71
|
+
|
|
72
|
+
# qhasm: int64 i
|
|
73
|
+
|
|
74
|
+
# qhasm: stack128 x0
|
|
75
|
+
|
|
76
|
+
# qhasm: stack128 x1
|
|
77
|
+
|
|
78
|
+
# qhasm: stack128 x2
|
|
79
|
+
|
|
80
|
+
# qhasm: stack128 x3
|
|
81
|
+
|
|
82
|
+
# qhasm: int64 m
|
|
83
|
+
|
|
84
|
+
# qhasm: int64 out
|
|
85
|
+
|
|
86
|
+
# qhasm: int64 bytes
|
|
87
|
+
|
|
88
|
+
# qhasm: stack32 eax_stack
|
|
89
|
+
|
|
90
|
+
# qhasm: stack32 ebx_stack
|
|
91
|
+
|
|
92
|
+
# qhasm: stack32 esi_stack
|
|
93
|
+
|
|
94
|
+
# qhasm: stack32 edi_stack
|
|
95
|
+
|
|
96
|
+
# qhasm: stack32 ebp_stack
|
|
97
|
+
|
|
98
|
+
# qhasm: int6464 diag0
|
|
99
|
+
|
|
100
|
+
# qhasm: int6464 diag1
|
|
101
|
+
|
|
102
|
+
# qhasm: int6464 diag2
|
|
103
|
+
|
|
104
|
+
# qhasm: int6464 diag3
|
|
105
|
+
|
|
106
|
+
# qhasm: int6464 a0
|
|
107
|
+
|
|
108
|
+
# qhasm: int6464 a1
|
|
109
|
+
|
|
110
|
+
# qhasm: int6464 a2
|
|
111
|
+
|
|
112
|
+
# qhasm: int6464 a3
|
|
113
|
+
|
|
114
|
+
# qhasm: int6464 a4
|
|
115
|
+
|
|
116
|
+
# qhasm: int6464 a5
|
|
117
|
+
|
|
118
|
+
# qhasm: int6464 a6
|
|
119
|
+
|
|
120
|
+
# qhasm: int6464 a7
|
|
121
|
+
|
|
122
|
+
# qhasm: int6464 b0
|
|
123
|
+
|
|
124
|
+
# qhasm: int6464 b1
|
|
125
|
+
|
|
126
|
+
# qhasm: int6464 b2
|
|
127
|
+
|
|
128
|
+
# qhasm: int6464 b3
|
|
129
|
+
|
|
130
|
+
# qhasm: int6464 b4
|
|
131
|
+
|
|
132
|
+
# qhasm: int6464 b5
|
|
133
|
+
|
|
134
|
+
# qhasm: int6464 b6
|
|
135
|
+
|
|
136
|
+
# qhasm: int6464 b7
|
|
137
|
+
|
|
138
|
+
# qhasm: int6464 z0
|
|
139
|
+
|
|
140
|
+
# qhasm: int6464 z1
|
|
141
|
+
|
|
142
|
+
# qhasm: int6464 z2
|
|
143
|
+
|
|
144
|
+
# qhasm: int6464 z3
|
|
145
|
+
|
|
146
|
+
# qhasm: int6464 z4
|
|
147
|
+
|
|
148
|
+
# qhasm: int6464 z5
|
|
149
|
+
|
|
150
|
+
# qhasm: int6464 z6
|
|
151
|
+
|
|
152
|
+
# qhasm: int6464 z7
|
|
153
|
+
|
|
154
|
+
# qhasm: int6464 z8
|
|
155
|
+
|
|
156
|
+
# qhasm: int6464 z9
|
|
157
|
+
|
|
158
|
+
# qhasm: int6464 z10
|
|
159
|
+
|
|
160
|
+
# qhasm: int6464 z11
|
|
161
|
+
|
|
162
|
+
# qhasm: int6464 z12
|
|
163
|
+
|
|
164
|
+
# qhasm: int6464 z13
|
|
165
|
+
|
|
166
|
+
# qhasm: int6464 z14
|
|
167
|
+
|
|
168
|
+
# qhasm: int6464 z15
|
|
169
|
+
|
|
170
|
+
# qhasm: stack128 z0_stack
|
|
171
|
+
|
|
172
|
+
# qhasm: stack128 z1_stack
|
|
173
|
+
|
|
174
|
+
# qhasm: stack128 z2_stack
|
|
175
|
+
|
|
176
|
+
# qhasm: stack128 z3_stack
|
|
177
|
+
|
|
178
|
+
# qhasm: stack128 z4_stack
|
|
179
|
+
|
|
180
|
+
# qhasm: stack128 z5_stack
|
|
181
|
+
|
|
182
|
+
# qhasm: stack128 z6_stack
|
|
183
|
+
|
|
184
|
+
# qhasm: stack128 z7_stack
|
|
185
|
+
|
|
186
|
+
# qhasm: stack128 z8_stack
|
|
187
|
+
|
|
188
|
+
# qhasm: stack128 z9_stack
|
|
189
|
+
|
|
190
|
+
# qhasm: stack128 z10_stack
|
|
191
|
+
|
|
192
|
+
# qhasm: stack128 z11_stack
|
|
193
|
+
|
|
194
|
+
# qhasm: stack128 z12_stack
|
|
195
|
+
|
|
196
|
+
# qhasm: stack128 z13_stack
|
|
197
|
+
|
|
198
|
+
# qhasm: stack128 z14_stack
|
|
199
|
+
|
|
200
|
+
# qhasm: stack128 z15_stack
|
|
201
|
+
|
|
202
|
+
# qhasm: int6464 y0
|
|
203
|
+
|
|
204
|
+
# qhasm: int6464 y1
|
|
205
|
+
|
|
206
|
+
# qhasm: int6464 y2
|
|
207
|
+
|
|
208
|
+
# qhasm: int6464 y3
|
|
209
|
+
|
|
210
|
+
# qhasm: int6464 y4
|
|
211
|
+
|
|
212
|
+
# qhasm: int6464 y5
|
|
213
|
+
|
|
214
|
+
# qhasm: int6464 y6
|
|
215
|
+
|
|
216
|
+
# qhasm: int6464 y7
|
|
217
|
+
|
|
218
|
+
# qhasm: int6464 y8
|
|
219
|
+
|
|
220
|
+
# qhasm: int6464 y9
|
|
221
|
+
|
|
222
|
+
# qhasm: int6464 y10
|
|
223
|
+
|
|
224
|
+
# qhasm: int6464 y11
|
|
225
|
+
|
|
226
|
+
# qhasm: int6464 y12
|
|
227
|
+
|
|
228
|
+
# qhasm: int6464 y13
|
|
229
|
+
|
|
230
|
+
# qhasm: int6464 y14
|
|
231
|
+
|
|
232
|
+
# qhasm: int6464 y15
|
|
233
|
+
|
|
234
|
+
# qhasm: int6464 r0
|
|
235
|
+
|
|
236
|
+
# qhasm: int6464 r1
|
|
237
|
+
|
|
238
|
+
# qhasm: int6464 r2
|
|
239
|
+
|
|
240
|
+
# qhasm: int6464 r3
|
|
241
|
+
|
|
242
|
+
# qhasm: int6464 r4
|
|
243
|
+
|
|
244
|
+
# qhasm: int6464 r5
|
|
245
|
+
|
|
246
|
+
# qhasm: int6464 r6
|
|
247
|
+
|
|
248
|
+
# qhasm: int6464 r7
|
|
249
|
+
|
|
250
|
+
# qhasm: int6464 r8
|
|
251
|
+
|
|
252
|
+
# qhasm: int6464 r9
|
|
253
|
+
|
|
254
|
+
# qhasm: int6464 r10
|
|
255
|
+
|
|
256
|
+
# qhasm: int6464 r11
|
|
257
|
+
|
|
258
|
+
# qhasm: int6464 r12
|
|
259
|
+
|
|
260
|
+
# qhasm: int6464 r13
|
|
261
|
+
|
|
262
|
+
# qhasm: int6464 r14
|
|
263
|
+
|
|
264
|
+
# qhasm: int6464 r15
|
|
265
|
+
|
|
266
|
+
# qhasm: stack128 orig0
|
|
267
|
+
|
|
268
|
+
# qhasm: stack128 orig1
|
|
269
|
+
|
|
270
|
+
# qhasm: stack128 orig2
|
|
271
|
+
|
|
272
|
+
# qhasm: stack128 orig3
|
|
273
|
+
|
|
274
|
+
# qhasm: stack128 orig4
|
|
275
|
+
|
|
276
|
+
# qhasm: stack128 orig5
|
|
277
|
+
|
|
278
|
+
# qhasm: stack128 orig6
|
|
279
|
+
|
|
280
|
+
# qhasm: stack128 orig7
|
|
281
|
+
|
|
282
|
+
# qhasm: stack128 orig8
|
|
283
|
+
|
|
284
|
+
# qhasm: stack128 orig9
|
|
285
|
+
|
|
286
|
+
# qhasm: stack128 orig10
|
|
287
|
+
|
|
288
|
+
# qhasm: stack128 orig11
|
|
289
|
+
|
|
290
|
+
# qhasm: stack128 orig12
|
|
291
|
+
|
|
292
|
+
# qhasm: stack128 orig13
|
|
293
|
+
|
|
294
|
+
# qhasm: stack128 orig14
|
|
295
|
+
|
|
296
|
+
# qhasm: stack128 orig15
|
|
297
|
+
|
|
298
|
+
# qhasm: int64 in0
|
|
299
|
+
|
|
300
|
+
# qhasm: int64 in1
|
|
301
|
+
|
|
302
|
+
# qhasm: int64 in2
|
|
303
|
+
|
|
304
|
+
# qhasm: int64 in3
|
|
305
|
+
|
|
306
|
+
# qhasm: int64 in4
|
|
307
|
+
|
|
308
|
+
# qhasm: int64 in5
|
|
309
|
+
|
|
310
|
+
# qhasm: int64 in6
|
|
311
|
+
|
|
312
|
+
# qhasm: int64 in7
|
|
313
|
+
|
|
314
|
+
# qhasm: int64 in8
|
|
315
|
+
|
|
316
|
+
# qhasm: int64 in9
|
|
317
|
+
|
|
318
|
+
# qhasm: int64 in10
|
|
319
|
+
|
|
320
|
+
# qhasm: int64 in11
|
|
321
|
+
|
|
322
|
+
# qhasm: int64 in12
|
|
323
|
+
|
|
324
|
+
# qhasm: int64 in13
|
|
325
|
+
|
|
326
|
+
# qhasm: int64 in14
|
|
327
|
+
|
|
328
|
+
# qhasm: int64 in15
|
|
329
|
+
|
|
330
|
+
# qhasm: stack512 tmp
|
|
331
|
+
|
|
332
|
+
# qhasm: int64 ctarget
|
|
333
|
+
|
|
334
|
+
# qhasm: stack64 bytes_backup
|
|
335
|
+
|
|
336
|
+
# qhasm: enter crypto_stream_salsa2012_amd64_xmm6
|
|
337
|
+
.text
|
|
338
|
+
.p2align 5
|
|
339
|
+
.globl _crypto_stream_salsa2012_amd64_xmm6
|
|
340
|
+
.globl crypto_stream_salsa2012_amd64_xmm6
|
|
341
|
+
_crypto_stream_salsa2012_amd64_xmm6:
|
|
342
|
+
crypto_stream_salsa2012_amd64_xmm6:
|
|
343
|
+
mov %rsp,%r11
|
|
344
|
+
and $31,%r11
|
|
345
|
+
add $480,%r11
|
|
346
|
+
sub %r11,%rsp
|
|
347
|
+
|
|
348
|
+
# qhasm: r11_stack = r11_caller
|
|
349
|
+
# asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1
|
|
350
|
+
# asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp)
|
|
351
|
+
movq %r11,352(%rsp)
|
|
352
|
+
|
|
353
|
+
# qhasm: r12_stack = r12_caller
|
|
354
|
+
# asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2
|
|
355
|
+
# asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp)
|
|
356
|
+
movq %r12,360(%rsp)
|
|
357
|
+
|
|
358
|
+
# qhasm: r13_stack = r13_caller
|
|
359
|
+
# asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3
|
|
360
|
+
# asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp)
|
|
361
|
+
movq %r13,368(%rsp)
|
|
362
|
+
|
|
363
|
+
# qhasm: r14_stack = r14_caller
|
|
364
|
+
# asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4
|
|
365
|
+
# asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp)
|
|
366
|
+
movq %r14,376(%rsp)
|
|
367
|
+
|
|
368
|
+
# qhasm: r15_stack = r15_caller
|
|
369
|
+
# asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5
|
|
370
|
+
# asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp)
|
|
371
|
+
movq %r15,384(%rsp)
|
|
372
|
+
|
|
373
|
+
# qhasm: rbx_stack = rbx_caller
|
|
374
|
+
# asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6
|
|
375
|
+
# asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp)
|
|
376
|
+
movq %rbx,392(%rsp)
|
|
377
|
+
|
|
378
|
+
# qhasm: rbp_stack = rbp_caller
|
|
379
|
+
# asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7
|
|
380
|
+
# asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp)
|
|
381
|
+
movq %rbp,400(%rsp)
|
|
382
|
+
|
|
383
|
+
# qhasm: bytes = arg2
|
|
384
|
+
# asm 1: mov <arg2=int64#2,>bytes=int64#6
|
|
385
|
+
# asm 2: mov <arg2=%rsi,>bytes=%r9
|
|
386
|
+
mov %rsi,%r9
|
|
387
|
+
|
|
388
|
+
# qhasm: out = arg1
|
|
389
|
+
# asm 1: mov <arg1=int64#1,>out=int64#1
|
|
390
|
+
# asm 2: mov <arg1=%rdi,>out=%rdi
|
|
391
|
+
mov %rdi,%rdi
|
|
392
|
+
|
|
393
|
+
# qhasm: m = out
|
|
394
|
+
# asm 1: mov <out=int64#1,>m=int64#2
|
|
395
|
+
# asm 2: mov <out=%rdi,>m=%rsi
|
|
396
|
+
mov %rdi,%rsi
|
|
397
|
+
|
|
398
|
+
# qhasm: iv = arg3
|
|
399
|
+
# asm 1: mov <arg3=int64#3,>iv=int64#3
|
|
400
|
+
# asm 2: mov <arg3=%rdx,>iv=%rdx
|
|
401
|
+
mov %rdx,%rdx
|
|
402
|
+
|
|
403
|
+
# qhasm: k = arg4
|
|
404
|
+
# asm 1: mov <arg4=int64#4,>k=int64#8
|
|
405
|
+
# asm 2: mov <arg4=%rcx,>k=%r10
|
|
406
|
+
mov %rcx,%r10
|
|
407
|
+
|
|
408
|
+
# qhasm: unsigned>? bytes - 0
|
|
409
|
+
# asm 1: cmp $0,<bytes=int64#6
|
|
410
|
+
# asm 2: cmp $0,<bytes=%r9
|
|
411
|
+
cmp $0,%r9
|
|
412
|
+
# comment:fp stack unchanged by jump
|
|
413
|
+
|
|
414
|
+
# qhasm: goto done if !unsigned>
|
|
415
|
+
jbe ._done
|
|
416
|
+
|
|
417
|
+
# qhasm: a = 0
|
|
418
|
+
# asm 1: mov $0,>a=int64#7
|
|
419
|
+
# asm 2: mov $0,>a=%rax
|
|
420
|
+
mov $0,%rax
|
|
421
|
+
|
|
422
|
+
# qhasm: i = bytes
|
|
423
|
+
# asm 1: mov <bytes=int64#6,>i=int64#4
|
|
424
|
+
# asm 2: mov <bytes=%r9,>i=%rcx
|
|
425
|
+
mov %r9,%rcx
|
|
426
|
+
|
|
427
|
+
# qhasm: while (i) { *out++ = a; --i }
|
|
428
|
+
rep stosb
|
|
429
|
+
|
|
430
|
+
# qhasm: out -= bytes
|
|
431
|
+
# asm 1: sub <bytes=int64#6,<out=int64#1
|
|
432
|
+
# asm 2: sub <bytes=%r9,<out=%rdi
|
|
433
|
+
sub %r9,%rdi
|
|
434
|
+
# comment:fp stack unchanged by jump
|
|
435
|
+
|
|
436
|
+
# qhasm: goto start
|
|
437
|
+
jmp ._start
|
|
438
|
+
|
|
439
|
+
# qhasm: enter crypto_stream_salsa2012_amd64_xmm6_xor
|
|
440
|
+
.text
|
|
441
|
+
.p2align 5
|
|
442
|
+
.globl _crypto_stream_salsa2012_amd64_xmm6_xor
|
|
443
|
+
.globl crypto_stream_salsa2012_amd64_xmm6_xor
|
|
444
|
+
_crypto_stream_salsa2012_amd64_xmm6_xor:
|
|
445
|
+
crypto_stream_salsa2012_amd64_xmm6_xor:
|
|
446
|
+
mov %rsp,%r11
|
|
447
|
+
and $31,%r11
|
|
448
|
+
add $480,%r11
|
|
449
|
+
sub %r11,%rsp
|
|
450
|
+
|
|
451
|
+
# qhasm: r11_stack = r11_caller
|
|
452
|
+
# asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1
|
|
453
|
+
# asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp)
|
|
454
|
+
movq %r11,352(%rsp)
|
|
455
|
+
|
|
456
|
+
# qhasm: r12_stack = r12_caller
|
|
457
|
+
# asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2
|
|
458
|
+
# asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp)
|
|
459
|
+
movq %r12,360(%rsp)
|
|
460
|
+
|
|
461
|
+
# qhasm: r13_stack = r13_caller
|
|
462
|
+
# asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3
|
|
463
|
+
# asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp)
|
|
464
|
+
movq %r13,368(%rsp)
|
|
465
|
+
|
|
466
|
+
# qhasm: r14_stack = r14_caller
|
|
467
|
+
# asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4
|
|
468
|
+
# asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp)
|
|
469
|
+
movq %r14,376(%rsp)
|
|
470
|
+
|
|
471
|
+
# qhasm: r15_stack = r15_caller
|
|
472
|
+
# asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5
|
|
473
|
+
# asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp)
|
|
474
|
+
movq %r15,384(%rsp)
|
|
475
|
+
|
|
476
|
+
# qhasm: rbx_stack = rbx_caller
|
|
477
|
+
# asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6
|
|
478
|
+
# asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp)
|
|
479
|
+
movq %rbx,392(%rsp)
|
|
480
|
+
|
|
481
|
+
# qhasm: rbp_stack = rbp_caller
|
|
482
|
+
# asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7
|
|
483
|
+
# asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp)
|
|
484
|
+
movq %rbp,400(%rsp)
|
|
485
|
+
|
|
486
|
+
# qhasm: out = arg1
|
|
487
|
+
# asm 1: mov <arg1=int64#1,>out=int64#1
|
|
488
|
+
# asm 2: mov <arg1=%rdi,>out=%rdi
|
|
489
|
+
mov %rdi,%rdi
|
|
490
|
+
|
|
491
|
+
# qhasm: m = arg2
|
|
492
|
+
# asm 1: mov <arg2=int64#2,>m=int64#2
|
|
493
|
+
# asm 2: mov <arg2=%rsi,>m=%rsi
|
|
494
|
+
mov %rsi,%rsi
|
|
495
|
+
|
|
496
|
+
# qhasm: bytes = arg3
|
|
497
|
+
# asm 1: mov <arg3=int64#3,>bytes=int64#6
|
|
498
|
+
# asm 2: mov <arg3=%rdx,>bytes=%r9
|
|
499
|
+
mov %rdx,%r9
|
|
500
|
+
|
|
501
|
+
# qhasm: iv = arg4
|
|
502
|
+
# asm 1: mov <arg4=int64#4,>iv=int64#3
|
|
503
|
+
# asm 2: mov <arg4=%rcx,>iv=%rdx
|
|
504
|
+
mov %rcx,%rdx
|
|
505
|
+
|
|
506
|
+
# qhasm: k = arg5
|
|
507
|
+
# asm 1: mov <arg5=int64#5,>k=int64#8
|
|
508
|
+
# asm 2: mov <arg5=%r8,>k=%r10
|
|
509
|
+
mov %r8,%r10
|
|
510
|
+
|
|
511
|
+
# qhasm: unsigned>? bytes - 0
|
|
512
|
+
# asm 1: cmp $0,<bytes=int64#6
|
|
513
|
+
# asm 2: cmp $0,<bytes=%r9
|
|
514
|
+
cmp $0,%r9
|
|
515
|
+
# comment:fp stack unchanged by jump
|
|
516
|
+
|
|
517
|
+
# qhasm: goto done if !unsigned>
|
|
518
|
+
jbe ._done
|
|
519
|
+
# comment:fp stack unchanged by fallthrough
|
|
520
|
+
|
|
521
|
+
# qhasm: start:
|
|
522
|
+
._start:
|
|
523
|
+
|
|
524
|
+
# qhasm: in12 = *(uint32 *) (k + 20)
|
|
525
|
+
# asm 1: movl 20(<k=int64#8),>in12=int64#4d
|
|
526
|
+
# asm 2: movl 20(<k=%r10),>in12=%ecx
|
|
527
|
+
movl 20(%r10),%ecx
|
|
528
|
+
|
|
529
|
+
# qhasm: in1 = *(uint32 *) (k + 0)
|
|
530
|
+
# asm 1: movl 0(<k=int64#8),>in1=int64#5d
|
|
531
|
+
# asm 2: movl 0(<k=%r10),>in1=%r8d
|
|
532
|
+
movl 0(%r10),%r8d
|
|
533
|
+
|
|
534
|
+
# qhasm: in6 = *(uint32 *) (iv + 0)
|
|
535
|
+
# asm 1: movl 0(<iv=int64#3),>in6=int64#7d
|
|
536
|
+
# asm 2: movl 0(<iv=%rdx),>in6=%eax
|
|
537
|
+
movl 0(%rdx),%eax
|
|
538
|
+
|
|
539
|
+
# qhasm: in11 = *(uint32 *) (k + 16)
|
|
540
|
+
# asm 1: movl 16(<k=int64#8),>in11=int64#9d
|
|
541
|
+
# asm 2: movl 16(<k=%r10),>in11=%r11d
|
|
542
|
+
movl 16(%r10),%r11d
|
|
543
|
+
|
|
544
|
+
# qhasm: ((uint32 *)&x1)[0] = in12
|
|
545
|
+
# asm 1: movl <in12=int64#4d,>x1=stack128#1
|
|
546
|
+
# asm 2: movl <in12=%ecx,>x1=0(%rsp)
|
|
547
|
+
movl %ecx,0(%rsp)
|
|
548
|
+
|
|
549
|
+
# qhasm: ((uint32 *)&x1)[1] = in1
|
|
550
|
+
# asm 1: movl <in1=int64#5d,4+<x1=stack128#1
|
|
551
|
+
# asm 2: movl <in1=%r8d,4+<x1=0(%rsp)
|
|
552
|
+
movl %r8d,4+0(%rsp)
|
|
553
|
+
|
|
554
|
+
# qhasm: ((uint32 *)&x1)[2] = in6
|
|
555
|
+
# asm 1: movl <in6=int64#7d,8+<x1=stack128#1
|
|
556
|
+
# asm 2: movl <in6=%eax,8+<x1=0(%rsp)
|
|
557
|
+
movl %eax,8+0(%rsp)
|
|
558
|
+
|
|
559
|
+
# qhasm: ((uint32 *)&x1)[3] = in11
|
|
560
|
+
# asm 1: movl <in11=int64#9d,12+<x1=stack128#1
|
|
561
|
+
# asm 2: movl <in11=%r11d,12+<x1=0(%rsp)
|
|
562
|
+
movl %r11d,12+0(%rsp)
|
|
563
|
+
|
|
564
|
+
# qhasm: in8 = 0
|
|
565
|
+
# asm 1: mov $0,>in8=int64#4
|
|
566
|
+
# asm 2: mov $0,>in8=%rcx
|
|
567
|
+
mov $0,%rcx
|
|
568
|
+
|
|
569
|
+
# qhasm: in13 = *(uint32 *) (k + 24)
|
|
570
|
+
# asm 1: movl 24(<k=int64#8),>in13=int64#5d
|
|
571
|
+
# asm 2: movl 24(<k=%r10),>in13=%r8d
|
|
572
|
+
movl 24(%r10),%r8d
|
|
573
|
+
|
|
574
|
+
# qhasm: in2 = *(uint32 *) (k + 4)
|
|
575
|
+
# asm 1: movl 4(<k=int64#8),>in2=int64#7d
|
|
576
|
+
# asm 2: movl 4(<k=%r10),>in2=%eax
|
|
577
|
+
movl 4(%r10),%eax
|
|
578
|
+
|
|
579
|
+
# qhasm: in7 = *(uint32 *) (iv + 4)
|
|
580
|
+
# asm 1: movl 4(<iv=int64#3),>in7=int64#3d
|
|
581
|
+
# asm 2: movl 4(<iv=%rdx),>in7=%edx
|
|
582
|
+
movl 4(%rdx),%edx
|
|
583
|
+
|
|
584
|
+
# qhasm: ((uint32 *)&x2)[0] = in8
|
|
585
|
+
# asm 1: movl <in8=int64#4d,>x2=stack128#2
|
|
586
|
+
# asm 2: movl <in8=%ecx,>x2=16(%rsp)
|
|
587
|
+
movl %ecx,16(%rsp)
|
|
588
|
+
|
|
589
|
+
# qhasm: ((uint32 *)&x2)[1] = in13
|
|
590
|
+
# asm 1: movl <in13=int64#5d,4+<x2=stack128#2
|
|
591
|
+
# asm 2: movl <in13=%r8d,4+<x2=16(%rsp)
|
|
592
|
+
movl %r8d,4+16(%rsp)
|
|
593
|
+
|
|
594
|
+
# qhasm: ((uint32 *)&x2)[2] = in2
|
|
595
|
+
# asm 1: movl <in2=int64#7d,8+<x2=stack128#2
|
|
596
|
+
# asm 2: movl <in2=%eax,8+<x2=16(%rsp)
|
|
597
|
+
movl %eax,8+16(%rsp)
|
|
598
|
+
|
|
599
|
+
# qhasm: ((uint32 *)&x2)[3] = in7
|
|
600
|
+
# asm 1: movl <in7=int64#3d,12+<x2=stack128#2
|
|
601
|
+
# asm 2: movl <in7=%edx,12+<x2=16(%rsp)
|
|
602
|
+
movl %edx,12+16(%rsp)
|
|
603
|
+
|
|
604
|
+
# qhasm: in4 = *(uint32 *) (k + 12)
|
|
605
|
+
# asm 1: movl 12(<k=int64#8),>in4=int64#3d
|
|
606
|
+
# asm 2: movl 12(<k=%r10),>in4=%edx
|
|
607
|
+
movl 12(%r10),%edx
|
|
608
|
+
|
|
609
|
+
# qhasm: in9 = 0
|
|
610
|
+
# asm 1: mov $0,>in9=int64#4
|
|
611
|
+
# asm 2: mov $0,>in9=%rcx
|
|
612
|
+
mov $0,%rcx
|
|
613
|
+
|
|
614
|
+
# qhasm: in14 = *(uint32 *) (k + 28)
|
|
615
|
+
# asm 1: movl 28(<k=int64#8),>in14=int64#5d
|
|
616
|
+
# asm 2: movl 28(<k=%r10),>in14=%r8d
|
|
617
|
+
movl 28(%r10),%r8d
|
|
618
|
+
|
|
619
|
+
# qhasm: in3 = *(uint32 *) (k + 8)
|
|
620
|
+
# asm 1: movl 8(<k=int64#8),>in3=int64#7d
|
|
621
|
+
# asm 2: movl 8(<k=%r10),>in3=%eax
|
|
622
|
+
movl 8(%r10),%eax
|
|
623
|
+
|
|
624
|
+
# qhasm: ((uint32 *)&x3)[0] = in4
|
|
625
|
+
# asm 1: movl <in4=int64#3d,>x3=stack128#3
|
|
626
|
+
# asm 2: movl <in4=%edx,>x3=32(%rsp)
|
|
627
|
+
movl %edx,32(%rsp)
|
|
628
|
+
|
|
629
|
+
# qhasm: ((uint32 *)&x3)[1] = in9
|
|
630
|
+
# asm 1: movl <in9=int64#4d,4+<x3=stack128#3
|
|
631
|
+
# asm 2: movl <in9=%ecx,4+<x3=32(%rsp)
|
|
632
|
+
movl %ecx,4+32(%rsp)
|
|
633
|
+
|
|
634
|
+
# qhasm: ((uint32 *)&x3)[2] = in14
|
|
635
|
+
# asm 1: movl <in14=int64#5d,8+<x3=stack128#3
|
|
636
|
+
# asm 2: movl <in14=%r8d,8+<x3=32(%rsp)
|
|
637
|
+
movl %r8d,8+32(%rsp)
|
|
638
|
+
|
|
639
|
+
# qhasm: ((uint32 *)&x3)[3] = in3
|
|
640
|
+
# asm 1: movl <in3=int64#7d,12+<x3=stack128#3
|
|
641
|
+
# asm 2: movl <in3=%eax,12+<x3=32(%rsp)
|
|
642
|
+
movl %eax,12+32(%rsp)
|
|
643
|
+
|
|
644
|
+
# qhasm: in0 = 1634760805
|
|
645
|
+
# asm 1: mov $1634760805,>in0=int64#3
|
|
646
|
+
# asm 2: mov $1634760805,>in0=%rdx
|
|
647
|
+
mov $1634760805,%rdx
|
|
648
|
+
|
|
649
|
+
# qhasm: in5 = 857760878
|
|
650
|
+
# asm 1: mov $857760878,>in5=int64#4
|
|
651
|
+
# asm 2: mov $857760878,>in5=%rcx
|
|
652
|
+
mov $857760878,%rcx
|
|
653
|
+
|
|
654
|
+
# qhasm: in10 = 2036477234
|
|
655
|
+
# asm 1: mov $2036477234,>in10=int64#5
|
|
656
|
+
# asm 2: mov $2036477234,>in10=%r8
|
|
657
|
+
mov $2036477234,%r8
|
|
658
|
+
|
|
659
|
+
# qhasm: in15 = 1797285236
|
|
660
|
+
# asm 1: mov $1797285236,>in15=int64#7
|
|
661
|
+
# asm 2: mov $1797285236,>in15=%rax
|
|
662
|
+
mov $1797285236,%rax
|
|
663
|
+
|
|
664
|
+
# qhasm: ((uint32 *)&x0)[0] = in0
|
|
665
|
+
# asm 1: movl <in0=int64#3d,>x0=stack128#4
|
|
666
|
+
# asm 2: movl <in0=%edx,>x0=48(%rsp)
|
|
667
|
+
movl %edx,48(%rsp)
|
|
668
|
+
|
|
669
|
+
# qhasm: ((uint32 *)&x0)[1] = in5
|
|
670
|
+
# asm 1: movl <in5=int64#4d,4+<x0=stack128#4
|
|
671
|
+
# asm 2: movl <in5=%ecx,4+<x0=48(%rsp)
|
|
672
|
+
movl %ecx,4+48(%rsp)
|
|
673
|
+
|
|
674
|
+
# qhasm: ((uint32 *)&x0)[2] = in10
|
|
675
|
+
# asm 1: movl <in10=int64#5d,8+<x0=stack128#4
|
|
676
|
+
# asm 2: movl <in10=%r8d,8+<x0=48(%rsp)
|
|
677
|
+
movl %r8d,8+48(%rsp)
|
|
678
|
+
|
|
679
|
+
# qhasm: ((uint32 *)&x0)[3] = in15
|
|
680
|
+
# asm 1: movl <in15=int64#7d,12+<x0=stack128#4
|
|
681
|
+
# asm 2: movl <in15=%eax,12+<x0=48(%rsp)
|
|
682
|
+
movl %eax,12+48(%rsp)
|
|
683
|
+
|
|
684
|
+
# qhasm: unsigned<? bytes - 256
|
|
685
|
+
# asm 1: cmp $256,<bytes=int64#6
|
|
686
|
+
# asm 2: cmp $256,<bytes=%r9
|
|
687
|
+
cmp $256,%r9
|
|
688
|
+
# comment:fp stack unchanged by jump
|
|
689
|
+
|
|
690
|
+
# qhasm: goto bytesbetween1and255 if unsigned<
|
|
691
|
+
jb ._bytesbetween1and255
|
|
692
|
+
|
|
693
|
+
# qhasm: z0 = x0
|
|
694
|
+
# asm 1: movdqa <x0=stack128#4,>z0=int6464#1
|
|
695
|
+
# asm 2: movdqa <x0=48(%rsp),>z0=%xmm0
|
|
696
|
+
movdqa 48(%rsp),%xmm0
|
|
697
|
+
|
|
698
|
+
# qhasm: z5 = z0[1,1,1,1]
|
|
699
|
+
# asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2
|
|
700
|
+
# asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1
|
|
701
|
+
pshufd $0x55,%xmm0,%xmm1
|
|
702
|
+
|
|
703
|
+
# qhasm: z10 = z0[2,2,2,2]
|
|
704
|
+
# asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3
|
|
705
|
+
# asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2
|
|
706
|
+
pshufd $0xaa,%xmm0,%xmm2
|
|
707
|
+
|
|
708
|
+
# qhasm: z15 = z0[3,3,3,3]
|
|
709
|
+
# asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4
|
|
710
|
+
# asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3
|
|
711
|
+
pshufd $0xff,%xmm0,%xmm3
|
|
712
|
+
|
|
713
|
+
# qhasm: z0 = z0[0,0,0,0]
|
|
714
|
+
# asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1
|
|
715
|
+
# asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0
|
|
716
|
+
pshufd $0x00,%xmm0,%xmm0
|
|
717
|
+
|
|
718
|
+
# qhasm: orig5 = z5
|
|
719
|
+
# asm 1: movdqa <z5=int6464#2,>orig5=stack128#5
|
|
720
|
+
# asm 2: movdqa <z5=%xmm1,>orig5=64(%rsp)
|
|
721
|
+
movdqa %xmm1,64(%rsp)
|
|
722
|
+
|
|
723
|
+
# qhasm: orig10 = z10
|
|
724
|
+
# asm 1: movdqa <z10=int6464#3,>orig10=stack128#6
|
|
725
|
+
# asm 2: movdqa <z10=%xmm2,>orig10=80(%rsp)
|
|
726
|
+
movdqa %xmm2,80(%rsp)
|
|
727
|
+
|
|
728
|
+
# qhasm: orig15 = z15
|
|
729
|
+
# asm 1: movdqa <z15=int6464#4,>orig15=stack128#7
|
|
730
|
+
# asm 2: movdqa <z15=%xmm3,>orig15=96(%rsp)
|
|
731
|
+
movdqa %xmm3,96(%rsp)
|
|
732
|
+
|
|
733
|
+
# qhasm: orig0 = z0
|
|
734
|
+
# asm 1: movdqa <z0=int6464#1,>orig0=stack128#8
|
|
735
|
+
# asm 2: movdqa <z0=%xmm0,>orig0=112(%rsp)
|
|
736
|
+
movdqa %xmm0,112(%rsp)
|
|
737
|
+
|
|
738
|
+
# qhasm: z1 = x1
|
|
739
|
+
# asm 1: movdqa <x1=stack128#1,>z1=int6464#1
|
|
740
|
+
# asm 2: movdqa <x1=0(%rsp),>z1=%xmm0
|
|
741
|
+
movdqa 0(%rsp),%xmm0
|
|
742
|
+
|
|
743
|
+
# qhasm: z6 = z1[2,2,2,2]
|
|
744
|
+
# asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2
|
|
745
|
+
# asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1
|
|
746
|
+
pshufd $0xaa,%xmm0,%xmm1

# qhasm: z11 = z1[3,3,3,3]
# asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3
# asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2
pshufd $0xff,%xmm0,%xmm2

# qhasm: z12 = z1[0,0,0,0]
# asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4
# asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3
pshufd $0x00,%xmm0,%xmm3

# qhasm: z1 = z1[1,1,1,1]
# asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1
# asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0
pshufd $0x55,%xmm0,%xmm0

# qhasm: orig6 = z6
# asm 1: movdqa <z6=int6464#2,>orig6=stack128#9
# asm 2: movdqa <z6=%xmm1,>orig6=128(%rsp)
movdqa %xmm1,128(%rsp)

# qhasm: orig11 = z11
# asm 1: movdqa <z11=int6464#3,>orig11=stack128#10
# asm 2: movdqa <z11=%xmm2,>orig11=144(%rsp)
movdqa %xmm2,144(%rsp)

# qhasm: orig12 = z12
# asm 1: movdqa <z12=int6464#4,>orig12=stack128#11
# asm 2: movdqa <z12=%xmm3,>orig12=160(%rsp)
movdqa %xmm3,160(%rsp)

# qhasm: orig1 = z1
# asm 1: movdqa <z1=int6464#1,>orig1=stack128#12
# asm 2: movdqa <z1=%xmm0,>orig1=176(%rsp)
movdqa %xmm0,176(%rsp)

# qhasm: z2 = x2
# asm 1: movdqa <x2=stack128#2,>z2=int6464#1
# asm 2: movdqa <x2=16(%rsp),>z2=%xmm0
movdqa 16(%rsp),%xmm0

# qhasm: z7 = z2[3,3,3,3]
# asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2
# asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1
pshufd $0xff,%xmm0,%xmm1

# qhasm: z13 = z2[1,1,1,1]
# asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3
# asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2
pshufd $0x55,%xmm0,%xmm2

# qhasm: z2 = z2[2,2,2,2]
# asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1
# asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0
pshufd $0xaa,%xmm0,%xmm0

# qhasm: orig7 = z7
# asm 1: movdqa <z7=int6464#2,>orig7=stack128#13
# asm 2: movdqa <z7=%xmm1,>orig7=192(%rsp)
movdqa %xmm1,192(%rsp)

# qhasm: orig13 = z13
# asm 1: movdqa <z13=int6464#3,>orig13=stack128#14
# asm 2: movdqa <z13=%xmm2,>orig13=208(%rsp)
movdqa %xmm2,208(%rsp)

# qhasm: orig2 = z2
# asm 1: movdqa <z2=int6464#1,>orig2=stack128#15
# asm 2: movdqa <z2=%xmm0,>orig2=224(%rsp)
movdqa %xmm0,224(%rsp)

# qhasm: z3 = x3
# asm 1: movdqa <x3=stack128#3,>z3=int6464#1
# asm 2: movdqa <x3=32(%rsp),>z3=%xmm0
movdqa 32(%rsp),%xmm0

# qhasm: z4 = z3[0,0,0,0]
# asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2
# asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1
pshufd $0x00,%xmm0,%xmm1

# qhasm: z14 = z3[2,2,2,2]
# asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3
# asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2
pshufd $0xaa,%xmm0,%xmm2

# qhasm: z3 = z3[3,3,3,3]
# asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1
# asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0
pshufd $0xff,%xmm0,%xmm0

# qhasm: orig4 = z4
# asm 1: movdqa <z4=int6464#2,>orig4=stack128#16
# asm 2: movdqa <z4=%xmm1,>orig4=240(%rsp)
movdqa %xmm1,240(%rsp)

# qhasm: orig14 = z14
# asm 1: movdqa <z14=int6464#3,>orig14=stack128#17
# asm 2: movdqa <z14=%xmm2,>orig14=256(%rsp)
movdqa %xmm2,256(%rsp)

# qhasm: orig3 = z3
# asm 1: movdqa <z3=int6464#1,>orig3=stack128#18
# asm 2: movdqa <z3=%xmm0,>orig3=272(%rsp)
movdqa %xmm0,272(%rsp)
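The pshufd/movdqa sequence above finishes transposing the per-block state: each 32-bit word of the packed rows x0..x3 is broadcast into all four lanes of its own 128-bit value and saved as orig0..orig15, so the main loop can run four 64-byte Salsa20 blocks in parallel, one per lane. A minimal C sketch of the same broadcast with SSE2 intrinsics (illustrative only; the helper names are hypothetical, not part of this package):

    #include <emmintrin.h>

    /* pshufd $0x00/$0x55/$0xaa/$0xff broadcasts word 0/1/2/3 of a
       packed row into all four 32-bit lanes. */
    static __m128i broadcast_word0(__m128i row) { return _mm_shuffle_epi32(row, 0x00); }
    static __m128i broadcast_word1(__m128i row) { return _mm_shuffle_epi32(row, 0x55); }
    static __m128i broadcast_word2(__m128i row) { return _mm_shuffle_epi32(row, 0xaa); }
    static __m128i broadcast_word3(__m128i row) { return _mm_shuffle_epi32(row, 0xff); }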
# qhasm: bytesatleast256:
._bytesatleast256:

# qhasm: in8 = ((uint32 *)&x2)[0]
# asm 1: movl <x2=stack128#2,>in8=int64#3d
# asm 2: movl <x2=16(%rsp),>in8=%edx
movl 16(%rsp),%edx

# qhasm: in9 = ((uint32 *)&x3)[1]
# asm 1: movl 4+<x3=stack128#3,>in9=int64#4d
# asm 2: movl 4+<x3=32(%rsp),>in9=%ecx
movl 4+32(%rsp),%ecx

# qhasm: ((uint32 *) &orig8)[0] = in8
# asm 1: movl <in8=int64#3d,>orig8=stack128#19
# asm 2: movl <in8=%edx,>orig8=288(%rsp)
movl %edx,288(%rsp)

# qhasm: ((uint32 *) &orig9)[0] = in9
# asm 1: movl <in9=int64#4d,>orig9=stack128#20
# asm 2: movl <in9=%ecx,>orig9=304(%rsp)
movl %ecx,304(%rsp)

# qhasm: in8 += 1
# asm 1: add $1,<in8=int64#3
# asm 2: add $1,<in8=%rdx
add $1,%rdx

# qhasm: in9 <<= 32
# asm 1: shl $32,<in9=int64#4
# asm 2: shl $32,<in9=%rcx
shl $32,%rcx

# qhasm: in8 += in9
# asm 1: add <in9=int64#4,<in8=int64#3
# asm 2: add <in9=%rcx,<in8=%rdx
add %rcx,%rdx

# qhasm: in9 = in8
# asm 1: mov <in8=int64#3,>in9=int64#4
# asm 2: mov <in8=%rdx,>in9=%rcx
mov %rdx,%rcx

# qhasm: (uint64) in9 >>= 32
# asm 1: shr $32,<in9=int64#4
# asm 2: shr $32,<in9=%rcx
shr $32,%rcx

# qhasm: ((uint32 *) &orig8)[1] = in8
# asm 1: movl <in8=int64#3d,4+<orig8=stack128#19
# asm 2: movl <in8=%edx,4+<orig8=288(%rsp)
movl %edx,4+288(%rsp)

# qhasm: ((uint32 *) &orig9)[1] = in9
# asm 1: movl <in9=int64#4d,4+<orig9=stack128#20
# asm 2: movl <in9=%ecx,4+<orig9=304(%rsp)
movl %ecx,4+304(%rsp)

# qhasm: in8 += 1
# asm 1: add $1,<in8=int64#3
# asm 2: add $1,<in8=%rdx
add $1,%rdx

# qhasm: in9 <<= 32
# asm 1: shl $32,<in9=int64#4
# asm 2: shl $32,<in9=%rcx
shl $32,%rcx

# qhasm: in8 += in9
# asm 1: add <in9=int64#4,<in8=int64#3
# asm 2: add <in9=%rcx,<in8=%rdx
add %rcx,%rdx

# qhasm: in9 = in8
# asm 1: mov <in8=int64#3,>in9=int64#4
# asm 2: mov <in8=%rdx,>in9=%rcx
mov %rdx,%rcx

# qhasm: (uint64) in9 >>= 32
# asm 1: shr $32,<in9=int64#4
# asm 2: shr $32,<in9=%rcx
shr $32,%rcx

# qhasm: ((uint32 *) &orig8)[2] = in8
# asm 1: movl <in8=int64#3d,8+<orig8=stack128#19
# asm 2: movl <in8=%edx,8+<orig8=288(%rsp)
movl %edx,8+288(%rsp)

# qhasm: ((uint32 *) &orig9)[2] = in9
# asm 1: movl <in9=int64#4d,8+<orig9=stack128#20
# asm 2: movl <in9=%ecx,8+<orig9=304(%rsp)
movl %ecx,8+304(%rsp)

# qhasm: in8 += 1
# asm 1: add $1,<in8=int64#3
# asm 2: add $1,<in8=%rdx
add $1,%rdx

# qhasm: in9 <<= 32
# asm 1: shl $32,<in9=int64#4
# asm 2: shl $32,<in9=%rcx
shl $32,%rcx

# qhasm: in8 += in9
# asm 1: add <in9=int64#4,<in8=int64#3
# asm 2: add <in9=%rcx,<in8=%rdx
add %rcx,%rdx

# qhasm: in9 = in8
# asm 1: mov <in8=int64#3,>in9=int64#4
# asm 2: mov <in8=%rdx,>in9=%rcx
mov %rdx,%rcx

# qhasm: (uint64) in9 >>= 32
# asm 1: shr $32,<in9=int64#4
# asm 2: shr $32,<in9=%rcx
shr $32,%rcx

# qhasm: ((uint32 *) &orig8)[3] = in8
# asm 1: movl <in8=int64#3d,12+<orig8=stack128#19
# asm 2: movl <in8=%edx,12+<orig8=288(%rsp)
movl %edx,12+288(%rsp)

# qhasm: ((uint32 *) &orig9)[3] = in9
# asm 1: movl <in9=int64#4d,12+<orig9=stack128#20
# asm 2: movl <in9=%ecx,12+<orig9=304(%rsp)
movl %ecx,12+304(%rsp)

# qhasm: in8 += 1
# asm 1: add $1,<in8=int64#3
# asm 2: add $1,<in8=%rdx
add $1,%rdx

# qhasm: in9 <<= 32
# asm 1: shl $32,<in9=int64#4
# asm 2: shl $32,<in9=%rcx
shl $32,%rcx

# qhasm: in8 += in9
# asm 1: add <in9=int64#4,<in8=int64#3
# asm 2: add <in9=%rcx,<in8=%rdx
add %rcx,%rdx

# qhasm: in9 = in8
# asm 1: mov <in8=int64#3,>in9=int64#4
# asm 2: mov <in8=%rdx,>in9=%rcx
mov %rdx,%rcx

# qhasm: (uint64) in9 >>= 32
# asm 1: shr $32,<in9=int64#4
# asm 2: shr $32,<in9=%rcx
shr $32,%rcx

# qhasm: ((uint32 *)&x2)[0] = in8
# asm 1: movl <in8=int64#3d,>x2=stack128#2
# asm 2: movl <in8=%edx,>x2=16(%rsp)
movl %edx,16(%rsp)

# qhasm: ((uint32 *)&x3)[1] = in9
# asm 1: movl <in9=int64#4d,4+<x3=stack128#3
# asm 2: movl <in9=%ecx,4+<x3=32(%rsp)
movl %ecx,4+32(%rsp)
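x2 and x3 are the transposed rows holding Salsa20 state words 8 and 9, i.e. the 64-bit block counter. The scalar block above splices the two 32-bit halves together, hands counter values n, n+1, n+2, n+3 to the four lanes of orig8/orig9, and writes n+4 back for the next 256-byte pass. A plain-C sketch of the same bookkeeping (illustrative; the helper name is hypothetical):

    #include <stdint.h>

    static void set_lane_counters(uint32_t x8, uint32_t x9,
                                  uint32_t orig8[4], uint32_t orig9[4],
                                  uint32_t *next8, uint32_t *next9)
    {
        uint64_t n = (uint64_t)x8 | ((uint64_t)x9 << 32);
        for (int lane = 0; lane < 4; lane++) {
            orig8[lane] = (uint32_t)n;          /* low half, this block  */
            orig9[lane] = (uint32_t)(n >> 32);  /* high half, this block */
            n++;                                /* next block's counter  */
        }
        *next8 = (uint32_t)n;                   /* counter + 4 written back */
        *next9 = (uint32_t)(n >> 32);
    }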
# qhasm: bytes_backup = bytes
# asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8
# asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp)
movq %r9,408(%rsp)

# qhasm: i = 12
# asm 1: mov $12,>i=int64#3
# asm 2: mov $12,>i=%rdx
mov $12,%rdx

# qhasm: z5 = orig5
# asm 1: movdqa <orig5=stack128#5,>z5=int6464#1
# asm 2: movdqa <orig5=64(%rsp),>z5=%xmm0
movdqa 64(%rsp),%xmm0

# qhasm: z10 = orig10
# asm 1: movdqa <orig10=stack128#6,>z10=int6464#2
# asm 2: movdqa <orig10=80(%rsp),>z10=%xmm1
movdqa 80(%rsp),%xmm1

# qhasm: z15 = orig15
# asm 1: movdqa <orig15=stack128#7,>z15=int6464#3
# asm 2: movdqa <orig15=96(%rsp),>z15=%xmm2
movdqa 96(%rsp),%xmm2

# qhasm: z14 = orig14
# asm 1: movdqa <orig14=stack128#17,>z14=int6464#4
# asm 2: movdqa <orig14=256(%rsp),>z14=%xmm3
movdqa 256(%rsp),%xmm3

# qhasm: z3 = orig3
# asm 1: movdqa <orig3=stack128#18,>z3=int6464#5
# asm 2: movdqa <orig3=272(%rsp),>z3=%xmm4
movdqa 272(%rsp),%xmm4

# qhasm: z6 = orig6
# asm 1: movdqa <orig6=stack128#9,>z6=int6464#6
# asm 2: movdqa <orig6=128(%rsp),>z6=%xmm5
movdqa 128(%rsp),%xmm5

# qhasm: z11 = orig11
# asm 1: movdqa <orig11=stack128#10,>z11=int6464#7
# asm 2: movdqa <orig11=144(%rsp),>z11=%xmm6
movdqa 144(%rsp),%xmm6

# qhasm: z1 = orig1
# asm 1: movdqa <orig1=stack128#12,>z1=int6464#8
# asm 2: movdqa <orig1=176(%rsp),>z1=%xmm7
movdqa 176(%rsp),%xmm7

# qhasm: z7 = orig7
# asm 1: movdqa <orig7=stack128#13,>z7=int6464#9
# asm 2: movdqa <orig7=192(%rsp),>z7=%xmm8
movdqa 192(%rsp),%xmm8

# qhasm: z13 = orig13
# asm 1: movdqa <orig13=stack128#14,>z13=int6464#10
# asm 2: movdqa <orig13=208(%rsp),>z13=%xmm9
movdqa 208(%rsp),%xmm9

# qhasm: z2 = orig2
# asm 1: movdqa <orig2=stack128#15,>z2=int6464#11
# asm 2: movdqa <orig2=224(%rsp),>z2=%xmm10
movdqa 224(%rsp),%xmm10

# qhasm: z9 = orig9
# asm 1: movdqa <orig9=stack128#20,>z9=int6464#12
# asm 2: movdqa <orig9=304(%rsp),>z9=%xmm11
movdqa 304(%rsp),%xmm11

# qhasm: z0 = orig0
# asm 1: movdqa <orig0=stack128#8,>z0=int6464#13
# asm 2: movdqa <orig0=112(%rsp),>z0=%xmm12
movdqa 112(%rsp),%xmm12

# qhasm: z12 = orig12
# asm 1: movdqa <orig12=stack128#11,>z12=int6464#14
# asm 2: movdqa <orig12=160(%rsp),>z12=%xmm13
movdqa 160(%rsp),%xmm13

# qhasm: z4 = orig4
# asm 1: movdqa <orig4=stack128#16,>z4=int6464#15
# asm 2: movdqa <orig4=240(%rsp),>z4=%xmm14
movdqa 240(%rsp),%xmm14

# qhasm: z8 = orig8
# asm 1: movdqa <orig8=stack128#19,>z8=int6464#16
# asm 2: movdqa <orig8=288(%rsp),>z8=%xmm15
movdqa 288(%rsp),%xmm15

# qhasm: mainloop1:
._mainloop1:
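Each pass through the loop below is one Salsa20 double round (a column round followed by a row round) applied simultaneously to four blocks, one per 32-bit lane of the sixteen xmm registers; z0/z5/z10/z15 take turns spilling to two stack slots because all sixteen registers are in use. For orientation, one double round in scalar, reference-style C (a sketch, not this package's code):

    /* R is a 32-bit left rotate. */
    #define R(v, k) (((v) << (k)) | ((v) >> (32 - (k))))

    static void double_round(uint32_t x[16])
    {
        /* column round */
        x[ 4] ^= R(x[ 0] + x[12],  7); x[ 8] ^= R(x[ 4] + x[ 0],  9);
        x[12] ^= R(x[ 8] + x[ 4], 13); x[ 0] ^= R(x[12] + x[ 8], 18);
        x[ 9] ^= R(x[ 5] + x[ 1],  7); x[13] ^= R(x[ 9] + x[ 5],  9);
        x[ 1] ^= R(x[13] + x[ 9], 13); x[ 5] ^= R(x[ 1] + x[13], 18);
        x[14] ^= R(x[10] + x[ 6],  7); x[ 2] ^= R(x[14] + x[10],  9);
        x[ 6] ^= R(x[ 2] + x[14], 13); x[10] ^= R(x[ 6] + x[ 2], 18);
        x[ 3] ^= R(x[15] + x[11],  7); x[ 7] ^= R(x[ 3] + x[15],  9);
        x[11] ^= R(x[ 7] + x[ 3], 13); x[15] ^= R(x[11] + x[ 7], 18);
        /* row round */
        x[ 1] ^= R(x[ 0] + x[ 3],  7); x[ 2] ^= R(x[ 1] + x[ 0],  9);
        x[ 3] ^= R(x[ 2] + x[ 1], 13); x[ 0] ^= R(x[ 3] + x[ 2], 18);
        x[ 6] ^= R(x[ 5] + x[ 4],  7); x[ 7] ^= R(x[ 6] + x[ 5],  9);
        x[ 4] ^= R(x[ 7] + x[ 6], 13); x[ 5] ^= R(x[ 4] + x[ 7], 18);
        x[11] ^= R(x[10] + x[ 9],  7); x[ 8] ^= R(x[11] + x[10],  9);
        x[ 9] ^= R(x[ 8] + x[11], 13); x[10] ^= R(x[ 9] + x[ 8], 18);
        x[12] ^= R(x[15] + x[14],  7); x[13] ^= R(x[12] + x[15],  9);
        x[14] ^= R(x[13] + x[12], 13); x[15] ^= R(x[14] + x[13], 18);
    }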
# qhasm: z10_stack = z10
# asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21
# asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp)
movdqa %xmm1,320(%rsp)

# qhasm: z15_stack = z15
# asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22
# asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp)
movdqa %xmm2,336(%rsp)

# qhasm: y4 = z12
# asm 1: movdqa <z12=int6464#14,>y4=int6464#2
# asm 2: movdqa <z12=%xmm13,>y4=%xmm1
movdqa %xmm13,%xmm1

# qhasm: uint32323232 y4 += z0
# asm 1: paddd <z0=int6464#13,<y4=int6464#2
# asm 2: paddd <z0=%xmm12,<y4=%xmm1
paddd %xmm12,%xmm1

# qhasm: r4 = y4
# asm 1: movdqa <y4=int6464#2,>r4=int6464#3
# asm 2: movdqa <y4=%xmm1,>r4=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y4 <<= 7
# asm 1: pslld $7,<y4=int6464#2
# asm 2: pslld $7,<y4=%xmm1
pslld $7,%xmm1

# qhasm: z4 ^= y4
# asm 1: pxor <y4=int6464#2,<z4=int6464#15
# asm 2: pxor <y4=%xmm1,<z4=%xmm14
pxor %xmm1,%xmm14

# qhasm: uint32323232 r4 >>= 25
# asm 1: psrld $25,<r4=int6464#3
# asm 2: psrld $25,<r4=%xmm2
psrld $25,%xmm2

# qhasm: z4 ^= r4
# asm 1: pxor <r4=int6464#3,<z4=int6464#15
# asm 2: pxor <r4=%xmm2,<z4=%xmm14
pxor %xmm2,%xmm14
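That is the first quarter-round step, z4 ^= (z0 + z12) <<< 7, spelled out: SSE2 has no packed 32-bit rotate, so the rotate is assembled from a left shift (pslld) and a right shift by 32-k (psrld), each XORed in separately. The same step in scalar C (a sketch under that observation, not the package's code):

    #include <stdint.h>

    static void quarter_step(uint32_t *z4, uint32_t z0, uint32_t z12)
    {
        uint32_t y = z0 + z12;  /* paddd */
        *z4 ^= y << 7;          /* pslld $7  + pxor */
        *z4 ^= y >> 25;         /* psrld $25 + pxor: together a rotate by 7 */
    }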
# qhasm: y9 = z1
# asm 1: movdqa <z1=int6464#8,>y9=int6464#2
# asm 2: movdqa <z1=%xmm7,>y9=%xmm1
movdqa %xmm7,%xmm1

# qhasm: uint32323232 y9 += z5
# asm 1: paddd <z5=int6464#1,<y9=int6464#2
# asm 2: paddd <z5=%xmm0,<y9=%xmm1
paddd %xmm0,%xmm1

# qhasm: r9 = y9
# asm 1: movdqa <y9=int6464#2,>r9=int6464#3
# asm 2: movdqa <y9=%xmm1,>r9=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y9 <<= 7
# asm 1: pslld $7,<y9=int6464#2
# asm 2: pslld $7,<y9=%xmm1
pslld $7,%xmm1

# qhasm: z9 ^= y9
# asm 1: pxor <y9=int6464#2,<z9=int6464#12
# asm 2: pxor <y9=%xmm1,<z9=%xmm11
pxor %xmm1,%xmm11

# qhasm: uint32323232 r9 >>= 25
# asm 1: psrld $25,<r9=int6464#3
# asm 2: psrld $25,<r9=%xmm2
psrld $25,%xmm2

# qhasm: z9 ^= r9
# asm 1: pxor <r9=int6464#3,<z9=int6464#12
# asm 2: pxor <r9=%xmm2,<z9=%xmm11
pxor %xmm2,%xmm11

# qhasm: y8 = z0
# asm 1: movdqa <z0=int6464#13,>y8=int6464#2
# asm 2: movdqa <z0=%xmm12,>y8=%xmm1
movdqa %xmm12,%xmm1

# qhasm: uint32323232 y8 += z4
# asm 1: paddd <z4=int6464#15,<y8=int6464#2
# asm 2: paddd <z4=%xmm14,<y8=%xmm1
paddd %xmm14,%xmm1

# qhasm: r8 = y8
# asm 1: movdqa <y8=int6464#2,>r8=int6464#3
# asm 2: movdqa <y8=%xmm1,>r8=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y8 <<= 9
# asm 1: pslld $9,<y8=int6464#2
# asm 2: pslld $9,<y8=%xmm1
pslld $9,%xmm1

# qhasm: z8 ^= y8
# asm 1: pxor <y8=int6464#2,<z8=int6464#16
# asm 2: pxor <y8=%xmm1,<z8=%xmm15
pxor %xmm1,%xmm15

# qhasm: uint32323232 r8 >>= 23
# asm 1: psrld $23,<r8=int6464#3
# asm 2: psrld $23,<r8=%xmm2
psrld $23,%xmm2

# qhasm: z8 ^= r8
# asm 1: pxor <r8=int6464#3,<z8=int6464#16
# asm 2: pxor <r8=%xmm2,<z8=%xmm15
pxor %xmm2,%xmm15

# qhasm: y13 = z5
# asm 1: movdqa <z5=int6464#1,>y13=int6464#2
# asm 2: movdqa <z5=%xmm0,>y13=%xmm1
movdqa %xmm0,%xmm1

# qhasm: uint32323232 y13 += z9
# asm 1: paddd <z9=int6464#12,<y13=int6464#2
# asm 2: paddd <z9=%xmm11,<y13=%xmm1
paddd %xmm11,%xmm1

# qhasm: r13 = y13
# asm 1: movdqa <y13=int6464#2,>r13=int6464#3
# asm 2: movdqa <y13=%xmm1,>r13=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y13 <<= 9
# asm 1: pslld $9,<y13=int6464#2
# asm 2: pslld $9,<y13=%xmm1
pslld $9,%xmm1

# qhasm: z13 ^= y13
# asm 1: pxor <y13=int6464#2,<z13=int6464#10
# asm 2: pxor <y13=%xmm1,<z13=%xmm9
pxor %xmm1,%xmm9

# qhasm: uint32323232 r13 >>= 23
# asm 1: psrld $23,<r13=int6464#3
# asm 2: psrld $23,<r13=%xmm2
psrld $23,%xmm2

# qhasm: z13 ^= r13
# asm 1: pxor <r13=int6464#3,<z13=int6464#10
# asm 2: pxor <r13=%xmm2,<z13=%xmm9
pxor %xmm2,%xmm9

# qhasm: y12 = z4
# asm 1: movdqa <z4=int6464#15,>y12=int6464#2
# asm 2: movdqa <z4=%xmm14,>y12=%xmm1
movdqa %xmm14,%xmm1

# qhasm: uint32323232 y12 += z8
# asm 1: paddd <z8=int6464#16,<y12=int6464#2
# asm 2: paddd <z8=%xmm15,<y12=%xmm1
paddd %xmm15,%xmm1

# qhasm: r12 = y12
# asm 1: movdqa <y12=int6464#2,>r12=int6464#3
# asm 2: movdqa <y12=%xmm1,>r12=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y12 <<= 13
# asm 1: pslld $13,<y12=int6464#2
# asm 2: pslld $13,<y12=%xmm1
pslld $13,%xmm1

# qhasm: z12 ^= y12
# asm 1: pxor <y12=int6464#2,<z12=int6464#14
# asm 2: pxor <y12=%xmm1,<z12=%xmm13
pxor %xmm1,%xmm13

# qhasm: uint32323232 r12 >>= 19
# asm 1: psrld $19,<r12=int6464#3
# asm 2: psrld $19,<r12=%xmm2
psrld $19,%xmm2

# qhasm: z12 ^= r12
# asm 1: pxor <r12=int6464#3,<z12=int6464#14
# asm 2: pxor <r12=%xmm2,<z12=%xmm13
pxor %xmm2,%xmm13

# qhasm: y1 = z9
# asm 1: movdqa <z9=int6464#12,>y1=int6464#2
# asm 2: movdqa <z9=%xmm11,>y1=%xmm1
movdqa %xmm11,%xmm1

# qhasm: uint32323232 y1 += z13
# asm 1: paddd <z13=int6464#10,<y1=int6464#2
# asm 2: paddd <z13=%xmm9,<y1=%xmm1
paddd %xmm9,%xmm1

# qhasm: r1 = y1
# asm 1: movdqa <y1=int6464#2,>r1=int6464#3
# asm 2: movdqa <y1=%xmm1,>r1=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y1 <<= 13
# asm 1: pslld $13,<y1=int6464#2
# asm 2: pslld $13,<y1=%xmm1
pslld $13,%xmm1

# qhasm: z1 ^= y1
# asm 1: pxor <y1=int6464#2,<z1=int6464#8
# asm 2: pxor <y1=%xmm1,<z1=%xmm7
pxor %xmm1,%xmm7

# qhasm: uint32323232 r1 >>= 19
# asm 1: psrld $19,<r1=int6464#3
# asm 2: psrld $19,<r1=%xmm2
psrld $19,%xmm2

# qhasm: z1 ^= r1
# asm 1: pxor <r1=int6464#3,<z1=int6464#8
# asm 2: pxor <r1=%xmm2,<z1=%xmm7
pxor %xmm2,%xmm7

# qhasm: y0 = z8
# asm 1: movdqa <z8=int6464#16,>y0=int6464#2
# asm 2: movdqa <z8=%xmm15,>y0=%xmm1
movdqa %xmm15,%xmm1

# qhasm: uint32323232 y0 += z12
# asm 1: paddd <z12=int6464#14,<y0=int6464#2
# asm 2: paddd <z12=%xmm13,<y0=%xmm1
paddd %xmm13,%xmm1

# qhasm: r0 = y0
# asm 1: movdqa <y0=int6464#2,>r0=int6464#3
# asm 2: movdqa <y0=%xmm1,>r0=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y0 <<= 18
# asm 1: pslld $18,<y0=int6464#2
# asm 2: pslld $18,<y0=%xmm1
pslld $18,%xmm1

# qhasm: z0 ^= y0
# asm 1: pxor <y0=int6464#2,<z0=int6464#13
# asm 2: pxor <y0=%xmm1,<z0=%xmm12
pxor %xmm1,%xmm12

# qhasm: uint32323232 r0 >>= 14
# asm 1: psrld $14,<r0=int6464#3
# asm 2: psrld $14,<r0=%xmm2
psrld $14,%xmm2

# qhasm: z0 ^= r0
# asm 1: pxor <r0=int6464#3,<z0=int6464#13
# asm 2: pxor <r0=%xmm2,<z0=%xmm12
pxor %xmm2,%xmm12

# qhasm: z10 = z10_stack
# asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2
# asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1
movdqa 320(%rsp),%xmm1

# qhasm: z0_stack = z0
# asm 1: movdqa <z0=int6464#13,>z0_stack=stack128#21
# asm 2: movdqa <z0=%xmm12,>z0_stack=320(%rsp)
movdqa %xmm12,320(%rsp)

# qhasm: y5 = z13
# asm 1: movdqa <z13=int6464#10,>y5=int6464#3
# asm 2: movdqa <z13=%xmm9,>y5=%xmm2
movdqa %xmm9,%xmm2

# qhasm: uint32323232 y5 += z1
# asm 1: paddd <z1=int6464#8,<y5=int6464#3
# asm 2: paddd <z1=%xmm7,<y5=%xmm2
paddd %xmm7,%xmm2

# qhasm: r5 = y5
# asm 1: movdqa <y5=int6464#3,>r5=int6464#13
# asm 2: movdqa <y5=%xmm2,>r5=%xmm12
movdqa %xmm2,%xmm12

# qhasm: uint32323232 y5 <<= 18
# asm 1: pslld $18,<y5=int6464#3
# asm 2: pslld $18,<y5=%xmm2
pslld $18,%xmm2

# qhasm: z5 ^= y5
# asm 1: pxor <y5=int6464#3,<z5=int6464#1
# asm 2: pxor <y5=%xmm2,<z5=%xmm0
pxor %xmm2,%xmm0

# qhasm: uint32323232 r5 >>= 14
# asm 1: psrld $14,<r5=int6464#13
# asm 2: psrld $14,<r5=%xmm12
psrld $14,%xmm12

# qhasm: z5 ^= r5
# asm 1: pxor <r5=int6464#13,<z5=int6464#1
# asm 2: pxor <r5=%xmm12,<z5=%xmm0
pxor %xmm12,%xmm0

# qhasm: y14 = z6
# asm 1: movdqa <z6=int6464#6,>y14=int6464#3
# asm 2: movdqa <z6=%xmm5,>y14=%xmm2
movdqa %xmm5,%xmm2

# qhasm: uint32323232 y14 += z10
# asm 1: paddd <z10=int6464#2,<y14=int6464#3
# asm 2: paddd <z10=%xmm1,<y14=%xmm2
paddd %xmm1,%xmm2

# qhasm: r14 = y14
# asm 1: movdqa <y14=int6464#3,>r14=int6464#13
# asm 2: movdqa <y14=%xmm2,>r14=%xmm12
movdqa %xmm2,%xmm12

# qhasm: uint32323232 y14 <<= 7
# asm 1: pslld $7,<y14=int6464#3
# asm 2: pslld $7,<y14=%xmm2
pslld $7,%xmm2

# qhasm: z14 ^= y14
# asm 1: pxor <y14=int6464#3,<z14=int6464#4
# asm 2: pxor <y14=%xmm2,<z14=%xmm3
pxor %xmm2,%xmm3

# qhasm: uint32323232 r14 >>= 25
# asm 1: psrld $25,<r14=int6464#13
# asm 2: psrld $25,<r14=%xmm12
psrld $25,%xmm12

# qhasm: z14 ^= r14
# asm 1: pxor <r14=int6464#13,<z14=int6464#4
# asm 2: pxor <r14=%xmm12,<z14=%xmm3
pxor %xmm12,%xmm3

# qhasm: z15 = z15_stack
# asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3
# asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2
movdqa 336(%rsp),%xmm2

# qhasm: z5_stack = z5
# asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#22
# asm 2: movdqa <z5=%xmm0,>z5_stack=336(%rsp)
movdqa %xmm0,336(%rsp)

# qhasm: y3 = z11
# asm 1: movdqa <z11=int6464#7,>y3=int6464#1
# asm 2: movdqa <z11=%xmm6,>y3=%xmm0
movdqa %xmm6,%xmm0

# qhasm: uint32323232 y3 += z15
# asm 1: paddd <z15=int6464#3,<y3=int6464#1
# asm 2: paddd <z15=%xmm2,<y3=%xmm0
paddd %xmm2,%xmm0

# qhasm: r3 = y3
# asm 1: movdqa <y3=int6464#1,>r3=int6464#13
# asm 2: movdqa <y3=%xmm0,>r3=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y3 <<= 7
# asm 1: pslld $7,<y3=int6464#1
# asm 2: pslld $7,<y3=%xmm0
pslld $7,%xmm0

# qhasm: z3 ^= y3
# asm 1: pxor <y3=int6464#1,<z3=int6464#5
# asm 2: pxor <y3=%xmm0,<z3=%xmm4
pxor %xmm0,%xmm4

# qhasm: uint32323232 r3 >>= 25
# asm 1: psrld $25,<r3=int6464#13
# asm 2: psrld $25,<r3=%xmm12
psrld $25,%xmm12

# qhasm: z3 ^= r3
# asm 1: pxor <r3=int6464#13,<z3=int6464#5
# asm 2: pxor <r3=%xmm12,<z3=%xmm4
pxor %xmm12,%xmm4

# qhasm: y2 = z10
# asm 1: movdqa <z10=int6464#2,>y2=int6464#1
# asm 2: movdqa <z10=%xmm1,>y2=%xmm0
movdqa %xmm1,%xmm0

# qhasm: uint32323232 y2 += z14
# asm 1: paddd <z14=int6464#4,<y2=int6464#1
# asm 2: paddd <z14=%xmm3,<y2=%xmm0
paddd %xmm3,%xmm0

# qhasm: r2 = y2
# asm 1: movdqa <y2=int6464#1,>r2=int6464#13
# asm 2: movdqa <y2=%xmm0,>r2=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y2 <<= 9
# asm 1: pslld $9,<y2=int6464#1
# asm 2: pslld $9,<y2=%xmm0
pslld $9,%xmm0

# qhasm: z2 ^= y2
# asm 1: pxor <y2=int6464#1,<z2=int6464#11
# asm 2: pxor <y2=%xmm0,<z2=%xmm10
pxor %xmm0,%xmm10

# qhasm: uint32323232 r2 >>= 23
# asm 1: psrld $23,<r2=int6464#13
# asm 2: psrld $23,<r2=%xmm12
psrld $23,%xmm12

# qhasm: z2 ^= r2
# asm 1: pxor <r2=int6464#13,<z2=int6464#11
# asm 2: pxor <r2=%xmm12,<z2=%xmm10
pxor %xmm12,%xmm10

# qhasm: y7 = z15
# asm 1: movdqa <z15=int6464#3,>y7=int6464#1
# asm 2: movdqa <z15=%xmm2,>y7=%xmm0
movdqa %xmm2,%xmm0

# qhasm: uint32323232 y7 += z3
# asm 1: paddd <z3=int6464#5,<y7=int6464#1
# asm 2: paddd <z3=%xmm4,<y7=%xmm0
paddd %xmm4,%xmm0

# qhasm: r7 = y7
# asm 1: movdqa <y7=int6464#1,>r7=int6464#13
# asm 2: movdqa <y7=%xmm0,>r7=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y7 <<= 9
# asm 1: pslld $9,<y7=int6464#1
# asm 2: pslld $9,<y7=%xmm0
pslld $9,%xmm0

# qhasm: z7 ^= y7
# asm 1: pxor <y7=int6464#1,<z7=int6464#9
# asm 2: pxor <y7=%xmm0,<z7=%xmm8
pxor %xmm0,%xmm8

# qhasm: uint32323232 r7 >>= 23
# asm 1: psrld $23,<r7=int6464#13
# asm 2: psrld $23,<r7=%xmm12
psrld $23,%xmm12

# qhasm: z7 ^= r7
# asm 1: pxor <r7=int6464#13,<z7=int6464#9
# asm 2: pxor <r7=%xmm12,<z7=%xmm8
pxor %xmm12,%xmm8

# qhasm: y6 = z14
# asm 1: movdqa <z14=int6464#4,>y6=int6464#1
# asm 2: movdqa <z14=%xmm3,>y6=%xmm0
movdqa %xmm3,%xmm0

# qhasm: uint32323232 y6 += z2
# asm 1: paddd <z2=int6464#11,<y6=int6464#1
# asm 2: paddd <z2=%xmm10,<y6=%xmm0
paddd %xmm10,%xmm0

# qhasm: r6 = y6
# asm 1: movdqa <y6=int6464#1,>r6=int6464#13
# asm 2: movdqa <y6=%xmm0,>r6=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y6 <<= 13
# asm 1: pslld $13,<y6=int6464#1
# asm 2: pslld $13,<y6=%xmm0
pslld $13,%xmm0

# qhasm: z6 ^= y6
# asm 1: pxor <y6=int6464#1,<z6=int6464#6
# asm 2: pxor <y6=%xmm0,<z6=%xmm5
pxor %xmm0,%xmm5

# qhasm: uint32323232 r6 >>= 19
# asm 1: psrld $19,<r6=int6464#13
# asm 2: psrld $19,<r6=%xmm12
psrld $19,%xmm12

# qhasm: z6 ^= r6
# asm 1: pxor <r6=int6464#13,<z6=int6464#6
# asm 2: pxor <r6=%xmm12,<z6=%xmm5
pxor %xmm12,%xmm5

# qhasm: y11 = z3
# asm 1: movdqa <z3=int6464#5,>y11=int6464#1
# asm 2: movdqa <z3=%xmm4,>y11=%xmm0
movdqa %xmm4,%xmm0

# qhasm: uint32323232 y11 += z7
# asm 1: paddd <z7=int6464#9,<y11=int6464#1
# asm 2: paddd <z7=%xmm8,<y11=%xmm0
paddd %xmm8,%xmm0

# qhasm: r11 = y11
# asm 1: movdqa <y11=int6464#1,>r11=int6464#13
# asm 2: movdqa <y11=%xmm0,>r11=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y11 <<= 13
# asm 1: pslld $13,<y11=int6464#1
# asm 2: pslld $13,<y11=%xmm0
pslld $13,%xmm0

# qhasm: z11 ^= y11
# asm 1: pxor <y11=int6464#1,<z11=int6464#7
# asm 2: pxor <y11=%xmm0,<z11=%xmm6
pxor %xmm0,%xmm6

# qhasm: uint32323232 r11 >>= 19
# asm 1: psrld $19,<r11=int6464#13
# asm 2: psrld $19,<r11=%xmm12
psrld $19,%xmm12

# qhasm: z11 ^= r11
# asm 1: pxor <r11=int6464#13,<z11=int6464#7
# asm 2: pxor <r11=%xmm12,<z11=%xmm6
pxor %xmm12,%xmm6

# qhasm: y10 = z2
# asm 1: movdqa <z2=int6464#11,>y10=int6464#1
# asm 2: movdqa <z2=%xmm10,>y10=%xmm0
movdqa %xmm10,%xmm0

# qhasm: uint32323232 y10 += z6
# asm 1: paddd <z6=int6464#6,<y10=int6464#1
# asm 2: paddd <z6=%xmm5,<y10=%xmm0
paddd %xmm5,%xmm0

# qhasm: r10 = y10
# asm 1: movdqa <y10=int6464#1,>r10=int6464#13
# asm 2: movdqa <y10=%xmm0,>r10=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y10 <<= 18
# asm 1: pslld $18,<y10=int6464#1
# asm 2: pslld $18,<y10=%xmm0
pslld $18,%xmm0

# qhasm: z10 ^= y10
# asm 1: pxor <y10=int6464#1,<z10=int6464#2
# asm 2: pxor <y10=%xmm0,<z10=%xmm1
pxor %xmm0,%xmm1

# qhasm: uint32323232 r10 >>= 14
# asm 1: psrld $14,<r10=int6464#13
# asm 2: psrld $14,<r10=%xmm12
psrld $14,%xmm12

# qhasm: z10 ^= r10
# asm 1: pxor <r10=int6464#13,<z10=int6464#2
# asm 2: pxor <r10=%xmm12,<z10=%xmm1
pxor %xmm12,%xmm1

# qhasm: z0 = z0_stack
# asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#1
# asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm0
movdqa 320(%rsp),%xmm0

# qhasm: z10_stack = z10
# asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21
# asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp)
movdqa %xmm1,320(%rsp)

# qhasm: y1 = z3
# asm 1: movdqa <z3=int6464#5,>y1=int6464#2
# asm 2: movdqa <z3=%xmm4,>y1=%xmm1
movdqa %xmm4,%xmm1

# qhasm: uint32323232 y1 += z0
# asm 1: paddd <z0=int6464#1,<y1=int6464#2
# asm 2: paddd <z0=%xmm0,<y1=%xmm1
paddd %xmm0,%xmm1

# qhasm: r1 = y1
# asm 1: movdqa <y1=int6464#2,>r1=int6464#13
# asm 2: movdqa <y1=%xmm1,>r1=%xmm12
movdqa %xmm1,%xmm12

# qhasm: uint32323232 y1 <<= 7
# asm 1: pslld $7,<y1=int6464#2
# asm 2: pslld $7,<y1=%xmm1
pslld $7,%xmm1

# qhasm: z1 ^= y1
# asm 1: pxor <y1=int6464#2,<z1=int6464#8
# asm 2: pxor <y1=%xmm1,<z1=%xmm7
pxor %xmm1,%xmm7

# qhasm: uint32323232 r1 >>= 25
# asm 1: psrld $25,<r1=int6464#13
# asm 2: psrld $25,<r1=%xmm12
psrld $25,%xmm12

# qhasm: z1 ^= r1
# asm 1: pxor <r1=int6464#13,<z1=int6464#8
# asm 2: pxor <r1=%xmm12,<z1=%xmm7
pxor %xmm12,%xmm7

# qhasm: y15 = z7
# asm 1: movdqa <z7=int6464#9,>y15=int6464#2
# asm 2: movdqa <z7=%xmm8,>y15=%xmm1
movdqa %xmm8,%xmm1

# qhasm: uint32323232 y15 += z11
# asm 1: paddd <z11=int6464#7,<y15=int6464#2
# asm 2: paddd <z11=%xmm6,<y15=%xmm1
paddd %xmm6,%xmm1

# qhasm: r15 = y15
# asm 1: movdqa <y15=int6464#2,>r15=int6464#13
# asm 2: movdqa <y15=%xmm1,>r15=%xmm12
movdqa %xmm1,%xmm12

# qhasm: uint32323232 y15 <<= 18
# asm 1: pslld $18,<y15=int6464#2
# asm 2: pslld $18,<y15=%xmm1
pslld $18,%xmm1

# qhasm: z15 ^= y15
# asm 1: pxor <y15=int6464#2,<z15=int6464#3
# asm 2: pxor <y15=%xmm1,<z15=%xmm2
pxor %xmm1,%xmm2

# qhasm: uint32323232 r15 >>= 14
# asm 1: psrld $14,<r15=int6464#13
# asm 2: psrld $14,<r15=%xmm12
psrld $14,%xmm12

# qhasm: z15 ^= r15
# asm 1: pxor <r15=int6464#13,<z15=int6464#3
# asm 2: pxor <r15=%xmm12,<z15=%xmm2
pxor %xmm12,%xmm2

# qhasm: z5 = z5_stack
# asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#13
# asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm12
movdqa 336(%rsp),%xmm12

# qhasm: z15_stack = z15
# asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22
# asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp)
movdqa %xmm2,336(%rsp)

# qhasm: y6 = z4
# asm 1: movdqa <z4=int6464#15,>y6=int6464#2
# asm 2: movdqa <z4=%xmm14,>y6=%xmm1
movdqa %xmm14,%xmm1

# qhasm: uint32323232 y6 += z5
# asm 1: paddd <z5=int6464#13,<y6=int6464#2
# asm 2: paddd <z5=%xmm12,<y6=%xmm1
paddd %xmm12,%xmm1

# qhasm: r6 = y6
# asm 1: movdqa <y6=int6464#2,>r6=int6464#3
# asm 2: movdqa <y6=%xmm1,>r6=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y6 <<= 7
# asm 1: pslld $7,<y6=int6464#2
# asm 2: pslld $7,<y6=%xmm1
pslld $7,%xmm1

# qhasm: z6 ^= y6
# asm 1: pxor <y6=int6464#2,<z6=int6464#6
# asm 2: pxor <y6=%xmm1,<z6=%xmm5
pxor %xmm1,%xmm5

# qhasm: uint32323232 r6 >>= 25
# asm 1: psrld $25,<r6=int6464#3
# asm 2: psrld $25,<r6=%xmm2
psrld $25,%xmm2

# qhasm: z6 ^= r6
# asm 1: pxor <r6=int6464#3,<z6=int6464#6
# asm 2: pxor <r6=%xmm2,<z6=%xmm5
pxor %xmm2,%xmm5

# qhasm: y2 = z0
# asm 1: movdqa <z0=int6464#1,>y2=int6464#2
# asm 2: movdqa <z0=%xmm0,>y2=%xmm1
movdqa %xmm0,%xmm1

# qhasm: uint32323232 y2 += z1
# asm 1: paddd <z1=int6464#8,<y2=int6464#2
# asm 2: paddd <z1=%xmm7,<y2=%xmm1
paddd %xmm7,%xmm1

# qhasm: r2 = y2
# asm 1: movdqa <y2=int6464#2,>r2=int6464#3
# asm 2: movdqa <y2=%xmm1,>r2=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y2 <<= 9
# asm 1: pslld $9,<y2=int6464#2
# asm 2: pslld $9,<y2=%xmm1
pslld $9,%xmm1

# qhasm: z2 ^= y2
# asm 1: pxor <y2=int6464#2,<z2=int6464#11
# asm 2: pxor <y2=%xmm1,<z2=%xmm10
pxor %xmm1,%xmm10

# qhasm: uint32323232 r2 >>= 23
# asm 1: psrld $23,<r2=int6464#3
# asm 2: psrld $23,<r2=%xmm2
psrld $23,%xmm2

# qhasm: z2 ^= r2
# asm 1: pxor <r2=int6464#3,<z2=int6464#11
# asm 2: pxor <r2=%xmm2,<z2=%xmm10
pxor %xmm2,%xmm10

# qhasm: y7 = z5
# asm 1: movdqa <z5=int6464#13,>y7=int6464#2
# asm 2: movdqa <z5=%xmm12,>y7=%xmm1
movdqa %xmm12,%xmm1

# qhasm: uint32323232 y7 += z6
# asm 1: paddd <z6=int6464#6,<y7=int6464#2
# asm 2: paddd <z6=%xmm5,<y7=%xmm1
paddd %xmm5,%xmm1

# qhasm: r7 = y7
# asm 1: movdqa <y7=int6464#2,>r7=int6464#3
# asm 2: movdqa <y7=%xmm1,>r7=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y7 <<= 9
# asm 1: pslld $9,<y7=int6464#2
# asm 2: pslld $9,<y7=%xmm1
pslld $9,%xmm1

# qhasm: z7 ^= y7
# asm 1: pxor <y7=int6464#2,<z7=int6464#9
# asm 2: pxor <y7=%xmm1,<z7=%xmm8
pxor %xmm1,%xmm8

# qhasm: uint32323232 r7 >>= 23
# asm 1: psrld $23,<r7=int6464#3
# asm 2: psrld $23,<r7=%xmm2
psrld $23,%xmm2

# qhasm: z7 ^= r7
# asm 1: pxor <r7=int6464#3,<z7=int6464#9
# asm 2: pxor <r7=%xmm2,<z7=%xmm8
pxor %xmm2,%xmm8

# qhasm: y3 = z1
# asm 1: movdqa <z1=int6464#8,>y3=int6464#2
# asm 2: movdqa <z1=%xmm7,>y3=%xmm1
movdqa %xmm7,%xmm1

# qhasm: uint32323232 y3 += z2
# asm 1: paddd <z2=int6464#11,<y3=int6464#2
# asm 2: paddd <z2=%xmm10,<y3=%xmm1
paddd %xmm10,%xmm1

# qhasm: r3 = y3
# asm 1: movdqa <y3=int6464#2,>r3=int6464#3
# asm 2: movdqa <y3=%xmm1,>r3=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y3 <<= 13
# asm 1: pslld $13,<y3=int6464#2
# asm 2: pslld $13,<y3=%xmm1
pslld $13,%xmm1

# qhasm: z3 ^= y3
# asm 1: pxor <y3=int6464#2,<z3=int6464#5
# asm 2: pxor <y3=%xmm1,<z3=%xmm4
pxor %xmm1,%xmm4

# qhasm: uint32323232 r3 >>= 19
# asm 1: psrld $19,<r3=int6464#3
# asm 2: psrld $19,<r3=%xmm2
psrld $19,%xmm2

# qhasm: z3 ^= r3
# asm 1: pxor <r3=int6464#3,<z3=int6464#5
# asm 2: pxor <r3=%xmm2,<z3=%xmm4
pxor %xmm2,%xmm4

# qhasm: y4 = z6
# asm 1: movdqa <z6=int6464#6,>y4=int6464#2
# asm 2: movdqa <z6=%xmm5,>y4=%xmm1
movdqa %xmm5,%xmm1

# qhasm: uint32323232 y4 += z7
# asm 1: paddd <z7=int6464#9,<y4=int6464#2
# asm 2: paddd <z7=%xmm8,<y4=%xmm1
paddd %xmm8,%xmm1

# qhasm: r4 = y4
# asm 1: movdqa <y4=int6464#2,>r4=int6464#3
# asm 2: movdqa <y4=%xmm1,>r4=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y4 <<= 13
# asm 1: pslld $13,<y4=int6464#2
# asm 2: pslld $13,<y4=%xmm1
pslld $13,%xmm1

# qhasm: z4 ^= y4
# asm 1: pxor <y4=int6464#2,<z4=int6464#15
# asm 2: pxor <y4=%xmm1,<z4=%xmm14
pxor %xmm1,%xmm14

# qhasm: uint32323232 r4 >>= 19
# asm 1: psrld $19,<r4=int6464#3
# asm 2: psrld $19,<r4=%xmm2
psrld $19,%xmm2

# qhasm: z4 ^= r4
# asm 1: pxor <r4=int6464#3,<z4=int6464#15
# asm 2: pxor <r4=%xmm2,<z4=%xmm14
pxor %xmm2,%xmm14

# qhasm: y0 = z2
# asm 1: movdqa <z2=int6464#11,>y0=int6464#2
# asm 2: movdqa <z2=%xmm10,>y0=%xmm1
movdqa %xmm10,%xmm1

# qhasm: uint32323232 y0 += z3
# asm 1: paddd <z3=int6464#5,<y0=int6464#2
# asm 2: paddd <z3=%xmm4,<y0=%xmm1
paddd %xmm4,%xmm1

# qhasm: r0 = y0
# asm 1: movdqa <y0=int6464#2,>r0=int6464#3
# asm 2: movdqa <y0=%xmm1,>r0=%xmm2
movdqa %xmm1,%xmm2

# qhasm: uint32323232 y0 <<= 18
# asm 1: pslld $18,<y0=int6464#2
# asm 2: pslld $18,<y0=%xmm1
pslld $18,%xmm1

# qhasm: z0 ^= y0
# asm 1: pxor <y0=int6464#2,<z0=int6464#1
# asm 2: pxor <y0=%xmm1,<z0=%xmm0
pxor %xmm1,%xmm0

# qhasm: uint32323232 r0 >>= 14
# asm 1: psrld $14,<r0=int6464#3
# asm 2: psrld $14,<r0=%xmm2
psrld $14,%xmm2

# qhasm: z0 ^= r0
# asm 1: pxor <r0=int6464#3,<z0=int6464#1
# asm 2: pxor <r0=%xmm2,<z0=%xmm0
pxor %xmm2,%xmm0

# qhasm: z10 = z10_stack
# asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2
# asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1
movdqa 320(%rsp),%xmm1

# qhasm: z0_stack = z0
# asm 1: movdqa <z0=int6464#1,>z0_stack=stack128#21
# asm 2: movdqa <z0=%xmm0,>z0_stack=320(%rsp)
movdqa %xmm0,320(%rsp)

# qhasm: y5 = z7
# asm 1: movdqa <z7=int6464#9,>y5=int6464#1
# asm 2: movdqa <z7=%xmm8,>y5=%xmm0
movdqa %xmm8,%xmm0

# qhasm: uint32323232 y5 += z4
# asm 1: paddd <z4=int6464#15,<y5=int6464#1
# asm 2: paddd <z4=%xmm14,<y5=%xmm0
paddd %xmm14,%xmm0

# qhasm: r5 = y5
# asm 1: movdqa <y5=int6464#1,>r5=int6464#3
# asm 2: movdqa <y5=%xmm0,>r5=%xmm2
movdqa %xmm0,%xmm2

# qhasm: uint32323232 y5 <<= 18
# asm 1: pslld $18,<y5=int6464#1
# asm 2: pslld $18,<y5=%xmm0
pslld $18,%xmm0

# qhasm: z5 ^= y5
# asm 1: pxor <y5=int6464#1,<z5=int6464#13
# asm 2: pxor <y5=%xmm0,<z5=%xmm12
pxor %xmm0,%xmm12

# qhasm: uint32323232 r5 >>= 14
# asm 1: psrld $14,<r5=int6464#3
# asm 2: psrld $14,<r5=%xmm2
psrld $14,%xmm2

# qhasm: z5 ^= r5
# asm 1: pxor <r5=int6464#3,<z5=int6464#13
# asm 2: pxor <r5=%xmm2,<z5=%xmm12
pxor %xmm2,%xmm12

# qhasm: y11 = z9
# asm 1: movdqa <z9=int6464#12,>y11=int6464#1
# asm 2: movdqa <z9=%xmm11,>y11=%xmm0
movdqa %xmm11,%xmm0

# qhasm: uint32323232 y11 += z10
# asm 1: paddd <z10=int6464#2,<y11=int6464#1
# asm 2: paddd <z10=%xmm1,<y11=%xmm0
paddd %xmm1,%xmm0

# qhasm: r11 = y11
# asm 1: movdqa <y11=int6464#1,>r11=int6464#3
# asm 2: movdqa <y11=%xmm0,>r11=%xmm2
movdqa %xmm0,%xmm2

# qhasm: uint32323232 y11 <<= 7
# asm 1: pslld $7,<y11=int6464#1
# asm 2: pslld $7,<y11=%xmm0
pslld $7,%xmm0

# qhasm: z11 ^= y11
# asm 1: pxor <y11=int6464#1,<z11=int6464#7
# asm 2: pxor <y11=%xmm0,<z11=%xmm6
pxor %xmm0,%xmm6

# qhasm: uint32323232 r11 >>= 25
# asm 1: psrld $25,<r11=int6464#3
# asm 2: psrld $25,<r11=%xmm2
psrld $25,%xmm2

# qhasm: z11 ^= r11
# asm 1: pxor <r11=int6464#3,<z11=int6464#7
# asm 2: pxor <r11=%xmm2,<z11=%xmm6
pxor %xmm2,%xmm6

# qhasm: z15 = z15_stack
# asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3
# asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2
movdqa 336(%rsp),%xmm2

# qhasm: z5_stack = z5
# asm 1: movdqa <z5=int6464#13,>z5_stack=stack128#22
# asm 2: movdqa <z5=%xmm12,>z5_stack=336(%rsp)
movdqa %xmm12,336(%rsp)

# qhasm: y12 = z14
# asm 1: movdqa <z14=int6464#4,>y12=int6464#1
# asm 2: movdqa <z14=%xmm3,>y12=%xmm0
movdqa %xmm3,%xmm0

# qhasm: uint32323232 y12 += z15
# asm 1: paddd <z15=int6464#3,<y12=int6464#1
# asm 2: paddd <z15=%xmm2,<y12=%xmm0
paddd %xmm2,%xmm0

# qhasm: r12 = y12
# asm 1: movdqa <y12=int6464#1,>r12=int6464#13
# asm 2: movdqa <y12=%xmm0,>r12=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y12 <<= 7
# asm 1: pslld $7,<y12=int6464#1
# asm 2: pslld $7,<y12=%xmm0
pslld $7,%xmm0

# qhasm: z12 ^= y12
# asm 1: pxor <y12=int6464#1,<z12=int6464#14
# asm 2: pxor <y12=%xmm0,<z12=%xmm13
pxor %xmm0,%xmm13

# qhasm: uint32323232 r12 >>= 25
# asm 1: psrld $25,<r12=int6464#13
# asm 2: psrld $25,<r12=%xmm12
psrld $25,%xmm12

# qhasm: z12 ^= r12
# asm 1: pxor <r12=int6464#13,<z12=int6464#14
# asm 2: pxor <r12=%xmm12,<z12=%xmm13
pxor %xmm12,%xmm13

# qhasm: y8 = z10
# asm 1: movdqa <z10=int6464#2,>y8=int6464#1
# asm 2: movdqa <z10=%xmm1,>y8=%xmm0
movdqa %xmm1,%xmm0

# qhasm: uint32323232 y8 += z11
# asm 1: paddd <z11=int6464#7,<y8=int6464#1
# asm 2: paddd <z11=%xmm6,<y8=%xmm0
paddd %xmm6,%xmm0

# qhasm: r8 = y8
# asm 1: movdqa <y8=int6464#1,>r8=int6464#13
# asm 2: movdqa <y8=%xmm0,>r8=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y8 <<= 9
# asm 1: pslld $9,<y8=int6464#1
# asm 2: pslld $9,<y8=%xmm0
pslld $9,%xmm0

# qhasm: z8 ^= y8
# asm 1: pxor <y8=int6464#1,<z8=int6464#16
# asm 2: pxor <y8=%xmm0,<z8=%xmm15
pxor %xmm0,%xmm15

# qhasm: uint32323232 r8 >>= 23
# asm 1: psrld $23,<r8=int6464#13
# asm 2: psrld $23,<r8=%xmm12
psrld $23,%xmm12

# qhasm: z8 ^= r8
# asm 1: pxor <r8=int6464#13,<z8=int6464#16
# asm 2: pxor <r8=%xmm12,<z8=%xmm15
pxor %xmm12,%xmm15

# qhasm: y13 = z15
# asm 1: movdqa <z15=int6464#3,>y13=int6464#1
# asm 2: movdqa <z15=%xmm2,>y13=%xmm0
movdqa %xmm2,%xmm0

# qhasm: uint32323232 y13 += z12
# asm 1: paddd <z12=int6464#14,<y13=int6464#1
# asm 2: paddd <z12=%xmm13,<y13=%xmm0
paddd %xmm13,%xmm0

# qhasm: r13 = y13
# asm 1: movdqa <y13=int6464#1,>r13=int6464#13
# asm 2: movdqa <y13=%xmm0,>r13=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y13 <<= 9
# asm 1: pslld $9,<y13=int6464#1
# asm 2: pslld $9,<y13=%xmm0
pslld $9,%xmm0

# qhasm: z13 ^= y13
# asm 1: pxor <y13=int6464#1,<z13=int6464#10
# asm 2: pxor <y13=%xmm0,<z13=%xmm9
pxor %xmm0,%xmm9

# qhasm: uint32323232 r13 >>= 23
# asm 1: psrld $23,<r13=int6464#13
# asm 2: psrld $23,<r13=%xmm12
psrld $23,%xmm12

# qhasm: z13 ^= r13
# asm 1: pxor <r13=int6464#13,<z13=int6464#10
# asm 2: pxor <r13=%xmm12,<z13=%xmm9
pxor %xmm12,%xmm9

# qhasm: y9 = z11
# asm 1: movdqa <z11=int6464#7,>y9=int6464#1
# asm 2: movdqa <z11=%xmm6,>y9=%xmm0
movdqa %xmm6,%xmm0

# qhasm: uint32323232 y9 += z8
# asm 1: paddd <z8=int6464#16,<y9=int6464#1
# asm 2: paddd <z8=%xmm15,<y9=%xmm0
paddd %xmm15,%xmm0

# qhasm: r9 = y9
# asm 1: movdqa <y9=int6464#1,>r9=int6464#13
# asm 2: movdqa <y9=%xmm0,>r9=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y9 <<= 13
# asm 1: pslld $13,<y9=int6464#1
# asm 2: pslld $13,<y9=%xmm0
pslld $13,%xmm0

# qhasm: z9 ^= y9
# asm 1: pxor <y9=int6464#1,<z9=int6464#12
# asm 2: pxor <y9=%xmm0,<z9=%xmm11
pxor %xmm0,%xmm11

# qhasm: uint32323232 r9 >>= 19
# asm 1: psrld $19,<r9=int6464#13
# asm 2: psrld $19,<r9=%xmm12
psrld $19,%xmm12

# qhasm: z9 ^= r9
# asm 1: pxor <r9=int6464#13,<z9=int6464#12
# asm 2: pxor <r9=%xmm12,<z9=%xmm11
pxor %xmm12,%xmm11

# qhasm: y14 = z12
# asm 1: movdqa <z12=int6464#14,>y14=int6464#1
# asm 2: movdqa <z12=%xmm13,>y14=%xmm0
movdqa %xmm13,%xmm0

# qhasm: uint32323232 y14 += z13
# asm 1: paddd <z13=int6464#10,<y14=int6464#1
# asm 2: paddd <z13=%xmm9,<y14=%xmm0
paddd %xmm9,%xmm0

# qhasm: r14 = y14
# asm 1: movdqa <y14=int6464#1,>r14=int6464#13
# asm 2: movdqa <y14=%xmm0,>r14=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y14 <<= 13
# asm 1: pslld $13,<y14=int6464#1
# asm 2: pslld $13,<y14=%xmm0
pslld $13,%xmm0

# qhasm: z14 ^= y14
# asm 1: pxor <y14=int6464#1,<z14=int6464#4
# asm 2: pxor <y14=%xmm0,<z14=%xmm3
pxor %xmm0,%xmm3

# qhasm: uint32323232 r14 >>= 19
# asm 1: psrld $19,<r14=int6464#13
# asm 2: psrld $19,<r14=%xmm12
psrld $19,%xmm12

# qhasm: z14 ^= r14
# asm 1: pxor <r14=int6464#13,<z14=int6464#4
# asm 2: pxor <r14=%xmm12,<z14=%xmm3
pxor %xmm12,%xmm3

# qhasm: y10 = z8
# asm 1: movdqa <z8=int6464#16,>y10=int6464#1
# asm 2: movdqa <z8=%xmm15,>y10=%xmm0
movdqa %xmm15,%xmm0

# qhasm: uint32323232 y10 += z9
# asm 1: paddd <z9=int6464#12,<y10=int6464#1
# asm 2: paddd <z9=%xmm11,<y10=%xmm0
paddd %xmm11,%xmm0

# qhasm: r10 = y10
# asm 1: movdqa <y10=int6464#1,>r10=int6464#13
# asm 2: movdqa <y10=%xmm0,>r10=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y10 <<= 18
# asm 1: pslld $18,<y10=int6464#1
# asm 2: pslld $18,<y10=%xmm0
pslld $18,%xmm0

# qhasm: z10 ^= y10
# asm 1: pxor <y10=int6464#1,<z10=int6464#2
# asm 2: pxor <y10=%xmm0,<z10=%xmm1
pxor %xmm0,%xmm1

# qhasm: uint32323232 r10 >>= 14
# asm 1: psrld $14,<r10=int6464#13
# asm 2: psrld $14,<r10=%xmm12
psrld $14,%xmm12

# qhasm: z10 ^= r10
# asm 1: pxor <r10=int6464#13,<z10=int6464#2
# asm 2: pxor <r10=%xmm12,<z10=%xmm1
pxor %xmm12,%xmm1

# qhasm: y15 = z13
# asm 1: movdqa <z13=int6464#10,>y15=int6464#1
# asm 2: movdqa <z13=%xmm9,>y15=%xmm0
movdqa %xmm9,%xmm0

# qhasm: uint32323232 y15 += z14
# asm 1: paddd <z14=int6464#4,<y15=int6464#1
# asm 2: paddd <z14=%xmm3,<y15=%xmm0
paddd %xmm3,%xmm0

# qhasm: r15 = y15
# asm 1: movdqa <y15=int6464#1,>r15=int6464#13
# asm 2: movdqa <y15=%xmm0,>r15=%xmm12
movdqa %xmm0,%xmm12

# qhasm: uint32323232 y15 <<= 18
# asm 1: pslld $18,<y15=int6464#1
# asm 2: pslld $18,<y15=%xmm0
pslld $18,%xmm0

# qhasm: z15 ^= y15
# asm 1: pxor <y15=int6464#1,<z15=int6464#3
# asm 2: pxor <y15=%xmm0,<z15=%xmm2
pxor %xmm0,%xmm2

# qhasm: uint32323232 r15 >>= 14
# asm 1: psrld $14,<r15=int6464#13
# asm 2: psrld $14,<r15=%xmm12
psrld $14,%xmm12

# qhasm: z15 ^= r15
# asm 1: pxor <r15=int6464#13,<z15=int6464#3
# asm 2: pxor <r15=%xmm12,<z15=%xmm2
pxor %xmm12,%xmm2

# qhasm: z0 = z0_stack
# asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#13
# asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm12
movdqa 320(%rsp),%xmm12

# qhasm: z5 = z5_stack
# asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#1
# asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm0
movdqa 336(%rsp),%xmm0

# qhasm: unsigned>? i -= 2
# asm 1: sub $2,<i=int64#3
# asm 2: sub $2,<i=%rdx
sub $2,%rdx
# comment:fp stack unchanged by jump

# qhasm: goto mainloop1 if unsigned>
ja ._mainloop1
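The counter starts at i = 12 and drops by 2 per double round, so this branch runs six double rounds, i.e. 12 rounds in total; that is consistent with the reduced-round Salsa20/12 ("salsa2012") variant that NaCl ships alongside the 20-round Salsa20, though the file's path in the package listing is the authority on which primitive this is. The loop shape, for orientation (illustrative sketch; double_round_x4 is a hypothetical 4-way analogue of the scalar double round sketched earlier):

    for (int i = 12; i > 0; i -= 2)
        double_round_x4(z);   /* one column round + one row round */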
# qhasm: uint32323232 z0 += orig0
# asm 1: paddd <orig0=stack128#8,<z0=int6464#13
# asm 2: paddd <orig0=112(%rsp),<z0=%xmm12
paddd 112(%rsp),%xmm12

# qhasm: uint32323232 z1 += orig1
# asm 1: paddd <orig1=stack128#12,<z1=int6464#8
# asm 2: paddd <orig1=176(%rsp),<z1=%xmm7
paddd 176(%rsp),%xmm7

# qhasm: uint32323232 z2 += orig2
# asm 1: paddd <orig2=stack128#15,<z2=int6464#11
# asm 2: paddd <orig2=224(%rsp),<z2=%xmm10
paddd 224(%rsp),%xmm10

# qhasm: uint32323232 z3 += orig3
# asm 1: paddd <orig3=stack128#18,<z3=int6464#5
# asm 2: paddd <orig3=272(%rsp),<z3=%xmm4
paddd 272(%rsp),%xmm4

# qhasm: in0 = z0
# asm 1: movd <z0=int6464#13,>in0=int64#3
# asm 2: movd <z0=%xmm12,>in0=%rdx
movd %xmm12,%rdx

# qhasm: in1 = z1
# asm 1: movd <z1=int6464#8,>in1=int64#4
# asm 2: movd <z1=%xmm7,>in1=%rcx
movd %xmm7,%rcx

# qhasm: in2 = z2
# asm 1: movd <z2=int6464#11,>in2=int64#5
# asm 2: movd <z2=%xmm10,>in2=%r8
movd %xmm10,%r8

# qhasm: in3 = z3
# asm 1: movd <z3=int6464#5,>in3=int64#6
# asm 2: movd <z3=%xmm4,>in3=%r9
movd %xmm4,%r9

# qhasm: z0 <<<= 96
# asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
# asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
pshufd $0x39,%xmm12,%xmm12

# qhasm: z1 <<<= 96
# asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
# asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
pshufd $0x39,%xmm7,%xmm7

# qhasm: z2 <<<= 96
# asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
# asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
pshufd $0x39,%xmm10,%xmm10

# qhasm: z3 <<<= 96
# asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
# asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
pshufd $0x39,%xmm4,%xmm4

# qhasm: (uint32) in0 ^= *(uint32 *) (m + 0)
# asm 1: xorl 0(<m=int64#2),<in0=int64#3d
# asm 2: xorl 0(<m=%rsi),<in0=%edx
xorl 0(%rsi),%edx

# qhasm: (uint32) in1 ^= *(uint32 *) (m + 4)
# asm 1: xorl 4(<m=int64#2),<in1=int64#4d
# asm 2: xorl 4(<m=%rsi),<in1=%ecx
xorl 4(%rsi),%ecx

# qhasm: (uint32) in2 ^= *(uint32 *) (m + 8)
# asm 1: xorl 8(<m=int64#2),<in2=int64#5d
# asm 2: xorl 8(<m=%rsi),<in2=%r8d
xorl 8(%rsi),%r8d

# qhasm: (uint32) in3 ^= *(uint32 *) (m + 12)
# asm 1: xorl 12(<m=int64#2),<in3=int64#6d
# asm 2: xorl 12(<m=%rsi),<in3=%r9d
xorl 12(%rsi),%r9d

# qhasm: *(uint32 *) (out + 0) = in0
# asm 1: movl <in0=int64#3d,0(<out=int64#1)
# asm 2: movl <in0=%edx,0(<out=%rdi)
movl %edx,0(%rdi)

# qhasm: *(uint32 *) (out + 4) = in1
# asm 1: movl <in1=int64#4d,4(<out=int64#1)
# asm 2: movl <in1=%ecx,4(<out=%rdi)
movl %ecx,4(%rdi)

# qhasm: *(uint32 *) (out + 8) = in2
# asm 1: movl <in2=int64#5d,8(<out=int64#1)
# asm 2: movl <in2=%r8d,8(<out=%rdi)
movl %r8d,8(%rdi)

# qhasm: *(uint32 *) (out + 12) = in3
# asm 1: movl <in3=int64#6d,12(<out=int64#1)
# asm 2: movl <in3=%r9d,12(<out=%rdi)
movl %r9d,12(%rdi)
|
|
2418
|
+
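
The four words just stored land at out + 0, 4, 8, 12; the same z0..z3 vectors still hold the matching words of the next three 64-byte blocks, one per 32-bit lane, which is why one register group's stores hit out + 0, 64, 128 and 192. Each movd below reads the low lane and pshufd $0x39 rotates the lanes so the next block's word drops into the low position; the fourth pass skips the rotate because the register is no longer needed. A sketch of that lane walk with SSE2 intrinsics (an illustration under the four-interleaved-block layout assumption; xor_lanes is not a name from this package):

    #include <emmintrin.h>
    #include <stdint.h>
    #include <string.h>

    /* XOR word `word` of four interleaved 64-byte blocks with the message.
     * Lane j of z holds word `word` of block j. */
    static void xor_lanes(uint8_t *out, const uint8_t *m, __m128i z, int word) {
        for (int blk = 0; blk < 4; blk++) {
            uint32_t w = (uint32_t)_mm_cvtsi128_si32(z);   /* movd: low lane */
            uint32_t mw;
            memcpy(&mw, m + 64*blk + 4*word, 4);
            w ^= mw;                                       /* xorl */
            memcpy(out + 64*blk + 4*word, &w, 4);          /* movl */
            z = _mm_shuffle_epi32(z, 0x39);   /* pshufd $0x39: next lane down */
        }
    }
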
# qhasm: in0 = z0
# asm 1: movd <z0=int6464#13,>in0=int64#3
# asm 2: movd <z0=%xmm12,>in0=%rdx
movd %xmm12,%rdx

# qhasm: in1 = z1
# asm 1: movd <z1=int6464#8,>in1=int64#4
# asm 2: movd <z1=%xmm7,>in1=%rcx
movd %xmm7,%rcx

# qhasm: in2 = z2
# asm 1: movd <z2=int6464#11,>in2=int64#5
# asm 2: movd <z2=%xmm10,>in2=%r8
movd %xmm10,%r8

# qhasm: in3 = z3
# asm 1: movd <z3=int6464#5,>in3=int64#6
# asm 2: movd <z3=%xmm4,>in3=%r9
movd %xmm4,%r9

# qhasm: z0 <<<= 96
# asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
# asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
pshufd $0x39,%xmm12,%xmm12

# qhasm: z1 <<<= 96
# asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
# asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
pshufd $0x39,%xmm7,%xmm7

# qhasm: z2 <<<= 96
# asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
# asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
pshufd $0x39,%xmm10,%xmm10

# qhasm: z3 <<<= 96
# asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
# asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
pshufd $0x39,%xmm4,%xmm4

# qhasm: (uint32) in0 ^= *(uint32 *) (m + 64)
# asm 1: xorl 64(<m=int64#2),<in0=int64#3d
# asm 2: xorl 64(<m=%rsi),<in0=%edx
xorl 64(%rsi),%edx

# qhasm: (uint32) in1 ^= *(uint32 *) (m + 68)
# asm 1: xorl 68(<m=int64#2),<in1=int64#4d
# asm 2: xorl 68(<m=%rsi),<in1=%ecx
xorl 68(%rsi),%ecx

# qhasm: (uint32) in2 ^= *(uint32 *) (m + 72)
# asm 1: xorl 72(<m=int64#2),<in2=int64#5d
# asm 2: xorl 72(<m=%rsi),<in2=%r8d
xorl 72(%rsi),%r8d

# qhasm: (uint32) in3 ^= *(uint32 *) (m + 76)
# asm 1: xorl 76(<m=int64#2),<in3=int64#6d
# asm 2: xorl 76(<m=%rsi),<in3=%r9d
xorl 76(%rsi),%r9d

# qhasm: *(uint32 *) (out + 64) = in0
# asm 1: movl <in0=int64#3d,64(<out=int64#1)
# asm 2: movl <in0=%edx,64(<out=%rdi)
movl %edx,64(%rdi)

# qhasm: *(uint32 *) (out + 68) = in1
# asm 1: movl <in1=int64#4d,68(<out=int64#1)
# asm 2: movl <in1=%ecx,68(<out=%rdi)
movl %ecx,68(%rdi)

# qhasm: *(uint32 *) (out + 72) = in2
# asm 1: movl <in2=int64#5d,72(<out=int64#1)
# asm 2: movl <in2=%r8d,72(<out=%rdi)
movl %r8d,72(%rdi)

# qhasm: *(uint32 *) (out + 76) = in3
# asm 1: movl <in3=int64#6d,76(<out=int64#1)
# asm 2: movl <in3=%r9d,76(<out=%rdi)
movl %r9d,76(%rdi)

# qhasm: in0 = z0
# asm 1: movd <z0=int6464#13,>in0=int64#3
# asm 2: movd <z0=%xmm12,>in0=%rdx
movd %xmm12,%rdx

# qhasm: in1 = z1
# asm 1: movd <z1=int6464#8,>in1=int64#4
# asm 2: movd <z1=%xmm7,>in1=%rcx
movd %xmm7,%rcx

# qhasm: in2 = z2
# asm 1: movd <z2=int6464#11,>in2=int64#5
# asm 2: movd <z2=%xmm10,>in2=%r8
movd %xmm10,%r8

# qhasm: in3 = z3
# asm 1: movd <z3=int6464#5,>in3=int64#6
# asm 2: movd <z3=%xmm4,>in3=%r9
movd %xmm4,%r9

# qhasm: z0 <<<= 96
# asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
# asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
pshufd $0x39,%xmm12,%xmm12

# qhasm: z1 <<<= 96
# asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
# asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
pshufd $0x39,%xmm7,%xmm7

# qhasm: z2 <<<= 96
# asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
# asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
pshufd $0x39,%xmm10,%xmm10

# qhasm: z3 <<<= 96
# asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
# asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
pshufd $0x39,%xmm4,%xmm4

# qhasm: (uint32) in0 ^= *(uint32 *) (m + 128)
# asm 1: xorl 128(<m=int64#2),<in0=int64#3d
# asm 2: xorl 128(<m=%rsi),<in0=%edx
xorl 128(%rsi),%edx

# qhasm: (uint32) in1 ^= *(uint32 *) (m + 132)
# asm 1: xorl 132(<m=int64#2),<in1=int64#4d
# asm 2: xorl 132(<m=%rsi),<in1=%ecx
xorl 132(%rsi),%ecx

# qhasm: (uint32) in2 ^= *(uint32 *) (m + 136)
# asm 1: xorl 136(<m=int64#2),<in2=int64#5d
# asm 2: xorl 136(<m=%rsi),<in2=%r8d
xorl 136(%rsi),%r8d

# qhasm: (uint32) in3 ^= *(uint32 *) (m + 140)
# asm 1: xorl 140(<m=int64#2),<in3=int64#6d
# asm 2: xorl 140(<m=%rsi),<in3=%r9d
xorl 140(%rsi),%r9d

# qhasm: *(uint32 *) (out + 128) = in0
# asm 1: movl <in0=int64#3d,128(<out=int64#1)
# asm 2: movl <in0=%edx,128(<out=%rdi)
movl %edx,128(%rdi)

# qhasm: *(uint32 *) (out + 132) = in1
# asm 1: movl <in1=int64#4d,132(<out=int64#1)
# asm 2: movl <in1=%ecx,132(<out=%rdi)
movl %ecx,132(%rdi)

# qhasm: *(uint32 *) (out + 136) = in2
# asm 1: movl <in2=int64#5d,136(<out=int64#1)
# asm 2: movl <in2=%r8d,136(<out=%rdi)
movl %r8d,136(%rdi)

# qhasm: *(uint32 *) (out + 140) = in3
# asm 1: movl <in3=int64#6d,140(<out=int64#1)
# asm 2: movl <in3=%r9d,140(<out=%rdi)
movl %r9d,140(%rdi)

# qhasm: in0 = z0
# asm 1: movd <z0=int6464#13,>in0=int64#3
# asm 2: movd <z0=%xmm12,>in0=%rdx
movd %xmm12,%rdx

# qhasm: in1 = z1
# asm 1: movd <z1=int6464#8,>in1=int64#4
# asm 2: movd <z1=%xmm7,>in1=%rcx
movd %xmm7,%rcx

# qhasm: in2 = z2
# asm 1: movd <z2=int6464#11,>in2=int64#5
# asm 2: movd <z2=%xmm10,>in2=%r8
movd %xmm10,%r8

# qhasm: in3 = z3
# asm 1: movd <z3=int6464#5,>in3=int64#6
# asm 2: movd <z3=%xmm4,>in3=%r9
movd %xmm4,%r9

# qhasm: (uint32) in0 ^= *(uint32 *) (m + 192)
# asm 1: xorl 192(<m=int64#2),<in0=int64#3d
# asm 2: xorl 192(<m=%rsi),<in0=%edx
xorl 192(%rsi),%edx

# qhasm: (uint32) in1 ^= *(uint32 *) (m + 196)
# asm 1: xorl 196(<m=int64#2),<in1=int64#4d
# asm 2: xorl 196(<m=%rsi),<in1=%ecx
xorl 196(%rsi),%ecx

# qhasm: (uint32) in2 ^= *(uint32 *) (m + 200)
# asm 1: xorl 200(<m=int64#2),<in2=int64#5d
# asm 2: xorl 200(<m=%rsi),<in2=%r8d
xorl 200(%rsi),%r8d

# qhasm: (uint32) in3 ^= *(uint32 *) (m + 204)
# asm 1: xorl 204(<m=int64#2),<in3=int64#6d
# asm 2: xorl 204(<m=%rsi),<in3=%r9d
xorl 204(%rsi),%r9d

# qhasm: *(uint32 *) (out + 192) = in0
# asm 1: movl <in0=int64#3d,192(<out=int64#1)
# asm 2: movl <in0=%edx,192(<out=%rdi)
movl %edx,192(%rdi)

# qhasm: *(uint32 *) (out + 196) = in1
# asm 1: movl <in1=int64#4d,196(<out=int64#1)
# asm 2: movl <in1=%ecx,196(<out=%rdi)
movl %ecx,196(%rdi)

# qhasm: *(uint32 *) (out + 200) = in2
# asm 1: movl <in2=int64#5d,200(<out=int64#1)
# asm 2: movl <in2=%r8d,200(<out=%rdi)
movl %r8d,200(%rdi)

# qhasm: *(uint32 *) (out + 204) = in3
# asm 1: movl <in3=int64#6d,204(<out=int64#1)
# asm 2: movl <in3=%r9d,204(<out=%rdi)
movl %r9d,204(%rdi)
# qhasm: uint32323232 z4 += orig4
# asm 1: paddd <orig4=stack128#16,<z4=int6464#15
# asm 2: paddd <orig4=240(%rsp),<z4=%xmm14
paddd 240(%rsp),%xmm14

# qhasm: uint32323232 z5 += orig5
# asm 1: paddd <orig5=stack128#5,<z5=int6464#1
# asm 2: paddd <orig5=64(%rsp),<z5=%xmm0
paddd 64(%rsp),%xmm0

# qhasm: uint32323232 z6 += orig6
# asm 1: paddd <orig6=stack128#9,<z6=int6464#6
# asm 2: paddd <orig6=128(%rsp),<z6=%xmm5
paddd 128(%rsp),%xmm5

# qhasm: uint32323232 z7 += orig7
# asm 1: paddd <orig7=stack128#13,<z7=int6464#9
# asm 2: paddd <orig7=192(%rsp),<z7=%xmm8
paddd 192(%rsp),%xmm8

# qhasm: in4 = z4
# asm 1: movd <z4=int6464#15,>in4=int64#3
# asm 2: movd <z4=%xmm14,>in4=%rdx
movd %xmm14,%rdx

# qhasm: in5 = z5
# asm 1: movd <z5=int6464#1,>in5=int64#4
# asm 2: movd <z5=%xmm0,>in5=%rcx
movd %xmm0,%rcx

# qhasm: in6 = z6
# asm 1: movd <z6=int6464#6,>in6=int64#5
# asm 2: movd <z6=%xmm5,>in6=%r8
movd %xmm5,%r8

# qhasm: in7 = z7
# asm 1: movd <z7=int6464#9,>in7=int64#6
# asm 2: movd <z7=%xmm8,>in7=%r9
movd %xmm8,%r9

# qhasm: z4 <<<= 96
# asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
# asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
pshufd $0x39,%xmm14,%xmm14

# qhasm: z5 <<<= 96
# asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
# asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
pshufd $0x39,%xmm0,%xmm0

# qhasm: z6 <<<= 96
# asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
# asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
pshufd $0x39,%xmm5,%xmm5

# qhasm: z7 <<<= 96
# asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
# asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
pshufd $0x39,%xmm8,%xmm8

# qhasm: (uint32) in4 ^= *(uint32 *) (m + 16)
# asm 1: xorl 16(<m=int64#2),<in4=int64#3d
# asm 2: xorl 16(<m=%rsi),<in4=%edx
xorl 16(%rsi),%edx

# qhasm: (uint32) in5 ^= *(uint32 *) (m + 20)
# asm 1: xorl 20(<m=int64#2),<in5=int64#4d
# asm 2: xorl 20(<m=%rsi),<in5=%ecx
xorl 20(%rsi),%ecx

# qhasm: (uint32) in6 ^= *(uint32 *) (m + 24)
# asm 1: xorl 24(<m=int64#2),<in6=int64#5d
# asm 2: xorl 24(<m=%rsi),<in6=%r8d
xorl 24(%rsi),%r8d

# qhasm: (uint32) in7 ^= *(uint32 *) (m + 28)
# asm 1: xorl 28(<m=int64#2),<in7=int64#6d
# asm 2: xorl 28(<m=%rsi),<in7=%r9d
xorl 28(%rsi),%r9d

# qhasm: *(uint32 *) (out + 16) = in4
# asm 1: movl <in4=int64#3d,16(<out=int64#1)
# asm 2: movl <in4=%edx,16(<out=%rdi)
movl %edx,16(%rdi)

# qhasm: *(uint32 *) (out + 20) = in5
# asm 1: movl <in5=int64#4d,20(<out=int64#1)
# asm 2: movl <in5=%ecx,20(<out=%rdi)
movl %ecx,20(%rdi)

# qhasm: *(uint32 *) (out + 24) = in6
# asm 1: movl <in6=int64#5d,24(<out=int64#1)
# asm 2: movl <in6=%r8d,24(<out=%rdi)
movl %r8d,24(%rdi)

# qhasm: *(uint32 *) (out + 28) = in7
# asm 1: movl <in7=int64#6d,28(<out=int64#1)
# asm 2: movl <in7=%r9d,28(<out=%rdi)
movl %r9d,28(%rdi)

# qhasm: in4 = z4
# asm 1: movd <z4=int6464#15,>in4=int64#3
# asm 2: movd <z4=%xmm14,>in4=%rdx
movd %xmm14,%rdx

# qhasm: in5 = z5
# asm 1: movd <z5=int6464#1,>in5=int64#4
# asm 2: movd <z5=%xmm0,>in5=%rcx
movd %xmm0,%rcx

# qhasm: in6 = z6
# asm 1: movd <z6=int6464#6,>in6=int64#5
# asm 2: movd <z6=%xmm5,>in6=%r8
movd %xmm5,%r8

# qhasm: in7 = z7
# asm 1: movd <z7=int6464#9,>in7=int64#6
# asm 2: movd <z7=%xmm8,>in7=%r9
movd %xmm8,%r9

# qhasm: z4 <<<= 96
# asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
# asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
pshufd $0x39,%xmm14,%xmm14

# qhasm: z5 <<<= 96
# asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
# asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
pshufd $0x39,%xmm0,%xmm0

# qhasm: z6 <<<= 96
# asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
# asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
pshufd $0x39,%xmm5,%xmm5

# qhasm: z7 <<<= 96
# asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
# asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
pshufd $0x39,%xmm8,%xmm8

# qhasm: (uint32) in4 ^= *(uint32 *) (m + 80)
# asm 1: xorl 80(<m=int64#2),<in4=int64#3d
# asm 2: xorl 80(<m=%rsi),<in4=%edx
xorl 80(%rsi),%edx

# qhasm: (uint32) in5 ^= *(uint32 *) (m + 84)
# asm 1: xorl 84(<m=int64#2),<in5=int64#4d
# asm 2: xorl 84(<m=%rsi),<in5=%ecx
xorl 84(%rsi),%ecx

# qhasm: (uint32) in6 ^= *(uint32 *) (m + 88)
# asm 1: xorl 88(<m=int64#2),<in6=int64#5d
# asm 2: xorl 88(<m=%rsi),<in6=%r8d
xorl 88(%rsi),%r8d

# qhasm: (uint32) in7 ^= *(uint32 *) (m + 92)
# asm 1: xorl 92(<m=int64#2),<in7=int64#6d
# asm 2: xorl 92(<m=%rsi),<in7=%r9d
xorl 92(%rsi),%r9d

# qhasm: *(uint32 *) (out + 80) = in4
# asm 1: movl <in4=int64#3d,80(<out=int64#1)
# asm 2: movl <in4=%edx,80(<out=%rdi)
movl %edx,80(%rdi)

# qhasm: *(uint32 *) (out + 84) = in5
# asm 1: movl <in5=int64#4d,84(<out=int64#1)
# asm 2: movl <in5=%ecx,84(<out=%rdi)
movl %ecx,84(%rdi)

# qhasm: *(uint32 *) (out + 88) = in6
# asm 1: movl <in6=int64#5d,88(<out=int64#1)
# asm 2: movl <in6=%r8d,88(<out=%rdi)
movl %r8d,88(%rdi)

# qhasm: *(uint32 *) (out + 92) = in7
# asm 1: movl <in7=int64#6d,92(<out=int64#1)
# asm 2: movl <in7=%r9d,92(<out=%rdi)
movl %r9d,92(%rdi)

# qhasm: in4 = z4
# asm 1: movd <z4=int6464#15,>in4=int64#3
# asm 2: movd <z4=%xmm14,>in4=%rdx
movd %xmm14,%rdx

# qhasm: in5 = z5
# asm 1: movd <z5=int6464#1,>in5=int64#4
# asm 2: movd <z5=%xmm0,>in5=%rcx
movd %xmm0,%rcx

# qhasm: in6 = z6
# asm 1: movd <z6=int6464#6,>in6=int64#5
# asm 2: movd <z6=%xmm5,>in6=%r8
movd %xmm5,%r8

# qhasm: in7 = z7
# asm 1: movd <z7=int6464#9,>in7=int64#6
# asm 2: movd <z7=%xmm8,>in7=%r9
movd %xmm8,%r9

# qhasm: z4 <<<= 96
# asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
# asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
pshufd $0x39,%xmm14,%xmm14

# qhasm: z5 <<<= 96
# asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
# asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
pshufd $0x39,%xmm0,%xmm0

# qhasm: z6 <<<= 96
# asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
# asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
pshufd $0x39,%xmm5,%xmm5

# qhasm: z7 <<<= 96
# asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
# asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
pshufd $0x39,%xmm8,%xmm8

# qhasm: (uint32) in4 ^= *(uint32 *) (m + 144)
# asm 1: xorl 144(<m=int64#2),<in4=int64#3d
# asm 2: xorl 144(<m=%rsi),<in4=%edx
xorl 144(%rsi),%edx

# qhasm: (uint32) in5 ^= *(uint32 *) (m + 148)
# asm 1: xorl 148(<m=int64#2),<in5=int64#4d
# asm 2: xorl 148(<m=%rsi),<in5=%ecx
xorl 148(%rsi),%ecx

# qhasm: (uint32) in6 ^= *(uint32 *) (m + 152)
# asm 1: xorl 152(<m=int64#2),<in6=int64#5d
# asm 2: xorl 152(<m=%rsi),<in6=%r8d
xorl 152(%rsi),%r8d

# qhasm: (uint32) in7 ^= *(uint32 *) (m + 156)
# asm 1: xorl 156(<m=int64#2),<in7=int64#6d
# asm 2: xorl 156(<m=%rsi),<in7=%r9d
xorl 156(%rsi),%r9d

# qhasm: *(uint32 *) (out + 144) = in4
# asm 1: movl <in4=int64#3d,144(<out=int64#1)
# asm 2: movl <in4=%edx,144(<out=%rdi)
movl %edx,144(%rdi)

# qhasm: *(uint32 *) (out + 148) = in5
# asm 1: movl <in5=int64#4d,148(<out=int64#1)
# asm 2: movl <in5=%ecx,148(<out=%rdi)
movl %ecx,148(%rdi)

# qhasm: *(uint32 *) (out + 152) = in6
# asm 1: movl <in6=int64#5d,152(<out=int64#1)
# asm 2: movl <in6=%r8d,152(<out=%rdi)
movl %r8d,152(%rdi)

# qhasm: *(uint32 *) (out + 156) = in7
# asm 1: movl <in7=int64#6d,156(<out=int64#1)
# asm 2: movl <in7=%r9d,156(<out=%rdi)
movl %r9d,156(%rdi)

# qhasm: in4 = z4
# asm 1: movd <z4=int6464#15,>in4=int64#3
# asm 2: movd <z4=%xmm14,>in4=%rdx
movd %xmm14,%rdx

# qhasm: in5 = z5
# asm 1: movd <z5=int6464#1,>in5=int64#4
# asm 2: movd <z5=%xmm0,>in5=%rcx
movd %xmm0,%rcx

# qhasm: in6 = z6
# asm 1: movd <z6=int6464#6,>in6=int64#5
# asm 2: movd <z6=%xmm5,>in6=%r8
movd %xmm5,%r8

# qhasm: in7 = z7
# asm 1: movd <z7=int6464#9,>in7=int64#6
# asm 2: movd <z7=%xmm8,>in7=%r9
movd %xmm8,%r9

# qhasm: (uint32) in4 ^= *(uint32 *) (m + 208)
# asm 1: xorl 208(<m=int64#2),<in4=int64#3d
# asm 2: xorl 208(<m=%rsi),<in4=%edx
xorl 208(%rsi),%edx

# qhasm: (uint32) in5 ^= *(uint32 *) (m + 212)
# asm 1: xorl 212(<m=int64#2),<in5=int64#4d
# asm 2: xorl 212(<m=%rsi),<in5=%ecx
xorl 212(%rsi),%ecx

# qhasm: (uint32) in6 ^= *(uint32 *) (m + 216)
# asm 1: xorl 216(<m=int64#2),<in6=int64#5d
# asm 2: xorl 216(<m=%rsi),<in6=%r8d
xorl 216(%rsi),%r8d

# qhasm: (uint32) in7 ^= *(uint32 *) (m + 220)
# asm 1: xorl 220(<m=int64#2),<in7=int64#6d
# asm 2: xorl 220(<m=%rsi),<in7=%r9d
xorl 220(%rsi),%r9d

# qhasm: *(uint32 *) (out + 208) = in4
# asm 1: movl <in4=int64#3d,208(<out=int64#1)
# asm 2: movl <in4=%edx,208(<out=%rdi)
movl %edx,208(%rdi)

# qhasm: *(uint32 *) (out + 212) = in5
# asm 1: movl <in5=int64#4d,212(<out=int64#1)
# asm 2: movl <in5=%ecx,212(<out=%rdi)
movl %ecx,212(%rdi)

# qhasm: *(uint32 *) (out + 216) = in6
# asm 1: movl <in6=int64#5d,216(<out=int64#1)
# asm 2: movl <in6=%r8d,216(<out=%rdi)
movl %r8d,216(%rdi)

# qhasm: *(uint32 *) (out + 220) = in7
# asm 1: movl <in7=int64#6d,220(<out=int64#1)
# asm 2: movl <in7=%r9d,220(<out=%rdi)
movl %r9d,220(%rdi)
# qhasm: uint32323232 z8 += orig8
# asm 1: paddd <orig8=stack128#19,<z8=int6464#16
# asm 2: paddd <orig8=288(%rsp),<z8=%xmm15
paddd 288(%rsp),%xmm15

# qhasm: uint32323232 z9 += orig9
# asm 1: paddd <orig9=stack128#20,<z9=int6464#12
# asm 2: paddd <orig9=304(%rsp),<z9=%xmm11
paddd 304(%rsp),%xmm11

# qhasm: uint32323232 z10 += orig10
# asm 1: paddd <orig10=stack128#6,<z10=int6464#2
# asm 2: paddd <orig10=80(%rsp),<z10=%xmm1
paddd 80(%rsp),%xmm1

# qhasm: uint32323232 z11 += orig11
# asm 1: paddd <orig11=stack128#10,<z11=int6464#7
# asm 2: paddd <orig11=144(%rsp),<z11=%xmm6
paddd 144(%rsp),%xmm6

# qhasm: in8 = z8
# asm 1: movd <z8=int6464#16,>in8=int64#3
# asm 2: movd <z8=%xmm15,>in8=%rdx
movd %xmm15,%rdx

# qhasm: in9 = z9
# asm 1: movd <z9=int6464#12,>in9=int64#4
# asm 2: movd <z9=%xmm11,>in9=%rcx
movd %xmm11,%rcx

# qhasm: in10 = z10
# asm 1: movd <z10=int6464#2,>in10=int64#5
# asm 2: movd <z10=%xmm1,>in10=%r8
movd %xmm1,%r8

# qhasm: in11 = z11
# asm 1: movd <z11=int6464#7,>in11=int64#6
# asm 2: movd <z11=%xmm6,>in11=%r9
movd %xmm6,%r9

# qhasm: z8 <<<= 96
# asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
# asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
pshufd $0x39,%xmm15,%xmm15

# qhasm: z9 <<<= 96
# asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
# asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
pshufd $0x39,%xmm11,%xmm11

# qhasm: z10 <<<= 96
# asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
# asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
pshufd $0x39,%xmm1,%xmm1

# qhasm: z11 <<<= 96
# asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
# asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
pshufd $0x39,%xmm6,%xmm6

# qhasm: (uint32) in8 ^= *(uint32 *) (m + 32)
# asm 1: xorl 32(<m=int64#2),<in8=int64#3d
# asm 2: xorl 32(<m=%rsi),<in8=%edx
xorl 32(%rsi),%edx

# qhasm: (uint32) in9 ^= *(uint32 *) (m + 36)
# asm 1: xorl 36(<m=int64#2),<in9=int64#4d
# asm 2: xorl 36(<m=%rsi),<in9=%ecx
xorl 36(%rsi),%ecx

# qhasm: (uint32) in10 ^= *(uint32 *) (m + 40)
# asm 1: xorl 40(<m=int64#2),<in10=int64#5d
# asm 2: xorl 40(<m=%rsi),<in10=%r8d
xorl 40(%rsi),%r8d

# qhasm: (uint32) in11 ^= *(uint32 *) (m + 44)
# asm 1: xorl 44(<m=int64#2),<in11=int64#6d
# asm 2: xorl 44(<m=%rsi),<in11=%r9d
xorl 44(%rsi),%r9d

# qhasm: *(uint32 *) (out + 32) = in8
# asm 1: movl <in8=int64#3d,32(<out=int64#1)
# asm 2: movl <in8=%edx,32(<out=%rdi)
movl %edx,32(%rdi)

# qhasm: *(uint32 *) (out + 36) = in9
# asm 1: movl <in9=int64#4d,36(<out=int64#1)
# asm 2: movl <in9=%ecx,36(<out=%rdi)
movl %ecx,36(%rdi)

# qhasm: *(uint32 *) (out + 40) = in10
# asm 1: movl <in10=int64#5d,40(<out=int64#1)
# asm 2: movl <in10=%r8d,40(<out=%rdi)
movl %r8d,40(%rdi)

# qhasm: *(uint32 *) (out + 44) = in11
# asm 1: movl <in11=int64#6d,44(<out=int64#1)
# asm 2: movl <in11=%r9d,44(<out=%rdi)
movl %r9d,44(%rdi)

# qhasm: in8 = z8
# asm 1: movd <z8=int6464#16,>in8=int64#3
# asm 2: movd <z8=%xmm15,>in8=%rdx
movd %xmm15,%rdx

# qhasm: in9 = z9
# asm 1: movd <z9=int6464#12,>in9=int64#4
# asm 2: movd <z9=%xmm11,>in9=%rcx
movd %xmm11,%rcx

# qhasm: in10 = z10
# asm 1: movd <z10=int6464#2,>in10=int64#5
# asm 2: movd <z10=%xmm1,>in10=%r8
movd %xmm1,%r8

# qhasm: in11 = z11
# asm 1: movd <z11=int6464#7,>in11=int64#6
# asm 2: movd <z11=%xmm6,>in11=%r9
movd %xmm6,%r9

# qhasm: z8 <<<= 96
# asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
# asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
pshufd $0x39,%xmm15,%xmm15

# qhasm: z9 <<<= 96
# asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
# asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
pshufd $0x39,%xmm11,%xmm11

# qhasm: z10 <<<= 96
# asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
# asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
pshufd $0x39,%xmm1,%xmm1

# qhasm: z11 <<<= 96
# asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
# asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
pshufd $0x39,%xmm6,%xmm6

# qhasm: (uint32) in8 ^= *(uint32 *) (m + 96)
# asm 1: xorl 96(<m=int64#2),<in8=int64#3d
# asm 2: xorl 96(<m=%rsi),<in8=%edx
xorl 96(%rsi),%edx

# qhasm: (uint32) in9 ^= *(uint32 *) (m + 100)
# asm 1: xorl 100(<m=int64#2),<in9=int64#4d
# asm 2: xorl 100(<m=%rsi),<in9=%ecx
xorl 100(%rsi),%ecx

# qhasm: (uint32) in10 ^= *(uint32 *) (m + 104)
# asm 1: xorl 104(<m=int64#2),<in10=int64#5d
# asm 2: xorl 104(<m=%rsi),<in10=%r8d
xorl 104(%rsi),%r8d

# qhasm: (uint32) in11 ^= *(uint32 *) (m + 108)
# asm 1: xorl 108(<m=int64#2),<in11=int64#6d
# asm 2: xorl 108(<m=%rsi),<in11=%r9d
xorl 108(%rsi),%r9d

# qhasm: *(uint32 *) (out + 96) = in8
# asm 1: movl <in8=int64#3d,96(<out=int64#1)
# asm 2: movl <in8=%edx,96(<out=%rdi)
movl %edx,96(%rdi)

# qhasm: *(uint32 *) (out + 100) = in9
# asm 1: movl <in9=int64#4d,100(<out=int64#1)
# asm 2: movl <in9=%ecx,100(<out=%rdi)
movl %ecx,100(%rdi)

# qhasm: *(uint32 *) (out + 104) = in10
# asm 1: movl <in10=int64#5d,104(<out=int64#1)
# asm 2: movl <in10=%r8d,104(<out=%rdi)
movl %r8d,104(%rdi)

# qhasm: *(uint32 *) (out + 108) = in11
# asm 1: movl <in11=int64#6d,108(<out=int64#1)
# asm 2: movl <in11=%r9d,108(<out=%rdi)
movl %r9d,108(%rdi)

# qhasm: in8 = z8
# asm 1: movd <z8=int6464#16,>in8=int64#3
# asm 2: movd <z8=%xmm15,>in8=%rdx
movd %xmm15,%rdx

# qhasm: in9 = z9
# asm 1: movd <z9=int6464#12,>in9=int64#4
# asm 2: movd <z9=%xmm11,>in9=%rcx
movd %xmm11,%rcx

# qhasm: in10 = z10
# asm 1: movd <z10=int6464#2,>in10=int64#5
# asm 2: movd <z10=%xmm1,>in10=%r8
movd %xmm1,%r8

# qhasm: in11 = z11
# asm 1: movd <z11=int6464#7,>in11=int64#6
# asm 2: movd <z11=%xmm6,>in11=%r9
movd %xmm6,%r9

# qhasm: z8 <<<= 96
# asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
# asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
pshufd $0x39,%xmm15,%xmm15

# qhasm: z9 <<<= 96
# asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
# asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
pshufd $0x39,%xmm11,%xmm11

# qhasm: z10 <<<= 96
# asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
# asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
pshufd $0x39,%xmm1,%xmm1

# qhasm: z11 <<<= 96
# asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
# asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
pshufd $0x39,%xmm6,%xmm6

# qhasm: (uint32) in8 ^= *(uint32 *) (m + 160)
# asm 1: xorl 160(<m=int64#2),<in8=int64#3d
# asm 2: xorl 160(<m=%rsi),<in8=%edx
xorl 160(%rsi),%edx

# qhasm: (uint32) in9 ^= *(uint32 *) (m + 164)
# asm 1: xorl 164(<m=int64#2),<in9=int64#4d
# asm 2: xorl 164(<m=%rsi),<in9=%ecx
xorl 164(%rsi),%ecx

# qhasm: (uint32) in10 ^= *(uint32 *) (m + 168)
# asm 1: xorl 168(<m=int64#2),<in10=int64#5d
# asm 2: xorl 168(<m=%rsi),<in10=%r8d
xorl 168(%rsi),%r8d

# qhasm: (uint32) in11 ^= *(uint32 *) (m + 172)
# asm 1: xorl 172(<m=int64#2),<in11=int64#6d
# asm 2: xorl 172(<m=%rsi),<in11=%r9d
xorl 172(%rsi),%r9d

# qhasm: *(uint32 *) (out + 160) = in8
# asm 1: movl <in8=int64#3d,160(<out=int64#1)
# asm 2: movl <in8=%edx,160(<out=%rdi)
movl %edx,160(%rdi)

# qhasm: *(uint32 *) (out + 164) = in9
# asm 1: movl <in9=int64#4d,164(<out=int64#1)
# asm 2: movl <in9=%ecx,164(<out=%rdi)
movl %ecx,164(%rdi)

# qhasm: *(uint32 *) (out + 168) = in10
# asm 1: movl <in10=int64#5d,168(<out=int64#1)
# asm 2: movl <in10=%r8d,168(<out=%rdi)
movl %r8d,168(%rdi)

# qhasm: *(uint32 *) (out + 172) = in11
# asm 1: movl <in11=int64#6d,172(<out=int64#1)
# asm 2: movl <in11=%r9d,172(<out=%rdi)
movl %r9d,172(%rdi)

# qhasm: in8 = z8
# asm 1: movd <z8=int6464#16,>in8=int64#3
# asm 2: movd <z8=%xmm15,>in8=%rdx
movd %xmm15,%rdx

# qhasm: in9 = z9
# asm 1: movd <z9=int6464#12,>in9=int64#4
# asm 2: movd <z9=%xmm11,>in9=%rcx
movd %xmm11,%rcx

# qhasm: in10 = z10
# asm 1: movd <z10=int6464#2,>in10=int64#5
# asm 2: movd <z10=%xmm1,>in10=%r8
movd %xmm1,%r8

# qhasm: in11 = z11
# asm 1: movd <z11=int6464#7,>in11=int64#6
# asm 2: movd <z11=%xmm6,>in11=%r9
movd %xmm6,%r9

# qhasm: (uint32) in8 ^= *(uint32 *) (m + 224)
# asm 1: xorl 224(<m=int64#2),<in8=int64#3d
# asm 2: xorl 224(<m=%rsi),<in8=%edx
xorl 224(%rsi),%edx

# qhasm: (uint32) in9 ^= *(uint32 *) (m + 228)
# asm 1: xorl 228(<m=int64#2),<in9=int64#4d
# asm 2: xorl 228(<m=%rsi),<in9=%ecx
xorl 228(%rsi),%ecx

# qhasm: (uint32) in10 ^= *(uint32 *) (m + 232)
# asm 1: xorl 232(<m=int64#2),<in10=int64#5d
# asm 2: xorl 232(<m=%rsi),<in10=%r8d
xorl 232(%rsi),%r8d

# qhasm: (uint32) in11 ^= *(uint32 *) (m + 236)
# asm 1: xorl 236(<m=int64#2),<in11=int64#6d
# asm 2: xorl 236(<m=%rsi),<in11=%r9d
xorl 236(%rsi),%r9d

# qhasm: *(uint32 *) (out + 224) = in8
# asm 1: movl <in8=int64#3d,224(<out=int64#1)
# asm 2: movl <in8=%edx,224(<out=%rdi)
movl %edx,224(%rdi)

# qhasm: *(uint32 *) (out + 228) = in9
# asm 1: movl <in9=int64#4d,228(<out=int64#1)
# asm 2: movl <in9=%ecx,228(<out=%rdi)
movl %ecx,228(%rdi)

# qhasm: *(uint32 *) (out + 232) = in10
# asm 1: movl <in10=int64#5d,232(<out=int64#1)
# asm 2: movl <in10=%r8d,232(<out=%rdi)
movl %r8d,232(%rdi)

# qhasm: *(uint32 *) (out + 236) = in11
# asm 1: movl <in11=int64#6d,236(<out=int64#1)
# asm 2: movl <in11=%r9d,236(<out=%rdi)
movl %r9d,236(%rdi)
# qhasm: uint32323232 z12 += orig12
# asm 1: paddd <orig12=stack128#11,<z12=int6464#14
# asm 2: paddd <orig12=160(%rsp),<z12=%xmm13
paddd 160(%rsp),%xmm13

# qhasm: uint32323232 z13 += orig13
# asm 1: paddd <orig13=stack128#14,<z13=int6464#10
# asm 2: paddd <orig13=208(%rsp),<z13=%xmm9
paddd 208(%rsp),%xmm9

# qhasm: uint32323232 z14 += orig14
# asm 1: paddd <orig14=stack128#17,<z14=int6464#4
# asm 2: paddd <orig14=256(%rsp),<z14=%xmm3
paddd 256(%rsp),%xmm3

# qhasm: uint32323232 z15 += orig15
# asm 1: paddd <orig15=stack128#7,<z15=int6464#3
# asm 2: paddd <orig15=96(%rsp),<z15=%xmm2
paddd 96(%rsp),%xmm2

# qhasm: in12 = z12
# asm 1: movd <z12=int6464#14,>in12=int64#3
# asm 2: movd <z12=%xmm13,>in12=%rdx
movd %xmm13,%rdx

# qhasm: in13 = z13
# asm 1: movd <z13=int6464#10,>in13=int64#4
# asm 2: movd <z13=%xmm9,>in13=%rcx
movd %xmm9,%rcx

# qhasm: in14 = z14
# asm 1: movd <z14=int6464#4,>in14=int64#5
# asm 2: movd <z14=%xmm3,>in14=%r8
movd %xmm3,%r8

# qhasm: in15 = z15
# asm 1: movd <z15=int6464#3,>in15=int64#6
# asm 2: movd <z15=%xmm2,>in15=%r9
movd %xmm2,%r9

# qhasm: z12 <<<= 96
# asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
# asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
pshufd $0x39,%xmm13,%xmm13

# qhasm: z13 <<<= 96
# asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
# asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
pshufd $0x39,%xmm9,%xmm9

# qhasm: z14 <<<= 96
# asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
# asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
pshufd $0x39,%xmm3,%xmm3

# qhasm: z15 <<<= 96
# asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
# asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
pshufd $0x39,%xmm2,%xmm2

# qhasm: (uint32) in12 ^= *(uint32 *) (m + 48)
# asm 1: xorl 48(<m=int64#2),<in12=int64#3d
# asm 2: xorl 48(<m=%rsi),<in12=%edx
xorl 48(%rsi),%edx

# qhasm: (uint32) in13 ^= *(uint32 *) (m + 52)
# asm 1: xorl 52(<m=int64#2),<in13=int64#4d
# asm 2: xorl 52(<m=%rsi),<in13=%ecx
xorl 52(%rsi),%ecx

# qhasm: (uint32) in14 ^= *(uint32 *) (m + 56)
# asm 1: xorl 56(<m=int64#2),<in14=int64#5d
# asm 2: xorl 56(<m=%rsi),<in14=%r8d
xorl 56(%rsi),%r8d

# qhasm: (uint32) in15 ^= *(uint32 *) (m + 60)
# asm 1: xorl 60(<m=int64#2),<in15=int64#6d
# asm 2: xorl 60(<m=%rsi),<in15=%r9d
xorl 60(%rsi),%r9d

# qhasm: *(uint32 *) (out + 48) = in12
# asm 1: movl <in12=int64#3d,48(<out=int64#1)
# asm 2: movl <in12=%edx,48(<out=%rdi)
movl %edx,48(%rdi)

# qhasm: *(uint32 *) (out + 52) = in13
# asm 1: movl <in13=int64#4d,52(<out=int64#1)
# asm 2: movl <in13=%ecx,52(<out=%rdi)
movl %ecx,52(%rdi)

# qhasm: *(uint32 *) (out + 56) = in14
# asm 1: movl <in14=int64#5d,56(<out=int64#1)
# asm 2: movl <in14=%r8d,56(<out=%rdi)
movl %r8d,56(%rdi)

# qhasm: *(uint32 *) (out + 60) = in15
# asm 1: movl <in15=int64#6d,60(<out=int64#1)
# asm 2: movl <in15=%r9d,60(<out=%rdi)
movl %r9d,60(%rdi)

# qhasm: in12 = z12
# asm 1: movd <z12=int6464#14,>in12=int64#3
# asm 2: movd <z12=%xmm13,>in12=%rdx
movd %xmm13,%rdx

# qhasm: in13 = z13
# asm 1: movd <z13=int6464#10,>in13=int64#4
# asm 2: movd <z13=%xmm9,>in13=%rcx
movd %xmm9,%rcx

# qhasm: in14 = z14
# asm 1: movd <z14=int6464#4,>in14=int64#5
# asm 2: movd <z14=%xmm3,>in14=%r8
movd %xmm3,%r8

# qhasm: in15 = z15
# asm 1: movd <z15=int6464#3,>in15=int64#6
# asm 2: movd <z15=%xmm2,>in15=%r9
movd %xmm2,%r9

# qhasm: z12 <<<= 96
# asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
# asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
pshufd $0x39,%xmm13,%xmm13

# qhasm: z13 <<<= 96
# asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
# asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
pshufd $0x39,%xmm9,%xmm9

# qhasm: z14 <<<= 96
# asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
# asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
pshufd $0x39,%xmm3,%xmm3

# qhasm: z15 <<<= 96
# asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
# asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
pshufd $0x39,%xmm2,%xmm2

# qhasm: (uint32) in12 ^= *(uint32 *) (m + 112)
# asm 1: xorl 112(<m=int64#2),<in12=int64#3d
# asm 2: xorl 112(<m=%rsi),<in12=%edx
xorl 112(%rsi),%edx

# qhasm: (uint32) in13 ^= *(uint32 *) (m + 116)
# asm 1: xorl 116(<m=int64#2),<in13=int64#4d
# asm 2: xorl 116(<m=%rsi),<in13=%ecx
xorl 116(%rsi),%ecx

# qhasm: (uint32) in14 ^= *(uint32 *) (m + 120)
# asm 1: xorl 120(<m=int64#2),<in14=int64#5d
# asm 2: xorl 120(<m=%rsi),<in14=%r8d
xorl 120(%rsi),%r8d

# qhasm: (uint32) in15 ^= *(uint32 *) (m + 124)
# asm 1: xorl 124(<m=int64#2),<in15=int64#6d
# asm 2: xorl 124(<m=%rsi),<in15=%r9d
xorl 124(%rsi),%r9d

# qhasm: *(uint32 *) (out + 112) = in12
# asm 1: movl <in12=int64#3d,112(<out=int64#1)
# asm 2: movl <in12=%edx,112(<out=%rdi)
movl %edx,112(%rdi)

# qhasm: *(uint32 *) (out + 116) = in13
# asm 1: movl <in13=int64#4d,116(<out=int64#1)
# asm 2: movl <in13=%ecx,116(<out=%rdi)
movl %ecx,116(%rdi)

# qhasm: *(uint32 *) (out + 120) = in14
# asm 1: movl <in14=int64#5d,120(<out=int64#1)
# asm 2: movl <in14=%r8d,120(<out=%rdi)
movl %r8d,120(%rdi)

# qhasm: *(uint32 *) (out + 124) = in15
# asm 1: movl <in15=int64#6d,124(<out=int64#1)
# asm 2: movl <in15=%r9d,124(<out=%rdi)
movl %r9d,124(%rdi)

# qhasm: in12 = z12
# asm 1: movd <z12=int6464#14,>in12=int64#3
# asm 2: movd <z12=%xmm13,>in12=%rdx
movd %xmm13,%rdx

# qhasm: in13 = z13
# asm 1: movd <z13=int6464#10,>in13=int64#4
# asm 2: movd <z13=%xmm9,>in13=%rcx
movd %xmm9,%rcx

# qhasm: in14 = z14
# asm 1: movd <z14=int6464#4,>in14=int64#5
# asm 2: movd <z14=%xmm3,>in14=%r8
movd %xmm3,%r8

# qhasm: in15 = z15
# asm 1: movd <z15=int6464#3,>in15=int64#6
# asm 2: movd <z15=%xmm2,>in15=%r9
movd %xmm2,%r9

# qhasm: z12 <<<= 96
# asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
# asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
pshufd $0x39,%xmm13,%xmm13

# qhasm: z13 <<<= 96
# asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
# asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
pshufd $0x39,%xmm9,%xmm9

# qhasm: z14 <<<= 96
# asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
# asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
pshufd $0x39,%xmm3,%xmm3

# qhasm: z15 <<<= 96
# asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
# asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
pshufd $0x39,%xmm2,%xmm2

# qhasm: (uint32) in12 ^= *(uint32 *) (m + 176)
# asm 1: xorl 176(<m=int64#2),<in12=int64#3d
# asm 2: xorl 176(<m=%rsi),<in12=%edx
xorl 176(%rsi),%edx

# qhasm: (uint32) in13 ^= *(uint32 *) (m + 180)
# asm 1: xorl 180(<m=int64#2),<in13=int64#4d
# asm 2: xorl 180(<m=%rsi),<in13=%ecx
xorl 180(%rsi),%ecx

# qhasm: (uint32) in14 ^= *(uint32 *) (m + 184)
# asm 1: xorl 184(<m=int64#2),<in14=int64#5d
# asm 2: xorl 184(<m=%rsi),<in14=%r8d
xorl 184(%rsi),%r8d

# qhasm: (uint32) in15 ^= *(uint32 *) (m + 188)
# asm 1: xorl 188(<m=int64#2),<in15=int64#6d
# asm 2: xorl 188(<m=%rsi),<in15=%r9d
xorl 188(%rsi),%r9d

# qhasm: *(uint32 *) (out + 176) = in12
# asm 1: movl <in12=int64#3d,176(<out=int64#1)
# asm 2: movl <in12=%edx,176(<out=%rdi)
movl %edx,176(%rdi)

# qhasm: *(uint32 *) (out + 180) = in13
# asm 1: movl <in13=int64#4d,180(<out=int64#1)
# asm 2: movl <in13=%ecx,180(<out=%rdi)
movl %ecx,180(%rdi)

# qhasm: *(uint32 *) (out + 184) = in14
# asm 1: movl <in14=int64#5d,184(<out=int64#1)
# asm 2: movl <in14=%r8d,184(<out=%rdi)
movl %r8d,184(%rdi)

# qhasm: *(uint32 *) (out + 188) = in15
# asm 1: movl <in15=int64#6d,188(<out=int64#1)
# asm 2: movl <in15=%r9d,188(<out=%rdi)
movl %r9d,188(%rdi)

# qhasm: in12 = z12
# asm 1: movd <z12=int6464#14,>in12=int64#3
# asm 2: movd <z12=%xmm13,>in12=%rdx
movd %xmm13,%rdx

# qhasm: in13 = z13
# asm 1: movd <z13=int6464#10,>in13=int64#4
# asm 2: movd <z13=%xmm9,>in13=%rcx
movd %xmm9,%rcx

# qhasm: in14 = z14
# asm 1: movd <z14=int6464#4,>in14=int64#5
# asm 2: movd <z14=%xmm3,>in14=%r8
movd %xmm3,%r8

# qhasm: in15 = z15
# asm 1: movd <z15=int6464#3,>in15=int64#6
# asm 2: movd <z15=%xmm2,>in15=%r9
movd %xmm2,%r9

# qhasm: (uint32) in12 ^= *(uint32 *) (m + 240)
# asm 1: xorl 240(<m=int64#2),<in12=int64#3d
# asm 2: xorl 240(<m=%rsi),<in12=%edx
xorl 240(%rsi),%edx

# qhasm: (uint32) in13 ^= *(uint32 *) (m + 244)
# asm 1: xorl 244(<m=int64#2),<in13=int64#4d
# asm 2: xorl 244(<m=%rsi),<in13=%ecx
xorl 244(%rsi),%ecx

# qhasm: (uint32) in14 ^= *(uint32 *) (m + 248)
# asm 1: xorl 248(<m=int64#2),<in14=int64#5d
# asm 2: xorl 248(<m=%rsi),<in14=%r8d
xorl 248(%rsi),%r8d

# qhasm: (uint32) in15 ^= *(uint32 *) (m + 252)
# asm 1: xorl 252(<m=int64#2),<in15=int64#6d
# asm 2: xorl 252(<m=%rsi),<in15=%r9d
xorl 252(%rsi),%r9d

# qhasm: *(uint32 *) (out + 240) = in12
# asm 1: movl <in12=int64#3d,240(<out=int64#1)
# asm 2: movl <in12=%edx,240(<out=%rdi)
movl %edx,240(%rdi)

# qhasm: *(uint32 *) (out + 244) = in13
# asm 1: movl <in13=int64#4d,244(<out=int64#1)
# asm 2: movl <in13=%ecx,244(<out=%rdi)
movl %ecx,244(%rdi)

# qhasm: *(uint32 *) (out + 248) = in14
# asm 1: movl <in14=int64#5d,248(<out=int64#1)
# asm 2: movl <in14=%r8d,248(<out=%rdi)
movl %r8d,248(%rdi)

# qhasm: *(uint32 *) (out + 252) = in15
# asm 1: movl <in15=int64#6d,252(<out=int64#1)
# asm 2: movl <in15=%r9d,252(<out=%rdi)
movl %r9d,252(%rdi)
# qhasm: bytes = bytes_backup
# asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6
# asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9
movq 408(%rsp),%r9

# qhasm: bytes -= 256
# asm 1: sub $256,<bytes=int64#6
# asm 2: sub $256,<bytes=%r9
sub $256,%r9

# qhasm: m += 256
# asm 1: add $256,<m=int64#2
# asm 2: add $256,<m=%rsi
add $256,%rsi

# qhasm: out += 256
# asm 1: add $256,<out=int64#1
# asm 2: add $256,<out=%rdi
add $256,%rdi

# qhasm: unsigned<? bytes - 256
# asm 1: cmp $256,<bytes=int64#6
# asm 2: cmp $256,<bytes=%r9
cmp $256,%r9
# comment:fp stack unchanged by jump

# qhasm: goto bytesatleast256 if !unsigned<
jae ._bytesatleast256

# qhasm: unsigned>? bytes - 0
# asm 1: cmp $0,<bytes=int64#6
# asm 2: cmp $0,<bytes=%r9
cmp $0,%r9
# comment:fp stack unchanged by jump

# qhasm: goto done if !unsigned>
jbe ._done
# comment:fp stack unchanged by fallthrough
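
The bookkeeping above closes the 256-byte fast path: bytes, m and out each advance by 256, jae loops back to ._bytesatleast256 while at least 256 bytes remain, jbe ._done exits when nothing is left, and otherwise control falls through into the tail path below. As a control-flow sketch in C (illustration only; four_blocks and tail_1_to_255 are hypothetical stand-ins for the paths above and below, not names from this package):

    #include <stddef.h>
    #include <stdint.h>

    void four_blocks(uint8_t *out, const uint8_t *m);              /* hypothetical */
    void tail_1_to_255(uint8_t *out, const uint8_t *m, size_t n);  /* hypothetical */

    void stream_xor(uint8_t *out, const uint8_t *m, size_t bytes) {
        while (bytes >= 256) {                  /* jae ._bytesatleast256 */
            four_blocks(out, m);
            bytes -= 256; m += 256; out += 256;
        }
        if (bytes == 0) return;                 /* jbe ._done */
        tail_1_to_255(out, m, bytes);           /* ._bytesbetween1and255 */
    }
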
# qhasm: bytesbetween1and255:
._bytesbetween1and255:

# qhasm: unsigned<? bytes - 64
# asm 1: cmp $64,<bytes=int64#6
# asm 2: cmp $64,<bytes=%r9
cmp $64,%r9
# comment:fp stack unchanged by jump

# qhasm: goto nocopy if !unsigned<
jae ._nocopy

# qhasm: ctarget = out
# asm 1: mov <out=int64#1,>ctarget=int64#3
# asm 2: mov <out=%rdi,>ctarget=%rdx
mov %rdi,%rdx

# qhasm: out = &tmp
# asm 1: leaq <tmp=stack512#1,>out=int64#1
# asm 2: leaq <tmp=416(%rsp),>out=%rdi
leaq 416(%rsp),%rdi

# qhasm: i = bytes
# asm 1: mov <bytes=int64#6,>i=int64#4
# asm 2: mov <bytes=%r9,>i=%rcx
mov %r9,%rcx

# qhasm: while (i) { *out++ = *m++; --i }
rep movsb

# qhasm: out = &tmp
# asm 1: leaq <tmp=stack512#1,>out=int64#1
# asm 2: leaq <tmp=416(%rsp),>out=%rdi
leaq 416(%rsp),%rdi

# qhasm: m = &tmp
# asm 1: leaq <tmp=stack512#1,>m=int64#2
# asm 2: leaq <tmp=416(%rsp),>m=%rsi
leaq 416(%rsp),%rsi
# comment:fp stack unchanged by fallthrough

# qhasm: nocopy:
._nocopy:
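
When fewer than 64 bytes remain, the code above stages them through the 64-byte stack buffer tmp: the real destination is parked in ctarget, rep movsb copies the bytes-long input into tmp, and both m and out are repointed at tmp so the code at ._nocopy always works on a full block. The copy back from tmp to ctarget has to happen after the block is produced, which lies beyond this excerpt. Roughly, as a sketch (not the package's code; one_block is a hypothetical stand-in for the 64-byte step):

    #include <stdint.h>
    #include <string.h>

    void one_block(uint8_t *out, const uint8_t *m);   /* hypothetical */

    void tail_block(uint8_t *out, const uint8_t *m, size_t bytes) {
        uint8_t tmp[64];
        uint8_t *ctarget = 0;
        if (bytes < 64) {               /* otherwise jae ._nocopy is taken */
            ctarget = out;              /* remember the real destination */
            memcpy(tmp, m, bytes);      /* "while (i) { *out++ = *m++; --i }" */
            m = tmp;
            out = tmp;
        }
        /* ._nocopy: */
        one_block(out, m);
        if (ctarget)                    /* copy-back; past this excerpt */
            memcpy(ctarget, tmp, bytes);
    }
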
# qhasm: bytes_backup = bytes
# asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8
# asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp)
movq %r9,408(%rsp)

# qhasm: diag0 = x0
# asm 1: movdqa <x0=stack128#4,>diag0=int6464#1
# asm 2: movdqa <x0=48(%rsp),>diag0=%xmm0
movdqa 48(%rsp),%xmm0

# qhasm: diag1 = x1
# asm 1: movdqa <x1=stack128#1,>diag1=int6464#2
# asm 2: movdqa <x1=0(%rsp),>diag1=%xmm1
movdqa 0(%rsp),%xmm1

# qhasm: diag2 = x2
# asm 1: movdqa <x2=stack128#2,>diag2=int6464#3
# asm 2: movdqa <x2=16(%rsp),>diag2=%xmm2
movdqa 16(%rsp),%xmm2

# qhasm: diag3 = x3
# asm 1: movdqa <x3=stack128#3,>diag3=int6464#4
# asm 2: movdqa <x3=32(%rsp),>diag3=%xmm3
movdqa 32(%rsp),%xmm3

# qhasm: a0 = diag1
# asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
# asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
movdqa %xmm1,%xmm4

# qhasm: i = 12
# asm 1: mov $12,>i=int64#4
# asm 2: mov $12,>i=%rcx
mov $12,%rcx

# qhasm: mainloop2:
._mainloop2:
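
From ._mainloop2 on, a single 64-byte block is processed with the state held diagonally in diag0..diag3, so each quarter-round's four additions, shifts and XORs run in parallel across the lanes. A 32-bit rotate by c is synthesized as pslld $c plus psrld $(32-c) plus two pxor, and the pshufd $0x93/$0x4e/$0x39 shuffles between steps realign the diagonals for the next quarter-round. If this loop, like ._mainloop1 above, retires two rounds per pass and decrements i by 2, the mov $12 initializer gives twelve rounds, i.e. the Salsa20/12 core. One rotate-XOR step as an SSE2-intrinsics sketch (illustration only; rotate_xor is not a name from this package):

    #include <emmintrin.h>

    /* target ^= (x + y) <<< c, lane-wise, as in the pslld/psrld/pxor
     * groups below; e.g. the first step is
     * diag3 = rotate_xor(diag3, diag1, diag0, 7). */
    static __m128i rotate_xor(__m128i target, __m128i x, __m128i y, int c) {
        __m128i sum   = _mm_add_epi32(x, y);          /* paddd         */
        __m128i left  = _mm_slli_epi32(sum, c);       /* pslld $c      */
        __m128i right = _mm_srli_epi32(sum, 32 - c);  /* psrld $(32-c) */
        return _mm_xor_si128(_mm_xor_si128(target, left), right); /* pxor, pxor */
    }
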
# qhasm: uint32323232 a0 += diag0
# asm 1: paddd <diag0=int6464#1,<a0=int6464#5
# asm 2: paddd <diag0=%xmm0,<a0=%xmm4
paddd %xmm0,%xmm4

# qhasm: a1 = diag0
# asm 1: movdqa <diag0=int6464#1,>a1=int6464#6
# asm 2: movdqa <diag0=%xmm0,>a1=%xmm5
movdqa %xmm0,%xmm5

# qhasm: b0 = a0
# asm 1: movdqa <a0=int6464#5,>b0=int6464#7
# asm 2: movdqa <a0=%xmm4,>b0=%xmm6
movdqa %xmm4,%xmm6

# qhasm: uint32323232 a0 <<= 7
# asm 1: pslld $7,<a0=int6464#5
# asm 2: pslld $7,<a0=%xmm4
pslld $7,%xmm4

# qhasm: uint32323232 b0 >>= 25
# asm 1: psrld $25,<b0=int6464#7
# asm 2: psrld $25,<b0=%xmm6
psrld $25,%xmm6

# qhasm: diag3 ^= a0
# asm 1: pxor <a0=int6464#5,<diag3=int6464#4
# asm 2: pxor <a0=%xmm4,<diag3=%xmm3
pxor %xmm4,%xmm3

# qhasm: diag3 ^= b0
# asm 1: pxor <b0=int6464#7,<diag3=int6464#4
# asm 2: pxor <b0=%xmm6,<diag3=%xmm3
pxor %xmm6,%xmm3

# qhasm: uint32323232 a1 += diag3
# asm 1: paddd <diag3=int6464#4,<a1=int6464#6
# asm 2: paddd <diag3=%xmm3,<a1=%xmm5
paddd %xmm3,%xmm5

# qhasm: a2 = diag3
# asm 1: movdqa <diag3=int6464#4,>a2=int6464#5
# asm 2: movdqa <diag3=%xmm3,>a2=%xmm4
movdqa %xmm3,%xmm4

# qhasm: b1 = a1
# asm 1: movdqa <a1=int6464#6,>b1=int6464#7
# asm 2: movdqa <a1=%xmm5,>b1=%xmm6
movdqa %xmm5,%xmm6

# qhasm: uint32323232 a1 <<= 9
# asm 1: pslld $9,<a1=int6464#6
# asm 2: pslld $9,<a1=%xmm5
pslld $9,%xmm5

# qhasm: uint32323232 b1 >>= 23
# asm 1: psrld $23,<b1=int6464#7
# asm 2: psrld $23,<b1=%xmm6
psrld $23,%xmm6

# qhasm: diag2 ^= a1
# asm 1: pxor <a1=int6464#6,<diag2=int6464#3
# asm 2: pxor <a1=%xmm5,<diag2=%xmm2
pxor %xmm5,%xmm2

# qhasm: diag3 <<<= 32
# asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4
# asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3
pshufd $0x93,%xmm3,%xmm3

# qhasm: diag2 ^= b1
# asm 1: pxor <b1=int6464#7,<diag2=int6464#3
# asm 2: pxor <b1=%xmm6,<diag2=%xmm2
pxor %xmm6,%xmm2

# qhasm: uint32323232 a2 += diag2
# asm 1: paddd <diag2=int6464#3,<a2=int6464#5
# asm 2: paddd <diag2=%xmm2,<a2=%xmm4
paddd %xmm2,%xmm4

# qhasm: a3 = diag2
# asm 1: movdqa <diag2=int6464#3,>a3=int6464#6
# asm 2: movdqa <diag2=%xmm2,>a3=%xmm5
movdqa %xmm2,%xmm5

# qhasm: b2 = a2
# asm 1: movdqa <a2=int6464#5,>b2=int6464#7
# asm 2: movdqa <a2=%xmm4,>b2=%xmm6
movdqa %xmm4,%xmm6

# qhasm: uint32323232 a2 <<= 13
# asm 1: pslld $13,<a2=int6464#5
# asm 2: pslld $13,<a2=%xmm4
pslld $13,%xmm4

# qhasm: uint32323232 b2 >>= 19
# asm 1: psrld $19,<b2=int6464#7
# asm 2: psrld $19,<b2=%xmm6
psrld $19,%xmm6

# qhasm: diag1 ^= a2
# asm 1: pxor <a2=int6464#5,<diag1=int6464#2
# asm 2: pxor <a2=%xmm4,<diag1=%xmm1
pxor %xmm4,%xmm1

# qhasm: diag2 <<<= 64
# asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
# asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
pshufd $0x4e,%xmm2,%xmm2

# qhasm: diag1 ^= b2
# asm 1: pxor <b2=int6464#7,<diag1=int6464#2
# asm 2: pxor <b2=%xmm6,<diag1=%xmm1
pxor %xmm6,%xmm1

# qhasm: uint32323232 a3 += diag1
# asm 1: paddd <diag1=int6464#2,<a3=int6464#6
# asm 2: paddd <diag1=%xmm1,<a3=%xmm5
paddd %xmm1,%xmm5

# qhasm: a4 = diag3
# asm 1: movdqa <diag3=int6464#4,>a4=int6464#5
# asm 2: movdqa <diag3=%xmm3,>a4=%xmm4
movdqa %xmm3,%xmm4

# qhasm: b3 = a3
# asm 1: movdqa <a3=int6464#6,>b3=int6464#7
# asm 2: movdqa <a3=%xmm5,>b3=%xmm6
movdqa %xmm5,%xmm6

# qhasm: uint32323232 a3 <<= 18
# asm 1: pslld $18,<a3=int6464#6
# asm 2: pslld $18,<a3=%xmm5
pslld $18,%xmm5

# qhasm: uint32323232 b3 >>= 14
# asm 1: psrld $14,<b3=int6464#7
# asm 2: psrld $14,<b3=%xmm6
psrld $14,%xmm6

# qhasm: diag0 ^= a3
# asm 1: pxor <a3=int6464#6,<diag0=int6464#1
# asm 2: pxor <a3=%xmm5,<diag0=%xmm0
pxor %xmm5,%xmm0

# qhasm: diag1 <<<= 96
# asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
# asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
pshufd $0x39,%xmm1,%xmm1

# qhasm: diag0 ^= b3
# asm 1: pxor <b3=int6464#7,<diag0=int6464#1
# asm 2: pxor <b3=%xmm6,<diag0=%xmm0
pxor %xmm6,%xmm0

# qhasm: uint32323232 a4 += diag0
# asm 1: paddd <diag0=int6464#1,<a4=int6464#5
# asm 2: paddd <diag0=%xmm0,<a4=%xmm4
paddd %xmm0,%xmm4

# qhasm: a5 = diag0
# asm 1: movdqa <diag0=int6464#1,>a5=int6464#6
# asm 2: movdqa <diag0=%xmm0,>a5=%xmm5
movdqa %xmm0,%xmm5

# qhasm: b4 = a4
# asm 1: movdqa <a4=int6464#5,>b4=int6464#7
# asm 2: movdqa <a4=%xmm4,>b4=%xmm6
movdqa %xmm4,%xmm6

# qhasm: uint32323232 a4 <<= 7
# asm 1: pslld $7,<a4=int6464#5
# asm 2: pslld $7,<a4=%xmm4
pslld $7,%xmm4

# qhasm: uint32323232 b4 >>= 25
# asm 1: psrld $25,<b4=int6464#7
# asm 2: psrld $25,<b4=%xmm6
psrld $25,%xmm6

# qhasm: diag1 ^= a4
# asm 1: pxor <a4=int6464#5,<diag1=int6464#2
# asm 2: pxor <a4=%xmm4,<diag1=%xmm1
pxor %xmm4,%xmm1
+
# qhasm: diag1 ^= b4
|
|
3905
|
+
# asm 1: pxor <b4=int6464#7,<diag1=int6464#2
|
|
3906
|
+
# asm 2: pxor <b4=%xmm6,<diag1=%xmm1
|
|
3907
|
+
pxor %xmm6,%xmm1
|
|
3908
|
+
|
|
3909
|
+
# qhasm: uint32323232 a5 += diag1
|
|
3910
|
+
# asm 1: paddd <diag1=int6464#2,<a5=int6464#6
|
|
3911
|
+
# asm 2: paddd <diag1=%xmm1,<a5=%xmm5
|
|
3912
|
+
paddd %xmm1,%xmm5
|
|
3913
|
+
|
|
3914
|
+
# qhasm: a6 = diag1
|
|
3915
|
+
# asm 1: movdqa <diag1=int6464#2,>a6=int6464#5
|
|
3916
|
+
# asm 2: movdqa <diag1=%xmm1,>a6=%xmm4
|
|
3917
|
+
movdqa %xmm1,%xmm4
|
|
3918
|
+
|
|
3919
|
+
# qhasm: b5 = a5
|
|
3920
|
+
# asm 1: movdqa <a5=int6464#6,>b5=int6464#7
|
|
3921
|
+
# asm 2: movdqa <a5=%xmm5,>b5=%xmm6
|
|
3922
|
+
movdqa %xmm5,%xmm6
|
|
3923
|
+
|
|
3924
|
+
# qhasm: uint32323232 a5 <<= 9
|
|
3925
|
+
# asm 1: pslld $9,<a5=int6464#6
|
|
3926
|
+
# asm 2: pslld $9,<a5=%xmm5
|
|
3927
|
+
pslld $9,%xmm5
|
|
3928
|
+
|
|
3929
|
+
# qhasm: uint32323232 b5 >>= 23
|
|
3930
|
+
# asm 1: psrld $23,<b5=int6464#7
|
|
3931
|
+
# asm 2: psrld $23,<b5=%xmm6
|
|
3932
|
+
psrld $23,%xmm6
|
|
3933
|
+
|
|
3934
|
+
# qhasm: diag2 ^= a5
|
|
3935
|
+
# asm 1: pxor <a5=int6464#6,<diag2=int6464#3
|
|
3936
|
+
# asm 2: pxor <a5=%xmm5,<diag2=%xmm2
|
|
3937
|
+
pxor %xmm5,%xmm2
|
|
3938
|
+
|
|
3939
|
+
# qhasm: diag1 <<<= 32
|
|
3940
|
+
# asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2
|
|
3941
|
+
# asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1
|
|
3942
|
+
pshufd $0x93,%xmm1,%xmm1
|
|
3943
|
+
|
|
3944
|
+
# qhasm: diag2 ^= b5
|
|
3945
|
+
# asm 1: pxor <b5=int6464#7,<diag2=int6464#3
|
|
3946
|
+
# asm 2: pxor <b5=%xmm6,<diag2=%xmm2
|
|
3947
|
+
pxor %xmm6,%xmm2
|
|
3948
|
+
|
|
3949
|
+
# qhasm: uint32323232 a6 += diag2
|
|
3950
|
+
# asm 1: paddd <diag2=int6464#3,<a6=int6464#5
|
|
3951
|
+
# asm 2: paddd <diag2=%xmm2,<a6=%xmm4
|
|
3952
|
+
paddd %xmm2,%xmm4
|
|
3953
|
+
|
|
3954
|
+
# qhasm: a7 = diag2
|
|
3955
|
+
# asm 1: movdqa <diag2=int6464#3,>a7=int6464#6
|
|
3956
|
+
# asm 2: movdqa <diag2=%xmm2,>a7=%xmm5
|
|
3957
|
+
movdqa %xmm2,%xmm5
|
|
3958
|
+
|
|
3959
|
+
# qhasm: b6 = a6
|
|
3960
|
+
# asm 1: movdqa <a6=int6464#5,>b6=int6464#7
|
|
3961
|
+
# asm 2: movdqa <a6=%xmm4,>b6=%xmm6
|
|
3962
|
+
movdqa %xmm4,%xmm6
|
|
3963
|
+
|
|
3964
|
+
# qhasm: uint32323232 a6 <<= 13
|
|
3965
|
+
# asm 1: pslld $13,<a6=int6464#5
|
|
3966
|
+
# asm 2: pslld $13,<a6=%xmm4
|
|
3967
|
+
pslld $13,%xmm4
|
|
3968
|
+
|
|
3969
|
+
# qhasm: uint32323232 b6 >>= 19
|
|
3970
|
+
# asm 1: psrld $19,<b6=int6464#7
|
|
3971
|
+
# asm 2: psrld $19,<b6=%xmm6
|
|
3972
|
+
psrld $19,%xmm6
|
|
3973
|
+
|
|
3974
|
+
# qhasm: diag3 ^= a6
|
|
3975
|
+
# asm 1: pxor <a6=int6464#5,<diag3=int6464#4
|
|
3976
|
+
# asm 2: pxor <a6=%xmm4,<diag3=%xmm3
|
|
3977
|
+
pxor %xmm4,%xmm3
|
|
3978
|
+
|
|
3979
|
+
# qhasm: diag2 <<<= 64
|
|
3980
|
+
# asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
|
|
3981
|
+
# asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
|
|
3982
|
+
pshufd $0x4e,%xmm2,%xmm2
|
|
3983
|
+
|
|
3984
|
+
# qhasm: diag3 ^= b6
|
|
3985
|
+
# asm 1: pxor <b6=int6464#7,<diag3=int6464#4
|
|
3986
|
+
# asm 2: pxor <b6=%xmm6,<diag3=%xmm3
|
|
3987
|
+
pxor %xmm6,%xmm3
|
|
3988
|
+
|
|
3989
|
+
# qhasm: uint32323232 a7 += diag3
|
|
3990
|
+
# asm 1: paddd <diag3=int6464#4,<a7=int6464#6
|
|
3991
|
+
# asm 2: paddd <diag3=%xmm3,<a7=%xmm5
|
|
3992
|
+
paddd %xmm3,%xmm5
|
|
3993
|
+
|
|
3994
|
+
# qhasm: a0 = diag1
|
|
3995
|
+
# asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
|
|
3996
|
+
# asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
|
|
3997
|
+
movdqa %xmm1,%xmm4
|
|
3998
|
+
|
|
3999
|
+
# qhasm: b7 = a7
|
|
4000
|
+
# asm 1: movdqa <a7=int6464#6,>b7=int6464#7
|
|
4001
|
+
# asm 2: movdqa <a7=%xmm5,>b7=%xmm6
|
|
4002
|
+
movdqa %xmm5,%xmm6
|
|
4003
|
+
|
|
4004
|
+
# qhasm: uint32323232 a7 <<= 18
|
|
4005
|
+
# asm 1: pslld $18,<a7=int6464#6
|
|
4006
|
+
# asm 2: pslld $18,<a7=%xmm5
|
|
4007
|
+
pslld $18,%xmm5
|
|
4008
|
+
|
|
4009
|
+
# qhasm: uint32323232 b7 >>= 14
|
|
4010
|
+
# asm 1: psrld $14,<b7=int6464#7
|
|
4011
|
+
# asm 2: psrld $14,<b7=%xmm6
|
|
4012
|
+
psrld $14,%xmm6
|
|
4013
|
+
|
|
4014
|
+
# qhasm: diag0 ^= a7
|
|
4015
|
+
# asm 1: pxor <a7=int6464#6,<diag0=int6464#1
|
|
4016
|
+
# asm 2: pxor <a7=%xmm5,<diag0=%xmm0
|
|
4017
|
+
pxor %xmm5,%xmm0
|
|
4018
|
+
|
|
4019
|
+
# qhasm: diag3 <<<= 96
|
|
4020
|
+
# asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
|
|
4021
|
+
# asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
|
|
4022
|
+
pshufd $0x39,%xmm3,%xmm3
|
|
4023
|
+
|
|
4024
|
+
# qhasm: diag0 ^= b7
|
|
4025
|
+
# asm 1: pxor <b7=int6464#7,<diag0=int6464#1
|
|
4026
|
+
# asm 2: pxor <b7=%xmm6,<diag0=%xmm0
|
|
4027
|
+
pxor %xmm6,%xmm0
|
|
4028
|
+
|
|
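The qhasm "<<<= 32/64/96" lines above are not data rotates: they rotate the four 32-bit lanes of an xmm register so the diagonals line up for the next half-round. The pshufd immediates 0x93, 0x4e and 0x39 encode the three lane permutations. A sketch of the equivalence with SSE2 intrinsics (an illustration, not code from this package):

    #include <emmintrin.h>

    static __m128i lanes_rotl_32(__m128i x)   /* <<<= 32, pshufd $0x93 */
    { return _mm_shuffle_epi32(x, 0x93); }

    static __m128i lanes_rotl_64(__m128i x)   /* <<<= 64, pshufd $0x4e */
    { return _mm_shuffle_epi32(x, 0x4e); }

    static __m128i lanes_rotl_96(__m128i x)   /* <<<= 96, pshufd $0x39 */
    { return _mm_shuffle_epi32(x, 0x39); }

The block below repeats the same double-round a second time, so each pass through mainloop2 performs four rounds before i is decremented by 4.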
+# qhasm: uint32323232 a0 += diag0
+# asm 1: paddd <diag0=int6464#1,<a0=int6464#5
+# asm 2: paddd <diag0=%xmm0,<a0=%xmm4
+paddd %xmm0,%xmm4
+
+# qhasm: a1 = diag0
+# asm 1: movdqa <diag0=int6464#1,>a1=int6464#6
+# asm 2: movdqa <diag0=%xmm0,>a1=%xmm5
+movdqa %xmm0,%xmm5
+
+# qhasm: b0 = a0
+# asm 1: movdqa <a0=int6464#5,>b0=int6464#7
+# asm 2: movdqa <a0=%xmm4,>b0=%xmm6
+movdqa %xmm4,%xmm6
+
+# qhasm: uint32323232 a0 <<= 7
+# asm 1: pslld $7,<a0=int6464#5
+# asm 2: pslld $7,<a0=%xmm4
+pslld $7,%xmm4
+
+# qhasm: uint32323232 b0 >>= 25
+# asm 1: psrld $25,<b0=int6464#7
+# asm 2: psrld $25,<b0=%xmm6
+psrld $25,%xmm6
+
+# qhasm: diag3 ^= a0
+# asm 1: pxor <a0=int6464#5,<diag3=int6464#4
+# asm 2: pxor <a0=%xmm4,<diag3=%xmm3
+pxor %xmm4,%xmm3
+
+# qhasm: diag3 ^= b0
+# asm 1: pxor <b0=int6464#7,<diag3=int6464#4
+# asm 2: pxor <b0=%xmm6,<diag3=%xmm3
+pxor %xmm6,%xmm3
+
+# qhasm: uint32323232 a1 += diag3
+# asm 1: paddd <diag3=int6464#4,<a1=int6464#6
+# asm 2: paddd <diag3=%xmm3,<a1=%xmm5
+paddd %xmm3,%xmm5
+
+# qhasm: a2 = diag3
+# asm 1: movdqa <diag3=int6464#4,>a2=int6464#5
+# asm 2: movdqa <diag3=%xmm3,>a2=%xmm4
+movdqa %xmm3,%xmm4
+
+# qhasm: b1 = a1
+# asm 1: movdqa <a1=int6464#6,>b1=int6464#7
+# asm 2: movdqa <a1=%xmm5,>b1=%xmm6
+movdqa %xmm5,%xmm6
+
+# qhasm: uint32323232 a1 <<= 9
+# asm 1: pslld $9,<a1=int6464#6
+# asm 2: pslld $9,<a1=%xmm5
+pslld $9,%xmm5
+
+# qhasm: uint32323232 b1 >>= 23
+# asm 1: psrld $23,<b1=int6464#7
+# asm 2: psrld $23,<b1=%xmm6
+psrld $23,%xmm6
+
+# qhasm: diag2 ^= a1
+# asm 1: pxor <a1=int6464#6,<diag2=int6464#3
+# asm 2: pxor <a1=%xmm5,<diag2=%xmm2
+pxor %xmm5,%xmm2
+
+# qhasm: diag3 <<<= 32
+# asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4
+# asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3
+pshufd $0x93,%xmm3,%xmm3
+
+# qhasm: diag2 ^= b1
+# asm 1: pxor <b1=int6464#7,<diag2=int6464#3
+# asm 2: pxor <b1=%xmm6,<diag2=%xmm2
+pxor %xmm6,%xmm2
+
+# qhasm: uint32323232 a2 += diag2
+# asm 1: paddd <diag2=int6464#3,<a2=int6464#5
+# asm 2: paddd <diag2=%xmm2,<a2=%xmm4
+paddd %xmm2,%xmm4
+
+# qhasm: a3 = diag2
+# asm 1: movdqa <diag2=int6464#3,>a3=int6464#6
+# asm 2: movdqa <diag2=%xmm2,>a3=%xmm5
+movdqa %xmm2,%xmm5
+
+# qhasm: b2 = a2
+# asm 1: movdqa <a2=int6464#5,>b2=int6464#7
+# asm 2: movdqa <a2=%xmm4,>b2=%xmm6
+movdqa %xmm4,%xmm6
+
+# qhasm: uint32323232 a2 <<= 13
+# asm 1: pslld $13,<a2=int6464#5
+# asm 2: pslld $13,<a2=%xmm4
+pslld $13,%xmm4
+
+# qhasm: uint32323232 b2 >>= 19
+# asm 1: psrld $19,<b2=int6464#7
+# asm 2: psrld $19,<b2=%xmm6
+psrld $19,%xmm6
+
+# qhasm: diag1 ^= a2
+# asm 1: pxor <a2=int6464#5,<diag1=int6464#2
+# asm 2: pxor <a2=%xmm4,<diag1=%xmm1
+pxor %xmm4,%xmm1
+
+# qhasm: diag2 <<<= 64
+# asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
+# asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
+pshufd $0x4e,%xmm2,%xmm2
+
+# qhasm: diag1 ^= b2
+# asm 1: pxor <b2=int6464#7,<diag1=int6464#2
+# asm 2: pxor <b2=%xmm6,<diag1=%xmm1
+pxor %xmm6,%xmm1
+
+# qhasm: uint32323232 a3 += diag1
+# asm 1: paddd <diag1=int6464#2,<a3=int6464#6
+# asm 2: paddd <diag1=%xmm1,<a3=%xmm5
+paddd %xmm1,%xmm5
+
+# qhasm: a4 = diag3
+# asm 1: movdqa <diag3=int6464#4,>a4=int6464#5
+# asm 2: movdqa <diag3=%xmm3,>a4=%xmm4
+movdqa %xmm3,%xmm4
+
+# qhasm: b3 = a3
+# asm 1: movdqa <a3=int6464#6,>b3=int6464#7
+# asm 2: movdqa <a3=%xmm5,>b3=%xmm6
+movdqa %xmm5,%xmm6
+
+# qhasm: uint32323232 a3 <<= 18
+# asm 1: pslld $18,<a3=int6464#6
+# asm 2: pslld $18,<a3=%xmm5
+pslld $18,%xmm5
+
+# qhasm: uint32323232 b3 >>= 14
+# asm 1: psrld $14,<b3=int6464#7
+# asm 2: psrld $14,<b3=%xmm6
+psrld $14,%xmm6
+
+# qhasm: diag0 ^= a3
+# asm 1: pxor <a3=int6464#6,<diag0=int6464#1
+# asm 2: pxor <a3=%xmm5,<diag0=%xmm0
+pxor %xmm5,%xmm0
+
+# qhasm: diag1 <<<= 96
+# asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
+# asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
+pshufd $0x39,%xmm1,%xmm1
+
+# qhasm: diag0 ^= b3
+# asm 1: pxor <b3=int6464#7,<diag0=int6464#1
+# asm 2: pxor <b3=%xmm6,<diag0=%xmm0
+pxor %xmm6,%xmm0
+
+# qhasm: uint32323232 a4 += diag0
+# asm 1: paddd <diag0=int6464#1,<a4=int6464#5
+# asm 2: paddd <diag0=%xmm0,<a4=%xmm4
+paddd %xmm0,%xmm4
+
+# qhasm: a5 = diag0
+# asm 1: movdqa <diag0=int6464#1,>a5=int6464#6
+# asm 2: movdqa <diag0=%xmm0,>a5=%xmm5
+movdqa %xmm0,%xmm5
+
+# qhasm: b4 = a4
+# asm 1: movdqa <a4=int6464#5,>b4=int6464#7
+# asm 2: movdqa <a4=%xmm4,>b4=%xmm6
+movdqa %xmm4,%xmm6
+
+# qhasm: uint32323232 a4 <<= 7
+# asm 1: pslld $7,<a4=int6464#5
+# asm 2: pslld $7,<a4=%xmm4
+pslld $7,%xmm4
+
+# qhasm: uint32323232 b4 >>= 25
+# asm 1: psrld $25,<b4=int6464#7
+# asm 2: psrld $25,<b4=%xmm6
+psrld $25,%xmm6
+
+# qhasm: diag1 ^= a4
+# asm 1: pxor <a4=int6464#5,<diag1=int6464#2
+# asm 2: pxor <a4=%xmm4,<diag1=%xmm1
+pxor %xmm4,%xmm1
+
+# qhasm: diag1 ^= b4
+# asm 1: pxor <b4=int6464#7,<diag1=int6464#2
+# asm 2: pxor <b4=%xmm6,<diag1=%xmm1
+pxor %xmm6,%xmm1
+
+# qhasm: uint32323232 a5 += diag1
+# asm 1: paddd <diag1=int6464#2,<a5=int6464#6
+# asm 2: paddd <diag1=%xmm1,<a5=%xmm5
+paddd %xmm1,%xmm5
+
+# qhasm: a6 = diag1
+# asm 1: movdqa <diag1=int6464#2,>a6=int6464#5
+# asm 2: movdqa <diag1=%xmm1,>a6=%xmm4
+movdqa %xmm1,%xmm4
+
+# qhasm: b5 = a5
+# asm 1: movdqa <a5=int6464#6,>b5=int6464#7
+# asm 2: movdqa <a5=%xmm5,>b5=%xmm6
+movdqa %xmm5,%xmm6
+
+# qhasm: uint32323232 a5 <<= 9
+# asm 1: pslld $9,<a5=int6464#6
+# asm 2: pslld $9,<a5=%xmm5
+pslld $9,%xmm5
+
+# qhasm: uint32323232 b5 >>= 23
+# asm 1: psrld $23,<b5=int6464#7
+# asm 2: psrld $23,<b5=%xmm6
+psrld $23,%xmm6
+
+# qhasm: diag2 ^= a5
+# asm 1: pxor <a5=int6464#6,<diag2=int6464#3
+# asm 2: pxor <a5=%xmm5,<diag2=%xmm2
+pxor %xmm5,%xmm2
+
+# qhasm: diag1 <<<= 32
+# asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2
+# asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1
+pshufd $0x93,%xmm1,%xmm1
+
+# qhasm: diag2 ^= b5
+# asm 1: pxor <b5=int6464#7,<diag2=int6464#3
+# asm 2: pxor <b5=%xmm6,<diag2=%xmm2
+pxor %xmm6,%xmm2
+
+# qhasm: uint32323232 a6 += diag2
+# asm 1: paddd <diag2=int6464#3,<a6=int6464#5
+# asm 2: paddd <diag2=%xmm2,<a6=%xmm4
+paddd %xmm2,%xmm4
+
+# qhasm: a7 = diag2
+# asm 1: movdqa <diag2=int6464#3,>a7=int6464#6
+# asm 2: movdqa <diag2=%xmm2,>a7=%xmm5
+movdqa %xmm2,%xmm5
+
+# qhasm: b6 = a6
+# asm 1: movdqa <a6=int6464#5,>b6=int6464#7
+# asm 2: movdqa <a6=%xmm4,>b6=%xmm6
+movdqa %xmm4,%xmm6
+
+# qhasm: uint32323232 a6 <<= 13
+# asm 1: pslld $13,<a6=int6464#5
+# asm 2: pslld $13,<a6=%xmm4
+pslld $13,%xmm4
+
+# qhasm: uint32323232 b6 >>= 19
+# asm 1: psrld $19,<b6=int6464#7
+# asm 2: psrld $19,<b6=%xmm6
+psrld $19,%xmm6
+
+# qhasm: diag3 ^= a6
+# asm 1: pxor <a6=int6464#5,<diag3=int6464#4
+# asm 2: pxor <a6=%xmm4,<diag3=%xmm3
+pxor %xmm4,%xmm3
+
+# qhasm: diag2 <<<= 64
+# asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
+# asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
+pshufd $0x4e,%xmm2,%xmm2
+
+# qhasm: diag3 ^= b6
+# asm 1: pxor <b6=int6464#7,<diag3=int6464#4
+# asm 2: pxor <b6=%xmm6,<diag3=%xmm3
+pxor %xmm6,%xmm3
+
+# qhasm: unsigned>? i -= 4
+# asm 1: sub $4,<i=int64#4
+# asm 2: sub $4,<i=%rcx
+sub $4,%rcx
+
+# qhasm: uint32323232 a7 += diag3
+# asm 1: paddd <diag3=int6464#4,<a7=int6464#6
+# asm 2: paddd <diag3=%xmm3,<a7=%xmm5
+paddd %xmm3,%xmm5
+
+# qhasm: a0 = diag1
+# asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
+# asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
+movdqa %xmm1,%xmm4
+
+# qhasm: b7 = a7
+# asm 1: movdqa <a7=int6464#6,>b7=int6464#7
+# asm 2: movdqa <a7=%xmm5,>b7=%xmm6
+movdqa %xmm5,%xmm6
+
+# qhasm: uint32323232 a7 <<= 18
+# asm 1: pslld $18,<a7=int6464#6
+# asm 2: pslld $18,<a7=%xmm5
+pslld $18,%xmm5
+
+# qhasm: b0 = 0
+# asm 1: pxor >b0=int6464#8,>b0=int6464#8
+# asm 2: pxor >b0=%xmm7,>b0=%xmm7
+pxor %xmm7,%xmm7
+
+# qhasm: uint32323232 b7 >>= 14
+# asm 1: psrld $14,<b7=int6464#7
+# asm 2: psrld $14,<b7=%xmm6
+psrld $14,%xmm6
+
+# qhasm: diag0 ^= a7
+# asm 1: pxor <a7=int6464#6,<diag0=int6464#1
+# asm 2: pxor <a7=%xmm5,<diag0=%xmm0
+pxor %xmm5,%xmm0
+
+# qhasm: diag3 <<<= 96
+# asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
+# asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
+pshufd $0x39,%xmm3,%xmm3
+
+# qhasm: diag0 ^= b7
+# asm 1: pxor <b7=int6464#7,<diag0=int6464#1
+# asm 2: pxor <b7=%xmm6,<diag0=%xmm0
+pxor %xmm6,%xmm0
+# comment:fp stack unchanged by jump
+
+# qhasm: goto mainloop2 if unsigned>
+ja ._mainloop2
+
|
+
# qhasm: uint32323232 diag0 += x0
|
|
4354
|
+
# asm 1: paddd <x0=stack128#4,<diag0=int6464#1
|
|
4355
|
+
# asm 2: paddd <x0=48(%rsp),<diag0=%xmm0
|
|
4356
|
+
paddd 48(%rsp),%xmm0
|
|
4357
|
+
|
|
4358
|
+
# qhasm: uint32323232 diag1 += x1
|
|
4359
|
+
# asm 1: paddd <x1=stack128#1,<diag1=int6464#2
|
|
4360
|
+
# asm 2: paddd <x1=0(%rsp),<diag1=%xmm1
|
|
4361
|
+
paddd 0(%rsp),%xmm1
|
|
4362
|
+
|
|
4363
|
+
# qhasm: uint32323232 diag2 += x2
|
|
4364
|
+
# asm 1: paddd <x2=stack128#2,<diag2=int6464#3
|
|
4365
|
+
# asm 2: paddd <x2=16(%rsp),<diag2=%xmm2
|
|
4366
|
+
paddd 16(%rsp),%xmm2
|
|
4367
|
+
|
|
4368
|
+
# qhasm: uint32323232 diag3 += x3
|
|
4369
|
+
# asm 1: paddd <x3=stack128#3,<diag3=int6464#4
|
|
4370
|
+
# asm 2: paddd <x3=32(%rsp),<diag3=%xmm3
|
|
4371
|
+
paddd 32(%rsp),%xmm3
|
|
4372
|
+
|
|
4373
|
+
# qhasm: in0 = diag0
|
|
4374
|
+
# asm 1: movd <diag0=int6464#1,>in0=int64#4
|
|
4375
|
+
# asm 2: movd <diag0=%xmm0,>in0=%rcx
|
|
4376
|
+
movd %xmm0,%rcx
|
|
4377
|
+
|
|
4378
|
+
# qhasm: in12 = diag1
|
|
4379
|
+
# asm 1: movd <diag1=int6464#2,>in12=int64#5
|
|
4380
|
+
# asm 2: movd <diag1=%xmm1,>in12=%r8
|
|
4381
|
+
movd %xmm1,%r8
|
|
4382
|
+
|
|
4383
|
+
# qhasm: in8 = diag2
|
|
4384
|
+
# asm 1: movd <diag2=int6464#3,>in8=int64#6
|
|
4385
|
+
# asm 2: movd <diag2=%xmm2,>in8=%r9
|
|
4386
|
+
movd %xmm2,%r9
|
|
4387
|
+
|
|
4388
|
+
# qhasm: in4 = diag3
|
|
4389
|
+
# asm 1: movd <diag3=int6464#4,>in4=int64#7
|
|
4390
|
+
# asm 2: movd <diag3=%xmm3,>in4=%rax
|
|
4391
|
+
movd %xmm3,%rax
|
|
4392
|
+
|
|
4393
|
+
# qhasm: diag0 <<<= 96
|
|
4394
|
+
# asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
|
|
4395
|
+
# asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
|
|
4396
|
+
pshufd $0x39,%xmm0,%xmm0
|
|
4397
|
+
|
|
4398
|
+
# qhasm: diag1 <<<= 96
|
|
4399
|
+
# asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
|
|
4400
|
+
# asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
|
|
4401
|
+
pshufd $0x39,%xmm1,%xmm1
|
|
4402
|
+
|
|
4403
|
+
# qhasm: diag2 <<<= 96
|
|
4404
|
+
# asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
|
|
4405
|
+
# asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
|
|
4406
|
+
pshufd $0x39,%xmm2,%xmm2
|
|
4407
|
+
|
|
4408
|
+
# qhasm: diag3 <<<= 96
|
|
4409
|
+
# asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
|
|
4410
|
+
# asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
|
|
4411
|
+
pshufd $0x39,%xmm3,%xmm3
|
|
4412
|
+
|
|
4413
|
+
# qhasm: (uint32) in0 ^= *(uint32 *) (m + 0)
|
|
4414
|
+
# asm 1: xorl 0(<m=int64#2),<in0=int64#4d
|
|
4415
|
+
# asm 2: xorl 0(<m=%rsi),<in0=%ecx
|
|
4416
|
+
xorl 0(%rsi),%ecx
|
|
4417
|
+
|
|
4418
|
+
# qhasm: (uint32) in12 ^= *(uint32 *) (m + 48)
|
|
4419
|
+
# asm 1: xorl 48(<m=int64#2),<in12=int64#5d
|
|
4420
|
+
# asm 2: xorl 48(<m=%rsi),<in12=%r8d
|
|
4421
|
+
xorl 48(%rsi),%r8d
|
|
4422
|
+
|
|
4423
|
+
# qhasm: (uint32) in8 ^= *(uint32 *) (m + 32)
|
|
4424
|
+
# asm 1: xorl 32(<m=int64#2),<in8=int64#6d
|
|
4425
|
+
# asm 2: xorl 32(<m=%rsi),<in8=%r9d
|
|
4426
|
+
xorl 32(%rsi),%r9d
|
|
4427
|
+
|
|
4428
|
+
# qhasm: (uint32) in4 ^= *(uint32 *) (m + 16)
|
|
4429
|
+
# asm 1: xorl 16(<m=int64#2),<in4=int64#7d
|
|
4430
|
+
# asm 2: xorl 16(<m=%rsi),<in4=%eax
|
|
4431
|
+
xorl 16(%rsi),%eax
|
|
4432
|
+
|
|
4433
|
+
# qhasm: *(uint32 *) (out + 0) = in0
|
|
4434
|
+
# asm 1: movl <in0=int64#4d,0(<out=int64#1)
|
|
4435
|
+
# asm 2: movl <in0=%ecx,0(<out=%rdi)
|
|
4436
|
+
movl %ecx,0(%rdi)
|
|
4437
|
+
|
|
4438
|
+
# qhasm: *(uint32 *) (out + 48) = in12
|
|
4439
|
+
# asm 1: movl <in12=int64#5d,48(<out=int64#1)
|
|
4440
|
+
# asm 2: movl <in12=%r8d,48(<out=%rdi)
|
|
4441
|
+
movl %r8d,48(%rdi)
|
|
4442
|
+
|
|
4443
|
+
# qhasm: *(uint32 *) (out + 32) = in8
|
|
4444
|
+
# asm 1: movl <in8=int64#6d,32(<out=int64#1)
|
|
4445
|
+
# asm 2: movl <in8=%r9d,32(<out=%rdi)
|
|
4446
|
+
movl %r9d,32(%rdi)
|
|
4447
|
+
|
|
4448
|
+
# qhasm: *(uint32 *) (out + 16) = in4
|
|
4449
|
+
# asm 1: movl <in4=int64#7d,16(<out=int64#1)
|
|
4450
|
+
# asm 2: movl <in4=%eax,16(<out=%rdi)
|
|
4451
|
+
movl %eax,16(%rdi)
|
|
4452
|
+
|
|
4453
|
+
# qhasm: in5 = diag0
|
|
4454
|
+
# asm 1: movd <diag0=int6464#1,>in5=int64#4
|
|
4455
|
+
# asm 2: movd <diag0=%xmm0,>in5=%rcx
|
|
4456
|
+
movd %xmm0,%rcx
|
|
4457
|
+
|
|
4458
|
+
# qhasm: in1 = diag1
|
|
4459
|
+
# asm 1: movd <diag1=int6464#2,>in1=int64#5
|
|
4460
|
+
# asm 2: movd <diag1=%xmm1,>in1=%r8
|
|
4461
|
+
movd %xmm1,%r8
|
|
4462
|
+
|
|
4463
|
+
# qhasm: in13 = diag2
|
|
4464
|
+
# asm 1: movd <diag2=int6464#3,>in13=int64#6
|
|
4465
|
+
# asm 2: movd <diag2=%xmm2,>in13=%r9
|
|
4466
|
+
movd %xmm2,%r9
|
|
4467
|
+
|
|
4468
|
+
# qhasm: in9 = diag3
|
|
4469
|
+
# asm 1: movd <diag3=int6464#4,>in9=int64#7
|
|
4470
|
+
# asm 2: movd <diag3=%xmm3,>in9=%rax
|
|
4471
|
+
movd %xmm3,%rax
|
|
4472
|
+
|
|
4473
|
+
# qhasm: diag0 <<<= 96
|
|
4474
|
+
# asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
|
|
4475
|
+
# asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
|
|
4476
|
+
pshufd $0x39,%xmm0,%xmm0
|
|
4477
|
+
|
|
4478
|
+
# qhasm: diag1 <<<= 96
|
|
4479
|
+
# asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
|
|
4480
|
+
# asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
|
|
4481
|
+
pshufd $0x39,%xmm1,%xmm1
|
|
4482
|
+
|
|
4483
|
+
# qhasm: diag2 <<<= 96
|
|
4484
|
+
# asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
|
|
4485
|
+
# asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
|
|
4486
|
+
pshufd $0x39,%xmm2,%xmm2
|
|
4487
|
+
|
|
4488
|
+
# qhasm: diag3 <<<= 96
|
|
4489
|
+
# asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
|
|
4490
|
+
# asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
|
|
4491
|
+
pshufd $0x39,%xmm3,%xmm3
|
|
4492
|
+
|
|
4493
|
+
# qhasm: (uint32) in5 ^= *(uint32 *) (m + 20)
|
|
4494
|
+
# asm 1: xorl 20(<m=int64#2),<in5=int64#4d
|
|
4495
|
+
# asm 2: xorl 20(<m=%rsi),<in5=%ecx
|
|
4496
|
+
xorl 20(%rsi),%ecx
|
|
4497
|
+
|
|
4498
|
+
# qhasm: (uint32) in1 ^= *(uint32 *) (m + 4)
|
|
4499
|
+
# asm 1: xorl 4(<m=int64#2),<in1=int64#5d
|
|
4500
|
+
# asm 2: xorl 4(<m=%rsi),<in1=%r8d
|
|
4501
|
+
xorl 4(%rsi),%r8d
|
|
4502
|
+
|
|
4503
|
+
# qhasm: (uint32) in13 ^= *(uint32 *) (m + 52)
|
|
4504
|
+
# asm 1: xorl 52(<m=int64#2),<in13=int64#6d
|
|
4505
|
+
# asm 2: xorl 52(<m=%rsi),<in13=%r9d
|
|
4506
|
+
xorl 52(%rsi),%r9d
|
|
4507
|
+
|
|
4508
|
+
# qhasm: (uint32) in9 ^= *(uint32 *) (m + 36)
|
|
4509
|
+
# asm 1: xorl 36(<m=int64#2),<in9=int64#7d
|
|
4510
|
+
# asm 2: xorl 36(<m=%rsi),<in9=%eax
|
|
4511
|
+
xorl 36(%rsi),%eax
|
|
4512
|
+
|
|
4513
|
+
# qhasm: *(uint32 *) (out + 20) = in5
|
|
4514
|
+
# asm 1: movl <in5=int64#4d,20(<out=int64#1)
|
|
4515
|
+
# asm 2: movl <in5=%ecx,20(<out=%rdi)
|
|
4516
|
+
movl %ecx,20(%rdi)
|
|
4517
|
+
|
|
4518
|
+
# qhasm: *(uint32 *) (out + 4) = in1
|
|
4519
|
+
# asm 1: movl <in1=int64#5d,4(<out=int64#1)
|
|
4520
|
+
# asm 2: movl <in1=%r8d,4(<out=%rdi)
|
|
4521
|
+
movl %r8d,4(%rdi)
|
|
4522
|
+
|
|
4523
|
+
# qhasm: *(uint32 *) (out + 52) = in13
|
|
4524
|
+
# asm 1: movl <in13=int64#6d,52(<out=int64#1)
|
|
4525
|
+
# asm 2: movl <in13=%r9d,52(<out=%rdi)
|
|
4526
|
+
movl %r9d,52(%rdi)
|
|
4527
|
+
|
|
4528
|
+
# qhasm: *(uint32 *) (out + 36) = in9
|
|
4529
|
+
# asm 1: movl <in9=int64#7d,36(<out=int64#1)
|
|
4530
|
+
# asm 2: movl <in9=%eax,36(<out=%rdi)
|
|
4531
|
+
movl %eax,36(%rdi)
|
|
4532
|
+
|
|
4533
|
+
# qhasm: in10 = diag0
|
|
4534
|
+
# asm 1: movd <diag0=int6464#1,>in10=int64#4
|
|
4535
|
+
# asm 2: movd <diag0=%xmm0,>in10=%rcx
|
|
4536
|
+
movd %xmm0,%rcx
|
|
4537
|
+
|
|
4538
|
+
# qhasm: in6 = diag1
|
|
4539
|
+
# asm 1: movd <diag1=int6464#2,>in6=int64#5
|
|
4540
|
+
# asm 2: movd <diag1=%xmm1,>in6=%r8
|
|
4541
|
+
movd %xmm1,%r8
|
|
4542
|
+
|
|
4543
|
+
# qhasm: in2 = diag2
|
|
4544
|
+
# asm 1: movd <diag2=int6464#3,>in2=int64#6
|
|
4545
|
+
# asm 2: movd <diag2=%xmm2,>in2=%r9
|
|
4546
|
+
movd %xmm2,%r9
|
|
4547
|
+
|
|
4548
|
+
# qhasm: in14 = diag3
|
|
4549
|
+
# asm 1: movd <diag3=int6464#4,>in14=int64#7
|
|
4550
|
+
# asm 2: movd <diag3=%xmm3,>in14=%rax
|
|
4551
|
+
movd %xmm3,%rax
|
|
4552
|
+
|
|
4553
|
+
# qhasm: diag0 <<<= 96
|
|
4554
|
+
# asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
|
|
4555
|
+
# asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
|
|
4556
|
+
pshufd $0x39,%xmm0,%xmm0
|
|
4557
|
+
|
|
4558
|
+
# qhasm: diag1 <<<= 96
|
|
4559
|
+
# asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
|
|
4560
|
+
# asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
|
|
4561
|
+
pshufd $0x39,%xmm1,%xmm1
|
|
4562
|
+
|
|
4563
|
+
# qhasm: diag2 <<<= 96
|
|
4564
|
+
# asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
|
|
4565
|
+
# asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
|
|
4566
|
+
pshufd $0x39,%xmm2,%xmm2
|
|
4567
|
+
|
|
4568
|
+
# qhasm: diag3 <<<= 96
|
|
4569
|
+
# asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
|
|
4570
|
+
# asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
|
|
4571
|
+
pshufd $0x39,%xmm3,%xmm3
|
|
4572
|
+
|
|
4573
|
+
# qhasm: (uint32) in10 ^= *(uint32 *) (m + 40)
|
|
4574
|
+
# asm 1: xorl 40(<m=int64#2),<in10=int64#4d
|
|
4575
|
+
# asm 2: xorl 40(<m=%rsi),<in10=%ecx
|
|
4576
|
+
xorl 40(%rsi),%ecx
|
|
4577
|
+
|
|
4578
|
+
# qhasm: (uint32) in6 ^= *(uint32 *) (m + 24)
|
|
4579
|
+
# asm 1: xorl 24(<m=int64#2),<in6=int64#5d
|
|
4580
|
+
# asm 2: xorl 24(<m=%rsi),<in6=%r8d
|
|
4581
|
+
xorl 24(%rsi),%r8d
|
|
4582
|
+
|
|
4583
|
+
# qhasm: (uint32) in2 ^= *(uint32 *) (m + 8)
|
|
4584
|
+
# asm 1: xorl 8(<m=int64#2),<in2=int64#6d
|
|
4585
|
+
# asm 2: xorl 8(<m=%rsi),<in2=%r9d
|
|
4586
|
+
xorl 8(%rsi),%r9d
|
|
4587
|
+
|
|
4588
|
+
# qhasm: (uint32) in14 ^= *(uint32 *) (m + 56)
|
|
4589
|
+
# asm 1: xorl 56(<m=int64#2),<in14=int64#7d
|
|
4590
|
+
# asm 2: xorl 56(<m=%rsi),<in14=%eax
|
|
4591
|
+
xorl 56(%rsi),%eax
|
|
4592
|
+
|
|
4593
|
+
# qhasm: *(uint32 *) (out + 40) = in10
|
|
4594
|
+
# asm 1: movl <in10=int64#4d,40(<out=int64#1)
|
|
4595
|
+
# asm 2: movl <in10=%ecx,40(<out=%rdi)
|
|
4596
|
+
movl %ecx,40(%rdi)
|
|
4597
|
+
|
|
4598
|
+
# qhasm: *(uint32 *) (out + 24) = in6
|
|
4599
|
+
# asm 1: movl <in6=int64#5d,24(<out=int64#1)
|
|
4600
|
+
# asm 2: movl <in6=%r8d,24(<out=%rdi)
|
|
4601
|
+
movl %r8d,24(%rdi)
|
|
4602
|
+
|
|
4603
|
+
# qhasm: *(uint32 *) (out + 8) = in2
|
|
4604
|
+
# asm 1: movl <in2=int64#6d,8(<out=int64#1)
|
|
4605
|
+
# asm 2: movl <in2=%r9d,8(<out=%rdi)
|
|
4606
|
+
movl %r9d,8(%rdi)
|
|
4607
|
+
|
|
4608
|
+
# qhasm: *(uint32 *) (out + 56) = in14
|
|
4609
|
+
# asm 1: movl <in14=int64#7d,56(<out=int64#1)
|
|
4610
|
+
# asm 2: movl <in14=%eax,56(<out=%rdi)
|
|
4611
|
+
movl %eax,56(%rdi)
|
|
4612
|
+
|
|
4613
|
+
# qhasm: in15 = diag0
|
|
4614
|
+
# asm 1: movd <diag0=int6464#1,>in15=int64#4
|
|
4615
|
+
# asm 2: movd <diag0=%xmm0,>in15=%rcx
|
|
4616
|
+
movd %xmm0,%rcx
|
|
4617
|
+
|
|
4618
|
+
# qhasm: in11 = diag1
|
|
4619
|
+
# asm 1: movd <diag1=int6464#2,>in11=int64#5
|
|
4620
|
+
# asm 2: movd <diag1=%xmm1,>in11=%r8
|
|
4621
|
+
movd %xmm1,%r8
|
|
4622
|
+
|
|
4623
|
+
# qhasm: in7 = diag2
|
|
4624
|
+
# asm 1: movd <diag2=int6464#3,>in7=int64#6
|
|
4625
|
+
# asm 2: movd <diag2=%xmm2,>in7=%r9
|
|
4626
|
+
movd %xmm2,%r9
|
|
4627
|
+
|
|
4628
|
+
# qhasm: in3 = diag3
|
|
4629
|
+
# asm 1: movd <diag3=int6464#4,>in3=int64#7
|
|
4630
|
+
# asm 2: movd <diag3=%xmm3,>in3=%rax
|
|
4631
|
+
movd %xmm3,%rax
|
|
4632
|
+
|
|
4633
|
+
# qhasm: (uint32) in15 ^= *(uint32 *) (m + 60)
|
|
4634
|
+
# asm 1: xorl 60(<m=int64#2),<in15=int64#4d
|
|
4635
|
+
# asm 2: xorl 60(<m=%rsi),<in15=%ecx
|
|
4636
|
+
xorl 60(%rsi),%ecx
|
|
4637
|
+
|
|
4638
|
+
# qhasm: (uint32) in11 ^= *(uint32 *) (m + 44)
|
|
4639
|
+
# asm 1: xorl 44(<m=int64#2),<in11=int64#5d
|
|
4640
|
+
# asm 2: xorl 44(<m=%rsi),<in11=%r8d
|
|
4641
|
+
xorl 44(%rsi),%r8d
|
|
4642
|
+
|
|
4643
|
+
# qhasm: (uint32) in7 ^= *(uint32 *) (m + 28)
|
|
4644
|
+
# asm 1: xorl 28(<m=int64#2),<in7=int64#6d
|
|
4645
|
+
# asm 2: xorl 28(<m=%rsi),<in7=%r9d
|
|
4646
|
+
xorl 28(%rsi),%r9d
|
|
4647
|
+
|
|
4648
|
+
# qhasm: (uint32) in3 ^= *(uint32 *) (m + 12)
|
|
4649
|
+
# asm 1: xorl 12(<m=int64#2),<in3=int64#7d
|
|
4650
|
+
# asm 2: xorl 12(<m=%rsi),<in3=%eax
|
|
4651
|
+
xorl 12(%rsi),%eax
|
|
4652
|
+
|
|
4653
|
+
# qhasm: *(uint32 *) (out + 60) = in15
|
|
4654
|
+
# asm 1: movl <in15=int64#4d,60(<out=int64#1)
|
|
4655
|
+
# asm 2: movl <in15=%ecx,60(<out=%rdi)
|
|
4656
|
+
movl %ecx,60(%rdi)
|
|
4657
|
+
|
|
4658
|
+
# qhasm: *(uint32 *) (out + 44) = in11
|
|
4659
|
+
# asm 1: movl <in11=int64#5d,44(<out=int64#1)
|
|
4660
|
+
# asm 2: movl <in11=%r8d,44(<out=%rdi)
|
|
4661
|
+
movl %r8d,44(%rdi)
|
|
4662
|
+
|
|
4663
|
+
# qhasm: *(uint32 *) (out + 28) = in7
|
|
4664
|
+
# asm 1: movl <in7=int64#6d,28(<out=int64#1)
|
|
4665
|
+
# asm 2: movl <in7=%r9d,28(<out=%rdi)
|
|
4666
|
+
movl %r9d,28(%rdi)
|
|
4667
|
+
|
|
4668
|
+
# qhasm: *(uint32 *) (out + 12) = in3
|
|
4669
|
+
# asm 1: movl <in3=int64#7d,12(<out=int64#1)
|
|
4670
|
+
# asm 2: movl <in3=%eax,12(<out=%rdi)
|
|
4671
|
+
movl %eax,12(%rdi)
|
|
4672
|
+
|
|
4673
|
+
# qhasm: bytes = bytes_backup
|
|
4674
|
+
# asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6
|
|
4675
|
+
# asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9
|
|
4676
|
+
movq 408(%rsp),%r9
|
|
4677
|
+
|
|
4678
|
+
# qhasm: in8 = ((uint32 *)&x2)[0]
|
|
4679
|
+
# asm 1: movl <x2=stack128#2,>in8=int64#4d
|
|
4680
|
+
# asm 2: movl <x2=16(%rsp),>in8=%ecx
|
|
4681
|
+
movl 16(%rsp),%ecx
|
|
4682
|
+
|
|
4683
|
+
# qhasm: in9 = ((uint32 *)&x3)[1]
|
|
4684
|
+
# asm 1: movl 4+<x3=stack128#3,>in9=int64#5d
|
|
4685
|
+
# asm 2: movl 4+<x3=32(%rsp),>in9=%r8d
|
|
4686
|
+
movl 4+32(%rsp),%r8d
|
|
4687
|
+
|
|
4688
|
+
# qhasm: in8 += 1
|
|
4689
|
+
# asm 1: add $1,<in8=int64#4
|
|
4690
|
+
# asm 2: add $1,<in8=%rcx
|
|
4691
|
+
add $1,%rcx
|
|
4692
|
+
|
|
4693
|
+
# qhasm: in9 <<= 32
|
|
4694
|
+
# asm 1: shl $32,<in9=int64#5
|
|
4695
|
+
# asm 2: shl $32,<in9=%r8
|
|
4696
|
+
shl $32,%r8
|
|
4697
|
+
|
|
4698
|
+
# qhasm: in8 += in9
|
|
4699
|
+
# asm 1: add <in9=int64#5,<in8=int64#4
|
|
4700
|
+
# asm 2: add <in9=%r8,<in8=%rcx
|
|
4701
|
+
add %r8,%rcx
|
|
4702
|
+
|
|
4703
|
+
# qhasm: in9 = in8
|
|
4704
|
+
# asm 1: mov <in8=int64#4,>in9=int64#5
|
|
4705
|
+
# asm 2: mov <in8=%rcx,>in9=%r8
|
|
4706
|
+
mov %rcx,%r8
|
|
4707
|
+
|
|
4708
|
+
# qhasm: (uint64) in9 >>= 32
|
|
4709
|
+
# asm 1: shr $32,<in9=int64#5
|
|
4710
|
+
# asm 2: shr $32,<in9=%r8
|
|
4711
|
+
shr $32,%r8
|
|
4712
|
+
|
|
4713
|
+
# qhasm: ((uint32 *)&x2)[0] = in8
|
|
4714
|
+
# asm 1: movl <in8=int64#4d,>x2=stack128#2
|
|
4715
|
+
# asm 2: movl <in8=%ecx,>x2=16(%rsp)
|
|
4716
|
+
movl %ecx,16(%rsp)
|
|
4717
|
+
|
|
4718
|
+
# qhasm: ((uint32 *)&x3)[1] = in9
|
|
4719
|
+
# asm 1: movl <in9=int64#5d,4+<x3=stack128#3
|
|
4720
|
+
# asm 2: movl <in9=%r8d,4+<x3=32(%rsp)
|
|
4721
|
+
movl %r8d,4+32(%rsp)
|
|
4722
|
+
|
|
4723
|
+
# qhasm: unsigned>? unsigned<? bytes - 64
|
|
4724
|
+
# asm 1: cmp $64,<bytes=int64#6
|
|
4725
|
+
# asm 2: cmp $64,<bytes=%r9
|
|
4726
|
+
cmp $64,%r9
|
|
4727
|
+
# comment:fp stack unchanged by jump
|
|
4728
|
+
|
|
4729
|
+
# qhasm: goto bytesatleast65 if unsigned>
|
|
4730
|
+
ja ._bytesatleast65
|
|
4731
|
+
# comment:fp stack unchanged by jump
|
|
4732
|
+
|
|
4733
|
+
# qhasm: goto bytesatleast64 if !unsigned<
|
|
4734
|
+
jae ._bytesatleast64
|
|
4735
|
+
|
|
4736
|
+
# qhasm: m = out
|
|
4737
|
+
# asm 1: mov <out=int64#1,>m=int64#2
|
|
4738
|
+
# asm 2: mov <out=%rdi,>m=%rsi
|
|
4739
|
+
mov %rdi,%rsi
|
|
4740
|
+
|
|
4741
|
+
# qhasm: out = ctarget
|
|
4742
|
+
# asm 1: mov <ctarget=int64#3,>out=int64#1
|
|
4743
|
+
# asm 2: mov <ctarget=%rdx,>out=%rdi
|
|
4744
|
+
mov %rdx,%rdi
|
|
4745
|
+
|
|
4746
|
+
# qhasm: i = bytes
|
|
4747
|
+
# asm 1: mov <bytes=int64#6,>i=int64#4
|
|
4748
|
+
# asm 2: mov <bytes=%r9,>i=%rcx
|
|
4749
|
+
mov %r9,%rcx
|
|
4750
|
+
|
|
4751
|
+
# qhasm: while (i) { *out++ = *m++; --i }
|
|
4752
|
+
rep movsb
|
|
4753
|
+
# comment:fp stack unchanged by fallthrough
|
|
4754
|
+
|
|
4755
|
+
# qhasm: bytesatleast64:
|
|
4756
|
+
._bytesatleast64:
|
|
4757
|
+
# comment:fp stack unchanged by fallthrough
|
|
4758
|
+
|
|
4759
|
+
# qhasm: done:
|
|
4760
|
+
._done:
|
|
4761
|
+
|
|
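This tail handles a final block shorter than 64 bytes: the real destination appears to have been stashed in ctarget earlier in the function while the full 64-byte block was generated into a scratch area, so here rep movsb copies only the remaining `bytes` back out. Roughly, as an illustrative sketch rather than this package's code:

    #include <string.h>

    /* scratch holds a full 64-byte output block; only `bytes` of it
     * belong to the caller's buffer. */
    static void copy_tail(unsigned char *ctarget,
                          const unsigned char *scratch,
                          unsigned long long bytes) {
        memcpy(ctarget, scratch, bytes);   /* the rep movsb loop */
    }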
+# qhasm: r11_caller = r11_stack
+# asm 1: movq <r11_stack=stack64#1,>r11_caller=int64#9
+# asm 2: movq <r11_stack=352(%rsp),>r11_caller=%r11
+movq 352(%rsp),%r11
+
+# qhasm: r12_caller = r12_stack
+# asm 1: movq <r12_stack=stack64#2,>r12_caller=int64#10
+# asm 2: movq <r12_stack=360(%rsp),>r12_caller=%r12
+movq 360(%rsp),%r12
+
+# qhasm: r13_caller = r13_stack
+# asm 1: movq <r13_stack=stack64#3,>r13_caller=int64#11
+# asm 2: movq <r13_stack=368(%rsp),>r13_caller=%r13
+movq 368(%rsp),%r13
+
+# qhasm: r14_caller = r14_stack
+# asm 1: movq <r14_stack=stack64#4,>r14_caller=int64#12
+# asm 2: movq <r14_stack=376(%rsp),>r14_caller=%r14
+movq 376(%rsp),%r14
+
+# qhasm: r15_caller = r15_stack
+# asm 1: movq <r15_stack=stack64#5,>r15_caller=int64#13
+# asm 2: movq <r15_stack=384(%rsp),>r15_caller=%r15
+movq 384(%rsp),%r15
+
+# qhasm: rbx_caller = rbx_stack
+# asm 1: movq <rbx_stack=stack64#6,>rbx_caller=int64#14
+# asm 2: movq <rbx_stack=392(%rsp),>rbx_caller=%rbx
+movq 392(%rsp),%rbx
+
+# qhasm: rbp_caller = rbp_stack
+# asm 1: movq <rbp_stack=stack64#7,>rbp_caller=int64#15
+# asm 2: movq <rbp_stack=400(%rsp),>rbp_caller=%rbp
+movq 400(%rsp),%rbp
+
+# qhasm: leave
+add %r11,%rsp
+xor %rax,%rax
+xor %rdx,%rdx
+ret
+
+# qhasm: bytesatleast65:
+._bytesatleast65:
+
+# qhasm: bytes -= 64
+# asm 1: sub $64,<bytes=int64#6
+# asm 2: sub $64,<bytes=%r9
+sub $64,%r9
+
+# qhasm: out += 64
+# asm 1: add $64,<out=int64#1
+# asm 2: add $64,<out=%rdi
+add $64,%rdi
+
+# qhasm: m += 64
+# asm 1: add $64,<m=int64#2
+# asm 2: add $64,<m=%rsi
+add $64,%rsi
+# comment:fp stack unchanged by jump
+
+# qhasm: goto bytesbetween1and255
+jmp ._bytesbetween1and255