libv8 3.3.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +8 -0
- data/.gitmodules +3 -0
- data/Gemfile +4 -0
- data/README.md +44 -0
- data/Rakefile +73 -0
- data/ext/libv8/extconf.rb +9 -0
- data/lib/libv8.rb +15 -0
- data/lib/libv8/Makefile +38 -0
- data/lib/libv8/detect_cpu.rb +27 -0
- data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
- data/lib/libv8/scons/CHANGES.txt +5334 -0
- data/lib/libv8/scons/LICENSE.txt +20 -0
- data/lib/libv8/scons/MANIFEST +199 -0
- data/lib/libv8/scons/PKG-INFO +13 -0
- data/lib/libv8/scons/README.txt +243 -0
- data/lib/libv8/scons/RELEASE.txt +98 -0
- data/lib/libv8/scons/engine/SCons/Action.py +1241 -0
- data/lib/libv8/scons/engine/SCons/Builder.py +877 -0
- data/lib/libv8/scons/engine/SCons/CacheDir.py +216 -0
- data/lib/libv8/scons/engine/SCons/Conftest.py +793 -0
- data/lib/libv8/scons/engine/SCons/Debug.py +220 -0
- data/lib/libv8/scons/engine/SCons/Defaults.py +480 -0
- data/lib/libv8/scons/engine/SCons/Environment.py +2318 -0
- data/lib/libv8/scons/engine/SCons/Errors.py +205 -0
- data/lib/libv8/scons/engine/SCons/Executor.py +633 -0
- data/lib/libv8/scons/engine/SCons/Job.py +435 -0
- data/lib/libv8/scons/engine/SCons/Memoize.py +244 -0
- data/lib/libv8/scons/engine/SCons/Node/Alias.py +152 -0
- data/lib/libv8/scons/engine/SCons/Node/FS.py +3142 -0
- data/lib/libv8/scons/engine/SCons/Node/Python.py +128 -0
- data/lib/libv8/scons/engine/SCons/Node/__init__.py +1328 -0
- data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +50 -0
- data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +50 -0
- data/lib/libv8/scons/engine/SCons/Options/ListOption.py +50 -0
- data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +50 -0
- data/lib/libv8/scons/engine/SCons/Options/PathOption.py +76 -0
- data/lib/libv8/scons/engine/SCons/Options/__init__.py +67 -0
- data/lib/libv8/scons/engine/SCons/PathList.py +231 -0
- data/lib/libv8/scons/engine/SCons/Platform/__init__.py +241 -0
- data/lib/libv8/scons/engine/SCons/Platform/aix.py +69 -0
- data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +55 -0
- data/lib/libv8/scons/engine/SCons/Platform/darwin.py +46 -0
- data/lib/libv8/scons/engine/SCons/Platform/hpux.py +46 -0
- data/lib/libv8/scons/engine/SCons/Platform/irix.py +44 -0
- data/lib/libv8/scons/engine/SCons/Platform/os2.py +58 -0
- data/lib/libv8/scons/engine/SCons/Platform/posix.py +263 -0
- data/lib/libv8/scons/engine/SCons/Platform/sunos.py +50 -0
- data/lib/libv8/scons/engine/SCons/Platform/win32.py +385 -0
- data/lib/libv8/scons/engine/SCons/SConf.py +1030 -0
- data/lib/libv8/scons/engine/SCons/SConsign.py +383 -0
- data/lib/libv8/scons/engine/SCons/Scanner/C.py +132 -0
- data/lib/libv8/scons/engine/SCons/Scanner/D.py +73 -0
- data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +109 -0
- data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +316 -0
- data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +48 -0
- data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +384 -0
- data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +101 -0
- data/lib/libv8/scons/engine/SCons/Scanner/RC.py +55 -0
- data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +413 -0
- data/lib/libv8/scons/engine/SCons/Script/Interactive.py +384 -0
- data/lib/libv8/scons/engine/SCons/Script/Main.py +1334 -0
- data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +939 -0
- data/lib/libv8/scons/engine/SCons/Script/SConscript.py +640 -0
- data/lib/libv8/scons/engine/SCons/Script/__init__.py +412 -0
- data/lib/libv8/scons/engine/SCons/Sig.py +63 -0
- data/lib/libv8/scons/engine/SCons/Subst.py +904 -0
- data/lib/libv8/scons/engine/SCons/Taskmaster.py +1017 -0
- data/lib/libv8/scons/engine/SCons/Tool/386asm.py +61 -0
- data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +67 -0
- data/lib/libv8/scons/engine/SCons/Tool/CVS.py +73 -0
- data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +246 -0
- data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +323 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +240 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +82 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +391 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +456 -0
- data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +499 -0
- data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +103 -0
- data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +137 -0
- data/lib/libv8/scons/engine/SCons/Tool/RCS.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +71 -0
- data/lib/libv8/scons/engine/SCons/Tool/__init__.py +681 -0
- data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +82 -0
- data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +74 -0
- data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +80 -0
- data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +76 -0
- data/lib/libv8/scons/engine/SCons/Tool/applelink.py +71 -0
- data/lib/libv8/scons/engine/SCons/Tool/ar.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/as.py +78 -0
- data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +81 -0
- data/lib/libv8/scons/engine/SCons/Tool/c++.py +99 -0
- data/lib/libv8/scons/engine/SCons/Tool/cc.py +102 -0
- data/lib/libv8/scons/engine/SCons/Tool/cvf.py +58 -0
- data/lib/libv8/scons/engine/SCons/Tool/default.py +50 -0
- data/lib/libv8/scons/engine/SCons/Tool/dmd.py +223 -0
- data/lib/libv8/scons/engine/SCons/Tool/dvi.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +124 -0
- data/lib/libv8/scons/engine/SCons/Tool/dvips.py +94 -0
- data/lib/libv8/scons/engine/SCons/Tool/f77.py +62 -0
- data/lib/libv8/scons/engine/SCons/Tool/f90.py +62 -0
- data/lib/libv8/scons/engine/SCons/Tool/f95.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +98 -0
- data/lib/libv8/scons/engine/SCons/Tool/fortran.py +62 -0
- data/lib/libv8/scons/engine/SCons/Tool/g++.py +90 -0
- data/lib/libv8/scons/engine/SCons/Tool/g77.py +73 -0
- data/lib/libv8/scons/engine/SCons/Tool/gas.py +53 -0
- data/lib/libv8/scons/engine/SCons/Tool/gcc.py +80 -0
- data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/gs.py +81 -0
- data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +84 -0
- data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +53 -0
- data/lib/libv8/scons/engine/SCons/Tool/hplink.py +77 -0
- data/lib/libv8/scons/engine/SCons/Tool/icc.py +59 -0
- data/lib/libv8/scons/engine/SCons/Tool/icl.py +52 -0
- data/lib/libv8/scons/engine/SCons/Tool/ifl.py +72 -0
- data/lib/libv8/scons/engine/SCons/Tool/ifort.py +88 -0
- data/lib/libv8/scons/engine/SCons/Tool/ilink.py +59 -0
- data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +60 -0
- data/lib/libv8/scons/engine/SCons/Tool/install.py +229 -0
- data/lib/libv8/scons/engine/SCons/Tool/intelc.py +482 -0
- data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +67 -0
- data/lib/libv8/scons/engine/SCons/Tool/jar.py +110 -0
- data/lib/libv8/scons/engine/SCons/Tool/javac.py +230 -0
- data/lib/libv8/scons/engine/SCons/Tool/javah.py +137 -0
- data/lib/libv8/scons/engine/SCons/Tool/latex.py +79 -0
- data/lib/libv8/scons/engine/SCons/Tool/lex.py +97 -0
- data/lib/libv8/scons/engine/SCons/Tool/link.py +121 -0
- data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +112 -0
- data/lib/libv8/scons/engine/SCons/Tool/m4.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/masm.py +77 -0
- data/lib/libv8/scons/engine/SCons/Tool/midl.py +88 -0
- data/lib/libv8/scons/engine/SCons/Tool/mingw.py +158 -0
- data/lib/libv8/scons/engine/SCons/Tool/mslib.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/mslink.py +266 -0
- data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +50 -0
- data/lib/libv8/scons/engine/SCons/Tool/msvc.py +268 -0
- data/lib/libv8/scons/engine/SCons/Tool/msvs.py +1388 -0
- data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +207 -0
- data/lib/libv8/scons/engine/SCons/Tool/mwld.py +107 -0
- data/lib/libv8/scons/engine/SCons/Tool/nasm.py +72 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +312 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +527 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +365 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +44 -0
- data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +44 -0
- data/lib/libv8/scons/engine/SCons/Tool/pdf.py +78 -0
- data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +83 -0
- data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +108 -0
- data/lib/libv8/scons/engine/SCons/Tool/qt.py +336 -0
- data/lib/libv8/scons/engine/SCons/Tool/rmic.py +120 -0
- data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +70 -0
- data/lib/libv8/scons/engine/SCons/Tool/rpm.py +132 -0
- data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +68 -0
- data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +58 -0
- data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +53 -0
- data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunar.py +67 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +142 -0
- data/lib/libv8/scons/engine/SCons/Tool/suncc.py +58 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +63 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +64 -0
- data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +77 -0
- data/lib/libv8/scons/engine/SCons/Tool/swig.py +182 -0
- data/lib/libv8/scons/engine/SCons/Tool/tar.py +73 -0
- data/lib/libv8/scons/engine/SCons/Tool/tex.py +813 -0
- data/lib/libv8/scons/engine/SCons/Tool/textfile.py +175 -0
- data/lib/libv8/scons/engine/SCons/Tool/tlib.py +53 -0
- data/lib/libv8/scons/engine/SCons/Tool/wix.py +99 -0
- data/lib/libv8/scons/engine/SCons/Tool/yacc.py +130 -0
- data/lib/libv8/scons/engine/SCons/Tool/zip.py +99 -0
- data/lib/libv8/scons/engine/SCons/Util.py +1492 -0
- data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +89 -0
- data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +103 -0
- data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +135 -0
- data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +106 -0
- data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +147 -0
- data/lib/libv8/scons/engine/SCons/Variables/__init__.py +312 -0
- data/lib/libv8/scons/engine/SCons/Warnings.py +246 -0
- data/lib/libv8/scons/engine/SCons/__init__.py +49 -0
- data/lib/libv8/scons/engine/SCons/compat/__init__.py +237 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +150 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +45 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +45 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +76 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +45 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_sets.py +563 -0
- data/lib/libv8/scons/engine/SCons/compat/_scons_subprocess.py +1281 -0
- data/lib/libv8/scons/engine/SCons/cpp.py +589 -0
- data/lib/libv8/scons/engine/SCons/dblite.py +251 -0
- data/lib/libv8/scons/engine/SCons/exitfuncs.py +77 -0
- data/lib/libv8/scons/os_spawnv_fix.diff +83 -0
- data/lib/libv8/scons/scons-time.1 +1017 -0
- data/lib/libv8/scons/scons.1 +15219 -0
- data/lib/libv8/scons/sconsign.1 +208 -0
- data/lib/libv8/scons/script/scons +196 -0
- data/lib/libv8/scons/script/scons-time +1544 -0
- data/lib/libv8/scons/script/scons.bat +31 -0
- data/lib/libv8/scons/script/sconsign +513 -0
- data/lib/libv8/scons/setup.cfg +6 -0
- data/lib/libv8/scons/setup.py +425 -0
- data/lib/libv8/v8/.gitignore +35 -0
- data/lib/libv8/v8/AUTHORS +44 -0
- data/lib/libv8/v8/ChangeLog +2839 -0
- data/lib/libv8/v8/LICENSE +52 -0
- data/lib/libv8/v8/LICENSE.strongtalk +29 -0
- data/lib/libv8/v8/LICENSE.v8 +26 -0
- data/lib/libv8/v8/LICENSE.valgrind +45 -0
- data/lib/libv8/v8/SConstruct +1478 -0
- data/lib/libv8/v8/build/README.txt +49 -0
- data/lib/libv8/v8/build/all.gyp +18 -0
- data/lib/libv8/v8/build/armu.gypi +32 -0
- data/lib/libv8/v8/build/common.gypi +144 -0
- data/lib/libv8/v8/build/gyp_v8 +145 -0
- data/lib/libv8/v8/include/v8-debug.h +395 -0
- data/lib/libv8/v8/include/v8-preparser.h +117 -0
- data/lib/libv8/v8/include/v8-profiler.h +505 -0
- data/lib/libv8/v8/include/v8-testing.h +104 -0
- data/lib/libv8/v8/include/v8.h +4124 -0
- data/lib/libv8/v8/include/v8stdint.h +53 -0
- data/lib/libv8/v8/preparser/SConscript +38 -0
- data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
- data/lib/libv8/v8/src/SConscript +368 -0
- data/lib/libv8/v8/src/accessors.cc +767 -0
- data/lib/libv8/v8/src/accessors.h +123 -0
- data/lib/libv8/v8/src/allocation-inl.h +49 -0
- data/lib/libv8/v8/src/allocation.cc +122 -0
- data/lib/libv8/v8/src/allocation.h +143 -0
- data/lib/libv8/v8/src/api.cc +5845 -0
- data/lib/libv8/v8/src/api.h +574 -0
- data/lib/libv8/v8/src/apinatives.js +110 -0
- data/lib/libv8/v8/src/apiutils.h +73 -0
- data/lib/libv8/v8/src/arguments.h +118 -0
- data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
- data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
- data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
- data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
- data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
- data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
- data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
- data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
- data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
- data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
- data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
- data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
- data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
- data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
- data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
- data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
- data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
- data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
- data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
- data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
- data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
- data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
- data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
- data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
- data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
- data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
- data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
- data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
- data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
- data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
- data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
- data/lib/libv8/v8/src/array.js +1366 -0
- data/lib/libv8/v8/src/assembler.cc +1207 -0
- data/lib/libv8/v8/src/assembler.h +858 -0
- data/lib/libv8/v8/src/ast-inl.h +112 -0
- data/lib/libv8/v8/src/ast.cc +1146 -0
- data/lib/libv8/v8/src/ast.h +2188 -0
- data/lib/libv8/v8/src/atomicops.h +167 -0
- data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
- data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
- data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
- data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
- data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
- data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
- data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
- data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
- data/lib/libv8/v8/src/bignum.cc +768 -0
- data/lib/libv8/v8/src/bignum.h +140 -0
- data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
- data/lib/libv8/v8/src/bootstrapper.h +188 -0
- data/lib/libv8/v8/src/builtins.cc +1707 -0
- data/lib/libv8/v8/src/builtins.h +371 -0
- data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
- data/lib/libv8/v8/src/cached-powers.cc +177 -0
- data/lib/libv8/v8/src/cached-powers.h +65 -0
- data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
- data/lib/libv8/v8/src/char-predicates.h +67 -0
- data/lib/libv8/v8/src/checks.cc +110 -0
- data/lib/libv8/v8/src/checks.h +296 -0
- data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
- data/lib/libv8/v8/src/circular-queue.cc +122 -0
- data/lib/libv8/v8/src/circular-queue.h +103 -0
- data/lib/libv8/v8/src/code-stubs.cc +267 -0
- data/lib/libv8/v8/src/code-stubs.h +1011 -0
- data/lib/libv8/v8/src/code.h +70 -0
- data/lib/libv8/v8/src/codegen.cc +231 -0
- data/lib/libv8/v8/src/codegen.h +84 -0
- data/lib/libv8/v8/src/compilation-cache.cc +540 -0
- data/lib/libv8/v8/src/compilation-cache.h +287 -0
- data/lib/libv8/v8/src/compiler.cc +786 -0
- data/lib/libv8/v8/src/compiler.h +312 -0
- data/lib/libv8/v8/src/contexts.cc +347 -0
- data/lib/libv8/v8/src/contexts.h +391 -0
- data/lib/libv8/v8/src/conversions-inl.h +106 -0
- data/lib/libv8/v8/src/conversions.cc +1131 -0
- data/lib/libv8/v8/src/conversions.h +135 -0
- data/lib/libv8/v8/src/counters.cc +93 -0
- data/lib/libv8/v8/src/counters.h +254 -0
- data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
- data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
- data/lib/libv8/v8/src/cpu-profiler.h +302 -0
- data/lib/libv8/v8/src/cpu.h +69 -0
- data/lib/libv8/v8/src/d8-debug.cc +367 -0
- data/lib/libv8/v8/src/d8-debug.h +158 -0
- data/lib/libv8/v8/src/d8-posix.cc +695 -0
- data/lib/libv8/v8/src/d8-readline.cc +130 -0
- data/lib/libv8/v8/src/d8-windows.cc +42 -0
- data/lib/libv8/v8/src/d8.cc +803 -0
- data/lib/libv8/v8/src/d8.gyp +91 -0
- data/lib/libv8/v8/src/d8.h +235 -0
- data/lib/libv8/v8/src/d8.js +2798 -0
- data/lib/libv8/v8/src/data-flow.cc +66 -0
- data/lib/libv8/v8/src/data-flow.h +205 -0
- data/lib/libv8/v8/src/date.js +1103 -0
- data/lib/libv8/v8/src/dateparser-inl.h +127 -0
- data/lib/libv8/v8/src/dateparser.cc +178 -0
- data/lib/libv8/v8/src/dateparser.h +266 -0
- data/lib/libv8/v8/src/debug-agent.cc +447 -0
- data/lib/libv8/v8/src/debug-agent.h +129 -0
- data/lib/libv8/v8/src/debug-debugger.js +2569 -0
- data/lib/libv8/v8/src/debug.cc +3165 -0
- data/lib/libv8/v8/src/debug.h +1057 -0
- data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
- data/lib/libv8/v8/src/deoptimizer.h +602 -0
- data/lib/libv8/v8/src/disasm.h +80 -0
- data/lib/libv8/v8/src/disassembler.cc +343 -0
- data/lib/libv8/v8/src/disassembler.h +58 -0
- data/lib/libv8/v8/src/diy-fp.cc +58 -0
- data/lib/libv8/v8/src/diy-fp.h +117 -0
- data/lib/libv8/v8/src/double.h +238 -0
- data/lib/libv8/v8/src/dtoa.cc +103 -0
- data/lib/libv8/v8/src/dtoa.h +85 -0
- data/lib/libv8/v8/src/execution.cc +849 -0
- data/lib/libv8/v8/src/execution.h +297 -0
- data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
- data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
- data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
- data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
- data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
- data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
- data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
- data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
- data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
- data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
- data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
- data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
- data/lib/libv8/v8/src/factory.cc +1222 -0
- data/lib/libv8/v8/src/factory.h +442 -0
- data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
- data/lib/libv8/v8/src/fast-dtoa.h +83 -0
- data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
- data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
- data/lib/libv8/v8/src/flag-definitions.h +560 -0
- data/lib/libv8/v8/src/flags.cc +551 -0
- data/lib/libv8/v8/src/flags.h +79 -0
- data/lib/libv8/v8/src/frames-inl.h +247 -0
- data/lib/libv8/v8/src/frames.cc +1243 -0
- data/lib/libv8/v8/src/frames.h +870 -0
- data/lib/libv8/v8/src/full-codegen.cc +1374 -0
- data/lib/libv8/v8/src/full-codegen.h +771 -0
- data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
- data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
- data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
- data/lib/libv8/v8/src/gdb-jit.h +143 -0
- data/lib/libv8/v8/src/global-handles.cc +665 -0
- data/lib/libv8/v8/src/global-handles.h +284 -0
- data/lib/libv8/v8/src/globals.h +325 -0
- data/lib/libv8/v8/src/handles-inl.h +177 -0
- data/lib/libv8/v8/src/handles.cc +987 -0
- data/lib/libv8/v8/src/handles.h +382 -0
- data/lib/libv8/v8/src/hashmap.cc +230 -0
- data/lib/libv8/v8/src/hashmap.h +123 -0
- data/lib/libv8/v8/src/heap-inl.h +704 -0
- data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
- data/lib/libv8/v8/src/heap-profiler.h +397 -0
- data/lib/libv8/v8/src/heap.cc +5930 -0
- data/lib/libv8/v8/src/heap.h +2268 -0
- data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
- data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
- data/lib/libv8/v8/src/hydrogen.cc +6239 -0
- data/lib/libv8/v8/src/hydrogen.h +1202 -0
- data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
- data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
- data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
- data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
- data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
- data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
- data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
- data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
- data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
- data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
- data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
- data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
- data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
- data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
- data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
- data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
- data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
- data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
- data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
- data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
- data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
- data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
- data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
- data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
- data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
- data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
- data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
- data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
- data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
- data/lib/libv8/v8/src/ic-inl.h +130 -0
- data/lib/libv8/v8/src/ic.cc +2577 -0
- data/lib/libv8/v8/src/ic.h +736 -0
- data/lib/libv8/v8/src/inspector.cc +63 -0
- data/lib/libv8/v8/src/inspector.h +62 -0
- data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
- data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
- data/lib/libv8/v8/src/isolate-inl.h +50 -0
- data/lib/libv8/v8/src/isolate.cc +1869 -0
- data/lib/libv8/v8/src/isolate.h +1382 -0
- data/lib/libv8/v8/src/json-parser.cc +504 -0
- data/lib/libv8/v8/src/json-parser.h +161 -0
- data/lib/libv8/v8/src/json.js +342 -0
- data/lib/libv8/v8/src/jsregexp.cc +5385 -0
- data/lib/libv8/v8/src/jsregexp.h +1492 -0
- data/lib/libv8/v8/src/list-inl.h +212 -0
- data/lib/libv8/v8/src/list.h +174 -0
- data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
- data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
- data/lib/libv8/v8/src/lithium-allocator.h +630 -0
- data/lib/libv8/v8/src/lithium.cc +190 -0
- data/lib/libv8/v8/src/lithium.h +597 -0
- data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
- data/lib/libv8/v8/src/liveedit.cc +1691 -0
- data/lib/libv8/v8/src/liveedit.h +180 -0
- data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
- data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
- data/lib/libv8/v8/src/liveobjectlist.h +322 -0
- data/lib/libv8/v8/src/log-inl.h +59 -0
- data/lib/libv8/v8/src/log-utils.cc +428 -0
- data/lib/libv8/v8/src/log-utils.h +231 -0
- data/lib/libv8/v8/src/log.cc +1993 -0
- data/lib/libv8/v8/src/log.h +476 -0
- data/lib/libv8/v8/src/macro-assembler.h +120 -0
- data/lib/libv8/v8/src/macros.py +178 -0
- data/lib/libv8/v8/src/mark-compact.cc +3143 -0
- data/lib/libv8/v8/src/mark-compact.h +506 -0
- data/lib/libv8/v8/src/math.js +264 -0
- data/lib/libv8/v8/src/messages.cc +179 -0
- data/lib/libv8/v8/src/messages.h +113 -0
- data/lib/libv8/v8/src/messages.js +1096 -0
- data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
- data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
- data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
- data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
- data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
- data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
- data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
- data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
- data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
- data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
- data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
- data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
- data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
- data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
- data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
- data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
- data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
- data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
- data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
- data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
- data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
- data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
- data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
- data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
- data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
- data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
- data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
- data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
- data/lib/libv8/v8/src/mksnapshot.cc +328 -0
- data/lib/libv8/v8/src/natives.h +64 -0
- data/lib/libv8/v8/src/objects-debug.cc +738 -0
- data/lib/libv8/v8/src/objects-inl.h +4323 -0
- data/lib/libv8/v8/src/objects-printer.cc +829 -0
- data/lib/libv8/v8/src/objects-visiting.cc +148 -0
- data/lib/libv8/v8/src/objects-visiting.h +424 -0
- data/lib/libv8/v8/src/objects.cc +10585 -0
- data/lib/libv8/v8/src/objects.h +6838 -0
- data/lib/libv8/v8/src/parser.cc +4997 -0
- data/lib/libv8/v8/src/parser.h +765 -0
- data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
- data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
- data/lib/libv8/v8/src/platform-linux.cc +1149 -0
- data/lib/libv8/v8/src/platform-macos.cc +830 -0
- data/lib/libv8/v8/src/platform-nullos.cc +479 -0
- data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
- data/lib/libv8/v8/src/platform-posix.cc +424 -0
- data/lib/libv8/v8/src/platform-solaris.cc +762 -0
- data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
- data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
- data/lib/libv8/v8/src/platform-tls.h +50 -0
- data/lib/libv8/v8/src/platform-win32.cc +2021 -0
- data/lib/libv8/v8/src/platform.h +667 -0
- data/lib/libv8/v8/src/preparse-data-format.h +62 -0
- data/lib/libv8/v8/src/preparse-data.cc +183 -0
- data/lib/libv8/v8/src/preparse-data.h +225 -0
- data/lib/libv8/v8/src/preparser-api.cc +220 -0
- data/lib/libv8/v8/src/preparser.cc +1450 -0
- data/lib/libv8/v8/src/preparser.h +493 -0
- data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
- data/lib/libv8/v8/src/prettyprinter.h +223 -0
- data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
- data/lib/libv8/v8/src/profile-generator.cc +3098 -0
- data/lib/libv8/v8/src/profile-generator.h +1126 -0
- data/lib/libv8/v8/src/property.cc +105 -0
- data/lib/libv8/v8/src/property.h +365 -0
- data/lib/libv8/v8/src/proxy.js +83 -0
- data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
- data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
- data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
- data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
- data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
- data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
- data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
- data/lib/libv8/v8/src/regexp-stack.cc +111 -0
- data/lib/libv8/v8/src/regexp-stack.h +147 -0
- data/lib/libv8/v8/src/regexp.js +483 -0
- data/lib/libv8/v8/src/rewriter.cc +360 -0
- data/lib/libv8/v8/src/rewriter.h +50 -0
- data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
- data/lib/libv8/v8/src/runtime-profiler.h +201 -0
- data/lib/libv8/v8/src/runtime.cc +12227 -0
- data/lib/libv8/v8/src/runtime.h +652 -0
- data/lib/libv8/v8/src/runtime.js +649 -0
- data/lib/libv8/v8/src/safepoint-table.cc +256 -0
- data/lib/libv8/v8/src/safepoint-table.h +270 -0
- data/lib/libv8/v8/src/scanner-base.cc +952 -0
- data/lib/libv8/v8/src/scanner-base.h +670 -0
- data/lib/libv8/v8/src/scanner.cc +345 -0
- data/lib/libv8/v8/src/scanner.h +146 -0
- data/lib/libv8/v8/src/scopeinfo.cc +646 -0
- data/lib/libv8/v8/src/scopeinfo.h +254 -0
- data/lib/libv8/v8/src/scopes.cc +1150 -0
- data/lib/libv8/v8/src/scopes.h +507 -0
- data/lib/libv8/v8/src/serialize.cc +1574 -0
- data/lib/libv8/v8/src/serialize.h +589 -0
- data/lib/libv8/v8/src/shell.h +55 -0
- data/lib/libv8/v8/src/simulator.h +43 -0
- data/lib/libv8/v8/src/small-pointer-list.h +163 -0
- data/lib/libv8/v8/src/smart-pointer.h +109 -0
- data/lib/libv8/v8/src/snapshot-common.cc +83 -0
- data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
- data/lib/libv8/v8/src/snapshot.h +91 -0
- data/lib/libv8/v8/src/spaces-inl.h +529 -0
- data/lib/libv8/v8/src/spaces.cc +3145 -0
- data/lib/libv8/v8/src/spaces.h +2369 -0
- data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
- data/lib/libv8/v8/src/splay-tree.h +205 -0
- data/lib/libv8/v8/src/string-search.cc +41 -0
- data/lib/libv8/v8/src/string-search.h +568 -0
- data/lib/libv8/v8/src/string-stream.cc +592 -0
- data/lib/libv8/v8/src/string-stream.h +191 -0
- data/lib/libv8/v8/src/string.js +994 -0
- data/lib/libv8/v8/src/strtod.cc +440 -0
- data/lib/libv8/v8/src/strtod.h +40 -0
- data/lib/libv8/v8/src/stub-cache.cc +1965 -0
- data/lib/libv8/v8/src/stub-cache.h +924 -0
- data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
- data/lib/libv8/v8/src/token.cc +63 -0
- data/lib/libv8/v8/src/token.h +288 -0
- data/lib/libv8/v8/src/type-info.cc +507 -0
- data/lib/libv8/v8/src/type-info.h +272 -0
- data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
- data/lib/libv8/v8/src/unbound-queue.h +69 -0
- data/lib/libv8/v8/src/unicode-inl.h +238 -0
- data/lib/libv8/v8/src/unicode.cc +1624 -0
- data/lib/libv8/v8/src/unicode.h +280 -0
- data/lib/libv8/v8/src/uri.js +408 -0
- data/lib/libv8/v8/src/utils-inl.h +48 -0
- data/lib/libv8/v8/src/utils.cc +371 -0
- data/lib/libv8/v8/src/utils.h +800 -0
- data/lib/libv8/v8/src/v8-counters.cc +62 -0
- data/lib/libv8/v8/src/v8-counters.h +314 -0
- data/lib/libv8/v8/src/v8.cc +213 -0
- data/lib/libv8/v8/src/v8.h +131 -0
- data/lib/libv8/v8/src/v8checks.h +64 -0
- data/lib/libv8/v8/src/v8dll-main.cc +44 -0
- data/lib/libv8/v8/src/v8globals.h +512 -0
- data/lib/libv8/v8/src/v8memory.h +82 -0
- data/lib/libv8/v8/src/v8natives.js +1310 -0
- data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
- data/lib/libv8/v8/src/v8threads.cc +464 -0
- data/lib/libv8/v8/src/v8threads.h +165 -0
- data/lib/libv8/v8/src/v8utils.h +319 -0
- data/lib/libv8/v8/src/variables.cc +114 -0
- data/lib/libv8/v8/src/variables.h +167 -0
- data/lib/libv8/v8/src/version.cc +116 -0
- data/lib/libv8/v8/src/version.h +68 -0
- data/lib/libv8/v8/src/vm-state-inl.h +138 -0
- data/lib/libv8/v8/src/vm-state.h +71 -0
- data/lib/libv8/v8/src/win32-headers.h +96 -0
- data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
- data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
- data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
- data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
- data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
- data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
- data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
- data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
- data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
- data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
- data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
- data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
- data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
- data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
- data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
- data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
- data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
- data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
- data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
- data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
- data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
- data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
- data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
- data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
- data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
- data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
- data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
- data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
- data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
- data/lib/libv8/v8/src/zone-inl.h +140 -0
- data/lib/libv8/v8/src/zone.cc +196 -0
- data/lib/libv8/v8/src/zone.h +240 -0
- data/lib/libv8/v8/tools/codemap.js +265 -0
- data/lib/libv8/v8/tools/consarray.js +93 -0
- data/lib/libv8/v8/tools/csvparser.js +78 -0
- data/lib/libv8/v8/tools/disasm.py +92 -0
- data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
- data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
- data/lib/libv8/v8/tools/gcmole/README +62 -0
- data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
- data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
- data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
- data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
- data/lib/libv8/v8/tools/grokdump.py +841 -0
- data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
- data/lib/libv8/v8/tools/js2c.py +364 -0
- data/lib/libv8/v8/tools/jsmin.py +280 -0
- data/lib/libv8/v8/tools/linux-tick-processor +35 -0
- data/lib/libv8/v8/tools/ll_prof.py +942 -0
- data/lib/libv8/v8/tools/logreader.js +185 -0
- data/lib/libv8/v8/tools/mac-nm +18 -0
- data/lib/libv8/v8/tools/mac-tick-processor +6 -0
- data/lib/libv8/v8/tools/oom_dump/README +31 -0
- data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
- data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
- data/lib/libv8/v8/tools/presubmit.py +305 -0
- data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
- data/lib/libv8/v8/tools/profile.js +751 -0
- data/lib/libv8/v8/tools/profile_view.js +219 -0
- data/lib/libv8/v8/tools/run-valgrind.py +77 -0
- data/lib/libv8/v8/tools/splaytree.js +316 -0
- data/lib/libv8/v8/tools/stats-viewer.py +468 -0
- data/lib/libv8/v8/tools/test.py +1510 -0
- data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
- data/lib/libv8/v8/tools/tickprocessor.js +877 -0
- data/lib/libv8/v8/tools/utils.py +96 -0
- data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
- data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
- data/lib/libv8/version.rb +4 -0
- data/libv8.gemspec +31 -0
- metadata +800 -0
@@ -0,0 +1,4267 @@
|
|
1
|
+
// Copyright 2011 the V8 project authors. All rights reserved.
|
2
|
+
// Redistribution and use in source and binary forms, with or without
|
3
|
+
// modification, are permitted provided that the following conditions are
|
4
|
+
// met:
|
5
|
+
//
|
6
|
+
// * Redistributions of source code must retain the above copyright
|
7
|
+
// notice, this list of conditions and the following disclaimer.
|
8
|
+
// * Redistributions in binary form must reproduce the above
|
9
|
+
// copyright notice, this list of conditions and the following
|
10
|
+
// disclaimer in the documentation and/or other materials provided
|
11
|
+
// with the distribution.
|
12
|
+
// * Neither the name of Google Inc. nor the names of its
|
13
|
+
// contributors may be used to endorse or promote products derived
|
14
|
+
// from this software without specific prior written permission.
|
15
|
+
//
|
16
|
+
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17
|
+
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18
|
+
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19
|
+
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20
|
+
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21
|
+
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22
|
+
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23
|
+
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24
|
+
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25
|
+
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26
|
+
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27
|
+
|
28
|
+
#include "v8.h"
|
29
|
+
|
30
|
+
#if defined(V8_TARGET_ARCH_X64)
|
31
|
+
|
32
|
+
#include "x64/lithium-codegen-x64.h"
|
33
|
+
#include "code-stubs.h"
|
34
|
+
#include "stub-cache.h"
|
35
|
+
|
36
|
+
namespace v8 {
|
37
|
+
namespace internal {
|
38
|
+
|
39
|
+
|
40
|
+
// When invoking builtins, we need to record the safepoint in the middle of
|
41
|
+
// the invoke instruction sequence generated by the macro assembler.
|
42
|
+
// Helper passed to macro-assembler call sequences: pads the code stream
// before a call so safepoint entries cannot overlap, and records the
// safepoint right after the call returns.
class SafepointGenerator : public CallWrapper {
 public:
  // codegen: owning code generator (supplies the masm and safepoint table).
  // pointers: pointer map of live tagged values at the call site.
  // deoptimization_index: index recorded with the safepoint.
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) const {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the jump generated there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize;
    if (call_end < prev_jump_end) {
      // Pad with a nop so the end of this call lands past the previous
      // safepoint's reserved jump region.
      int padding_size = prev_jump_end - call_end;
      STATIC_ASSERT(kMinSafepointSize <= 9);  // One multibyte nop is enough.
      codegen_->masm()->nop(padding_size);
    }
  }

  virtual void AfterCall() const {
    // The safepoint is defined at the pc immediately after the call.
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  static const int kMinSafepointSize =
      MacroAssembler::kShortCallInstructionLength;
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};
|
76
|
+
|
77
|
+
|
78
|
+
#define __ masm()->
|
79
|
+
|
80
|
+
// Drives code generation for the whole chunk: prologue, instruction
// bodies, deferred code, deopt jump table, then the safepoint table.
// Short-circuits and returns false as soon as any phase aborts.
bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateJumpTable() &&
      GenerateSafepointTable();
}
|
90
|
+
|
91
|
+
|
92
|
+
// Transfers per-code metadata onto the finished Code object: stack slot
// count, safepoint table offset, and deoptimization data, then makes
// sure there is reloc space for later lazy deoptimization patching.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
|
99
|
+
|
100
|
+
|
101
|
+
// Marks code generation as failed.  Under --trace-bailout the
// printf-style message is printed along with the function's debug name.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
|
113
|
+
|
114
|
+
|
115
|
+
// Emits a printf-style comment into the generated code stream.
// No-op unless --code-comments is enabled.
void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  int length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
|
131
|
+
|
132
|
+
|
133
|
+
// Emits the function prologue: optional strict-mode receiver patching,
// standard frame setup (rbp, context, function), stack-slot
// reservation, optional local-context allocation with parameter
// copy-in, and optional call tracing.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // With --stop-at=<name>, break into the debugger on entry.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). rcx is zero for method calls and non-zero for function
  // calls.
  if (info_->is_strict_mode()) {
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
    __ bind(&ok);
  }

  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Debug builds fill the reserved slots with a zap value so stale
      // reads are easy to spot.
      __ Set(rax, slots);
      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
      Label loop;
      __ bind(&loop);
      __ push(kScratchRegister);
      __ decl(rax);
      __ j(not_zero, &loop);
    } else {
      __ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ movq(Operand(rsp, offset), rax);
      }
#endif
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have use a third register to avoid
        // clobbering rsi.
        __ movq(rcx, rsi);
        __ RecordWrite(rcx, context_offset, rax, rbx);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
|
235
|
+
|
236
|
+
|
237
|
+
// Emits native code for every instruction in the chunk, in order.
// Instructions that follow a label with a replacement (a block merged
// away by chunk building) are suppressed until the next live label.
bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      // A replaced label starts a dead block; stop emitting until a
      // live label is reached.
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}
|
256
|
+
|
257
|
+
|
258
|
+
// Peeks at the instruction that follows the current one, or returns
// NULL when the current instruction is the last in the chunk.
LInstruction* LCodeGen::GetNextInstruction() {
  int next_index = current_instruction_ + 1;
  if (next_index >= instructions_->length()) return NULL;
  return instructions_->at(next_index);
}
|
265
|
+
|
266
|
+
|
267
|
+
bool LCodeGen::GenerateJumpTable() {
|
268
|
+
for (int i = 0; i < jump_table_.length(); i++) {
|
269
|
+
__ bind(&jump_table_[i].label);
|
270
|
+
__ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
|
271
|
+
}
|
272
|
+
return !is_aborted();
|
273
|
+
}
|
274
|
+
|
275
|
+
|
276
|
+
// Emits the out-of-line code bodies queued by instructions during the
// main pass.  Each deferred body is bound at its entry label and ends
// with a jump back to its exit label in the main code stream.
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
|
290
|
+
|
291
|
+
|
292
|
+
// Pads the end of the code with int3 and emits the safepoint table.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  // Ensure that there is space at the end of the code to write a number
  // of jump instructions, as well as to afford writing a call near the end
  // of the code.
  // The jumps are used when there isn't room in the code stream to write
  // a long call instruction. Instead it writes a shorter call to a
  // jump instruction in the same code object.
  // The calls are used when lazy deoptimizing a function and calls to a
  // deoptimization function.
  int short_deopts = safepoints_.CountShortDeoptimizationIntervals(
      static_cast<unsigned>(MacroAssembler::kJumpInstructionLength));
  int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength;
  while (byte_count-- > 0) {
    __ int3();
  }
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
|
311
|
+
|
312
|
+
|
313
|
+
// Maps a register-allocator index onto the physical general-purpose
// register it denotes.
Register LCodeGen::ToRegister(int index) const {
  Register result = Register::FromAllocationIndex(index);
  return result;
}
|
316
|
+
|
317
|
+
|
318
|
+
// Maps a register-allocator index onto the physical XMM register it
// denotes.
XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  XMMRegister result = XMMRegister::FromAllocationIndex(index);
  return result;
}
|
321
|
+
|
322
|
+
|
323
|
+
// Returns the general-purpose register assigned to a register operand.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return Register::FromAllocationIndex(op->index());
}
|
327
|
+
|
328
|
+
|
329
|
+
// Returns the XMM register assigned to a double-register operand.
XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return XMMRegister::FromAllocationIndex(op->index());
}
|
333
|
+
|
334
|
+
|
335
|
+
// True when |op| is a constant whose literal was recorded with an
// integer-32 representation in the chunk.
bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  if (!op->IsConstantOperand()) return false;
  return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}
|
339
|
+
|
340
|
+
|
341
|
+
// True when |op| is a constant whose literal was recorded with a
// tagged representation in the chunk.
bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
  if (!op->IsConstantOperand()) return false;
  return chunk_->LookupLiteralRepresentation(op).IsTagged();
}
|
345
|
+
|
346
|
+
|
347
|
+
// Returns the int32 value of a constant operand.  The literal must have
// integer-32 representation, and its number value must round-trip
// through int32 exactly (checked in debug builds).
int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}
|
354
|
+
|
355
|
+
|
356
|
+
// Returns the handle for a constant operand; the literal must have a
// tagged representation.
Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> result = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return result;
}
|
361
|
+
|
362
|
+
|
363
|
+
// Translates a stack-slot or double-stack-slot operand into an
// rbp-relative memory Operand.  Non-negative indices are locals/spills
// below the fixed frame; negative indices are incoming parameters
// above the return address.
Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers. In X64 assembler, plain registers are not
  // representable as an Operand.
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(rbp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(rbp, -(index - 1) * kPointerSize);
  }
}
|
377
|
+
|
378
|
+
|
379
|
+
// Recursively serializes |environment| into |translation|, outermost
// frame first.  When spill information is available, the spilled copy
// of a register value is emitted as a duplicate ahead of the value
// itself so the deoptimizer can prefer the spilled location.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
|
416
|
+
|
417
|
+
|
418
|
+
// Appends one translation command describing where |op| lives (stack
// slot, argument slot, register, or literal) and whether the value is
// tagged.  A NULL |op| stands for the arguments object.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Arguments live in the pushed-outgoing-arguments area just past
    // the spill slots.
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
|
456
|
+
|
457
|
+
|
458
|
+
// Calls |code| and registers the lazy-deoptimization safepoint for
// |instr|.  |argc| is only meaningful for register safepoints.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode,
                               int argc) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode, argc);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
|
476
|
+
|
477
|
+
|
478
|
+
// Convenience wrapper: calls |code| with a simple safepoint and no
// extra stack arguments.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
}
|
483
|
+
|
484
|
+
|
485
|
+
// Calls a runtime function and records a simple lazy-deopt safepoint
// for |instr| (which must carry a pointer map).
void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);
}
|
496
|
+
|
497
|
+
|
498
|
+
// Runtime-call helper for deferred code: reloads the context from the
// frame, calls the runtime function with double registers saved, and
// records a with-registers safepoint (no deopt index).
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}
|
506
|
+
|
507
|
+
|
508
|
+
// Records the safepoint (simple or with-registers, per |safepoint_mode|)
// for a call instruction, tied to the environment execution should
// resume in after the call.
void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
                                          SafepointMode safepoint_mode,
                                          int argc) {
  // Create the environment to bailout to. If the call has side effects
  // execution has to continue after the call otherwise execution can continue
  // from a previous bailout point repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    ASSERT(argc == 0);
    RecordSafepoint(instr->pointer_map(),
                    deoptimization_environment->deoptimization_index());
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
    RecordSafepointWithRegisters(
        instr->pointer_map(),
        argc,
        deoptimization_environment->deoptimization_index());
  }
}
|
534
|
+
|
535
|
+
|
536
|
+
// Assigns a deoptimization index and translation to |environment| the
// first time it is seen; later calls for the same environment are
// no-ops.
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    // Count this environment plus all enclosing (inlined) frames.
    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}
|
562
|
+
|
563
|
+
|
564
|
+
// Emits a deoptimization exit: an unconditional jump to the eager
// deopt entry for no_condition, otherwise a conditional branch to a
// shared jump-table trampoline for that entry.
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    // Release-mode fallback for the assert above.
    Abort("bailout was not prepared");
    return;
  }

  if (cc == no_condition) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (jump_table_.is_empty() ||
        jump_table_.last().address != entry) {
      jump_table_.Add(JumpTableEntry(entry));
    }
    __ j(cc, &jump_table_.last().label);
  }
}
|
587
|
+
|
588
|
+
|
589
|
+
// Builds the DeoptimizationInputData array — translation byte array,
// literal array, OSR info, and one (ast id, translation index,
// arguments height) record per deopt point — and attaches it to
// |code|.  No-op when no deopt points were recorded.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}
|
620
|
+
|
621
|
+
|
622
|
+
// Interns |literal| in the deoptimization literals table, returning the
// index of an existing identical entry or of the newly appended one.
int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  const int count = deoptimization_literals_.length();
  for (int i = 0; i < count; ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return count;
}
|
630
|
+
|
631
|
+
|
632
|
+
void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
|
633
|
+
ASSERT(deoptimization_literals_.length() == 0);
|
634
|
+
|
635
|
+
const ZoneList<Handle<JSFunction> >* inlined_closures =
|
636
|
+
chunk()->inlined_closures();
|
637
|
+
|
638
|
+
for (int i = 0, length = inlined_closures->length();
|
639
|
+
i < length;
|
640
|
+
i++) {
|
641
|
+
DefineDeoptimizationLiteral(inlined_closures->at(i));
|
642
|
+
}
|
643
|
+
|
644
|
+
inlined_function_count_ = deoptimization_literals_.length();
|
645
|
+
}
|
646
|
+
|
647
|
+
|
648
|
+
// Core safepoint recorder: defines a safepoint of |kind| at the current
// pc and marks each stack-slot pointer in |pointers| — plus registers,
// when the kind includes them — as live.
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(kind == expected_safepoint_kind_);

  const ZoneList<LOperand*>* operands = pointers->operands();

  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register rsi always contains a pointer to the context.
    safepoint.DefinePointerRegister(rsi);
  }
}
|
672
|
+
|
673
|
+
|
674
|
+
// Records a simple safepoint: stack-slot pointers only, no arguments.
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}
|
678
|
+
|
679
|
+
|
680
|
+
// Convenience overload: a simple safepoint with an empty pointer map,
// for call sites that keep no live pointers in slots or registers.
void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}
|
684
|
+
|
685
|
+
|
686
|
+
// Convenience overload: a safepoint taken while all registers are saved
// (Safepoint::kWithRegisters), e.g. around runtime calls from deferred code.
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}
|
692
|
+
|
693
|
+
|
694
|
+
// Records a source position with the assembler's position recorder for
// debugging; no-op when debug info is disabled or the position is unknown.
void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}
|
698
|
+
|
699
|
+
|
700
|
+
// Emits a basic-block label: a disassembly comment identifying the block,
// the bound label itself, then the block's gap moves.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  // Track the current block so later branch emission can fall through.
  current_block_ = label->block_id();
  DoGap(label);
}
|
710
|
+
|
711
|
+
|
712
|
+
// Emits the code for a parallel move by delegating to the gap resolver.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
|
715
|
+
|
716
|
+
|
717
|
+
// Emits all parallel moves attached to a gap, in inner-position order.
// If the next instruction is a lazy bailout, records the current pc with
// the safepoint table so the bailout maps to the post-gap position.
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}
|
732
|
+
|
733
|
+
|
734
|
+
// An instruction gap is handled exactly like a plain gap.
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}
|
737
|
+
|
738
|
+
|
739
|
+
// Parameters are already in their frame slots on entry; no code is emitted.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
|
742
|
+
|
743
|
+
|
744
|
+
// Emits a call to the code stub selected by the instruction's major key.
// All of these stubs return their result in rax (asserted below).
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      // Plain string addition; no conversion flags.
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // TAGGED: the stub takes and returns tagged (heap number) values.
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
|
787
|
+
|
788
|
+
|
789
|
+
// OSR values are materialized by the OSR entry machinery; no code needed.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
|
792
|
+
|
793
|
+
|
794
|
+
// Emits code for integer modulus. Two strategies:
//  * Power-of-two constant divisor: mask with (|divisor| - 1), negating
//    around the mask for negative dividends so the result keeps the
//    dividend's sign; deopts on a -0 result if required.
//  * General case: fast paths for zero/small dividends and power-of-two
//    divisors, a short subtraction loop, then idivl as the slow path.
// Register constraints (asserted below): dividend in rax, result in rdx.
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // Only the magnitude of the divisor matters for the remainder.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ testl(dividend, dividend);
    __ j(not_sign, &positive_dividend, Label::kNear);
    // Negative dividend: compute -((-dividend) & (divisor - 1)).
    __ negl(dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ negl(dividend);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result from a negative dividend would be -0; deopt.
      __ j(not_zero, &done, Label::kNear);
      DeoptimizeIf(no_condition, instr->environment());
    }
    __ bind(&positive_dividend);
    __ andl(dividend, Immediate(divisor - 1));
    __ bind(&done);
  } else {
    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
    Register left_reg = ToRegister(instr->InputAt(0));
    Register right_reg = ToRegister(instr->InputAt(1));
    Register result_reg = ToRegister(instr->result());

    // idivl requires the dividend in rax and leaves the remainder in rdx.
    ASSERT(left_reg.is(rax));
    ASSERT(result_reg.is(rdx));
    ASSERT(!right_reg.is(rax));
    ASSERT(!right_reg.is(rdx));

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      __ testl(right_reg, right_reg);
      DeoptimizeIf(zero, instr->environment());
    }

    __ testl(left_reg, left_reg);
    __ j(zero, &remainder_eq_dividend, Label::kNear);
    __ j(sign, &slow, Label::kNear);

    __ testl(right_reg, right_reg);
    __ j(not_sign, &both_positive, Label::kNear);
    // The sign of the divisor doesn't matter.
    __ neg(right_reg);

    __ bind(&both_positive);
    // If the dividend is smaller than the nonnegative
    // divisor, the dividend is the result.
    __ cmpl(left_reg, right_reg);
    __ j(less, &remainder_eq_dividend, Label::kNear);

    // Check if the divisor is a PowerOfTwo integer.
    Register scratch = ToRegister(instr->TempAt(0));
    __ movl(scratch, right_reg);
    __ subl(scratch, Immediate(1));
    __ testl(scratch, right_reg);
    __ j(not_zero, &do_subtraction, Label::kNear);
    // Power of two: remainder is dividend & (divisor - 1).
    __ andl(left_reg, scratch);
    __ jmp(&remainder_eq_dividend, Label::kNear);

    __ bind(&do_subtraction);
    const int kUnfolds = 3;
    // Try a few subtractions of the dividend.
    __ movl(scratch, left_reg);
    for (int i = 0; i < kUnfolds; i++) {
      // Reduce the dividend by the divisor.
      __ subl(left_reg, right_reg);
      // Check if the dividend is less than the divisor.
      __ cmpl(left_reg, right_reg);
      __ j(less, &remainder_eq_dividend, Label::kNear);
    }
    // Subtraction loop didn't finish; restore the dividend for idivl.
    __ movl(left_reg, scratch);

    // Slow case, using idiv instruction.
    __ bind(&slow);
    // Sign extend eax to edx.
    // (We are using only the low 32 bits of the values.)
    __ cdq();

    // Check for (0 % -x) that will produce negative zero.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      Label positive_left;
      Label done;
      __ testl(left_reg, left_reg);
      __ j(not_sign, &positive_left, Label::kNear);
      __ idivl(right_reg);

      // Test the remainder for 0, because then the result would be -0.
      __ testl(result_reg, result_reg);
      __ j(not_zero, &done, Label::kNear);

      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&positive_left);
      __ idivl(right_reg);
      __ bind(&done);
    } else {
      __ idivl(right_reg);
    }
    __ jmp(&done, Label::kNear);

    __ bind(&remainder_eq_dividend);
    __ movl(result_reg, left_reg);

    __ bind(&done);
  }
}
|
903
|
+
|
904
|
+
|
905
|
+
// Emits code for integer division via idivl. Deoptimizes on division by
// zero, on a -0 result (0 / negative), on kMinInt / -1 overflow, and on a
// non-zero remainder (the result must be an exact integer).
// Register constraints (asserted below): dividend and result in rax.
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(rax));
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  // idivl clobbers rax/rdx, so the divisor must live elsewhere.
  ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));

  Register left_reg = rax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ testl(left_reg, left_reg);
    __ j(not_zero, &left_not_zero, Label::kNear);
    __ testl(right_reg, right_reg);
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmpl(left_reg, Immediate(kMinInt));
    __ j(not_zero, &left_not_min_int, Label::kNear);
    __ cmpl(right_reg, Immediate(-1));
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to rdx.
  __ cdq();
  __ idivl(right_reg);

  // Deoptimize if remainder is not 0.
  __ testl(rdx, rdx);
  DeoptimizeIf(not_zero, instr->environment());
}
|
949
|
+
|
950
|
+
|
951
|
+
// Emits code for integer multiplication. Constant right operands get
// strength-reduced forms (neg, xor, add, lea, shifts) when overflow
// checking allows; otherwise imull is used. Deoptimizes on overflow and,
// when required, on a -0 result (zero result with a negative operand —
// kScratchRegister holds the original left value for that sign test).
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Save the original left operand; it is overwritten by the multiply
    // but needed afterwards for the minus-zero sign check.
    __ movl(kScratchRegister, left);
  }

  bool can_overflow =
      instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  if (right->IsConstantOperand()) {
    int right_value = ToInteger32(LConstantOperand::cast(right));
    if (right_value == -1) {
      __ negl(left);
    } else if (right_value == 0) {
      __ xorl(left, left);
    } else if (right_value == 2) {
      __ addl(left, left);
    } else if (!can_overflow) {
      // If the multiplication is known to not overflow, we
      // can use operations that don't set the overflow flag
      // correctly.
      switch (right_value) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ leal(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shll(left, Immediate(2));
          break;
        case 5:
          __ leal(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shll(left, Immediate(3));
          break;
        case 9:
          __ leal(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shll(left, Immediate(4));
          break;
        default:
          __ imull(left, left, Immediate(right_value));
          break;
      }
    } else {
      __ imull(left, left, Immediate(right_value));
    }
  } else if (right->IsStackSlot()) {
    __ imull(left, ToOperand(right));
  } else {
    __ imull(left, ToRegister(right));
  }

  if (can_overflow) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ testl(left, left);
    __ j(not_zero, &done, Label::kNear);
    if (right->IsConstantOperand()) {
      // With a non-positive constant right operand, a zero result means
      // the left operand was zero or negative: always -0 territory.
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else if (right->IsStackSlot()) {
      __ or_(kScratchRegister, ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(kScratchRegister, ToRegister(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}
|
1032
|
+
|
1033
|
+
|
1034
|
+
// Emits code for bitwise AND/OR/XOR. The left operand doubles as the
// result (asserted); the right operand may be a constant, stack slot, or
// register, each handled with the matching addressing form.
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), Immediate(right_operand));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    ASSERT(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), ToRegister(right));
        break;
      case Token::BIT_XOR:
        __ xorl(ToRegister(left), ToRegister(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
|
1089
|
+
|
1090
|
+
|
1091
|
+
// Emits code for shifts (SAR/SHR/SHL). A register shift amount must be in
// rcx (the x64 variable-shift register, asserted); constant amounts are
// masked to 5 bits. SHR can deoptimize because a logical shift of a
// negative value by 0 would produce a result outside the int32 range when
// interpreted as unsigned.
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(rcx));

    switch (instr->op()) {
      case Token::SAR:
        __ sarl_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shrl_cl(ToRegister(left));
        if (instr->can_deopt()) {
          // A negative result after an unsigned shift means the value
          // cannot be represented as an int32; deopt.
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        }
        break;
      case Token::SHL:
        __ shll_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    // x64 shifts only use the low 5 bits of the count for 32-bit operands.
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sarl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          // x >>> 0 of a negative x is not an int32; deopt on sign.
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr->environment());
        } else {
          __ shrl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shll(ToRegister(left), Immediate(shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
|
1145
|
+
|
1146
|
+
|
1147
|
+
// Emits code for integer subtraction; left operand doubles as the result.
// Deoptimizes on overflow when the hydrogen instruction requires it.
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ subl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ subl(ToRegister(left), ToRegister(right));
  } else {
    __ subl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}
|
1165
|
+
|
1166
|
+
|
1167
|
+
// Materializes an int32 constant into the result register.
void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), instr->value());
}
|
1171
|
+
|
1172
|
+
|
1173
|
+
// Materializes a double constant into an XMM register, going through a
// temp GP register with the raw bit pattern for the general case.
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = BitCast<uint64_t, double>(v);
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0.
  if (int_val == 0) {
    __ xorps(res, res);
  } else {
    Register tmp = ToRegister(instr->TempAt(0));
    __ Set(tmp, int_val);
    __ movq(res, tmp);
  }
}
|
1188
|
+
|
1189
|
+
|
1190
|
+
// Materializes a tagged constant (handle) into the result register.
void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Move(ToRegister(instr->result()), instr->value());
}
|
1194
|
+
|
1195
|
+
|
1196
|
+
// Loads a JSArray's length field into the result register.
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, JSArray::kLengthOffset));
}
|
1201
|
+
|
1202
|
+
|
1203
|
+
// Loads a FixedArray's length field into the result register.
void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
}
|
1208
|
+
|
1209
|
+
|
1210
|
+
// Loads an external array's length field (a 32-bit value, hence movl)
// into the result register.
void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ movl(result, FieldOperand(array, ExternalPixelArray::kLengthOffset));
}
|
1215
|
+
|
1216
|
+
|
1217
|
+
// Emits code for value-of: a JSValue wrapper yields its wrapped value;
// smis and non-JSValue objects are returned unchanged (input == result,
// asserted, so those paths need no move).
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(input, &done, Label::kNear);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
  __ j(not_equal, &done, Label::kNear);
  __ movq(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
|
1232
|
+
|
1233
|
+
|
1234
|
+
// Emits code for bitwise NOT, in place (input == result, asserted).
void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}
|
1239
|
+
|
1240
|
+
|
1241
|
+
// Emits a throw: pushes the value and calls the Throw runtime function,
// which does not return. In debug builds, an int3 trap marks the
// (unreachable) fall-through.
void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToRegister(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}
|
1250
|
+
|
1251
|
+
|
1252
|
+
// Emits code for integer addition; left operand doubles as the result.
// Deoptimizes on overflow when the hydrogen instruction requires it.
void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ addl(ToRegister(left),
            Immediate(ToInteger32(LConstantOperand::cast(right))));
  } else if (right->IsRegister()) {
    __ addl(ToRegister(left), ToRegister(right));
  } else {
    __ addl(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}
|
1270
|
+
|
1271
|
+
|
1272
|
+
// Emits code for double arithmetic in XMM registers. ADD/SUB/MUL/DIV are
// computed in place (left == result, asserted); MOD goes through a C
// function call with operands in xmm0/xmm1 and restores rsi (context)
// afterwards since the C call may clobber it.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // All operations except MOD are computed in-place.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD:
      __ PrepareCallCFunction(2);
      // C calling convention: first double argument in xmm0.
      __ movaps(xmm0, left);
      ASSERT(right.is(xmm1));
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
      // Reload the context register, clobbered by the C call.
      __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
      __ movaps(result, xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
}
|
1305
|
+
|
1306
|
+
|
1307
|
+
// Emits a generic (tagged) binary operation via the BinaryOpStub.
// Stub calling convention (asserted): left in rdx, right in rax,
// result in rax.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
  ASSERT(ToRegister(instr->InputAt(1)).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
|
1315
|
+
|
1316
|
+
|
1317
|
+
int LCodeGen::GetNextEmittedBlock(int block) {
|
1318
|
+
for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
|
1319
|
+
LLabel* label = chunk_->GetLabel(i);
|
1320
|
+
if (!label->HasReplacement()) return i;
|
1321
|
+
}
|
1322
|
+
return -1;
|
1323
|
+
}
|
1324
|
+
|
1325
|
+
|
1326
|
+
// Emits a conditional branch to two blocks, eliding jumps to the block
// that is emitted next: same target -> one unconditional goto; true target
// falls through -> jump to false on the negated condition; false target
// falls through -> jump to true; otherwise jump to true, then (unless the
// condition is `always`) unconditionally to false.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    if (cc != always) {
      __ jmp(chunk_->GetAssemblyLabel(right_block));
    }
  }
}
|
1344
|
+
|
1345
|
+
|
1346
|
+
// Emits code branching on the truth value of an input. Integer and double
// inputs get direct compares; tagged inputs use type feedback (boolean,
// smi) when available, otherwise an inline sequence checks undefined,
// true, false, smi zero, smis, and heap numbers before falling back to
// the generic ToBooleanStub.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ testl(reg, reg);
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->type();
    if (type.IsBoolean()) {
      // Known boolean: true iff it is the true value.
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      // Known smi: true iff non-zero.
      __ SmiCompare(reg, Smi::FromInt(0));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
      __ j(equal, false_label);
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      __ j(equal, true_label);
      __ CompareRoot(reg, Heap::kFalseValueRootIndex);
      __ j(equal, false_label);
      __ Cmp(reg, Smi::FromInt(0));
      __ j(equal, false_label);
      // Any other smi is truthy.
      __ JumpIfSmi(reg, true_label);

      // Test for double values. Plus/minus zero and NaN are false.
      Label call_stub;
      __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
                     Heap::kHeapNumberMapRootIndex);
      __ j(not_equal, &call_stub, Label::kNear);

      // HeapNumber => false iff +0, -0, or NaN. These three cases set the
      // zero flag when compared to zero using ucomisd.
      __ xorps(xmm0, xmm0);
      __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      // Save and restore all registers around the stub call; the branch
      // decision is taken from rax before Popad restores it.
      __ Pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ testq(rax, rax);
      __ Popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}
|
1410
|
+
|
1411
|
+
|
1412
|
+
// Emits an unconditional goto to `block`, omitting the jump entirely when
// the target is the next emitted block. When a deferred stack check is
// supplied, the jump first tests the stack limit and detours through the
// deferred code on overflow.
void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}
|
1427
|
+
|
1428
|
+
|
1429
|
+
// Deferred (out-of-line) slow path for a goto's stack check: calls the
// StackGuard runtime function with all registers saved.
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}
|
1433
|
+
|
1434
|
+
|
1435
|
+
// Emits an unconditional goto, creating a deferred stack-check when the
// instruction requires one (e.g. loop back edges) so the slow path stays
// out of line.
void LCodeGen::DoGoto(LGoto* instr) {
  // Local deferred-code adapter that routes Generate() to
  // DoDeferredStackCheck for this instruction.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}
|
1451
|
+
|
1452
|
+
|
1453
|
+
// Maps a comparison token to the x64 condition code that implements it,
// selecting the unsigned flavor of the relational conditions when
// requested. IN/INSTANCEOF have no direct condition and must not reach
// this function.
inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      // Equality is sign-agnostic.
      return equal;
    case Token::LT:
      return is_unsigned ? below : less;
    case Token::GT:
      return is_unsigned ? above : greater;
    case Token::LTE:
      return is_unsigned ? below_equal : less_equal;
    case Token::GTE:
      return is_unsigned ? above_equal : greater_equal;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
      return no_condition;
  }
}
|
1479
|
+
|
1480
|
+
|
1481
|
+
// Emits a single 32-bit compare of the two operands, picking the cmpl
// addressing form that matches the operand kinds. A memory left operand
// only occurs together with a constant right operand.
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  if (right->IsConstantOperand()) {
    // Constant on the right: compare against an immediate.
    const int32_t constant = ToInteger32(LConstantOperand::cast(right));
    if (left->IsRegister()) {
      __ cmpl(ToRegister(left), Immediate(constant));
    } else {
      __ cmpl(ToOperand(left), Immediate(constant));
    }
    return;
  }
  // Non-constant right: the left operand is a register.
  if (right->IsRegister()) {
    __ cmpl(ToRegister(left), ToRegister(right));
  } else {
    __ cmpl(ToRegister(left), ToOperand(right));
  }
}
|
1495
|
+
|
1496
|
+
|
1497
|
+
// Emits code materializing a comparison result as true/false. Double
// comparisons route NaN (parity flag set) to the unordered path, which
// produces false; integer comparisons go through EmitCmpI.
void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  Label unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered, Label::kNear);
  } else {
    EmitCmpI(left, right);
  }

  Label done;
  // Double comparisons use the unsigned condition variants.
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ j(cc, &done, Label::kNear);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}
|
1521
|
+
|
1522
|
+
|
1523
|
+
// Branch form of DoCmpID: compares the operands and branches directly,
// sending NaN (parity set) double comparisons to the false block.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
|
1541
|
+
|
1542
|
+
|
1543
|
+
// Materializes object identity comparison: true iff both registers hold
// the same pointer (full 64-bit compare).
void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Label different, done;
  __ cmpq(left, right);
  __ j(not_equal, &different, Label::kNear);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&different);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
|
1557
|
+
|
1558
|
+
|
1559
|
+
void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
|
1560
|
+
Register left = ToRegister(instr->InputAt(0));
|
1561
|
+
Register right = ToRegister(instr->InputAt(1));
|
1562
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1563
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1564
|
+
|
1565
|
+
__ cmpq(left, right);
|
1566
|
+
EmitBranch(true_block, false_block, equal);
|
1567
|
+
}
|
1568
|
+
|
1569
|
+
|
1570
|
+
void LCodeGen::DoCmpSymbolEq(LCmpSymbolEq* instr) {
|
1571
|
+
Register left = ToRegister(instr->InputAt(0));
|
1572
|
+
Register right = ToRegister(instr->InputAt(1));
|
1573
|
+
Register result = ToRegister(instr->result());
|
1574
|
+
|
1575
|
+
Label done;
|
1576
|
+
__ cmpq(left, right);
|
1577
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1578
|
+
__ j(not_equal, &done, Label::kNear);
|
1579
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1580
|
+
__ bind(&done);
|
1581
|
+
}
|
1582
|
+
|
1583
|
+
|
1584
|
+
void LCodeGen::DoCmpSymbolEqAndBranch(LCmpSymbolEqAndBranch* instr) {
|
1585
|
+
Register left = ToRegister(instr->InputAt(0));
|
1586
|
+
Register right = ToRegister(instr->InputAt(1));
|
1587
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1588
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1589
|
+
|
1590
|
+
__ cmpq(left, right);
|
1591
|
+
EmitBranch(true_block, false_block, equal);
|
1592
|
+
}
|
1593
|
+
|
1594
|
+
|
1595
|
+
void LCodeGen::DoIsNull(LIsNull* instr) {
|
1596
|
+
Register reg = ToRegister(instr->InputAt(0));
|
1597
|
+
Register result = ToRegister(instr->result());
|
1598
|
+
|
1599
|
+
// If the expression is known to be a smi, then it's
|
1600
|
+
// definitely not null. Materialize false.
|
1601
|
+
// Consider adding other type and representation tests too.
|
1602
|
+
if (instr->hydrogen()->value()->type().IsSmi()) {
|
1603
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1604
|
+
return;
|
1605
|
+
}
|
1606
|
+
|
1607
|
+
__ CompareRoot(reg, Heap::kNullValueRootIndex);
|
1608
|
+
if (instr->is_strict()) {
|
1609
|
+
ASSERT(Heap::kTrueValueRootIndex >= 0);
|
1610
|
+
__ movl(result, Immediate(Heap::kTrueValueRootIndex));
|
1611
|
+
Label load;
|
1612
|
+
__ j(equal, &load, Label::kNear);
|
1613
|
+
__ Set(result, Heap::kFalseValueRootIndex);
|
1614
|
+
__ bind(&load);
|
1615
|
+
__ LoadRootIndexed(result, result, 0);
|
1616
|
+
} else {
|
1617
|
+
Label false_value, true_value, done;
|
1618
|
+
__ j(equal, &true_value, Label::kNear);
|
1619
|
+
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
|
1620
|
+
__ j(equal, &true_value, Label::kNear);
|
1621
|
+
__ JumpIfSmi(reg, &false_value, Label::kNear);
|
1622
|
+
// Check for undetectable objects by looking in the bit field in
|
1623
|
+
// the map. The object has already been smi checked.
|
1624
|
+
Register scratch = result;
|
1625
|
+
__ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
|
1626
|
+
__ testb(FieldOperand(scratch, Map::kBitFieldOffset),
|
1627
|
+
Immediate(1 << Map::kIsUndetectable));
|
1628
|
+
__ j(not_zero, &true_value, Label::kNear);
|
1629
|
+
__ bind(&false_value);
|
1630
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1631
|
+
__ jmp(&done, Label::kNear);
|
1632
|
+
__ bind(&true_value);
|
1633
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1634
|
+
__ bind(&done);
|
1635
|
+
}
|
1636
|
+
}
|
1637
|
+
|
1638
|
+
|
1639
|
+
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
|
1640
|
+
Register reg = ToRegister(instr->InputAt(0));
|
1641
|
+
|
1642
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1643
|
+
|
1644
|
+
if (instr->hydrogen()->representation().IsSpecialization() ||
|
1645
|
+
instr->hydrogen()->type().IsSmi()) {
|
1646
|
+
// If the expression is known to untagged or smi, then it's definitely
|
1647
|
+
// not null, and it can't be a an undetectable object.
|
1648
|
+
// Jump directly to the false block.
|
1649
|
+
EmitGoto(false_block);
|
1650
|
+
return;
|
1651
|
+
}
|
1652
|
+
|
1653
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1654
|
+
|
1655
|
+
__ CompareRoot(reg, Heap::kNullValueRootIndex);
|
1656
|
+
if (instr->is_strict()) {
|
1657
|
+
EmitBranch(true_block, false_block, equal);
|
1658
|
+
} else {
|
1659
|
+
Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
1660
|
+
Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
1661
|
+
__ j(equal, true_label);
|
1662
|
+
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
|
1663
|
+
__ j(equal, true_label);
|
1664
|
+
__ JumpIfSmi(reg, false_label);
|
1665
|
+
// Check for undetectable objects by looking in the bit field in
|
1666
|
+
// the map. The object has already been smi checked.
|
1667
|
+
Register scratch = ToRegister(instr->TempAt(0));
|
1668
|
+
__ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
|
1669
|
+
__ testb(FieldOperand(scratch, Map::kBitFieldOffset),
|
1670
|
+
Immediate(1 << Map::kIsUndetectable));
|
1671
|
+
EmitBranch(true_block, false_block, not_zero);
|
1672
|
+
}
|
1673
|
+
}
|
1674
|
+
|
1675
|
+
|
1676
|
+
Condition LCodeGen::EmitIsObject(Register input,
|
1677
|
+
Label* is_not_object,
|
1678
|
+
Label* is_object) {
|
1679
|
+
ASSERT(!input.is(kScratchRegister));
|
1680
|
+
|
1681
|
+
__ JumpIfSmi(input, is_not_object);
|
1682
|
+
|
1683
|
+
__ CompareRoot(input, Heap::kNullValueRootIndex);
|
1684
|
+
__ j(equal, is_object);
|
1685
|
+
|
1686
|
+
__ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
|
1687
|
+
// Undetectable objects behave like undefined.
|
1688
|
+
__ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
|
1689
|
+
Immediate(1 << Map::kIsUndetectable));
|
1690
|
+
__ j(not_zero, is_not_object);
|
1691
|
+
|
1692
|
+
__ movzxbl(kScratchRegister,
|
1693
|
+
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
|
1694
|
+
__ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
|
1695
|
+
__ j(below, is_not_object);
|
1696
|
+
__ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
|
1697
|
+
return below_equal;
|
1698
|
+
}
|
1699
|
+
|
1700
|
+
|
1701
|
+
void LCodeGen::DoIsObject(LIsObject* instr) {
|
1702
|
+
Register reg = ToRegister(instr->InputAt(0));
|
1703
|
+
Register result = ToRegister(instr->result());
|
1704
|
+
Label is_false, is_true, done;
|
1705
|
+
|
1706
|
+
Condition true_cond = EmitIsObject(reg, &is_false, &is_true);
|
1707
|
+
__ j(true_cond, &is_true);
|
1708
|
+
|
1709
|
+
__ bind(&is_false);
|
1710
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1711
|
+
__ jmp(&done);
|
1712
|
+
|
1713
|
+
__ bind(&is_true);
|
1714
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1715
|
+
|
1716
|
+
__ bind(&done);
|
1717
|
+
}
|
1718
|
+
|
1719
|
+
|
1720
|
+
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
|
1721
|
+
Register reg = ToRegister(instr->InputAt(0));
|
1722
|
+
|
1723
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1724
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1725
|
+
Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
1726
|
+
Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
1727
|
+
|
1728
|
+
Condition true_cond = EmitIsObject(reg, false_label, true_label);
|
1729
|
+
|
1730
|
+
EmitBranch(true_block, false_block, true_cond);
|
1731
|
+
}
|
1732
|
+
|
1733
|
+
|
1734
|
+
void LCodeGen::DoIsSmi(LIsSmi* instr) {
|
1735
|
+
LOperand* input_operand = instr->InputAt(0);
|
1736
|
+
Register result = ToRegister(instr->result());
|
1737
|
+
if (input_operand->IsRegister()) {
|
1738
|
+
Register input = ToRegister(input_operand);
|
1739
|
+
__ CheckSmiToIndicator(result, input);
|
1740
|
+
} else {
|
1741
|
+
Operand input = ToOperand(instr->InputAt(0));
|
1742
|
+
__ CheckSmiToIndicator(result, input);
|
1743
|
+
}
|
1744
|
+
// result is zero if input is a smi, and one otherwise.
|
1745
|
+
ASSERT(Heap::kFalseValueRootIndex == Heap::kTrueValueRootIndex + 1);
|
1746
|
+
__ LoadRootIndexed(result, result, Heap::kTrueValueRootIndex);
|
1747
|
+
}
|
1748
|
+
|
1749
|
+
|
1750
|
+
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
|
1751
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1752
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1753
|
+
|
1754
|
+
Condition is_smi;
|
1755
|
+
if (instr->InputAt(0)->IsRegister()) {
|
1756
|
+
Register input = ToRegister(instr->InputAt(0));
|
1757
|
+
is_smi = masm()->CheckSmi(input);
|
1758
|
+
} else {
|
1759
|
+
Operand input = ToOperand(instr->InputAt(0));
|
1760
|
+
is_smi = masm()->CheckSmi(input);
|
1761
|
+
}
|
1762
|
+
EmitBranch(true_block, false_block, is_smi);
|
1763
|
+
}
|
1764
|
+
|
1765
|
+
|
1766
|
+
void LCodeGen::DoIsUndetectable(LIsUndetectable* instr) {
|
1767
|
+
Register input = ToRegister(instr->InputAt(0));
|
1768
|
+
Register result = ToRegister(instr->result());
|
1769
|
+
|
1770
|
+
ASSERT(instr->hydrogen()->value()->representation().IsTagged());
|
1771
|
+
Label false_label, done;
|
1772
|
+
__ JumpIfSmi(input, &false_label);
|
1773
|
+
__ movq(result, FieldOperand(input, HeapObject::kMapOffset));
|
1774
|
+
__ testb(FieldOperand(result, Map::kBitFieldOffset),
|
1775
|
+
Immediate(1 << Map::kIsUndetectable));
|
1776
|
+
__ j(zero, &false_label);
|
1777
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1778
|
+
__ jmp(&done);
|
1779
|
+
__ bind(&false_label);
|
1780
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1781
|
+
__ bind(&done);
|
1782
|
+
}
|
1783
|
+
|
1784
|
+
|
1785
|
+
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
|
1786
|
+
Register input = ToRegister(instr->InputAt(0));
|
1787
|
+
Register temp = ToRegister(instr->TempAt(0));
|
1788
|
+
|
1789
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1790
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1791
|
+
|
1792
|
+
__ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
|
1793
|
+
__ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
|
1794
|
+
__ testb(FieldOperand(temp, Map::kBitFieldOffset),
|
1795
|
+
Immediate(1 << Map::kIsUndetectable));
|
1796
|
+
EmitBranch(true_block, false_block, not_zero);
|
1797
|
+
}
|
1798
|
+
|
1799
|
+
|
1800
|
+
static InstanceType TestType(HHasInstanceType* instr) {
|
1801
|
+
InstanceType from = instr->from();
|
1802
|
+
InstanceType to = instr->to();
|
1803
|
+
if (from == FIRST_TYPE) return to;
|
1804
|
+
ASSERT(from == to || to == LAST_TYPE);
|
1805
|
+
return from;
|
1806
|
+
}
|
1807
|
+
|
1808
|
+
|
1809
|
+
static Condition BranchCondition(HHasInstanceType* instr) {
|
1810
|
+
InstanceType from = instr->from();
|
1811
|
+
InstanceType to = instr->to();
|
1812
|
+
if (from == to) return equal;
|
1813
|
+
if (to == LAST_TYPE) return above_equal;
|
1814
|
+
if (from == FIRST_TYPE) return below_equal;
|
1815
|
+
UNREACHABLE();
|
1816
|
+
return equal;
|
1817
|
+
}
|
1818
|
+
|
1819
|
+
|
1820
|
+
void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
|
1821
|
+
Register input = ToRegister(instr->InputAt(0));
|
1822
|
+
Register result = ToRegister(instr->result());
|
1823
|
+
|
1824
|
+
ASSERT(instr->hydrogen()->value()->representation().IsTagged());
|
1825
|
+
__ testl(input, Immediate(kSmiTagMask));
|
1826
|
+
Label done, is_false;
|
1827
|
+
__ j(zero, &is_false);
|
1828
|
+
__ CmpObjectType(input, TestType(instr->hydrogen()), result);
|
1829
|
+
__ j(NegateCondition(BranchCondition(instr->hydrogen())),
|
1830
|
+
&is_false, Label::kNear);
|
1831
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1832
|
+
__ jmp(&done, Label::kNear);
|
1833
|
+
__ bind(&is_false);
|
1834
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1835
|
+
__ bind(&done);
|
1836
|
+
}
|
1837
|
+
|
1838
|
+
|
1839
|
+
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
|
1840
|
+
Register input = ToRegister(instr->InputAt(0));
|
1841
|
+
|
1842
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1843
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1844
|
+
|
1845
|
+
Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
1846
|
+
|
1847
|
+
__ JumpIfSmi(input, false_label);
|
1848
|
+
|
1849
|
+
__ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
|
1850
|
+
EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
|
1851
|
+
}
|
1852
|
+
|
1853
|
+
|
1854
|
+
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
|
1855
|
+
Register input = ToRegister(instr->InputAt(0));
|
1856
|
+
Register result = ToRegister(instr->result());
|
1857
|
+
|
1858
|
+
if (FLAG_debug_code) {
|
1859
|
+
__ AbortIfNotString(input);
|
1860
|
+
}
|
1861
|
+
|
1862
|
+
__ movl(result, FieldOperand(input, String::kHashFieldOffset));
|
1863
|
+
ASSERT(String::kHashShift >= kSmiTagSize);
|
1864
|
+
__ IndexFromHash(result, result);
|
1865
|
+
}
|
1866
|
+
|
1867
|
+
|
1868
|
+
void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
|
1869
|
+
Register input = ToRegister(instr->InputAt(0));
|
1870
|
+
Register result = ToRegister(instr->result());
|
1871
|
+
|
1872
|
+
ASSERT(instr->hydrogen()->value()->representation().IsTagged());
|
1873
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1874
|
+
__ testl(FieldOperand(input, String::kHashFieldOffset),
|
1875
|
+
Immediate(String::kContainsCachedArrayIndexMask));
|
1876
|
+
Label done;
|
1877
|
+
__ j(zero, &done, Label::kNear);
|
1878
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1879
|
+
__ bind(&done);
|
1880
|
+
}
|
1881
|
+
|
1882
|
+
|
1883
|
+
void LCodeGen::DoHasCachedArrayIndexAndBranch(
|
1884
|
+
LHasCachedArrayIndexAndBranch* instr) {
|
1885
|
+
Register input = ToRegister(instr->InputAt(0));
|
1886
|
+
|
1887
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1888
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1889
|
+
|
1890
|
+
__ testl(FieldOperand(input, String::kHashFieldOffset),
|
1891
|
+
Immediate(String::kContainsCachedArrayIndexMask));
|
1892
|
+
EmitBranch(true_block, false_block, equal);
|
1893
|
+
}
|
1894
|
+
|
1895
|
+
|
1896
|
+
// Branches to a label or falls through with the answer in the z flag.
|
1897
|
+
// Trashes the temp register and possibly input (if it and temp are aliased).
|
1898
|
+
void LCodeGen::EmitClassOfTest(Label* is_true,
|
1899
|
+
Label* is_false,
|
1900
|
+
Handle<String> class_name,
|
1901
|
+
Register input,
|
1902
|
+
Register temp) {
|
1903
|
+
__ JumpIfSmi(input, is_false);
|
1904
|
+
__ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
|
1905
|
+
__ j(below, is_false);
|
1906
|
+
|
1907
|
+
// Map is now in temp.
|
1908
|
+
// Functions have class 'Function'.
|
1909
|
+
__ CmpInstanceType(temp, JS_FUNCTION_TYPE);
|
1910
|
+
if (class_name->IsEqualTo(CStrVector("Function"))) {
|
1911
|
+
__ j(equal, is_true);
|
1912
|
+
} else {
|
1913
|
+
__ j(equal, is_false);
|
1914
|
+
}
|
1915
|
+
|
1916
|
+
// Check if the constructor in the map is a function.
|
1917
|
+
__ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
|
1918
|
+
|
1919
|
+
// As long as JS_FUNCTION_TYPE is the last instance type and it is
|
1920
|
+
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
|
1921
|
+
// LAST_JS_OBJECT_TYPE.
|
1922
|
+
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
|
1923
|
+
ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
|
1924
|
+
|
1925
|
+
// Objects with a non-function constructor have class 'Object'.
|
1926
|
+
__ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
|
1927
|
+
if (class_name->IsEqualTo(CStrVector("Object"))) {
|
1928
|
+
__ j(not_equal, is_true);
|
1929
|
+
} else {
|
1930
|
+
__ j(not_equal, is_false);
|
1931
|
+
}
|
1932
|
+
|
1933
|
+
// temp now contains the constructor function. Grab the
|
1934
|
+
// instance class name from there.
|
1935
|
+
__ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
|
1936
|
+
__ movq(temp, FieldOperand(temp,
|
1937
|
+
SharedFunctionInfo::kInstanceClassNameOffset));
|
1938
|
+
// The class name we are testing against is a symbol because it's a literal.
|
1939
|
+
// The name in the constructor is a symbol because of the way the context is
|
1940
|
+
// booted. This routine isn't expected to work for random API-created
|
1941
|
+
// classes and it doesn't have to because you can't access it with natives
|
1942
|
+
// syntax. Since both sides are symbols it is sufficient to use an identity
|
1943
|
+
// comparison.
|
1944
|
+
ASSERT(class_name->IsSymbol());
|
1945
|
+
__ Cmp(temp, class_name);
|
1946
|
+
// End with the answer in the z flag.
|
1947
|
+
}
|
1948
|
+
|
1949
|
+
|
1950
|
+
void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
|
1951
|
+
Register input = ToRegister(instr->InputAt(0));
|
1952
|
+
Register result = ToRegister(instr->result());
|
1953
|
+
ASSERT(input.is(result));
|
1954
|
+
Register temp = ToRegister(instr->TempAt(0));
|
1955
|
+
Handle<String> class_name = instr->hydrogen()->class_name();
|
1956
|
+
Label done;
|
1957
|
+
Label is_true, is_false;
|
1958
|
+
|
1959
|
+
EmitClassOfTest(&is_true, &is_false, class_name, input, temp);
|
1960
|
+
|
1961
|
+
__ j(not_equal, &is_false);
|
1962
|
+
|
1963
|
+
__ bind(&is_true);
|
1964
|
+
__ LoadRoot(result, Heap::kTrueValueRootIndex);
|
1965
|
+
__ jmp(&done, Label::kNear);
|
1966
|
+
|
1967
|
+
__ bind(&is_false);
|
1968
|
+
__ LoadRoot(result, Heap::kFalseValueRootIndex);
|
1969
|
+
__ bind(&done);
|
1970
|
+
}
|
1971
|
+
|
1972
|
+
|
1973
|
+
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
|
1974
|
+
Register input = ToRegister(instr->InputAt(0));
|
1975
|
+
Register temp = ToRegister(instr->TempAt(0));
|
1976
|
+
Handle<String> class_name = instr->hydrogen()->class_name();
|
1977
|
+
|
1978
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
1979
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
1980
|
+
|
1981
|
+
Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
1982
|
+
Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
1983
|
+
|
1984
|
+
EmitClassOfTest(true_label, false_label, class_name, input, temp);
|
1985
|
+
|
1986
|
+
EmitBranch(true_block, false_block, equal);
|
1987
|
+
}
|
1988
|
+
|
1989
|
+
|
1990
|
+
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
|
1991
|
+
Register reg = ToRegister(instr->InputAt(0));
|
1992
|
+
int true_block = instr->true_block_id();
|
1993
|
+
int false_block = instr->false_block_id();
|
1994
|
+
|
1995
|
+
__ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
|
1996
|
+
EmitBranch(true_block, false_block, equal);
|
1997
|
+
}
|
1998
|
+
|
1999
|
+
|
2000
|
+
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
|
2001
|
+
InstanceofStub stub(InstanceofStub::kNoFlags);
|
2002
|
+
__ push(ToRegister(instr->InputAt(0)));
|
2003
|
+
__ push(ToRegister(instr->InputAt(1)));
|
2004
|
+
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
2005
|
+
Label true_value, done;
|
2006
|
+
__ testq(rax, rax);
|
2007
|
+
__ j(zero, &true_value, Label::kNear);
|
2008
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
|
2009
|
+
__ jmp(&done, Label::kNear);
|
2010
|
+
__ bind(&true_value);
|
2011
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
|
2012
|
+
__ bind(&done);
|
2013
|
+
}
|
2014
|
+
|
2015
|
+
|
2016
|
+
void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
|
2017
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
2018
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
2019
|
+
|
2020
|
+
InstanceofStub stub(InstanceofStub::kNoFlags);
|
2021
|
+
__ push(ToRegister(instr->InputAt(0)));
|
2022
|
+
__ push(ToRegister(instr->InputAt(1)));
|
2023
|
+
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
2024
|
+
__ testq(rax, rax);
|
2025
|
+
EmitBranch(true_block, false_block, zero);
|
2026
|
+
}
|
2027
|
+
|
2028
|
+
|
2029
|
+
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
2030
|
+
class DeferredInstanceOfKnownGlobal: public LDeferredCode {
|
2031
|
+
public:
|
2032
|
+
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
|
2033
|
+
LInstanceOfKnownGlobal* instr)
|
2034
|
+
: LDeferredCode(codegen), instr_(instr) { }
|
2035
|
+
virtual void Generate() {
|
2036
|
+
codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
|
2037
|
+
}
|
2038
|
+
|
2039
|
+
Label* map_check() { return &map_check_; }
|
2040
|
+
|
2041
|
+
private:
|
2042
|
+
LInstanceOfKnownGlobal* instr_;
|
2043
|
+
Label map_check_;
|
2044
|
+
};
|
2045
|
+
|
2046
|
+
|
2047
|
+
DeferredInstanceOfKnownGlobal* deferred;
|
2048
|
+
deferred = new DeferredInstanceOfKnownGlobal(this, instr);
|
2049
|
+
|
2050
|
+
Label done, false_result;
|
2051
|
+
Register object = ToRegister(instr->InputAt(0));
|
2052
|
+
|
2053
|
+
// A Smi is not an instance of anything.
|
2054
|
+
__ JumpIfSmi(object, &false_result);
|
2055
|
+
|
2056
|
+
// This is the inlined call site instanceof cache. The two occurences of the
|
2057
|
+
// hole value will be patched to the last map/result pair generated by the
|
2058
|
+
// instanceof stub.
|
2059
|
+
Label cache_miss;
|
2060
|
+
// Use a temp register to avoid memory operands with variable lengths.
|
2061
|
+
Register map = ToRegister(instr->TempAt(0));
|
2062
|
+
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
|
2063
|
+
__ bind(deferred->map_check()); // Label for calculating code patching.
|
2064
|
+
__ movq(kScratchRegister, factory()->the_hole_value(),
|
2065
|
+
RelocInfo::EMBEDDED_OBJECT);
|
2066
|
+
__ cmpq(map, kScratchRegister); // Patched to cached map.
|
2067
|
+
__ j(not_equal, &cache_miss, Label::kNear);
|
2068
|
+
// Patched to load either true or false.
|
2069
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
|
2070
|
+
#ifdef DEBUG
|
2071
|
+
// Check that the code size between patch label and patch sites is invariant.
|
2072
|
+
Label end_of_patched_code;
|
2073
|
+
__ bind(&end_of_patched_code);
|
2074
|
+
ASSERT(true);
|
2075
|
+
#endif
|
2076
|
+
__ jmp(&done);
|
2077
|
+
|
2078
|
+
// The inlined call site cache did not match. Check for null and string
|
2079
|
+
// before calling the deferred code.
|
2080
|
+
__ bind(&cache_miss); // Null is not an instance of anything.
|
2081
|
+
__ CompareRoot(object, Heap::kNullValueRootIndex);
|
2082
|
+
__ j(equal, &false_result, Label::kNear);
|
2083
|
+
|
2084
|
+
// String values are not instances of anything.
|
2085
|
+
__ JumpIfNotString(object, kScratchRegister, deferred->entry());
|
2086
|
+
|
2087
|
+
__ bind(&false_result);
|
2088
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
|
2089
|
+
|
2090
|
+
__ bind(deferred->exit());
|
2091
|
+
__ bind(&done);
|
2092
|
+
}
|
2093
|
+
|
2094
|
+
|
2095
|
+
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
2096
|
+
Label* map_check) {
|
2097
|
+
{
|
2098
|
+
PushSafepointRegistersScope scope(this);
|
2099
|
+
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
|
2100
|
+
InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
|
2101
|
+
InstanceofStub stub(flags);
|
2102
|
+
|
2103
|
+
__ push(ToRegister(instr->InputAt(0)));
|
2104
|
+
__ Push(instr->function());
|
2105
|
+
|
2106
|
+
Register temp = ToRegister(instr->TempAt(0));
|
2107
|
+
static const int kAdditionalDelta = 10;
|
2108
|
+
int delta =
|
2109
|
+
masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
|
2110
|
+
ASSERT(delta >= 0);
|
2111
|
+
__ push_imm32(delta);
|
2112
|
+
|
2113
|
+
// We are pushing three values on the stack but recording a
|
2114
|
+
// safepoint with two arguments because stub is going to
|
2115
|
+
// remove the third argument from the stack before jumping
|
2116
|
+
// to instanceof builtin on the slow path.
|
2117
|
+
CallCodeGeneric(stub.GetCode(),
|
2118
|
+
RelocInfo::CODE_TARGET,
|
2119
|
+
instr,
|
2120
|
+
RECORD_SAFEPOINT_WITH_REGISTERS,
|
2121
|
+
2);
|
2122
|
+
ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
|
2123
|
+
// Move result to a register that survives the end of the
|
2124
|
+
// PushSafepointRegisterScope.
|
2125
|
+
__ movq(kScratchRegister, rax);
|
2126
|
+
}
|
2127
|
+
__ testq(kScratchRegister, kScratchRegister);
|
2128
|
+
Label load_false;
|
2129
|
+
Label done;
|
2130
|
+
__ j(not_zero, &load_false);
|
2131
|
+
__ LoadRoot(rax, Heap::kTrueValueRootIndex);
|
2132
|
+
__ jmp(&done);
|
2133
|
+
__ bind(&load_false);
|
2134
|
+
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
|
2135
|
+
__ bind(&done);
|
2136
|
+
}
|
2137
|
+
|
2138
|
+
|
2139
|
+
void LCodeGen::DoCmpT(LCmpT* instr) {
|
2140
|
+
Token::Value op = instr->op();
|
2141
|
+
|
2142
|
+
Handle<Code> ic = CompareIC::GetUninitialized(op);
|
2143
|
+
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
2144
|
+
|
2145
|
+
Condition condition = TokenToCondition(op, false);
|
2146
|
+
if (op == Token::GT || op == Token::LTE) {
|
2147
|
+
condition = ReverseCondition(condition);
|
2148
|
+
}
|
2149
|
+
Label true_value, done;
|
2150
|
+
__ testq(rax, rax);
|
2151
|
+
__ j(condition, &true_value, Label::kNear);
|
2152
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
|
2153
|
+
__ jmp(&done, Label::kNear);
|
2154
|
+
__ bind(&true_value);
|
2155
|
+
__ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
|
2156
|
+
__ bind(&done);
|
2157
|
+
}
|
2158
|
+
|
2159
|
+
|
2160
|
+
void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
|
2161
|
+
Token::Value op = instr->op();
|
2162
|
+
int true_block = chunk_->LookupDestination(instr->true_block_id());
|
2163
|
+
int false_block = chunk_->LookupDestination(instr->false_block_id());
|
2164
|
+
|
2165
|
+
Handle<Code> ic = CompareIC::GetUninitialized(op);
|
2166
|
+
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
2167
|
+
|
2168
|
+
// The compare stub expects compare condition and the input operands
|
2169
|
+
// reversed for GT and LTE.
|
2170
|
+
Condition condition = TokenToCondition(op, false);
|
2171
|
+
if (op == Token::GT || op == Token::LTE) {
|
2172
|
+
condition = ReverseCondition(condition);
|
2173
|
+
}
|
2174
|
+
__ testq(rax, rax);
|
2175
|
+
EmitBranch(true_block, false_block, condition);
|
2176
|
+
}
|
2177
|
+
|
2178
|
+
|
2179
|
+
void LCodeGen::DoReturn(LReturn* instr) {
|
2180
|
+
if (FLAG_trace) {
|
2181
|
+
// Preserve the return value on the stack and rely on the runtime
|
2182
|
+
// call to return the value in the same register.
|
2183
|
+
__ push(rax);
|
2184
|
+
__ CallRuntime(Runtime::kTraceExit, 1);
|
2185
|
+
}
|
2186
|
+
__ movq(rsp, rbp);
|
2187
|
+
__ pop(rbp);
|
2188
|
+
__ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
|
2189
|
+
}
|
2190
|
+
|
2191
|
+
|
2192
|
+
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
|
2193
|
+
Register result = ToRegister(instr->result());
|
2194
|
+
if (result.is(rax)) {
|
2195
|
+
__ load_rax(instr->hydrogen()->cell().location(),
|
2196
|
+
RelocInfo::GLOBAL_PROPERTY_CELL);
|
2197
|
+
} else {
|
2198
|
+
__ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
|
2199
|
+
__ movq(result, Operand(result, 0));
|
2200
|
+
}
|
2201
|
+
if (instr->hydrogen()->check_hole_value()) {
|
2202
|
+
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
|
2203
|
+
DeoptimizeIf(equal, instr->environment());
|
2204
|
+
}
|
2205
|
+
}
|
2206
|
+
|
2207
|
+
|
2208
|
+
void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
|
2209
|
+
ASSERT(ToRegister(instr->global_object()).is(rax));
|
2210
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
2211
|
+
|
2212
|
+
__ Move(rcx, instr->name());
|
2213
|
+
RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
|
2214
|
+
RelocInfo::CODE_TARGET_CONTEXT;
|
2215
|
+
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
|
2216
|
+
CallCode(ic, mode, instr);
|
2217
|
+
}
|
2218
|
+
|
2219
|
+
|
2220
|
+
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
|
2221
|
+
Register value = ToRegister(instr->InputAt(0));
|
2222
|
+
Register temp = ToRegister(instr->TempAt(0));
|
2223
|
+
ASSERT(!value.is(temp));
|
2224
|
+
bool check_hole = instr->hydrogen()->check_hole_value();
|
2225
|
+
if (!check_hole && value.is(rax)) {
|
2226
|
+
__ store_rax(instr->hydrogen()->cell().location(),
|
2227
|
+
RelocInfo::GLOBAL_PROPERTY_CELL);
|
2228
|
+
return;
|
2229
|
+
}
|
2230
|
+
// If the cell we are storing to contains the hole it could have
|
2231
|
+
// been deleted from the property dictionary. In that case, we need
|
2232
|
+
// to update the property details in the property dictionary to mark
|
2233
|
+
// it as no longer deleted. We deoptimize in that case.
|
2234
|
+
__ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
|
2235
|
+
if (check_hole) {
|
2236
|
+
__ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
|
2237
|
+
DeoptimizeIf(equal, instr->environment());
|
2238
|
+
}
|
2239
|
+
__ movq(Operand(temp, 0), value);
|
2240
|
+
}
|
2241
|
+
|
2242
|
+
|
2243
|
+
void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
|
2244
|
+
ASSERT(ToRegister(instr->global_object()).is(rdx));
|
2245
|
+
ASSERT(ToRegister(instr->value()).is(rax));
|
2246
|
+
|
2247
|
+
__ Move(rcx, instr->name());
|
2248
|
+
Handle<Code> ic = instr->strict_mode()
|
2249
|
+
? isolate()->builtins()->StoreIC_Initialize_Strict()
|
2250
|
+
: isolate()->builtins()->StoreIC_Initialize();
|
2251
|
+
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
|
2252
|
+
}
|
2253
|
+
|
2254
|
+
|
2255
|
+
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
|
2256
|
+
Register context = ToRegister(instr->context());
|
2257
|
+
Register result = ToRegister(instr->result());
|
2258
|
+
__ movq(result, ContextOperand(context, instr->slot_index()));
|
2259
|
+
}
|
2260
|
+
|
2261
|
+
|
2262
|
+
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
|
2263
|
+
Register context = ToRegister(instr->context());
|
2264
|
+
Register value = ToRegister(instr->value());
|
2265
|
+
__ movq(ContextOperand(context, instr->slot_index()), value);
|
2266
|
+
if (instr->needs_write_barrier()) {
|
2267
|
+
int offset = Context::SlotOffset(instr->slot_index());
|
2268
|
+
Register scratch = ToRegister(instr->TempAt(0));
|
2269
|
+
__ RecordWrite(context, offset, value, scratch);
|
2270
|
+
}
|
2271
|
+
}
|
2272
|
+
|
2273
|
+
|
2274
|
+
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
|
2275
|
+
Register object = ToRegister(instr->InputAt(0));
|
2276
|
+
Register result = ToRegister(instr->result());
|
2277
|
+
if (instr->hydrogen()->is_in_object()) {
|
2278
|
+
__ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
|
2279
|
+
} else {
|
2280
|
+
__ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
|
2281
|
+
__ movq(result, FieldOperand(result, instr->hydrogen()->offset()));
|
2282
|
+
}
|
2283
|
+
}
|
2284
|
+
|
2285
|
+
|
2286
|
+
// Emit a monomorphic load of |name| from an object known to have map
// |type|: either a field read or materialization of a constant function.
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ movq(result, FieldOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
      __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    // CONSTANT_FUNCTION: load the known function object directly.
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    LoadHeapObject(result, Handle<HeapObject>::cast(function));
  }
}
|
2311
|
+
|
2312
|
+
|
2313
|
+
// Polymorphic named load: compare the receiver's map against each
// expected map in turn, falling back to the generic LoadIC (or a
// deoptimization) when no map matches.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());

  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();

  if (map_count == 0) {
    // No known maps: always take the generic path.
    ASSERT(instr->hydrogen()->need_generic());
    __ Move(rcx, instr->hydrogen()->name());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    // Dispatch on all but the last expected map.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
      __ j(not_equal, &next, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&next);
    }
    // Last map: either fall through to the generic IC or deoptimize.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ j(not_equal, &generic, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&generic);
      __ Move(rcx, instr->hydrogen()->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(not_equal, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
|
2354
|
+
|
2355
|
+
|
2356
|
+
// Generic named load: delegate to the LoadIC stub. Receiver is expected
// in rax, property name in rcx; result comes back in rax.
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rax));
  ASSERT(ToRegister(instr->result()).is(rax));

  __ Move(rcx, instr->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
|
2364
|
+
|
2365
|
+
|
2366
|
+
// Load the prototype of a JSFunction, handling the non-instance-prototype
// case and deoptimizing when the input is not a function or has no
// prototype/initial map yet.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  Label non_instance;
  __ testb(FieldOperand(result, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &non_instance, Label::kNear);

  // Get the prototype or initial map from the function.
  __ movq(result,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
  __ j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done, Label::kNear);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  __ movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
|
2405
|
+
|
2406
|
+
|
2407
|
+
// Load the elements backing store of a JSObject. In debug builds also
// verify the result is a FixedArray, FixedCOWArray, or external array.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
    __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &done, Label::kNear);
    // Not a (COW) fixed array: accept only external array types.
    Register temp((result.is(rax)) ? rbx : rax);
    __ push(temp);
    __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
    __ movzxbq(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ subq(temp, Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmpq(temp, Immediate(kExternalArrayTypeCount));
    __ pop(temp);
    __ Check(below, "Check for fast elements failed.");
    __ bind(&done);
  }
}
|
2430
|
+
|
2431
|
+
|
2432
|
+
// Load the raw backing-store pointer out of an external (typed) array.
void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ movq(result, FieldOperand(input,
                               ExternalPixelArray::kExternalPointerOffset));
}
|
2439
|
+
|
2440
|
+
|
2441
|
+
// Load arguments[index] from the stack; deoptimizes when the index is
// out of range (length - index must stay strictly positive).
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register result = ToRegister(instr->result());

  if (instr->index()->IsRegister()) {
    __ subl(length, ToRegister(instr->index()));
  } else {
    __ subl(length, ToOperand(instr->index()));
  }
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them add one more.
  __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}
|
2457
|
+
|
2458
|
+
|
2459
|
+
// Load an element from a fast-elements FixedArray, optionally
// deoptimizing if the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());
  ASSERT(result.is(elements));

  // Load the result.
  __ movq(result, FieldOperand(elements,
                               key,
                               times_pointer_size,
                               FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr->environment());
  }
}
|
2477
|
+
|
2478
|
+
|
2479
|
+
// Build an addressing-mode operand for an external-array element:
// base pointer plus a (constant or register) key scaled by element size.
Operand LCodeGen::BuildExternalArrayOperand(LOperand* external_pointer,
                                            LOperand* key,
                                            ExternalArrayType array_type) {
  Register external_pointer_reg = ToRegister(external_pointer);
  int shift_size = ExternalArrayTypeToShiftSize(array_type);
  if (key->IsConstantOperand()) {
    int constant_value = ToInteger32(LConstantOperand::cast(key));
    // Scaled constant displacements must fit the 32-bit operand encoding.
    if (constant_value & 0xF0000000) {
      Abort("array index constant value too big");
    }
    return Operand(external_pointer_reg, constant_value * (1 << shift_size));
  } else {
    ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
    return Operand(external_pointer_reg, ToRegister(key), scale_factor, 0);
  }
}
|
2495
|
+
|
2496
|
+
|
2497
|
+
// Load an element from an external (typed) array, using the correctly
// sized and signed move for the element type. Unsigned int32 loads
// deoptimize when the value does not fit a signed 32-bit integer.
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  ExternalArrayType array_type = instr->array_type();
  Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
                                            instr->key(), array_type));
  if (array_type == kExternalFloatArray) {
    // Single-precision load, widened to double.
    XMMRegister result(ToDoubleRegister(instr->result()));
    __ movss(result, operand);
    __ cvtss2sd(result, result);
  } else if (array_type == kExternalDoubleArray) {
    __ movsd(ToDoubleRegister(instr->result()), operand);
  } else {
    Register result(ToRegister(instr->result()));
    switch (array_type) {
      case kExternalByteArray:
        __ movsxbq(result, operand);
        break;
      case kExternalUnsignedByteArray:
      case kExternalPixelArray:
        __ movzxbq(result, operand);
        break;
      case kExternalShortArray:
        __ movsxwq(result, operand);
        break;
      case kExternalUnsignedShortArray:
        __ movzxwq(result, operand);
        break;
      case kExternalIntArray:
        __ movsxlq(result, operand);
        break;
      case kExternalUnsignedIntArray:
        __ movl(result, operand);
        __ testl(result, result);
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(negative, instr->environment());
        break;
      case kExternalFloatArray:
      case kExternalDoubleArray:
        UNREACHABLE();
        break;
    }
  }
}
|
2542
|
+
|
2543
|
+
|
2544
|
+
// Generic keyed load: delegate to the KeyedLoadIC stub. Receiver is
// expected in rdx and the key in rax.
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(rdx));
  ASSERT(ToRegister(instr->key()).is(rax));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
|
2551
|
+
|
2552
|
+
|
2553
|
+
// Compute the frame pointer of the frame holding the actual arguments:
// the current frame, or the frame below an arguments-adaptor frame.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adapter frame.
  Label done, adapted;
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adapted, Label::kNear);

  // No arguments adaptor frame.
  __ movq(result, rbp);
  __ jmp(&done, Label::kNear);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}
|
2575
|
+
|
2576
|
+
|
2577
|
+
// Compute the number of actual arguments: the static parameter count
// when there is no adaptor frame, otherwise the adaptor frame's length.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  if (instr->InputAt(0)->IsRegister()) {
    __ cmpq(rbp, ToRegister(instr->InputAt(0)));
  } else {
    __ cmpq(rbp, ToOperand(instr->InputAt(0)));
  }
  __ movl(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done, Label::kNear);

  // Arguments adaptor frame present. Get argument length from there.
  __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiToInteger32(result,
                    Operand(result,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Argument length is in result register.
  __ bind(&done);
}
|
2600
|
+
|
2601
|
+
|
2602
|
+
// Implement Function.prototype.apply: normalize the receiver, push the
// (bounded) argument list from the arguments object, and invoke the
// target function with a lazy-deoptimization safepoint.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  ASSERT(receiver.is(rax));  // Used for parameter count.
  ASSERT(function.is(rdi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(rax));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  Label global_object, receiver_ok;
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object, Label::kNear);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object, Label::kNear);

  // The receiver should be a JS object.
  Condition is_smi = __ CheckSmi(receiver);
  DeoptimizeIf(is_smi, instr->environment());
  __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok, Label::kNear);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object. See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpq(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr->environment());

  __ push(receiver);
  __ movq(receiver, length);

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ testl(length, length);
  __ j(zero, &invoke, Label::kNear);
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ decl(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  v8::internal::ParameterCount actual(rax);
  __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
  // Restore the context register after the call.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
|
2668
|
+
|
2669
|
+
|
2670
|
+
// Push a single (tagged) argument onto the stack for an upcoming call.
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  EmitPushTaggedOperand(argument);
}
|
2674
|
+
|
2675
|
+
|
2676
|
+
// Materialize the current context (held in rsi) into the result register.
void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, rsi);
}
|
2680
|
+
|
2681
|
+
|
2682
|
+
// Load the enclosing (outer) context: go through the context's closure
// and read that function's context.
void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movq(result,
          Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ movq(result, FieldOperand(result, JSFunction::kContextOffset));
}
|
2689
|
+
|
2690
|
+
|
2691
|
+
// Load the global object for the current context.
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ movq(result, GlobalObjectOperand());
}
|
2695
|
+
|
2696
|
+
|
2697
|
+
// Load the global receiver (the object used as |this| for global calls)
// from the given global object.
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
}
|
2702
|
+
|
2703
|
+
|
2704
|
+
// Emit a direct call to a statically known JSFunction (already in rdi):
// switch contexts if required, set the argument count and call kind, call
// (or self-call), record the lazy-deopt safepoint, and restore rsi.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  }

  // Set rax to arguments count if adaption is not needed. Assumes that rax
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ Set(rax, arity);
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  __ SetCallKind(rcx, call_kind);
  if (*function == *info()->closure()) {
    __ CallSelf();
  } else {
    __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);

  // Restore context.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
|
2740
|
+
|
2741
|
+
|
2742
|
+
// Call a compile-time-known function as a method; result lands in rax.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(rax));
  __ Move(rdi, instr->function());
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD);
}
|
2750
|
+
|
2751
|
+
|
2752
|
+
// Deferred slow path of Math.abs for a tagged heap-number input:
// allocate a fresh HeapNumber holding the value with its sign bit
// cleared. Deoptimizes if the input is not a heap number.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());

  Label done;
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  Label negative;
  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative);
  __ jmp(&done);

  __ bind(&negative);

  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) {
    __ movq(tmp, rax);
  }

  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  // Clear the sign bit by shifting it out and shifting back in a zero.
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ shl(tmp2, Immediate(1));
  __ shr(tmp2, Immediate(1));
  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
}
|
2802
|
+
|
2803
|
+
|
2804
|
+
// In-place absolute value of an int32 register; deoptimizes on
// kMinInt, whose negation overflows.
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  __ testl(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive);
  __ negl(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);
}
|
2813
|
+
|
2814
|
+
|
2815
|
+
// Math.abs dispatched on the value's representation: bit-trick for
// doubles, checked negation for int32, and a smi fast path with a
// deferred heap-number slow path for tagged values.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    // |x| = x - (x + x) applied via 0.0 - x then andpd clears the sign bit.
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andpd(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    __ SmiToInteger32(input_reg, input_reg);
    EmitIntegerMathAbs(instr);
    __ Integer32ToSmi(input_reg, input_reg);
    __ bind(deferred->exit());
  }
}
|
2852
|
+
|
2853
|
+
|
2854
|
+
// Math.floor of a double into an int32 result. Uses roundsd when SSE4.1
// is available; otherwise only non-negative inputs are handled and
// negatives deoptimize. Overflow (minint) always deoptimizes.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));

  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatures::Scope scope(SSE4_1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Deoptimize if minus zero.
      __ movq(output_reg, input_reg);
      __ subq(output_reg, Immediate(1));
      DeoptimizeIf(overflow, instr->environment());
    }
    __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
    __ cvttsd2si(output_reg, xmm_scratch);
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
  } else {
    __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(below_equal, instr->environment());
    } else {
      DeoptimizeIf(below, instr->environment());
    }

    // Use truncating instruction (OK because input is positive).
    __ cvttsd2si(output_reg, input_reg);

    // Overflow is signalled with minint.
    __ cmpl(output_reg, Immediate(0x80000000));
    DeoptimizeIf(equal, instr->environment());
  }
}
|
2889
|
+
|
2890
|
+
|
2891
|
+
// Math.round of a double into an int32 result: add 0.5 and truncate for
// inputs >= 0.5; inputs below 0.5 round to zero (with -0 / below -0.5
// handling that may deoptimize).
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  const XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));

  Label done;
  // xmm_scratch = 0.5
  __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
  __ movq(xmm_scratch, kScratchRegister);
  Label below_half;
  __ ucomisd(xmm_scratch, input_reg);
  // If input_reg is NaN, this doesn't jump.
  __ j(above, &below_half, Label::kNear);
  // input = input + 0.5
  // This addition might give a result that isn't the correct for
  // rounding, due to loss of precision, but only for a number that's
  // so big that the conversion below will overflow anyway.
  __ addsd(input_reg, xmm_scratch);
  // Compute Math.floor(input).
  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, input_reg);
  // Overflow is signalled with minint.
  __ cmpl(output_reg, Immediate(0x80000000));
  DeoptimizeIf(equal, instr->environment());
  __ jmp(&done);

  __ bind(&below_half);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bailout if negative (including -0).
    __ movq(output_reg, input_reg);
    __ testq(output_reg, output_reg);
    DeoptimizeIf(negative, instr->environment());
  } else {
    // Bailout if below -0.5, otherwise round to (positive) zero, even
    // if negative.
    // xmm_scratch = -0.5
    __ movq(kScratchRegister, V8_INT64_C(0xBFE0000000000000), RelocInfo::NONE);
    __ movq(xmm_scratch, kScratchRegister);
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
  }
  __ xorl(output_reg, output_reg);

  __ bind(&done);
}
|
2936
|
+
|
2937
|
+
|
2938
|
+
// Math.sqrt: a single in-place sqrtsd on the input/result register.
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ sqrtsd(input_reg, input_reg);
}
|
2943
|
+
|
2944
|
+
|
2945
|
+
// Math.pow(x, 0.5): normalize -0 to +0 by adding zero, then take sqrt.
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ xorps(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
}
|
2953
|
+
|
2954
|
+
|
2955
|
+
// Math.pow via C runtime calls, dispatched on the exponent's
// representation (double, int32, or tagged). Tagged exponents are
// unboxed first, deoptimizing on non-numbers.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  XMMRegister left_reg = ToDoubleRegister(left);
  ASSERT(!left_reg.is(xmm1));
  LOperand* right = instr->InputAt(1);
  XMMRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers
    __ movaps(xmm0, left_reg);
    ASSERT(ToDoubleRegister(right).is(xmm1));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 2);
  } else if (exponent_type.IsInteger32()) {
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers: xmm0 and edi (not rdi).
    // On Windows, the registers are xmm0 and edx.
    __ movaps(xmm0, left_reg);
#ifdef _WIN64
    ASSERT(ToRegister(right).is(rdx));
#else
    ASSERT(ToRegister(right).is(rdi));
#endif
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 2);
  } else {
    ASSERT(exponent_type.IsTagged());
    Register right_reg = ToRegister(right);

    // Unbox the exponent: smi fast path, heap-number slow path.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);
    __ SmiToInteger32(right_reg, right_reg);
    __ cvtlsi2sd(xmm1, right_reg);
    __ jmp(&call);

    __ bind(&non_smi);
    __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , kScratchRegister);
    DeoptimizeIf(not_equal, instr->environment());
    __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));

    __ bind(&call);
    __ PrepareCallCFunction(2);
    // Move arguments to correct registers xmm0 and xmm1.
    __ movaps(xmm0, left_reg);
    // Right argument is already in xmm1.
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 2);
  }
  // Return value is in xmm0.
  __ movaps(result_reg, xmm0);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
|
3009
|
+
|
3010
|
+
|
3011
|
+
// Math.log via the transcendental-cache stub (untagged double in xmm1).
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
|
3017
|
+
|
3018
|
+
|
3019
|
+
// Math.cos via the transcendental-cache stub (untagged double in xmm1).
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
|
3025
|
+
|
3026
|
+
|
3027
|
+
// Math.sin via the transcendental-cache stub (untagged double in xmm1).
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
|
3033
|
+
|
3034
|
+
|
3035
|
+
// Dispatch a unary math operation to its dedicated emitter.
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;

    default:
      UNREACHABLE();
  }
}
|
3066
|
+
|
3067
|
+
|
3068
|
+
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
|
3069
|
+
ASSERT(ToRegister(instr->function()).is(rdi));
|
3070
|
+
ASSERT(instr->HasPointerMap());
|
3071
|
+
ASSERT(instr->HasDeoptimizationEnvironment());
|
3072
|
+
LPointerMap* pointers = instr->pointer_map();
|
3073
|
+
LEnvironment* env = instr->deoptimization_environment();
|
3074
|
+
RecordPosition(pointers->position());
|
3075
|
+
RegisterEnvironmentForDeoptimization(env);
|
3076
|
+
SafepointGenerator generator(this, pointers, env->deoptimization_index());
|
3077
|
+
ParameterCount count(instr->arity());
|
3078
|
+
__ InvokeFunction(rdi, count, CALL_FUNCTION, generator);
|
3079
|
+
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
3080
|
+
}
|
3081
|
+
|
3082
|
+
|
3083
|
+
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
|
3084
|
+
ASSERT(ToRegister(instr->key()).is(rcx));
|
3085
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3086
|
+
|
3087
|
+
int arity = instr->arity();
|
3088
|
+
Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
|
3089
|
+
arity, NOT_IN_LOOP);
|
3090
|
+
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
3091
|
+
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
3092
|
+
}
|
3093
|
+
|
3094
|
+
|
3095
|
+
void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
3096
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3097
|
+
|
3098
|
+
int arity = instr->arity();
|
3099
|
+
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
|
3100
|
+
Handle<Code> ic =
|
3101
|
+
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
3102
|
+
__ Move(rcx, instr->name());
|
3103
|
+
CallCode(ic, mode, instr);
|
3104
|
+
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
3105
|
+
}
|
3106
|
+
|
3107
|
+
|
3108
|
+
void LCodeGen::DoCallFunction(LCallFunction* instr) {
|
3109
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3110
|
+
|
3111
|
+
int arity = instr->arity();
|
3112
|
+
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
|
3113
|
+
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
3114
|
+
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
3115
|
+
__ Drop(1);
|
3116
|
+
}
|
3117
|
+
|
3118
|
+
|
3119
|
+
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
3120
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3121
|
+
int arity = instr->arity();
|
3122
|
+
RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
|
3123
|
+
Handle<Code> ic =
|
3124
|
+
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
3125
|
+
__ Move(rcx, instr->name());
|
3126
|
+
CallCode(ic, mode, instr);
|
3127
|
+
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
|
3128
|
+
}
|
3129
|
+
|
3130
|
+
|
3131
|
+
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
|
3132
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3133
|
+
__ Move(rdi, instr->target());
|
3134
|
+
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
|
3135
|
+
}
|
3136
|
+
|
3137
|
+
|
3138
|
+
void LCodeGen::DoCallNew(LCallNew* instr) {
|
3139
|
+
ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
|
3140
|
+
ASSERT(ToRegister(instr->result()).is(rax));
|
3141
|
+
|
3142
|
+
Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
|
3143
|
+
__ Set(rax, instr->arity());
|
3144
|
+
CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
|
3145
|
+
}
|
3146
|
+
|
3147
|
+
|
3148
|
+
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
|
3149
|
+
CallRuntime(instr->function(), instr->arity(), instr);
|
3150
|
+
}
|
3151
|
+
|
3152
|
+
|
3153
|
+
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
|
3154
|
+
Register object = ToRegister(instr->object());
|
3155
|
+
Register value = ToRegister(instr->value());
|
3156
|
+
int offset = instr->offset();
|
3157
|
+
|
3158
|
+
if (!instr->transition().is_null()) {
|
3159
|
+
__ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
|
3160
|
+
}
|
3161
|
+
|
3162
|
+
// Do the store.
|
3163
|
+
if (instr->is_in_object()) {
|
3164
|
+
__ movq(FieldOperand(object, offset), value);
|
3165
|
+
if (instr->needs_write_barrier()) {
|
3166
|
+
Register temp = ToRegister(instr->TempAt(0));
|
3167
|
+
// Update the write barrier for the object for in-object properties.
|
3168
|
+
__ RecordWrite(object, offset, value, temp);
|
3169
|
+
}
|
3170
|
+
} else {
|
3171
|
+
Register temp = ToRegister(instr->TempAt(0));
|
3172
|
+
__ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
|
3173
|
+
__ movq(FieldOperand(temp, offset), value);
|
3174
|
+
if (instr->needs_write_barrier()) {
|
3175
|
+
// Update the write barrier for the properties array.
|
3176
|
+
// object is used as a scratch register.
|
3177
|
+
__ RecordWrite(temp, offset, value, object);
|
3178
|
+
}
|
3179
|
+
}
|
3180
|
+
}
|
3181
|
+
|
3182
|
+
|
3183
|
+
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
|
3184
|
+
ASSERT(ToRegister(instr->object()).is(rdx));
|
3185
|
+
ASSERT(ToRegister(instr->value()).is(rax));
|
3186
|
+
|
3187
|
+
__ Move(rcx, instr->hydrogen()->name());
|
3188
|
+
Handle<Code> ic = instr->strict_mode()
|
3189
|
+
? isolate()->builtins()->StoreIC_Initialize_Strict()
|
3190
|
+
: isolate()->builtins()->StoreIC_Initialize();
|
3191
|
+
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
3192
|
+
}
|
3193
|
+
|
3194
|
+
|
3195
|
+
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
|
3196
|
+
LStoreKeyedSpecializedArrayElement* instr) {
|
3197
|
+
ExternalArrayType array_type = instr->array_type();
|
3198
|
+
Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
|
3199
|
+
instr->key(), array_type));
|
3200
|
+
if (array_type == kExternalFloatArray) {
|
3201
|
+
XMMRegister value(ToDoubleRegister(instr->value()));
|
3202
|
+
__ cvtsd2ss(value, value);
|
3203
|
+
__ movss(operand, value);
|
3204
|
+
} else if (array_type == kExternalDoubleArray) {
|
3205
|
+
__ movsd(operand, ToDoubleRegister(instr->value()));
|
3206
|
+
} else {
|
3207
|
+
Register value(ToRegister(instr->value()));
|
3208
|
+
switch (array_type) {
|
3209
|
+
case kExternalPixelArray:
|
3210
|
+
case kExternalByteArray:
|
3211
|
+
case kExternalUnsignedByteArray:
|
3212
|
+
__ movb(operand, value);
|
3213
|
+
break;
|
3214
|
+
case kExternalShortArray:
|
3215
|
+
case kExternalUnsignedShortArray:
|
3216
|
+
__ movw(operand, value);
|
3217
|
+
break;
|
3218
|
+
case kExternalIntArray:
|
3219
|
+
case kExternalUnsignedIntArray:
|
3220
|
+
__ movl(operand, value);
|
3221
|
+
break;
|
3222
|
+
case kExternalFloatArray:
|
3223
|
+
case kExternalDoubleArray:
|
3224
|
+
UNREACHABLE();
|
3225
|
+
break;
|
3226
|
+
}
|
3227
|
+
}
|
3228
|
+
}
|
3229
|
+
|
3230
|
+
|
3231
|
+
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
|
3232
|
+
if (instr->length()->IsRegister()) {
|
3233
|
+
__ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
|
3234
|
+
} else {
|
3235
|
+
__ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
|
3236
|
+
}
|
3237
|
+
DeoptimizeIf(above_equal, instr->environment());
|
3238
|
+
}
|
3239
|
+
|
3240
|
+
|
3241
|
+
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
|
3242
|
+
Register value = ToRegister(instr->value());
|
3243
|
+
Register elements = ToRegister(instr->object());
|
3244
|
+
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
|
3245
|
+
|
3246
|
+
// Do the store.
|
3247
|
+
if (instr->key()->IsConstantOperand()) {
|
3248
|
+
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
|
3249
|
+
LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
|
3250
|
+
int offset =
|
3251
|
+
ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
|
3252
|
+
__ movq(FieldOperand(elements, offset), value);
|
3253
|
+
} else {
|
3254
|
+
__ movq(FieldOperand(elements,
|
3255
|
+
key,
|
3256
|
+
times_pointer_size,
|
3257
|
+
FixedArray::kHeaderSize),
|
3258
|
+
value);
|
3259
|
+
}
|
3260
|
+
|
3261
|
+
if (instr->hydrogen()->NeedsWriteBarrier()) {
|
3262
|
+
// Compute address of modified element and store it into key register.
|
3263
|
+
__ lea(key, FieldOperand(elements,
|
3264
|
+
key,
|
3265
|
+
times_pointer_size,
|
3266
|
+
FixedArray::kHeaderSize));
|
3267
|
+
__ RecordWrite(elements, key, value);
|
3268
|
+
}
|
3269
|
+
}
|
3270
|
+
|
3271
|
+
|
3272
|
+
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
|
3273
|
+
ASSERT(ToRegister(instr->object()).is(rdx));
|
3274
|
+
ASSERT(ToRegister(instr->key()).is(rcx));
|
3275
|
+
ASSERT(ToRegister(instr->value()).is(rax));
|
3276
|
+
|
3277
|
+
Handle<Code> ic = instr->strict_mode()
|
3278
|
+
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
|
3279
|
+
: isolate()->builtins()->KeyedStoreIC_Initialize();
|
3280
|
+
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
3281
|
+
}
|
3282
|
+
|
3283
|
+
|
3284
|
+
void LCodeGen::DoStringAdd(LStringAdd* instr) {
|
3285
|
+
EmitPushTaggedOperand(instr->left());
|
3286
|
+
EmitPushTaggedOperand(instr->right());
|
3287
|
+
StringAddStub stub(NO_STRING_CHECK_IN_STUB);
|
3288
|
+
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
3289
|
+
}
|
3290
|
+
|
3291
|
+
|
3292
|
+
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
|
3293
|
+
class DeferredStringCharCodeAt: public LDeferredCode {
|
3294
|
+
public:
|
3295
|
+
DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
|
3296
|
+
: LDeferredCode(codegen), instr_(instr) { }
|
3297
|
+
virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
|
3298
|
+
private:
|
3299
|
+
LStringCharCodeAt* instr_;
|
3300
|
+
};
|
3301
|
+
|
3302
|
+
Register string = ToRegister(instr->string());
|
3303
|
+
Register index = no_reg;
|
3304
|
+
int const_index = -1;
|
3305
|
+
if (instr->index()->IsConstantOperand()) {
|
3306
|
+
const_index = ToInteger32(LConstantOperand::cast(instr->index()));
|
3307
|
+
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
|
3308
|
+
if (!Smi::IsValid(const_index)) {
|
3309
|
+
// Guaranteed to be out of bounds because of the assert above.
|
3310
|
+
// So the bounds check that must dominate this instruction must
|
3311
|
+
// have deoptimized already.
|
3312
|
+
if (FLAG_debug_code) {
|
3313
|
+
__ Abort("StringCharCodeAt: out of bounds index.");
|
3314
|
+
}
|
3315
|
+
// No code needs to be generated.
|
3316
|
+
return;
|
3317
|
+
}
|
3318
|
+
} else {
|
3319
|
+
index = ToRegister(instr->index());
|
3320
|
+
}
|
3321
|
+
Register result = ToRegister(instr->result());
|
3322
|
+
|
3323
|
+
DeferredStringCharCodeAt* deferred =
|
3324
|
+
new DeferredStringCharCodeAt(this, instr);
|
3325
|
+
|
3326
|
+
Label flat_string, ascii_string, done;
|
3327
|
+
|
3328
|
+
// Fetch the instance type of the receiver into result register.
|
3329
|
+
__ movq(result, FieldOperand(string, HeapObject::kMapOffset));
|
3330
|
+
__ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
|
3331
|
+
|
3332
|
+
// We need special handling for non-sequential strings.
|
3333
|
+
STATIC_ASSERT(kSeqStringTag == 0);
|
3334
|
+
__ testb(result, Immediate(kStringRepresentationMask));
|
3335
|
+
__ j(zero, &flat_string, Label::kNear);
|
3336
|
+
|
3337
|
+
// Handle cons strings and go to deferred code for the rest.
|
3338
|
+
__ testb(result, Immediate(kIsConsStringMask));
|
3339
|
+
__ j(zero, deferred->entry());
|
3340
|
+
|
3341
|
+
// ConsString.
|
3342
|
+
// Check whether the right hand side is the empty string (i.e. if
|
3343
|
+
// this is really a flat string in a cons string). If that is not
|
3344
|
+
// the case we would rather go to the runtime system now to flatten
|
3345
|
+
// the string.
|
3346
|
+
__ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
|
3347
|
+
Heap::kEmptyStringRootIndex);
|
3348
|
+
__ j(not_equal, deferred->entry());
|
3349
|
+
// Get the first of the two strings and load its instance type.
|
3350
|
+
__ movq(string, FieldOperand(string, ConsString::kFirstOffset));
|
3351
|
+
__ movq(result, FieldOperand(string, HeapObject::kMapOffset));
|
3352
|
+
__ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
|
3353
|
+
// If the first cons component is also non-flat, then go to runtime.
|
3354
|
+
STATIC_ASSERT(kSeqStringTag == 0);
|
3355
|
+
__ testb(result, Immediate(kStringRepresentationMask));
|
3356
|
+
__ j(not_zero, deferred->entry());
|
3357
|
+
|
3358
|
+
// Check for ASCII or two-byte string.
|
3359
|
+
__ bind(&flat_string);
|
3360
|
+
STATIC_ASSERT(kAsciiStringTag != 0);
|
3361
|
+
__ testb(result, Immediate(kStringEncodingMask));
|
3362
|
+
__ j(not_zero, &ascii_string, Label::kNear);
|
3363
|
+
|
3364
|
+
// Two-byte string.
|
3365
|
+
// Load the two-byte character code into the result register.
|
3366
|
+
STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
|
3367
|
+
if (instr->index()->IsConstantOperand()) {
|
3368
|
+
__ movzxwl(result,
|
3369
|
+
FieldOperand(string,
|
3370
|
+
SeqTwoByteString::kHeaderSize +
|
3371
|
+
(kUC16Size * const_index)));
|
3372
|
+
} else {
|
3373
|
+
__ movzxwl(result, FieldOperand(string,
|
3374
|
+
index,
|
3375
|
+
times_2,
|
3376
|
+
SeqTwoByteString::kHeaderSize));
|
3377
|
+
}
|
3378
|
+
__ jmp(&done, Label::kNear);
|
3379
|
+
|
3380
|
+
// ASCII string.
|
3381
|
+
// Load the byte into the result register.
|
3382
|
+
__ bind(&ascii_string);
|
3383
|
+
if (instr->index()->IsConstantOperand()) {
|
3384
|
+
__ movzxbl(result, FieldOperand(string,
|
3385
|
+
SeqAsciiString::kHeaderSize + const_index));
|
3386
|
+
} else {
|
3387
|
+
__ movzxbl(result, FieldOperand(string,
|
3388
|
+
index,
|
3389
|
+
times_1,
|
3390
|
+
SeqAsciiString::kHeaderSize));
|
3391
|
+
}
|
3392
|
+
__ bind(&done);
|
3393
|
+
__ bind(deferred->exit());
|
3394
|
+
}
|
3395
|
+
|
3396
|
+
|
3397
|
+
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
|
3398
|
+
Register string = ToRegister(instr->string());
|
3399
|
+
Register result = ToRegister(instr->result());
|
3400
|
+
|
3401
|
+
// TODO(3095996): Get rid of this. For now, we need to make the
|
3402
|
+
// result register contain a valid pointer because it is already
|
3403
|
+
// contained in the register pointer map.
|
3404
|
+
__ Set(result, 0);
|
3405
|
+
|
3406
|
+
PushSafepointRegistersScope scope(this);
|
3407
|
+
__ push(string);
|
3408
|
+
// Push the index as a smi. This is safe because of the checks in
|
3409
|
+
// DoStringCharCodeAt above.
|
3410
|
+
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
|
3411
|
+
if (instr->index()->IsConstantOperand()) {
|
3412
|
+
int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
|
3413
|
+
__ Push(Smi::FromInt(const_index));
|
3414
|
+
} else {
|
3415
|
+
Register index = ToRegister(instr->index());
|
3416
|
+
__ Integer32ToSmi(index, index);
|
3417
|
+
__ push(index);
|
3418
|
+
}
|
3419
|
+
CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
|
3420
|
+
if (FLAG_debug_code) {
|
3421
|
+
__ AbortIfNotSmi(rax);
|
3422
|
+
}
|
3423
|
+
__ SmiToInteger32(rax, rax);
|
3424
|
+
__ StoreToSafepointRegisterSlot(result, rax);
|
3425
|
+
}
|
3426
|
+
|
3427
|
+
|
3428
|
+
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
|
3429
|
+
class DeferredStringCharFromCode: public LDeferredCode {
|
3430
|
+
public:
|
3431
|
+
DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
|
3432
|
+
: LDeferredCode(codegen), instr_(instr) { }
|
3433
|
+
virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
|
3434
|
+
private:
|
3435
|
+
LStringCharFromCode* instr_;
|
3436
|
+
};
|
3437
|
+
|
3438
|
+
DeferredStringCharFromCode* deferred =
|
3439
|
+
new DeferredStringCharFromCode(this, instr);
|
3440
|
+
|
3441
|
+
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
|
3442
|
+
Register char_code = ToRegister(instr->char_code());
|
3443
|
+
Register result = ToRegister(instr->result());
|
3444
|
+
ASSERT(!char_code.is(result));
|
3445
|
+
|
3446
|
+
__ cmpl(char_code, Immediate(String::kMaxAsciiCharCode));
|
3447
|
+
__ j(above, deferred->entry());
|
3448
|
+
__ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
|
3449
|
+
__ movq(result, FieldOperand(result,
|
3450
|
+
char_code, times_pointer_size,
|
3451
|
+
FixedArray::kHeaderSize));
|
3452
|
+
__ CompareRoot(result, Heap::kUndefinedValueRootIndex);
|
3453
|
+
__ j(equal, deferred->entry());
|
3454
|
+
__ bind(deferred->exit());
|
3455
|
+
}
|
3456
|
+
|
3457
|
+
|
3458
|
+
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
|
3459
|
+
Register char_code = ToRegister(instr->char_code());
|
3460
|
+
Register result = ToRegister(instr->result());
|
3461
|
+
|
3462
|
+
// TODO(3095996): Get rid of this. For now, we need to make the
|
3463
|
+
// result register contain a valid pointer because it is already
|
3464
|
+
// contained in the register pointer map.
|
3465
|
+
__ Set(result, 0);
|
3466
|
+
|
3467
|
+
PushSafepointRegistersScope scope(this);
|
3468
|
+
__ Integer32ToSmi(char_code, char_code);
|
3469
|
+
__ push(char_code);
|
3470
|
+
CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
|
3471
|
+
__ StoreToSafepointRegisterSlot(result, rax);
|
3472
|
+
}
|
3473
|
+
|
3474
|
+
|
3475
|
+
void LCodeGen::DoStringLength(LStringLength* instr) {
|
3476
|
+
Register string = ToRegister(instr->string());
|
3477
|
+
Register result = ToRegister(instr->result());
|
3478
|
+
__ movq(result, FieldOperand(string, String::kLengthOffset));
|
3479
|
+
}
|
3480
|
+
|
3481
|
+
|
3482
|
+
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
|
3483
|
+
LOperand* input = instr->InputAt(0);
|
3484
|
+
ASSERT(input->IsRegister() || input->IsStackSlot());
|
3485
|
+
LOperand* output = instr->result();
|
3486
|
+
ASSERT(output->IsDoubleRegister());
|
3487
|
+
if (input->IsRegister()) {
|
3488
|
+
__ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
|
3489
|
+
} else {
|
3490
|
+
__ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
|
3491
|
+
}
|
3492
|
+
}
|
3493
|
+
|
3494
|
+
|
3495
|
+
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
|
3496
|
+
LOperand* input = instr->InputAt(0);
|
3497
|
+
ASSERT(input->IsRegister() && input->Equals(instr->result()));
|
3498
|
+
Register reg = ToRegister(input);
|
3499
|
+
|
3500
|
+
__ Integer32ToSmi(reg, reg);
|
3501
|
+
}
|
3502
|
+
|
3503
|
+
|
3504
|
+
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
|
3505
|
+
class DeferredNumberTagD: public LDeferredCode {
|
3506
|
+
public:
|
3507
|
+
DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
|
3508
|
+
: LDeferredCode(codegen), instr_(instr) { }
|
3509
|
+
virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
|
3510
|
+
private:
|
3511
|
+
LNumberTagD* instr_;
|
3512
|
+
};
|
3513
|
+
|
3514
|
+
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
|
3515
|
+
Register reg = ToRegister(instr->result());
|
3516
|
+
Register tmp = ToRegister(instr->TempAt(0));
|
3517
|
+
|
3518
|
+
DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
|
3519
|
+
if (FLAG_inline_new) {
|
3520
|
+
__ AllocateHeapNumber(reg, tmp, deferred->entry());
|
3521
|
+
} else {
|
3522
|
+
__ jmp(deferred->entry());
|
3523
|
+
}
|
3524
|
+
__ bind(deferred->exit());
|
3525
|
+
__ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
|
3526
|
+
}
|
3527
|
+
|
3528
|
+
|
3529
|
+
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
|
3530
|
+
// TODO(3095996): Get rid of this. For now, we need to make the
|
3531
|
+
// result register contain a valid pointer because it is already
|
3532
|
+
// contained in the register pointer map.
|
3533
|
+
Register reg = ToRegister(instr->result());
|
3534
|
+
__ Move(reg, Smi::FromInt(0));
|
3535
|
+
|
3536
|
+
{
|
3537
|
+
PushSafepointRegistersScope scope(this);
|
3538
|
+
CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
|
3539
|
+
// Ensure that value in rax survives popping registers.
|
3540
|
+
__ movq(kScratchRegister, rax);
|
3541
|
+
}
|
3542
|
+
__ movq(reg, kScratchRegister);
|
3543
|
+
}
|
3544
|
+
|
3545
|
+
|
3546
|
+
void LCodeGen::DoSmiTag(LSmiTag* instr) {
|
3547
|
+
ASSERT(instr->InputAt(0)->Equals(instr->result()));
|
3548
|
+
Register input = ToRegister(instr->InputAt(0));
|
3549
|
+
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
|
3550
|
+
__ Integer32ToSmi(input, input);
|
3551
|
+
}
|
3552
|
+
|
3553
|
+
|
3554
|
+
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
|
3555
|
+
ASSERT(instr->InputAt(0)->Equals(instr->result()));
|
3556
|
+
Register input = ToRegister(instr->InputAt(0));
|
3557
|
+
if (instr->needs_check()) {
|
3558
|
+
Condition is_smi = __ CheckSmi(input);
|
3559
|
+
DeoptimizeIf(NegateCondition(is_smi), instr->environment());
|
3560
|
+
}
|
3561
|
+
__ SmiToInteger32(input, input);
|
3562
|
+
}
|
3563
|
+
|
3564
|
+
|
3565
|
+
void LCodeGen::EmitNumberUntagD(Register input_reg,
|
3566
|
+
XMMRegister result_reg,
|
3567
|
+
LEnvironment* env) {
|
3568
|
+
Label load_smi, heap_number, done;
|
3569
|
+
|
3570
|
+
// Smi check.
|
3571
|
+
__ JumpIfSmi(input_reg, &load_smi, Label::kNear);
|
3572
|
+
|
3573
|
+
// Heap number map check.
|
3574
|
+
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
|
3575
|
+
Heap::kHeapNumberMapRootIndex);
|
3576
|
+
__ j(equal, &heap_number, Label::kNear);
|
3577
|
+
|
3578
|
+
__ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
|
3579
|
+
DeoptimizeIf(not_equal, env);
|
3580
|
+
|
3581
|
+
// Convert undefined to NaN. Compute NaN as 0/0.
|
3582
|
+
__ xorps(result_reg, result_reg);
|
3583
|
+
__ divsd(result_reg, result_reg);
|
3584
|
+
__ jmp(&done, Label::kNear);
|
3585
|
+
|
3586
|
+
// Heap number to XMM conversion.
|
3587
|
+
__ bind(&heap_number);
|
3588
|
+
__ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
|
3589
|
+
__ jmp(&done, Label::kNear);
|
3590
|
+
|
3591
|
+
// Smi to XMM conversion
|
3592
|
+
__ bind(&load_smi);
|
3593
|
+
__ SmiToInteger32(kScratchRegister, input_reg);
|
3594
|
+
__ cvtlsi2sd(result_reg, kScratchRegister);
|
3595
|
+
__ bind(&done);
|
3596
|
+
}
|
3597
|
+
|
3598
|
+
|
3599
|
+
class DeferredTaggedToI: public LDeferredCode {
|
3600
|
+
public:
|
3601
|
+
DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
|
3602
|
+
: LDeferredCode(codegen), instr_(instr) { }
|
3603
|
+
virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
|
3604
|
+
private:
|
3605
|
+
LTaggedToI* instr_;
|
3606
|
+
};
|
3607
|
+
|
3608
|
+
|
3609
|
+
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
|
3610
|
+
Label done, heap_number;
|
3611
|
+
Register input_reg = ToRegister(instr->InputAt(0));
|
3612
|
+
|
3613
|
+
// Heap number map check.
|
3614
|
+
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
|
3615
|
+
Heap::kHeapNumberMapRootIndex);
|
3616
|
+
|
3617
|
+
if (instr->truncating()) {
|
3618
|
+
__ j(equal, &heap_number, Label::kNear);
|
3619
|
+
// Check for undefined. Undefined is converted to zero for truncating
|
3620
|
+
// conversions.
|
3621
|
+
__ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
|
3622
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3623
|
+
__ Set(input_reg, 0);
|
3624
|
+
__ jmp(&done, Label::kNear);
|
3625
|
+
|
3626
|
+
__ bind(&heap_number);
|
3627
|
+
|
3628
|
+
__ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
|
3629
|
+
__ cvttsd2siq(input_reg, xmm0);
|
3630
|
+
__ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
|
3631
|
+
__ cmpq(input_reg, kScratchRegister);
|
3632
|
+
DeoptimizeIf(equal, instr->environment());
|
3633
|
+
} else {
|
3634
|
+
// Deoptimize if we don't have a heap number.
|
3635
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3636
|
+
|
3637
|
+
XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
|
3638
|
+
__ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
|
3639
|
+
__ cvttsd2si(input_reg, xmm0);
|
3640
|
+
__ cvtlsi2sd(xmm_temp, input_reg);
|
3641
|
+
__ ucomisd(xmm0, xmm_temp);
|
3642
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3643
|
+
DeoptimizeIf(parity_even, instr->environment()); // NaN.
|
3644
|
+
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
|
3645
|
+
__ testl(input_reg, input_reg);
|
3646
|
+
__ j(not_zero, &done);
|
3647
|
+
__ movmskpd(input_reg, xmm0);
|
3648
|
+
__ andl(input_reg, Immediate(1));
|
3649
|
+
DeoptimizeIf(not_zero, instr->environment());
|
3650
|
+
}
|
3651
|
+
}
|
3652
|
+
__ bind(&done);
|
3653
|
+
}
|
3654
|
+
|
3655
|
+
|
3656
|
+
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
|
3657
|
+
LOperand* input = instr->InputAt(0);
|
3658
|
+
ASSERT(input->IsRegister());
|
3659
|
+
ASSERT(input->Equals(instr->result()));
|
3660
|
+
|
3661
|
+
Register input_reg = ToRegister(input);
|
3662
|
+
DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
|
3663
|
+
__ JumpIfNotSmi(input_reg, deferred->entry());
|
3664
|
+
__ SmiToInteger32(input_reg, input_reg);
|
3665
|
+
__ bind(deferred->exit());
|
3666
|
+
}
|
3667
|
+
|
3668
|
+
|
3669
|
+
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
|
3670
|
+
LOperand* input = instr->InputAt(0);
|
3671
|
+
ASSERT(input->IsRegister());
|
3672
|
+
LOperand* result = instr->result();
|
3673
|
+
ASSERT(result->IsDoubleRegister());
|
3674
|
+
|
3675
|
+
Register input_reg = ToRegister(input);
|
3676
|
+
XMMRegister result_reg = ToDoubleRegister(result);
|
3677
|
+
|
3678
|
+
EmitNumberUntagD(input_reg, result_reg, instr->environment());
|
3679
|
+
}
|
3680
|
+
|
3681
|
+
|
3682
|
+
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
|
3683
|
+
LOperand* input = instr->InputAt(0);
|
3684
|
+
ASSERT(input->IsDoubleRegister());
|
3685
|
+
LOperand* result = instr->result();
|
3686
|
+
ASSERT(result->IsRegister());
|
3687
|
+
|
3688
|
+
XMMRegister input_reg = ToDoubleRegister(input);
|
3689
|
+
Register result_reg = ToRegister(result);
|
3690
|
+
|
3691
|
+
if (instr->truncating()) {
|
3692
|
+
// Performs a truncating conversion of a floating point number as used by
|
3693
|
+
// the JS bitwise operations.
|
3694
|
+
__ cvttsd2siq(result_reg, input_reg);
|
3695
|
+
__ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
|
3696
|
+
__ cmpq(result_reg, kScratchRegister);
|
3697
|
+
DeoptimizeIf(equal, instr->environment());
|
3698
|
+
} else {
|
3699
|
+
__ cvttsd2si(result_reg, input_reg);
|
3700
|
+
__ cvtlsi2sd(xmm0, result_reg);
|
3701
|
+
__ ucomisd(xmm0, input_reg);
|
3702
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3703
|
+
DeoptimizeIf(parity_even, instr->environment()); // NaN.
|
3704
|
+
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
|
3705
|
+
Label done;
|
3706
|
+
// The integer converted back is equal to the original. We
|
3707
|
+
// only have to test if we got -0 as an input.
|
3708
|
+
__ testl(result_reg, result_reg);
|
3709
|
+
__ j(not_zero, &done, Label::kNear);
|
3710
|
+
__ movmskpd(result_reg, input_reg);
|
3711
|
+
// Bit 0 contains the sign of the double in input_reg.
|
3712
|
+
// If input was positive, we are ok and return 0, otherwise
|
3713
|
+
// deoptimize.
|
3714
|
+
__ andl(result_reg, Immediate(1));
|
3715
|
+
DeoptimizeIf(not_zero, instr->environment());
|
3716
|
+
__ bind(&done);
|
3717
|
+
}
|
3718
|
+
}
|
3719
|
+
}
|
3720
|
+
|
3721
|
+
|
3722
|
+
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
|
3723
|
+
LOperand* input = instr->InputAt(0);
|
3724
|
+
Condition cc = masm()->CheckSmi(ToRegister(input));
|
3725
|
+
DeoptimizeIf(NegateCondition(cc), instr->environment());
|
3726
|
+
}
|
3727
|
+
|
3728
|
+
|
3729
|
+
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
|
3730
|
+
LOperand* input = instr->InputAt(0);
|
3731
|
+
Condition cc = masm()->CheckSmi(ToRegister(input));
|
3732
|
+
DeoptimizeIf(cc, instr->environment());
|
3733
|
+
}
|
3734
|
+
|
3735
|
+
|
3736
|
+
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
|
3737
|
+
Register input = ToRegister(instr->InputAt(0));
|
3738
|
+
|
3739
|
+
__ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
|
3740
|
+
|
3741
|
+
if (instr->hydrogen()->is_interval_check()) {
|
3742
|
+
InstanceType first;
|
3743
|
+
InstanceType last;
|
3744
|
+
instr->hydrogen()->GetCheckInterval(&first, &last);
|
3745
|
+
|
3746
|
+
__ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
|
3747
|
+
Immediate(static_cast<int8_t>(first)));
|
3748
|
+
|
3749
|
+
// If there is only one type in the interval check for equality.
|
3750
|
+
if (first == last) {
|
3751
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3752
|
+
} else {
|
3753
|
+
DeoptimizeIf(below, instr->environment());
|
3754
|
+
// Omit check for the last type.
|
3755
|
+
if (last != LAST_TYPE) {
|
3756
|
+
__ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
|
3757
|
+
Immediate(static_cast<int8_t>(last)));
|
3758
|
+
DeoptimizeIf(above, instr->environment());
|
3759
|
+
}
|
3760
|
+
}
|
3761
|
+
} else {
|
3762
|
+
uint8_t mask;
|
3763
|
+
uint8_t tag;
|
3764
|
+
instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
|
3765
|
+
|
3766
|
+
if (IsPowerOf2(mask)) {
|
3767
|
+
ASSERT(tag == 0 || IsPowerOf2(tag));
|
3768
|
+
__ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
|
3769
|
+
Immediate(mask));
|
3770
|
+
DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment());
|
3771
|
+
} else {
|
3772
|
+
__ movzxbl(kScratchRegister,
|
3773
|
+
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
|
3774
|
+
__ andb(kScratchRegister, Immediate(mask));
|
3775
|
+
__ cmpb(kScratchRegister, Immediate(tag));
|
3776
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3777
|
+
}
|
3778
|
+
}
|
3779
|
+
}
|
3780
|
+
|
3781
|
+
|
3782
|
+
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
|
3783
|
+
ASSERT(instr->InputAt(0)->IsRegister());
|
3784
|
+
Register reg = ToRegister(instr->InputAt(0));
|
3785
|
+
__ Cmp(reg, instr->hydrogen()->target());
|
3786
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3787
|
+
}
|
3788
|
+
|
3789
|
+
|
3790
|
+
void LCodeGen::DoCheckMap(LCheckMap* instr) {
|
3791
|
+
LOperand* input = instr->InputAt(0);
|
3792
|
+
ASSERT(input->IsRegister());
|
3793
|
+
Register reg = ToRegister(input);
|
3794
|
+
__ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
|
3795
|
+
instr->hydrogen()->map());
|
3796
|
+
DeoptimizeIf(not_equal, instr->environment());
|
3797
|
+
}
|
3798
|
+
|
3799
|
+
|
3800
|
+
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
|
3801
|
+
XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
|
3802
|
+
Register result_reg = ToRegister(instr->result());
|
3803
|
+
Register temp_reg = ToRegister(instr->TempAt(0));
|
3804
|
+
__ ClampDoubleToUint8(value_reg, xmm0, result_reg, temp_reg);
|
3805
|
+
}
|
3806
|
+
|
3807
|
+
|
3808
|
+
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
|
3809
|
+
ASSERT(instr->unclamped()->Equals(instr->result()));
|
3810
|
+
Register value_reg = ToRegister(instr->result());
|
3811
|
+
__ ClampUint8(value_reg);
|
3812
|
+
}
|
3813
|
+
|
3814
|
+
|
3815
|
+
// Clamps a tagged value to the uint8 range [0, 255] in place.  Handles three
// cases: smi (clamp the integer), heap number (clamp the double), and
// undefined (clamped to zero).  Any other value deoptimizes.
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->unclamped());
  Register temp_reg = ToRegister(instr->TempAt(0));
  XMMRegister temp_xmm_reg = ToDoubleRegister(instr->TempAt(1));
  Label is_smi, done, heap_number;

  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ Cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr->environment());
  __ movq(input_reg, Immediate(0));
  __ jmp(&done, Label::kNear);

  // Heap number: load the double payload and clamp it, storing the
  // resulting integer back into input_reg (which is also the result).
  __ bind(&heap_number);
  __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg, temp_reg);
  __ jmp(&done, Label::kNear);

  // smi: untag, then clamp the plain int32.
  __ bind(&is_smi);
  __ SmiToInteger32(input_reg, input_reg);
  __ ClampUint8(input_reg);

  __ bind(&done);
}
|
3849
|
+
|
3850
|
+
|
3851
|
+
// Loads a heap-object constant into |result|.  Objects in new space may move
// under GC, so they are referenced indirectly through a global property cell
// that the GC keeps up to date; old-space objects are embedded directly.
void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    // Load the cell address, then dereference it to get the current object.
    __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    __ movq(result, Operand(result, 0));
  } else {
    __ Move(result, object);
  }
}
|
3861
|
+
|
3862
|
+
|
3863
|
+
// Walks the prototype chain from instr->prototype() up to (and including)
// instr->holder(), deoptimizing if any object on the chain no longer has the
// map it had at compile time.  The chain itself is walked at compile time;
// the emitted code only performs the map comparisons.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register reg = ToRegister(instr->TempAt(0));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
           Handle<Map>(current_prototype->map()));
    DeoptimizeIf(not_equal, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Handle<Map>(current_prototype->map()));
  DeoptimizeIf(not_equal, instr->environment());
}
|
3888
|
+
|
3889
|
+
|
3890
|
+
// Materializes an array literal.  Pushes the literals array, literal index,
// and constant elements, then dispatches to the cheapest applicable path:
// the FastCloneShallowArrayStub for shallow literals (COW or regular), or a
// runtime call for nested or oversized literals.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Setup the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->constant_elements());

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // Copy-on-write literals are by construction depth-1 and shallow.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals need the full (deep) runtime clone.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub's inline copy; use the shallow runtime path.
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
|
3916
|
+
|
3917
|
+
|
3918
|
+
// Materializes an object literal via a runtime call: the deep variant for
// nested literals, the shallow variant otherwise.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  // Setup the parameters to the stub/runtime call.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->constant_properties());
  // Smi-encoded flag telling the runtime whether fast elements can be used.
  __ Push(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0));

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}
|
3933
|
+
|
3934
|
+
|
3935
|
+
// Transitions the object in rax to fast-properties mode via the runtime.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(rax));
  __ push(rax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
|
3940
|
+
|
3941
|
+
|
3942
|
+
// Materializes a regexp literal.  If the literal slot in the function's
// literals array is still undefined, the boilerplate regexp is created via a
// runtime call; the boilerplate is then shallow-copied into a fresh
// new-space allocation (falling back to a runtime allocation on failure).
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  // Undefined in the slot means the boilerplate hasn't been created yet.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->pattern());
  __ Push(instr->hydrogen()->flags());
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Slow path: new-space allocation failed; allocate via the runtime,
  // preserving the boilerplate (rbx) across the call.
  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  // Copy the trailing word if the size is an odd number of pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
}
|
3992
|
+
|
3993
|
+
|
3994
|
+
// Materializes a function literal (closure).  Non-pretenured functions with
// no literals use the fast stub allocation; otherwise the closure is built
// by the runtime, which also receives the pretenure flag.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ Push(shared_info);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Runtime path: context (rsi), shared info, and pretenure flag
    // (as true/false root values).
    __ push(rsi);
    __ Push(shared_info);
    __ PushRoot(pretenure ?
                Heap::kTrueValueRootIndex :
                Heap::kFalseValueRootIndex);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
|
4013
|
+
|
4014
|
+
|
4015
|
+
// Implements the generic typeof operator by pushing the operand and calling
// the runtime.
void LCodeGen::DoTypeof(LTypeof* instr) {
  LOperand* input = instr->InputAt(0);
  EmitPushTaggedOperand(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
|
4020
|
+
|
4021
|
+
|
4022
|
+
// Materializes (typeof x == 'literal') as a true/false value in the result
// register.  The type test itself is emitted by EmitTypeofIs, which may jump
// directly to the labels or fall through with a condition to branch on.
void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  Condition final_branch_condition = EmitTypeofIs(&true_label,
                                                  &false_label,
                                                  input,
                                                  instr->type_literal());
  __ j(final_branch_condition, &true_label);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}
|
4043
|
+
|
4044
|
+
|
4045
|
+
// Pushes a tagged operand onto the stack, handling constant, register, and
// stack-slot operands.  Double registers are not valid here since their
// contents are untagged.
void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
  ASSERT(!operand->IsDoubleRegister());
  if (operand->IsConstantOperand()) {
    __ Push(ToHandle(LConstantOperand::cast(operand)));
  } else if (operand->IsRegister()) {
    __ push(ToRegister(operand));
  } else {
    __ push(ToOperand(operand));
  }
}
|
4055
|
+
|
4056
|
+
|
4057
|
+
// Branch form of typeof comparison: emits the type test against the block
// labels and branches on the resulting condition.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}
|
4071
|
+
|
4072
|
+
|
4073
|
+
// Emits the code to test whether typeof |input| equals |type_name|.
// May jump directly to |true_label| or |false_label| for early decisions;
// otherwise it falls through and returns the condition on which the caller
// should branch to |true_label|.  Clobbers |input| in the non-smi paths
// that load its map.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(input, true_label);
    __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);

    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label);
    // Undetectable strings are not "string".
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Only the true and false values are "boolean".
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ j(equal, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = not_zero;

  } else if (type_name->Equals(heap()->function_symbol())) {
    __ JumpIfSmi(input, false_label);
    // Instance types at or above FIRST_FUNCTION_CLASS_TYPE are callable.
    __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
    final_branch_condition = above_equal;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    // null is "object".
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ j(equal, true_label);
    // Instance type must be in [FIRST_JS_OBJECT_TYPE,
    // FIRST_FUNCTION_CLASS_TYPE) to be a non-callable JS object.
    __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
    __ j(below, false_label);
    __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
    __ j(above_equal, false_label);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else {
    // Unknown type literal: always false.
    final_branch_condition = never;
    __ jmp(false_label);
  }

  return final_branch_condition;
}
|
4134
|
+
|
4135
|
+
|
4136
|
+
// Materializes whether the current function was invoked as a constructor,
// producing the true or false value in the result register.  The frame-walk
// test is emitted by EmitIsConstructCall, which sets the flags for an
// `equal` branch.
void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  Register result = ToRegister(instr->result());
  Label true_label;
  Label done;

  EmitIsConstructCall(result);
  __ j(equal, &true_label, Label::kNear);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}
|
4153
|
+
|
4154
|
+
|
4155
|
+
// Branch form of the construct-call test: emits the frame-walk check and
// branches on `equal`.
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp);
  EmitBranch(true_block, false_block, equal);
}
|
4163
|
+
|
4164
|
+
|
4165
|
+
// Emits code that sets the zero flag iff the current function was invoked
// as a constructor.  Walks to the calling frame (skipping an arguments
// adaptor frame if one is present) and compares its marker slot against the
// CONSTRUCT frame marker; callers branch on `equal`.  Clobbers |temp|.
void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &check_frame_marker, Label::kNear);
  // Fix: load the adaptor frame's caller FP through |temp| (which holds the
  // adaptor frame pointer), not through rax, whose contents are unrelated
  // here.  Matches the ia32 port and later x64 versions.
  __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
}
|
4181
|
+
|
4182
|
+
|
4183
|
+
// Intentionally emits no code.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
|
4187
|
+
|
4188
|
+
|
4189
|
+
// Unconditional deoptimization: no_condition makes DeoptimizeIf always bail.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
|
4192
|
+
|
4193
|
+
|
4194
|
+
// Implements the delete operator by invoking the DELETE builtin with the
// object, key, and the current strict-mode flag on the stack.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  EmitPushTaggedOperand(obj);
  EmitPushTaggedOperand(key);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ Push(Smi::FromInt(strict_mode_flag()));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
|
4213
|
+
|
4214
|
+
|
4215
|
+
// Implements the `in` operator by invoking the IN builtin.  Note the push
// order: key first, then object.
void LCodeGen::DoIn(LIn* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  EmitPushTaggedOperand(key);
  EmitPushTaggedOperand(obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
|
4233
|
+
|
4234
|
+
|
4235
|
+
// Emits an interrupt/stack-overflow check: if rsp has gone below the stack
// limit, calls the StackCheckStub; otherwise falls through.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label done;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &done, Label::kNear);

  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&done);
}
|
4245
|
+
|
4246
|
+
|
4247
|
+
// On-stack-replacement entry point.  Emits no code; records the environment
// (with spilled-register info) for deoptimization and remembers the current
// assembler offset as the OSR entry PC.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
|
4262
|
+
|
4263
|
+
#undef __
|
4264
|
+
|
4265
|
+
} } // namespace v8::internal
|
4266
|
+
|
4267
|
+
#endif // V8_TARGET_ARCH_X64
|