libv8 3.3.10.4 → 3.5.10.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -146,11 +146,11 @@ bool LCodeGen::GeneratePrologue() {
146
146
  // fp: Caller's frame pointer.
147
147
  // lr: Caller's pc.
148
148
 
149
- // Strict mode functions need to replace the receiver with undefined
150
- // when called as functions (without an explicit receiver
151
- // object). r5 is zero for method calls and non-zero for function
152
- // calls.
153
- if (info_->is_strict_mode()) {
149
+ // Strict mode functions and builtins need to replace the receiver
150
+ // with undefined when called as functions (without an explicit
151
+ // receiver object). r5 is zero for method calls and non-zero for
152
+ // function calls.
153
+ if (info_->is_strict_mode() || info_->is_native()) {
154
154
  Label ok;
155
155
  __ cmp(r5, Operand(0));
156
156
  __ b(eq, &ok);
@@ -189,7 +189,7 @@ bool LCodeGen::GeneratePrologue() {
189
189
  FastNewContextStub stub(heap_slots);
190
190
  __ CallStub(&stub);
191
191
  } else {
192
- __ CallRuntime(Runtime::kNewContext, 1);
192
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
193
193
  }
194
194
  RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
195
195
  // Context is returned in both r0 and cp. It replaces the context
@@ -257,11 +257,20 @@ LInstruction* LCodeGen::GetNextInstruction() {
257
257
 
258
258
  bool LCodeGen::GenerateDeferredCode() {
259
259
  ASSERT(is_generating());
260
- for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
261
- LDeferredCode* code = deferred_[i];
262
- __ bind(code->entry());
263
- code->Generate();
264
- __ jmp(code->exit());
260
+ if (deferred_.length() > 0) {
261
+ for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
262
+ LDeferredCode* code = deferred_[i];
263
+ __ bind(code->entry());
264
+ code->Generate();
265
+ __ jmp(code->exit());
266
+ }
267
+
268
+ // Pad code to ensure that the last piece of deferred code have
269
+ // room for lazy bailout.
270
+ while ((masm()->pc_offset() - LastSafepointEnd())
271
+ < Deoptimizer::patch_size()) {
272
+ __ nop();
273
+ }
265
274
  }
266
275
 
267
276
  // Force constant pool emission at the end of the deferred code to make
@@ -542,6 +551,13 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
542
551
  RecordPosition(pointers->position());
543
552
  __ Call(code, mode);
544
553
  RegisterLazyDeoptimization(instr, safepoint_mode);
554
+
555
+ // Signal that we don't inline smi code before these stubs in the
556
+ // optimizing code generator.
557
+ if (code->kind() == Code::BINARY_OP_IC ||
558
+ code->kind() == Code::COMPARE_IC) {
559
+ __ nop();
560
+ }
545
561
  }
546
562
 
547
563
 
@@ -770,7 +786,7 @@ void LCodeGen::RecordSafepointWithRegistersAndDoubles(
770
786
 
771
787
 
772
788
  void LCodeGen::RecordPosition(int position) {
773
- if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
789
+ if (position == RelocInfo::kNoPosition) return;
774
790
  masm()->positions_recorder()->RecordPosition(position);
775
791
  }
776
792
 
@@ -873,6 +889,7 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
873
889
  void LCodeGen::DoModI(LModI* instr) {
874
890
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
875
891
  Register dividend = ToRegister(instr->InputAt(0));
892
+ Register result = ToRegister(instr->result());
876
893
 
877
894
  int32_t divisor =
878
895
  HConstant::cast(instr->hydrogen()->right())->Integer32Value();
@@ -882,15 +899,15 @@ void LCodeGen::DoModI(LModI* instr) {
882
899
  Label positive_dividend, done;
883
900
  __ cmp(dividend, Operand(0));
884
901
  __ b(pl, &positive_dividend);
885
- __ rsb(dividend, dividend, Operand(0));
886
- __ and_(dividend, dividend, Operand(divisor - 1));
887
- __ rsb(dividend, dividend, Operand(0), SetCC);
902
+ __ rsb(result, dividend, Operand(0));
903
+ __ and_(result, result, Operand(divisor - 1), SetCC);
888
904
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
889
- __ b(ne, &done);
890
- DeoptimizeIf(al, instr->environment());
905
+ DeoptimizeIf(eq, instr->environment());
891
906
  }
907
+ __ rsb(result, result, Operand(0));
908
+ __ b(&done);
892
909
  __ bind(&positive_dividend);
893
- __ and_(dividend, dividend, Operand(divisor - 1));
910
+ __ and_(result, dividend, Operand(divisor - 1));
894
911
  __ bind(&done);
895
912
  return;
896
913
  }
@@ -906,8 +923,6 @@ void LCodeGen::DoModI(LModI* instr) {
906
923
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
907
924
  DwVfpRegister quotient = double_scratch0();
908
925
 
909
- ASSERT(result.is(left));
910
-
911
926
  ASSERT(!dividend.is(divisor));
912
927
  ASSERT(!dividend.is(quotient));
913
928
  ASSERT(!divisor.is(quotient));
@@ -923,6 +938,8 @@ void LCodeGen::DoModI(LModI* instr) {
923
938
  DeoptimizeIf(eq, instr->environment());
924
939
  }
925
940
 
941
+ __ Move(result, left);
942
+
926
943
  // (0 % x) must yield 0 (if x is finite, which is the case here).
927
944
  __ cmp(left, Operand(0));
928
945
  __ b(eq, &done);
@@ -1119,68 +1136,125 @@ void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
1119
1136
 
1120
1137
  void LCodeGen::DoMulI(LMulI* instr) {
1121
1138
  Register scratch = scratch0();
1139
+ Register result = ToRegister(instr->result());
1140
+ // Note that result may alias left.
1122
1141
  Register left = ToRegister(instr->InputAt(0));
1123
- Register right = EmitLoadRegister(instr->InputAt(1), scratch);
1142
+ LOperand* right_op = instr->InputAt(1);
1124
1143
 
1125
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
1126
- !instr->InputAt(1)->IsConstantOperand()) {
1127
- __ orr(ToRegister(instr->TempAt(0)), left, right);
1128
- }
1144
+ bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1145
+ bool bailout_on_minus_zero =
1146
+ instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);
1147
+
1148
+ if (right_op->IsConstantOperand() && !can_overflow) {
1149
+ // Use optimized code for specific constants.
1150
+ int32_t constant = ToInteger32(LConstantOperand::cast(right_op));
1151
+
1152
+ if (bailout_on_minus_zero && (constant < 0)) {
1153
+ // The case of a null constant will be handled separately.
1154
+ // If constant is negative and left is null, the result should be -0.
1155
+ __ cmp(left, Operand(0));
1156
+ DeoptimizeIf(eq, instr->environment());
1157
+ }
1158
+
1159
+ switch (constant) {
1160
+ case -1:
1161
+ __ rsb(result, left, Operand(0));
1162
+ break;
1163
+ case 0:
1164
+ if (bailout_on_minus_zero) {
1165
+ // If left is strictly negative and the constant is null, the
1166
+ // result is -0. Deoptimize if required, otherwise return 0.
1167
+ __ cmp(left, Operand(0));
1168
+ DeoptimizeIf(mi, instr->environment());
1169
+ }
1170
+ __ mov(result, Operand(0));
1171
+ break;
1172
+ case 1:
1173
+ __ Move(result, left);
1174
+ break;
1175
+ default:
1176
+ // Multiplying by powers of two and powers of two plus or minus
1177
+ // one can be done faster with shifted operands.
1178
+ // For other constants we emit standard code.
1179
+ int32_t mask = constant >> 31;
1180
+ uint32_t constant_abs = (constant + mask) ^ mask;
1181
+
1182
+ if (IsPowerOf2(constant_abs) ||
1183
+ IsPowerOf2(constant_abs - 1) ||
1184
+ IsPowerOf2(constant_abs + 1)) {
1185
+ if (IsPowerOf2(constant_abs)) {
1186
+ int32_t shift = WhichPowerOf2(constant_abs);
1187
+ __ mov(result, Operand(left, LSL, shift));
1188
+ } else if (IsPowerOf2(constant_abs - 1)) {
1189
+ int32_t shift = WhichPowerOf2(constant_abs - 1);
1190
+ __ add(result, left, Operand(left, LSL, shift));
1191
+ } else if (IsPowerOf2(constant_abs + 1)) {
1192
+ int32_t shift = WhichPowerOf2(constant_abs + 1);
1193
+ __ rsb(result, left, Operand(left, LSL, shift));
1194
+ }
1195
+
1196
+ // Correct the sign of the result is the constant is negative.
1197
+ if (constant < 0) __ rsb(result, result, Operand(0));
1198
+
1199
+ } else {
1200
+ // Generate standard code.
1201
+ __ mov(ip, Operand(constant));
1202
+ __ mul(result, left, ip);
1203
+ }
1204
+ }
1129
1205
 
1130
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1131
- // scratch:left = left * right.
1132
- __ smull(left, scratch, left, right);
1133
- __ mov(ip, Operand(left, ASR, 31));
1134
- __ cmp(ip, Operand(scratch));
1135
- DeoptimizeIf(ne, instr->environment());
1136
1206
  } else {
1137
- __ mul(left, left, right);
1138
- }
1207
+ Register right = EmitLoadRegister(right_op, scratch);
1208
+ if (bailout_on_minus_zero) {
1209
+ __ orr(ToRegister(instr->TempAt(0)), left, right);
1210
+ }
1139
1211
 
1140
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1141
- // Bail out if the result is supposed to be negative zero.
1142
- Label done;
1143
- __ cmp(left, Operand(0));
1144
- __ b(ne, &done);
1145
- if (instr->InputAt(1)->IsConstantOperand()) {
1146
- if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
1147
- DeoptimizeIf(al, instr->environment());
1148
- }
1212
+ if (can_overflow) {
1213
+ // scratch:result = left * right.
1214
+ __ smull(result, scratch, left, right);
1215
+ __ cmp(scratch, Operand(result, ASR, 31));
1216
+ DeoptimizeIf(ne, instr->environment());
1149
1217
  } else {
1150
- // Test the non-zero operand for negative sign.
1218
+ __ mul(result, left, right);
1219
+ }
1220
+
1221
+ if (bailout_on_minus_zero) {
1222
+ // Bail out if the result is supposed to be negative zero.
1223
+ Label done;
1224
+ __ cmp(result, Operand(0));
1225
+ __ b(ne, &done);
1151
1226
  __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
1152
1227
  DeoptimizeIf(mi, instr->environment());
1228
+ __ bind(&done);
1153
1229
  }
1154
- __ bind(&done);
1155
1230
  }
1156
1231
  }
1157
1232
 
1158
1233
 
1159
1234
  void LCodeGen::DoBitI(LBitI* instr) {
1160
- LOperand* left = instr->InputAt(0);
1161
- LOperand* right = instr->InputAt(1);
1162
- ASSERT(left->Equals(instr->result()));
1163
- ASSERT(left->IsRegister());
1164
- Register result = ToRegister(left);
1165
- Operand right_operand(no_reg);
1235
+ LOperand* left_op = instr->InputAt(0);
1236
+ LOperand* right_op = instr->InputAt(1);
1237
+ ASSERT(left_op->IsRegister());
1238
+ Register left = ToRegister(left_op);
1239
+ Register result = ToRegister(instr->result());
1240
+ Operand right(no_reg);
1166
1241
 
1167
- if (right->IsStackSlot() || right->IsArgument()) {
1168
- Register right_reg = EmitLoadRegister(right, ip);
1169
- right_operand = Operand(right_reg);
1242
+ if (right_op->IsStackSlot() || right_op->IsArgument()) {
1243
+ right = Operand(EmitLoadRegister(right_op, ip));
1170
1244
  } else {
1171
- ASSERT(right->IsRegister() || right->IsConstantOperand());
1172
- right_operand = ToOperand(right);
1245
+ ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
1246
+ right = ToOperand(right_op);
1173
1247
  }
1174
1248
 
1175
1249
  switch (instr->op()) {
1176
1250
  case Token::BIT_AND:
1177
- __ and_(result, ToRegister(left), right_operand);
1251
+ __ and_(result, left, right);
1178
1252
  break;
1179
1253
  case Token::BIT_OR:
1180
- __ orr(result, ToRegister(left), right_operand);
1254
+ __ orr(result, left, right);
1181
1255
  break;
1182
1256
  case Token::BIT_XOR:
1183
- __ eor(result, ToRegister(left), right_operand);
1257
+ __ eor(result, left, right);
1184
1258
  break;
1185
1259
  default:
1186
1260
  UNREACHABLE();
@@ -1190,54 +1264,62 @@ void LCodeGen::DoBitI(LBitI* instr) {
1190
1264
 
1191
1265
 
1192
1266
  void LCodeGen::DoShiftI(LShiftI* instr) {
1267
+ // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
1268
+ // result may alias either of them.
1269
+ LOperand* right_op = instr->InputAt(1);
1270
+ Register left = ToRegister(instr->InputAt(0));
1271
+ Register result = ToRegister(instr->result());
1193
1272
  Register scratch = scratch0();
1194
- LOperand* left = instr->InputAt(0);
1195
- LOperand* right = instr->InputAt(1);
1196
- ASSERT(left->Equals(instr->result()));
1197
- ASSERT(left->IsRegister());
1198
- Register result = ToRegister(left);
1199
- if (right->IsRegister()) {
1200
- // Mask the right operand.
1201
- __ and_(scratch, ToRegister(right), Operand(0x1F));
1273
+ if (right_op->IsRegister()) {
1274
+ // Mask the right_op operand.
1275
+ __ and_(scratch, ToRegister(right_op), Operand(0x1F));
1202
1276
  switch (instr->op()) {
1203
1277
  case Token::SAR:
1204
- __ mov(result, Operand(result, ASR, scratch));
1278
+ __ mov(result, Operand(left, ASR, scratch));
1205
1279
  break;
1206
1280
  case Token::SHR:
1207
1281
  if (instr->can_deopt()) {
1208
- __ mov(result, Operand(result, LSR, scratch), SetCC);
1282
+ __ mov(result, Operand(left, LSR, scratch), SetCC);
1209
1283
  DeoptimizeIf(mi, instr->environment());
1210
1284
  } else {
1211
- __ mov(result, Operand(result, LSR, scratch));
1285
+ __ mov(result, Operand(left, LSR, scratch));
1212
1286
  }
1213
1287
  break;
1214
1288
  case Token::SHL:
1215
- __ mov(result, Operand(result, LSL, scratch));
1289
+ __ mov(result, Operand(left, LSL, scratch));
1216
1290
  break;
1217
1291
  default:
1218
1292
  UNREACHABLE();
1219
1293
  break;
1220
1294
  }
1221
1295
  } else {
1222
- int value = ToInteger32(LConstantOperand::cast(right));
1296
+ // Mask the right_op operand.
1297
+ int value = ToInteger32(LConstantOperand::cast(right_op));
1223
1298
  uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1224
1299
  switch (instr->op()) {
1225
1300
  case Token::SAR:
1226
1301
  if (shift_count != 0) {
1227
- __ mov(result, Operand(result, ASR, shift_count));
1302
+ __ mov(result, Operand(left, ASR, shift_count));
1303
+ } else {
1304
+ __ Move(result, left);
1228
1305
  }
1229
1306
  break;
1230
1307
  case Token::SHR:
1231
- if (shift_count == 0 && instr->can_deopt()) {
1232
- __ tst(result, Operand(0x80000000));
1233
- DeoptimizeIf(ne, instr->environment());
1308
+ if (shift_count != 0) {
1309
+ __ mov(result, Operand(left, LSR, shift_count));
1234
1310
  } else {
1235
- __ mov(result, Operand(result, LSR, shift_count));
1311
+ if (instr->can_deopt()) {
1312
+ __ tst(left, Operand(0x80000000));
1313
+ DeoptimizeIf(ne, instr->environment());
1314
+ }
1315
+ __ Move(result, left);
1236
1316
  }
1237
1317
  break;
1238
1318
  case Token::SHL:
1239
1319
  if (shift_count != 0) {
1240
- __ mov(result, Operand(result, LSL, shift_count));
1320
+ __ mov(result, Operand(left, LSL, shift_count));
1321
+ } else {
1322
+ __ Move(result, left);
1241
1323
  }
1242
1324
  break;
1243
1325
  default:
@@ -1251,16 +1333,16 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
1251
1333
  void LCodeGen::DoSubI(LSubI* instr) {
1252
1334
  LOperand* left = instr->InputAt(0);
1253
1335
  LOperand* right = instr->InputAt(1);
1254
- ASSERT(left->Equals(instr->result()));
1336
+ LOperand* result = instr->result();
1255
1337
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1256
1338
  SBit set_cond = can_overflow ? SetCC : LeaveCC;
1257
1339
 
1258
1340
  if (right->IsStackSlot() || right->IsArgument()) {
1259
1341
  Register right_reg = EmitLoadRegister(right, ip);
1260
- __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1342
+ __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
1261
1343
  } else {
1262
1344
  ASSERT(right->IsRegister() || right->IsConstantOperand());
1263
- __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1345
+ __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
1264
1346
  }
1265
1347
 
1266
1348
  if (can_overflow) {
@@ -1279,7 +1361,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
1279
1361
  ASSERT(instr->result()->IsDoubleRegister());
1280
1362
  DwVfpRegister result = ToDoubleRegister(instr->result());
1281
1363
  double v = instr->value();
1282
- __ vmov(result, v);
1364
+ __ Vmov(result, v);
1283
1365
  }
1284
1366
 
1285
1367
 
@@ -1296,17 +1378,24 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1296
1378
  }
1297
1379
 
1298
1380
 
1299
- void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
1381
+ void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
1300
1382
  Register result = ToRegister(instr->result());
1301
1383
  Register array = ToRegister(instr->InputAt(0));
1302
- __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
1384
+ __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
1303
1385
  }
1304
1386
 
1305
1387
 
1306
- void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1388
+ void LCodeGen::DoElementsKind(LElementsKind* instr) {
1307
1389
  Register result = ToRegister(instr->result());
1308
- Register array = ToRegister(instr->InputAt(0));
1309
- __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
1390
+ Register input = ToRegister(instr->InputAt(0));
1391
+
1392
+ // Load map into |result|.
1393
+ __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
1394
+ // Load the map's "bit field 2" into |result|. We only need the first byte,
1395
+ // but the following bit field extraction takes care of that anyway.
1396
+ __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
1397
+ // Retrieve elements_kind from bit field 2.
1398
+ __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
1310
1399
  }
1311
1400
 
1312
1401
 
@@ -1314,15 +1403,16 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
1314
1403
  Register input = ToRegister(instr->InputAt(0));
1315
1404
  Register result = ToRegister(instr->result());
1316
1405
  Register map = ToRegister(instr->TempAt(0));
1317
- ASSERT(input.is(result));
1318
1406
  Label done;
1319
1407
 
1320
1408
  // If the object is a smi return the object.
1321
1409
  __ tst(input, Operand(kSmiTagMask));
1410
+ __ Move(result, input, eq);
1322
1411
  __ b(eq, &done);
1323
1412
 
1324
1413
  // If the object is not a value type, return the object.
1325
1414
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
1415
+ __ Move(result, input, ne);
1326
1416
  __ b(ne, &done);
1327
1417
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));
1328
1418
 
@@ -1331,9 +1421,9 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
1331
1421
 
1332
1422
 
1333
1423
  void LCodeGen::DoBitNotI(LBitNotI* instr) {
1334
- LOperand* input = instr->InputAt(0);
1335
- ASSERT(input->Equals(instr->result()));
1336
- __ mvn(ToRegister(input), Operand(ToRegister(input)));
1424
+ Register input = ToRegister(instr->InputAt(0));
1425
+ Register result = ToRegister(instr->result());
1426
+ __ mvn(result, Operand(input));
1337
1427
  }
1338
1428
 
1339
1429
 
@@ -1351,16 +1441,16 @@ void LCodeGen::DoThrow(LThrow* instr) {
1351
1441
  void LCodeGen::DoAddI(LAddI* instr) {
1352
1442
  LOperand* left = instr->InputAt(0);
1353
1443
  LOperand* right = instr->InputAt(1);
1354
- ASSERT(left->Equals(instr->result()));
1444
+ LOperand* result = instr->result();
1355
1445
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1356
1446
  SBit set_cond = can_overflow ? SetCC : LeaveCC;
1357
1447
 
1358
1448
  if (right->IsStackSlot() || right->IsArgument()) {
1359
1449
  Register right_reg = EmitLoadRegister(right, ip);
1360
- __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1450
+ __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
1361
1451
  } else {
1362
1452
  ASSERT(right->IsRegister() || right->IsConstantOperand());
1363
- __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1453
+ __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
1364
1454
  }
1365
1455
 
1366
1456
  if (can_overflow) {
@@ -1372,18 +1462,19 @@ void LCodeGen::DoAddI(LAddI* instr) {
1372
1462
  void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1373
1463
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
1374
1464
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
1465
+ DoubleRegister result = ToDoubleRegister(instr->result());
1375
1466
  switch (instr->op()) {
1376
1467
  case Token::ADD:
1377
- __ vadd(left, left, right);
1468
+ __ vadd(result, left, right);
1378
1469
  break;
1379
1470
  case Token::SUB:
1380
- __ vsub(left, left, right);
1471
+ __ vsub(result, left, right);
1381
1472
  break;
1382
1473
  case Token::MUL:
1383
- __ vmul(left, left, right);
1474
+ __ vmul(result, left, right);
1384
1475
  break;
1385
1476
  case Token::DIV:
1386
- __ vdiv(left, left, right);
1477
+ __ vdiv(result, left, right);
1387
1478
  break;
1388
1479
  case Token::MOD: {
1389
1480
  // Save r0-r3 on the stack.
@@ -1395,7 +1486,7 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1395
1486
  ExternalReference::double_fp_operation(Token::MOD, isolate()),
1396
1487
  0, 2);
1397
1488
  // Move the result in the double result register.
1398
- __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));
1489
+ __ GetCFunctionDoubleResult(result);
1399
1490
 
1400
1491
  // Restore r0-r3.
1401
1492
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
@@ -1415,6 +1506,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1415
1506
 
1416
1507
  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
1417
1508
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1509
+ __ nop(); // Signals no inlined code.
1418
1510
  }
1419
1511
 
1420
1512
 
@@ -1449,7 +1541,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
1449
1541
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1450
1542
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1451
1543
 
1452
- Representation r = instr->hydrogen()->representation();
1544
+ Representation r = instr->hydrogen()->value()->representation();
1453
1545
  if (r.IsInteger32()) {
1454
1546
  Register reg = ToRegister(instr->InputAt(0));
1455
1547
  __ cmp(reg, Operand(0));
@@ -1461,101 +1553,116 @@ void LCodeGen::DoBranch(LBranch* instr) {
1461
1553
  // Test the double value. Zero and NaN are false.
1462
1554
  __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
1463
1555
  __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
1464
- EmitBranch(true_block, false_block, ne);
1556
+ EmitBranch(true_block, false_block, eq);
1465
1557
  } else {
1466
1558
  ASSERT(r.IsTagged());
1467
1559
  Register reg = ToRegister(instr->InputAt(0));
1468
- if (instr->hydrogen()->type().IsBoolean()) {
1469
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1470
- __ cmp(reg, ip);
1560
+ HType type = instr->hydrogen()->value()->type();
1561
+ if (type.IsBoolean()) {
1562
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1471
1563
  EmitBranch(true_block, false_block, eq);
1564
+ } else if (type.IsSmi()) {
1565
+ __ cmp(reg, Operand(0));
1566
+ EmitBranch(true_block, false_block, ne);
1472
1567
  } else {
1473
1568
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
1474
1569
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
1475
1570
 
1476
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1477
- __ cmp(reg, ip);
1478
- __ b(eq, false_label);
1479
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1480
- __ cmp(reg, ip);
1481
- __ b(eq, true_label);
1482
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1483
- __ cmp(reg, ip);
1484
- __ b(eq, false_label);
1485
- __ cmp(reg, Operand(0));
1486
- __ b(eq, false_label);
1487
- __ tst(reg, Operand(kSmiTagMask));
1488
- __ b(eq, true_label);
1571
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1572
+ // Avoid deopts in the case where we've never executed this path before.
1573
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
1489
1574
 
1490
- // Test double values. Zero and NaN are false.
1491
- Label call_stub;
1492
- DoubleRegister dbl_scratch = d0;
1493
- Register scratch = scratch0();
1494
- __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1495
- __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1496
- __ cmp(scratch, Operand(ip));
1497
- __ b(ne, &call_stub);
1498
- __ sub(ip, reg, Operand(kHeapObjectTag));
1499
- __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
1500
- __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
1501
- __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
1502
- __ b(ne, false_label);
1503
- __ b(true_label);
1504
-
1505
- // The conversion stub doesn't cause garbage collections so it's
1506
- // safe to not record a safepoint after the call.
1507
- __ bind(&call_stub);
1508
- ToBooleanStub stub(reg);
1509
- RegList saved_regs = kJSCallerSaved | kCalleeSaved;
1510
- __ stm(db_w, sp, saved_regs);
1511
- __ CallStub(&stub);
1512
- __ cmp(reg, Operand(0));
1513
- __ ldm(ia_w, sp, saved_regs);
1514
- EmitBranch(true_block, false_block, ne);
1575
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
1576
+ // undefined -> false.
1577
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1578
+ __ b(eq, false_label);
1579
+ }
1580
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
1581
+ // Boolean -> its value.
1582
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1583
+ __ b(eq, true_label);
1584
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1585
+ __ b(eq, false_label);
1586
+ }
1587
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
1588
+ // 'null' -> false.
1589
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
1590
+ __ b(eq, false_label);
1591
+ }
1592
+
1593
+ if (expected.Contains(ToBooleanStub::SMI)) {
1594
+ // Smis: 0 -> false, all other -> true.
1595
+ __ cmp(reg, Operand(0));
1596
+ __ b(eq, false_label);
1597
+ __ JumpIfSmi(reg, true_label);
1598
+ } else if (expected.NeedsMap()) {
1599
+ // If we need a map later and have a Smi -> deopt.
1600
+ __ tst(reg, Operand(kSmiTagMask));
1601
+ DeoptimizeIf(eq, instr->environment());
1602
+ }
1603
+
1604
+ const Register map = scratch0();
1605
+ if (expected.NeedsMap()) {
1606
+ __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));
1607
+
1608
+ if (expected.CanBeUndetectable()) {
1609
+ // Undetectable -> false.
1610
+ __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
1611
+ __ tst(ip, Operand(1 << Map::kIsUndetectable));
1612
+ __ b(ne, false_label);
1613
+ }
1614
+ }
1615
+
1616
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
1617
+ // spec object -> true.
1618
+ __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
1619
+ __ b(ge, true_label);
1620
+ }
1621
+
1622
+ if (expected.Contains(ToBooleanStub::STRING)) {
1623
+ // String value -> false iff empty.
1624
+ Label not_string;
1625
+ __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
1626
+ __ b(ge, &not_string);
1627
+ __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
1628
+ __ cmp(ip, Operand(0));
1629
+ __ b(ne, true_label);
1630
+ __ b(false_label);
1631
+ __ bind(&not_string);
1632
+ }
1633
+
1634
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
1635
+ // heap number -> false iff +0, -0, or NaN.
1636
+ DoubleRegister dbl_scratch = double_scratch0();
1637
+ Label not_heap_number;
1638
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
1639
+ __ b(ne, &not_heap_number);
1640
+ __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
1641
+ __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
1642
+ __ b(vs, false_label); // NaN -> false.
1643
+ __ b(eq, false_label); // +0, -0 -> false.
1644
+ __ b(true_label);
1645
+ __ bind(&not_heap_number);
1646
+ }
1647
+
1648
+ // We've seen something for the first time -> deopt.
1649
+ DeoptimizeIf(al, instr->environment());
1515
1650
  }
1516
1651
  }
1517
1652
  }
1518
1653
 
1519
1654
 
1520
- void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1655
+ void LCodeGen::EmitGoto(int block) {
1521
1656
  block = chunk_->LookupDestination(block);
1522
1657
  int next_block = GetNextEmittedBlock(current_block_);
1523
1658
  if (block != next_block) {
1524
- // Perform stack overflow check if this goto needs it before jumping.
1525
- if (deferred_stack_check != NULL) {
1526
- __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1527
- __ cmp(sp, Operand(ip));
1528
- __ b(hs, chunk_->GetAssemblyLabel(block));
1529
- __ jmp(deferred_stack_check->entry());
1530
- deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1531
- } else {
1532
- __ jmp(chunk_->GetAssemblyLabel(block));
1533
- }
1659
+ __ jmp(chunk_->GetAssemblyLabel(block));
1534
1660
  }
1535
1661
  }
1536
1662
 
1537
1663
 
1538
- void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1539
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
1540
- CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
1541
- }
1542
-
1543
-
1544
1664
  void LCodeGen::DoGoto(LGoto* instr) {
1545
- class DeferredStackCheck: public LDeferredCode {
1546
- public:
1547
- DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1548
- : LDeferredCode(codegen), instr_(instr) { }
1549
- virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1550
- private:
1551
- LGoto* instr_;
1552
- };
1553
-
1554
- DeferredStackCheck* deferred = NULL;
1555
- if (instr->include_stack_check()) {
1556
- deferred = new DeferredStackCheck(this, instr);
1557
- }
1558
- EmitGoto(instr->block_id(), deferred);
1665
+ EmitGoto(instr->block_id());
1559
1666
  }
1560
1667
 
1561
1668
 
@@ -1592,34 +1699,6 @@ void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1592
1699
  }
1593
1700
 
1594
1701
 
1595
- void LCodeGen::DoCmpID(LCmpID* instr) {
1596
- LOperand* left = instr->InputAt(0);
1597
- LOperand* right = instr->InputAt(1);
1598
- LOperand* result = instr->result();
1599
- Register scratch = scratch0();
1600
-
1601
- Label unordered, done;
1602
- if (instr->is_double()) {
1603
- // Compare left and right as doubles and load the
1604
- // resulting flags into the normal status register.
1605
- __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
1606
- // If a NaN is involved, i.e. the result is unordered (V set),
1607
- // jump to unordered to return false.
1608
- __ b(vs, &unordered);
1609
- } else {
1610
- EmitCmpI(left, right);
1611
- }
1612
-
1613
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1614
- __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
1615
- __ b(cc, &done);
1616
-
1617
- __ bind(&unordered);
1618
- __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
1619
- __ bind(&done);
1620
- }
1621
-
1622
-
1623
1702
  void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1624
1703
  LOperand* left = instr->InputAt(0);
1625
1704
  LOperand* right = instr->InputAt(1);
@@ -1642,18 +1721,7 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1642
1721
  }
1643
1722
 
1644
1723
 
1645
- void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1646
- Register left = ToRegister(instr->InputAt(0));
1647
- Register right = ToRegister(instr->InputAt(1));
1648
- Register result = ToRegister(instr->result());
1649
-
1650
- __ cmp(left, Operand(right));
1651
- __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1652
- __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
1653
- }
1654
-
1655
-
1656
- void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1724
+ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1657
1725
  Register left = ToRegister(instr->InputAt(0));
1658
1726
  Register right = ToRegister(instr->InputAt(1));
1659
1727
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1664,62 +1732,16 @@ void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1664
1732
  }
1665
1733
 
1666
1734
 
1667
- void LCodeGen::DoCmpSymbolEq(LCmpSymbolEq* instr) {
1668
- Register left = ToRegister(instr->InputAt(0));
1669
- Register right = ToRegister(instr->InputAt(1));
1670
- Register result = ToRegister(instr->result());
1671
-
1672
- __ cmp(left, Operand(right));
1673
- __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1674
- __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
1675
- }
1676
-
1677
-
1678
- void LCodeGen::DoCmpSymbolEqAndBranch(LCmpSymbolEqAndBranch* instr) {
1735
+ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1679
1736
  Register left = ToRegister(instr->InputAt(0));
1680
- Register right = ToRegister(instr->InputAt(1));
1681
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1682
1737
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1738
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1683
1739
 
1684
- __ cmp(left, Operand(right));
1740
+ __ cmp(left, Operand(instr->hydrogen()->right()));
1685
1741
  EmitBranch(true_block, false_block, eq);
1686
1742
  }
1687
1743
 
1688
1744
 
1689
- void LCodeGen::DoIsNull(LIsNull* instr) {
1690
- Register reg = ToRegister(instr->InputAt(0));
1691
- Register result = ToRegister(instr->result());
1692
-
1693
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
1694
- __ cmp(reg, ip);
1695
- if (instr->is_strict()) {
1696
- __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1697
- __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
1698
- } else {
1699
- Label true_value, false_value, done;
1700
- __ b(eq, &true_value);
1701
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1702
- __ cmp(ip, reg);
1703
- __ b(eq, &true_value);
1704
- __ tst(reg, Operand(kSmiTagMask));
1705
- __ b(eq, &false_value);
1706
- // Check for undetectable objects by looking in the bit field in
1707
- // the map. The object has already been smi checked.
1708
- Register scratch = result;
1709
- __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1710
- __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1711
- __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1712
- __ b(ne, &true_value);
1713
- __ bind(&false_value);
1714
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1715
- __ jmp(&done);
1716
- __ bind(&true_value);
1717
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1718
- __ bind(&done);
1719
- }
1720
- }
1721
-
1722
-
1723
1745
  void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1724
1746
  Register scratch = scratch0();
1725
1747
  Register reg = ToRegister(instr->InputAt(0));
@@ -1741,8 +1763,7 @@ void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1741
1763
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1742
1764
  __ cmp(reg, ip);
1743
1765
  __ b(eq, true_label);
1744
- __ tst(reg, Operand(kSmiTagMask));
1745
- __ b(eq, false_label);
1766
+ __ JumpIfSmi(reg, false_label);
1746
1767
  // Check for undetectable objects by looking in the bit field in
1747
1768
  // the map. The object has already been smi checked.
1748
1769
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
@@ -1755,13 +1776,13 @@ void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1755
1776
 
1756
1777
  Condition LCodeGen::EmitIsObject(Register input,
1757
1778
  Register temp1,
1758
- Register temp2,
1759
1779
  Label* is_not_object,
1760
1780
  Label* is_object) {
1781
+ Register temp2 = scratch0();
1761
1782
  __ JumpIfSmi(input, is_not_object);
1762
1783
 
1763
- __ LoadRoot(temp1, Heap::kNullValueRootIndex);
1764
- __ cmp(input, temp1);
1784
+ __ LoadRoot(temp2, Heap::kNullValueRootIndex);
1785
+ __ cmp(input, temp2);
1765
1786
  __ b(eq, is_object);
1766
1787
 
1767
1788
  // Load map.
@@ -1773,37 +1794,16 @@ Condition LCodeGen::EmitIsObject(Register input,
1773
1794
 
1774
1795
  // Load instance type and check that it is in object type range.
1775
1796
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
1776
- __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
1797
+ __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
1777
1798
  __ b(lt, is_not_object);
1778
- __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
1799
+ __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
1779
1800
  return le;
1780
1801
  }
1781
1802
 
1782
1803
 
1783
- void LCodeGen::DoIsObject(LIsObject* instr) {
1784
- Register reg = ToRegister(instr->InputAt(0));
1785
- Register result = ToRegister(instr->result());
1786
- Register temp = scratch0();
1787
- Label is_false, is_true, done;
1788
-
1789
- Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
1790
- __ b(true_cond, &is_true);
1791
-
1792
- __ bind(&is_false);
1793
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1794
- __ b(&done);
1795
-
1796
- __ bind(&is_true);
1797
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1798
-
1799
- __ bind(&done);
1800
- }
1801
-
1802
-
1803
1804
  void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1804
1805
  Register reg = ToRegister(instr->InputAt(0));
1805
1806
  Register temp1 = ToRegister(instr->TempAt(0));
1806
- Register temp2 = scratch0();
1807
1807
 
1808
1808
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1809
1809
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1811,25 +1811,12 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1811
1811
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
1812
1812
 
1813
1813
  Condition true_cond =
1814
- EmitIsObject(reg, temp1, temp2, false_label, true_label);
1814
+ EmitIsObject(reg, temp1, false_label, true_label);
1815
1815
 
1816
1816
  EmitBranch(true_block, false_block, true_cond);
1817
1817
  }
1818
1818
 
1819
1819
 
1820
- void LCodeGen::DoIsSmi(LIsSmi* instr) {
1821
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1822
- Register result = ToRegister(instr->result());
1823
- Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
1824
- __ tst(input_reg, Operand(kSmiTagMask));
1825
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1826
- Label done;
1827
- __ b(eq, &done);
1828
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1829
- __ bind(&done);
1830
- }
1831
-
1832
-
1833
1820
  void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1834
1821
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1835
1822
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1840,25 +1827,6 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1840
1827
  }
1841
1828
 
1842
1829
 
1843
- void LCodeGen::DoIsUndetectable(LIsUndetectable* instr) {
1844
- Register input = ToRegister(instr->InputAt(0));
1845
- Register result = ToRegister(instr->result());
1846
-
1847
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1848
- Label false_label, done;
1849
- __ JumpIfSmi(input, &false_label);
1850
- __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
1851
- __ ldrb(result, FieldMemOperand(result, Map::kBitFieldOffset));
1852
- __ tst(result, Operand(1 << Map::kIsUndetectable));
1853
- __ b(eq, &false_label);
1854
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1855
- __ jmp(&done);
1856
- __ bind(&false_label);
1857
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1858
- __ bind(&done);
1859
- }
1860
-
1861
-
1862
1830
  void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1863
1831
  Register input = ToRegister(instr->InputAt(0));
1864
1832
  Register temp = ToRegister(instr->TempAt(0));
@@ -1874,7 +1842,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1874
1842
  }
1875
1843
 
1876
1844
 
1877
- static InstanceType TestType(HHasInstanceType* instr) {
1845
+ static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
1878
1846
  InstanceType from = instr->from();
1879
1847
  InstanceType to = instr->to();
1880
1848
  if (from == FIRST_TYPE) return to;
@@ -1883,7 +1851,7 @@ static InstanceType TestType(HHasInstanceType* instr) {
1883
1851
  }
1884
1852
 
1885
1853
 
1886
- static Condition BranchCondition(HHasInstanceType* instr) {
1854
+ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1887
1855
  InstanceType from = instr->from();
1888
1856
  InstanceType to = instr->to();
1889
1857
  if (from == to) return eq;
@@ -1894,23 +1862,6 @@ static Condition BranchCondition(HHasInstanceType* instr) {
1894
1862
  }
1895
1863
 
1896
1864
 
1897
- void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1898
- Register input = ToRegister(instr->InputAt(0));
1899
- Register result = ToRegister(instr->result());
1900
-
1901
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1902
- Label done;
1903
- __ tst(input, Operand(kSmiTagMask));
1904
- __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
1905
- __ b(eq, &done);
1906
- __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
1907
- Condition cond = BranchCondition(instr->hydrogen());
1908
- __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
1909
- __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
1910
- __ bind(&done);
1911
- }
1912
-
1913
-
1914
1865
  void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1915
1866
  Register scratch = scratch0();
1916
1867
  Register input = ToRegister(instr->InputAt(0));
@@ -1920,8 +1871,7 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1920
1871
 
1921
1872
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
1922
1873
 
1923
- __ tst(input, Operand(kSmiTagMask));
1924
- __ b(eq, false_label);
1874
+ __ JumpIfSmi(input, false_label);
1925
1875
 
1926
1876
  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
1927
1877
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
@@ -1941,20 +1891,6 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1941
1891
  }
1942
1892
 
1943
1893
 
1944
- void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1945
- Register input = ToRegister(instr->InputAt(0));
1946
- Register result = ToRegister(instr->result());
1947
- Register scratch = scratch0();
1948
-
1949
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1950
- __ ldr(scratch,
1951
- FieldMemOperand(input, String::kHashFieldOffset));
1952
- __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1953
- __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1954
- __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
1955
- }
1956
-
1957
-
1958
1894
  void LCodeGen::DoHasCachedArrayIndexAndBranch(
1959
1895
  LHasCachedArrayIndexAndBranch* instr) {
1960
1896
  Register input = ToRegister(instr->InputAt(0));
@@ -1980,28 +1916,28 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1980
1916
  Register temp2) {
1981
1917
  ASSERT(!input.is(temp));
1982
1918
  ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1983
- __ tst(input, Operand(kSmiTagMask));
1984
- __ b(eq, is_false);
1985
- __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
1919
+ __ JumpIfSmi(input, is_false);
1920
+ __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
1986
1921
  __ b(lt, is_false);
1987
1922
 
1988
1923
  // Map is now in temp.
1989
1924
  // Functions have class 'Function'.
1990
- __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
1925
+ __ CompareInstanceType(temp, temp2, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
1991
1926
  if (class_name->IsEqualTo(CStrVector("Function"))) {
1992
- __ b(eq, is_true);
1927
+ __ b(ge, is_true);
1993
1928
  } else {
1994
- __ b(eq, is_false);
1929
+ __ b(ge, is_false);
1995
1930
  }
1996
1931
 
1997
1932
  // Check if the constructor in the map is a function.
1998
1933
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1999
1934
 
2000
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
2001
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2002
- // LAST_JS_OBJECT_TYPE.
2003
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2004
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1935
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type and
1936
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1937
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1938
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1939
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1940
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
2005
1941
 
2006
1942
  // Objects with a non-function constructor have class 'Object'.
2007
1943
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
@@ -2027,27 +1963,6 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
2027
1963
  }
2028
1964
 
2029
1965
 
2030
- void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
2031
- Register input = ToRegister(instr->InputAt(0));
2032
- Register result = ToRegister(instr->result());
2033
- ASSERT(input.is(result));
2034
- Handle<String> class_name = instr->hydrogen()->class_name();
2035
-
2036
- Label done, is_true, is_false;
2037
-
2038
- EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
2039
- __ b(ne, &is_false);
2040
-
2041
- __ bind(&is_true);
2042
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
2043
- __ jmp(&done);
2044
-
2045
- __ bind(&is_false);
2046
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
2047
- __ bind(&done);
2048
- }
2049
-
2050
-
2051
1966
  void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2052
1967
  Register input = ToRegister(instr->InputAt(0));
2053
1968
  Register temp = scratch0();
@@ -2091,20 +2006,6 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2091
2006
  }
2092
2007
 
2093
2008
 
2094
- void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
2095
- ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0.
2096
- ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1.
2097
-
2098
- int true_block = chunk_->LookupDestination(instr->true_block_id());
2099
- int false_block = chunk_->LookupDestination(instr->false_block_id());
2100
-
2101
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
2102
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2103
- __ cmp(r0, Operand(0));
2104
- EmitBranch(true_block, false_block, eq);
2105
- }
2106
-
2107
-
2108
2009
  void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2109
2010
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2110
2011
  public:
@@ -2259,25 +2160,6 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
2259
2160
  }
2260
2161
 
2261
2162
 
2262
- void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
2263
- Token::Value op = instr->op();
2264
- int true_block = chunk_->LookupDestination(instr->true_block_id());
2265
- int false_block = chunk_->LookupDestination(instr->false_block_id());
2266
-
2267
- Handle<Code> ic = CompareIC::GetUninitialized(op);
2268
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2269
-
2270
- // The compare stub expects compare condition and the input operands
2271
- // reversed for GT and LTE.
2272
- Condition condition = ComputeCompareCondition(op);
2273
- if (op == Token::GT || op == Token::LTE) {
2274
- condition = ReverseCondition(condition);
2275
- }
2276
- __ cmp(r0, Operand(0));
2277
- EmitBranch(true_block, false_block, condition);
2278
- }
2279
-
2280
-
2281
2163
  void LCodeGen::DoReturn(LReturn* instr) {
2282
2164
  if (FLAG_trace) {
2283
2165
  // Push the return value on the stack as the parameter.
@@ -2516,7 +2398,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2516
2398
 
2517
2399
  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
2518
2400
  if (FLAG_debug_code) {
2519
- Label done;
2401
+ Label done, fail;
2520
2402
  __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
2521
2403
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2522
2404
  __ cmp(scratch, ip);
@@ -2524,11 +2406,18 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2524
2406
  __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2525
2407
  __ cmp(scratch, ip);
2526
2408
  __ b(eq, &done);
2527
- __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
2528
- __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2529
- __ sub(scratch, scratch, Operand(FIRST_EXTERNAL_ARRAY_TYPE));
2530
- __ cmp(scratch, Operand(kExternalArrayTypeCount));
2531
- __ Check(cc, "Check for fast elements failed.");
2409
+ // |scratch| still contains |input|'s map.
2410
+ __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
2411
+ __ ubfx(scratch, scratch, Map::kElementsKindShift,
2412
+ Map::kElementsKindBitCount);
2413
+ __ cmp(scratch, Operand(JSObject::FAST_ELEMENTS));
2414
+ __ b(eq, &done);
2415
+ __ cmp(scratch, Operand(JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
2416
+ __ b(lt, &fail);
2417
+ __ cmp(scratch, Operand(JSObject::LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
2418
+ __ b(le, &done);
2419
+ __ bind(&fail);
2420
+ __ Abort("Check for fast or external elements failed.");
2532
2421
  __ bind(&done);
2533
2422
  }
2534
2423
  }
@@ -2566,7 +2455,6 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2566
2455
  Register key = EmitLoadRegister(instr->key(), scratch0());
2567
2456
  Register result = ToRegister(instr->result());
2568
2457
  Register scratch = scratch0();
2569
- ASSERT(result.is(elements));
2570
2458
 
2571
2459
  // Load the result.
2572
2460
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
@@ -2581,11 +2469,53 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2581
2469
  }
2582
2470
 
2583
2471
 
2472
+ void LCodeGen::DoLoadKeyedFastDoubleElement(
2473
+ LLoadKeyedFastDoubleElement* instr) {
2474
+ Register elements = ToRegister(instr->elements());
2475
+ bool key_is_constant = instr->key()->IsConstantOperand();
2476
+ Register key = no_reg;
2477
+ DwVfpRegister result = ToDoubleRegister(instr->result());
2478
+ Register scratch = scratch0();
2479
+
2480
+ int shift_size =
2481
+ ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
2482
+ int constant_key = 0;
2483
+ if (key_is_constant) {
2484
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
2485
+ if (constant_key & 0xF0000000) {
2486
+ Abort("array index constant value too big.");
2487
+ }
2488
+ } else {
2489
+ key = ToRegister(instr->key());
2490
+ }
2491
+
2492
+ Operand operand = key_is_constant
2493
+ ? Operand(constant_key * (1 << shift_size) +
2494
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag)
2495
+ : Operand(key, LSL, shift_size);
2496
+ __ add(elements, elements, operand);
2497
+ if (!key_is_constant) {
2498
+ __ add(elements, elements,
2499
+ Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
2500
+ }
2501
+
2502
+ if (instr->hydrogen()->RequiresHoleCheck()) {
2503
+ // TODO(danno): If no hole check is required, there is no need to allocate
2504
+ // elements into a temporary register, instead scratch can be used.
2505
+ __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
2506
+ __ cmp(scratch, Operand(kHoleNanUpper32));
2507
+ DeoptimizeIf(eq, instr->environment());
2508
+ }
2509
+
2510
+ __ vldr(result, elements, 0);
2511
+ }
2512
+
2513
+
2584
2514
  void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2585
2515
  LLoadKeyedSpecializedArrayElement* instr) {
2586
2516
  Register external_pointer = ToRegister(instr->external_pointer());
2587
2517
  Register key = no_reg;
2588
- ExternalArrayType array_type = instr->array_type();
2518
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
2589
2519
  bool key_is_constant = instr->key()->IsConstantOperand();
2590
2520
  int constant_key = 0;
2591
2521
  if (key_is_constant) {
@@ -2596,43 +2526,45 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2596
2526
  } else {
2597
2527
  key = ToRegister(instr->key());
2598
2528
  }
2599
- int shift_size = ExternalArrayTypeToShiftSize(array_type);
2529
+ int shift_size = ElementsKindToShiftSize(elements_kind);
2600
2530
 
2601
- if (array_type == kExternalFloatArray || array_type == kExternalDoubleArray) {
2531
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
2532
+ elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
2602
2533
  CpuFeatures::Scope scope(VFP3);
2603
- DwVfpRegister result(ToDoubleRegister(instr->result()));
2604
- Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
2605
- : Operand(key, LSL, shift_size));
2534
+ DwVfpRegister result = ToDoubleRegister(instr->result());
2535
+ Operand operand = key_is_constant
2536
+ ? Operand(constant_key * (1 << shift_size))
2537
+ : Operand(key, LSL, shift_size);
2606
2538
  __ add(scratch0(), external_pointer, operand);
2607
- if (array_type == kExternalFloatArray) {
2539
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
2608
2540
  __ vldr(result.low(), scratch0(), 0);
2609
2541
  __ vcvt_f64_f32(result, result.low());
2610
- } else { // i.e. array_type == kExternalDoubleArray
2542
+ } else { // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
2611
2543
  __ vldr(result, scratch0(), 0);
2612
2544
  }
2613
2545
  } else {
2614
- Register result(ToRegister(instr->result()));
2546
+ Register result = ToRegister(instr->result());
2615
2547
  MemOperand mem_operand(key_is_constant
2616
2548
  ? MemOperand(external_pointer, constant_key * (1 << shift_size))
2617
2549
  : MemOperand(external_pointer, key, LSL, shift_size));
2618
- switch (array_type) {
2619
- case kExternalByteArray:
2550
+ switch (elements_kind) {
2551
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
2620
2552
  __ ldrsb(result, mem_operand);
2621
2553
  break;
2622
- case kExternalUnsignedByteArray:
2623
- case kExternalPixelArray:
2554
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
2555
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2624
2556
  __ ldrb(result, mem_operand);
2625
2557
  break;
2626
- case kExternalShortArray:
2558
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
2627
2559
  __ ldrsh(result, mem_operand);
2628
2560
  break;
2629
- case kExternalUnsignedShortArray:
2561
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2630
2562
  __ ldrh(result, mem_operand);
2631
2563
  break;
2632
- case kExternalIntArray:
2564
+ case JSObject::EXTERNAL_INT_ELEMENTS:
2633
2565
  __ ldr(result, mem_operand);
2634
2566
  break;
2635
- case kExternalUnsignedIntArray:
2567
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
2636
2568
  __ ldr(result, mem_operand);
2637
2569
  __ cmp(result, Operand(0x80000000));
2638
2570
  // TODO(danno): we could be more clever here, perhaps having a special
@@ -2640,8 +2572,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2640
2572
  // happens, and generate code that returns a double rather than int.
2641
2573
  DeoptimizeIf(cs, instr->environment());
2642
2574
  break;
2643
- case kExternalFloatArray:
2644
- case kExternalDoubleArray:
2575
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
2576
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
2577
+ case JSObject::FAST_DOUBLE_ELEMENTS:
2578
+ case JSObject::FAST_ELEMENTS:
2579
+ case JSObject::DICTIONARY_ELEMENTS:
2580
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
2645
2581
  UNREACHABLE();
2646
2582
  break;
2647
2583
  }
@@ -2707,9 +2643,26 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2707
2643
  ASSERT(function.is(r1)); // Required by InvokeFunction.
2708
2644
  ASSERT(ToRegister(instr->result()).is(r0));
2709
2645
 
2710
- // If the receiver is null or undefined, we have to pass the global object
2711
- // as a receiver.
2646
+ // If the receiver is null or undefined, we have to pass the global
2647
+ // object as a receiver to normal functions. Values have to be
2648
+ // passed unchanged to builtins and strict-mode functions.
2712
2649
  Label global_object, receiver_ok;
2650
+
2651
+ // Do not transform the receiver to object for strict mode
2652
+ // functions.
2653
+ __ ldr(scratch,
2654
+ FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
2655
+ __ ldr(scratch,
2656
+ FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
2657
+ __ tst(scratch,
2658
+ Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
2659
+ __ b(ne, &receiver_ok);
2660
+
2661
+ // Do not transform the receiver to object for builtins.
2662
+ __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
2663
+ __ b(ne, &receiver_ok);
2664
+
2665
+ // Normal function. Replace undefined or null with global receiver.
2713
2666
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
2714
2667
  __ cmp(receiver, scratch);
2715
2668
  __ b(eq, &global_object);
@@ -2720,12 +2673,14 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2720
2673
  // Deoptimize if the receiver is not a JS object.
2721
2674
  __ tst(receiver, Operand(kSmiTagMask));
2722
2675
  DeoptimizeIf(eq, instr->environment());
2723
- __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
2724
- DeoptimizeIf(lo, instr->environment());
2676
+ __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
2677
+ DeoptimizeIf(lt, instr->environment());
2725
2678
  __ jmp(&receiver_ok);
2726
2679
 
2727
2680
  __ bind(&global_object);
2728
2681
  __ ldr(receiver, GlobalObjectOperand());
2682
+ __ ldr(receiver,
2683
+ FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
2729
2684
  __ bind(&receiver_ok);
2730
2685
 
2731
2686
  // Copy the arguments to this function possibly from the
@@ -2765,7 +2720,8 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2765
2720
  // The number of arguments is stored in receiver which is r0, as expected
2766
2721
  // by InvokeFunction.
2767
2722
  v8::internal::ParameterCount actual(receiver);
2768
- __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
2723
+ __ InvokeFunction(function, actual, CALL_FUNCTION,
2724
+ safepoint_generator, CALL_AS_METHOD);
2769
2725
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2770
2726
  }
2771
2727
 
@@ -2781,6 +2737,12 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
2781
2737
  }
2782
2738
 
2783
2739
 
2740
+ void LCodeGen::DoThisFunction(LThisFunction* instr) {
2741
+ Register result = ToRegister(instr->result());
2742
+ __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2743
+ }
2744
+
2745
+
2784
2746
  void LCodeGen::DoContext(LContext* instr) {
2785
2747
  Register result = ToRegister(instr->result());
2786
2748
  __ mov(result, cp);
@@ -2791,13 +2753,11 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
2791
2753
  Register context = ToRegister(instr->context());
2792
2754
  Register result = ToRegister(instr->result());
2793
2755
  __ ldr(result,
2794
- MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2795
- __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
2756
+ MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2796
2757
  }
2797
2758
 
2798
2759
 
2799
2760
  void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2800
- Register context = ToRegister(instr->context());
2801
2761
  Register result = ToRegister(instr->result());
2802
2762
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2803
2763
  }
@@ -2856,8 +2816,8 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2856
2816
 
2857
2817
 
2858
2818
  void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2859
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2860
2819
  Register input = ToRegister(instr->InputAt(0));
2820
+ Register result = ToRegister(instr->result());
2861
2821
  Register scratch = scratch0();
2862
2822
 
2863
2823
  // Deoptimize if not a heap number.
@@ -2871,10 +2831,10 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2871
2831
  scratch = no_reg;
2872
2832
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
2873
2833
  // Check the sign of the argument. If the argument is positive, just
2874
- // return it. We do not need to patch the stack since |input| and
2875
- // |result| are the same register and |input| would be restored
2876
- // unchanged by popping safepoint registers.
2834
+ // return it.
2877
2835
  __ tst(exponent, Operand(HeapNumber::kSignMask));
2836
+ // Move the input to the result if necessary.
2837
+ __ Move(result, input);
2878
2838
  __ b(eq, &done);
2879
2839
 
2880
2840
  // Input is negative. Reverse its sign.
@@ -2914,7 +2874,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2914
2874
  __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
2915
2875
  __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
2916
2876
 
2917
- __ StoreToSafepointRegisterSlot(tmp1, input);
2877
+ __ StoreToSafepointRegisterSlot(tmp1, result);
2918
2878
  }
2919
2879
 
2920
2880
  __ bind(&done);
@@ -2923,11 +2883,13 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2923
2883
 
2924
2884
  void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2925
2885
  Register input = ToRegister(instr->InputAt(0));
2886
+ Register result = ToRegister(instr->result());
2926
2887
  __ cmp(input, Operand(0));
2888
+ __ Move(result, input, pl);
2927
2889
  // We can make rsb conditional because the previous cmp instruction
2928
2890
  // will clear the V (overflow) flag and rsb won't set this flag
2929
2891
  // if input is positive.
2930
- __ rsb(input, input, Operand(0), SetCC, mi);
2892
+ __ rsb(result, input, Operand(0), SetCC, mi);
2931
2893
  // Deoptimize on overflow.
2932
2894
  DeoptimizeIf(vs, instr->environment());
2933
2895
  }
@@ -2947,11 +2909,11 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2947
2909
  LUnaryMathOperation* instr_;
2948
2910
  };
2949
2911
 
2950
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2951
2912
  Representation r = instr->hydrogen()->value()->representation();
2952
2913
  if (r.IsDouble()) {
2953
2914
  DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
2954
- __ vabs(input, input);
2915
+ DwVfpRegister result = ToDoubleRegister(instr->result());
2916
+ __ vabs(result, input);
2955
2917
  } else if (r.IsInteger32()) {
2956
2918
  EmitIntegerMathAbs(instr);
2957
2919
  } else {
@@ -3001,19 +2963,18 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
3001
2963
  void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3002
2964
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
3003
2965
  Register result = ToRegister(instr->result());
3004
- Register scratch1 = result;
3005
- Register scratch2 = scratch0();
2966
+ Register scratch = scratch0();
3006
2967
  Label done, check_sign_on_zero;
3007
2968
 
3008
2969
  // Extract exponent bits.
3009
- __ vmov(scratch1, input.high());
3010
- __ ubfx(scratch2,
3011
- scratch1,
2970
+ __ vmov(result, input.high());
2971
+ __ ubfx(scratch,
2972
+ result,
3012
2973
  HeapNumber::kExponentShift,
3013
2974
  HeapNumber::kExponentBits);
3014
2975
 
3015
2976
  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
3016
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
2977
+ __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
3017
2978
  __ mov(result, Operand(0), LeaveCC, le);
3018
2979
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3019
2980
  __ b(le, &check_sign_on_zero);
@@ -3023,19 +2984,19 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3023
2984
 
3024
2985
  // The following conversion will not work with numbers
3025
2986
  // outside of ]-2^32, 2^32[.
3026
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
2987
+ __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
3027
2988
  DeoptimizeIf(ge, instr->environment());
3028
2989
 
3029
2990
  // Save the original sign for later comparison.
3030
- __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));
2991
+ __ and_(scratch, result, Operand(HeapNumber::kSignMask));
3031
2992
 
3032
- __ vmov(double_scratch0(), 0.5);
2993
+ __ Vmov(double_scratch0(), 0.5);
3033
2994
  __ vadd(input, input, double_scratch0());
3034
2995
 
3035
2996
  // Check sign of the result: if the sign changed, the input
3036
2997
  // value was in ]0.5, 0[ and the result should be -0.
3037
- __ vmov(scratch1, input.high());
3038
- __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
2998
+ __ vmov(result, input.high());
2999
+ __ eor(result, result, Operand(scratch), SetCC);
3039
3000
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3040
3001
  DeoptimizeIf(mi, instr->environment());
3041
3002
  } else {
@@ -3046,8 +3007,8 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3046
3007
  __ EmitVFPTruncate(kRoundToMinusInf,
3047
3008
  double_scratch0().low(),
3048
3009
  input,
3049
- scratch1,
3050
- scratch2);
3010
+ result,
3011
+ scratch);
3051
3012
  DeoptimizeIf(ne, instr->environment());
3052
3013
  __ vmov(result, double_scratch0().low());
3053
3014
 
@@ -3056,8 +3017,8 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3056
3017
  __ cmp(result, Operand(0));
3057
3018
  __ b(ne, &done);
3058
3019
  __ bind(&check_sign_on_zero);
3059
- __ vmov(scratch1, input.high());
3060
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
3020
+ __ vmov(scratch, input.high());
3021
+ __ tst(scratch, Operand(HeapNumber::kSignMask));
3061
3022
  DeoptimizeIf(ne, instr->environment());
3062
3023
  }
3063
3024
  __ bind(&done);
@@ -3066,24 +3027,17 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3066
3027
 
3067
3028
  void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
3068
3029
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
3069
- ASSERT(ToDoubleRegister(instr->result()).is(input));
3070
- __ vsqrt(input, input);
3030
+ DoubleRegister result = ToDoubleRegister(instr->result());
3031
+ __ vsqrt(result, input);
3071
3032
  }
3072
3033
 
3073
3034
 
3074
3035
  void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3075
3036
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
3076
- Register scratch = scratch0();
3077
- SwVfpRegister single_scratch = double_scratch0().low();
3078
- DoubleRegister double_scratch = double_scratch0();
3079
- ASSERT(ToDoubleRegister(instr->result()).is(input));
3080
-
3037
+ DoubleRegister result = ToDoubleRegister(instr->result());
3081
3038
  // Add +0 to convert -0 to +0.
3082
- __ mov(scratch, Operand(0));
3083
- __ vmov(single_scratch, scratch);
3084
- __ vcvt_f64_s32(double_scratch, single_scratch);
3085
- __ vadd(input, input, double_scratch);
3086
- __ vsqrt(input, input);
3039
+ __ vadd(result, input, kDoubleRegZero);
3040
+ __ vsqrt(result, result);
3087
3041
  }
3088
3042
 
3089
3043
 
@@ -3213,7 +3167,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3213
3167
  RegisterEnvironmentForDeoptimization(env);
3214
3168
  SafepointGenerator generator(this, pointers, env->deoptimization_index());
3215
3169
  ParameterCount count(instr->arity());
3216
- __ InvokeFunction(r1, count, CALL_FUNCTION, generator);
3170
+ __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3217
3171
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3218
3172
  }
3219
3173
 
@@ -3366,12 +3320,54 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3366
3320
  }
3367
3321
 
3368
3322
 
3323
+ void LCodeGen::DoStoreKeyedFastDoubleElement(
3324
+ LStoreKeyedFastDoubleElement* instr) {
3325
+ DwVfpRegister value = ToDoubleRegister(instr->value());
3326
+ Register elements = ToRegister(instr->elements());
3327
+ Register key = no_reg;
3328
+ Register scratch = scratch0();
3329
+ bool key_is_constant = instr->key()->IsConstantOperand();
3330
+ int constant_key = 0;
3331
+ Label not_nan;
3332
+
3333
+ // Calculate the effective address of the slot in the array to store the
3334
+ // double value.
3335
+ if (key_is_constant) {
3336
+ constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3337
+ if (constant_key & 0xF0000000) {
3338
+ Abort("array index constant value too big.");
3339
+ }
3340
+ } else {
3341
+ key = ToRegister(instr->key());
3342
+ }
3343
+ int shift_size = ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
3344
+ Operand operand = key_is_constant
3345
+ ? Operand(constant_key * (1 << shift_size) +
3346
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag)
3347
+ : Operand(key, LSL, shift_size);
3348
+ __ add(scratch, elements, operand);
3349
+ if (!key_is_constant) {
3350
+ __ add(scratch, scratch,
3351
+ Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3352
+ }
3353
+
3354
+ // Check for NaN. All NaNs must be canonicalized.
3355
+ __ VFPCompareAndSetFlags(value, value);
3356
+
3357
+ // Only load canonical NaN if the comparison above set the overflow.
3358
+ __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
3359
+
3360
+ __ bind(&not_nan);
3361
+ __ vstr(value, scratch, 0);
3362
+ }
3363
+
3364
+
3369
3365
  void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3370
3366
  LStoreKeyedSpecializedArrayElement* instr) {
3371
3367
 
3372
3368
  Register external_pointer = ToRegister(instr->external_pointer());
3373
3369
  Register key = no_reg;
3374
- ExternalArrayType array_type = instr->array_type();
3370
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
3375
3371
  bool key_is_constant = instr->key()->IsConstantOperand();
3376
3372
  int constant_key = 0;
3377
3373
  if (key_is_constant) {
@@ -3382,18 +3378,19 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3382
3378
  } else {
3383
3379
  key = ToRegister(instr->key());
3384
3380
  }
3385
- int shift_size = ExternalArrayTypeToShiftSize(array_type);
3381
+ int shift_size = ElementsKindToShiftSize(elements_kind);
3386
3382
 
3387
- if (array_type == kExternalFloatArray || array_type == kExternalDoubleArray) {
3383
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
3384
+ elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
3388
3385
  CpuFeatures::Scope scope(VFP3);
3389
3386
  DwVfpRegister value(ToDoubleRegister(instr->value()));
3390
3387
  Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
3391
3388
  : Operand(key, LSL, shift_size));
3392
3389
  __ add(scratch0(), external_pointer, operand);
3393
- if (array_type == kExternalFloatArray) {
3390
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
3394
3391
  __ vcvt_f32_f64(double_scratch0().low(), value);
3395
3392
  __ vstr(double_scratch0().low(), scratch0(), 0);
3396
- } else { // i.e. array_type == kExternalDoubleArray
3393
+ } else { // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
3397
3394
  __ vstr(value, scratch0(), 0);
3398
3395
  }
3399
3396
  } else {
@@ -3401,22 +3398,26 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3401
3398
  MemOperand mem_operand(key_is_constant
3402
3399
  ? MemOperand(external_pointer, constant_key * (1 << shift_size))
3403
3400
  : MemOperand(external_pointer, key, LSL, shift_size));
3404
- switch (array_type) {
3405
- case kExternalPixelArray:
3406
- case kExternalByteArray:
3407
- case kExternalUnsignedByteArray:
3401
+ switch (elements_kind) {
3402
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
3403
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
3404
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3408
3405
  __ strb(value, mem_operand);
3409
3406
  break;
3410
- case kExternalShortArray:
3411
- case kExternalUnsignedShortArray:
3407
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
3408
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3412
3409
  __ strh(value, mem_operand);
3413
3410
  break;
3414
- case kExternalIntArray:
3415
- case kExternalUnsignedIntArray:
3411
+ case JSObject::EXTERNAL_INT_ELEMENTS:
3412
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
3416
3413
  __ str(value, mem_operand);
3417
3414
  break;
3418
- case kExternalFloatArray:
3419
- case kExternalDoubleArray:
3415
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
3416
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
3417
+ case JSObject::FAST_DOUBLE_ELEMENTS:
3418
+ case JSObject::FAST_ELEMENTS:
3419
+ case JSObject::DICTIONARY_ELEMENTS:
3420
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
3420
3421
  UNREACHABLE();
3421
3422
  break;
3422
3423
  }
@@ -3454,97 +3455,81 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3454
3455
  LStringCharCodeAt* instr_;
3455
3456
  };
3456
3457
 
3457
- Register scratch = scratch0();
3458
3458
  Register string = ToRegister(instr->string());
3459
- Register index = no_reg;
3460
- int const_index = -1;
3461
- if (instr->index()->IsConstantOperand()) {
3462
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3463
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3464
- if (!Smi::IsValid(const_index)) {
3465
- // Guaranteed to be out of bounds because of the assert above.
3466
- // So the bounds check that must dominate this instruction must
3467
- // have deoptimized already.
3468
- if (FLAG_debug_code) {
3469
- __ Abort("StringCharCodeAt: out of bounds index.");
3470
- }
3471
- // No code needs to be generated.
3472
- return;
3473
- }
3474
- } else {
3475
- index = ToRegister(instr->index());
3476
- }
3459
+ Register index = ToRegister(instr->index());
3477
3460
  Register result = ToRegister(instr->result());
3478
3461
 
3479
3462
  DeferredStringCharCodeAt* deferred =
3480
3463
  new DeferredStringCharCodeAt(this, instr);
3481
3464
 
3482
- Label flat_string, ascii_string, done;
3483
-
3484
3465
  // Fetch the instance type of the receiver into result register.
3485
3466
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3486
3467
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3487
3468
 
3488
- // We need special handling for non-flat strings.
3489
- STATIC_ASSERT(kSeqStringTag == 0);
3490
- __ tst(result, Operand(kStringRepresentationMask));
3491
- __ b(eq, &flat_string);
3469
+ // We need special handling for indirect strings.
3470
+ Label check_sequential;
3471
+ __ tst(result, Operand(kIsIndirectStringMask));
3472
+ __ b(eq, &check_sequential);
3492
3473
 
3493
- // Handle non-flat strings.
3494
- __ tst(result, Operand(kIsConsStringMask));
3495
- __ b(eq, deferred->entry());
3474
+ // Dispatch on the indirect string shape: slice or cons.
3475
+ Label cons_string;
3476
+ __ tst(result, Operand(kSlicedNotConsMask));
3477
+ __ b(eq, &cons_string);
3496
3478
 
3497
- // ConsString.
3479
+ // Handle slices.
3480
+ Label indirect_string_loaded;
3481
+ __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
3482
+ __ add(index, index, Operand(result, ASR, kSmiTagSize));
3483
+ __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
3484
+ __ jmp(&indirect_string_loaded);
3485
+
3486
+ // Handle conses.
3498
3487
  // Check whether the right hand side is the empty string (i.e. if
3499
3488
  // this is really a flat string in a cons string). If that is not
3500
3489
  // the case we would rather go to the runtime system now to flatten
3501
3490
  // the string.
3502
- __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
3491
+ __ bind(&cons_string);
3492
+ __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
3503
3493
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
3504
- __ cmp(scratch, ip);
3494
+ __ cmp(result, ip);
3505
3495
  __ b(ne, deferred->entry());
3506
3496
  // Get the first of the two strings and load its instance type.
3507
3497
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
3498
+
3499
+ __ bind(&indirect_string_loaded);
3508
3500
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3509
3501
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3510
- // If the first cons component is also non-flat, then go to runtime.
3502
+
3503
+ // Check whether the string is sequential. The only non-sequential
3504
+ // shapes we support have just been unwrapped above.
3505
+ __ bind(&check_sequential);
3511
3506
  STATIC_ASSERT(kSeqStringTag == 0);
3512
3507
  __ tst(result, Operand(kStringRepresentationMask));
3513
3508
  __ b(ne, deferred->entry());
3514
3509
 
3515
- // Check for 1-byte or 2-byte string.
3516
- __ bind(&flat_string);
3510
+ // Dispatch on the encoding: ASCII or two-byte.
3511
+ Label ascii_string;
3517
3512
  STATIC_ASSERT(kAsciiStringTag != 0);
3518
3513
  __ tst(result, Operand(kStringEncodingMask));
3519
3514
  __ b(ne, &ascii_string);
3520
3515
 
3521
- // 2-byte string.
3522
- // Load the 2-byte character code into the result register.
3523
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3524
- if (instr->index()->IsConstantOperand()) {
3525
- __ ldrh(result,
3526
- FieldMemOperand(string,
3527
- SeqTwoByteString::kHeaderSize + 2 * const_index));
3528
- } else {
3529
- __ add(scratch,
3530
- string,
3531
- Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3532
- __ ldrh(result, MemOperand(scratch, index, LSL, 1));
3533
- }
3516
+ // Two-byte string.
3517
+ // Load the two-byte character code into the result register.
3518
+ Label done;
3519
+ __ add(result,
3520
+ string,
3521
+ Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3522
+ __ ldrh(result, MemOperand(result, index, LSL, 1));
3534
3523
  __ jmp(&done);
3535
3524
 
3536
3525
  // ASCII string.
3537
3526
  // Load the byte into the result register.
3538
3527
  __ bind(&ascii_string);
3539
- if (instr->index()->IsConstantOperand()) {
3540
- __ ldrb(result, FieldMemOperand(string,
3541
- SeqAsciiString::kHeaderSize + const_index));
3542
- } else {
3543
- __ add(scratch,
3544
- string,
3545
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3546
- __ ldrb(result, MemOperand(scratch, index));
3547
- }
3528
+ __ add(result,
3529
+ string,
3530
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3531
+ __ ldrb(result, MemOperand(result, index));
3532
+
3548
3533
  __ bind(&done);
3549
3534
  __ bind(deferred->exit());
3550
3535
  }
@@ -3677,8 +3662,8 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3677
3662
  void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3678
3663
  Label slow;
3679
3664
  Register reg = ToRegister(instr->InputAt(0));
3680
- DoubleRegister dbl_scratch = d0;
3681
- SwVfpRegister flt_scratch = s0;
3665
+ DoubleRegister dbl_scratch = double_scratch0();
3666
+ SwVfpRegister flt_scratch = dbl_scratch.low();
3682
3667
 
3683
3668
  // Preserve the value of all registers.
3684
3669
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
@@ -3772,7 +3757,7 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3772
3757
  LOperand* input = instr->InputAt(0);
3773
3758
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
3774
3759
  if (instr->needs_check()) {
3775
- ASSERT(kHeapObjectTag == 1);
3760
+ STATIC_ASSERT(kHeapObjectTag == 1);
3776
3761
  // If the input is a HeapObject, SmiUntag will set the carry flag.
3777
3762
  __ SmiUntag(ToRegister(input), SetCC);
3778
3763
  DeoptimizeIf(cs, instr->environment());
@@ -3784,35 +3769,40 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3784
3769
 
3785
3770
  void LCodeGen::EmitNumberUntagD(Register input_reg,
3786
3771
  DoubleRegister result_reg,
3772
+ bool deoptimize_on_undefined,
3787
3773
  LEnvironment* env) {
3788
3774
  Register scratch = scratch0();
3789
- SwVfpRegister flt_scratch = s0;
3790
- ASSERT(!result_reg.is(d0));
3775
+ SwVfpRegister flt_scratch = double_scratch0().low();
3776
+ ASSERT(!result_reg.is(double_scratch0()));
3791
3777
 
3792
3778
  Label load_smi, heap_number, done;
3793
3779
 
3794
3780
  // Smi check.
3795
- __ tst(input_reg, Operand(kSmiTagMask));
3796
- __ b(eq, &load_smi);
3781
+ __ JumpIfSmi(input_reg, &load_smi);
3797
3782
 
3798
3783
  // Heap number map check.
3799
3784
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
3800
3785
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3801
3786
  __ cmp(scratch, Operand(ip));
3802
- __ b(eq, &heap_number);
3787
+ if (deoptimize_on_undefined) {
3788
+ DeoptimizeIf(ne, env);
3789
+ } else {
3790
+ Label heap_number;
3791
+ __ b(eq, &heap_number);
3803
3792
 
3804
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3805
- __ cmp(input_reg, Operand(ip));
3806
- DeoptimizeIf(ne, env);
3793
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3794
+ __ cmp(input_reg, Operand(ip));
3795
+ DeoptimizeIf(ne, env);
3807
3796
 
3808
- // Convert undefined to NaN.
3809
- __ LoadRoot(ip, Heap::kNanValueRootIndex);
3810
- __ sub(ip, ip, Operand(kHeapObjectTag));
3811
- __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3812
- __ jmp(&done);
3797
+ // Convert undefined to NaN.
3798
+ __ LoadRoot(ip, Heap::kNanValueRootIndex);
3799
+ __ sub(ip, ip, Operand(kHeapObjectTag));
3800
+ __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3801
+ __ jmp(&done);
3813
3802
 
3803
+ __ bind(&heap_number);
3804
+ }
3814
3805
  // Heap number to double register conversion.
3815
- __ bind(&heap_number);
3816
3806
  __ sub(ip, input_reg, Operand(kHeapObjectTag));
3817
3807
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3818
3808
  __ jmp(&done);
@@ -3852,7 +3842,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3852
3842
  // The input was optimistically untagged; revert it.
3853
3843
  // The carry flag is set when we reach this deferred code as we just executed
3854
3844
  // SmiUntag(heap_object, SetCC)
3855
- ASSERT(kHeapObjectTag == 1);
3845
+ STATIC_ASSERT(kHeapObjectTag == 1);
3856
3846
  __ adc(input_reg, input_reg, Operand(input_reg));
3857
3847
 
3858
3848
  // Heap number map check.
@@ -3946,7 +3936,9 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3946
3936
  Register input_reg = ToRegister(input);
3947
3937
  DoubleRegister result_reg = ToDoubleRegister(result);
3948
3938
 
3949
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
3939
+ EmitNumberUntagD(input_reg, result_reg,
3940
+ instr->hydrogen()->deoptimize_on_undefined(),
3941
+ instr->environment());
3950
3942
  }
3951
3943
 
3952
3944
 
@@ -3955,7 +3947,6 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3955
3947
  Register scratch1 = scratch0();
3956
3948
  Register scratch2 = ToRegister(instr->TempAt(0));
3957
3949
  DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
3958
- DwVfpRegister double_scratch = double_scratch0();
3959
3950
  SwVfpRegister single_scratch = double_scratch0().low();
3960
3951
 
3961
3952
  Label done;
@@ -4096,7 +4087,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4096
4087
  // conversions.
4097
4088
  __ cmp(input_reg, Operand(factory()->undefined_value()));
4098
4089
  DeoptimizeIf(ne, instr->environment());
4099
- __ movt(input_reg, 0);
4090
+ __ mov(result_reg, Operand(0));
4100
4091
  __ jmp(&done);
4101
4092
 
4102
4093
  // Heap number
@@ -4292,29 +4283,6 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
4292
4283
  }
4293
4284
 
4294
4285
 
4295
- void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
4296
- Register input = ToRegister(instr->InputAt(0));
4297
- Register result = ToRegister(instr->result());
4298
- Label true_label;
4299
- Label false_label;
4300
- Label done;
4301
-
4302
- Condition final_branch_condition = EmitTypeofIs(&true_label,
4303
- &false_label,
4304
- input,
4305
- instr->type_literal());
4306
- __ b(final_branch_condition, &true_label);
4307
- __ bind(&false_label);
4308
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
4309
- __ b(&done);
4310
-
4311
- __ bind(&true_label);
4312
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
4313
-
4314
- __ bind(&done);
4315
- }
4316
-
4317
-
4318
4286
  void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
4319
4287
  Register input = ToRegister(instr->InputAt(0));
4320
4288
  int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -4358,6 +4326,10 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4358
4326
  __ CompareRoot(input, Heap::kFalseValueRootIndex);
4359
4327
  final_branch_condition = eq;
4360
4328
 
4329
+ } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
4330
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
4331
+ final_branch_condition = eq;
4332
+
4361
4333
  } else if (type_name->Equals(heap()->undefined_symbol())) {
4362
4334
  __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
4363
4335
  __ b(eq, true_label);
@@ -4370,17 +4342,21 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4370
4342
 
4371
4343
  } else if (type_name->Equals(heap()->function_symbol())) {
4372
4344
  __ JumpIfSmi(input, false_label);
4373
- __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
4345
+ __ CompareObjectType(input, input, scratch,
4346
+ FIRST_CALLABLE_SPEC_OBJECT_TYPE);
4374
4347
  final_branch_condition = ge;
4375
4348
 
4376
4349
  } else if (type_name->Equals(heap()->object_symbol())) {
4377
4350
  __ JumpIfSmi(input, false_label);
4378
- __ CompareRoot(input, Heap::kNullValueRootIndex);
4379
- __ b(eq, true_label);
4380
- __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
4381
- __ b(lo, false_label);
4382
- __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
4383
- __ b(hs, false_label);
4351
+ if (!FLAG_harmony_typeof) {
4352
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
4353
+ __ b(eq, true_label);
4354
+ }
4355
+ __ CompareObjectType(input, input, scratch,
4356
+ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4357
+ __ b(lt, false_label);
4358
+ __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4359
+ __ b(gt, false_label);
4384
4360
  // Check for undetectable objects => false.
4385
4361
  __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
4386
4362
  __ tst(ip, Operand(1 << Map::kIsUndetectable));
@@ -4396,26 +4372,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4396
4372
  }
4397
4373
 
4398
4374
 
4399
- void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
4400
- Register result = ToRegister(instr->result());
4401
- Label true_label;
4402
- Label false_label;
4403
- Label done;
4404
-
4405
- EmitIsConstructCall(result, scratch0());
4406
- __ b(eq, &true_label);
4407
-
4408
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
4409
- __ b(&done);
4410
-
4411
-
4412
- __ bind(&true_label);
4413
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
4414
-
4415
- __ bind(&done);
4416
- }
4417
-
4418
-
4419
4375
  void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4420
4376
  Register temp1 = ToRegister(instr->TempAt(0));
4421
4377
  int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -4490,15 +4446,50 @@ void LCodeGen::DoIn(LIn* instr) {
4490
4446
  }
4491
4447
 
4492
4448
 
4449
+ void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4450
+ {
4451
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4452
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4453
+ RegisterLazyDeoptimization(
4454
+ instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
4455
+ }
4456
+
4457
+ // The gap code includes the restoring of the safepoint registers.
4458
+ int pc = masm()->pc_offset();
4459
+ safepoints_.SetPcAfterGap(pc);
4460
+ }
4461
+
4462
+
4493
4463
  void LCodeGen::DoStackCheck(LStackCheck* instr) {
4494
- // Perform stack overflow check.
4495
- Label ok;
4496
- __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4497
- __ cmp(sp, Operand(ip));
4498
- __ b(hs, &ok);
4499
- StackCheckStub stub;
4500
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4501
- __ bind(&ok);
4464
+ class DeferredStackCheck: public LDeferredCode {
4465
+ public:
4466
+ DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4467
+ : LDeferredCode(codegen), instr_(instr) { }
4468
+ virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4469
+ private:
4470
+ LStackCheck* instr_;
4471
+ };
4472
+
4473
+ if (instr->hydrogen()->is_function_entry()) {
4474
+ // Perform stack overflow check.
4475
+ Label done;
4476
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4477
+ __ cmp(sp, Operand(ip));
4478
+ __ b(hs, &done);
4479
+ StackCheckStub stub;
4480
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4481
+ __ bind(&done);
4482
+ } else {
4483
+ ASSERT(instr->hydrogen()->is_backwards_branch());
4484
+ // Perform stack overflow check if this goto needs it before jumping.
4485
+ DeferredStackCheck* deferred_stack_check =
4486
+ new DeferredStackCheck(this, instr);
4487
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4488
+ __ cmp(sp, Operand(ip));
4489
+ __ b(lo, deferred_stack_check->entry());
4490
+ __ bind(instr->done_label());
4491
+ deferred_stack_check->SetExit(instr->done_label());
4492
+ }
4502
4493
  }
4503
4494
 
4504
4495