libv8 3.3.10.4 → 3.5.10.beta1

Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -128,11 +128,11 @@ bool LCodeGen::GeneratePrologue() {
  }
  #endif

- // Strict mode functions need to replace the receiver with undefined
- // when called as functions (without an explicit receiver
- // object). ecx is zero for method calls and non-zero for function
- // calls.
- if (info_->is_strict_mode()) {
+ // Strict mode functions and builtins need to replace the receiver
+ // with undefined when called as functions (without an explicit
+ // receiver object). ecx is zero for method calls and non-zero for
+ // function calls.
+ if (info_->is_strict_mode() || info_->is_native()) {
  Label ok;
  __ test(ecx, Operand(ecx));
  __ j(zero, &ok, Label::kNear);
@@ -184,7 +184,7 @@ bool LCodeGen::GeneratePrologue() {
  FastNewContextStub stub(heap_slots);
  __ CallStub(&stub);
  } else {
- __ CallRuntime(Runtime::kNewContext, 1);
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
  }
  RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
  // Context is returned in both eax and esi. It replaces the context
@@ -255,11 +255,20 @@ LInstruction* LCodeGen::GetNextInstruction() {

  bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
- for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
- LDeferredCode* code = deferred_[i];
- __ bind(code->entry());
- code->Generate();
- __ jmp(code->exit());
+ if (deferred_.length() > 0) {
+ for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
+ LDeferredCode* code = deferred_[i];
+ __ bind(code->entry());
+ code->Generate();
+ __ jmp(code->exit());
+ }
+
+ // Pad code to ensure that the last piece of deferred code have
+ // room for lazy bailout.
+ while ((masm()->pc_offset() - LastSafepointEnd())
+ < Deoptimizer::patch_size()) {
+ __ nop();
+ }
  }

  // Deferred code is the last part of the instruction sequence. Mark
@@ -428,15 +437,11 @@ void LCodeGen::AddToTranslation(Translation* translation,
  void LCodeGen::CallCodeGeneric(Handle<Code> code,
  RelocInfo::Mode mode,
  LInstruction* instr,
- ContextMode context_mode,
  SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

- if (context_mode == RESTORE_CONTEXT) {
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- }
  __ call(code, mode);

  RegisterLazyDeoptimization(instr, safepoint_mode);
@@ -452,24 +457,19 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,

  void LCodeGen::CallCode(Handle<Code> code,
  RelocInfo::Mode mode,
- LInstruction* instr,
- ContextMode context_mode) {
- CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
+ LInstruction* instr) {
+ CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
  }


  void LCodeGen::CallRuntime(const Runtime::Function* fun,
  int argc,
- LInstruction* instr,
- ContextMode context_mode) {
+ LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

- if (context_mode == RESTORE_CONTEXT) {
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- }
  __ CallRuntime(fun, argc);

  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
@@ -478,8 +478,18 @@ void LCodeGen::CallRuntime(const Runtime::Function* fun,

  void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
  int argc,
- LInstruction* instr) {
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ LInstruction* instr,
+ LOperand* context) {
+ ASSERT(context->IsRegister() || context->IsStackSlot());
+ if (context->IsRegister()) {
+ if (!ToRegister(context).is(esi)) {
+ __ mov(esi, ToRegister(context));
+ }
+ } else {
+ // Context is stack slot.
+ __ mov(esi, ToOperand(context));
+ }
+
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
  instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
@@ -693,7 +703,7 @@ void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,


  void LCodeGen::RecordPosition(int position) {
- if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
+ if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
  }

@@ -748,38 +758,38 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
  switch (instr->hydrogen()->major_key()) {
  case CodeStub::RegExpConstructResult: {
  RegExpConstructResultStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::RegExpExec: {
  RegExpExecStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::SubString: {
  SubStringStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::NumberToString: {
  NumberToStringStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::StringAdd: {
  StringAddStub stub(NO_STRING_ADD_FLAGS);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::StringCompare: {
  StringCompareStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  case CodeStub::TranscendentalCache: {
  TranscendentalCacheStub stub(instr->transcendental_type(),
  TranscendentalCacheStub::TAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  break;
  }
  default:
@@ -811,6 +821,8 @@ void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
  __ j(not_zero, &done, Label::kNear);
  DeoptimizeIf(no_condition, instr->environment());
+ } else {
+ __ jmp(&done, Label::kNear);
  }
  __ bind(&positive_dividend);
  __ and_(dividend, divisor - 1);
@@ -1199,17 +1211,26 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  }


- void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
+ void LCodeGen::DoFixedArrayBaseLength(
+ LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
- __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
+ __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
  }


- void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
+ void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
- __ mov(result, FieldOperand(array, ExternalArray::kLengthOffset));
+ Register input = ToRegister(instr->InputAt(0));
+
+ // Load map into |result|.
+ __ mov(result, FieldOperand(input, HeapObject::kMapOffset));
+ // Load the map's "bit field 2" into |result|. We only need the first byte,
+ // but the following masking takes care of that anyway.
+ __ mov(result, FieldOperand(result, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ and_(result, Map::kElementsKindMask);
+ __ shr(result, Map::kElementsKindShift);
  }


@@ -1220,8 +1241,7 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
  ASSERT(input.is(result));
  Label done;
  // If the object is a smi return the object.
- __ test(input, Immediate(kSmiTagMask));
- __ j(zero, &done, Label::kNear);
+ __ JumpIfSmi(input, &done, Label::kNear);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
@@ -1240,8 +1260,9 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {


  void LCodeGen::DoThrow(LThrow* instr) {
- __ push(ToOperand(instr->InputAt(0)));
- CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);
+ __ push(ToOperand(instr->value()));
+ ASSERT(ToRegister(instr->context()).is(esi));
+ CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
  Comment("Unreachable code.");
@@ -1311,12 +1332,14 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {


  void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(edx));
- ASSERT(ToRegister(instr->InputAt(1)).is(eax));
+ ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->left()).is(edx));
+ ASSERT(ToRegister(instr->right()).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ __ nop(); // Signals no inlined code.
  }


@@ -1351,7 +1374,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

- Representation r = instr->hydrogen()->representation();
+ Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
  Register reg = ToRegister(instr->InputAt(0));
  __ test(reg, Operand(reg));
@@ -1364,89 +1387,114 @@ void LCodeGen::DoBranch(LBranch* instr) {
  } else {
  ASSERT(r.IsTagged());
  Register reg = ToRegister(instr->InputAt(0));
- if (instr->hydrogen()->type().IsBoolean()) {
+ HType type = instr->hydrogen()->value()->type();
+ if (type.IsBoolean()) {
  __ cmp(reg, factory()->true_value());
  EmitBranch(true_block, false_block, equal);
+ } else if (type.IsSmi()) {
+ __ test(reg, Operand(reg));
+ EmitBranch(true_block, false_block, not_equal);
  } else {
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

- __ cmp(reg, factory()->undefined_value());
- __ j(equal, false_label);
- __ cmp(reg, factory()->true_value());
- __ j(equal, true_label);
- __ cmp(reg, factory()->false_value());
- __ j(equal, false_label);
- __ test(reg, Operand(reg));
- __ j(equal, false_label);
- __ test(reg, Immediate(kSmiTagMask));
- __ j(zero, true_label);
-
- // Test for double values. Zero is false.
- Label call_stub;
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- factory()->heap_number_map());
- __ j(not_equal, &call_stub, Label::kNear);
- __ fldz();
- __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
- __ FCmp();
- __ j(zero, false_label);
- __ jmp(true_label);
-
- // The conversion stub doesn't cause garbage collections so it's
- // safe to not record a safepoint after the call.
- __ bind(&call_stub);
- ToBooleanStub stub;
- __ pushad();
- __ push(reg);
- __ CallStub(&stub);
- __ test(eax, Operand(eax));
- __ popad();
- EmitBranch(true_block, false_block, not_zero);
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ // Avoid deopts in the case where we've never executed this path before.
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
+
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ // undefined -> false.
+ __ cmp(reg, factory()->undefined_value());
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // true -> true.
+ __ cmp(reg, factory()->true_value());
+ __ j(equal, true_label);
+ // false -> false.
+ __ cmp(reg, factory()->false_value());
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ // 'null' -> false.
+ __ cmp(reg, factory()->null_value());
+ __ j(equal, false_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::SMI)) {
+ // Smis: 0 -> false, all other -> true.
+ __ test(reg, Operand(reg));
+ __ j(equal, false_label);
+ __ JumpIfSmi(reg, true_label);
+ } else if (expected.NeedsMap()) {
+ // If we need a map later and have a Smi -> deopt.
+ __ test(reg, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
+ }
+
+ Register map = no_reg; // Keep the compiler happy.
+ if (expected.NeedsMap()) {
+ map = ToRegister(instr->TempAt(0));
+ ASSERT(!map.is(reg));
+ __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
+
+ if (expected.CanBeUndetectable()) {
+ // Undetectable -> false.
+ __ test_b(FieldOperand(map, Map::kBitFieldOffset),
+ 1 << Map::kIsUndetectable);
+ __ j(not_zero, false_label);
+ }
+ }
+
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ // spec object -> true.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(above_equal, true_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
+ __ j(not_zero, true_label);
+ __ jmp(false_label);
+ __ bind(&not_string);
+ }
+
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number;
+ __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ factory()->heap_number_map());
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ fldz();
+ __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
+ __ FCmp();
+ __ j(zero, false_label);
+ __ jmp(true_label);
+ __ bind(&not_heap_number);
+ }
+
+ // We've seen something for the first time -> deopt.
+ DeoptimizeIf(no_condition, instr->environment());
  }
  }
  }


- void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
+ void LCodeGen::EmitGoto(int block) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
- // Perform stack overflow check if this goto needs it before jumping.
- if (deferred_stack_check != NULL) {
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit(isolate());
- __ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, chunk_->GetAssemblyLabel(block));
- __ jmp(deferred_stack_check->entry());
- deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
- } else {
- __ jmp(chunk_->GetAssemblyLabel(block));
- }
+ __ jmp(chunk_->GetAssemblyLabel(block));
  }
  }


- void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
- PushSafepointRegistersScope scope(this);
- CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
- }
-
  void LCodeGen::DoGoto(LGoto* instr) {
- class DeferredStackCheck: public LDeferredCode {
- public:
- DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
- : LDeferredCode(codegen), instr_(instr) { }
- virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
- private:
- LGoto* instr_;
- };
-
- DeferredStackCheck* deferred = NULL;
- if (instr->include_stack_check()) {
- deferred = new DeferredStackCheck(this, instr);
- }
- EmitGoto(instr->block_id(), deferred);
+ EmitGoto(instr->block_id());
  }


@@ -1487,32 +1535,6 @@ void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  }


- void LCodeGen::DoCmpID(LCmpID* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
- LOperand* result = instr->result();
-
- Label unordered;
- if (instr->is_double()) {
- // Don't base result on EFLAGS when a NaN is involved. Instead
- // jump to the unordered case, which produces a false value.
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
- __ j(parity_even, &unordered, Label::kNear);
- } else {
- EmitCmpI(left, right);
- }
-
- Label done;
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
- __ mov(ToRegister(result), factory()->true_value());
- __ j(cc, &done, Label::kNear);
-
- __ bind(&unordered);
- __ mov(ToRegister(result), factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
@@ -1533,23 +1555,9 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  }


- void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
+ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
- Register result = ToRegister(instr->result());
-
- __ cmp(left, Operand(right));
- __ mov(result, factory()->true_value());
- Label done;
- __ j(equal, &done, Label::kNear);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
- void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
+ Operand right = ToOperand(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

@@ -1558,69 +1566,16 @@ void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  }


- void LCodeGen::DoCmpSymbolEq(LCmpSymbolEq* instr) {
- Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
- Register result = ToRegister(instr->result());
-
- Label done;
- __ cmp(left, Operand(right));
- __ mov(result, factory()->false_value());
- __ j(not_equal, &done, Label::kNear);
- __ mov(result, factory()->true_value());
- __ bind(&done);
- }
-
-
- void LCodeGen::DoCmpSymbolEqAndBranch(LCmpSymbolEqAndBranch* instr) {
+ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
- Register right = ToRegister(instr->InputAt(1));
- int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());

- __ cmp(left, Operand(right));
+ __ cmp(left, instr->hydrogen()->right());
  EmitBranch(true_block, false_block, equal);
  }


- void LCodeGen::DoIsNull(LIsNull* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
-
- // TODO(fsc): If the expression is known to be a smi, then it's
- // definitely not null. Materialize false.
-
- __ cmp(reg, factory()->null_value());
- if (instr->is_strict()) {
- __ mov(result, factory()->true_value());
- Label done;
- __ j(equal, &done, Label::kNear);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- } else {
- Label true_value, false_value, done;
- __ j(equal, &true_value, Label::kNear);
- __ cmp(reg, factory()->undefined_value());
- __ j(equal, &true_value, Label::kNear);
- __ test(reg, Immediate(kSmiTagMask));
- __ j(zero, &false_value, Label::kNear);
- // Check for undetectable objects by looking in the bit field in
- // the map. The object has already been smi checked.
- Register scratch = result;
- __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
- __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
- __ test(scratch, Immediate(1 << Map::kIsUndetectable));
- __ j(not_zero, &true_value, Label::kNear);
- __ bind(&false_value);
- __ mov(result, factory()->false_value());
- __ jmp(&done, Label::kNear);
- __ bind(&true_value);
- __ mov(result, factory()->true_value());
- __ bind(&done);
- }
- }
-
-
  void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

@@ -1639,8 +1594,7 @@ void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  __ j(equal, true_label);
  __ cmp(reg, factory()->undefined_value());
  __ j(equal, true_label);
- __ test(reg, Immediate(kSmiTagMask));
- __ j(zero, false_label);
+ __ JumpIfSmi(reg, false_label);
  // Check for undetectable objects by looking in the bit field in
  // the map. The object has already been smi checked.
  Register scratch = ToRegister(instr->TempAt(0));
@@ -1654,83 +1608,42 @@ void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {

  Condition LCodeGen::EmitIsObject(Register input,
  Register temp1,
- Register temp2,
  Label* is_not_object,
  Label* is_object) {
- ASSERT(!input.is(temp1));
- ASSERT(!input.is(temp2));
- ASSERT(!temp1.is(temp2));
-
- __ test(input, Immediate(kSmiTagMask));
- __ j(equal, is_not_object);
+ __ JumpIfSmi(input, is_not_object);

  __ cmp(input, isolate()->factory()->null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
- __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
- __ test(temp2, Immediate(1 << Map::kIsUndetectable));
+ __ test_b(FieldOperand(temp1, Map::kBitFieldOffset),
+ 1 << Map::kIsUndetectable);
  __ j(not_zero, is_not_object);

- __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
- __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
+ __ movzx_b(temp1, FieldOperand(temp1, Map::kInstanceTypeOffset));
+ __ cmp(temp1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, is_not_object);
- __ cmp(temp2, LAST_JS_OBJECT_TYPE);
+ __ cmp(temp1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  return below_equal;
  }


- void LCodeGen::DoIsObject(LIsObject* instr) {
- Register reg = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
- Register temp = ToRegister(instr->TempAt(0));
- Label is_false, is_true, done;
-
- Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
- __ j(true_cond, &is_true);
-
- __ bind(&is_false);
- __ mov(result, factory()->false_value());
- __ jmp(&done);
-
- __ bind(&is_true);
- __ mov(result, factory()->true_value());
-
- __ bind(&done);
- }
-
-
  void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
- Register temp2 = ToRegister(instr->TempAt(1));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

- Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);
+ Condition true_cond = EmitIsObject(reg, temp, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
  }


- void LCodeGen::DoIsSmi(LIsSmi* instr) {
- Operand input = ToOperand(instr->InputAt(0));
- Register result = ToRegister(instr->result());
-
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
- __ test(input, Immediate(kSmiTagMask));
- __ mov(result, factory()->true_value());
- Label done;
- __ j(zero, &done, Label::kNear);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  Operand input = ToOperand(instr->InputAt(0));

@@ -1742,27 +1655,6 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  }


- void LCodeGen::DoIsUndetectable(LIsUndetectable* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
-
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
- Label false_label, done;
- STATIC_ASSERT(kSmiTag == 0);
- __ test(input, Immediate(kSmiTagMask));
- __ j(zero, &false_label, Label::kNear);
- __ mov(result, FieldOperand(input, HeapObject::kMapOffset));
- __ test_b(FieldOperand(result, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
- __ j(zero, &false_label, Label::kNear);
- __ mov(result, factory()->true_value());
- __ jmp(&done);
- __ bind(&false_label);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
@@ -1771,8 +1663,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  STATIC_ASSERT(kSmiTag == 0);
- __ test(input, Immediate(kSmiTagMask));
- __ j(zero, chunk_->GetAssemblyLabel(false_block));
+ __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
  __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
  1 << Map::kIsUndetectable);
@@ -1780,7 +1671,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  }


- static InstanceType TestType(HHasInstanceType* instr) {
+ static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
@@ -1789,7 +1680,7 @@ static InstanceType TestType(HHasInstanceType* instr) {
  }


- static Condition BranchCondition(HHasInstanceType* instr) {
+ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return equal;
@@ -1800,25 +1691,6 @@ static Condition BranchCondition(HHasInstanceType* instr) {
  }


- void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
-
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
- __ test(input, Immediate(kSmiTagMask));
- Label done, is_false;
- __ j(zero, &is_false, Label::kNear);
- __ CmpObjectType(input, TestType(instr->hydrogen()), result);
- __ j(NegateCondition(BranchCondition(instr->hydrogen())),
- &is_false, Label::kNear);
- __ mov(result, factory()->true_value());
- __ jmp(&done, Label::kNear);
- __ bind(&is_false);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
@@ -1828,8 +1700,7 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

- __ test(input, Immediate(kSmiTagMask));
- __ j(zero, false_label);
+ __ JumpIfSmi(input, false_label);

  __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
@@ -1849,21 +1720,6 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  }


- void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
-
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
- __ mov(result, factory()->true_value());
- __ test(FieldOperand(input, String::kHashFieldOffset),
- Immediate(String::kContainsCachedArrayIndexMask));
- Label done;
- __ j(zero, &done, Label::kNear);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoHasCachedArrayIndexAndBranch(
  LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
@@ -1887,28 +1743,28 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
  Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
- __ test(input, Immediate(kSmiTagMask));
- __ j(zero, is_false);
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
+ __ JumpIfSmi(input, is_false);
+ __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
  __ j(below, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
- __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
+ __ CmpInstanceType(temp, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
- __ j(equal, is_true);
+ __ j(above_equal, is_true);
  } else {
- __ j(equal, is_false);
+ __ j(above_equal, is_false);
  }

  // Check if the constructor in the map is a function.
  __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));

- // As long as JS_FUNCTION_TYPE is the last instance type and it is
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
- // LAST_JS_OBJECT_TYPE.
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
@@ -1934,29 +1790,6 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
  }


- void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
- Register input = ToRegister(instr->InputAt(0));
- Register result = ToRegister(instr->result());
- ASSERT(input.is(result));
- Register temp = ToRegister(instr->TempAt(0));
- Handle<String> class_name = instr->hydrogen()->class_name();
- Label done;
- Label is_true, is_false;
-
- EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
-
- __ j(not_equal, &is_false, Label::kNear);
-
- __ bind(&is_true);
- __ mov(result, factory()->true_value());
- __ jmp(&done, Label::kNear);
-
- __ bind(&is_false);
- __ mov(result, factory()->false_value());
- __ bind(&done);
- }
-
-
  void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
@@ -1995,7 +1828,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  // Object and function are in fixed registers defined by the stub.
  ASSERT(ToRegister(instr->context()).is(esi));
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  Label true_value, done;
  __ test(eax, Operand(eax));
@@ -2008,18 +1841,6 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  }


- void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
- ASSERT(ToRegister(instr->context()).is(esi));
- int true_block = chunk_->LookupDestination(instr->true_block_id());
- int false_block = chunk_->LookupDestination(instr->false_block_id());
-
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
- __ test(eax, Operand(eax));
- EmitBranch(true_block, false_block, zero);
- }
-
-
  void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
  public:
@@ -2041,12 +1862,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
- Register object = ToRegister(instr->InputAt(0));
+ Register object = ToRegister(instr->InputAt(1));
  Register temp = ToRegister(instr->TempAt(0));

  // A Smi is not an instance of anything.
- __ test(object, Immediate(kSmiTagMask));
- __ j(zero, &false_result);
+ __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurences of the
  // hole value will be patched to the last map/result pair generated by the
@@ -2104,14 +1924,13 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
  __ mov(InstanceofStub::right(), Immediate(instr->function()));
- static const int kAdditionalDelta = 16;
+ static const int kAdditionalDelta = 13;
  int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
  __ mov(temp, Immediate(delta));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
  RelocInfo::CODE_TARGET,
  instr,
- RESTORE_CONTEXT,
  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the eax slot and restore all registers.
  __ StoreToSafepointRegisterSlot(eax, eax);
@@ -2142,7 +1961,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
@@ -2159,25 +1978,6 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
  }


- void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
- Token::Value op = instr->op();
- int true_block = chunk_->LookupDestination(instr->true_block_id());
- int false_block = chunk_->LookupDestination(instr->false_block_id());
-
- Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
-
- // The compare stub expects compare condition and the input operands
- // reversed for GT and LTE.
- Condition condition = ComputeCompareCondition(op);
- if (op == Token::GT || op == Token::LTE) {
- condition = ReverseCondition(condition);
- }
- __ test(eax, Operand(eax));
- EmitBranch(true_block, false_block, condition);
- }
-
-
  void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
  // Preserve the return value on the stack and rely on the runtime call
@@ -2213,7 +2013,7 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
  RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
+ CallCode(ic, mode, instr);
  }


@@ -2244,7 +2044,7 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  Handle<Code> ic = instr->strict_mode()
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
  : isolate()->builtins()->StoreIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
+ CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
  }


@@ -2316,7 +2116,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  ASSERT(instr->hydrogen()->need_generic());
  __ mov(ecx, name);
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
  Label done;
  for (int i = 0; i < map_count - 1; ++i) {
@@ -2338,7 +2138,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  __ bind(&generic);
  __ mov(ecx, name);
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
  DeoptimizeIf(not_equal, instr->environment());
  EmitLoadFieldOrConstantFunction(result, object, map, name);
@@ -2355,7 +2155,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {

  __ mov(ecx, instr->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
  }


@@ -2406,7 +2206,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
- Label done;
+ Label done, ok, fail;
  __ cmp(FieldOperand(result, HeapObject::kMapOffset),
  Immediate(factory()->fixed_array_map()));
  __ j(equal, &done, Label::kNear);
@@ -2416,11 +2216,19 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register temp((result.is(eax)) ? ebx : eax);
  __ push(temp);
  __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
- __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
- __ sub(Operand(temp), Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
- __ cmp(Operand(temp), Immediate(kExternalArrayTypeCount));
+ __ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
+ __ and_(temp, Map::kElementsKindMask);
+ __ shr(temp, Map::kElementsKindShift);
+ __ cmp(temp, JSObject::FAST_ELEMENTS);
+ __ j(equal, &ok, Label::kNear);
+ __ cmp(temp, JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ __ j(less, &fail, Label::kNear);
+ __ cmp(temp, JSObject::LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ __ j(less_equal, &ok, Label::kNear);
+ __ bind(&fail);
+ __ Abort("Check for fast or external elements failed.");
+ __ bind(&ok);
  __ pop(temp);
- __ Check(below, "Check for fast elements or pixel array failed.");
  __ bind(&done);
  }
  }
@@ -2451,16 +2259,13 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2451
2259
 
2452
2260
 
2453
2261
  void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2454
- Register elements = ToRegister(instr->elements());
2455
- Register key = ToRegister(instr->key());
2456
2262
  Register result = ToRegister(instr->result());
2457
- ASSERT(result.is(elements));
2458
2263
 
2459
2264
  // Load the result.
2460
- __ mov(result, FieldOperand(elements,
2461
- key,
2462
- times_pointer_size,
2463
- FixedArray::kHeaderSize));
2265
+ __ mov(result,
2266
+ BuildFastArrayOperand(instr->elements(), instr->key(),
2267
+ JSObject::FAST_ELEMENTS,
2268
+ FixedArray::kHeaderSize - kHeapObjectTag));
2464
2269
 
2465
2270
  // Check for the hole value.
2466
2271
  if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2470,55 +2275,80 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2470
2275
  }
2471
2276
 
2472
2277
 
2473
- Operand LCodeGen::BuildExternalArrayOperand(LOperand* external_pointer,
2474
- LOperand* key,
2475
- ExternalArrayType array_type) {
2476
- Register external_pointer_reg = ToRegister(external_pointer);
2477
- int shift_size = ExternalArrayTypeToShiftSize(array_type);
2278
+ void LCodeGen::DoLoadKeyedFastDoubleElement(
2279
+ LLoadKeyedFastDoubleElement* instr) {
2280
+ XMMRegister result = ToDoubleRegister(instr->result());
2281
+
2282
+ if (instr->hydrogen()->RequiresHoleCheck()) {
2283
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2284
+ sizeof(kHoleNanLower32);
2285
+ Operand hole_check_operand = BuildFastArrayOperand(
2286
+ instr->elements(), instr->key(),
2287
+ JSObject::FAST_DOUBLE_ELEMENTS,
2288
+ offset);
2289
+ __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
2290
+ DeoptimizeIf(equal, instr->environment());
2291
+ }
2292
+
2293
+ Operand double_load_operand = BuildFastArrayOperand(
2294
+ instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS,
2295
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag);
2296
+ __ movdbl(result, double_load_operand);
2297
+ }
2298
+
2299
+
2300
+ Operand LCodeGen::BuildFastArrayOperand(
2301
+ LOperand* elements_pointer,
2302
+ LOperand* key,
2303
+ JSObject::ElementsKind elements_kind,
2304
+ uint32_t offset) {
2305
+ Register elements_pointer_reg = ToRegister(elements_pointer);
2306
+ int shift_size = ElementsKindToShiftSize(elements_kind);
2478
2307
  if (key->IsConstantOperand()) {
2479
2308
  int constant_value = ToInteger32(LConstantOperand::cast(key));
2480
2309
  if (constant_value & 0xF0000000) {
2481
2310
  Abort("array index constant value too big");
2482
2311
  }
2483
- return Operand(external_pointer_reg, constant_value * (1 << shift_size));
2312
+ return Operand(elements_pointer_reg,
2313
+ constant_value * (1 << shift_size) + offset);
2484
2314
  } else {
2485
2315
  ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
2486
- return Operand(external_pointer_reg, ToRegister(key), scale_factor, 0);
2316
+ return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset);
2487
2317
  }
2488
2318
  }
2489
2319
 
2490
2320
 
2491
2321
  void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2492
2322
  LLoadKeyedSpecializedArrayElement* instr) {
2493
- ExternalArrayType array_type = instr->array_type();
2494
- Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
2495
- instr->key(), array_type));
2496
- if (array_type == kExternalFloatArray) {
2323
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
2324
+ Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2325
+ instr->key(), elements_kind, 0));
2326
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
2497
2327
  XMMRegister result(ToDoubleRegister(instr->result()));
2498
2328
  __ movss(result, operand);
2499
2329
  __ cvtss2sd(result, result);
2500
- } else if (array_type == kExternalDoubleArray) {
2330
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
2501
2331
  __ movdbl(ToDoubleRegister(instr->result()), operand);
2502
2332
  } else {
2503
2333
  Register result(ToRegister(instr->result()));
2504
- switch (array_type) {
2505
- case kExternalByteArray:
2334
+ switch (elements_kind) {
2335
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
2506
2336
  __ movsx_b(result, operand);
2507
2337
  break;
2508
- case kExternalUnsignedByteArray:
2509
- case kExternalPixelArray:
2338
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
2339
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2510
2340
  __ movzx_b(result, operand);
2511
2341
  break;
2512
- case kExternalShortArray:
2342
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
2513
2343
  __ movsx_w(result, operand);
2514
2344
  break;
2515
- case kExternalUnsignedShortArray:
2345
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2516
2346
  __ movzx_w(result, operand);
2517
2347
  break;
2518
- case kExternalIntArray:
2348
+ case JSObject::EXTERNAL_INT_ELEMENTS:
2519
2349
  __ mov(result, operand);
2520
2350
  break;
2521
- case kExternalUnsignedIntArray:
2351
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
2522
2352
  __ mov(result, operand);
2523
2353
  __ test(result, Operand(result));
2524
2354
  // TODO(danno): we could be more clever here, perhaps having a special
@@ -2526,8 +2356,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2526
2356
  // happens, and generate code that returns a double rather than int.
2527
2357
  DeoptimizeIf(negative, instr->environment());
2528
2358
  break;
2529
- case kExternalFloatArray:
2530
- case kExternalDoubleArray:
2359
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
2360
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
2361
+ case JSObject::FAST_ELEMENTS:
2362
+ case JSObject::FAST_DOUBLE_ELEMENTS:
2363
+ case JSObject::DICTIONARY_ELEMENTS:
2364
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
2531
2365
  UNREACHABLE();
2532
2366
  break;
2533
2367
  }
@@ -2541,7 +2375,7 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2541
2375
  ASSERT(ToRegister(instr->key()).is(eax));
2542
2376
 
2543
2377
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2544
- CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2378
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
2545
2379
  }
2546
2380
 
2547
2381
 
@@ -2602,9 +2436,25 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2602
2436
  ASSERT(function.is(edi)); // Required by InvokeFunction.
2603
2437
  ASSERT(ToRegister(instr->result()).is(eax));
2604
2438
 
2605
- // If the receiver is null or undefined, we have to pass the global object
2606
- // as a receiver.
2439
+ // If the receiver is null or undefined, we have to pass the global
2440
+ // object as a receiver to normal functions. Values have to be
2441
+ // passed unchanged to builtins and strict-mode functions.
2607
2442
  Label global_object, receiver_ok;
2443
+
2444
+ // Do not transform the receiver to object for strict mode
2445
+ // functions.
2446
+ __ mov(scratch,
2447
+ FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2448
+ __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
2449
+ 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2450
+ __ j(not_equal, &receiver_ok, Label::kNear);
2451
+
2452
+ // Do not transform the receiver to object for builtins.
2453
+ __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
2454
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
2455
+ __ j(not_equal, &receiver_ok, Label::kNear);
2456
+
2457
+ // Normal function. Replace undefined or null with global receiver.
2608
2458
  __ cmp(receiver, factory()->null_value());
2609
2459
  __ j(equal, &global_object, Label::kNear);
2610
2460
  __ cmp(receiver, factory()->undefined_value());
@@ -2613,7 +2463,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2613
2463
  // The receiver should be a JS object.
2614
2464
  __ test(receiver, Immediate(kSmiTagMask));
2615
2465
  DeoptimizeIf(equal, instr->environment());
2616
- __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch);
2466
+ __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch);
2617
2467
  DeoptimizeIf(below, instr->environment());
2618
2468
  __ jmp(&receiver_ok, Label::kNear);
2619
2469
 
@@ -2623,6 +2473,8 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2623
2473
  // here.
2624
2474
  __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2625
2475
  __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2476
+ __ mov(receiver,
2477
+ FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
2626
2478
  __ bind(&receiver_ok);
2627
2479
 
2628
2480
  // Copy the arguments to this function possibly from the
@@ -2656,7 +2508,8 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2656
2508
  pointers,
2657
2509
  env->deoptimization_index());
2658
2510
  ParameterCount actual(eax);
2659
- __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
2511
+ __ InvokeFunction(function, actual, CALL_FUNCTION,
2512
+ safepoint_generator, CALL_AS_METHOD);
2660
2513
  }
2661
2514
 
2662
2515
 
@@ -2670,6 +2523,12 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
2670
2523
  }
2671
2524
 
2672
2525
 
2526
+ void LCodeGen::DoThisFunction(LThisFunction* instr) {
2527
+ Register result = ToRegister(instr->result());
2528
+ __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2529
+ }
2530
+
2531
+
2673
2532
  void LCodeGen::DoContext(LContext* instr) {
2674
2533
  Register result = ToRegister(instr->result());
2675
2534
  __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2679,8 +2538,8 @@ void LCodeGen::DoContext(LContext* instr) {
2679
2538
  void LCodeGen::DoOuterContext(LOuterContext* instr) {
2680
2539
  Register context = ToRegister(instr->context());
2681
2540
  Register result = ToRegister(instr->result());
2682
- __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2683
- __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2541
+ __ mov(result,
2542
+ Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2684
2543
  }
2685
2544
 
2686
2545
 
@@ -2746,7 +2605,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2746
2605
 
2747
2606
 
2748
2607
  void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2749
- Register input_reg = ToRegister(instr->InputAt(0));
2608
+ Register input_reg = ToRegister(instr->value());
2750
2609
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2751
2610
  factory()->heap_number_map());
2752
2611
  DeoptimizeIf(not_equal, instr->environment());
@@ -2777,7 +2636,8 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2777
2636
  // Slow case: Call the runtime system to do the number allocation.
2778
2637
  __ bind(&slow);
2779
2638
 
2780
- CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2639
+ CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
2640
+ instr, instr->context());
2781
2641
 
2782
2642
  // Set the pointer to the new heap number in tmp.
2783
2643
  if (!tmp.is(eax)) __ mov(tmp, eax);
@@ -2798,7 +2658,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2798
2658
 
2799
2659
 
2800
2660
  void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2801
- Register input_reg = ToRegister(instr->InputAt(0));
2661
+ Register input_reg = ToRegister(instr->value());
2802
2662
  __ test(input_reg, Operand(input_reg));
2803
2663
  Label is_positive;
2804
2664
  __ j(not_sign, &is_positive);
@@ -2823,12 +2683,12 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2823
2683
  LUnaryMathOperation* instr_;
2824
2684
  };
2825
2685
 
2826
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2686
+ ASSERT(instr->value()->Equals(instr->result()));
2827
2687
  Representation r = instr->hydrogen()->value()->representation();
2828
2688
 
2829
2689
  if (r.IsDouble()) {
2830
2690
  XMMRegister scratch = xmm0;
2831
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2691
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2832
2692
  __ xorps(scratch, scratch);
2833
2693
  __ subsd(scratch, input_reg);
2834
2694
  __ pand(input_reg, scratch);
@@ -2837,10 +2697,9 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2837
2697
  } else { // Tagged case.
2838
2698
  DeferredMathAbsTaggedHeapNumber* deferred =
2839
2699
  new DeferredMathAbsTaggedHeapNumber(this, instr);
2840
- Register input_reg = ToRegister(instr->InputAt(0));
2700
+ Register input_reg = ToRegister(instr->value());
2841
2701
  // Smi check.
2842
- __ test(input_reg, Immediate(kSmiTagMask));
2843
- __ j(not_zero, deferred->entry());
2702
+ __ JumpIfNotSmi(input_reg, deferred->entry());
2844
2703
  EmitIntegerMathAbs(instr);
2845
2704
  __ bind(deferred->exit());
2846
2705
  }
@@ -2850,41 +2709,69 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2850
2709
  void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2851
2710
  XMMRegister xmm_scratch = xmm0;
2852
2711
  Register output_reg = ToRegister(instr->result());
2853
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2854
- __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2855
- __ ucomisd(input_reg, xmm_scratch);
2712
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2856
2713
 
2857
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2858
- DeoptimizeIf(below_equal, instr->environment());
2714
+ if (CpuFeatures::IsSupported(SSE4_1)) {
2715
+ CpuFeatures::Scope scope(SSE4_1);
2716
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2717
+ // Deoptimize on negative zero.
2718
+ Label non_zero;
2719
+ __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2720
+ __ ucomisd(input_reg, xmm_scratch);
2721
+ __ j(not_equal, &non_zero, Label::kNear);
2722
+ __ movmskpd(output_reg, input_reg);
2723
+ __ test(output_reg, Immediate(1));
2724
+ DeoptimizeIf(not_zero, instr->environment());
2725
+ __ bind(&non_zero);
2726
+ }
2727
+ __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
2728
+ __ cvttsd2si(output_reg, Operand(xmm_scratch));
2729
+ // Overflow is signalled with minint.
2730
+ __ cmp(output_reg, 0x80000000u);
2731
+ DeoptimizeIf(equal, instr->environment());
2859
2732
  } else {
2733
+ Label done;
2734
+ // Deoptimize on negative numbers.
2735
+ __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2736
+ __ ucomisd(input_reg, xmm_scratch);
2860
2737
  DeoptimizeIf(below, instr->environment());
2861
- }
2862
2738
 
2863
- // Use truncating instruction (OK because input is positive).
2864
- __ cvttsd2si(output_reg, Operand(input_reg));
2739
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2740
+ // Check for negative zero.
2741
+ Label positive_sign;
2742
+ __ j(above, &positive_sign, Label::kNear);
2743
+ __ movmskpd(output_reg, input_reg);
2744
+ __ test(output_reg, Immediate(1));
2745
+ DeoptimizeIf(not_zero, instr->environment());
2746
+ __ Set(output_reg, Immediate(0));
2747
+ __ jmp(&done, Label::kNear);
2748
+ __ bind(&positive_sign);
2749
+ }
2865
2750
 
2866
- // Overflow is signalled with minint.
2867
- __ cmp(output_reg, 0x80000000u);
2868
- DeoptimizeIf(equal, instr->environment());
2869
- }
2751
+ // Use truncating instruction (OK because input is positive).
2752
+ __ cvttsd2si(output_reg, Operand(input_reg));
2870
2753
 
2754
+ // Overflow is signalled with minint.
2755
+ __ cmp(output_reg, 0x80000000u);
2756
+ DeoptimizeIf(equal, instr->environment());
2757
+ __ bind(&done);
2758
+ }
2759
+ }
2871
2760
 
2872
2761
  void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2873
2762
  XMMRegister xmm_scratch = xmm0;
2874
2763
  Register output_reg = ToRegister(instr->result());
2875
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2764
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2876
2765
 
2877
2766
  Label below_half, done;
2878
2767
  // xmm_scratch = 0.5
2879
2768
  ExternalReference one_half = ExternalReference::address_of_one_half();
2880
2769
  __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2881
-
2882
2770
  __ ucomisd(xmm_scratch, input_reg);
2883
2771
  __ j(above, &below_half);
2884
2772
  // input = input + 0.5
2885
2773
  __ addsd(input_reg, xmm_scratch);
2886
2774
 
2887
-
2888
2775
  // Compute Math.floor(value + 0.5).
2889
2776
  // Use truncating instruction (OK because input is positive).
2890
2777
  __ cvttsd2si(output_reg, Operand(input_reg));
@@ -2917,7 +2804,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2917
2804
 
2918
2805
 
2919
2806
  void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2920
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2807
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2921
2808
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2922
2809
  __ sqrtsd(input_reg, input_reg);
2923
2810
  }
@@ -2925,7 +2812,7 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2925
2812
 
2926
2813
  void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2927
2814
  XMMRegister xmm_scratch = xmm0;
2928
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2815
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2929
2816
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2930
2817
  __ xorps(xmm_scratch, xmm_scratch);
2931
2818
  __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
@@ -2962,8 +2849,7 @@ void LCodeGen::DoPower(LPower* instr) {
2962
2849
  Register right_reg = ToRegister(right);
2963
2850
 
2964
2851
  Label non_smi, call;
2965
- __ test(right_reg, Immediate(kSmiTagMask));
2966
- __ j(not_zero, &non_smi);
2852
+ __ JumpIfNotSmi(right_reg, &non_smi);
2967
2853
  __ SmiUntag(right_reg);
2968
2854
  __ cvtsi2sd(result_reg, Operand(right_reg));
2969
2855
  __ jmp(&call);
@@ -2994,14 +2880,15 @@ void LCodeGen::DoPower(LPower* instr) {
2994
2880
 
2995
2881
 
2996
2882
  void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2997
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2998
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2883
+ ASSERT(instr->value()->Equals(instr->result()));
2884
+ XMMRegister input_reg = ToDoubleRegister(instr->value());
2999
2885
  Label positive, done, zero;
3000
2886
  __ xorps(xmm0, xmm0);
3001
2887
  __ ucomisd(input_reg, xmm0);
3002
2888
  __ j(above, &positive, Label::kNear);
3003
2889
  __ j(equal, &zero, Label::kNear);
3004
- ExternalReference nan = ExternalReference::address_of_nan();
2890
+ ExternalReference nan =
2891
+ ExternalReference::address_of_canonical_non_hole_nan();
3005
2892
  __ movdbl(input_reg, Operand::StaticVariable(nan));
3006
2893
  __ jmp(&done, Label::kNear);
3007
2894
  __ bind(&zero);
@@ -3027,7 +2914,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3027
2914
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3028
2915
  TranscendentalCacheStub stub(TranscendentalCache::COS,
3029
2916
  TranscendentalCacheStub::UNTAGGED);
3030
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2917
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3031
2918
  }
3032
2919
 
3033
2920
 
@@ -3035,7 +2922,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3035
2922
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
3036
2923
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3037
2924
  TranscendentalCacheStub::UNTAGGED);
3038
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2925
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3039
2926
  }
3040
2927
 
3041
2928
 
@@ -3083,7 +2970,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3083
2970
  RegisterEnvironmentForDeoptimization(env);
3084
2971
  SafepointGenerator generator(this, pointers, env->deoptimization_index());
3085
2972
  ParameterCount count(instr->arity());
3086
- __ InvokeFunction(edi, count, CALL_FUNCTION, generator);
2973
+ __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3087
2974
  }
3088
2975
 
3089
2976
 
@@ -3095,7 +2982,7 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3095
2982
  int arity = instr->arity();
3096
2983
  Handle<Code> ic = isolate()->stub_cache()->
3097
2984
  ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
3098
- CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2985
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
3099
2986
  }
3100
2987
 
3101
2988
 
@@ -3108,7 +2995,7 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
3108
2995
  Handle<Code> ic =
3109
2996
  isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
3110
2997
  __ mov(ecx, instr->name());
3111
- CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
2998
+ CallCode(ic, mode, instr);
3112
2999
  }
3113
3000
 
3114
3001
 
@@ -3118,7 +3005,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
3118
3005
 
3119
3006
  int arity = instr->arity();
3120
3007
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
3121
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
3008
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3122
3009
  __ Drop(1);
3123
3010
  }
3124
3011
 
@@ -3132,7 +3019,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3132
3019
  Handle<Code> ic =
3133
3020
  isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
3134
3021
  __ mov(ecx, instr->name());
3135
- CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
3022
+ CallCode(ic, mode, instr);
3136
3023
  }
3137
3024
 
3138
3025
 
@@ -3150,12 +3037,12 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
3150
3037
 
3151
3038
  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
3152
3039
  __ Set(eax, Immediate(instr->arity()));
3153
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED);
3040
+ CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
3154
3041
  }
3155
3042
 
3156
3043
 
3157
3044
  void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3158
- CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
3045
+ CallRuntime(instr->function(), instr->arity(), instr);
3159
3046
  }
3160
3047
 
3161
3048
 
@@ -3198,44 +3085,54 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3198
3085
  Handle<Code> ic = instr->strict_mode()
3199
3086
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
3200
3087
  : isolate()->builtins()->StoreIC_Initialize();
3201
- CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
3088
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
3202
3089
  }
3203
3090
 
3204
3091
 
3205
3092
  void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3206
- __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
3207
- DeoptimizeIf(above_equal, instr->environment());
3093
+ if (instr->index()->IsConstantOperand()) {
3094
+ __ cmp(ToOperand(instr->length()),
3095
+ ToImmediate(LConstantOperand::cast(instr->index())));
3096
+ DeoptimizeIf(below_equal, instr->environment());
3097
+ } else {
3098
+ __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
3099
+ DeoptimizeIf(above_equal, instr->environment());
3100
+ }
3208
3101
  }
3209
3102
 
3210
3103
 
3211
3104
  void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3212
3105
  LStoreKeyedSpecializedArrayElement* instr) {
3213
- ExternalArrayType array_type = instr->array_type();
3214
- Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
3215
- instr->key(), array_type));
3216
- if (array_type == kExternalFloatArray) {
3106
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
3107
+ Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3108
+ instr->key(), elements_kind, 0));
3109
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
3217
3110
  __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
3218
3111
  __ movss(operand, xmm0);
3219
- } else if (array_type == kExternalDoubleArray) {
3112
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
3220
3113
  __ movdbl(operand, ToDoubleRegister(instr->value()));
3221
3114
  } else {
3222
3115
  Register value = ToRegister(instr->value());
3223
- switch (array_type) {
3224
- case kExternalPixelArray:
3225
- case kExternalByteArray:
3226
- case kExternalUnsignedByteArray:
3116
+ switch (elements_kind) {
3117
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
3118
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3119
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
3227
3120
  __ mov_b(operand, value);
3228
3121
  break;
3229
- case kExternalShortArray:
3230
- case kExternalUnsignedShortArray:
3122
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
3123
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3231
3124
  __ mov_w(operand, value);
3232
3125
  break;
3233
- case kExternalIntArray:
3234
- case kExternalUnsignedIntArray:
3126
+ case JSObject::EXTERNAL_INT_ELEMENTS:
3127
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
3235
3128
  __ mov(operand, value);
3236
3129
  break;
3237
- case kExternalFloatArray:
3238
- case kExternalDoubleArray:
3130
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
3131
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
3132
+ case JSObject::FAST_ELEMENTS:
3133
+ case JSObject::FAST_DOUBLE_ELEMENTS:
3134
+ case JSObject::DICTIONARY_ELEMENTS:
3135
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
3239
3136
  UNREACHABLE();
3240
3137
  break;
3241
3138
  }
@@ -3275,6 +3172,27 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3275
3172
  }
3276
3173
 
3277
3174
 
3175
+ void LCodeGen::DoStoreKeyedFastDoubleElement(
3176
+ LStoreKeyedFastDoubleElement* instr) {
3177
+ XMMRegister value = ToDoubleRegister(instr->value());
3178
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3179
+ Label have_value;
3180
+
3181
+ __ ucomisd(value, value);
3182
+ __ j(parity_odd, &have_value); // NaN.
3183
+
3184
+ ExternalReference canonical_nan_reference =
3185
+ ExternalReference::address_of_canonical_non_hole_nan();
3186
+ __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
3187
+ __ bind(&have_value);
3188
+
3189
+ Operand double_store_operand = BuildFastArrayOperand(
3190
+ instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS,
3191
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag);
3192
+ __ movdbl(double_store_operand, value);
3193
+ }
3194
+
3195
+
3278
3196
  void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3279
3197
  ASSERT(ToRegister(instr->context()).is(esi));
3280
3198
  ASSERT(ToRegister(instr->object()).is(edx));
@@ -3284,7 +3202,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3284
3202
  Handle<Code> ic = instr->strict_mode()
3285
3203
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3286
3204
  : isolate()->builtins()->KeyedStoreIC_Initialize();
3287
- CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
3205
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
3288
3206
  }
3289
3207
 
3290
3208
 
@@ -3299,95 +3217,79 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3299
3217
  };
3300
3218
 
3301
3219
  Register string = ToRegister(instr->string());
3302
- Register index = no_reg;
3303
- int const_index = -1;
3304
- if (instr->index()->IsConstantOperand()) {
3305
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3306
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3307
- if (!Smi::IsValid(const_index)) {
3308
- // Guaranteed to be out of bounds because of the assert above.
3309
- // So the bounds check that must dominate this instruction must
3310
- // have deoptimized already.
3311
- if (FLAG_debug_code) {
3312
- __ Abort("StringCharCodeAt: out of bounds index.");
3313
- }
3314
- // No code needs to be generated.
3315
- return;
3316
- }
3317
- } else {
3318
- index = ToRegister(instr->index());
3319
- }
3220
+ Register index = ToRegister(instr->index());
3320
3221
  Register result = ToRegister(instr->result());
3321
3222
 
3322
3223
  DeferredStringCharCodeAt* deferred =
3323
3224
  new DeferredStringCharCodeAt(this, instr);
3324
3225
 
3325
- Label flat_string, ascii_string, done;
3326
-
3327
3226
  // Fetch the instance type of the receiver into result register.
3328
3227
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3329
3228
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3330
3229
 
3331
- // We need special handling for non-flat strings.
3332
- STATIC_ASSERT(kSeqStringTag == 0);
3333
- __ test(result, Immediate(kStringRepresentationMask));
3334
- __ j(zero, &flat_string, Label::kNear);
3230
+ // We need special handling for indirect strings.
3231
+ Label check_sequential;
3232
+ __ test(result, Immediate(kIsIndirectStringMask));
3233
+ __ j(zero, &check_sequential, Label::kNear);
3335
3234
 
3336
- // Handle non-flat strings.
3337
- __ test(result, Immediate(kIsConsStringMask));
3338
- __ j(zero, deferred->entry());
3235
+ // Dispatch on the indirect string shape: slice or cons.
3236
+ Label cons_string;
3237
+ __ test(result, Immediate(kSlicedNotConsMask));
3238
+ __ j(zero, &cons_string, Label::kNear);
3339
3239
 
3340
- // ConsString.
3240
+ // Handle slices.
3241
+ Label indirect_string_loaded;
3242
+ __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
3243
+ __ SmiUntag(result);
3244
+ __ add(index, Operand(result));
3245
+ __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
3246
+ __ jmp(&indirect_string_loaded, Label::kNear);
3247
+
3248
+ // Handle conses.
3341
3249
  // Check whether the right hand side is the empty string (i.e. if
3342
3250
  // this is really a flat string in a cons string). If that is not
3343
3251
  // the case we would rather go to the runtime system now to flatten
3344
3252
  // the string.
3253
+ __ bind(&cons_string);
3345
3254
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
3346
3255
  Immediate(factory()->empty_string()));
3347
3256
  __ j(not_equal, deferred->entry());
3348
- // Get the first of the two strings and load its instance type.
3349
3257
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
3258
+
3259
+ __ bind(&indirect_string_loaded);
3350
3260
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3351
3261
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3352
- // If the first cons component is also non-flat, then go to runtime.
3262
+
3263
+ // Check whether the string is sequential. The only non-sequential
3264
+ // shapes we support have just been unwrapped above.
3265
+ __ bind(&check_sequential);
3353
3266
  STATIC_ASSERT(kSeqStringTag == 0);
3354
3267
  __ test(result, Immediate(kStringRepresentationMask));
3355
3268
  __ j(not_zero, deferred->entry());
3356
3269
 
3357
- // Check for ASCII or two-byte string.
3358
- __ bind(&flat_string);
3270
+ // Dispatch on the encoding: ASCII or two-byte.
3271
+ Label ascii_string;
3359
3272
  STATIC_ASSERT(kAsciiStringTag != 0);
3360
3273
  __ test(result, Immediate(kStringEncodingMask));
3361
3274
  __ j(not_zero, &ascii_string, Label::kNear);
3362
3275
 
3363
3276
  // Two-byte string.
3364
3277
  // Load the two-byte character code into the result register.
3278
+ Label done;
3365
3279
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3366
- if (instr->index()->IsConstantOperand()) {
3367
- __ movzx_w(result,
3368
- FieldOperand(string,
3369
- SeqTwoByteString::kHeaderSize +
3370
- (kUC16Size * const_index)));
3371
- } else {
3372
- __ movzx_w(result, FieldOperand(string,
3373
- index,
3374
- times_2,
3375
- SeqTwoByteString::kHeaderSize));
3376
- }
3280
+ __ movzx_w(result, FieldOperand(string,
3281
+ index,
3282
+ times_2,
3283
+ SeqTwoByteString::kHeaderSize));
3377
3284
  __ jmp(&done, Label::kNear);
3378
3285
 
3379
3286
  // ASCII string.
3380
3287
  // Load the byte into the result register.
3381
3288
  __ bind(&ascii_string);
3382
- if (instr->index()->IsConstantOperand()) {
3383
- __ movzx_b(result, FieldOperand(string,
3384
- SeqAsciiString::kHeaderSize + const_index));
3385
- } else {
3386
- __ movzx_b(result, FieldOperand(string,
3387
- index,
3388
- times_1,
3389
- SeqAsciiString::kHeaderSize));
3390
- }
3289
+ __ movzx_b(result, FieldOperand(string,
3290
+ index,
3291
+ times_1,
3292
+ SeqAsciiString::kHeaderSize));
3391
3293
  __ bind(&done);
3392
3294
  __ bind(deferred->exit());
3393
3295
  }
@@ -3415,7 +3317,8 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3415
3317
  __ SmiTag(index);
3416
3318
  __ push(index);
3417
3319
  }
3418
- CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3320
+ CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
3321
+ instr, instr->context());
3419
3322
  if (FLAG_debug_code) {
3420
3323
  __ AbortIfNotSmi(eax);
3421
3324
  }
@@ -3466,7 +3369,7 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3466
3369
  PushSafepointRegistersScope scope(this);
3467
3370
  __ SmiTag(char_code);
3468
3371
  __ push(char_code);
3469
- CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
3372
+ CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
3470
3373
  __ StoreToSafepointRegisterSlot(result, eax);
3471
3374
  }
3472
3375
 
@@ -3490,7 +3393,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
3490
3393
  __ push(ToOperand(instr->right()));
3491
3394
  }
3492
3395
  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3493
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3396
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3494
3397
  }
3495
3398
 
3496
3399
 
@@ -3551,8 +3454,15 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3551
3454
  // register is stored, as this register is in the pointer map, but contains an
3552
3455
  // integer value.
3553
3456
  __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3554
-
3555
- CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3457
+ // NumberTagI and NumberTagD use the context from the frame, rather than
3458
+ // the environment's HContext or HInlinedContext value.
3459
+ // They only call Runtime::kAllocateHeapNumber.
3460
+ // The corresponding HChange instructions are added in a phase that does
3461
+ // not have easy access to the local context.
3462
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3463
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3464
+ RecordSafepointWithRegisters(
3465
+ instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3556
3466
  if (!reg.is(eax)) __ mov(reg, eax);
3557
3467
 
3558
3468
  // Done. Put the value in xmm0 into the value of the allocated heap
@@ -3596,7 +3506,15 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3596
3506
  __ Set(reg, Immediate(0));
3597
3507
 
3598
3508
  PushSafepointRegistersScope scope(this);
3599
- CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3509
+ // NumberTagI and NumberTagD use the context from the frame, rather than
3510
+ // the environment's HContext or HInlinedContext value.
3511
+ // They only call Runtime::kAllocateHeapNumber.
3512
+ // The corresponding HChange instructions are added in a phase that does
3513
+ // not have easy access to the local context.
3514
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3515
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3516
+ RecordSafepointWithRegisters(instr->pointer_map(), 0,
3517
+ Safepoint::kNoDeoptimizationIndex);
3600
3518
  __ StoreToSafepointRegisterSlot(reg, eax);
3601
3519
  }
3602
3520
 
@@ -3622,28 +3540,34 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3622
3540
 
3623
3541
  void LCodeGen::EmitNumberUntagD(Register input_reg,
3624
3542
  XMMRegister result_reg,
3543
+ bool deoptimize_on_undefined,
3625
3544
  LEnvironment* env) {
3626
- Label load_smi, heap_number, done;
3545
+ Label load_smi, done;
3627
3546
 
3628
3547
  // Smi check.
3629
- __ test(input_reg, Immediate(kSmiTagMask));
3630
- __ j(zero, &load_smi, Label::kNear);
3548
+ __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
3631
3549
 
3632
3550
  // Heap number map check.
3633
3551
   __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
          factory()->heap_number_map());
-  __ j(equal, &heap_number, Label::kNear);
+  if (deoptimize_on_undefined) {
+    DeoptimizeIf(not_equal, env);
+  } else {
+    Label heap_number;
+    __ j(equal, &heap_number, Label::kNear);

-  __ cmp(input_reg, factory()->undefined_value());
-  DeoptimizeIf(not_equal, env);
+    __ cmp(input_reg, factory()->undefined_value());
+    DeoptimizeIf(not_equal, env);

-  // Convert undefined to NaN.
-  ExternalReference nan = ExternalReference::address_of_nan();
-  __ movdbl(result_reg, Operand::StaticVariable(nan));
-  __ jmp(&done, Label::kNear);
+    // Convert undefined to NaN.
+    ExternalReference nan =
+        ExternalReference::address_of_canonical_non_hole_nan();
+    __ movdbl(result_reg, Operand::StaticVariable(nan));
+    __ jmp(&done, Label::kNear);

+    __ bind(&heap_number);
+  }
   // Heap number to XMM conversion.
-  __ bind(&heap_number);
   __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
   __ jmp(&done, Label::kNear);

@@ -3756,8 +3680,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
   DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

   // Smi check.
-  __ test(input_reg, Immediate(kSmiTagMask));
-  __ j(not_zero, deferred->entry());
+  __ JumpIfNotSmi(input_reg, deferred->entry());

   // Smi to int32 conversion
   __ SmiUntag(input_reg);  // Untag smi.
@@ -3775,7 +3698,9 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
   Register input_reg = ToRegister(input);
   XMMRegister result_reg = ToDoubleRegister(result);

-  EmitNumberUntagD(input_reg, result_reg, instr->environment());
+  EmitNumberUntagD(input_reg, result_reg,
+                   instr->hydrogen()->deoptimize_on_undefined(),
+                   instr->environment());
 }


@@ -3898,14 +3823,14 @@ void LCodeGen::DoDoubleToI(LDoubleToI* instr) {

 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
   LOperand* input = instr->InputAt(0);
-  __ test(ToRegister(input), Immediate(kSmiTagMask));
+  __ test(ToOperand(input), Immediate(kSmiTagMask));
   DeoptimizeIf(not_zero, instr->environment());
 }


 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
   LOperand* input = instr->InputAt(0);
-  __ test(ToRegister(input), Immediate(kSmiTagMask));
+  __ test(ToOperand(input), Immediate(kSmiTagMask));
   DeoptimizeIf(zero, instr->environment());
 }

@@ -3957,8 +3882,8 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {

 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
   ASSERT(instr->InputAt(0)->IsRegister());
-  Register reg = ToRegister(instr->InputAt(0));
-  __ cmp(reg, instr->hydrogen()->target());
+  Operand operand = ToOperand(instr->InputAt(0));
+  __ cmp(operand, instr->hydrogen()->target());
   DeoptimizeIf(not_equal, instr->environment());
 }

@@ -4060,6 +3985,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {


 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
+  ASSERT(ToRegister(instr->context()).is(esi));
   // Setup the parameters to the stub/runtime call.
   __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
@@ -4073,16 +3999,16 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
-    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
+    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
+    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
   } else {
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   }
 }

@@ -4104,12 +4030,9 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {

   // Pick the right runtime function to call.
   if (instr->hydrogen()->depth() > 1) {
-    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED);
+    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
   } else {
-    CallRuntime(Runtime::kCreateObjectLiteralShallow,
-                4,
-                instr,
-                CONTEXT_ADJUSTED);
+    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   }
 }

@@ -4117,17 +4040,19 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
   ASSERT(ToRegister(instr->InputAt(0)).is(eax));
   __ push(eax);
-  CallRuntime(Runtime::kToFastProperties, 1, instr, CONTEXT_ADJUSTED);
+  CallRuntime(Runtime::kToFastProperties, 1, instr);
 }


 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
+  ASSERT(ToRegister(instr->context()).is(esi));
   Label materialized;
   // Registers will be used as follows:
   // edi = JS function.
   // ecx = literals array.
   // ebx = regexp literal.
   // eax = regexp literal clone.
+  // esi = context.
   __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
   int literal_offset = FixedArray::kHeaderSize +
@@ -4142,7 +4067,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
   __ push(Immediate(instr->hydrogen()->pattern()));
   __ push(Immediate(instr->hydrogen()->flags()));
-  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
+  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
   __ mov(ebx, eax);

   __ bind(&materialized);
@@ -4154,7 +4079,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ bind(&runtime_allocate);
   __ push(ebx);
   __ push(Immediate(Smi::FromInt(size)));
-  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
+  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
   __ pop(ebx);

   __ bind(&allocated);
@@ -4174,6 +4099,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {


 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
+  ASSERT(ToRegister(instr->context()).is(esi));
   // Use the fast case closure allocation code that allocates in new
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
@@ -4182,49 +4108,26 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
     FastNewClosureStub stub(
         shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(shared_info));
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else {
     __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
     __ push(Immediate(shared_info));
     __ push(Immediate(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
-    CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
+    CallRuntime(Runtime::kNewClosure, 3, instr);
   }
 }


 void LCodeGen::DoTypeof(LTypeof* instr) {
-  LOperand* input = instr->InputAt(0);
+  LOperand* input = instr->InputAt(1);
   if (input->IsConstantOperand()) {
     __ push(ToImmediate(input));
   } else {
     __ push(ToOperand(input));
   }
-  CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
-}
-
-
-void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
-  Register input = ToRegister(instr->InputAt(0));
-  Register result = ToRegister(instr->result());
-  Label true_label;
-  Label false_label;
-  Label done;
-
-  Condition final_branch_condition = EmitTypeofIs(&true_label,
-                                                  &false_label,
-                                                  input,
-                                                  instr->type_literal());
-  __ j(final_branch_condition, &true_label, Label::kNear);
-  __ bind(&false_label);
-  __ mov(result, factory()->false_value());
-  __ jmp(&done, Label::kNear);
-
-  __ bind(&true_label);
-  __ mov(result, factory()->true_value());
-
-  __ bind(&done);
+  CallRuntime(Runtime::kTypeof, 1, instr);
 }


@@ -4269,6 +4172,10 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
     __ cmp(input, factory()->false_value());
     final_branch_condition = equal;

+  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
+    __ cmp(input, factory()->null_value());
+    final_branch_condition = equal;
+
   } else if (type_name->Equals(heap()->undefined_symbol())) {
     __ cmp(input, factory()->undefined_value());
     __ j(equal, true_label);
@@ -4280,22 +4187,21 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
     final_branch_condition = not_zero;

   } else if (type_name->Equals(heap()->function_symbol())) {
+    STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
     __ JumpIfSmi(input, false_label);
-    __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
-    __ j(equal, true_label);
-    // Regular expressions => 'function' (they are callable).
-    __ CmpInstanceType(input, JS_REGEXP_TYPE);
-    final_branch_condition = equal;
+    __ CmpObjectType(input, FIRST_CALLABLE_SPEC_OBJECT_TYPE, input);
+    final_branch_condition = above_equal;

   } else if (type_name->Equals(heap()->object_symbol())) {
     __ JumpIfSmi(input, false_label);
-    __ cmp(input, factory()->null_value());
-    __ j(equal, true_label);
-    // Regular expressions => 'function', not 'object'.
-    __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
+    if (!FLAG_harmony_typeof) {
+      __ cmp(input, factory()->null_value());
+      __ j(equal, true_label);
+    }
+    __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
     __ j(below, false_label);
-    __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
-    __ j(above_equal, false_label);
+    __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
+    __ j(above, false_label);
     // Check for undetectable objects => false.
     __ test_b(FieldOperand(input, Map::kBitFieldOffset),
               1 << Map::kIsUndetectable);
@@ -4311,24 +4217,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
 }


-void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
-  Register result = ToRegister(instr->result());
-  Label true_label;
-  Label done;
-
-  EmitIsConstructCall(result);
-  __ j(equal, &true_label, Label::kNear);
-
-  __ mov(result, factory()->false_value());
-  __ jmp(&done, Label::kNear);
-
-  __ bind(&true_label);
-  __ mov(result, factory()->true_value());
-
-  __ bind(&done);
-}
-
-
 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
   Register temp = ToRegister(instr->TempAt(0));
   int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -4388,23 +4276,61 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
   SafepointGenerator safepoint_generator(this,
                                          pointers,
                                          env->deoptimization_index());
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   __ push(Immediate(Smi::FromInt(strict_mode_flag())));
   __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
 }


+void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
+  {
+    PushSafepointRegistersScope scope(this);
+    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+    __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+    RegisterLazyDeoptimization(
+        instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+  }
+
+  // The gap code includes the restoring of the safepoint registers.
+  int pc = masm()->pc_offset();
+  safepoints_.SetPcAfterGap(pc);
+}
+
+
 void LCodeGen::DoStackCheck(LStackCheck* instr) {
-  // Perform stack overflow check.
-  Label done;
-  ExternalReference stack_limit =
-      ExternalReference::address_of_stack_limit(isolate());
-  __ cmp(esp, Operand::StaticVariable(stack_limit));
-  __ j(above_equal, &done, Label::kNear);
+  class DeferredStackCheck: public LDeferredCode {
+   public:
+    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
+   private:
+    LStackCheck* instr_;
+  };

-  StackCheckStub stub;
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
-  __ bind(&done);
+  if (instr->hydrogen()->is_function_entry()) {
+    // Perform stack overflow check.
+    Label done;
+    ExternalReference stack_limit =
+        ExternalReference::address_of_stack_limit(isolate());
+    __ cmp(esp, Operand::StaticVariable(stack_limit));
+    __ j(above_equal, &done, Label::kNear);
+
+    ASSERT(instr->context()->IsRegister());
+    ASSERT(ToRegister(instr->context()).is(esi));
+    StackCheckStub stub;
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    __ bind(&done);
+  } else {
+    ASSERT(instr->hydrogen()->is_backwards_branch());
+    // Perform stack overflow check if this goto needs it before jumping.
+    DeferredStackCheck* deferred_stack_check =
+        new DeferredStackCheck(this, instr);
+    ExternalReference stack_limit =
+        ExternalReference::address_of_stack_limit(isolate());
+    __ cmp(esp, Operand::StaticVariable(stack_limit));
+    __ j(below, deferred_stack_check->entry());
+    __ bind(instr->done_label());
+    deferred_stack_check->SetExit(instr->done_label());
+  }
 }


@@ -4449,7 +4375,6 @@ void LCodeGen::DoIn(LIn* instr) {
   SafepointGenerator safepoint_generator(this,
                                          pointers,
                                          env->deoptimization_index());
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
 }