libv8 3.3.10.4 → 3.5.10.beta1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -76,11 +76,11 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
   // Check that the receiver is a valid JS object.
   __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
   __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
-  __ cmpb(r0, Immediate(FIRST_JS_OBJECT_TYPE));
+  __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
   __ j(below, miss);
 
   // If this assert fails, we have to check upper bound too.
-  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
 
   GenerateGlobalInstanceTypeCheck(masm, r0, miss);
 
@@ -225,110 +225,6 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
225
225
  }
226
226
 
227
227
 
228
- static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
229
- Label* miss,
230
- Register elements,
231
- Register key,
232
- Register r0,
233
- Register r1,
234
- Register r2,
235
- Register result) {
236
- // Register use:
237
- //
238
- // elements - holds the slow-case elements of the receiver on entry.
239
- // Unchanged unless 'result' is the same register.
240
- //
241
- // key - holds the smi key on entry.
242
- // Unchanged unless 'result' is the same register.
243
- //
244
- // Scratch registers:
245
- //
246
- // r0 - holds the untagged key on entry and holds the hash once computed.
247
- //
248
- // r1 - used to hold the capacity mask of the dictionary
249
- //
250
- // r2 - used for the index into the dictionary.
251
- //
252
- // result - holds the result on exit if the load succeeded.
253
- // Allowed to be the same as 'key' or 'result'.
254
- // Unchanged on bailout so 'key' or 'result' can be used
255
- // in further computation.
256
-
257
- Label done;
258
-
259
- // Compute the hash code from the untagged key. This must be kept in sync
260
- // with ComputeIntegerHash in utils.h.
261
- //
262
- // hash = ~hash + (hash << 15);
263
- __ movl(r1, r0);
264
- __ notl(r0);
265
- __ shll(r1, Immediate(15));
266
- __ addl(r0, r1);
267
- // hash = hash ^ (hash >> 12);
268
- __ movl(r1, r0);
269
- __ shrl(r1, Immediate(12));
270
- __ xorl(r0, r1);
271
- // hash = hash + (hash << 2);
272
- __ leal(r0, Operand(r0, r0, times_4, 0));
273
- // hash = hash ^ (hash >> 4);
274
- __ movl(r1, r0);
275
- __ shrl(r1, Immediate(4));
276
- __ xorl(r0, r1);
277
- // hash = hash * 2057;
278
- __ imull(r0, r0, Immediate(2057));
279
- // hash = hash ^ (hash >> 16);
280
- __ movl(r1, r0);
281
- __ shrl(r1, Immediate(16));
282
- __ xorl(r0, r1);
283
-
284
- // Compute capacity mask.
285
- __ SmiToInteger32(r1,
286
- FieldOperand(elements, NumberDictionary::kCapacityOffset));
287
- __ decl(r1);
288
-
289
- // Generate an unrolled loop that performs a few probes before giving up.
290
- const int kProbes = 4;
291
- for (int i = 0; i < kProbes; i++) {
292
- // Use r2 for index calculations and keep the hash intact in r0.
293
- __ movq(r2, r0);
294
- // Compute the masked index: (hash + i + i * i) & mask.
295
- if (i > 0) {
296
- __ addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
297
- }
298
- __ and_(r2, r1);
299
-
300
- // Scale the index by multiplying by the entry size.
301
- ASSERT(NumberDictionary::kEntrySize == 3);
302
- __ lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
303
-
304
- // Check if the key matches.
305
- __ cmpq(key, FieldOperand(elements,
306
- r2,
307
- times_pointer_size,
308
- NumberDictionary::kElementsStartOffset));
309
- if (i != (kProbes - 1)) {
310
- __ j(equal, &done);
311
- } else {
312
- __ j(not_equal, miss);
313
- }
314
- }
315
-
316
- __ bind(&done);
317
- // Check that the value is a normal propety.
318
- const int kDetailsOffset =
319
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
320
- ASSERT_EQ(NORMAL, 0);
321
- __ Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
322
- Smi::FromInt(PropertyDetails::TypeField::mask()));
323
- __ j(not_zero, miss);
324
-
325
- // Get the value at the masked, scaled index.
326
- const int kValueOffset =
327
- NumberDictionary::kElementsStartOffset + kPointerSize;
328
- __ movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
329
- }
330
-
331
-
332
228
  void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
333
229
  // ----------- S t a t e -------------
334
230
  // -- rax : receiver
@@ -482,7 +378,7 @@ static void GenerateKeyStringCheck(MacroAssembler* masm,
482
378
  __ j(zero, index_string); // The value in hash is used at jump target.
483
379
 
484
380
  // Is the string a symbol?
485
- ASSERT(kSymbolTag != 0);
381
+ STATIC_ASSERT(kSymbolTag != 0);
486
382
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
487
383
  Immediate(kIsSymbolMask));
488
384
  __ j(zero, not_symbol);
@@ -508,11 +404,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
508
404
  GenerateKeyedLoadReceiverCheck(
509
405
  masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
510
406
 
511
- // Check the "has fast elements" bit in the receiver's map which is
512
- // now in rcx.
513
- __ testb(FieldOperand(rcx, Map::kBitField2Offset),
514
- Immediate(1 << Map::kHasFastElements));
515
- __ j(zero, &check_number_dictionary);
407
+ // Check the receiver's map to see if it has fast elements.
408
+ __ CheckFastElements(rcx, &check_number_dictionary);
516
409
 
517
410
  GenerateFastArrayLoad(masm,
518
411
  rdx,
@@ -538,7 +431,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
538
431
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
539
432
  Heap::kHashTableMapRootIndex);
540
433
  __ j(not_equal, &slow);
541
- GenerateNumberDictionaryLoad(masm, &slow, rcx, rax, rbx, r9, rdi, rax);
434
+ __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
542
435
  __ ret(0);
543
436
 
544
437
  __ bind(&slow);
@@ -730,9 +623,13 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
730
623
 
731
624
  __ CmpInstanceType(rbx, JS_ARRAY_TYPE);
732
625
  __ j(equal, &array);
733
- // Check that the object is some kind of JS object.
734
- __ CmpInstanceType(rbx, FIRST_JS_OBJECT_TYPE);
626
+ // Check that the object is some kind of JSObject.
627
+ __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
735
628
  __ j(below, &slow);
629
+ __ CmpInstanceType(rbx, JS_PROXY_TYPE);
630
+ __ j(equal, &slow);
631
+ __ CmpInstanceType(rbx, JS_FUNCTION_PROXY_TYPE);
632
+ __ j(equal, &slow);
736
633
 
737
634
  // Object case: Check key against length in the elements array.
738
635
  // rax: value
@@ -892,7 +789,8 @@ static void GenerateFunctionTailCall(MacroAssembler* masm,
892
789
 
893
790
  // Invoke the function.
894
791
  ParameterCount actual(argc);
895
- __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
792
+ __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
793
+ NullCallWrapper(), CALL_AS_METHOD);
896
794
  }
897
795
 
898
796
 
@@ -1097,7 +995,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
1097
995
  __ j(not_equal, &slow_load);
1098
996
  __ SmiToInteger32(rbx, rcx);
1099
997
  // ebx: untagged index
1100
- GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
998
+ __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
1101
999
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
1102
1000
  __ jmp(&do_call);
1103
1001
 
@@ -1196,6 +1094,171 @@ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
1196
1094
  }
1197
1095
 
1198
1096
 
1097
+ static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
1098
+ Register object,
1099
+ Register key,
1100
+ Register scratch1,
1101
+ Register scratch2,
1102
+ Register scratch3,
1103
+ Label* unmapped_case,
1104
+ Label* slow_case) {
1105
+ Heap* heap = masm->isolate()->heap();
1106
+
1107
+ // Check that the receiver is a JSObject. Because of the elements
1108
+ // map check later, we do not need to check for interceptors or
1109
+ // whether it requires access checks.
1110
+ __ JumpIfSmi(object, slow_case);
1111
+ // Check that the object is some kind of JSObject.
1112
+ __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
1113
+ __ j(below, slow_case);
1114
+
1115
+ // Check that the key is a positive smi.
1116
+ Condition check = masm->CheckNonNegativeSmi(key);
1117
+ __ j(NegateCondition(check), slow_case);
1118
+
1119
+ // Load the elements into scratch1 and check its map. If not, jump
1120
+ // to the unmapped lookup with the parameter map in scratch1.
1121
+ Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
1122
+ __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
1123
+ __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
1124
+
1125
+ // Check if element is in the range of mapped arguments.
1126
+ __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
1127
+ __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
1128
+ __ cmpq(key, scratch2);
1129
+ __ j(greater_equal, unmapped_case);
1130
+
1131
+ // Load element index and check whether it is the hole.
1132
+ const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
1133
+ __ SmiToInteger64(scratch3, key);
1134
+ __ movq(scratch2, FieldOperand(scratch1,
1135
+ scratch3,
1136
+ times_pointer_size,
1137
+ kHeaderSize));
1138
+ __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
1139
+ __ j(equal, unmapped_case);
1140
+
1141
+ // Load value from context and return it. We can reuse scratch1 because
1142
+ // we do not jump to the unmapped lookup (which requires the parameter
1143
+ // map in scratch1).
1144
+ __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
1145
+ __ SmiToInteger64(scratch3, scratch2);
1146
+ return FieldOperand(scratch1,
1147
+ scratch3,
1148
+ times_pointer_size,
1149
+ Context::kHeaderSize);
1150
+ }
1151
+
1152
+
1153
+ static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
1154
+ Register key,
1155
+ Register parameter_map,
1156
+ Register scratch,
1157
+ Label* slow_case) {
1158
+ // Element is in arguments backing store, which is referenced by the
1159
+ // second element of the parameter_map. The parameter_map register
1160
+ // must be loaded with the parameter map of the arguments object and is
1161
+ // overwritten.
1162
+ const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
1163
+ Register backing_store = parameter_map;
1164
+ __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
1165
+ Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
1166
+ __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
1167
+ __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
1168
+ __ cmpq(key, scratch);
1169
+ __ j(greater_equal, slow_case);
1170
+ __ SmiToInteger64(scratch, key);
1171
+ return FieldOperand(backing_store,
1172
+ scratch,
1173
+ times_pointer_size,
1174
+ FixedArray::kHeaderSize);
1175
+ }
1176
+
1177
+
1178
+ void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
1179
+ // ----------- S t a t e -------------
1180
+ // -- rax : key
1181
+ // -- rdx : receiver
1182
+ // -- rsp[0] : return address
1183
+ // -----------------------------------
1184
+ Label slow, notin;
1185
+ Operand mapped_location =
1186
+ GenerateMappedArgumentsLookup(
1187
+ masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
1188
+ __ movq(rax, mapped_location);
1189
+ __ Ret();
1190
+ __ bind(&notin);
1191
+ // The unmapped lookup expects that the parameter map is in rbx.
1192
+ Operand unmapped_location =
1193
+ GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
1194
+ __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
1195
+ __ j(equal, &slow);
1196
+ __ movq(rax, unmapped_location);
1197
+ __ Ret();
1198
+ __ bind(&slow);
1199
+ GenerateMiss(masm, false);
1200
+ }
1201
+
1202
+
1203
+ void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
1204
+ // ----------- S t a t e -------------
1205
+ // -- rax : value
1206
+ // -- rcx : key
1207
+ // -- rdx : receiver
1208
+ // -- rsp[0] : return address
1209
+ // -----------------------------------
1210
+ Label slow, notin;
1211
+ Operand mapped_location = GenerateMappedArgumentsLookup(
1212
+ masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
1213
+ __ movq(mapped_location, rax);
1214
+ __ lea(r9, mapped_location);
1215
+ __ movq(r8, rax);
1216
+ __ RecordWrite(rbx, r9, r8);
1217
+ __ Ret();
1218
+ __ bind(&notin);
1219
+ // The unmapped lookup expects that the parameter map is in rbx.
1220
+ Operand unmapped_location =
1221
+ GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
1222
+ __ movq(unmapped_location, rax);
1223
+ __ lea(r9, unmapped_location);
1224
+ __ movq(r8, rax);
1225
+ __ RecordWrite(rbx, r9, r8);
1226
+ __ Ret();
1227
+ __ bind(&slow);
1228
+ GenerateMiss(masm, false);
1229
+ }
1230
+
1231
+
1232
+ void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
1233
+ int argc) {
1234
+ // ----------- S t a t e -------------
1235
+ // rcx : function name
1236
+ // rsp[0] : return address
1237
+ // rsp[8] : argument argc
1238
+ // rsp[16] : argument argc - 1
1239
+ // ...
1240
+ // rsp[argc * 8] : argument 1
1241
+ // rsp[(argc + 1) * 8] : argument 0 = receiver
1242
+ // -----------------------------------
1243
+ Label slow, notin;
1244
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1245
+ Operand mapped_location = GenerateMappedArgumentsLookup(
1246
+ masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
1247
+ __ movq(rdi, mapped_location);
1248
+ GenerateFunctionTailCall(masm, argc, &slow);
1249
+ __ bind(&notin);
1250
+ // The unmapped lookup expects that the parameter map is in rbx.
1251
+ Operand unmapped_location =
1252
+ GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
1253
+ __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
1254
+ __ j(equal, &slow);
1255
+ __ movq(rdi, unmapped_location);
1256
+ GenerateFunctionTailCall(masm, argc, &slow);
1257
+ __ bind(&slow);
1258
+ GenerateMiss(masm, argc);
1259
+ }
1260
+
1261
+
1199
1262
  void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1200
1263
  // ----------- S t a t e -------------
1201
1264
  // -- rax : receiver
@@ -144,7 +144,7 @@ bool LCodeGen::GeneratePrologue() {
144
144
  // when called as functions (without an explicit receiver
145
145
  // object). rcx is zero for method calls and non-zero for function
146
146
  // calls.
147
- if (info_->is_strict_mode()) {
147
+ if (info_->is_strict_mode() || info_->is_native()) {
148
148
  Label ok;
149
149
  __ testq(rcx, rcx);
150
150
  __ j(zero, &ok, Label::kNear);
@@ -197,7 +197,7 @@ bool LCodeGen::GeneratePrologue() {
197
197
  FastNewContextStub stub(heap_slots);
198
198
  __ CallStub(&stub);
199
199
  } else {
200
- __ CallRuntime(Runtime::kNewContext, 1);
200
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
201
201
  }
202
202
  RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
203
203
  // Context is returned in both rax and rsi. It replaces the context
@@ -275,11 +275,25 @@ bool LCodeGen::GenerateJumpTable() {
275
275
 
276
276
  bool LCodeGen::GenerateDeferredCode() {
277
277
  ASSERT(is_generating());
278
- for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
279
- LDeferredCode* code = deferred_[i];
280
- __ bind(code->entry());
281
- code->Generate();
282
- __ jmp(code->exit());
278
+ if (deferred_.length() > 0) {
279
+ for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
280
+ LDeferredCode* code = deferred_[i];
281
+ __ bind(code->entry());
282
+ code->Generate();
283
+ __ jmp(code->exit());
284
+ }
285
+
286
+ // Pad code to ensure that the last piece of deferred code have
287
+ // room for lazy bailout.
288
+ while ((masm()->pc_offset() - LastSafepointEnd())
289
+ < Deoptimizer::patch_size()) {
290
+ int padding = masm()->pc_offset() - LastSafepointEnd();
291
+ if (padding > 9) {
292
+ __ nop(9);
293
+ } else {
294
+ __ nop(padding);
295
+ }
296
+ }
283
297
  }
284
298
 
285
299
  // Deferred code is the last part of the instruction sequence. Mark
@@ -692,7 +706,7 @@ void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
692
706
 
693
707
 
694
708
  void LCodeGen::RecordPosition(int position) {
695
- if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
709
+ if (position == RelocInfo::kNoPosition) return;
696
710
  masm()->positions_recorder()->RecordPosition(position);
697
711
  }
698
712
 
@@ -809,6 +823,8 @@ void LCodeGen::DoModI(LModI* instr) {
809
823
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
810
824
  __ j(not_zero, &done, Label::kNear);
811
825
  DeoptimizeIf(no_condition, instr->environment());
826
+ } else {
827
+ __ jmp(&done, Label::kNear);
812
828
  }
813
829
  __ bind(&positive_dividend);
814
830
  __ andl(dividend, Immediate(divisor - 1));
@@ -1200,17 +1216,24 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1200
1216
  }
1201
1217
 
1202
1218
 
1203
- void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1219
+ void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
1204
1220
  Register result = ToRegister(instr->result());
1205
1221
  Register array = ToRegister(instr->InputAt(0));
1206
- __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
1222
+ __ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
1207
1223
  }
1208
1224
 
1209
1225
 
1210
- void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
1226
+ void LCodeGen::DoElementsKind(LElementsKind* instr) {
1211
1227
  Register result = ToRegister(instr->result());
1212
- Register array = ToRegister(instr->InputAt(0));
1213
- __ movl(result, FieldOperand(array, ExternalPixelArray::kLengthOffset));
1228
+ Register input = ToRegister(instr->InputAt(0));
1229
+
1230
+ // Load map into |result|.
1231
+ __ movq(result, FieldOperand(input, HeapObject::kMapOffset));
1232
+ // Load the map's "bit field 2" into |result|. We only need the first byte.
1233
+ __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset));
1234
+ // Retrieve elements_kind from bit field 2.
1235
+ __ and_(result, Immediate(Map::kElementsKindMask));
1236
+ __ shr(result, Immediate(Map::kElementsKindShift));
1214
1237
  }
1215
1238
 
1216
1239
 
@@ -1311,6 +1334,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1311
1334
 
1312
1335
  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
1313
1336
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1337
+ __ nop(); // Signals no inlined code.
1314
1338
  }
1315
1339
 
1316
1340
 
@@ -1347,7 +1371,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
1347
1371
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1348
1372
  int false_block = chunk_->LookupDestination(instr->false_block_id());
1349
1373
 
1350
- Representation r = instr->hydrogen()->representation();
1374
+ Representation r = instr->hydrogen()->value()->representation();
1351
1375
  if (r.IsInteger32()) {
1352
1376
  Register reg = ToRegister(instr->InputAt(0));
1353
1377
  __ testl(reg, reg);
@@ -1360,7 +1384,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
1360
1384
  } else {
1361
1385
  ASSERT(r.IsTagged());
1362
1386
  Register reg = ToRegister(instr->InputAt(0));
1363
- HType type = instr->hydrogen()->type();
1387
+ HType type = instr->hydrogen()->value()->type();
1364
1388
  if (type.IsBoolean()) {
1365
1389
  __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1366
1390
  EmitBranch(true_block, false_block, equal);
@@ -1371,82 +1395,99 @@ void LCodeGen::DoBranch(LBranch* instr) {
1371
1395
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
1372
1396
  Label* false_label = chunk_->GetAssemblyLabel(false_block);
1373
1397
 
1374
- __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1375
- __ j(equal, false_label);
1376
- __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1377
- __ j(equal, true_label);
1378
- __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1379
- __ j(equal, false_label);
1380
- __ Cmp(reg, Smi::FromInt(0));
1381
- __ j(equal, false_label);
1382
- __ JumpIfSmi(reg, true_label);
1383
-
1384
- // Test for double values. Plus/minus zero and NaN are false.
1385
- Label call_stub;
1386
- __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
1387
- Heap::kHeapNumberMapRootIndex);
1388
- __ j(not_equal, &call_stub, Label::kNear);
1389
-
1390
- // HeapNumber => false iff +0, -0, or NaN. These three cases set the
1391
- // zero flag when compared to zero using ucomisd.
1392
- __ xorps(xmm0, xmm0);
1393
- __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
1394
- __ j(zero, false_label);
1395
- __ jmp(true_label);
1396
-
1397
- // The conversion stub doesn't cause garbage collections so it's
1398
- // safe to not record a safepoint after the call.
1399
- __ bind(&call_stub);
1400
- ToBooleanStub stub;
1401
- __ Pushad();
1402
- __ push(reg);
1403
- __ CallStub(&stub);
1404
- __ testq(rax, rax);
1405
- __ Popad();
1406
- EmitBranch(true_block, false_block, not_zero);
1398
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1399
+ // Avoid deopts in the case where we've never executed this path before.
1400
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
1401
+
1402
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
1403
+ // undefined -> false.
1404
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1405
+ __ j(equal, false_label);
1406
+ }
1407
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
1408
+ // true -> true.
1409
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1410
+ __ j(equal, true_label);
1411
+ // false -> false.
1412
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1413
+ __ j(equal, false_label);
1414
+ }
1415
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
1416
+ // 'null' -> false.
1417
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
1418
+ __ j(equal, false_label);
1419
+ }
1420
+
1421
+ if (expected.Contains(ToBooleanStub::SMI)) {
1422
+ // Smis: 0 -> false, all other -> true.
1423
+ __ Cmp(reg, Smi::FromInt(0));
1424
+ __ j(equal, false_label);
1425
+ __ JumpIfSmi(reg, true_label);
1426
+ } else if (expected.NeedsMap()) {
1427
+ // If we need a map later and have a Smi -> deopt.
1428
+ __ testb(reg, Immediate(kSmiTagMask));
1429
+ DeoptimizeIf(zero, instr->environment());
1430
+ }
1431
+
1432
+ const Register map = kScratchRegister;
1433
+ if (expected.NeedsMap()) {
1434
+ __ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
1435
+
1436
+ if (expected.CanBeUndetectable()) {
1437
+ // Undetectable -> false.
1438
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
1439
+ Immediate(1 << Map::kIsUndetectable));
1440
+ __ j(not_zero, false_label);
1441
+ }
1442
+ }
1443
+
1444
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
1445
+ // spec object -> true.
1446
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
1447
+ __ j(above_equal, true_label);
1448
+ }
1449
+
1450
+ if (expected.Contains(ToBooleanStub::STRING)) {
1451
+ // String value -> false iff empty.
1452
+ Label not_string;
1453
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
1454
+ __ j(above_equal, &not_string, Label::kNear);
1455
+ __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
1456
+ __ j(not_zero, true_label);
1457
+ __ jmp(false_label);
1458
+ __ bind(&not_string);
1459
+ }
1460
+
1461
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
1462
+ // heap number -> false iff +0, -0, or NaN.
1463
+ Label not_heap_number;
1464
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
1465
+ __ j(not_equal, &not_heap_number, Label::kNear);
1466
+ __ xorps(xmm0, xmm0);
1467
+ __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
1468
+ __ j(zero, false_label);
1469
+ __ jmp(true_label);
1470
+ __ bind(&not_heap_number);
1471
+ }
1472
+
1473
+ // We've seen something for the first time -> deopt.
1474
+ DeoptimizeIf(no_condition, instr->environment());
1407
1475
  }
1408
1476
  }
1409
1477
  }
1410
1478
 
1411
1479
 
1412
- void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1480
+ void LCodeGen::EmitGoto(int block) {
1413
1481
  block = chunk_->LookupDestination(block);
1414
1482
  int next_block = GetNextEmittedBlock(current_block_);
1415
1483
  if (block != next_block) {
1416
- // Perform stack overflow check if this goto needs it before jumping.
1417
- if (deferred_stack_check != NULL) {
1418
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1419
- __ j(above_equal, chunk_->GetAssemblyLabel(block));
1420
- __ jmp(deferred_stack_check->entry());
1421
- deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1422
- } else {
1423
- __ jmp(chunk_->GetAssemblyLabel(block));
1424
- }
1484
+ __ jmp(chunk_->GetAssemblyLabel(block));
1425
1485
  }
1426
1486
  }
1427
1487
 
1428
1488
 
1429
- void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1430
- PushSafepointRegistersScope scope(this);
1431
- CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
1432
- }
1433
-
1434
-
1435
1489
  void LCodeGen::DoGoto(LGoto* instr) {
1436
- class DeferredStackCheck: public LDeferredCode {
1437
- public:
1438
- DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1439
- : LDeferredCode(codegen), instr_(instr) { }
1440
- virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1441
- private:
1442
- LGoto* instr_;
1443
- };
1444
-
1445
- DeferredStackCheck* deferred = NULL;
1446
- if (instr->include_stack_check()) {
1447
- deferred = new DeferredStackCheck(this, instr);
1448
- }
1449
- EmitGoto(instr->block_id(), deferred);
1490
+ EmitGoto(instr->block_id());
1450
1491
  }
1451
1492
 
1452
1493
 
@@ -1494,32 +1535,6 @@ void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1494
1535
  }
1495
1536
 
1496
1537
 
1497
- void LCodeGen::DoCmpID(LCmpID* instr) {
1498
- LOperand* left = instr->InputAt(0);
1499
- LOperand* right = instr->InputAt(1);
1500
- LOperand* result = instr->result();
1501
-
1502
- Label unordered;
1503
- if (instr->is_double()) {
1504
- // Don't base result on EFLAGS when a NaN is involved. Instead
1505
- // jump to the unordered case, which produces a false value.
1506
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1507
- __ j(parity_even, &unordered, Label::kNear);
1508
- } else {
1509
- EmitCmpI(left, right);
1510
- }
1511
-
1512
- Label done;
1513
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1514
- __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
1515
- __ j(cc, &done, Label::kNear);
1516
-
1517
- __ bind(&unordered);
1518
- __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
1519
- __ bind(&done);
1520
- }
1521
-
1522
-
1523
1538
  void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1524
1539
  LOperand* left = instr->InputAt(0);
1525
1540
  LOperand* right = instr->InputAt(1);
@@ -1540,23 +1555,7 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1540
1555
  }
1541
1556
 
1542
1557
 
1543
- void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1544
- Register left = ToRegister(instr->InputAt(0));
1545
- Register right = ToRegister(instr->InputAt(1));
1546
- Register result = ToRegister(instr->result());
1547
-
1548
- Label different, done;
1549
- __ cmpq(left, right);
1550
- __ j(not_equal, &different, Label::kNear);
1551
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1552
- __ jmp(&done, Label::kNear);
1553
- __ bind(&different);
1554
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1555
- __ bind(&done);
1556
- }
1557
-
1558
-
1559
- void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1558
+ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
1560
1559
  Register left = ToRegister(instr->InputAt(0));
1561
1560
  Register right = ToRegister(instr->InputAt(1));
1562
1561
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1567,75 +1566,16 @@ void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1567
1566
  }
1568
1567
 
1569
1568
 
1570
- void LCodeGen::DoCmpSymbolEq(LCmpSymbolEq* instr) {
1569
+ void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1571
1570
  Register left = ToRegister(instr->InputAt(0));
1572
- Register right = ToRegister(instr->InputAt(1));
1573
- Register result = ToRegister(instr->result());
1574
-
1575
- Label done;
1576
- __ cmpq(left, right);
1577
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1578
- __ j(not_equal, &done, Label::kNear);
1579
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1580
- __ bind(&done);
1581
- }
1582
-
1583
-
1584
- void LCodeGen::DoCmpSymbolEqAndBranch(LCmpSymbolEqAndBranch* instr) {
1585
- Register left = ToRegister(instr->InputAt(0));
1586
- Register right = ToRegister(instr->InputAt(1));
1587
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1588
1571
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1572
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
1589
1573
 
1590
- __ cmpq(left, right);
1574
+ __ cmpq(left, Immediate(instr->hydrogen()->right()));
1591
1575
  EmitBranch(true_block, false_block, equal);
1592
1576
  }
1593
1577
 
1594
1578
 
1595
- void LCodeGen::DoIsNull(LIsNull* instr) {
1596
- Register reg = ToRegister(instr->InputAt(0));
1597
- Register result = ToRegister(instr->result());
1598
-
1599
- // If the expression is known to be a smi, then it's
1600
- // definitely not null. Materialize false.
1601
- // Consider adding other type and representation tests too.
1602
- if (instr->hydrogen()->value()->type().IsSmi()) {
1603
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1604
- return;
1605
- }
1606
-
1607
- __ CompareRoot(reg, Heap::kNullValueRootIndex);
1608
- if (instr->is_strict()) {
1609
- ASSERT(Heap::kTrueValueRootIndex >= 0);
1610
- __ movl(result, Immediate(Heap::kTrueValueRootIndex));
1611
- Label load;
1612
- __ j(equal, &load, Label::kNear);
1613
- __ Set(result, Heap::kFalseValueRootIndex);
1614
- __ bind(&load);
1615
- __ LoadRootIndexed(result, result, 0);
1616
- } else {
1617
- Label false_value, true_value, done;
1618
- __ j(equal, &true_value, Label::kNear);
1619
- __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1620
- __ j(equal, &true_value, Label::kNear);
1621
- __ JumpIfSmi(reg, &false_value, Label::kNear);
1622
- // Check for undetectable objects by looking in the bit field in
1623
- // the map. The object has already been smi checked.
1624
- Register scratch = result;
1625
- __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1626
- __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
1627
- Immediate(1 << Map::kIsUndetectable));
1628
- __ j(not_zero, &true_value, Label::kNear);
1629
- __ bind(&false_value);
1630
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1631
- __ jmp(&done, Label::kNear);
1632
- __ bind(&true_value);
1633
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1634
- __ bind(&done);
1635
- }
1636
- }
1637
-
1638
-
1639
1579
  void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1640
1580
  Register reg = ToRegister(instr->InputAt(0));
1641
1581
 
@@ -1691,32 +1631,13 @@ Condition LCodeGen::EmitIsObject(Register input,
1691
1631
 
1692
1632
  __ movzxbl(kScratchRegister,
1693
1633
  FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
1694
- __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
1634
+ __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
1695
1635
  __ j(below, is_not_object);
1696
- __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
1636
+ __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
1697
1637
  return below_equal;
1698
1638
  }
1699
1639
 
1700
1640
 
1701
- void LCodeGen::DoIsObject(LIsObject* instr) {
1702
- Register reg = ToRegister(instr->InputAt(0));
1703
- Register result = ToRegister(instr->result());
1704
- Label is_false, is_true, done;
1705
-
1706
- Condition true_cond = EmitIsObject(reg, &is_false, &is_true);
1707
- __ j(true_cond, &is_true);
1708
-
1709
- __ bind(&is_false);
1710
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1711
- __ jmp(&done);
1712
-
1713
- __ bind(&is_true);
1714
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1715
-
1716
- __ bind(&done);
1717
- }
1718
-
1719
-
1720
1641
  void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1721
1642
  Register reg = ToRegister(instr->InputAt(0));
1722
1643
 
@@ -1731,22 +1652,6 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1731
1652
  }
1732
1653
 
1733
1654
 
1734
- void LCodeGen::DoIsSmi(LIsSmi* instr) {
1735
- LOperand* input_operand = instr->InputAt(0);
1736
- Register result = ToRegister(instr->result());
1737
- if (input_operand->IsRegister()) {
1738
- Register input = ToRegister(input_operand);
1739
- __ CheckSmiToIndicator(result, input);
1740
- } else {
1741
- Operand input = ToOperand(instr->InputAt(0));
1742
- __ CheckSmiToIndicator(result, input);
1743
- }
1744
- // result is zero if input is a smi, and one otherwise.
1745
- ASSERT(Heap::kFalseValueRootIndex == Heap::kTrueValueRootIndex + 1);
1746
- __ LoadRootIndexed(result, result, Heap::kTrueValueRootIndex);
1747
- }
1748
-
1749
-
1750
1655
  void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1751
1656
  int true_block = chunk_->LookupDestination(instr->true_block_id());
1752
1657
  int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1763,25 +1668,6 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1763
1668
  }
1764
1669
 
1765
1670
 
1766
- void LCodeGen::DoIsUndetectable(LIsUndetectable* instr) {
1767
- Register input = ToRegister(instr->InputAt(0));
1768
- Register result = ToRegister(instr->result());
1769
-
1770
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1771
- Label false_label, done;
1772
- __ JumpIfSmi(input, &false_label);
1773
- __ movq(result, FieldOperand(input, HeapObject::kMapOffset));
1774
- __ testb(FieldOperand(result, Map::kBitFieldOffset),
1775
- Immediate(1 << Map::kIsUndetectable));
1776
- __ j(zero, &false_label);
1777
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1778
- __ jmp(&done);
1779
- __ bind(&false_label);
1780
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1781
- __ bind(&done);
1782
- }
1783
-
1784
-
1785
1671
  void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1786
1672
  Register input = ToRegister(instr->InputAt(0));
1787
1673
  Register temp = ToRegister(instr->TempAt(0));
@@ -1797,7 +1683,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1797
1683
  }
1798
1684
 
1799
1685
 
1800
- static InstanceType TestType(HHasInstanceType* instr) {
1686
+ static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
1801
1687
  InstanceType from = instr->from();
1802
1688
  InstanceType to = instr->to();
1803
1689
  if (from == FIRST_TYPE) return to;
@@ -1806,7 +1692,7 @@ static InstanceType TestType(HHasInstanceType* instr) {
1806
1692
  }
1807
1693
 
1808
1694
 
1809
- static Condition BranchCondition(HHasInstanceType* instr) {
1695
+ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
1810
1696
  InstanceType from = instr->from();
1811
1697
  InstanceType to = instr->to();
1812
1698
  if (from == to) return equal;
@@ -1817,25 +1703,6 @@ static Condition BranchCondition(HHasInstanceType* instr) {
1817
1703
  }
1818
1704
 
1819
1705
 
1820
- void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1821
- Register input = ToRegister(instr->InputAt(0));
1822
- Register result = ToRegister(instr->result());
1823
-
1824
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1825
- __ testl(input, Immediate(kSmiTagMask));
1826
- Label done, is_false;
1827
- __ j(zero, &is_false);
1828
- __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1829
- __ j(NegateCondition(BranchCondition(instr->hydrogen())),
1830
- &is_false, Label::kNear);
1831
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1832
- __ jmp(&done, Label::kNear);
1833
- __ bind(&is_false);
1834
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1835
- __ bind(&done);
1836
- }
1837
-
1838
-
1839
1706
  void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1840
1707
  Register input = ToRegister(instr->InputAt(0));
1841
1708
 
@@ -1865,21 +1732,6 @@ void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1865
1732
  }
1866
1733
 
1867
1734
 
1868
- void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1869
- Register input = ToRegister(instr->InputAt(0));
1870
- Register result = ToRegister(instr->result());
1871
-
1872
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1873
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1874
- __ testl(FieldOperand(input, String::kHashFieldOffset),
1875
- Immediate(String::kContainsCachedArrayIndexMask));
1876
- Label done;
1877
- __ j(zero, &done, Label::kNear);
1878
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1879
- __ bind(&done);
1880
- }
1881
-
1882
-
1883
1735
  void LCodeGen::DoHasCachedArrayIndexAndBranch(
1884
1736
  LHasCachedArrayIndexAndBranch* instr) {
1885
1737
  Register input = ToRegister(instr->InputAt(0));
@@ -1901,26 +1753,27 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1901
1753
  Register input,
1902
1754
  Register temp) {
1903
1755
  __ JumpIfSmi(input, is_false);
1904
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
1756
+ __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1905
1757
  __ j(below, is_false);
1906
1758
 
1907
1759
  // Map is now in temp.
1908
1760
  // Functions have class 'Function'.
1909
- __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
1761
+ __ CmpInstanceType(temp, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
1910
1762
  if (class_name->IsEqualTo(CStrVector("Function"))) {
1911
- __ j(equal, is_true);
1763
+ __ j(above_equal, is_true);
1912
1764
  } else {
1913
- __ j(equal, is_false);
1765
+ __ j(above_equal, is_false);
1914
1766
  }
1915
1767
 
1916
1768
  // Check if the constructor in the map is a function.
1917
1769
  __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
1918
1770
 
1919
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
1920
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1921
- // LAST_JS_OBJECT_TYPE.
1922
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1923
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1771
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last type and
1772
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1773
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1774
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1775
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1776
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
1924
1777
 
1925
1778
  // Objects with a non-function constructor have class 'Object'.
1926
1779
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
@@ -1947,29 +1800,6 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
1947
1800
  }
1948
1801
 
1949
1802
 
1950
- void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
1951
- Register input = ToRegister(instr->InputAt(0));
1952
- Register result = ToRegister(instr->result());
1953
- ASSERT(input.is(result));
1954
- Register temp = ToRegister(instr->TempAt(0));
1955
- Handle<String> class_name = instr->hydrogen()->class_name();
1956
- Label done;
1957
- Label is_true, is_false;
1958
-
1959
- EmitClassOfTest(&is_true, &is_false, class_name, input, temp);
1960
-
1961
- __ j(not_equal, &is_false);
1962
-
1963
- __ bind(&is_true);
1964
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
1965
- __ jmp(&done, Label::kNear);
1966
-
1967
- __ bind(&is_false);
1968
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
1969
- __ bind(&done);
1970
- }
1971
-
1972
-
1973
1803
  void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1974
1804
  Register input = ToRegister(instr->InputAt(0));
1975
1805
  Register temp = ToRegister(instr->TempAt(0));
@@ -2013,19 +1843,6 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2013
1843
  }
2014
1844
 
2015
1845
 
2016
- void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
2017
- int true_block = chunk_->LookupDestination(instr->true_block_id());
2018
- int false_block = chunk_->LookupDestination(instr->false_block_id());
2019
-
2020
- InstanceofStub stub(InstanceofStub::kNoFlags);
2021
- __ push(ToRegister(instr->InputAt(0)));
2022
- __ push(ToRegister(instr->InputAt(1)));
2023
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2024
- __ testq(rax, rax);
2025
- EmitBranch(true_block, false_block, zero);
2026
- }
2027
-
2028
-
2029
1846
  void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2030
1847
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2031
1848
  public:
@@ -2103,7 +1920,6 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2103
1920
  __ push(ToRegister(instr->InputAt(0)));
2104
1921
  __ Push(instr->function());
2105
1922
 
2106
- Register temp = ToRegister(instr->TempAt(0));
2107
1923
  static const int kAdditionalDelta = 10;
2108
1924
  int delta =
2109
1925
  masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
@@ -2157,25 +1973,6 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
2157
1973
  }
2158
1974
 
2159
1975
 
2160
- void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
2161
- Token::Value op = instr->op();
2162
- int true_block = chunk_->LookupDestination(instr->true_block_id());
2163
- int false_block = chunk_->LookupDestination(instr->false_block_id());
2164
-
2165
- Handle<Code> ic = CompareIC::GetUninitialized(op);
2166
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2167
-
2168
- // The compare stub expects compare condition and the input operands
2169
- // reversed for GT and LTE.
2170
- Condition condition = TokenToCondition(op, false);
2171
- if (op == Token::GT || op == Token::LTE) {
2172
- condition = ReverseCondition(condition);
2173
- }
2174
- __ testq(rax, rax);
2175
- EmitBranch(true_block, false_block, condition);
2176
- }
2177
-
2178
-
2179
1976
  void LCodeGen::DoReturn(LReturn* instr) {
2180
1977
  if (FLAG_trace) {
2181
1978
  // Preserve the return value on the stack and rely on the runtime
@@ -2409,7 +2206,7 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2409
2206
  Register input = ToRegister(instr->InputAt(0));
2410
2207
  __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
2411
2208
  if (FLAG_debug_code) {
2412
- Label done;
2209
+ Label done, ok, fail;
2413
2210
  __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
2414
2211
  Heap::kFixedArrayMapRootIndex);
2415
2212
  __ j(equal, &done, Label::kNear);
@@ -2419,11 +2216,19 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
2419
2216
  Register temp((result.is(rax)) ? rbx : rax);
2420
2217
  __ push(temp);
2421
2218
  __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
2422
- __ movzxbq(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
2423
- __ subq(temp, Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
2424
- __ cmpq(temp, Immediate(kExternalArrayTypeCount));
2219
+ __ movzxbq(temp, FieldOperand(temp, Map::kBitField2Offset));
2220
+ __ and_(temp, Immediate(Map::kElementsKindMask));
2221
+ __ shr(temp, Immediate(Map::kElementsKindShift));
2222
+ __ cmpl(temp, Immediate(JSObject::FAST_ELEMENTS));
2223
+ __ j(equal, &ok, Label::kNear);
2224
+ __ cmpl(temp, Immediate(JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
2225
+ __ j(less, &fail, Label::kNear);
2226
+ __ cmpl(temp, Immediate(JSObject::LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
2227
+ __ j(less_equal, &ok, Label::kNear);
2228
+ __ bind(&fail);
2229
+ __ Abort("Check for fast or external elements failed");
2230
+ __ bind(&ok);
2425
2231
  __ pop(temp);
2426
- __ Check(below, "Check for fast elements failed.");
2427
2232
  __ bind(&done);
2428
2233
  }
2429
2234
  }
@@ -2457,16 +2262,13 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2457
2262
 
2458
2263
 
2459
2264
  void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2460
- Register elements = ToRegister(instr->elements());
2461
- Register key = ToRegister(instr->key());
2462
2265
  Register result = ToRegister(instr->result());
2463
- ASSERT(result.is(elements));
2464
2266
 
2465
2267
  // Load the result.
2466
- __ movq(result, FieldOperand(elements,
2467
- key,
2468
- times_pointer_size,
2469
- FixedArray::kHeaderSize));
2268
+ __ movq(result,
2269
+ BuildFastArrayOperand(instr->elements(), instr->key(),
2270
+ JSObject::FAST_ELEMENTS,
2271
+ FixedArray::kHeaderSize - kHeapObjectTag));
2470
2272
 
2471
2273
  // Check for the hole value.
2472
2274
  if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2476,55 +2278,82 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2476
2278
  }
2477
2279
 
2478
2280
 
2479
- Operand LCodeGen::BuildExternalArrayOperand(LOperand* external_pointer,
2480
- LOperand* key,
2481
- ExternalArrayType array_type) {
2482
- Register external_pointer_reg = ToRegister(external_pointer);
2483
- int shift_size = ExternalArrayTypeToShiftSize(array_type);
2281
+ void LCodeGen::DoLoadKeyedFastDoubleElement(
2282
+ LLoadKeyedFastDoubleElement* instr) {
2283
+ XMMRegister result(ToDoubleRegister(instr->result()));
2284
+
2285
+ if (instr->hydrogen()->RequiresHoleCheck()) {
2286
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2287
+ sizeof(kHoleNanLower32);
2288
+ Operand hole_check_operand = BuildFastArrayOperand(
2289
+ instr->elements(),
2290
+ instr->key(),
2291
+ JSObject::FAST_DOUBLE_ELEMENTS,
2292
+ offset);
2293
+ __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
2294
+ DeoptimizeIf(equal, instr->environment());
2295
+ }
2296
+
2297
+ Operand double_load_operand = BuildFastArrayOperand(
2298
+ instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS,
2299
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag);
2300
+ __ movsd(result, double_load_operand);
2301
+ }
2302
+
2303
+
2304
+ Operand LCodeGen::BuildFastArrayOperand(
2305
+ LOperand* elements_pointer,
2306
+ LOperand* key,
2307
+ JSObject::ElementsKind elements_kind,
2308
+ uint32_t offset) {
2309
+ Register elements_pointer_reg = ToRegister(elements_pointer);
2310
+ int shift_size = ElementsKindToShiftSize(elements_kind);
2484
2311
  if (key->IsConstantOperand()) {
2485
2312
  int constant_value = ToInteger32(LConstantOperand::cast(key));
2486
2313
  if (constant_value & 0xF0000000) {
2487
2314
  Abort("array index constant value too big");
2488
2315
  }
2489
- return Operand(external_pointer_reg, constant_value * (1 << shift_size));
2316
+ return Operand(elements_pointer_reg,
2317
+ constant_value * (1 << shift_size) + offset);
2490
2318
  } else {
2491
2319
  ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
2492
- return Operand(external_pointer_reg, ToRegister(key), scale_factor, 0);
2320
+ return Operand(elements_pointer_reg, ToRegister(key),
2321
+ scale_factor, offset);
2493
2322
  }
2494
2323
  }
2495
2324
 
2496
2325
 
2497
2326
  void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2498
2327
  LLoadKeyedSpecializedArrayElement* instr) {
2499
- ExternalArrayType array_type = instr->array_type();
2500
- Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
2501
- instr->key(), array_type));
2502
- if (array_type == kExternalFloatArray) {
2328
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
2329
+ Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2330
+ instr->key(), elements_kind, 0));
2331
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
2503
2332
  XMMRegister result(ToDoubleRegister(instr->result()));
2504
2333
  __ movss(result, operand);
2505
2334
  __ cvtss2sd(result, result);
2506
- } else if (array_type == kExternalDoubleArray) {
2335
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
2507
2336
  __ movsd(ToDoubleRegister(instr->result()), operand);
2508
2337
  } else {
2509
2338
  Register result(ToRegister(instr->result()));
2510
- switch (array_type) {
2511
- case kExternalByteArray:
2339
+ switch (elements_kind) {
2340
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
2512
2341
  __ movsxbq(result, operand);
2513
2342
  break;
2514
- case kExternalUnsignedByteArray:
2515
- case kExternalPixelArray:
2343
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
2344
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
2516
2345
  __ movzxbq(result, operand);
2517
2346
  break;
2518
- case kExternalShortArray:
2347
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
2519
2348
  __ movsxwq(result, operand);
2520
2349
  break;
2521
- case kExternalUnsignedShortArray:
2350
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
2522
2351
  __ movzxwq(result, operand);
2523
2352
  break;
2524
- case kExternalIntArray:
2353
+ case JSObject::EXTERNAL_INT_ELEMENTS:
2525
2354
  __ movsxlq(result, operand);
2526
2355
  break;
2527
- case kExternalUnsignedIntArray:
2356
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
2528
2357
  __ movl(result, operand);
2529
2358
  __ testl(result, result);
2530
2359
  // TODO(danno): we could be more clever here, perhaps having a special
@@ -2532,8 +2361,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2532
2361
  // happens, and generate code that returns a double rather than int.
2533
2362
  DeoptimizeIf(negative, instr->environment());
2534
2363
  break;
2535
- case kExternalFloatArray:
2536
- case kExternalDoubleArray:
2364
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
2365
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
2366
+ case JSObject::FAST_ELEMENTS:
2367
+ case JSObject::FAST_DOUBLE_ELEMENTS:
2368
+ case JSObject::DICTIONARY_ELEMENTS:
2369
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
2537
2370
  UNREACHABLE();
2538
2371
  break;
2539
2372
  }
@@ -2608,9 +2441,27 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2608
2441
  ASSERT(function.is(rdi)); // Required by InvokeFunction.
2609
2442
  ASSERT(ToRegister(instr->result()).is(rax));
2610
2443
 
2611
- // If the receiver is null or undefined, we have to pass the global object
2612
- // as a receiver.
2444
+ // If the receiver is null or undefined, we have to pass the global
2445
+ // object as a receiver to normal functions. Values have to be
2446
+ // passed unchanged to builtins and strict-mode functions.
2613
2447
  Label global_object, receiver_ok;
2448
+
2449
+ // Do not transform the receiver to object for strict mode
2450
+ // functions.
2451
+ __ movq(kScratchRegister,
2452
+ FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2453
+ __ testb(FieldOperand(kScratchRegister,
2454
+ SharedFunctionInfo::kStrictModeByteOffset),
2455
+ Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
2456
+ __ j(not_equal, &receiver_ok, Label::kNear);
2457
+
2458
+ // Do not transform the receiver to object for builtins.
2459
+ __ testb(FieldOperand(kScratchRegister,
2460
+ SharedFunctionInfo::kNativeByteOffset),
2461
+ Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2462
+ __ j(not_equal, &receiver_ok, Label::kNear);
2463
+
2464
+ // Normal function. Replace undefined or null with global receiver.
2614
2465
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
2615
2466
  __ j(equal, &global_object, Label::kNear);
2616
2467
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
@@ -2619,7 +2470,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2619
2470
  // The receiver should be a JS object.
2620
2471
  Condition is_smi = __ CheckSmi(receiver);
2621
2472
  DeoptimizeIf(is_smi, instr->environment());
2622
- __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
2473
+ __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
2623
2474
  DeoptimizeIf(below, instr->environment());
2624
2475
  __ jmp(&receiver_ok, Label::kNear);
2625
2476
 
@@ -2627,8 +2478,9 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2627
2478
  // TODO(kmillikin): We have a hydrogen value for the global object. See
2628
2479
  // if it's better to use it than to explicitly fetch it from the context
2629
2480
  // here.
2630
- __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
2631
- __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2481
+ __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_INDEX));
2482
+ __ movq(receiver,
2483
+ FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
2632
2484
  __ bind(&receiver_ok);
2633
2485
 
2634
2486
  // Copy the arguments to this function possibly from the
@@ -2662,7 +2514,8 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2662
2514
  pointers,
2663
2515
  env->deoptimization_index());
2664
2516
  v8::internal::ParameterCount actual(rax);
2665
- __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
2517
+ __ InvokeFunction(function, actual, CALL_FUNCTION,
2518
+ safepoint_generator, CALL_AS_METHOD);
2666
2519
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2667
2520
  }
2668
2521
 
@@ -2673,6 +2526,12 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
2673
2526
  }
2674
2527
 
2675
2528
 
2529
+ void LCodeGen::DoThisFunction(LThisFunction* instr) {
2530
+ Register result = ToRegister(instr->result());
2531
+ __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2532
+ }
2533
+
2534
+
2676
2535
  void LCodeGen::DoContext(LContext* instr) {
2677
2536
  Register result = ToRegister(instr->result());
2678
2537
  __ movq(result, rsi);
@@ -2683,8 +2542,7 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
2683
2542
  Register context = ToRegister(instr->context());
2684
2543
  Register result = ToRegister(instr->result());
2685
2544
  __ movq(result,
2686
- Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2687
- __ movq(result, FieldOperand(result, JSFunction::kContextOffset));
2545
+ Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2688
2546
  }
2689
2547
 
2690
2548
 
@@ -2855,6 +2713,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2855
2713
  XMMRegister xmm_scratch = xmm0;
2856
2714
  Register output_reg = ToRegister(instr->result());
2857
2715
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2716
+ Label done;
2858
2717
 
2859
2718
  if (CpuFeatures::IsSupported(SSE4_1)) {
2860
2719
  CpuFeatures::Scope scope(SSE4_1);
@@ -2869,13 +2728,20 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2869
2728
  __ cmpl(output_reg, Immediate(0x80000000));
2870
2729
  DeoptimizeIf(equal, instr->environment());
2871
2730
  } else {
2731
+ // Deoptimize on negative inputs.
2872
2732
  __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2873
2733
  __ ucomisd(input_reg, xmm_scratch);
2874
-
2734
+ DeoptimizeIf(below, instr->environment());
2875
2735
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2876
- DeoptimizeIf(below_equal, instr->environment());
2877
- } else {
2878
- DeoptimizeIf(below, instr->environment());
2736
+ // Check for negative zero.
2737
+ Label positive_sign;
2738
+ __ j(above, &positive_sign, Label::kNear);
2739
+ __ movmskpd(output_reg, input_reg);
2740
+ __ testq(output_reg, Immediate(1));
2741
+ DeoptimizeIf(not_zero, instr->environment());
2742
+ __ Set(output_reg, 0);
2743
+ __ jmp(&done);
2744
+ __ bind(&positive_sign);
2879
2745
  }
2880
2746
 
2881
2747
  // Use truncating instruction (OK because input is positive).
@@ -2885,6 +2751,7 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2885
2751
  __ cmpl(output_reg, Immediate(0x80000000));
2886
2752
  DeoptimizeIf(equal, instr->environment());
2887
2753
  }
2754
+ __ bind(&done);
2888
2755
  }
2889
2756
 
2890
2757
 
@@ -3075,7 +2942,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3075
2942
  RegisterEnvironmentForDeoptimization(env);
3076
2943
  SafepointGenerator generator(this, pointers, env->deoptimization_index());
3077
2944
  ParameterCount count(instr->arity());
3078
- __ InvokeFunction(rdi, count, CALL_FUNCTION, generator);
2945
+ __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3079
2946
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3080
2947
  }
3081
2948
 
@@ -3194,33 +3061,37 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3194
3061
 
3195
3062
  void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3196
3063
  LStoreKeyedSpecializedArrayElement* instr) {
3197
- ExternalArrayType array_type = instr->array_type();
3198
- Operand operand(BuildExternalArrayOperand(instr->external_pointer(),
3199
- instr->key(), array_type));
3200
- if (array_type == kExternalFloatArray) {
3064
+ JSObject::ElementsKind elements_kind = instr->elements_kind();
3065
+ Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3066
+ instr->key(), elements_kind, 0));
3067
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
3201
3068
  XMMRegister value(ToDoubleRegister(instr->value()));
3202
3069
  __ cvtsd2ss(value, value);
3203
3070
  __ movss(operand, value);
3204
- } else if (array_type == kExternalDoubleArray) {
3071
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
3205
3072
  __ movsd(operand, ToDoubleRegister(instr->value()));
3206
3073
  } else {
3207
3074
  Register value(ToRegister(instr->value()));
3208
- switch (array_type) {
3209
- case kExternalPixelArray:
3210
- case kExternalByteArray:
3211
- case kExternalUnsignedByteArray:
3075
+ switch (elements_kind) {
3076
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
3077
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
3078
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3212
3079
  __ movb(operand, value);
3213
3080
  break;
3214
- case kExternalShortArray:
3215
- case kExternalUnsignedShortArray:
3081
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
3082
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3216
3083
  __ movw(operand, value);
3217
3084
  break;
3218
- case kExternalIntArray:
3219
- case kExternalUnsignedIntArray:
3085
+ case JSObject::EXTERNAL_INT_ELEMENTS:
3086
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
3220
3087
  __ movl(operand, value);
3221
3088
  break;
3222
- case kExternalFloatArray:
3223
- case kExternalDoubleArray:
3089
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
3090
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
3091
+ case JSObject::FAST_ELEMENTS:
3092
+ case JSObject::FAST_DOUBLE_ELEMENTS:
3093
+ case JSObject::DICTIONARY_ELEMENTS:
3094
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
3224
3095
  UNREACHABLE();
3225
3096
  break;
3226
3097
  }
@@ -3229,12 +3100,22 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3229
3100
 
3230
3101
 
3231
3102
  void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3232
- if (instr->length()->IsRegister()) {
3233
- __ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
3103
+ if (instr->index()->IsConstantOperand()) {
3104
+ if (instr->length()->IsRegister()) {
3105
+ __ cmpq(ToRegister(instr->length()),
3106
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
3107
+ } else {
3108
+ __ cmpq(ToOperand(instr->length()),
3109
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
3110
+ }
3234
3111
  } else {
3235
- __ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
3112
+ if (instr->length()->IsRegister()) {
3113
+ __ cmpq(ToRegister(instr->length()), ToRegister(instr->index()));
3114
+ } else {
3115
+ __ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
3116
+ }
3236
3117
  }
3237
- DeoptimizeIf(above_equal, instr->environment());
3118
+ DeoptimizeIf(below_equal, instr->environment());
3238
3119
  }
3239
3120
 
3240
3121
 
@@ -3269,6 +3150,25 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3269
3150
  }
3270
3151
 
3271
3152
 
3153
+ void LCodeGen::DoStoreKeyedFastDoubleElement(
3154
+ LStoreKeyedFastDoubleElement* instr) {
3155
+ XMMRegister value = ToDoubleRegister(instr->value());
3156
+ Label have_value;
3157
+
3158
+ __ ucomisd(value, value);
3159
+ __ j(parity_odd, &have_value); // NaN.
3160
+
3161
+ __ Set(kScratchRegister, BitCast<uint64_t>(
3162
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
3163
+ __ movq(value, kScratchRegister);
3164
+
3165
+ __ bind(&have_value);
3166
+ Operand double_store_operand = BuildFastArrayOperand(
3167
+ instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS,
3168
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag);
3169
+ __ movsd(double_store_operand, value);
3170
+ }
3171
+
3272
3172
  void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3273
3173
  ASSERT(ToRegister(instr->object()).is(rdx));
3274
3174
  ASSERT(ToRegister(instr->key()).is(rcx));
@@ -3300,95 +3200,78 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3300
3200
  };
3301
3201
 
3302
3202
  Register string = ToRegister(instr->string());
3303
- Register index = no_reg;
3304
- int const_index = -1;
3305
- if (instr->index()->IsConstantOperand()) {
3306
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3307
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3308
- if (!Smi::IsValid(const_index)) {
3309
- // Guaranteed to be out of bounds because of the assert above.
3310
- // So the bounds check that must dominate this instruction must
3311
- // have deoptimized already.
3312
- if (FLAG_debug_code) {
3313
- __ Abort("StringCharCodeAt: out of bounds index.");
3314
- }
3315
- // No code needs to be generated.
3316
- return;
3317
- }
3318
- } else {
3319
- index = ToRegister(instr->index());
3320
- }
3203
+ Register index = ToRegister(instr->index());
3321
3204
  Register result = ToRegister(instr->result());
3322
3205
 
3323
3206
  DeferredStringCharCodeAt* deferred =
3324
3207
  new DeferredStringCharCodeAt(this, instr);
3325
3208
 
3326
- Label flat_string, ascii_string, done;
3327
-
3328
3209
  // Fetch the instance type of the receiver into result register.
3329
3210
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
3330
3211
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
3331
3212
 
3332
- // We need special handling for non-sequential strings.
3333
- STATIC_ASSERT(kSeqStringTag == 0);
3334
- __ testb(result, Immediate(kStringRepresentationMask));
3335
- __ j(zero, &flat_string, Label::kNear);
3213
+ // We need special handling for indirect strings.
3214
+ Label check_sequential;
3215
+ __ testb(result, Immediate(kIsIndirectStringMask));
3216
+ __ j(zero, &check_sequential, Label::kNear);
3336
3217
 
3337
- // Handle cons strings and go to deferred code for the rest.
3338
- __ testb(result, Immediate(kIsConsStringMask));
3339
- __ j(zero, deferred->entry());
3218
+ // Dispatch on the indirect string shape: slice or cons.
3219
+ Label cons_string;
3220
+ __ testb(result, Immediate(kSlicedNotConsMask));
3221
+ __ j(zero, &cons_string, Label::kNear);
3340
3222
 
3341
- // ConsString.
3223
+ // Handle slices.
3224
+ Label indirect_string_loaded;
3225
+ __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
3226
+ __ addq(index, result);
3227
+ __ movq(string, FieldOperand(string, SlicedString::kParentOffset));
3228
+ __ jmp(&indirect_string_loaded, Label::kNear);
3229
+
3230
+ // Handle conses.
3342
3231
  // Check whether the right hand side is the empty string (i.e. if
3343
3232
  // this is really a flat string in a cons string). If that is not
3344
3233
  // the case we would rather go to the runtime system now to flatten
3345
3234
  // the string.
3235
+ __ bind(&cons_string);
3346
3236
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
3347
3237
  Heap::kEmptyStringRootIndex);
3348
3238
  __ j(not_equal, deferred->entry());
3349
- // Get the first of the two strings and load its instance type.
3350
3239
  __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
3240
+
3241
+ __ bind(&indirect_string_loaded);
3351
3242
  __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
3352
3243
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
3353
- // If the first cons component is also non-flat, then go to runtime.
3244
+
3245
+ // Check whether the string is sequential. The only non-sequential
3246
+ // shapes we support have just been unwrapped above.
3247
+ __ bind(&check_sequential);
3354
3248
  STATIC_ASSERT(kSeqStringTag == 0);
3355
3249
  __ testb(result, Immediate(kStringRepresentationMask));
3356
3250
  __ j(not_zero, deferred->entry());
3357
3251
 
3358
- // Check for ASCII or two-byte string.
3359
- __ bind(&flat_string);
3252
+ // Dispatch on the encoding: ASCII or two-byte.
3253
+ Label ascii_string;
3360
3254
  STATIC_ASSERT(kAsciiStringTag != 0);
3361
3255
  __ testb(result, Immediate(kStringEncodingMask));
3362
3256
  __ j(not_zero, &ascii_string, Label::kNear);
3363
3257
 
3364
3258
  // Two-byte string.
3365
3259
  // Load the two-byte character code into the result register.
3260
+ Label done;
3366
3261
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3367
- if (instr->index()->IsConstantOperand()) {
3368
- __ movzxwl(result,
3369
- FieldOperand(string,
3370
- SeqTwoByteString::kHeaderSize +
3371
- (kUC16Size * const_index)));
3372
- } else {
3373
- __ movzxwl(result, FieldOperand(string,
3374
- index,
3375
- times_2,
3376
- SeqTwoByteString::kHeaderSize));
3377
- }
3262
+ __ movzxwl(result, FieldOperand(string,
3263
+ index,
3264
+ times_2,
3265
+ SeqTwoByteString::kHeaderSize));
3378
3266
  __ jmp(&done, Label::kNear);
3379
3267
 
3380
3268
  // ASCII string.
3381
3269
  // Load the byte into the result register.
3382
3270
  __ bind(&ascii_string);
3383
- if (instr->index()->IsConstantOperand()) {
3384
- __ movzxbl(result, FieldOperand(string,
3385
- SeqAsciiString::kHeaderSize + const_index));
3386
- } else {
3387
- __ movzxbl(result, FieldOperand(string,
3388
- index,
3389
- times_1,
3390
- SeqAsciiString::kHeaderSize));
3391
- }
3271
+ __ movzxbl(result, FieldOperand(string,
3272
+ index,
3273
+ times_1,
3274
+ SeqAsciiString::kHeaderSize));
3392
3275
  __ bind(&done);
3393
3276
  __ bind(deferred->exit());
3394
3277
  }
@@ -3564,8 +3447,9 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3564
3447
 
3565
3448
  void LCodeGen::EmitNumberUntagD(Register input_reg,
3566
3449
  XMMRegister result_reg,
3450
+ bool deoptimize_on_undefined,
3567
3451
  LEnvironment* env) {
3568
- Label load_smi, heap_number, done;
3452
+ Label load_smi, done;
3569
3453
 
3570
3454
  // Smi check.
3571
3455
  __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
@@ -3573,18 +3457,23 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
3573
3457
  // Heap number map check.
3574
3458
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3575
3459
  Heap::kHeapNumberMapRootIndex);
3576
- __ j(equal, &heap_number, Label::kNear);
3460
+ if (deoptimize_on_undefined) {
3461
+ DeoptimizeIf(not_equal, env);
3462
+ } else {
3463
+ Label heap_number;
3464
+ __ j(equal, &heap_number, Label::kNear);
3577
3465
 
3578
- __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3579
- DeoptimizeIf(not_equal, env);
3466
+ __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3467
+ DeoptimizeIf(not_equal, env);
3580
3468
 
3581
- // Convert undefined to NaN. Compute NaN as 0/0.
3582
- __ xorps(result_reg, result_reg);
3583
- __ divsd(result_reg, result_reg);
3584
- __ jmp(&done, Label::kNear);
3469
+ // Convert undefined to NaN. Compute NaN as 0/0.
3470
+ __ xorps(result_reg, result_reg);
3471
+ __ divsd(result_reg, result_reg);
3472
+ __ jmp(&done, Label::kNear);
3585
3473
 
3474
+ __ bind(&heap_number);
3475
+ }
3586
3476
  // Heap number to XMM conversion.
3587
- __ bind(&heap_number);
3588
3477
  __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3589
3478
  __ jmp(&done, Label::kNear);
3590
3479
 
@@ -3675,7 +3564,9 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3675
3564
  Register input_reg = ToRegister(input);
3676
3565
  XMMRegister result_reg = ToDoubleRegister(result);
3677
3566
 
3678
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
3567
+ EmitNumberUntagD(input_reg, result_reg,
3568
+ instr->hydrogen()->deoptimize_on_undefined(),
3569
+ instr->environment());
3679
3570
  }
3680
3571
 
3681
3572
 
@@ -4019,29 +3910,6 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
4019
3910
  }
4020
3911
 
4021
3912
 
4022
- void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
4023
- Register input = ToRegister(instr->InputAt(0));
4024
- Register result = ToRegister(instr->result());
4025
- Label true_label;
4026
- Label false_label;
4027
- Label done;
4028
-
4029
- Condition final_branch_condition = EmitTypeofIs(&true_label,
4030
- &false_label,
4031
- input,
4032
- instr->type_literal());
4033
- __ j(final_branch_condition, &true_label);
4034
- __ bind(&false_label);
4035
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
4036
- __ jmp(&done, Label::kNear);
4037
-
4038
- __ bind(&true_label);
4039
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
4040
-
4041
- __ bind(&done);
4042
- }
4043
-
4044
-
4045
3913
  void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
4046
3914
  ASSERT(!operand->IsDoubleRegister());
4047
3915
  if (operand->IsConstantOperand()) {
@@ -4096,6 +3964,10 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4096
3964
  __ CompareRoot(input, Heap::kFalseValueRootIndex);
4097
3965
  final_branch_condition = equal;
4098
3966
 
3967
+ } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
3968
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
3969
+ final_branch_condition = equal;
3970
+
4099
3971
  } else if (type_name->Equals(heap()->undefined_symbol())) {
4100
3972
  __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
4101
3973
  __ j(equal, true_label);
@@ -4108,17 +3980,19 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4108
3980
 
4109
3981
  } else if (type_name->Equals(heap()->function_symbol())) {
4110
3982
  __ JumpIfSmi(input, false_label);
4111
- __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
3983
+ __ CmpObjectType(input, FIRST_CALLABLE_SPEC_OBJECT_TYPE, input);
4112
3984
  final_branch_condition = above_equal;
4113
3985
 
4114
3986
  } else if (type_name->Equals(heap()->object_symbol())) {
4115
3987
  __ JumpIfSmi(input, false_label);
4116
- __ CompareRoot(input, Heap::kNullValueRootIndex);
4117
- __ j(equal, true_label);
4118
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
3988
+ if (!FLAG_harmony_typeof) {
3989
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
3990
+ __ j(equal, true_label);
3991
+ }
3992
+ __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
4119
3993
  __ j(below, false_label);
4120
- __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
4121
- __ j(above_equal, false_label);
3994
+ __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3995
+ __ j(above, false_label);
4122
3996
  // Check for undetectable objects => false.
4123
3997
  __ testb(FieldOperand(input, Map::kBitFieldOffset),
4124
3998
  Immediate(1 << Map::kIsUndetectable));
@@ -4133,25 +4007,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
4133
4007
  }
4134
4008
 
4135
4009
 
4136
- void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
4137
- Register result = ToRegister(instr->result());
4138
- Label true_label;
4139
- Label done;
4140
-
4141
- EmitIsConstructCall(result);
4142
- __ j(equal, &true_label, Label::kNear);
4143
-
4144
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
4145
- __ jmp(&done, Label::kNear);
4146
-
4147
- __ bind(&true_label);
4148
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
4149
-
4150
-
4151
- __ bind(&done);
4152
- }
4153
-
4154
-
4155
4010
  void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4156
4011
  Register temp = ToRegister(instr->TempAt(0));
4157
4012
  int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -4232,15 +4087,48 @@ void LCodeGen::DoIn(LIn* instr) {
4232
4087
  }
4233
4088
 
4234
4089
 
4090
+ void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4091
+ {
4092
+ PushSafepointRegistersScope scope(this);
4093
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4094
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4095
+ RegisterLazyDeoptimization(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
4096
+ }
4097
+
4098
+ // The gap code includes the restoring of the safepoint registers.
4099
+ int pc = masm()->pc_offset();
4100
+ safepoints_.SetPcAfterGap(pc);
4101
+ }
4102
+
4103
+
4235
4104
  void LCodeGen::DoStackCheck(LStackCheck* instr) {
4236
- // Perform stack overflow check.
4237
- Label done;
4238
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4239
- __ j(above_equal, &done, Label::kNear);
4105
+ class DeferredStackCheck: public LDeferredCode {
4106
+ public:
4107
+ DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4108
+ : LDeferredCode(codegen), instr_(instr) { }
4109
+ virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4110
+ private:
4111
+ LStackCheck* instr_;
4112
+ };
4240
4113
 
4241
- StackCheckStub stub;
4242
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4243
- __ bind(&done);
4114
+ if (instr->hydrogen()->is_function_entry()) {
4115
+ // Perform stack overflow check.
4116
+ Label done;
4117
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4118
+ __ j(above_equal, &done, Label::kNear);
4119
+ StackCheckStub stub;
4120
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4121
+ __ bind(&done);
4122
+ } else {
4123
+ ASSERT(instr->hydrogen()->is_backwards_branch());
4124
+ // Perform stack overflow check if this goto needs it before jumping.
4125
+ DeferredStackCheck* deferred_stack_check =
4126
+ new DeferredStackCheck(this, instr);
4127
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4128
+ __ j(below, deferred_stack_check->entry());
4129
+ __ bind(instr->done_label());
4130
+ deferred_stack_check->SetExit(instr->done_label());
4131
+ }
4244
4132
  }
4245
4133
 
4246
4134