libv8 3.3.10.4 → 3.5.10.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -82,8 +82,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
 
   Label non_function_call;
   // Check that function is not a smi.
-  __ test(edi, Immediate(kSmiTagMask));
-  __ j(zero, &non_function_call);
+  __ JumpIfSmi(edi, &non_function_call);
   // Check that function is a JSFunction.
   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   __ j(not_equal, &non_function_call);
@@ -140,8 +139,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   // edi: constructor
   __ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
   // Will both indicate a NULL and a Smi
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &rt_call);
+  __ JumpIfSmi(eax, &rt_call);
   // edi: constructor
   // eax: initial map (if proven valid below)
   __ CmpObjectType(eax, MAP_TYPE, ebx);
@@ -340,11 +338,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
340
338
  Handle<Code> code =
341
339
  masm->isolate()->builtins()->HandleApiCallConstruct();
342
340
  ParameterCount expected(0);
343
- __ InvokeCode(code, expected, expected,
344
- RelocInfo::CODE_TARGET, CALL_FUNCTION);
341
+ __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
342
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
345
343
  } else {
346
344
  ParameterCount actual(eax);
347
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
345
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
346
+ NullCallWrapper(), CALL_AS_METHOD);
348
347
  }
349
348
 
350
349
  // Restore context from the frame.
@@ -356,12 +355,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
356
355
  Label use_receiver, exit;
357
356
 
358
357
  // If the result is a smi, it is *not* an object in the ECMA sense.
359
- __ test(eax, Immediate(kSmiTagMask));
360
- __ j(zero, &use_receiver);
358
+ __ JumpIfSmi(eax, &use_receiver);
361
359
 
362
360
  // If the type of the result (stored in its map) is less than
363
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
364
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
361
+ // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
362
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
365
363
  __ j(above_equal, &exit);
366
364
 
367
365
  // Throw away the result of the constructor invocation and use the
@@ -375,7 +373,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
375
373
  __ LeaveConstructFrame();
376
374
 
377
375
  // Remove caller arguments from the stack and return.
378
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
376
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
379
377
  __ pop(ecx);
380
378
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
381
379
  __ push(ecx);
@@ -443,7 +441,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
443
441
  RelocInfo::CODE_TARGET);
444
442
  } else {
445
443
  ParameterCount actual(eax);
446
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
444
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
445
+ NullCallWrapper(), CALL_AS_METHOD);
447
446
  }
448
447
 
449
448
  // Exit the JS frame. Notice that this also removes the empty
@@ -594,8 +593,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
594
593
  Label non_function;
595
594
  // 1 ~ return address.
596
595
  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
597
- __ test(edi, Immediate(kSmiTagMask));
598
- __ j(zero, &non_function);
596
+ __ JumpIfSmi(edi, &non_function);
599
597
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
600
598
  __ j(not_equal, &non_function);
601
599
 
@@ -613,8 +611,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
613
611
  __ j(not_equal, &shift_arguments);
614
612
 
615
613
  // Do not transform the receiver for natives (shared already in ebx).
616
- __ test_b(FieldOperand(ebx, SharedFunctionInfo::kES5NativeByteOffset),
617
- 1 << SharedFunctionInfo::kES5NativeBitWithinByte);
614
+ __ test_b(FieldOperand(ebx, SharedFunctionInfo::kNativeByteOffset),
615
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
618
616
  __ j(not_equal, &shift_arguments);
619
617
 
620
618
  // Compute the receiver in non-strict mode.
@@ -622,15 +620,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
622
620
 
623
621
  // Call ToObject on the receiver if it is not an object, or use the
624
622
  // global object if it is null or undefined.
625
- __ test(ebx, Immediate(kSmiTagMask));
626
- __ j(zero, &convert_to_object);
623
+ __ JumpIfSmi(ebx, &convert_to_object);
627
624
  __ cmp(ebx, factory->null_value());
628
625
  __ j(equal, &use_global_receiver);
629
626
  __ cmp(ebx, factory->undefined_value());
630
627
  __ j(equal, &use_global_receiver);
631
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
632
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
633
- __ CmpObjectType(ebx, FIRST_JS_OBJECT_TYPE, ecx);
628
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
629
+ __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
634
630
  __ j(above_equal, &shift_arguments);
635
631
 
636
632
  __ bind(&convert_to_object);
@@ -715,7 +711,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
715
711
  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
716
712
 
717
713
  ParameterCount expected(0);
718
- __ InvokeCode(Operand(edx), expected, expected, JUMP_FUNCTION);
714
+ __ InvokeCode(Operand(edx), expected, expected, JUMP_FUNCTION,
715
+ NullCallWrapper(), CALL_AS_METHOD);
719
716
  }
720
717
 
721
718
 
@@ -777,22 +774,20 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
777
774
  Factory* factory = masm->isolate()->factory();
778
775
 
779
776
  // Do not transform the receiver for natives (shared already in ecx).
780
- __ test_b(FieldOperand(ecx, SharedFunctionInfo::kES5NativeByteOffset),
781
- 1 << SharedFunctionInfo::kES5NativeBitWithinByte);
777
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
778
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
782
779
  __ j(not_equal, &push_receiver);
783
780
 
784
781
  // Compute the receiver in non-strict mode.
785
782
  // Call ToObject on the receiver if it is not an object, or use the
786
783
  // global object if it is null or undefined.
787
- __ test(ebx, Immediate(kSmiTagMask));
788
- __ j(zero, &call_to_object);
784
+ __ JumpIfSmi(ebx, &call_to_object);
789
785
  __ cmp(ebx, factory->null_value());
790
786
  __ j(equal, &use_global_receiver);
791
787
  __ cmp(ebx, factory->undefined_value());
792
788
  __ j(equal, &use_global_receiver);
793
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
794
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
795
- __ CmpObjectType(ebx, FIRST_JS_OBJECT_TYPE, ecx);
789
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
790
+ __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
796
791
  __ j(above_equal, &push_receiver);
797
792
 
798
793
  __ bind(&call_to_object);
@@ -845,7 +840,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
845
840
  ParameterCount actual(eax);
846
841
  __ SmiUntag(eax);
847
842
  __ mov(edi, Operand(ebp, 4 * kPointerSize));
848
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
843
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
844
+ NullCallWrapper(), CALL_AS_METHOD);
849
845
 
850
846
  __ LeaveInternalFrame();
851
847
  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
@@ -927,7 +923,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
927
923
  // Fill the FixedArray with the hole value. Inline the code if short.
928
924
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
929
925
  static const int kLoopUnfoldLimit = 4;
930
- ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
926
+ STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
931
927
  if (initial_capacity <= kLoopUnfoldLimit) {
932
928
  // Use a scratch register here to have only one reloc info when unfolding
933
929
  // the loop.
@@ -979,7 +975,7 @@ static void AllocateJSArray(MacroAssembler* masm,
979
975
 
980
976
  // Allocate the JSArray object together with space for a FixedArray with the
981
977
  // requested elements.
982
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
978
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
983
979
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
984
980
  times_half_pointer_size, // array_size is a smi.
985
981
  array_size,
@@ -1104,7 +1100,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
1104
1100
  __ bind(&argc_one_or_more);
1105
1101
  __ cmp(eax, 1);
1106
1102
  __ j(not_equal, &argc_two_or_more);
1107
- ASSERT(kSmiTag == 0);
1103
+ STATIC_ASSERT(kSmiTag == 0);
1108
1104
  __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
1109
1105
  __ test(ecx, Operand(ecx));
1110
1106
  __ j(not_zero, &not_empty_array);
@@ -1159,7 +1155,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
1159
1155
 
1160
1156
  // Handle construction of an array from a list of arguments.
1161
1157
  __ bind(&argc_two_or_more);
1162
- ASSERT(kSmiTag == 0);
1158
+ STATIC_ASSERT(kSmiTag == 0);
1163
1159
  __ SmiTag(eax); // Convet argc to a smi.
1164
1160
  // eax: array_size (smi)
1165
1161
  // edi: constructor
@@ -1388,8 +1384,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1388
1384
  Label convert_argument;
1389
1385
  __ bind(&not_cached);
1390
1386
  STATIC_ASSERT(kSmiTag == 0);
1391
- __ test(eax, Immediate(kSmiTagMask));
1392
- __ j(zero, &convert_argument);
1387
+ __ JumpIfSmi(eax, &convert_argument);
1393
1388
  Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
1394
1389
  __ j(NegateCondition(is_string), &convert_argument);
1395
1390
  __ mov(ebx, eax);
@@ -1442,7 +1437,7 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1442
1437
  // Preserve the number of arguments on the stack. Must preserve eax,
1443
1438
  // ebx and ecx because these registers are used when copying the
1444
1439
  // arguments and the receiver.
1445
- ASSERT(kSmiTagSize == 1);
1440
+ STATIC_ASSERT(kSmiTagSize == 1);
1446
1441
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1447
1442
  __ push(edi);
1448
1443
  }
@@ -1456,7 +1451,7 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1456
1451
  __ leave();
1457
1452
 
1458
1453
  // Remove caller arguments from the stack.
1459
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1454
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1460
1455
  __ pop(ecx);
1461
1456
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
1462
1457
  __ push(ecx);
@@ -43,8 +43,7 @@ namespace internal {
43
43
  void ToNumberStub::Generate(MacroAssembler* masm) {
44
44
  // The ToNumber stub takes one argument in eax.
45
45
  Label check_heap_number, call_builtin;
46
- __ test(eax, Immediate(kSmiTagMask));
47
- __ j(not_zero, &check_heap_number, Label::kNear);
46
+ __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);
48
47
  __ ret(0);
49
48
 
50
49
  __ bind(&check_heap_number);
@@ -129,22 +128,19 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
129
128
 
130
129
  // Setup the object header.
131
130
  Factory* factory = masm->isolate()->factory();
132
- __ mov(FieldOperand(eax, HeapObject::kMapOffset), factory->context_map());
131
+ __ mov(FieldOperand(eax, HeapObject::kMapOffset),
132
+ factory->function_context_map());
133
133
  __ mov(FieldOperand(eax, Context::kLengthOffset),
134
134
  Immediate(Smi::FromInt(length)));
135
135
 
136
136
  // Setup the fixed slots.
137
137
  __ Set(ebx, Immediate(0)); // Set to NULL.
138
138
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
139
- __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
140
- __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
139
+ __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
141
140
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
142
141
 
143
- // Copy the global object from the surrounding context. We go through the
144
- // context in the function (ecx) to match the allocation behavior we have
145
- // in the runtime system (see Heap::AllocateFunctionContext).
146
- __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
147
- __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
142
+ // Copy the global object from the previous context.
143
+ __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
148
144
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
149
145
 
150
146
  // Initialize the rest of the slots to undefined.
@@ -159,7 +155,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
159
155
 
160
156
  // Need to collect. Call into runtime system.
161
157
  __ bind(&gc);
162
- __ TailCallRuntime(Runtime::kNewContext, 1, 1);
158
+ __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
163
159
  }
164
160
 
165
161
 
@@ -240,76 +236,146 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
240
236
  }
241
237
 
242
238
 
243
- // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
239
+ // The stub expects its argument on the stack and returns its result in tos_:
240
+ // zero for false, and a non-zero value for true.
244
241
  void ToBooleanStub::Generate(MacroAssembler* masm) {
245
- Label false_result, true_result, not_string;
246
- __ mov(eax, Operand(esp, 1 * kPointerSize));
242
+ Label patch;
247
243
  Factory* factory = masm->isolate()->factory();
244
+ const Register argument = eax;
245
+ const Register map = edx;
246
+
247
+ if (!types_.IsEmpty()) {
248
+ __ mov(argument, Operand(esp, 1 * kPointerSize));
249
+ }
248
250
 
249
251
  // undefined -> false
250
- __ cmp(eax, factory->undefined_value());
251
- __ j(equal, &false_result);
252
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
252
253
 
253
254
  // Boolean -> its value
254
- __ cmp(eax, factory->true_value());
255
- __ j(equal, &true_result);
256
- __ cmp(eax, factory->false_value());
257
- __ j(equal, &false_result);
255
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
256
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
258
257
 
259
- // Smis: 0 -> false, all other -> true
260
- __ test(eax, Operand(eax));
261
- __ j(zero, &false_result);
262
- __ test(eax, Immediate(kSmiTagMask));
263
- __ j(zero, &true_result);
258
+ // 'null' -> false.
259
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
264
260
 
265
- // 'null' => false.
266
- __ cmp(eax, factory->null_value());
267
- __ j(equal, &false_result, Label::kNear);
261
+ if (types_.Contains(SMI)) {
262
+ // Smis: 0 -> false, all other -> true
263
+ Label not_smi;
264
+ __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
265
+ // argument contains the correct return value already.
266
+ if (!tos_.is(argument)) {
267
+ __ mov(tos_, argument);
268
+ }
269
+ __ ret(1 * kPointerSize);
270
+ __ bind(&not_smi);
271
+ } else if (types_.NeedsMap()) {
272
+ // If we need a map later and have a Smi -> patch.
273
+ __ JumpIfSmi(argument, &patch, Label::kNear);
274
+ }
268
275
 
269
- // Get the map and type of the heap object.
270
- __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
271
- __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
276
+ if (types_.NeedsMap()) {
277
+ __ mov(map, FieldOperand(argument, HeapObject::kMapOffset));
278
+
279
+ if (types_.CanBeUndetectable()) {
280
+ __ test_b(FieldOperand(map, Map::kBitFieldOffset),
281
+ 1 << Map::kIsUndetectable);
282
+ // Undetectable -> false.
283
+ Label not_undetectable;
284
+ __ j(zero, &not_undetectable, Label::kNear);
285
+ __ Set(tos_, Immediate(0));
286
+ __ ret(1 * kPointerSize);
287
+ __ bind(&not_undetectable);
288
+ }
289
+ }
272
290
 
273
- // Undetectable => false.
274
- __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
275
- 1 << Map::kIsUndetectable);
276
- __ j(not_zero, &false_result, Label::kNear);
291
+ if (types_.Contains(SPEC_OBJECT)) {
292
+ // spec object -> true.
293
+ Label not_js_object;
294
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
295
+ __ j(below, &not_js_object, Label::kNear);
296
+ // argument contains the correct return value already.
297
+ if (!tos_.is(argument)) {
298
+ __ Set(tos_, Immediate(1));
299
+ }
300
+ __ ret(1 * kPointerSize);
301
+ __ bind(&not_js_object);
302
+ }
277
303
 
278
- // JavaScript object => true.
279
- __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
280
- __ j(above_equal, &true_result, Label::kNear);
304
+ if (types_.Contains(STRING)) {
305
+ // String value -> false iff empty.
306
+ Label not_string;
307
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
308
+ __ j(above_equal, &not_string, Label::kNear);
309
+ __ mov(tos_, FieldOperand(argument, String::kLengthOffset));
310
+ __ ret(1 * kPointerSize); // the string length is OK as the return value
311
+ __ bind(&not_string);
312
+ }
281
313
 
282
- // String value => false iff empty.
283
- __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
284
- __ j(above_equal, &not_string, Label::kNear);
285
- STATIC_ASSERT(kSmiTag == 0);
286
- __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
287
- __ j(zero, &false_result, Label::kNear);
288
- __ jmp(&true_result, Label::kNear);
314
+ if (types_.Contains(HEAP_NUMBER)) {
315
+ // heap number -> false iff +0, -0, or NaN.
316
+ Label not_heap_number, false_result;
317
+ __ cmp(map, factory->heap_number_map());
318
+ __ j(not_equal, &not_heap_number, Label::kNear);
319
+ __ fldz();
320
+ __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset));
321
+ __ FCmp();
322
+ __ j(zero, &false_result, Label::kNear);
323
+ // argument contains the correct return value already.
324
+ if (!tos_.is(argument)) {
325
+ __ Set(tos_, Immediate(1));
326
+ }
327
+ __ ret(1 * kPointerSize);
328
+ __ bind(&false_result);
329
+ __ Set(tos_, Immediate(0));
330
+ __ ret(1 * kPointerSize);
331
+ __ bind(&not_heap_number);
332
+ }
289
333
 
290
- __ bind(&not_string);
291
- // HeapNumber => false iff +0, -0, or NaN.
292
- __ cmp(edx, factory->heap_number_map());
293
- __ j(not_equal, &true_result, Label::kNear);
294
- __ fldz();
295
- __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
296
- __ FCmp();
297
- __ j(zero, &false_result, Label::kNear);
298
- // Fall through to |true_result|.
299
-
300
- // Return 1/0 for true/false in eax.
301
- __ bind(&true_result);
302
- __ mov(eax, 1);
303
- __ ret(1 * kPointerSize);
304
- __ bind(&false_result);
305
- __ mov(eax, 0);
306
- __ ret(1 * kPointerSize);
334
+ __ bind(&patch);
335
+ GenerateTypeTransition(masm);
336
+ }
337
+
338
+
339
+ void ToBooleanStub::CheckOddball(MacroAssembler* masm,
340
+ Type type,
341
+ Heap::RootListIndex value,
342
+ bool result) {
343
+ const Register argument = eax;
344
+ if (types_.Contains(type)) {
345
+ // If we see an expected oddball, return its ToBoolean value tos_.
346
+ Label different_value;
347
+ __ CompareRoot(argument, value);
348
+ __ j(not_equal, &different_value, Label::kNear);
349
+ if (!result) {
350
+ // If we have to return zero, there is no way around clearing tos_.
351
+ __ Set(tos_, Immediate(0));
352
+ } else if (!tos_.is(argument)) {
353
+ // If we have to return non-zero, we can re-use the argument if it is the
354
+ // same register as the result, because we never see Smi-zero here.
355
+ __ Set(tos_, Immediate(1));
356
+ }
357
+ __ ret(1 * kPointerSize);
358
+ __ bind(&different_value);
359
+ }
360
+ }
361
+
362
+
363
+ void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
364
+ __ pop(ecx); // Get return address, operand is now on top of stack.
365
+ __ push(Immediate(Smi::FromInt(tos_.code())));
366
+ __ push(Immediate(Smi::FromInt(types_.ToByte())));
367
+ __ push(ecx); // Push return address.
368
+ // Patch the caller to an appropriate specialized stub and return the
369
+ // operation result to the caller of the stub.
370
+ __ TailCallExternalReference(
371
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
372
+ 3,
373
+ 1);
307
374
  }
308
375
 
309
376
 
310
377
  class FloatingPointHelper : public AllStatic {
311
378
  public:
312
-
313
379
  enum ArgLocation {
314
380
  ARGS_ON_STACK,
315
381
  ARGS_IN_REGISTERS
@@ -427,10 +493,10 @@ static void IntegerConvert(MacroAssembler* masm,
427
493
  __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
428
494
  // If we have a match of the int32-but-not-Smi exponent then skip some
429
495
  // logic.
430
- __ j(equal, &right_exponent);
496
+ __ j(equal, &right_exponent, Label::kNear);
431
497
  // If the exponent is higher than that then go to slow case. This catches
432
498
  // numbers that don't fit in a signed int32, infinities and NaNs.
433
- __ j(less, &normal_exponent);
499
+ __ j(less, &normal_exponent, Label::kNear);
434
500
 
435
501
  {
436
502
  // Handle a big exponent. The only reason we have this code is that the
@@ -459,9 +525,9 @@ static void IntegerConvert(MacroAssembler* masm,
459
525
  __ or_(ecx, Operand(scratch2));
460
526
  // We have the answer in ecx, but we may need to negate it.
461
527
  __ test(scratch, Operand(scratch));
462
- __ j(positive, &done);
528
+ __ j(positive, &done, Label::kNear);
463
529
  __ neg(ecx);
464
- __ jmp(&done);
530
+ __ jmp(&done, Label::kNear);
465
531
  }
466
532
 
467
533
  __ bind(&normal_exponent);
@@ -474,7 +540,7 @@ static void IntegerConvert(MacroAssembler* masm,
474
540
  (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
475
541
  __ sub(Operand(scratch2), Immediate(zero_exponent));
476
542
  // ecx already has a Smi zero.
477
- __ j(less, &done);
543
+ __ j(less, &done, Label::kNear);
478
544
 
479
545
  // We have a shifted exponent between 0 and 30 in scratch2.
480
546
  __ shr(scratch2, HeapNumber::kExponentShift);
@@ -517,31 +583,17 @@ static void IntegerConvert(MacroAssembler* masm,
517
583
  }
518
584
 
519
585
 
520
- Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
521
- UnaryOpStub stub(key, type_info);
522
- return stub.GetCode();
523
- }
524
-
525
-
526
- const char* UnaryOpStub::GetName() {
527
- if (name_ != NULL) return name_;
528
- const int kMaxNameLength = 100;
529
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
530
- kMaxNameLength);
531
- if (name_ == NULL) return "OOM";
586
+ void UnaryOpStub::PrintName(StringStream* stream) {
532
587
  const char* op_name = Token::Name(op_);
533
588
  const char* overwrite_name = NULL; // Make g++ happy.
534
589
  switch (mode_) {
535
590
  case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
536
591
  case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
537
592
  }
538
-
539
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
540
- "UnaryOpStub_%s_%s_%s",
541
- op_name,
542
- overwrite_name,
543
- UnaryOpIC::GetName(operand_type_));
544
- return name_;
593
+ stream->Add("UnaryOpStub_%s_%s_%s",
594
+ op_name,
595
+ overwrite_name,
596
+ UnaryOpIC::GetName(operand_type_));
545
597
  }
546
598
 
547
599
 
@@ -566,12 +618,10 @@ void UnaryOpStub::Generate(MacroAssembler* masm) {
566
618
 
567
619
  void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
568
620
  __ pop(ecx); // Save return address.
569
- __ push(eax);
570
- // the argument is now on top.
571
- // Push this stub's key. Although the operation and the type info are
572
- // encoded into the key, the encoding is opaque, so push them too.
573
- __ push(Immediate(Smi::FromInt(MinorKey())));
621
+
622
+ __ push(eax); // the operand
574
623
  __ push(Immediate(Smi::FromInt(op_)));
624
+ __ push(Immediate(Smi::FromInt(mode_)));
575
625
  __ push(Immediate(Smi::FromInt(operand_type_)));
576
626
 
577
627
  __ push(ecx); // Push return address.
@@ -579,8 +629,7 @@ void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
579
629
  // Patch the caller to an appropriate specialized stub and return the
580
630
  // operation result to the caller of the stub.
581
631
  __ TailCallExternalReference(
582
- ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
583
- masm->isolate()), 4, 1);
632
+ ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
584
633
  }
585
634
 
586
635
 
@@ -627,8 +676,7 @@ void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
627
676
  Label::Distance undo_near,
628
677
  Label::Distance slow_near) {
629
678
  // Check whether the value is a smi.
630
- __ test(eax, Immediate(kSmiTagMask));
631
- __ j(not_zero, non_smi, non_smi_near);
679
+ __ JumpIfNotSmi(eax, non_smi, non_smi_near);
632
680
 
633
681
  // We can't handle -0 with smis, so use a type transition for that case.
634
682
  __ test(eax, Operand(eax));
@@ -648,8 +696,7 @@ void UnaryOpStub::GenerateSmiCodeBitNot(
648
696
  Label* non_smi,
649
697
  Label::Distance non_smi_near) {
650
698
  // Check whether the value is a smi.
651
- __ test(eax, Immediate(kSmiTagMask));
652
- __ j(not_zero, non_smi, non_smi_near);
699
+ __ JumpIfNotSmi(eax, non_smi, non_smi_near);
653
700
 
654
701
  // Flip bits and revert inverted smi-tag.
655
702
  __ not_(eax);
@@ -718,7 +765,7 @@ void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
718
765
 
719
766
  Label slow_allocate_heapnumber, heapnumber_allocated;
720
767
  __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
721
- __ jmp(&heapnumber_allocated);
768
+ __ jmp(&heapnumber_allocated, Label::kNear);
722
769
 
723
770
  __ bind(&slow_allocate_heapnumber);
724
771
  __ EnterInternalFrame();
@@ -853,14 +900,6 @@ void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
853
900
  }
854
901
 
855
902
 
856
- Handle<Code> GetBinaryOpStub(int key,
857
- BinaryOpIC::TypeInfo type_info,
858
- BinaryOpIC::TypeInfo result_type_info) {
859
- BinaryOpStub stub(key, type_info, result_type_info);
860
- return stub.GetCode();
861
- }
862
-
863
-
864
903
  void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
865
904
  __ pop(ecx); // Save return address.
866
905
  __ push(edx);
@@ -939,12 +978,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
939
978
  }
940
979
 
941
980
 
942
- const char* BinaryOpStub::GetName() {
943
- if (name_ != NULL) return name_;
944
- const int kMaxNameLength = 100;
945
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
946
- kMaxNameLength);
947
- if (name_ == NULL) return "OOM";
981
+ void BinaryOpStub::PrintName(StringStream* stream) {
948
982
  const char* op_name = Token::Name(op_);
949
983
  const char* overwrite_name;
950
984
  switch (mode_) {
@@ -953,13 +987,10 @@ const char* BinaryOpStub::GetName() {
953
987
  case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
954
988
  default: overwrite_name = "UnknownOverwrite"; break;
955
989
  }
956
-
957
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
958
- "BinaryOpStub_%s_%s_%s",
959
- op_name,
960
- overwrite_name,
961
- BinaryOpIC::GetName(operands_type_));
962
- return name_;
990
+ stream->Add("BinaryOpStub_%s_%s_%s",
991
+ op_name,
992
+ overwrite_name,
993
+ BinaryOpIC::GetName(operands_type_));
963
994
  }
964
995
 
965
996
 
@@ -1023,8 +1054,7 @@ void BinaryOpStub::GenerateSmiCode(
1023
1054
 
1024
1055
  // 3. Perform the smi check of the operands.
1025
1056
  STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
1026
- __ test(combined, Immediate(kSmiTagMask));
1027
- __ j(not_zero, &not_smis);
1057
+ __ JumpIfNotSmi(combined, &not_smis);
1028
1058
 
1029
1059
  // 4. Operands are both smis, perform the operation leaving the result in
1030
1060
  // eax and check the result if necessary.
@@ -1412,16 +1442,14 @@ void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
1412
1442
  Register right = eax;
1413
1443
 
1414
1444
  // Test if left operand is a string.
1415
- __ test(left, Immediate(kSmiTagMask));
1416
- __ j(zero, &call_runtime);
1445
+ __ JumpIfSmi(left, &call_runtime, Label::kNear);
1417
1446
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1418
- __ j(above_equal, &call_runtime);
1447
+ __ j(above_equal, &call_runtime, Label::kNear);
1419
1448
 
1420
1449
  // Test if right operand is a string.
1421
- __ test(right, Immediate(kSmiTagMask));
1422
- __ j(zero, &call_runtime);
1450
+ __ JumpIfSmi(right, &call_runtime, Label::kNear);
1423
1451
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1424
- __ j(above_equal, &call_runtime);
1452
+ __ j(above_equal, &call_runtime, Label::kNear);
1425
1453
 
1426
1454
  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1427
1455
  GenerateRegisterArgsPush(masm);
@@ -1535,7 +1563,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1535
1563
  } else {
1536
1564
  // Check if result fits in a smi.
1537
1565
  __ cmp(eax, 0xc0000000);
1538
- __ j(negative, &non_smi_result);
1566
+ __ j(negative, &non_smi_result, Label::kNear);
1539
1567
  }
1540
1568
  // Tag smi result and return.
1541
1569
  __ SmiTag(eax);
@@ -1555,8 +1583,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1555
1583
  // allocation of a heap number.
1556
1584
  __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1557
1585
  1 * kPointerSize : 2 * kPointerSize));
1558
- __ test(eax, Immediate(kSmiTagMask));
1559
- __ j(not_zero, &skip_allocation, Label::kNear);
1586
+ __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
1560
1587
  // Fall through!
1561
1588
  case NO_OVERWRITE:
1562
1589
  __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
@@ -1750,7 +1777,7 @@ void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1750
1777
  } else {
1751
1778
  // Check if result fits in a smi.
1752
1779
  __ cmp(eax, 0xc0000000);
1753
- __ j(negative, &non_smi_result);
1780
+ __ j(negative, &non_smi_result, Label::kNear);
1754
1781
  }
1755
1782
  // Tag smi result and return.
1756
1783
  __ SmiTag(eax);
@@ -1770,8 +1797,7 @@ void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1770
1797
  // allocation of a heap number.
1771
1798
  __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1772
1799
  1 * kPointerSize : 2 * kPointerSize));
1773
- __ test(eax, Immediate(kSmiTagMask));
1774
- __ j(not_zero, &skip_allocation, Label::kNear);
1800
+ __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
1775
1801
  // Fall through!
1776
1802
  case NO_OVERWRITE:
1777
1803
  __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
@@ -1950,7 +1976,7 @@ void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1950
1976
  } else {
1951
1977
  // Check if result fits in a smi.
1952
1978
  __ cmp(eax, 0xc0000000);
1953
- __ j(negative, &non_smi_result);
1979
+ __ j(negative, &non_smi_result, Label::kNear);
1954
1980
  }
1955
1981
  // Tag smi result and return.
1956
1982
  __ SmiTag(eax);
@@ -1970,8 +1996,7 @@ void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
1970
1996
  // allocation of a heap number.
1971
1997
  __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1972
1998
  1 * kPointerSize : 2 * kPointerSize));
1973
- __ test(eax, Immediate(kSmiTagMask));
1974
- __ j(not_zero, &skip_allocation, Label::kNear);
1999
+ __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
1975
2000
  // Fall through!
1976
2001
  case NO_OVERWRITE:
1977
2002
  __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
@@ -2054,8 +2079,7 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2054
2079
  Register right = eax;
2055
2080
 
2056
2081
  // Test if left operand is a string.
2057
- __ test(left, Immediate(kSmiTagMask));
2058
- __ j(zero, &left_not_string, Label::kNear);
2082
+ __ JumpIfSmi(left, &left_not_string, Label::kNear);
2059
2083
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
2060
2084
  __ j(above_equal, &left_not_string, Label::kNear);
2061
2085
 
@@ -2065,8 +2089,7 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2065
2089
 
2066
2090
  // Left operand is not a string, test right.
2067
2091
  __ bind(&left_not_string);
2068
- __ test(right, Immediate(kSmiTagMask));
2069
- __ j(zero, &call_runtime, Label::kNear);
2092
+ __ JumpIfSmi(right, &call_runtime, Label::kNear);
2070
2093
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
2071
2094
  __ j(above_equal, &call_runtime, Label::kNear);
2072
2095
 
@@ -2088,8 +2111,7 @@ void BinaryOpStub::GenerateHeapResultAllocation(
2088
2111
  case OVERWRITE_LEFT: {
2089
2112
  // If the argument in edx is already an object, we skip the
2090
2113
  // allocation of a heap number.
2091
- __ test(edx, Immediate(kSmiTagMask));
2092
- __ j(not_zero, &skip_allocation);
2114
+ __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear);
2093
2115
  // Allocate a heap number for the result. Keep eax and edx intact
2094
2116
  // for the possible runtime call.
2095
2117
  __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
@@ -2104,8 +2126,7 @@ void BinaryOpStub::GenerateHeapResultAllocation(
2104
2126
  case OVERWRITE_RIGHT:
2105
2127
  // If the argument in eax is already an object, we skip the
2106
2128
  // allocation of a heap number.
2107
- __ test(eax, Immediate(kSmiTagMask));
2108
- __ j(not_zero, &skip_allocation);
2129
+ __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
2109
2130
  // Fall through!
2110
2131
  case NO_OVERWRITE:
2111
2132
  // Allocate a heap number for the result. Keep eax and edx intact
@@ -2152,8 +2173,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
2152
2173
  Label input_not_smi;
2153
2174
  Label loaded;
2154
2175
  __ mov(eax, Operand(esp, kPointerSize));
2155
- __ test(eax, Immediate(kSmiTagMask));
2156
- __ j(not_zero, &input_not_smi, Label::kNear);
2176
+ __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
2157
2177
  // Input is a smi. Untag and load it onto the FPU stack.
2158
2178
  // Then load the low and high words of the double into ebx, edx.
2159
2179
  STATIC_ASSERT(kSmiTagSize == 1);
@@ -2431,8 +2451,7 @@ void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2431
2451
  Label load_arg2, done;
2432
2452
 
2433
2453
  // Test if arg1 is a Smi.
2434
- __ test(edx, Immediate(kSmiTagMask));
2435
- __ j(not_zero, &arg1_is_object);
2454
+ __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);
2436
2455
 
2437
2456
  __ SmiUntag(edx);
2438
2457
  __ jmp(&load_arg2);
@@ -2458,8 +2477,7 @@ void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2458
2477
  __ bind(&load_arg2);
2459
2478
 
2460
2479
  // Test if arg2 is a Smi.
2461
- __ test(eax, Immediate(kSmiTagMask));
2462
- __ j(not_zero, &arg2_is_object);
2480
+ __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);
2463
2481
 
2464
2482
  __ SmiUntag(eax);
2465
2483
  __ mov(ecx, eax);
@@ -2495,8 +2513,7 @@ void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
2495
2513
  Register number) {
2496
2514
  Label load_smi, done;
2497
2515
 
2498
- __ test(number, Immediate(kSmiTagMask));
2499
- __ j(zero, &load_smi, Label::kNear);
2516
+ __ JumpIfSmi(number, &load_smi, Label::kNear);
2500
2517
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
2501
2518
  __ jmp(&done, Label::kNear);
2502
2519
 
@@ -2513,16 +2530,12 @@ void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
2513
2530
  void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
2514
2531
  Label load_smi_edx, load_eax, load_smi_eax, done;
2515
2532
  // Load operand in edx into xmm0.
2516
- __ test(edx, Immediate(kSmiTagMask));
2517
- // Argument in edx is a smi.
2518
- __ j(zero, &load_smi_edx, Label::kNear);
2533
+ __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
2519
2534
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2520
2535
 
2521
2536
  __ bind(&load_eax);
2522
2537
  // Load operand in eax into xmm1.
2523
- __ test(eax, Immediate(kSmiTagMask));
2524
- // Argument in eax is a smi.
2525
- __ j(zero, &load_smi_eax, Label::kNear);
2538
+ __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
2526
2539
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2527
2540
  __ jmp(&done, Label::kNear);
2528
2541
 
@@ -2545,18 +2558,14 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
2545
2558
  Label* not_numbers) {
2546
2559
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
2547
2560
  // Load operand in edx into xmm0, or branch to not_numbers.
2548
- __ test(edx, Immediate(kSmiTagMask));
2549
- // Argument in edx is a smi.
2550
- __ j(zero, &load_smi_edx, Label::kNear);
2561
+ __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
2551
2562
  Factory* factory = masm->isolate()->factory();
2552
2563
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
2553
2564
  __ j(not_equal, not_numbers); // Argument in edx is not a number.
2554
2565
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2555
2566
  __ bind(&load_eax);
2556
2567
  // Load operand in eax into xmm1, or branch to not_numbers.
2557
- __ test(eax, Immediate(kSmiTagMask));
2558
- // Argument in eax is a smi.
2559
- __ j(zero, &load_smi_eax, Label::kNear);
2568
+ __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
2560
2569
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
2561
2570
  __ j(equal, &load_float_eax, Label::kNear);
2562
2571
  __ jmp(not_numbers); // Argument in eax is not a number.
@@ -2616,8 +2625,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
2616
2625
  } else {
2617
2626
  __ mov(scratch, Operand(esp, 2 * kPointerSize));
2618
2627
  }
2619
- __ test(scratch, Immediate(kSmiTagMask));
2620
- __ j(zero, &load_smi_1, Label::kNear);
2628
+ __ JumpIfSmi(scratch, &load_smi_1, Label::kNear);
2621
2629
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
2622
2630
  __ bind(&done_load_1);
2623
2631
 
@@ -2626,8 +2634,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
2626
2634
  } else {
2627
2635
  __ mov(scratch, Operand(esp, 1 * kPointerSize));
2628
2636
  }
2629
- __ test(scratch, Immediate(kSmiTagMask));
2630
- __ j(zero, &load_smi_2, Label::kNear);
2637
+ __ JumpIfSmi(scratch, &load_smi_2, Label::kNear);
2631
2638
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
2632
2639
  __ jmp(&done, Label::kNear);
2633
2640
 
@@ -2672,16 +2679,14 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
2672
2679
  Label test_other, done;
2673
2680
  // Test if both operands are floats or smi -> scratch=k_is_float;
2674
2681
  // Otherwise scratch = k_not_float.
2675
- __ test(edx, Immediate(kSmiTagMask));
2676
- __ j(zero, &test_other, Label::kNear); // argument in edx is OK
2682
+ __ JumpIfSmi(edx, &test_other, Label::kNear);
2677
2683
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
2678
2684
  Factory* factory = masm->isolate()->factory();
2679
2685
  __ cmp(scratch, factory->heap_number_map());
2680
2686
  __ j(not_equal, non_float); // argument in edx is not a number -> NaN
2681
2687
 
2682
2688
  __ bind(&test_other);
2683
- __ test(eax, Immediate(kSmiTagMask));
2684
- __ j(zero, &done, Label::kNear); // argument in eax is OK
2689
+ __ JumpIfSmi(eax, &done, Label::kNear);
2685
2690
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
2686
2691
  __ cmp(scratch, factory->heap_number_map());
2687
2692
  __ j(not_equal, non_float); // argument in eax is not a number -> NaN
@@ -2717,10 +2722,8 @@ void MathPowStub::Generate(MacroAssembler* masm) {
2717
2722
  Label exponent_nonsmi;
2718
2723
  Label base_nonsmi;
2719
2724
  // If the exponent is a heap number go to that specific case.
2720
- __ test(eax, Immediate(kSmiTagMask));
2721
- __ j(not_zero, &exponent_nonsmi);
2722
- __ test(edx, Immediate(kSmiTagMask));
2723
- __ j(not_zero, &base_nonsmi);
2725
+ __ JumpIfNotSmi(eax, &exponent_nonsmi);
2726
+ __ JumpIfNotSmi(edx, &base_nonsmi);
2724
2727
 
2725
2728
  // Optimized version when both exponent and base are smis.
2726
2729
  Label powi;
@@ -2792,8 +2795,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
2792
2795
 
2793
2796
  Label base_not_smi;
2794
2797
  Label handle_special_cases;
2795
- __ test(edx, Immediate(kSmiTagMask));
2796
- __ j(not_zero, &base_not_smi, Label::kNear);
2798
+ __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear);
2797
2799
  __ SmiUntag(edx);
2798
2800
  __ cvtsi2sd(xmm0, Operand(edx));
2799
2801
  __ jmp(&handle_special_cases, Label::kNear);
@@ -2865,8 +2867,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2865
2867
 
2866
2868
  // Check that the key is a smi.
2867
2869
  Label slow;
2868
- __ test(edx, Immediate(kSmiTagMask));
2869
- __ j(not_zero, &slow);
2870
+ __ JumpIfNotSmi(edx, &slow, Label::kNear);
2870
2871
 
2871
2872
  // Check if the calling frame is an arguments adaptor frame.
2872
2873
  Label adaptor;
@@ -2879,7 +2880,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2879
2880
  // through register eax. Use unsigned comparison to get negative
2880
2881
  // check for free.
2881
2882
  __ cmp(edx, Operand(eax));
2882
- __ j(above_equal, &slow);
2883
+ __ j(above_equal, &slow, Label::kNear);
2883
2884
 
2884
2885
  // Read the argument from the stack and return it.
2885
2886
  STATIC_ASSERT(kSmiTagSize == 1);
@@ -2895,7 +2896,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2895
2896
  __ bind(&adaptor);
2896
2897
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2897
2898
  __ cmp(edx, Operand(ecx));
2898
- __ j(above_equal, &slow);
2899
+ __ j(above_equal, &slow, Label::kNear);
2899
2900
 
2900
2901
  // Read the argument from the stack and return it.
2901
2902
  STATIC_ASSERT(kSmiTagSize == 1);
@@ -2915,33 +2916,277 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2915
2916
  }
2916
2917
 
2917
2918
 
2918
- void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
2919
+ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
2919
2920
  // esp[0] : return address
2920
2921
  // esp[4] : number of parameters
2921
2922
  // esp[8] : receiver displacement
2922
- // esp[16] : function
2923
+ // esp[12] : function
2924
+
2925
+ // Check if the calling frame is an arguments adaptor frame.
2926
+ Label runtime;
2927
+ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2928
+ __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2929
+ __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2930
+ __ j(not_equal, &runtime, Label::kNear);
2931
+
2932
+ // Patch the arguments.length and the parameters pointer.
2933
+ __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2934
+ __ mov(Operand(esp, 1 * kPointerSize), ecx);
2935
+ __ lea(edx, Operand(edx, ecx, times_2,
2936
+ StandardFrameConstants::kCallerSPOffset));
2937
+ __ mov(Operand(esp, 2 * kPointerSize), edx);
2938
+
2939
+ __ bind(&runtime);
2940
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2941
+ }
2942
+
2943
+
2944
+ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
2945
+ // esp[0] : return address
2946
+ // esp[4] : number of parameters (tagged)
2947
+ // esp[8] : receiver displacement
2948
+ // esp[12] : function
2949
+
2950
+ // ebx = parameter count (tagged)
2951
+ __ mov(ebx, Operand(esp, 1 * kPointerSize));
2952
+
2953
+ // Check if the calling frame is an arguments adaptor frame.
2954
+ // TODO(rossberg): Factor out some of the bits that are shared with the other
2955
+ // Generate* functions.
2956
+ Label runtime;
2957
+ Label adaptor_frame, try_allocate;
2958
+ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2959
+ __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2960
+ __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2961
+ __ j(equal, &adaptor_frame, Label::kNear);
2962
+
2963
+ // No adaptor, parameter count = argument count.
2964
+ __ mov(ecx, ebx);
2965
+ __ jmp(&try_allocate, Label::kNear);
2966
+
2967
+ // We have an adaptor frame. Patch the parameters pointer.
2968
+ __ bind(&adaptor_frame);
2969
+ __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2970
+ __ lea(edx, Operand(edx, ecx, times_2,
2971
+ StandardFrameConstants::kCallerSPOffset));
2972
+ __ mov(Operand(esp, 2 * kPointerSize), edx);
2973
+
2974
+ // ebx = parameter count (tagged)
2975
+ // ecx = argument count (tagged)
2976
+ // esp[4] = parameter count (tagged)
2977
+ // esp[8] = address of receiver argument
2978
+ // Compute the mapped parameter count = min(ebx, ecx) in ebx.
2979
+ __ cmp(ebx, Operand(ecx));
2980
+ __ j(less_equal, &try_allocate, Label::kNear);
2981
+ __ mov(ebx, ecx);
2982
+
2983
+ __ bind(&try_allocate);
2984
+
2985
+ // Save mapped parameter count.
2986
+ __ push(ebx);
2987
+
2988
+ // Compute the sizes of backing store, parameter map, and arguments object.
2989
+ // 1. Parameter map, has 2 extra words containing context and backing store.
2990
+ const int kParameterMapHeaderSize =
2991
+ FixedArray::kHeaderSize + 2 * kPointerSize;
2992
+ Label no_parameter_map;
2993
+ __ test(ebx, Operand(ebx));
2994
+ __ j(zero, &no_parameter_map, Label::kNear);
2995
+ __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
2996
+ __ bind(&no_parameter_map);
2997
+
2998
+ // 2. Backing store.
2999
+ __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
3000
+
3001
+ // 3. Arguments object.
3002
+ __ add(Operand(ebx), Immediate(Heap::kArgumentsObjectSize));
3003
+
3004
+ // Do the allocation of all three objects in one go.
3005
+ __ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT);
3006
+
3007
+ // eax = address of new object(s) (tagged)
3008
+ // ecx = argument count (tagged)
3009
+ // esp[0] = mapped parameter count (tagged)
3010
+ // esp[8] = parameter count (tagged)
3011
+ // esp[12] = address of receiver argument
3012
+ // Get the arguments boilerplate from the current (global) context into edi.
3013
+ Label has_mapped_parameters, copy;
3014
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3015
+ __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
3016
+ __ mov(ebx, Operand(esp, 0 * kPointerSize));
3017
+ __ test(ebx, Operand(ebx));
3018
+ __ j(not_zero, &has_mapped_parameters, Label::kNear);
3019
+ __ mov(edi, Operand(edi,
3020
+ Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
3021
+ __ jmp(&copy, Label::kNear);
3022
+
3023
+ __ bind(&has_mapped_parameters);
3024
+ __ mov(edi, Operand(edi,
3025
+ Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
3026
+ __ bind(&copy);
3027
+
3028
+ // eax = address of new object (tagged)
3029
+ // ebx = mapped parameter count (tagged)
3030
+ // ecx = argument count (tagged)
3031
+ // edi = address of boilerplate object (tagged)
3032
+ // esp[0] = mapped parameter count (tagged)
3033
+ // esp[8] = parameter count (tagged)
3034
+ // esp[12] = address of receiver argument
3035
+ // Copy the JS object part.
3036
+ for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3037
+ __ mov(edx, FieldOperand(edi, i));
3038
+ __ mov(FieldOperand(eax, i), edx);
3039
+ }
3040
+
3041
+ // Setup the callee in-object property.
3042
+ STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
3043
+ __ mov(edx, Operand(esp, 4 * kPointerSize));
3044
+ __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3045
+ Heap::kArgumentsCalleeIndex * kPointerSize),
3046
+ edx);
3047
+
3048
+ // Use the length (smi tagged) and set that as an in-object property too.
3049
+ STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
3050
+ __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3051
+ Heap::kArgumentsLengthIndex * kPointerSize),
3052
+ ecx);
3053
+
3054
+ // Setup the elements pointer in the allocated arguments object.
3055
+ // If we allocated a parameter map, edi will point there, otherwise to the
3056
+ // backing store.
3057
+ __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
3058
+ __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3059
+
3060
+ // eax = address of new object (tagged)
3061
+ // ebx = mapped parameter count (tagged)
3062
+ // ecx = argument count (tagged)
3063
+ // edi = address of parameter map or backing store (tagged)
3064
+ // esp[0] = mapped parameter count (tagged)
3065
+ // esp[8] = parameter count (tagged)
3066
+ // esp[12] = address of receiver argument
3067
+ // Free a register.
3068
+ __ push(eax);
3069
+
3070
+ // Initialize parameter map. If there are no mapped arguments, we're done.
3071
+ Label skip_parameter_map;
3072
+ __ test(ebx, Operand(ebx));
3073
+ __ j(zero, &skip_parameter_map);
3074
+
3075
+ __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3076
+ Immediate(FACTORY->non_strict_arguments_elements_map()));
3077
+ __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
3078
+ __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
3079
+ __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
3080
+ __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
3081
+ __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
3082
+
3083
+ // Copy the parameter slots and the holes in the arguments.
3084
+ // We need to fill in mapped_parameter_count slots. They index the context,
3085
+ // where parameters are stored in reverse order, at
3086
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3087
+ // The mapped parameter thus need to get indices
3088
+ // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3089
+ // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3090
+ // We loop from right to left.
3091
+ Label parameters_loop, parameters_test;
3092
+ __ push(ecx);
3093
+ __ mov(eax, Operand(esp, 2 * kPointerSize));
3094
+ __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
3095
+ __ add(ebx, Operand(esp, 4 * kPointerSize));
3096
+ __ sub(ebx, Operand(eax));
3097
+ __ mov(ecx, FACTORY->the_hole_value());
3098
+ __ mov(edx, edi);
3099
+ __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
3100
+ // eax = loop variable (tagged)
3101
+ // ebx = mapping index (tagged)
3102
+ // ecx = the hole value
3103
+ // edx = address of parameter map (tagged)
3104
+ // edi = address of backing store (tagged)
3105
+ // esp[0] = argument count (tagged)
3106
+ // esp[4] = address of new object (tagged)
3107
+ // esp[8] = mapped parameter count (tagged)
3108
+ // esp[16] = parameter count (tagged)
3109
+ // esp[20] = address of receiver argument
3110
+ __ jmp(&parameters_test, Label::kNear);
3111
+
3112
+ __ bind(&parameters_loop);
3113
+ __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3114
+ __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
3115
+ __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
3116
+ __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
3117
+ __ bind(&parameters_test);
3118
+ __ test(eax, Operand(eax));
3119
+ __ j(not_zero, &parameters_loop, Label::kNear);
3120
+ __ pop(ecx);
3121
+
3122
+ __ bind(&skip_parameter_map);
3123
+
3124
+ // ecx = argument count (tagged)
3125
+ // edi = address of backing store (tagged)
3126
+ // esp[0] = address of new object (tagged)
3127
+ // esp[4] = mapped parameter count (tagged)
3128
+ // esp[12] = parameter count (tagged)
3129
+ // esp[16] = address of receiver argument
3130
+ // Copy arguments header and remaining slots (if there are any).
3131
+ __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3132
+ Immediate(FACTORY->fixed_array_map()));
3133
+ __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
2923
3134
 
2924
- // The displacement is used for skipping the return address and the
2925
- // frame pointer on the stack. It is the offset of the last
2926
- // parameter (if any) relative to the frame pointer.
2927
- static const int kDisplacement = 2 * kPointerSize;
3135
+ Label arguments_loop, arguments_test;
3136
+ __ mov(ebx, Operand(esp, 1 * kPointerSize));
3137
+ __ mov(edx, Operand(esp, 4 * kPointerSize));
3138
+ __ sub(Operand(edx), ebx); // Is there a smarter way to do negative scaling?
3139
+ __ sub(Operand(edx), ebx);
3140
+ __ jmp(&arguments_test, Label::kNear);
3141
+
3142
+ __ bind(&arguments_loop);
3143
+ __ sub(Operand(edx), Immediate(kPointerSize));
3144
+ __ mov(eax, Operand(edx, 0));
3145
+ __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
3146
+ __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
3147
+
3148
+ __ bind(&arguments_test);
3149
+ __ cmp(ebx, Operand(ecx));
3150
+ __ j(less, &arguments_loop, Label::kNear);
3151
+
3152
+ // Restore.
3153
+ __ pop(eax); // Address of arguments object.
3154
+ __ pop(ebx); // Parameter count.
3155
+
3156
+ // Return and remove the on-stack parameters.
3157
+ __ ret(3 * kPointerSize);
3158
+
3159
+ // Do the runtime call to allocate the arguments object.
3160
+ __ bind(&runtime);
3161
+ __ pop(eax); // Remove saved parameter count.
3162
+ __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
3163
+ __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
3164
+ }
3165
+
3166
+
3167
+ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
3168
+ // esp[0] : return address
3169
+ // esp[4] : number of parameters
3170
+ // esp[8] : receiver displacement
3171
+ // esp[12] : function
2928
3172
 
2929
3173
  // Check if the calling frame is an arguments adaptor frame.
2930
3174
  Label adaptor_frame, try_allocate, runtime;
2931
3175
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2932
3176
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2933
3177
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2934
- __ j(equal, &adaptor_frame);
3178
+ __ j(equal, &adaptor_frame, Label::kNear);
2935
3179
 
2936
3180
  // Get the length from the frame.
2937
3181
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
2938
- __ jmp(&try_allocate);
3182
+ __ jmp(&try_allocate, Label::kNear);
2939
3183
 
2940
3184
  // Patch the arguments.length and the parameters pointer.
2941
3185
  __ bind(&adaptor_frame);
2942
3186
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2943
3187
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
2944
- __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
3188
+ __ lea(edx, Operand(edx, ecx, times_2,
3189
+ StandardFrameConstants::kCallerSPOffset));
2945
3190
  __ mov(Operand(esp, 2 * kPointerSize), edx);
2946
3191
 
2947
3192
  // Try the new space allocation. Start out with computing the size of
@@ -2952,7 +3197,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
2952
3197
  __ j(zero, &add_arguments_object, Label::kNear);
2953
3198
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
2954
3199
  __ bind(&add_arguments_object);
2955
- __ add(Operand(ecx), Immediate(GetArgumentsObjectSize()));
3200
+ __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSizeStrict));
2956
3201
 
2957
3202
  // Do the allocation of both objects in one go.
2958
3203
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
@@ -2960,8 +3205,9 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
2960
3205
  // Get the arguments boilerplate from the current (global) context.
2961
3206
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2962
3207
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
2963
- __ mov(edi, Operand(edi,
2964
- Context::SlotOffset(GetArgumentsBoilerplateIndex())));
3208
+ const int offset =
3209
+ Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
3210
+ __ mov(edi, Operand(edi, offset));
2965
3211
 
2966
3212
  // Copy the JS object part.
2967
3213
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
@@ -2969,36 +3215,27 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
2969
3215
  __ mov(FieldOperand(eax, i), ebx);
2970
3216
  }
2971
3217
 
2972
- if (type_ == NEW_NON_STRICT) {
2973
- // Setup the callee in-object property.
2974
- STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
2975
- __ mov(ebx, Operand(esp, 3 * kPointerSize));
2976
- __ mov(FieldOperand(eax, JSObject::kHeaderSize +
2977
- Heap::kArgumentsCalleeIndex * kPointerSize),
2978
- ebx);
2979
- }
2980
-
2981
3218
  // Get the length (smi tagged) and set that as an in-object property too.
2982
3219
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
2983
3220
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
2984
3221
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
2985
- Heap::kArgumentsLengthIndex * kPointerSize),
3222
+ Heap::kArgumentsLengthIndex * kPointerSize),
2986
3223
  ecx);
2987
3224
 
2988
3225
  // If there are no actual arguments, we're done.
2989
3226
  Label done;
2990
3227
  __ test(ecx, Operand(ecx));
2991
- __ j(zero, &done);
3228
+ __ j(zero, &done, Label::kNear);
2992
3229
 
2993
3230
  // Get the parameters pointer from the stack.
2994
3231
  __ mov(edx, Operand(esp, 2 * kPointerSize));
2995
3232
 
2996
3233
  // Setup the elements pointer in the allocated arguments object and
2997
3234
  // initialize the header in the elements fixed array.
2998
- __ lea(edi, Operand(eax, GetArgumentsObjectSize()));
3235
+ __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
2999
3236
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3000
3237
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3001
- Immediate(masm->isolate()->factory()->fixed_array_map()));
3238
+ Immediate(FACTORY->fixed_array_map()));
3002
3239
 
3003
3240
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3004
3241
  // Untag the length for the loop below.
@@ -3020,7 +3257,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3020
3257
 
3021
3258
  // Do the runtime call to allocate the arguments object.
3022
3259
  __ bind(&runtime);
3023
- __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
3260
+ __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
3024
3261
  }
3025
3262
 
3026
3263
 
@@ -3063,8 +3300,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3063
3300
  // Check that the first argument is a JSRegExp object.
3064
3301
  __ mov(eax, Operand(esp, kJSRegExpOffset));
3065
3302
  STATIC_ASSERT(kSmiTag == 0);
3066
- __ test(eax, Immediate(kSmiTagMask));
3067
- __ j(zero, &runtime);
3303
+ __ JumpIfSmi(eax, &runtime);
3068
3304
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3069
3305
  __ j(not_equal, &runtime);
3070
3306
  // Check that the RegExp has been compiled (data contains a fixed array).
@@ -3098,8 +3334,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3098
3334
  // edx: Number of capture registers
3099
3335
  // Check that the second argument is a string.
3100
3336
  __ mov(eax, Operand(esp, kSubjectOffset));
3101
- __ test(eax, Immediate(kSmiTagMask));
3102
- __ j(zero, &runtime);
3337
+ __ JumpIfSmi(eax, &runtime);
3103
3338
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3104
3339
  __ j(NegateCondition(is_string), &runtime);
3105
3340
  // Get the length of the string to ebx.
@@ -3111,8 +3346,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3111
3346
  // Check that the third argument is a positive smi less than the subject
3112
3347
  // string length. A negative value will be greater (unsigned comparison).
3113
3348
  __ mov(eax, Operand(esp, kPreviousIndexOffset));
3114
- __ test(eax, Immediate(kSmiTagMask));
3115
- __ j(not_zero, &runtime);
3349
+ __ JumpIfNotSmi(eax, &runtime);
3116
3350
  __ cmp(eax, Operand(ebx));
3117
3351
  __ j(above_equal, &runtime);
3118
3352
 
@@ -3120,8 +3354,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3120
3354
  // edx: Number of capture registers
3121
3355
  // Check that the fourth object is a JSArray object.
3122
3356
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3123
- __ test(eax, Immediate(kSmiTagMask));
3124
- __ j(zero, &runtime);
3357
+ __ JumpIfSmi(eax, &runtime);
3125
3358
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3126
3359
  __ j(not_equal, &runtime);
3127
3360
  // Check that the JSArray is in fast case.
@@ -3138,6 +3371,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3138
3371
  __ cmp(edx, Operand(eax));
3139
3372
  __ j(greater, &runtime);
3140
3373
 
3374
+ // Reset offset for possibly sliced string.
3375
+ __ Set(edi, Immediate(0));
3141
3376
  // ecx: RegExp data (FixedArray)
3142
3377
  // Check the representation and encoding of the subject string.
3143
3378
  Label seq_ascii_string, seq_two_byte_string, check_code;
@@ -3148,36 +3383,45 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3148
3383
  __ and_(ebx,
3149
3384
  kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
3150
3385
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
3151
- __ j(zero, &seq_two_byte_string);
3386
+ __ j(zero, &seq_two_byte_string, Label::kNear);
3152
3387
  // Any other flat string must be a flat ascii string.
3153
- __ test(Operand(ebx),
3388
+ __ and_(Operand(ebx),
3154
3389
  Immediate(kIsNotStringMask | kStringRepresentationMask));
3155
- __ j(zero, &seq_ascii_string);
3390
+ __ j(zero, &seq_ascii_string, Label::kNear);
3156
3391
 
3157
- // Check for flat cons string.
3392
+ // Check for flat cons string or sliced string.
3158
3393
  // A flat cons string is a cons string where the second part is the empty
3159
3394
  // string. In that case the subject string is just the first part of the cons
3160
3395
  // string. Also in this case the first part of the cons string is known to be
3161
3396
  // a sequential string or an external string.
3162
- STATIC_ASSERT(kExternalStringTag != 0);
3163
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
3164
- __ test(Operand(ebx),
3165
- Immediate(kIsNotStringMask | kExternalStringTag));
3166
- __ j(not_zero, &runtime);
3167
- // String is a cons string.
3168
- __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
3169
- __ cmp(Operand(edx), factory->empty_string());
3397
+ // In the case of a sliced string its offset has to be taken into account.
3398
+ Label cons_string, check_encoding;
3399
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
3400
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
3401
+ __ cmp(Operand(ebx), Immediate(kExternalStringTag));
3402
+ __ j(less, &cons_string);
3403
+ __ j(equal, &runtime);
3404
+
3405
+ // String is sliced.
3406
+ __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
3407
+ __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
3408
+ // edi: offset of sliced string, smi-tagged.
3409
+ // eax: parent string.
3410
+ __ jmp(&check_encoding, Label::kNear);
3411
+ // String is a cons string, check whether it is flat.
3412
+ __ bind(&cons_string);
3413
+ __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
3170
3414
  __ j(not_equal, &runtime);
3171
3415
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3416
+ __ bind(&check_encoding);
3172
3417
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3173
- // String is a cons string with empty second part.
3174
- // eax: first part of cons string.
3175
- // ebx: map of first part of cons string.
3176
- // Is first part a flat two byte string?
3418
+ // eax: first part of cons string or parent of sliced string.
3419
+ // ebx: map of first part of cons string or map of parent of sliced string.
3420
+ // Is first part of cons or parent of slice a flat two byte string?
3177
3421
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3178
3422
  kStringRepresentationMask | kStringEncodingMask);
3179
3423
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
3180
- __ j(zero, &seq_two_byte_string);
3424
+ __ j(zero, &seq_two_byte_string, Label::kNear);
3181
3425
  // Any other flat string must be ascii.
3182
3426
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3183
3427
  kStringRepresentationMask);
@@ -3187,25 +3431,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3187
3431
  // eax: subject string (flat ascii)
3188
3432
  // ecx: RegExp data (FixedArray)
3189
3433
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
3190
- __ Set(edi, Immediate(1)); // Type is ascii.
3191
- __ jmp(&check_code);
3434
+ __ Set(ecx, Immediate(1)); // Type is ascii.
3435
+ __ jmp(&check_code, Label::kNear);
3192
3436
 
3193
3437
  __ bind(&seq_two_byte_string);
3194
3438
  // eax: subject string (flat two byte)
3195
3439
  // ecx: RegExp data (FixedArray)
3196
3440
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
3197
- __ Set(edi, Immediate(0)); // Type is two byte.
3441
+ __ Set(ecx, Immediate(0)); // Type is two byte.
3198
3442
 
3199
3443
  __ bind(&check_code);
3200
3444
  // Check that the irregexp code has been generated for the actual string
3201
3445
  // encoding. If it has, the field contains a code object otherwise it contains
3202
- // the hole.
3203
- __ CmpObjectType(edx, CODE_TYPE, ebx);
3204
- __ j(not_equal, &runtime);
3446
+ // a smi (code flushing support).
3447
+ __ JumpIfSmi(edx, &runtime);
3205
3448
 
3206
3449
  // eax: subject string
3207
3450
  // edx: code
3208
- // edi: encoding of subject string (1 if ascii, 0 if two_byte);
3451
+ // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
3209
3452
  // Load used arguments before starting to push arguments for call to native
3210
3453
  // RegExp code to avoid handling changing stack height.
3211
3454
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
@@ -3214,7 +3457,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3214
3457
  // eax: subject string
3215
3458
  // ebx: previous index
3216
3459
  // edx: code
3217
- // edi: encoding of subject string (1 if ascii 0 if two_byte);
3460
+ // ecx: encoding of subject string (1 if ascii 0 if two_byte);
3218
3461
  // All checks done. Now push arguments for native regexp code.
3219
3462
  Counters* counters = masm->isolate()->counters();
3220
3463
  __ IncrementCounter(counters->regexp_entry_native(), 1);
@@ -3231,23 +3474,47 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3231
3474
  __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3232
3475
 
3233
3476
  // Argument 6: Start (high end) of backtracking stack memory area.
3234
- __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3235
- __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3236
- __ mov(Operand(esp, 5 * kPointerSize), ecx);
3477
+ __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3478
+ __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3479
+ __ mov(Operand(esp, 5 * kPointerSize), esi);
3237
3480
 
3238
3481
  // Argument 5: static offsets vector buffer.
3239
3482
  __ mov(Operand(esp, 4 * kPointerSize),
3240
3483
  Immediate(ExternalReference::address_of_static_offsets_vector(
3241
3484
  masm->isolate())));
3242
3485
 
3486
+ // Argument 2: Previous index.
3487
+ __ mov(Operand(esp, 1 * kPointerSize), ebx);
3488
+
3489
+ // Argument 1: Original subject string.
3490
+ // The original subject is in the previous stack frame. Therefore we have to
3491
+ // use ebp, which points exactly to one pointer size below the previous esp.
3492
+ // (Because creating a new stack frame pushes the previous ebp onto the stack
3493
+ // and thereby moves up esp by one kPointerSize.)
3494
+ __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
3495
+ __ mov(Operand(esp, 0 * kPointerSize), esi);
3496
+
3497
+ // esi: original subject string
3498
+ // eax: underlying subject string
3499
+ // ebx: previous index
3500
+ // ecx: encoding of subject string (1 if ascii 0 if two_byte);
3501
+ // edx: code
3243
3502
  // Argument 4: End of string data
3244
3503
  // Argument 3: Start of string data
3504
+ // Prepare start and end index of the input.
3505
+ // Load the length from the original sliced string if that is the case.
3506
+ __ mov(esi, FieldOperand(esi, String::kLengthOffset));
3507
+ __ add(esi, Operand(edi)); // Calculate input end wrt offset.
3508
+ __ SmiUntag(edi);
3509
+ __ add(ebx, Operand(edi)); // Calculate input start wrt offset.
3510
+
3511
+ // ebx: start index of the input string
3512
+ // esi: end index of the input string
3245
3513
  Label setup_two_byte, setup_rest;
3246
- __ test(edi, Operand(edi));
3247
- __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3514
+ __ test(ecx, Operand(ecx));
3248
3515
  __ j(zero, &setup_two_byte, Label::kNear);
3249
- __ SmiUntag(edi);
3250
- __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3516
+ __ SmiUntag(esi);
3517
+ __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize));
3251
3518
  __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3252
3519
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3253
3520
  __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
@@ -3255,20 +3522,14 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3255
3522
 
3256
3523
  __ bind(&setup_two_byte);
3257
3524
  STATIC_ASSERT(kSmiTag == 0);
3258
- STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
3259
- __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
3525
+ STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2).
3526
+ __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
3260
3527
  __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3261
3528
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3262
3529
  __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3263
3530
 
3264
3531
  __ bind(&setup_rest);
3265
3532
 
3266
- // Argument 2: Previous index.
3267
- __ mov(Operand(esp, 1 * kPointerSize), ebx);
3268
-
3269
- // Argument 1: Subject string.
3270
- __ mov(Operand(esp, 0 * kPointerSize), eax);
3271
-
3272
3533
  // Locate the code entry and call it.
3273
3534
  __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3274
3535
  __ call(Operand(edx));
@@ -3307,7 +3568,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
3307
3568
  // by javascript code.
3308
3569
  __ cmp(eax, factory->termination_exception());
3309
3570
  Label throw_termination_exception;
3310
- __ j(equal, &throw_termination_exception);
3571
+ __ j(equal, &throw_termination_exception, Label::kNear);
3311
3572
 
3312
3573
  // Handle normal exception by following handler chain.
3313
3574
  __ Throw(eax);
@@ -3393,8 +3654,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3393
3654
  Label slowcase;
3394
3655
  Label done;
3395
3656
  __ mov(ebx, Operand(esp, kPointerSize * 3));
3396
- __ test(ebx, Immediate(kSmiTagMask));
3397
- __ j(not_zero, &slowcase);
3657
+ __ JumpIfNotSmi(ebx, &slowcase);
3398
3658
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
3399
3659
  __ j(above, &slowcase);
3400
3660
  // Smi-tagging is equivalent to multiplying by 2.
@@ -3506,8 +3766,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
3506
3766
  } else {
3507
3767
  Label not_smi;
3508
3768
  STATIC_ASSERT(kSmiTag == 0);
3509
- __ test(object, Immediate(kSmiTagMask));
3510
- __ j(not_zero, &not_smi, Label::kNear);
3769
+ __ JumpIfNotSmi(object, &not_smi, Label::kNear);
3511
3770
  __ mov(scratch, object);
3512
3771
  __ SmiUntag(scratch);
3513
3772
  __ jmp(&smi_hash_calculated, Label::kNear);
@@ -3527,8 +3786,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
3527
3786
  index,
3528
3787
  times_twice_pointer_size,
3529
3788
  FixedArray::kHeaderSize));
3530
- __ test(probe, Immediate(kSmiTagMask));
3531
- __ j(zero, not_found);
3789
+ __ JumpIfSmi(probe, not_found);
3532
3790
  if (CpuFeatures::IsSupported(SSE2)) {
3533
3791
  CpuFeatures::Scope fscope(SSE2);
3534
3792
  __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
@@ -3593,17 +3851,16 @@ static int NegativeComparisonResult(Condition cc) {
3593
3851
  void CompareStub::Generate(MacroAssembler* masm) {
3594
3852
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3595
3853
 
3596
- Label check_unequal_objects, done;
3854
+ Label check_unequal_objects;
3597
3855
 
3598
3856
  // Compare two smis if required.
3599
3857
  if (include_smi_compare_) {
3600
3858
  Label non_smi, smi_done;
3601
3859
  __ mov(ecx, Operand(edx));
3602
3860
  __ or_(ecx, Operand(eax));
3603
- __ test(ecx, Immediate(kSmiTagMask));
3604
- __ j(not_zero, &non_smi);
3861
+ __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
3605
3862
  __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
3606
- __ j(no_overflow, &smi_done);
3863
+ __ j(no_overflow, &smi_done, Label::kNear);
3607
3864
  __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
3608
3865
  __ bind(&smi_done);
3609
3866
  __ mov(eax, edx);
@@ -3650,7 +3907,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
3650
3907
  __ j(equal, &heap_number, Label::kNear);
3651
3908
  if (cc_ != equal) {
3652
3909
  // Call runtime on identical JSObjects. Otherwise return equal.
3653
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3910
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
3654
3911
  __ j(above_equal, &not_identical);
3655
3912
  }
3656
3913
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
@@ -3725,7 +3982,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
3725
3982
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
3726
3983
  Immediate(masm->isolate()->factory()->heap_number_map()));
3727
3984
  // If heap number, handle it in the slow case.
3728
- __ j(equal, &slow);
3985
+ __ j(equal, &slow, Label::kNear);
3729
3986
  // Return non-equal (ebx is not zero)
3730
3987
  __ mov(eax, ebx);
3731
3988
  __ ret(0);
@@ -3738,8 +3995,8 @@ void CompareStub::Generate(MacroAssembler* masm) {
3738
3995
  // Get the type of the first operand.
3739
3996
  // If the first object is a JS object, we have done pointer comparison.
3740
3997
  Label first_non_object;
3741
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3742
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3998
+ STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
3999
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
3743
4000
  __ j(below, &first_non_object, Label::kNear);
3744
4001
 
3745
4002
  // Return non-zero (eax is not zero)
@@ -3753,7 +4010,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
3753
4010
  __ CmpInstanceType(ecx, ODDBALL_TYPE);
3754
4011
  __ j(equal, &return_not_equal);
3755
4012
 
3756
- __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
4013
+ __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
3757
4014
  __ j(above_equal, &return_not_equal);
3758
4015
 
3759
4016
  // Check for oddballs: true, false, null, undefined.
@@ -3776,7 +4033,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
3776
4033
  __ ucomisd(xmm0, xmm1);
3777
4034
 
3778
4035
  // Don't base result on EFLAGS when a NaN is involved.
3779
- __ j(parity_even, &unordered);
4036
+ __ j(parity_even, &unordered, Label::kNear);
3780
4037
  // Return a result of -1, 0, or 1, based on EFLAGS.
3781
4038
  __ mov(eax, 0); // equal
3782
4039
  __ mov(ecx, Immediate(Smi::FromInt(1)));
@@ -3792,12 +4049,12 @@ void CompareStub::Generate(MacroAssembler* masm) {
3792
4049
  __ FCmp();
3793
4050
 
3794
4051
  // Don't base result on EFLAGS when a NaN is involved.
3795
- __ j(parity_even, &unordered);
4052
+ __ j(parity_even, &unordered, Label::kNear);
3796
4053
 
3797
4054
  Label below_label, above_label;
3798
4055
  // Return a result of -1, 0, or 1, based on EFLAGS.
3799
- __ j(below, &below_label);
3800
- __ j(above, &above_label);
4056
+ __ j(below, &below_label, Label::kNear);
4057
+ __ j(above, &above_label, Label::kNear);
3801
4058
 
3802
4059
  __ Set(eax, Immediate(0));
3803
4060
  __ ret(0);
@@ -3877,9 +4134,9 @@ void CompareStub::Generate(MacroAssembler* masm) {
3877
4134
  __ lea(ecx, Operand(eax, edx, times_1, 0));
3878
4135
  __ test(ecx, Immediate(kSmiTagMask));
3879
4136
  __ j(not_zero, &not_both_objects, Label::kNear);
3880
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4137
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
3881
4138
  __ j(below, &not_both_objects, Label::kNear);
3882
- __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
4139
+ __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
3883
4140
  __ j(below, &not_both_objects, Label::kNear);
3884
4141
  // We do not bail out after this point. Both are JSObjects, and
3885
4142
  // they are equal if and only if both are undetectable.
@@ -3927,8 +4184,7 @@ void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3927
4184
  Label* label,
3928
4185
  Register object,
3929
4186
  Register scratch) {
3930
- __ test(object, Immediate(kSmiTagMask));
3931
- __ j(zero, label);
4187
+ __ JumpIfSmi(object, label);
3932
4188
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
3933
4189
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3934
4190
  __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
@@ -3968,8 +4224,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
3968
4224
  __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
3969
4225
 
3970
4226
  // Check that the function really is a JavaScript function.
3971
- __ test(edi, Immediate(kSmiTagMask));
3972
- __ j(zero, &slow);
4227
+ __ JumpIfSmi(edi, &slow);
3973
4228
  // Goto slow case if we do not have a function.
3974
4229
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3975
4230
  __ j(not_equal, &slow);
@@ -3981,7 +4236,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
3981
4236
  Label call_as_function;
3982
4237
  __ cmp(eax, masm->isolate()->factory()->the_hole_value());
3983
4238
  __ j(equal, &call_as_function);
3984
- __ InvokeFunction(edi, actual, JUMP_FUNCTION);
4239
+ __ InvokeFunction(edi,
4240
+ actual,
4241
+ JUMP_FUNCTION,
4242
+ NullCallWrapper(),
4243
+ CALL_AS_METHOD);
3985
4244
  __ bind(&call_as_function);
3986
4245
  }
3987
4246
  __ InvokeFunction(edi,
@@ -4000,6 +4259,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
4000
4259
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
4001
4260
  Handle<Code> adaptor =
4002
4261
  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
4262
+ __ SetCallKind(ecx, CALL_AS_METHOD);
4003
4263
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
4004
4264
  }
4005
4265
 
@@ -4109,7 +4369,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
4109
4369
  // If the returned exception is RETRY_AFTER_GC continue at retry label
4110
4370
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4111
4371
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4112
- __ j(zero, &retry);
4372
+ __ j(zero, &retry, Label::kNear);
4113
4373
 
4114
4374
  // Special handling of out of memory exceptions.
4115
4375
  __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
@@ -4206,9 +4466,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
4206
4466
 
4207
4467
  void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4208
4468
  Label invoke, exit;
4209
- #ifdef ENABLE_LOGGING_AND_PROFILING
4210
4469
  Label not_outermost_js, not_outermost_js_2;
4211
- #endif
4212
4470
 
4213
4471
  // Setup frame.
4214
4472
  __ push(ebp);
@@ -4227,20 +4485,18 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4227
4485
  ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
4228
4486
  __ push(Operand::StaticVariable(c_entry_fp));
4229
4487
 
4230
- #ifdef ENABLE_LOGGING_AND_PROFILING
4231
4488
  // If this is the outermost JS call, set js_entry_sp value.
4232
4489
  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
4233
4490
  masm->isolate());
4234
4491
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4235
- __ j(not_equal, &not_outermost_js);
4492
+ __ j(not_equal, &not_outermost_js, Label::kNear);
4236
4493
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4237
4494
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4238
4495
  Label cont;
4239
- __ jmp(&cont);
4496
+ __ jmp(&cont, Label::kNear);
4240
4497
  __ bind(&not_outermost_js);
4241
4498
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4242
4499
  __ bind(&cont);
4243
- #endif
4244
4500
 
4245
4501
  // Call a faked try-block that does the invoke.
4246
4502
  __ call(&invoke);
@@ -4288,7 +4544,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4288
4544
  __ PopTryHandler();
4289
4545
 
4290
4546
  __ bind(&exit);
4291
- #ifdef ENABLE_LOGGING_AND_PROFILING
4292
4547
  // Check if the current stack frame is marked as the outermost JS frame.
4293
4548
  __ pop(ebx);
4294
4549
  __ cmp(Operand(ebx),
@@ -4296,7 +4551,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4296
4551
  __ j(not_equal, &not_outermost_js_2);
4297
4552
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4298
4553
  __ bind(&not_outermost_js_2);
4299
- #endif
4300
4554
 
4301
4555
  // Restore the top frame descriptor from the stack.
4302
4556
  __ pop(Operand::StaticVariable(ExternalReference(
@@ -4364,8 +4618,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
4364
4618
  }
4365
4619
 
4366
4620
  // Check that the left hand is a JS object.
4367
- __ test(object, Immediate(kSmiTagMask));
4368
- __ j(zero, &not_js_object);
4621
+ __ JumpIfSmi(object, &not_js_object);
4369
4622
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
4370
4623
 
4371
4624
  // If there is a call site cache don't look in the global cache, but do the
@@ -4392,8 +4645,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
4392
4645
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
4393
4646
 
4394
4647
  // Check that the function prototype is a JS object.
4395
- __ test(prototype, Immediate(kSmiTagMask));
4396
- __ j(zero, &slow);
4648
+ __ JumpIfSmi(prototype, &slow);
4397
4649
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
4398
4650
 
4399
4651
  // Update the global instanceof or call site inlined cache with the current
@@ -4482,28 +4734,26 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
4482
4734
  __ bind(&not_js_object);
4483
4735
  // Before null, smi and string value checks, check that the rhs is a function
4484
4736
  // as for a non-function rhs an exception needs to be thrown.
4485
- __ test(function, Immediate(kSmiTagMask));
4486
- __ j(zero, &slow);
4737
+ __ JumpIfSmi(function, &slow, Label::kNear);
4487
4738
  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
4488
- __ j(not_equal, &slow);
4739
+ __ j(not_equal, &slow, Label::kNear);
4489
4740
 
4490
4741
  // Null is not instance of anything.
4491
4742
  __ cmp(object, factory->null_value());
4492
- __ j(not_equal, &object_not_null);
4743
+ __ j(not_equal, &object_not_null, Label::kNear);
4493
4744
  __ Set(eax, Immediate(Smi::FromInt(1)));
4494
4745
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4495
4746
 
4496
4747
  __ bind(&object_not_null);
4497
4748
  // Smi values is not instance of anything.
4498
- __ test(object, Immediate(kSmiTagMask));
4499
- __ j(not_zero, &object_not_null_or_smi);
4749
+ __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
4500
4750
  __ Set(eax, Immediate(Smi::FromInt(1)));
4501
4751
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4502
4752
 
4503
4753
  __ bind(&object_not_null_or_smi);
4504
4754
  // String values is not instance of anything.
4505
4755
  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
4506
- __ j(NegateCondition(is_string), &slow);
4756
+ __ j(NegateCondition(is_string), &slow, Label::kNear);
4507
4757
  __ Set(eax, Immediate(Smi::FromInt(1)));
4508
4758
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4509
4759
 
@@ -4562,15 +4812,8 @@ int CompareStub::MinorKey() {
4562
4812
 
4563
4813
  // Unfortunately you have to run without snapshots to see most of these
4564
4814
  // names in the profile since most compare stubs end up in the snapshot.
4565
- const char* CompareStub::GetName() {
4815
+ void CompareStub::PrintName(StringStream* stream) {
4566
4816
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4567
-
4568
- if (name_ != NULL) return name_;
4569
- const int kMaxNameLength = 100;
4570
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
4571
- kMaxNameLength);
4572
- if (name_ == NULL) return "OOM";
4573
-
4574
4817
  const char* cc_name;
4575
4818
  switch (cc_) {
4576
4819
  case less: cc_name = "LT"; break;
@@ -4581,35 +4824,12 @@ const char* CompareStub::GetName() {
4581
4824
  case not_equal: cc_name = "NE"; break;
4582
4825
  default: cc_name = "UnknownCondition"; break;
4583
4826
  }
4584
-
4585
- const char* strict_name = "";
4586
- if (strict_ && (cc_ == equal || cc_ == not_equal)) {
4587
- strict_name = "_STRICT";
4588
- }
4589
-
4590
- const char* never_nan_nan_name = "";
4591
- if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
4592
- never_nan_nan_name = "_NO_NAN";
4593
- }
4594
-
4595
- const char* include_number_compare_name = "";
4596
- if (!include_number_compare_) {
4597
- include_number_compare_name = "_NO_NUMBER";
4598
- }
4599
-
4600
- const char* include_smi_compare_name = "";
4601
- if (!include_smi_compare_) {
4602
- include_smi_compare_name = "_NO_SMI";
4603
- }
4604
-
4605
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
4606
- "CompareStub_%s%s%s%s%s",
4607
- cc_name,
4608
- strict_name,
4609
- never_nan_nan_name,
4610
- include_number_compare_name,
4611
- include_smi_compare_name);
4612
- return name_;
4827
+ bool is_equality = cc_ == equal || cc_ == not_equal;
4828
+ stream->Add("CompareStub_%s", cc_name);
4829
+ if (strict_ && is_equality) stream->Add("_STRICT");
4830
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4831
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
4832
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
4613
4833
  }
4614
4834
 
4615
4835
 
@@ -4620,11 +4840,11 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4620
4840
  Label flat_string;
4621
4841
  Label ascii_string;
4622
4842
  Label got_char_code;
4843
+ Label sliced_string;
4623
4844
 
4624
4845
  // If the receiver is a smi trigger the non-string case.
4625
4846
  STATIC_ASSERT(kSmiTag == 0);
4626
- __ test(object_, Immediate(kSmiTagMask));
4627
- __ j(zero, receiver_not_string_);
4847
+ __ JumpIfSmi(object_, receiver_not_string_);
4628
4848
 
4629
4849
  // Fetch the instance type of the receiver into result register.
4630
4850
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
@@ -4635,8 +4855,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4635
4855
 
4636
4856
  // If the index is non-smi trigger the non-smi case.
4637
4857
  STATIC_ASSERT(kSmiTag == 0);
4638
- __ test(index_, Immediate(kSmiTagMask));
4639
- __ j(not_zero, &index_not_smi_);
4858
+ __ JumpIfNotSmi(index_, &index_not_smi_);
4640
4859
 
4641
4860
  // Put smi-tagged index into scratch register.
4642
4861
  __ mov(scratch_, index_);
@@ -4652,31 +4871,45 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4652
4871
  __ j(zero, &flat_string);
4653
4872
 
4654
4873
  // Handle non-flat strings.
4655
- __ test(result_, Immediate(kIsConsStringMask));
4656
- __ j(zero, &call_runtime_);
4874
+ __ and_(result_, kStringRepresentationMask);
4875
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
4876
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
4877
+ __ cmp(result_, kExternalStringTag);
4878
+ __ j(greater, &sliced_string, Label::kNear);
4879
+ __ j(equal, &call_runtime_);
4657
4880
 
4658
4881
  // ConsString.
4659
4882
  // Check whether the right hand side is the empty string (i.e. if
4660
4883
  // this is really a flat string in a cons string). If that is not
4661
4884
  // the case we would rather go to the runtime system now to flatten
4662
4885
  // the string.
4886
+ Label assure_seq_string;
4663
4887
  __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
4664
4888
  Immediate(masm->isolate()->factory()->empty_string()));
4665
4889
  __ j(not_equal, &call_runtime_);
4666
4890
  // Get the first of the two strings and load its instance type.
4667
4891
  __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
4892
+ __ jmp(&assure_seq_string, Label::kNear);
4893
+
4894
+ // SlicedString, unpack and add offset.
4895
+ __ bind(&sliced_string);
4896
+ __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
4897
+ __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));
4898
+
4899
+ // Assure that we are dealing with a sequential string. Go to runtime if not.
4900
+ __ bind(&assure_seq_string);
4668
4901
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4669
4902
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4670
- // If the first cons component is also non-flat, then go to runtime.
4671
4903
  STATIC_ASSERT(kSeqStringTag == 0);
4672
4904
  __ test(result_, Immediate(kStringRepresentationMask));
4673
4905
  __ j(not_zero, &call_runtime_);
4906
+ __ jmp(&flat_string, Label::kNear);
4674
4907
 
4675
4908
  // Check for 1-byte or 2-byte string.
4676
4909
  __ bind(&flat_string);
4677
4910
  STATIC_ASSERT(kAsciiStringTag != 0);
4678
4911
  __ test(result_, Immediate(kStringEncodingMask));
4679
- __ j(not_zero, &ascii_string);
4912
+ __ j(not_zero, &ascii_string, Label::kNear);
4680
4913
 
4681
4914
  // 2-byte string.
4682
4915
  // Load the 2-byte character code into the result register.
@@ -4684,7 +4917,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4684
4917
  __ movzx_w(result_, FieldOperand(object_,
4685
4918
  scratch_, times_1, // Scratch is smi-tagged.
4686
4919
  SeqTwoByteString::kHeaderSize));
4687
- __ jmp(&got_char_code);
4920
+ __ jmp(&got_char_code, Label::kNear);
4688
4921
 
4689
4922
  // ASCII string.
4690
4923
  // Load the byte into the result register.
@@ -4734,8 +4967,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
4734
4967
  call_helper.AfterCall(masm);
4735
4968
  // If index is still not a smi, it must be out of range.
4736
4969
  STATIC_ASSERT(kSmiTag == 0);
4737
- __ test(scratch_, Immediate(kSmiTagMask));
4738
- __ j(not_zero, index_out_of_range_);
4970
+ __ JumpIfNotSmi(scratch_, index_out_of_range_);
4739
4971
  // Otherwise, return to the fast path.
4740
4972
  __ jmp(&got_smi_index_);
4741
4973
 
@@ -4829,14 +5061,12 @@ void StringAddStub::Generate(MacroAssembler* masm) {
4829
5061
 
4830
5062
  // Make sure that both arguments are strings if not known in advance.
4831
5063
  if (flags_ == NO_STRING_ADD_FLAGS) {
4832
- __ test(eax, Immediate(kSmiTagMask));
4833
- __ j(zero, &string_add_runtime);
5064
+ __ JumpIfSmi(eax, &string_add_runtime);
4834
5065
  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
4835
5066
  __ j(above_equal, &string_add_runtime);
4836
5067
 
4837
5068
  // First argument is a a string, test second.
4838
- __ test(edx, Immediate(kSmiTagMask));
4839
- __ j(zero, &string_add_runtime);
5069
+ __ JumpIfSmi(edx, &string_add_runtime);
4840
5070
  __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
4841
5071
  __ j(above_equal, &string_add_runtime);
4842
5072
  } else {
@@ -4999,6 +5229,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
4999
5229
  __ and_(ecx, kStringRepresentationMask);
5000
5230
  __ cmp(ecx, kExternalStringTag);
5001
5231
  __ j(equal, &string_add_runtime);
5232
+ // We cannot encounter sliced strings here since:
5233
+ STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
5002
5234
  // Now check if both strings are ascii strings.
5003
5235
  // eax: first string
5004
5236
  // ebx: length of resulting flat string as a smi
@@ -5104,8 +5336,7 @@ void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
5104
5336
  Label* slow) {
5105
5337
  // First check if the argument is already a string.
5106
5338
  Label not_string, done;
5107
- __ test(arg, Immediate(kSmiTagMask));
5108
- __ j(zero, &not_string);
5339
+ __ JumpIfSmi(arg, &not_string);
5109
5340
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
5110
5341
  __ j(below, &done);
5111
5342
 
@@ -5126,8 +5357,7 @@ void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
5126
5357
 
5127
5358
  // Check if the argument is a safe string wrapper.
5128
5359
  __ bind(&not_cached);
5129
- __ test(arg, Immediate(kSmiTagMask));
5130
- __ j(zero, slow);
5360
+ __ JumpIfSmi(arg, slow);
5131
5361
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
5132
5362
  __ j(not_equal, slow);
5133
5363
  __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
@@ -5421,8 +5651,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5421
5651
  // Make sure first argument is a string.
5422
5652
  __ mov(eax, Operand(esp, 3 * kPointerSize));
5423
5653
  STATIC_ASSERT(kSmiTag == 0);
5424
- __ test(eax, Immediate(kSmiTagMask));
5425
- __ j(zero, &runtime);
5654
+ __ JumpIfSmi(eax, &runtime);
5426
5655
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
5427
5656
  __ j(NegateCondition(is_string), &runtime);
5428
5657
 
@@ -5432,11 +5661,9 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5432
5661
  // Calculate length of sub string using the smi values.
5433
5662
  Label result_longer_than_two;
5434
5663
  __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
5435
- __ test(ecx, Immediate(kSmiTagMask));
5436
- __ j(not_zero, &runtime);
5664
+ __ JumpIfNotSmi(ecx, &runtime);
5437
5665
  __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
5438
- __ test(edx, Immediate(kSmiTagMask));
5439
- __ j(not_zero, &runtime);
5666
+ __ JumpIfNotSmi(edx, &runtime);
5440
5667
  __ sub(ecx, Operand(edx));
5441
5668
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
5442
5669
  Label return_eax;
@@ -5476,7 +5703,83 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5476
5703
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
5477
5704
  __ Set(ecx, Immediate(2));
5478
5705
 
5479
- __ bind(&result_longer_than_two);
5706
+ if (FLAG_string_slices) {
5707
+ Label copy_routine;
5708
+ // If coming from the make_two_character_string path, the string
5709
+ // is too short to be sliced anyways.
5710
+ STATIC_ASSERT(2 < SlicedString::kMinLength);
5711
+ __ jmp(&copy_routine);
5712
+ __ bind(&result_longer_than_two);
5713
+
5714
+ // eax: string
5715
+ // ebx: instance type
5716
+ // ecx: sub string length
5717
+ // edx: from index (smi)
5718
+ Label allocate_slice, sliced_string, seq_string;
5719
+ __ cmp(ecx, SlicedString::kMinLength);
5720
+ // Short slice. Copy instead of slicing.
5721
+ __ j(less, &copy_routine);
5722
+ STATIC_ASSERT(kSeqStringTag == 0);
5723
+ __ test(ebx, Immediate(kStringRepresentationMask));
5724
+ __ j(zero, &seq_string, Label::kNear);
5725
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
5726
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
5727
+ __ test(ebx, Immediate(kIsIndirectStringMask));
5728
+ // External string. Jump to runtime.
5729
+ __ j(zero, &runtime);
5730
+
5731
+ Factory* factory = masm->isolate()->factory();
5732
+ __ test(ebx, Immediate(kSlicedNotConsMask));
5733
+ __ j(not_zero, &sliced_string, Label::kNear);
5734
+ // Cons string. Check whether it is flat, then fetch first part.
5735
+ __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
5736
+ factory->empty_string());
5737
+ __ j(not_equal, &runtime);
5738
+ __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
5739
+ __ jmp(&allocate_slice, Label::kNear);
5740
+
5741
+ __ bind(&sliced_string);
5742
+ // Sliced string. Fetch parent and correct start index by offset.
5743
+ __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
5744
+ __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
5745
+ __ jmp(&allocate_slice, Label::kNear);
5746
+
5747
+ __ bind(&seq_string);
5748
+ // Sequential string. Just move string to the right register.
5749
+ __ mov(edi, eax);
5750
+
5751
+ __ bind(&allocate_slice);
5752
+ // edi: underlying subject string
5753
+ // ebx: instance type of original subject string
5754
+ // edx: offset
5755
+ // ecx: length
5756
+ // Allocate new sliced string. At this point we do not reload the instance
5757
+ // type including the string encoding because we simply rely on the info
5758
+ // provided by the original string. It does not matter if the original
5759
+ // string's encoding is wrong because we always have to recheck encoding of
5760
+ // the newly created string's parent anyways due to externalized strings.
5761
+ Label two_byte_slice, set_slice_header;
5762
+ STATIC_ASSERT(kAsciiStringTag != 0);
5763
+ __ test(ebx, Immediate(kAsciiStringTag));
5764
+ __ j(zero, &two_byte_slice, Label::kNear);
5765
+ __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
5766
+ __ jmp(&set_slice_header, Label::kNear);
5767
+ __ bind(&two_byte_slice);
5768
+ __ AllocateSlicedString(eax, ebx, no_reg, &runtime);
5769
+ __ bind(&set_slice_header);
5770
+ __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
5771
+ __ SmiTag(ecx);
5772
+ __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
5773
+ __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
5774
+ __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
5775
+ Immediate(String::kEmptyHashField));
5776
+ __ jmp(&return_eax);
5777
+
5778
+ __ bind(&copy_routine);
5779
+ } else {
5780
+ __ bind(&result_longer_than_two);
5781
+ }
5782
+
5480
5783
  // eax: string
5481
5784
  // ebx: instance type
5482
5785
  // ecx: result string length
@@ -5728,8 +6031,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
5728
6031
  Label miss;
5729
6032
  __ mov(ecx, Operand(edx));
5730
6033
  __ or_(ecx, Operand(eax));
5731
- __ test(ecx, Immediate(kSmiTagMask));
5732
- __ j(not_zero, &miss, Label::kNear);
6034
+ __ JumpIfNotSmi(ecx, &miss, Label::kNear);
5733
6035
 
5734
6036
  if (GetCondition() == equal) {
5735
6037
  // For equality we do not care about the sign of the result.
@@ -5758,8 +6060,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
5758
6060
  Label miss;
5759
6061
  __ mov(ecx, Operand(edx));
5760
6062
  __ and_(ecx, Operand(eax));
5761
- __ test(ecx, Immediate(kSmiTagMask));
5762
- __ j(zero, &generic_stub, Label::kNear);
6063
+ __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
5763
6064
 
5764
6065
  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
5765
6066
  __ j(not_equal, &miss, Label::kNear);
@@ -5818,8 +6119,7 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
5818
6119
  __ mov(tmp1, Operand(left));
5819
6120
  STATIC_ASSERT(kSmiTag == 0);
5820
6121
  __ and_(tmp1, Operand(right));
5821
- __ test(tmp1, Immediate(kSmiTagMask));
5822
- __ j(zero, &miss, Label::kNear);
6122
+ __ JumpIfSmi(tmp1, &miss, Label::kNear);
5823
6123
 
5824
6124
  // Check that both operands are symbols.
5825
6125
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
@@ -5865,8 +6165,7 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
5865
6165
  __ mov(tmp1, Operand(left));
5866
6166
  STATIC_ASSERT(kSmiTag == 0);
5867
6167
  __ and_(tmp1, Operand(right));
5868
- __ test(tmp1, Immediate(kSmiTagMask));
5869
- __ j(zero, &miss);
6168
+ __ JumpIfSmi(tmp1, &miss);
5870
6169
 
5871
6170
  // Check that both operands are strings. This leaves the instance
5872
6171
  // types loaded in tmp1 and tmp2.
@@ -5931,8 +6230,7 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
5931
6230
  Label miss;
5932
6231
  __ mov(ecx, Operand(edx));
5933
6232
  __ and_(ecx, Operand(eax));
5934
- __ test(ecx, Immediate(kSmiTagMask));
5935
- __ j(zero, &miss, Label::kNear);
6233
+ __ JumpIfSmi(ecx, &miss, Label::kNear);
5936
6234
 
5937
6235
  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
5938
6236
  __ j(not_equal, &miss, Label::kNear);