libv8 3.3.10.4 → 3.5.10.beta1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -138,7 +138,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
 
   // Clear the heap tag on the elements array.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ sub(scratch1, scratch1, Operand(kHeapObjectTag));
 
   // Initialize the FixedArray and fill it with holes. FixedArray length is
@@ -207,7 +207,7 @@ static void AllocateJSArray(MacroAssembler* masm,
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested number of elements.
   __ bind(&not_empty);
-  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ mov(elements_array_end,
          Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
   __ add(elements_array_end,
@@ -243,7 +243,7 @@ static void AllocateJSArray(MacroAssembler* masm,
          FieldMemOperand(result, JSArray::kElementsOffset));
 
   // Clear the heap tag on the elements array.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ sub(elements_array_storage,
          elements_array_storage,
          Operand(kHeapObjectTag));
@@ -255,7 +255,7 @@ static void AllocateJSArray(MacroAssembler* masm,
255
255
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
256
256
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
257
257
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
258
- ASSERT(kSmiTag == 0);
258
+ STATIC_ASSERT(kSmiTag == 0);
259
259
  __ tst(array_size, array_size);
260
260
  // Length of the FixedArray is the number of pre-allocated elements if
261
261
  // the actual JSArray has length 0 and the size of the JSArray for non-empty
@@ -272,7 +272,7 @@ static void AllocateJSArray(MacroAssembler* masm,
272
272
  // result: JSObject
273
273
  // elements_array_storage: elements array element storage
274
274
  // array_size: smi-tagged size of elements array
275
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
275
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
276
276
  __ add(elements_array_end,
277
277
  elements_array_storage,
278
278
  Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -337,14 +337,14 @@ static void ArrayNativeCode(MacroAssembler* masm,
337
337
  __ bind(&argc_one_or_more);
338
338
  __ cmp(r0, Operand(1));
339
339
  __ b(ne, &argc_two_or_more);
340
- ASSERT(kSmiTag == 0);
340
+ STATIC_ASSERT(kSmiTag == 0);
341
341
  __ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
342
342
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
343
343
  __ b(ne, call_generic_code);
344
344
 
345
345
  // Handle construction of an empty array of a certain size. Bail out if size
346
346
  // is too large to actually allocate an elements array.
347
- ASSERT(kSmiTag == 0);
347
+ STATIC_ASSERT(kSmiTag == 0);
348
348
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
349
349
  __ b(ge, call_generic_code);
350
350
 
@@ -571,7 +571,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
571
571
  // Is it a String?
572
572
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
573
573
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
574
- ASSERT(kNotStringTag != 0);
574
+ STATIC_ASSERT(kNotStringTag != 0);
575
575
  __ tst(r3, Operand(kIsNotStringMask));
576
576
  __ b(ne, &convert_argument);
577
577
  __ mov(argument, r0);
@@ -619,8 +619,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
619
619
 
620
620
  Label non_function_call;
621
621
  // Check that the function is not a smi.
622
- __ tst(r1, Operand(kSmiTagMask));
623
- __ b(eq, &non_function_call);
622
+ __ JumpIfSmi(r1, &non_function_call);
624
623
  // Check that the function is a JSFunction.
625
624
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
626
625
  __ b(ne, &non_function_call);
@@ -675,8 +674,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
675
674
  // Load the initial map and verify that it is in fact a map.
676
675
  // r1: constructor function
677
676
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
678
- __ tst(r2, Operand(kSmiTagMask));
679
- __ b(eq, &rt_call);
677
+ __ JumpIfSmi(r2, &rt_call);
680
678
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
681
679
  __ b(ne, &rt_call);
682
680
 
@@ -915,10 +913,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
915
913
  masm->isolate()->builtins()->HandleApiCallConstruct();
916
914
  ParameterCount expected(0);
917
915
  __ InvokeCode(code, expected, expected,
918
- RelocInfo::CODE_TARGET, CALL_FUNCTION);
916
+ RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
919
917
  } else {
920
918
  ParameterCount actual(r0);
921
- __ InvokeFunction(r1, actual, CALL_FUNCTION);
919
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
920
+ NullCallWrapper(), CALL_AS_METHOD);
922
921
  }
923
922
 
924
923
  // Pop the function from the stack.
@@ -945,12 +944,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
945
944
  // sp[0]: receiver (newly allocated object)
946
945
  // sp[1]: constructor function
947
946
  // sp[2]: number of arguments (smi-tagged)
948
- __ tst(r0, Operand(kSmiTagMask));
949
- __ b(eq, &use_receiver);
947
+ __ JumpIfSmi(r0, &use_receiver);
950
948
 
951
949
  // If the type of the result (stored in its map) is less than
952
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
953
- __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
950
+ // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
951
+ __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
954
952
  __ b(ge, &exit);
955
953
 
956
954
  // Throw away the result of the constructor invocation and use the
@@ -1046,11 +1044,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1046
1044
  // Invoke the code and pass argc as r0.
1047
1045
  __ mov(r0, Operand(r3));
1048
1046
  if (is_construct) {
1049
- __ Call(masm->isolate()->builtins()->JSConstructCall(),
1050
- RelocInfo::CODE_TARGET);
1047
+ __ Call(masm->isolate()->builtins()->JSConstructCall());
1051
1048
  } else {
1052
1049
  ParameterCount actual(r0);
1053
- __ InvokeFunction(r1, actual, CALL_FUNCTION);
1050
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
1051
+ NullCallWrapper(), CALL_AS_METHOD);
1054
1052
  }
1055
1053
 
1056
1054
  // Exit the JS frame and remove the parameters (except function), and return.
@@ -1234,8 +1232,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1234
1232
  // r0: actual number of arguments
1235
1233
  Label non_function;
1236
1234
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1237
- __ tst(r1, Operand(kSmiTagMask));
1238
- __ b(eq, &non_function);
1235
+ __ JumpIfSmi(r1, &non_function);
1239
1236
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1240
1237
  __ b(ne, &non_function);
1241
1238
 
@@ -1255,8 +1252,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1255
1252
  __ b(ne, &shift_arguments);
1256
1253
 
1257
1254
  // Do not transform the receiver for native (Compilerhints already in r3).
1258
- __ tst(r3, Operand(1 << (SharedFunctionInfo::kES5Native +
1259
- kSmiTagSize)));
1255
+ __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1260
1256
  __ b(ne, &shift_arguments);
1261
1257
 
1262
1258
  // Compute the receiver in non-strict mode.
@@ -1265,8 +1261,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1265
1261
  // r0: actual number of arguments
1266
1262
  // r1: function
1267
1263
  // r2: first argument
1268
- __ tst(r2, Operand(kSmiTagMask));
1269
- __ b(eq, &convert_to_object);
1264
+ __ JumpIfSmi(r2, &convert_to_object);
1270
1265
 
1271
1266
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1272
1267
  __ cmp(r2, r3);
@@ -1275,9 +1270,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1275
1270
  __ cmp(r2, r3);
1276
1271
  __ b(eq, &use_global_receiver);
1277
1272
 
1278
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
1279
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1280
- __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
1273
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1274
+ __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1281
1275
  __ b(ge, &shift_arguments);
1282
1276
 
1283
1277
  __ bind(&convert_to_object);
@@ -1379,7 +1373,8 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1379
1373
  ne);
1380
1374
 
1381
1375
  ParameterCount expected(0);
1382
- __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
1376
+ __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
1377
+ NullCallWrapper(), CALL_AS_METHOD);
1383
1378
  }
1384
1379
 
1385
1380
 
@@ -1440,13 +1435,11 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1440
1435
  __ b(ne, &push_receiver);
1441
1436
 
1442
1437
  // Do not transform the receiver for strict mode functions.
1443
- __ tst(r2, Operand(1 << (SharedFunctionInfo::kES5Native +
1444
- kSmiTagSize)));
1438
+ __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1445
1439
  __ b(ne, &push_receiver);
1446
1440
 
1447
1441
  // Compute the receiver in non-strict mode.
1448
- __ tst(r0, Operand(kSmiTagMask));
1449
- __ b(eq, &call_to_object);
1442
+ __ JumpIfSmi(r0, &call_to_object);
1450
1443
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
1451
1444
  __ cmp(r0, r1);
1452
1445
  __ b(eq, &use_global_receiver);
@@ -1456,9 +1449,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1456
1449
 
1457
1450
  // Check if the receiver is already a JavaScript object.
1458
1451
  // r0: receiver
1459
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
1460
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1461
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
1452
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1453
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1462
1454
  __ b(ge, &push_receiver);
1463
1455
 
1464
1456
  // Convert the receiver to a regular object.
@@ -1515,7 +1507,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1515
1507
  ParameterCount actual(r0);
1516
1508
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1517
1509
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
1518
- __ InvokeFunction(r1, actual, CALL_FUNCTION);
1510
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
1511
+ NullCallWrapper(), CALL_AS_METHOD);
1519
1512
 
1520
1513
  // Tear down the internal frame and remove function, receiver and args.
1521
1514
  __ LeaveInternalFrame();
@@ -69,8 +69,7 @@ static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
69
69
  void ToNumberStub::Generate(MacroAssembler* masm) {
70
70
  // The ToNumber stub takes one argument in eax.
71
71
  Label check_heap_number, call_builtin;
72
- __ tst(r0, Operand(kSmiTagMask));
73
- __ b(ne, &check_heap_number);
72
+ __ JumpIfNotSmi(r0, &check_heap_number);
74
73
  __ Ret();
75
74
 
76
75
  __ bind(&check_heap_number);
@@ -158,7 +157,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
158
157
  __ ldr(r3, MemOperand(sp, 0));
159
158
 
160
159
  // Setup the object header.
161
- __ LoadRoot(r2, Heap::kContextMapRootIndex);
160
+ __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex);
162
161
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
163
162
  __ mov(r2, Operand(Smi::FromInt(length)));
164
163
  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
@@ -166,11 +165,10 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
166
165
  // Setup the fixed slots.
167
166
  __ mov(r1, Operand(Smi::FromInt(0)));
168
167
  __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
169
- __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
170
- __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
168
+ __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
171
169
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
172
170
 
173
- // Copy the global object from the surrounding context.
171
+ // Copy the global object from the previous context.
174
172
  __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
175
173
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
176
174
 
@@ -187,7 +185,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
187
185
 
188
186
  // Need to collect. Call into runtime system.
189
187
  __ bind(&gc);
190
- __ TailCallRuntime(Runtime::kNewContext, 1, 1);
188
+ __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
191
189
  }
192
190
 
193
191
 
@@ -306,12 +304,6 @@ class ConvertToDoubleStub : public CodeStub {
306
304
  }
307
305
 
308
306
  void Generate(MacroAssembler* masm);
309
-
310
- const char* GetName() { return "ConvertToDoubleStub"; }
311
-
312
- #ifdef DEBUG
313
- void Print() { PrintF("ConvertToDoubleStub\n"); }
314
- #endif
315
307
  };
316
308
 
317
309
 
@@ -394,11 +386,11 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
394
386
  __ mov(scratch1, Operand(r0));
395
387
  ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2);
396
388
  __ push(lr);
397
- __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
389
+ __ Call(stub1.GetCode());
398
390
  // Write Smi from r1 to r1 and r0 in double format.
399
391
  __ mov(scratch1, Operand(r1));
400
392
  ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2);
401
- __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
393
+ __ Call(stub2.GetCode());
402
394
  __ pop(lr);
403
395
  }
404
396
  }
@@ -475,7 +467,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
475
467
  __ mov(scratch1, Operand(object));
476
468
  ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
477
469
  __ push(lr);
478
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
470
+ __ Call(stub.GetCode());
479
471
  __ pop(lr);
480
472
  }
481
473
 
@@ -557,7 +549,7 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
557
549
  // | s | exp | mantissa |
558
550
 
559
551
  // Check for zero.
560
- __ cmp(int_scratch, Operand(0));
552
+ __ cmp(int_scratch, Operand::Zero());
561
553
  __ mov(dst2, int_scratch);
562
554
  __ mov(dst1, int_scratch);
563
555
  __ b(eq, &done);
@@ -565,7 +557,7 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
565
557
  // Preload the sign of the value.
566
558
  __ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC);
567
559
  // Get the absolute value of the object (as an unsigned integer).
568
- __ rsb(int_scratch, int_scratch, Operand(0), SetCC, mi);
560
+ __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);
569
561
 
570
562
  // Get mantisssa[51:20].
571
563
 
@@ -597,7 +589,7 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
597
589
  __ mov(scratch2, Operand(int_scratch, LSL, scratch2));
598
590
  __ orr(dst2, dst2, scratch2);
599
591
  // Set dst1 to 0.
600
- __ mov(dst1, Operand(0));
592
+ __ mov(dst1, Operand::Zero());
601
593
  }
602
594
  __ bind(&done);
603
595
  }
@@ -665,7 +657,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
665
657
  // Check for 0 and -0.
666
658
  __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask));
667
659
  __ orr(scratch1, scratch1, Operand(dst2));
668
- __ cmp(scratch1, Operand(0));
660
+ __ cmp(scratch1, Operand::Zero());
669
661
  __ b(eq, &done);
670
662
 
671
663
  // Check that the value can be exactly represented by a 32-bit integer.
@@ -738,7 +730,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
738
730
  // Check for 0 and -0.
739
731
  __ bic(dst, scratch1, Operand(HeapNumber::kSignMask));
740
732
  __ orr(dst, scratch2, Operand(dst));
741
- __ cmp(dst, Operand(0));
733
+ __ cmp(dst, Operand::Zero());
742
734
  __ b(eq, &done);
743
735
 
744
736
  DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32);
@@ -755,7 +747,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
755
747
  // Set the sign.
756
748
  __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
757
749
  __ tst(scratch1, Operand(HeapNumber::kSignMask));
758
- __ rsb(dst, dst, Operand(0), LeaveCC, mi);
750
+ __ rsb(dst, dst, Operand::Zero(), LeaveCC, mi);
759
751
  }
760
752
 
761
753
  __ bind(&done);
@@ -931,14 +923,14 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
931
923
  // They are both equal and they are not both Smis so both of them are not
932
924
  // Smis. If it's not a heap number, then return equal.
933
925
  if (cond == lt || cond == gt) {
934
- __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE);
926
+ __ CompareObjectType(r0, r4, r4, FIRST_SPEC_OBJECT_TYPE);
935
927
  __ b(ge, slow);
936
928
  } else {
937
929
  __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
938
930
  __ b(eq, &heap_number);
939
931
  // Comparing JS objects with <=, >= is complicated.
940
932
  if (cond != eq) {
941
- __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
933
+ __ cmp(r4, Operand(FIRST_SPEC_OBJECT_TYPE));
942
934
  __ b(ge, slow);
943
935
  // Normally here we fall through to return_equal, but undefined is
944
936
  // special: (undefined == undefined) == true, but
@@ -1029,8 +1021,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
1029
1021
  (lhs.is(r1) && rhs.is(r0)));
1030
1022
 
1031
1023
  Label rhs_is_smi;
1032
- __ tst(rhs, Operand(kSmiTagMask));
1033
- __ b(eq, &rhs_is_smi);
1024
+ __ JumpIfSmi(rhs, &rhs_is_smi);
1034
1025
 
1035
1026
  // Lhs is a Smi. Check whether the rhs is a heap number.
1036
1027
  __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
@@ -1061,7 +1052,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
1061
1052
  // Convert lhs to a double in r2, r3.
1062
1053
  __ mov(r7, Operand(lhs));
1063
1054
  ConvertToDoubleStub stub1(r3, r2, r7, r6);
1064
- __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
1055
+ __ Call(stub1.GetCode());
1065
1056
  // Load rhs to a double in r0, r1.
1066
1057
  __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1067
1058
  __ pop(lr);
@@ -1103,7 +1094,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
1103
1094
  // Convert rhs to a double in r0, r1.
1104
1095
  __ mov(r7, Operand(rhs));
1105
1096
  ConvertToDoubleStub stub2(r1, r0, r7, r6);
1106
- __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
1097
+ __ Call(stub2.GetCode());
1107
1098
  __ pop(lr);
1108
1099
  }
1109
1100
  // Fall through to both_loaded_as_doubles.
@@ -1220,14 +1211,14 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
1220
1211
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
1221
1212
  (lhs.is(r1) && rhs.is(r0)));
1222
1213
 
1223
- // If either operand is a JSObject or an oddball value, then they are
1214
+ // If either operand is a JS object or an oddball value, then they are
1224
1215
  // not equal since their pointers are different.
1225
1216
  // There is no test for undetectability in strict equality.
1226
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1217
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1227
1218
  Label first_non_object;
1228
1219
  // Get the type of the first operand into r2 and compare it with
1229
- // FIRST_JS_OBJECT_TYPE.
1230
- __ CompareObjectType(rhs, r2, r2, FIRST_JS_OBJECT_TYPE);
1220
+ // FIRST_SPEC_OBJECT_TYPE.
1221
+ __ CompareObjectType(rhs, r2, r2, FIRST_SPEC_OBJECT_TYPE);
1231
1222
  __ b(lt, &first_non_object);
1232
1223
 
1233
1224
  // Return non-zero (r0 is not zero)
@@ -1240,7 +1231,7 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
1240
1231
  __ cmp(r2, Operand(ODDBALL_TYPE));
1241
1232
  __ b(eq, &return_not_equal);
1242
1233
 
1243
- __ CompareObjectType(lhs, r3, r3, FIRST_JS_OBJECT_TYPE);
1234
+ __ CompareObjectType(lhs, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1244
1235
  __ b(ge, &return_not_equal);
1245
1236
 
1246
1237
  // Check for oddballs: true, false, null, undefined.
@@ -1317,9 +1308,9 @@ static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
1317
1308
  __ Ret();
1318
1309
 
1319
1310
  __ bind(&object_test);
1320
- __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
1311
+ __ cmp(r2, Operand(FIRST_SPEC_OBJECT_TYPE));
1321
1312
  __ b(lt, not_both_strings);
1322
- __ CompareObjectType(lhs, r2, r3, FIRST_JS_OBJECT_TYPE);
1313
+ __ CompareObjectType(lhs, r2, r3, FIRST_SPEC_OBJECT_TYPE);
1323
1314
  __ b(lt, not_both_strings);
1324
1315
  // If both objects are undetectable, they are equal. Otherwise, they
1325
1316
  // are not equal, since they are different objects and an object is not
@@ -1458,8 +1449,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
1458
1449
  if (include_smi_compare_) {
1459
1450
  Label not_two_smis, smi_done;
1460
1451
  __ orr(r2, r1, r0);
1461
- __ tst(r2, Operand(kSmiTagMask));
1462
- __ b(ne, &not_two_smis);
1452
+ __ JumpIfNotSmi(r2, &not_two_smis);
1463
1453
  __ mov(r1, Operand(r1, ASR, 1));
1464
1454
  __ sub(r0, r1, Operand(r0, ASR, 1));
1465
1455
  __ Ret();
@@ -1482,8 +1472,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
1482
1472
  STATIC_ASSERT(kSmiTag == 0);
1483
1473
  ASSERT_EQ(0, Smi::FromInt(0));
1484
1474
  __ and_(r2, lhs_, Operand(rhs_));
1485
- __ tst(r2, Operand(kSmiTagMask));
1486
- __ b(ne, &not_smis);
1475
+ __ JumpIfNotSmi(r2, &not_smis);
1487
1476
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
1488
1477
  // 1) Return the answer.
1489
1478
  // 2) Go to slow.
@@ -1614,124 +1603,127 @@ void CompareStub::Generate(MacroAssembler* masm) {
1614
1603
  }
1615
1604
 
1616
1605
 
1617
- // This stub does not handle the inlined cases (Smis, Booleans, undefined).
1618
- // The stub returns zero for false, and a non-zero value for true.
1606
+ // The stub expects its argument in the tos_ register and returns its result in
1607
+ // it, too: zero for false, and a non-zero value for true.
1619
1608
  void ToBooleanStub::Generate(MacroAssembler* masm) {
1620
1609
  // This stub uses VFP3 instructions.
1621
1610
  CpuFeatures::Scope scope(VFP3);
1622
1611
 
1623
- Label false_result;
1624
- Label not_heap_number;
1625
- Register scratch = r9.is(tos_) ? r7 : r9;
1612
+ Label patch;
1613
+ const Register map = r9.is(tos_) ? r7 : r9;
1626
1614
 
1627
- // undefined -> false
1628
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1629
- __ cmp(tos_, ip);
1630
- __ b(eq, &false_result);
1631
-
1632
- // Boolean -> its value
1633
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1634
- __ cmp(tos_, ip);
1635
- __ b(eq, &false_result);
1636
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1637
- __ cmp(tos_, ip);
1638
- // "tos_" is a register and contains a non-zero value. Hence we implicitly
1639
- // return true if the equal condition is satisfied.
1640
- __ Ret(eq);
1615
+ // undefined -> false.
1616
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
1641
1617
 
1642
- // Smis: 0 -> false, all other -> true
1643
- __ tst(tos_, tos_);
1644
- __ b(eq, &false_result);
1645
- __ tst(tos_, Operand(kSmiTagMask));
1646
- // "tos_" is a register and contains a non-zero value. Hence we implicitly
1647
- // return true if the not equal condition is satisfied.
1648
- __ Ret(eq);
1618
+ // Boolean -> its value.
1619
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
1620
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
1649
1621
 
1650
- // 'null' -> false
1651
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
1652
- __ cmp(tos_, ip);
1653
- __ b(eq, &false_result);
1654
-
1655
- // HeapNumber => false iff +0, -0, or NaN.
1656
- __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset));
1657
- __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1658
- __ cmp(scratch, ip);
1659
- __ b(&not_heap_number, ne);
1660
- __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
1661
- __ VFPCompareAndSetFlags(d1, 0.0);
1662
- // "tos_" is a register, and contains a non zero value by default.
1663
- // Hence we only need to overwrite "tos_" with zero to return false for
1664
- // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
1665
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
1666
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
1667
- __ Ret();
1622
+ // 'null' -> false.
1623
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
1668
1624
 
1669
- __ bind(&not_heap_number);
1670
-
1671
- // It can be an undetectable object.
1672
- // Undetectable => false.
1673
- __ ldr(ip, FieldMemOperand(tos_, HeapObject::kMapOffset));
1674
- __ ldrb(scratch, FieldMemOperand(ip, Map::kBitFieldOffset));
1675
- __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
1676
- __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
1677
- __ b(&false_result, eq);
1678
-
1679
- // JavaScript object => true.
1680
- __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset));
1681
- __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1682
- __ cmp(scratch, Operand(FIRST_JS_OBJECT_TYPE));
1683
- // "tos_" is a register and contains a non-zero value.
1684
- // Hence we implicitly return true if the greater than
1685
- // condition is satisfied.
1686
- __ Ret(gt);
1687
-
1688
- // Check for string
1689
- __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset));
1690
- __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1691
- __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
1692
- // "tos_" is a register and contains a non-zero value.
1693
- // Hence we implicitly return true if the greater than
1694
- // condition is satisfied.
1695
- __ Ret(gt);
1696
-
1697
- // String value => false iff empty, i.e., length is zero
1698
- __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset));
1699
- // If length is zero, "tos_" contains zero ==> false.
1700
- // If length is not zero, "tos_" contains a non-zero value ==> true.
1701
- __ Ret();
1625
+ if (types_.Contains(SMI)) {
1626
+ // Smis: 0 -> false, all other -> true
1627
+ __ tst(tos_, Operand(kSmiTagMask));
1628
+ // tos_ contains the correct return value already
1629
+ __ Ret(eq);
1630
+ } else if (types_.NeedsMap()) {
1631
+ // If we need a map later and have a Smi -> patch.
1632
+ __ JumpIfSmi(tos_, &patch);
1633
+ }
1702
1634
 
1703
- // Return 0 in "tos_" for false .
1704
- __ bind(&false_result);
1705
- __ mov(tos_, Operand(0, RelocInfo::NONE));
1706
- __ Ret();
1635
+ if (types_.NeedsMap()) {
1636
+ __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
1637
+
1638
+ if (types_.CanBeUndetectable()) {
1639
+ __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
1640
+ __ tst(ip, Operand(1 << Map::kIsUndetectable));
1641
+ // Undetectable -> false.
1642
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
1643
+ __ Ret(ne);
1644
+ }
1645
+ }
1646
+
1647
+ if (types_.Contains(SPEC_OBJECT)) {
1648
+ // Spec object -> true.
1649
+ __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
1650
+ // tos_ contains the correct non-zero return value already.
1651
+ __ Ret(ge);
1652
+ }
1653
+
1654
+ if (types_.Contains(STRING)) {
1655
+ // String value -> false iff empty.
1656
+ __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
1657
+ __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt);
1658
+ __ Ret(lt); // the string length is OK as the return value
1659
+ }
1660
+
1661
+ if (types_.Contains(HEAP_NUMBER)) {
1662
+ // Heap number -> false iff +0, -0, or NaN.
1663
+ Label not_heap_number;
1664
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
1665
+ __ b(ne, &not_heap_number);
1666
+ __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
1667
+ __ VFPCompareAndSetFlags(d1, 0.0);
1668
+ // "tos_" is a register, and contains a non zero value by default.
1669
+ // Hence we only need to overwrite "tos_" with zero to return false for
1670
+ // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
1671
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
1672
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
1673
+ __ Ret();
1674
+ __ bind(&not_heap_number);
1675
+ }
1676
+
1677
+ __ bind(&patch);
1678
+ GenerateTypeTransition(masm);
1707
1679
  }
1708
1680
 
1709
1681
 
1710
- Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
1711
- UnaryOpStub stub(key, type_info);
1712
- return stub.GetCode();
1682
+ void ToBooleanStub::CheckOddball(MacroAssembler* masm,
1683
+ Type type,
1684
+ Heap::RootListIndex value,
1685
+ bool result) {
1686
+ if (types_.Contains(type)) {
1687
+ // If we see an expected oddball, return its ToBoolean value tos_.
1688
+ __ LoadRoot(ip, value);
1689
+ __ cmp(tos_, ip);
1690
+ // The value of a root is never NULL, so we can avoid loading a non-null
1691
+ // value into tos_ when we want to return 'true'.
1692
+ if (!result) {
1693
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);
1694
+ }
1695
+ __ Ret(eq);
1696
+ }
1713
1697
  }
1714
1698
 
1715
1699
 
1716
- const char* UnaryOpStub::GetName() {
1717
- if (name_ != NULL) return name_;
1718
- const int kMaxNameLength = 100;
1719
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
1720
- kMaxNameLength);
1721
- if (name_ == NULL) return "OOM";
1700
+ void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
1701
+ if (!tos_.is(r3)) {
1702
+ __ mov(r3, Operand(tos_));
1703
+ }
1704
+ __ mov(r2, Operand(Smi::FromInt(tos_.code())));
1705
+ __ mov(r1, Operand(Smi::FromInt(types_.ToByte())));
1706
+ __ Push(r3, r2, r1);
1707
+ // Patch the caller to an appropriate specialized stub and return the
1708
+ // operation result to the caller of the stub.
1709
+ __ TailCallExternalReference(
1710
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
1711
+ 3,
1712
+ 1);
1713
+ }
1714
+
1715
+
1716
+ void UnaryOpStub::PrintName(StringStream* stream) {
1722
1717
  const char* op_name = Token::Name(op_);
1723
1718
  const char* overwrite_name = NULL; // Make g++ happy.
1724
1719
  switch (mode_) {
1725
1720
  case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
1726
1721
  case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
1727
1722
  }
1728
-
1729
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
1730
- "UnaryOpStub_%s_%s_%s",
1731
- op_name,
1732
- overwrite_name,
1733
- UnaryOpIC::GetName(operand_type_));
1734
- return name_;
1723
+ stream->Add("UnaryOpStub_%s_%s_%s",
1724
+ op_name,
1725
+ overwrite_name,
1726
+ UnaryOpIC::GetName(operand_type_));
1735
1727
  }
1736
1728
 
1737
1729
 
@@ -1755,22 +1747,14 @@ void UnaryOpStub::Generate(MacroAssembler* masm) {
1755
1747
 
1756
1748
 
1757
1749
  void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1758
- // Prepare to push argument.
1759
- __ mov(r3, Operand(r0));
1760
-
1761
- // Push this stub's key. Although the operation and the type info are
1762
- // encoded into the key, the encoding is opaque, so push them too.
1763
- __ mov(r2, Operand(Smi::FromInt(MinorKey())));
1764
- __ mov(r1, Operand(Smi::FromInt(op_)));
1750
+ __ mov(r3, Operand(r0)); // the operand
1751
+ __ mov(r2, Operand(Smi::FromInt(op_)));
1752
+ __ mov(r1, Operand(Smi::FromInt(mode_)));
1765
1753
  __ mov(r0, Operand(Smi::FromInt(operand_type_)));
1766
-
1767
1754
  __ Push(r3, r2, r1, r0);
1768
1755
 
1769
1756
  __ TailCallExternalReference(
1770
- ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
1771
- masm->isolate()),
1772
- 4,
1773
- 1);
1757
+ ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
1774
1758
  }
1775
1759
 
1776
1760
 
@@ -1903,6 +1887,8 @@ void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
1903
1887
 
1904
1888
  void UnaryOpStub::GenerateHeapNumberCodeBitNot(
1905
1889
  MacroAssembler* masm, Label* slow) {
1890
+ Label impossible;
1891
+
1906
1892
  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
1907
1893
  // Convert the heap number is r0 to an untagged integer in r1.
1908
1894
  __ ConvertToInt32(r0, r1, r2, r3, d0, slow);
@@ -1921,17 +1907,27 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
1921
1907
  __ bind(&try_float);
1922
1908
  if (mode_ == UNARY_NO_OVERWRITE) {
1923
1909
  Label slow_allocate_heapnumber, heapnumber_allocated;
1924
- __ AllocateHeapNumber(r0, r2, r3, r6, &slow_allocate_heapnumber);
1910
+ // Allocate a new heap number without zapping r0, which we need if it fails.
1911
+ __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber);
1925
1912
  __ jmp(&heapnumber_allocated);
1926
1913
 
1927
1914
  __ bind(&slow_allocate_heapnumber);
1928
1915
  __ EnterInternalFrame();
1929
- __ push(r1);
1930
- __ CallRuntime(Runtime::kNumberAlloc, 0);
1931
- __ pop(r1);
1916
+ __ push(r0); // Push the heap number, not the untagged int32.
1917
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
1918
+ __ mov(r2, r0); // Move the new heap number into r2.
1919
+ // Get the heap number into r0, now that the new heap number is in r2.
1920
+ __ pop(r0);
1932
1921
  __ LeaveInternalFrame();
1933
1922
 
1923
+ // Convert the heap number in r0 to an untagged integer in r1.
1924
+ // This can't go slow-case because it's the same number we already
1925
+ // converted once again.
1926
+ __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible);
1927
+ __ mvn(r1, Operand(r1));
1928
+
1934
1929
  __ bind(&heapnumber_allocated);
1930
+ __ mov(r0, r2); // Move newly allocated heap number to r0.
1935
1931
  }
1936
1932
 
1937
1933
  if (CpuFeatures::IsSupported(VFP3)) {
@@ -1948,6 +1944,11 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
1948
1944
  WriteInt32ToHeapNumberStub stub(r1, r0, r2);
1949
1945
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
1950
1946
  }
1947
+
1948
+ __ bind(&impossible);
1949
+ if (FLAG_debug_code) {
1950
+ __ stop("Incorrect assumption in bit-not stub");
1951
+ }
1951
1952
  }
1952
1953
 
1953
1954
 
@@ -2002,14 +2003,6 @@ void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
2002
2003
  }
2003
2004
 
2004
2005
 
2005
- Handle<Code> GetBinaryOpStub(int key,
2006
- BinaryOpIC::TypeInfo type_info,
2007
- BinaryOpIC::TypeInfo result_type_info) {
2008
- BinaryOpStub stub(key, type_info, result_type_info);
2009
- return stub.GetCode();
2010
- }
2011
-
2012
-
2013
2006
  void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
2014
2007
  Label get_result;
2015
2008
 
@@ -2066,12 +2059,7 @@ void BinaryOpStub::Generate(MacroAssembler* masm) {
2066
2059
  }
2067
2060
 
2068
2061
 
2069
- const char* BinaryOpStub::GetName() {
2070
- if (name_ != NULL) return name_;
2071
- const int kMaxNameLength = 100;
2072
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
2073
- kMaxNameLength);
2074
- if (name_ == NULL) return "OOM";
2062
+ void BinaryOpStub::PrintName(StringStream* stream) {
2075
2063
  const char* op_name = Token::Name(op_);
2076
2064
  const char* overwrite_name;
2077
2065
  switch (mode_) {
@@ -2080,13 +2068,10 @@ const char* BinaryOpStub::GetName() {
2080
2068
  case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
2081
2069
  default: overwrite_name = "UnknownOverwrite"; break;
2082
2070
  }
2083
-
2084
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
2085
- "BinaryOpStub_%s_%s_%s",
2086
- op_name,
2087
- overwrite_name,
2088
- BinaryOpIC::GetName(operands_type_));
2089
- return name_;
2071
+ stream->Add("BinaryOpStub_%s_%s_%s",
2072
+ op_name,
2073
+ overwrite_name,
2074
+ BinaryOpIC::GetName(operands_type_));
2090
2075
  }
2091
2076
 
2092
2077
 
@@ -2439,13 +2424,11 @@ void BinaryOpStub::GenerateSmiCode(
2439
2424
  Register left = r1;
2440
2425
  Register right = r0;
2441
2426
  Register scratch1 = r7;
2442
- Register scratch2 = r9;
2443
2427
 
2444
2428
  // Perform combined smi check on both operands.
2445
2429
  __ orr(scratch1, left, Operand(right));
2446
2430
  STATIC_ASSERT(kSmiTag == 0);
2447
- __ tst(scratch1, Operand(kSmiTagMask));
2448
- __ b(ne, &not_smis);
2431
+ __ JumpIfNotSmi(scratch1, &not_smis);
2449
2432
 
2450
2433
  // If the smi-smi operation results in a smi return is generated.
2451
2434
  GenerateSmiSmiOperation(masm);
@@ -2558,37 +2541,36 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2558
2541
  case Token::MUL:
2559
2542
  case Token::DIV:
2560
2543
  case Token::MOD: {
2561
- // Load both operands and check that they are 32-bit integer.
2562
- // Jump to type transition if they are not. The registers r0 and r1 (right
2563
- // and left) are preserved for the runtime call.
2564
- FloatingPointHelper::Destination destination =
2565
- CpuFeatures::IsSupported(VFP3) &&
2566
- op_ != Token::MOD ?
2567
- FloatingPointHelper::kVFPRegisters :
2568
- FloatingPointHelper::kCoreRegisters;
2569
-
2570
- FloatingPointHelper::LoadNumberAsInt32Double(masm,
2571
- right,
2572
- destination,
2573
- d7,
2574
- r2,
2575
- r3,
2576
- heap_number_map,
2577
- scratch1,
2578
- scratch2,
2579
- s0,
2580
- &transition);
2581
- FloatingPointHelper::LoadNumberAsInt32Double(masm,
2582
- left,
2583
- destination,
2584
- d6,
2585
- r4,
2586
- r5,
2587
- heap_number_map,
2588
- scratch1,
2589
- scratch2,
2590
- s0,
2591
- &transition);
2544
+ // Load both operands and check that they are 32-bit integer.
2545
+ // Jump to type transition if they are not. The registers r0 and r1 (right
2546
+ // and left) are preserved for the runtime call.
2547
+ FloatingPointHelper::Destination destination =
2548
+ (CpuFeatures::IsSupported(VFP3) && op_ != Token::MOD)
2549
+ ? FloatingPointHelper::kVFPRegisters
2550
+ : FloatingPointHelper::kCoreRegisters;
2551
+
2552
+ FloatingPointHelper::LoadNumberAsInt32Double(masm,
2553
+ right,
2554
+ destination,
2555
+ d7,
2556
+ r2,
2557
+ r3,
2558
+ heap_number_map,
2559
+ scratch1,
2560
+ scratch2,
2561
+ s0,
2562
+ &transition);
2563
+ FloatingPointHelper::LoadNumberAsInt32Double(masm,
2564
+ left,
2565
+ destination,
2566
+ d6,
2567
+ r4,
2568
+ r5,
2569
+ heap_number_map,
2570
+ scratch1,
2571
+ scratch2,
2572
+ s0,
2573
+ &transition);
2592
2574
 
2593
2575
  if (destination == FloatingPointHelper::kVFPRegisters) {
2594
2576
  CpuFeatures::Scope scope(VFP3);
@@ -2635,7 +2617,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2635
2617
  __ b(mi, &return_heap_number);
2636
2618
  // Check for minus zero. Return heap number for minus zero.
2637
2619
  Label not_zero;
2638
- __ cmp(scratch1, Operand(0));
2620
+ __ cmp(scratch1, Operand::Zero());
2639
2621
  __ b(ne, &not_zero);
2640
2622
  __ vmov(scratch2, d5.high());
2641
2623
  __ tst(scratch2, Operand(HeapNumber::kSignMask));
@@ -2649,9 +2631,11 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2649
2631
  // DIV just falls through to allocating a heap number.
2650
2632
  }
2651
2633
 
2652
- if (result_type_ >= (op_ == Token::DIV) ? BinaryOpIC::HEAP_NUMBER
2653
- : BinaryOpIC::INT32) {
2654
- __ bind(&return_heap_number);
2634
+ __ bind(&return_heap_number);
2635
+ // Return a heap number, or fall through to type transition or runtime
2636
+ // call if we can't.
2637
+ if (result_type_ >= ((op_ == Token::DIV) ? BinaryOpIC::HEAP_NUMBER
2638
+ : BinaryOpIC::INT32)) {
2655
2639
  // We are using vfp registers so r5 is available.
2656
2640
  heap_number_result = r5;
2657
2641
  GenerateHeapResultAllocation(masm,
@@ -2825,7 +2809,11 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2825
2809
  UNREACHABLE();
2826
2810
  }
2827
2811
 
2828
- if (transition.is_linked()) {
2812
+ // We never expect DIV to yield an integer result, so we always generate
2813
+ // type transition code for DIV operations expecting an integer result: the
2814
+ // code will fall through to this type transition.
2815
+ if (transition.is_linked() ||
2816
+ ((op_ == Token::DIV) && (result_type_ <= BinaryOpIC::INT32))) {
2829
2817
  __ bind(&transition);
2830
2818
  GenerateTypeTransition(masm);
2831
2819
  }
@@ -3121,7 +3109,6 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
3121
3109
 
3122
3110
  Label no_update;
3123
3111
  Label skip_cache;
3124
- const Register heap_number_map = r5;
3125
3112
 
3126
3113
  // Call C function to calculate the result and update the cache.
3127
3114
  // Register r0 holds precalculated cache entry address; preserve
@@ -3394,15 +3381,10 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
3394
3381
 
3395
3382
  __ mov(r2, Operand(ExternalReference::isolate_address()));
3396
3383
 
3397
-
3398
- // TODO(1242173): To let the GC traverse the return address of the exit
3399
- // frames, we need to know where the return address is. Right now,
3400
- // we store it on the stack to be able to find it again, but we never
3401
- // restore from it in case of changes, which makes it impossible to
3402
- // support moving the C entry code stub. This should be fixed, but currently
3403
- // this is OK because the CEntryStub gets generated so early in the V8 boot
3404
- // sequence that it is not moving ever.
3405
-
3384
+ // To let the GC traverse the return address of the exit frames, we need to
3385
+ // know where the return address is. The CEntryStub is unmovable, so
3386
+ // we can store the address on the stack to be able to find it again and
3387
+ // we never have to restore it, because it will not change.
3406
3388
  // Compute the return address in lr to return to after the jump below. Pc is
3407
3389
  // already at '+ 8' from the current instruction but return is after three
3408
3390
  // instructions so add another 4 to pc to get the return address.
@@ -3552,12 +3534,26 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3552
3534
  // Save callee-saved registers (incl. cp and fp), sp, and lr
3553
3535
  __ stm(db_w, sp, kCalleeSaved | lr.bit());
3554
3536
 
3537
+ if (CpuFeatures::IsSupported(VFP3)) {
3538
+ CpuFeatures::Scope scope(VFP3);
3539
+ // Save callee-saved vfp registers.
3540
+ __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
3541
+ // Set up the reserved register for 0.0.
3542
+ __ vmov(kDoubleRegZero, 0.0);
3543
+ }
3544
+
3555
3545
  // Get address of argv, see stm above.
3556
3546
  // r0: code entry
3557
3547
  // r1: function
3558
3548
  // r2: receiver
3559
3549
  // r3: argc
3560
- __ ldr(r4, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize)); // argv
3550
+
3551
+ // Setup argv in r4.
3552
+ int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
3553
+ if (CpuFeatures::IsSupported(VFP3)) {
3554
+ offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
3555
+ }
3556
+ __ ldr(r4, MemOperand(sp, offset_to_argv));
3561
3557
 
3562
3558
  // Push a frame with special values setup to mark it as an entry frame.
3563
3559
  // r0: code entry
@@ -3578,13 +3574,12 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3578
3574
  // Setup frame pointer for the frame to be pushed.
3579
3575
  __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
3580
3576
 
3581
- #ifdef ENABLE_LOGGING_AND_PROFILING
3582
3577
  // If this is the outermost JS call, set js_entry_sp value.
3583
3578
  Label non_outermost_js;
3584
3579
  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
3585
3580
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
3586
3581
  __ ldr(r6, MemOperand(r5));
3587
- __ cmp(r6, Operand(0));
3582
+ __ cmp(r6, Operand::Zero());
3588
3583
  __ b(ne, &non_outermost_js);
3589
3584
  __ str(fp, MemOperand(r5));
3590
3585
  __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
@@ -3594,7 +3589,6 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3594
3589
  __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
3595
3590
  __ bind(&cont);
3596
3591
  __ push(ip);
3597
- #endif
3598
3592
 
3599
3593
  // Call a faked try-block that does the invoke.
3600
3594
  __ bl(&invoke);
@@ -3655,17 +3649,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3655
3649
  __ PopTryHandler();
3656
3650
 
3657
3651
  __ bind(&exit); // r0 holds result
3658
- #ifdef ENABLE_LOGGING_AND_PROFILING
3659
3652
  // Check if the current stack frame is marked as the outermost JS frame.
3660
3653
  Label non_outermost_js_2;
3661
3654
  __ pop(r5);
3662
3655
  __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
3663
3656
  __ b(ne, &non_outermost_js_2);
3664
- __ mov(r6, Operand(0));
3657
+ __ mov(r6, Operand::Zero());
3665
3658
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
3666
3659
  __ str(r6, MemOperand(r5));
3667
3660
  __ bind(&non_outermost_js_2);
3668
- #endif
3669
3661
 
3670
3662
  // Restore the top frame descriptors from the stack.
3671
3663
  __ pop(r3);
@@ -3682,6 +3674,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3682
3674
  __ mov(lr, Operand(pc));
3683
3675
  }
3684
3676
  #endif
3677
+
3678
+ if (CpuFeatures::IsSupported(VFP3)) {
3679
+ CpuFeatures::Scope scope(VFP3);
3680
+ // Restore callee-saved vfp registers.
3681
+ __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
3682
+ }
3683
+
3685
3684
  __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
3686
3685
  }
3687
3686
 
@@ -3856,7 +3855,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
3856
3855
  __ Push(r0, r1);
3857
3856
  __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
3858
3857
  __ LeaveInternalFrame();
3859
- __ cmp(r0, Operand(0));
3858
+ __ cmp(r0, Operand::Zero());
3860
3859
  __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
3861
3860
  __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
3862
3861
  __ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -3921,11 +3920,232 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
3921
3920
  }
3922
3921
 
3923
3922
 
3924
- void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3923
+ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
3925
3924
  // sp[0] : number of parameters
3926
3925
  // sp[4] : receiver displacement
3927
3926
  // sp[8] : function
3928
3927
 
3928
+ // Check if the calling frame is an arguments adaptor frame.
3929
+ Label runtime;
3930
+ __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3931
+ __ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
3932
+ __ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3933
+ __ b(ne, &runtime);
3934
+
3935
+ // Patch the arguments.length and the parameters pointer in the current frame.
3936
+ __ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
3937
+ __ str(r2, MemOperand(sp, 0 * kPointerSize));
3938
+ __ add(r3, r3, Operand(r2, LSL, 1));
3939
+ __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
3940
+ __ str(r3, MemOperand(sp, 1 * kPointerSize));
3941
+
3942
+ __ bind(&runtime);
3943
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
3944
+ }
3945
+
3946
+
3947
+ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
3948
+ // Stack layout:
3949
+ // sp[0] : number of parameters (tagged)
3950
+ // sp[4] : address of receiver argument
3951
+ // sp[8] : function
3952
+ // Registers used over whole function:
3953
+ // r6 : allocated object (tagged)
3954
+ // r9 : mapped parameter count (tagged)
3955
+
3956
+ __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
3957
+ // r1 = parameter count (tagged)
3958
+
3959
+ // Check if the calling frame is an arguments adaptor frame.
3960
+ Label runtime;
3961
+ Label adaptor_frame, try_allocate;
3962
+ __ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3963
+ __ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
3964
+ __ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3965
+ __ b(eq, &adaptor_frame);
3966
+
3967
+ // No adaptor, parameter count = argument count.
3968
+ __ mov(r2, r1);
3969
+ __ b(&try_allocate);
3970
+
3971
+ // We have an adaptor frame. Patch the parameters pointer.
3972
+ __ bind(&adaptor_frame);
3973
+ __ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
3974
+ __ add(r3, r3, Operand(r2, LSL, 1));
3975
+ __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
3976
+ __ str(r3, MemOperand(sp, 1 * kPointerSize));
3977
+
3978
+ // r1 = parameter count (tagged)
3979
+ // r2 = argument count (tagged)
3980
+ // Compute the mapped parameter count = min(r1, r2) in r1.
3981
+ __ cmp(r1, Operand(r2));
3982
+ __ mov(r1, Operand(r2), LeaveCC, gt);
3983
+
3984
+ __ bind(&try_allocate);
3985
+
3986
+ // Compute the sizes of backing store, parameter map, and arguments object.
3987
+ // 1. Parameter map, has 2 extra words containing context and backing store.
3988
+ const int kParameterMapHeaderSize =
3989
+ FixedArray::kHeaderSize + 2 * kPointerSize;
3990
+ // If there are no mapped parameters, we do not need the parameter_map.
3991
+ __ cmp(r1, Operand(Smi::FromInt(0)));
3992
+ __ mov(r9, Operand::Zero(), LeaveCC, eq);
3993
+ __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
3994
+ __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
3995
+
3996
+ // 2. Backing store.
3997
+ __ add(r9, r9, Operand(r2, LSL, 1));
3998
+ __ add(r9, r9, Operand(FixedArray::kHeaderSize));
3999
+
4000
+ // 3. Arguments object.
4001
+ __ add(r9, r9, Operand(Heap::kArgumentsObjectSize));
4002
+
4003
+ // Do the allocation of all three objects in one go.
4004
+ __ AllocateInNewSpace(r9, r0, r3, r4, &runtime, TAG_OBJECT);
4005
+
4006
+ // r0 = address of new object(s) (tagged)
4007
+ // r2 = argument count (tagged)
4008
+ // Get the arguments boilerplate from the current (global) context into r4.
4009
+ const int kNormalOffset =
4010
+ Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
4011
+ const int kAliasedOffset =
4012
+ Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
4013
+
4014
+ __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
4015
+ __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
4016
+ __ cmp(r1, Operand::Zero());
4017
+ __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
4018
+ __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
4019
+
4020
+ // r0 = address of new object (tagged)
4021
+ // r1 = mapped parameter count (tagged)
4022
+ // r2 = argument count (tagged)
4023
+ // r4 = address of boilerplate object (tagged)
4024
+ // Copy the JS object part.
4025
+ for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
4026
+ __ ldr(r3, FieldMemOperand(r4, i));
4027
+ __ str(r3, FieldMemOperand(r0, i));
4028
+ }
4029
+
4030
+ // Setup the callee in-object property.
4031
+ STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
4032
+ __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
4033
+ const int kCalleeOffset = JSObject::kHeaderSize +
4034
+ Heap::kArgumentsCalleeIndex * kPointerSize;
4035
+ __ str(r3, FieldMemOperand(r0, kCalleeOffset));
4036
+
4037
+ // Use the length (smi tagged) and set that as an in-object property too.
4038
+ STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
4039
+ const int kLengthOffset = JSObject::kHeaderSize +
4040
+ Heap::kArgumentsLengthIndex * kPointerSize;
4041
+ __ str(r2, FieldMemOperand(r0, kLengthOffset));
4042
+
4043
+ // Setup the elements pointer in the allocated arguments object.
4044
+ // If we allocated a parameter map, r4 will point there, otherwise
4045
+ // it will point to the backing store.
4046
+ __ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
4047
+ __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
4048
+
4049
+ // r0 = address of new object (tagged)
4050
+ // r1 = mapped parameter count (tagged)
4051
+ // r2 = argument count (tagged)
4052
+ // r4 = address of parameter map or backing store (tagged)
4053
+ // Initialize parameter map. If there are no mapped arguments, we're done.
4054
+ Label skip_parameter_map;
4055
+ __ cmp(r1, Operand(Smi::FromInt(0)));
4056
+ // Move backing store address to r3, because it is
4057
+ // expected there when filling in the unmapped arguments.
4058
+ __ mov(r3, r4, LeaveCC, eq);
4059
+ __ b(eq, &skip_parameter_map);
4060
+
4061
+ __ LoadRoot(r6, Heap::kNonStrictArgumentsElementsMapRootIndex);
4062
+ __ str(r6, FieldMemOperand(r4, FixedArray::kMapOffset));
4063
+ __ add(r6, r1, Operand(Smi::FromInt(2)));
4064
+ __ str(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
4065
+ __ str(r8, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
4066
+ __ add(r6, r4, Operand(r1, LSL, 1));
4067
+ __ add(r6, r6, Operand(kParameterMapHeaderSize));
4068
+ __ str(r6, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
4069
+
4070
+ // Copy the parameter slots and the holes in the arguments.
4071
+ // We need to fill in mapped_parameter_count slots. They index the context,
4072
+ // where parameters are stored in reverse order, at
4073
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
4074
+ // The mapped parameter thus need to get indices
4075
+ // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4076
+ // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4077
+ // We loop from right to left.
4078
+ Label parameters_loop, parameters_test;
4079
+ __ mov(r6, r1);
4080
+ __ ldr(r9, MemOperand(sp, 0 * kPointerSize));
4081
+ __ add(r9, r9, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4082
+ __ sub(r9, r9, Operand(r1));
4083
+ __ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
4084
+ __ add(r3, r4, Operand(r6, LSL, 1));
4085
+ __ add(r3, r3, Operand(kParameterMapHeaderSize));
4086
+
4087
+ // r6 = loop variable (tagged)
4088
+ // r1 = mapping index (tagged)
4089
+ // r3 = address of backing store (tagged)
4090
+ // r4 = address of parameter map (tagged)
4091
+ // r5 = temporary scratch (a.o., for address calculation)
4092
+ // r7 = the hole value
4093
+ __ jmp(&parameters_test);
4094
+
4095
+ __ bind(&parameters_loop);
4096
+ __ sub(r6, r6, Operand(Smi::FromInt(1)));
4097
+ __ mov(r5, Operand(r6, LSL, 1));
4098
+ __ add(r5, r5, Operand(kParameterMapHeaderSize - kHeapObjectTag));
4099
+ __ str(r9, MemOperand(r4, r5));
4100
+ __ sub(r5, r5, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
4101
+ __ str(r7, MemOperand(r3, r5));
4102
+ __ add(r9, r9, Operand(Smi::FromInt(1)));
4103
+ __ bind(&parameters_test);
4104
+ __ cmp(r6, Operand(Smi::FromInt(0)));
4105
+ __ b(ne, &parameters_loop);
4106
+
4107
+ __ bind(&skip_parameter_map);
4108
+ // r2 = argument count (tagged)
4109
+ // r3 = address of backing store (tagged)
4110
+ // r5 = scratch
4111
+ // Copy arguments header and remaining slots (if there are any).
4112
+ __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
4113
+ __ str(r5, FieldMemOperand(r3, FixedArray::kMapOffset));
4114
+ __ str(r2, FieldMemOperand(r3, FixedArray::kLengthOffset));
4115
+
4116
+ Label arguments_loop, arguments_test;
4117
+ __ mov(r9, r1);
4118
+ __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
4119
+ __ sub(r4, r4, Operand(r9, LSL, 1));
4120
+ __ jmp(&arguments_test);
4121
+
4122
+ __ bind(&arguments_loop);
4123
+ __ sub(r4, r4, Operand(kPointerSize));
4124
+ __ ldr(r6, MemOperand(r4, 0));
4125
+ __ add(r5, r3, Operand(r9, LSL, 1));
4126
+ __ str(r6, FieldMemOperand(r5, FixedArray::kHeaderSize));
4127
+ __ add(r9, r9, Operand(Smi::FromInt(1)));
4128
+
4129
+ __ bind(&arguments_test);
4130
+ __ cmp(r9, Operand(r2));
4131
+ __ b(lt, &arguments_loop);
4132
+
4133
+ // Return and remove the on-stack parameters.
4134
+ __ add(sp, sp, Operand(3 * kPointerSize));
4135
+ __ Ret();
4136
+
4137
+ // Do the runtime call to allocate the arguments object.
4138
+ // r2 = argument count (taggged)
4139
+ __ bind(&runtime);
4140
+ __ str(r2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
4141
+ __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
4142
+ }
4143
+
4144
+
4145
+ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
4146
+ // sp[0] : number of parameters
4147
+ // sp[4] : receiver displacement
4148
+ // sp[8] : function
3929
4149
  // Check if the calling frame is an arguments adaptor frame.
3930
4150
  Label adaptor_frame, try_allocate, runtime;
3931
4151
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@@ -3954,40 +4174,31 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3954
4174
  __ mov(r1, Operand(r1, LSR, kSmiTagSize));
3955
4175
  __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
3956
4176
  __ bind(&add_arguments_object);
3957
- __ add(r1, r1, Operand(GetArgumentsObjectSize() / kPointerSize));
4177
+ __ add(r1, r1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize));
3958
4178
 
3959
4179
  // Do the allocation of both objects in one go.
3960
- __ AllocateInNewSpace(
3961
- r1,
3962
- r0,
3963
- r2,
3964
- r3,
3965
- &runtime,
3966
- static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
4180
+ __ AllocateInNewSpace(r1,
4181
+ r0,
4182
+ r2,
4183
+ r3,
4184
+ &runtime,
4185
+ static_cast<AllocationFlags>(TAG_OBJECT |
4186
+ SIZE_IN_WORDS));
3967
4187
 
3968
4188
  // Get the arguments boilerplate from the current (global) context.
3969
4189
  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
3970
4190
  __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
3971
- __ ldr(r4, MemOperand(r4,
3972
- Context::SlotOffset(GetArgumentsBoilerplateIndex())));
4191
+ __ ldr(r4, MemOperand(r4, Context::SlotOffset(
4192
+ Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
3973
4193
 
3974
4194
  // Copy the JS object part.
3975
4195
  __ CopyFields(r0, r4, r3.bit(), JSObject::kHeaderSize / kPointerSize);
3976
4196
 
3977
- if (type_ == NEW_NON_STRICT) {
3978
- // Setup the callee in-object property.
3979
- STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
3980
- __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
3981
- const int kCalleeOffset = JSObject::kHeaderSize +
3982
- Heap::kArgumentsCalleeIndex * kPointerSize;
3983
- __ str(r3, FieldMemOperand(r0, kCalleeOffset));
3984
- }
3985
-
3986
4197
  // Get the length (smi tagged) and set that as an in-object property too.
3987
4198
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
3988
4199
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
3989
4200
  __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize +
3990
- Heap::kArgumentsLengthIndex * kPointerSize));
4201
+ Heap::kArgumentsLengthIndex * kPointerSize));
3991
4202
 
3992
4203
  // If there are no actual arguments, we're done.
3993
4204
  Label done;
@@ -3999,12 +4210,13 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3999
4210
 
4000
4211
  // Setup the elements pointer in the allocated arguments object and
4001
4212
  // initialize the header in the elements fixed array.
4002
- __ add(r4, r0, Operand(GetArgumentsObjectSize()));
4213
+ __ add(r4, r0, Operand(Heap::kArgumentsObjectSizeStrict));
4003
4214
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
4004
4215
  __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
4005
4216
  __ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
4006
4217
  __ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
4007
- __ mov(r1, Operand(r1, LSR, kSmiTagSize)); // Untag the length for the loop.
4218
+ // Untag the length for the loop.
4219
+ __ mov(r1, Operand(r1, LSR, kSmiTagSize));
4008
4220
 
4009
4221
  // Copy the fixed array slots.
4010
4222
  Label loop;
@@ -4027,7 +4239,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
4027
4239
 
4028
4240
  // Do the runtime call to allocate the arguments object.
4029
4241
  __ bind(&runtime);
4030
- __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
4242
+ __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
4031
4243
  }
4032
4244
 
4033
4245
 
@@ -4079,8 +4291,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4079
4291
  // Check that the first argument is a JSRegExp object.
4080
4292
  __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
4081
4293
  STATIC_ASSERT(kSmiTag == 0);
4082
- __ tst(r0, Operand(kSmiTagMask));
4083
- __ b(eq, &runtime);
4294
+ __ JumpIfSmi(r0, &runtime);
4084
4295
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
4085
4296
  __ b(ne, &runtime);
4086
4297
 
@@ -4116,8 +4327,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4116
4327
  // regexp_data: RegExp data (FixedArray)
4117
4328
  // Check that the second argument is a string.
4118
4329
  __ ldr(subject, MemOperand(sp, kSubjectOffset));
4119
- __ tst(subject, Operand(kSmiTagMask));
4120
- __ b(eq, &runtime);
4330
+ __ JumpIfSmi(subject, &runtime);
4121
4331
  Condition is_string = masm->IsObjectStringType(subject, r0);
4122
4332
  __ b(NegateCondition(is_string), &runtime);
4123
4333
  // Get the length of the string to r3.
@@ -4130,8 +4340,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4130
4340
  // Check that the third argument is a positive smi less than the subject
4131
4341
  // string length. A negative value will be greater (unsigned comparison).
4132
4342
  __ ldr(r0, MemOperand(sp, kPreviousIndexOffset));
4133
- __ tst(r0, Operand(kSmiTagMask));
4134
- __ b(ne, &runtime);
4343
+ __ JumpIfNotSmi(r0, &runtime);
4135
4344
  __ cmp(r3, Operand(r0));
4136
4345
  __ b(ls, &runtime);
4137
4346
 
@@ -4140,8 +4349,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4140
4349
  // regexp_data: RegExp data (FixedArray)
4141
4350
  // Check that the fourth object is a JSArray object.
4142
4351
  __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
4143
- __ tst(r0, Operand(kSmiTagMask));
4144
- __ b(eq, &runtime);
4352
+ __ JumpIfSmi(r0, &runtime);
4145
4353
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
4146
4354
  __ b(ne, &runtime);
4147
4355
  // Check that the JSArray is in fast case.
@@ -4159,6 +4367,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4159
4367
  __ cmp(r2, Operand(r0, ASR, kSmiTagSize));
4160
4368
  __ b(gt, &runtime);
4161
4369
 
4370
+ // Reset offset for possibly sliced string.
4371
+ __ mov(r9, Operand(0));
4162
4372
  // subject: Subject string
4163
4373
  // regexp_data: RegExp data (FixedArray)
4164
4374
  // Check the representation and encoding of the subject string.
@@ -4166,33 +4376,45 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4166
4376
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
4167
4377
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
4168
4378
  // First check for flat string.
4169
- __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask));
4379
+ __ and_(r1, r0, Operand(kIsNotStringMask | kStringRepresentationMask), SetCC);
4170
4380
  STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
4171
4381
  __ b(eq, &seq_string);
4172
4382
 
4173
4383
  // subject: Subject string
4174
4384
  // regexp_data: RegExp data (FixedArray)
4175
- // Check for flat cons string.
4385
+ // Check for flat cons string or sliced string.
4176
4386
  // A flat cons string is a cons string where the second part is the empty
4177
4387
  // string. In that case the subject string is just the first part of the cons
4178
4388
  // string. Also in this case the first part of the cons string is known to be
4179
4389
  // a sequential string or an external string.
4180
- STATIC_ASSERT(kExternalStringTag !=0);
4181
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
4182
- __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag));
4183
- __ b(ne, &runtime);
4390
+ // In the case of a sliced string its offset has to be taken into account.
4391
+ Label cons_string, check_encoding;
4392
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
4393
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
4394
+ __ cmp(r1, Operand(kExternalStringTag));
4395
+ __ b(lt, &cons_string);
4396
+ __ b(eq, &runtime);
4397
+
4398
+ // String is sliced.
4399
+ __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
4400
+ __ mov(r9, Operand(r9, ASR, kSmiTagSize));
4401
+ __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
4402
+ // r9: offset of sliced string, smi-tagged.
4403
+ __ jmp(&check_encoding);
4404
+ // String is a cons string, check whether it is flat.
4405
+ __ bind(&cons_string);
4184
4406
  __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
4185
4407
  __ LoadRoot(r1, Heap::kEmptyStringRootIndex);
4186
4408
  __ cmp(r0, r1);
4187
4409
  __ b(ne, &runtime);
4188
4410
  __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
4411
+ // Is first part of cons or parent of slice a flat string?
4412
+ __ bind(&check_encoding);
4189
4413
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
4190
4414
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
4191
- // Is first part a flat string?
4192
4415
  STATIC_ASSERT(kSeqStringTag == 0);
4193
4416
  __ tst(r0, Operand(kStringRepresentationMask));
4194
4417
  __ b(ne, &runtime);
4195
-
4196
4418
  __ bind(&seq_string);
4197
4419
  // subject: Subject string
4198
4420
  // regexp_data: RegExp data (FixedArray)
@@ -4207,9 +4429,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4207
4429
 
4208
4430
  // Check that the irregexp code has been generated for the actual string
4209
4431
  // encoding. If it has, the field contains a code object otherwise it contains
4210
- // the hole.
4211
- __ CompareObjectType(r7, r0, r0, CODE_TYPE);
4212
- __ b(ne, &runtime);
4432
+ // a smi (code flushing support).
4433
+ __ JumpIfSmi(r7, &runtime);
4213
4434
 
4214
4435
  // r3: encoding of subject string (1 if ASCII, 0 if two_byte);
4215
4436
  // r7: code
@@ -4259,21 +4480,30 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4259
4480
 
4260
4481
  // For arguments 4 and 3 get string length, calculate start of string data and
4261
4482
  // calculate the shift of the index (0 for ASCII and 1 for two byte).
4262
- __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
4263
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
4264
4483
  STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
4265
- __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
4484
+ __ add(r8, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
4266
4485
  __ eor(r3, r3, Operand(1));
4267
- // Argument 4 (r3): End of string data
4268
- // Argument 3 (r2): Start of string data
4486
+ // Load the length from the original subject string from the previous stack
4487
+ // frame. Therefore we have to use fp, which points exactly to two pointer
4488
+ // sizes below the previous sp. (Because creating a new stack frame pushes
4489
+ // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
4490
+ __ ldr(r0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
4491
+ // If slice offset is not 0, load the length from the original sliced string.
4492
+ // Argument 4, r3: End of string data
4493
+ // Argument 3, r2: Start of string data
4494
+ // Prepare start and end index of the input.
4495
+ __ add(r9, r8, Operand(r9, LSL, r3));
4269
4496
  __ add(r2, r9, Operand(r1, LSL, r3));
4270
- __ add(r3, r9, Operand(r0, LSL, r3));
4497
+
4498
+ __ ldr(r8, FieldMemOperand(r0, String::kLengthOffset));
4499
+ __ mov(r8, Operand(r8, ASR, kSmiTagSize));
4500
+ __ add(r3, r9, Operand(r8, LSL, r3));
4271
4501
 
4272
4502
  // Argument 2 (r1): Previous index.
4273
4503
  // Already there
4274
4504
 
4275
4505
  // Argument 1 (r0): Subject string.
4276
- __ mov(r0, subject);
4506
+ // Already there
4277
4507
 
4278
4508
  // Locate the code entry and call it.
4279
4509
  __ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4290,12 +4520,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4290
4520
  // Check the result.
4291
4521
  Label success;
4292
4522
 
4293
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
4523
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::SUCCESS));
4294
4524
  __ b(eq, &success);
4295
4525
  Label failure;
4296
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
4526
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::FAILURE));
4297
4527
  __ b(eq, &failure);
4298
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
4528
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::EXCEPTION));
4299
4529
  // If not exception it can only be retry. Handle that in the runtime system.
4300
4530
  __ b(ne, &runtime);
4301
4531
  // Result must now be exception. If there is no pending exception already a
@@ -4307,18 +4537,18 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
4307
4537
  __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
4308
4538
  isolate)));
4309
4539
  __ ldr(r0, MemOperand(r2, 0));
4310
- __ cmp(r0, r1);
4540
+ __ cmp(subject, r1);
4311
4541
  __ b(eq, &runtime);
4312
4542
 
4313
4543
  __ str(r1, MemOperand(r2, 0)); // Clear pending exception.
4314
4544
 
4315
4545
  // Check if the exception is a termination. If so, throw as uncatchable.
4316
4546
  __ LoadRoot(ip, Heap::kTerminationExceptionRootIndex);
4317
- __ cmp(r0, ip);
4547
+ __ cmp(subject, ip);
4318
4548
  Label termination_exception;
4319
4549
  __ b(eq, &termination_exception);
4320
4550
 
4321
- __ Throw(r0); // Expects thrown value in r0.
4551
+ __ Throw(subject); // Expects thrown value in r0.
4322
4552
 
4323
4553
  __ bind(&termination_exception);
4324
4554
  __ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0.
@@ -4401,8 +4631,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
4401
4631
  __ ldr(r1, MemOperand(sp, kPointerSize * 2));
4402
4632
  STATIC_ASSERT(kSmiTag == 0);
4403
4633
  STATIC_ASSERT(kSmiTagSize == 1);
4404
- __ tst(r1, Operand(kSmiTagMask));
4405
- __ b(ne, &slowcase);
4634
+ __ JumpIfNotSmi(r1, &slowcase);
4406
4635
  __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
4407
4636
  __ b(hi, &slowcase);
4408
4637
  // Smi-tagging is equivalent to multiplying by 2.
@@ -4523,7 +4752,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
4523
4752
  Label call_as_function;
4524
4753
  __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
4525
4754
  __ b(eq, &call_as_function);
4526
- __ InvokeFunction(r1, actual, JUMP_FUNCTION);
4755
+ __ InvokeFunction(r1,
4756
+ actual,
4757
+ JUMP_FUNCTION,
4758
+ NullCallWrapper(),
4759
+ CALL_AS_METHOD);
4527
4760
  __ bind(&call_as_function);
4528
4761
  }
4529
4762
  __ InvokeFunction(r1,
@@ -4540,6 +4773,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
4540
4773
  __ mov(r0, Operand(argc_)); // Setup the number of arguments.
4541
4774
  __ mov(r2, Operand(0, RelocInfo::NONE));
4542
4775
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
4776
+ __ SetCallKind(r5, CALL_AS_METHOD);
4543
4777
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
4544
4778
  RelocInfo::CODE_TARGET);
4545
4779
  }
@@ -4547,16 +4781,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
4547
4781
 
4548
4782
  // Unfortunately you have to run without snapshots to see most of these
4549
4783
  // names in the profile since most compare stubs end up in the snapshot.
4550
- const char* CompareStub::GetName() {
4784
+ void CompareStub::PrintName(StringStream* stream) {
4551
4785
  ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
4552
4786
  (lhs_.is(r1) && rhs_.is(r0)));
4553
-
4554
- if (name_ != NULL) return name_;
4555
- const int kMaxNameLength = 100;
4556
- name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
4557
- kMaxNameLength);
4558
- if (name_ == NULL) return "OOM";
4559
-
4560
4787
  const char* cc_name;
4561
4788
  switch (cc_) {
4562
4789
  case lt: cc_name = "LT"; break;
@@ -4567,40 +4794,14 @@ const char* CompareStub::GetName() {
4567
4794
  case ne: cc_name = "NE"; break;
4568
4795
  default: cc_name = "UnknownCondition"; break;
4569
4796
  }
4570
-
4571
- const char* lhs_name = lhs_.is(r0) ? "_r0" : "_r1";
4572
- const char* rhs_name = rhs_.is(r0) ? "_r0" : "_r1";
4573
-
4574
- const char* strict_name = "";
4575
- if (strict_ && (cc_ == eq || cc_ == ne)) {
4576
- strict_name = "_STRICT";
4577
- }
4578
-
4579
- const char* never_nan_nan_name = "";
4580
- if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
4581
- never_nan_nan_name = "_NO_NAN";
4582
- }
4583
-
4584
- const char* include_number_compare_name = "";
4585
- if (!include_number_compare_) {
4586
- include_number_compare_name = "_NO_NUMBER";
4587
- }
4588
-
4589
- const char* include_smi_compare_name = "";
4590
- if (!include_smi_compare_) {
4591
- include_smi_compare_name = "_NO_SMI";
4592
- }
4593
-
4594
- OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
4595
- "CompareStub_%s%s%s%s%s%s",
4596
- cc_name,
4597
- lhs_name,
4598
- rhs_name,
4599
- strict_name,
4600
- never_nan_nan_name,
4601
- include_number_compare_name,
4602
- include_smi_compare_name);
4603
- return name_;
4797
+ bool is_equality = cc_ == eq || cc_ == ne;
4798
+ stream->Add("CompareStub_%s", cc_name);
4799
+ stream->Add(lhs_.is(r0) ? "_r0" : "_r1");
4800
+ stream->Add(rhs_.is(r0) ? "_r0" : "_r1");
4801
+ if (strict_ && is_equality) stream->Add("_STRICT");
4802
+ if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4803
+ if (!include_number_compare_) stream->Add("_NO_NUMBER");
4804
+ if (!include_smi_compare_) stream->Add("_NO_SMI");
4604
4805
  }
4605
4806
 
4606
4807
 
@@ -4625,6 +4826,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4625
4826
  Label flat_string;
4626
4827
  Label ascii_string;
4627
4828
  Label got_char_code;
4829
+ Label sliced_string;
4628
4830
 
4629
4831
  // If the receiver is a smi trigger the non-string case.
4630
4832
  __ JumpIfSmi(object_, receiver_not_string_);
@@ -4654,7 +4856,11 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4654
4856
  __ b(eq, &flat_string);
4655
4857
 
4656
4858
  // Handle non-flat strings.
4657
- __ tst(result_, Operand(kIsConsStringMask));
4859
+ __ and_(result_, result_, Operand(kStringRepresentationMask));
4860
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
4861
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
4862
+ __ cmp(result_, Operand(kExternalStringTag));
4863
+ __ b(gt, &sliced_string);
4658
4864
  __ b(eq, &call_runtime_);
4659
4865
 
4660
4866
  // ConsString.
@@ -4662,15 +4868,26 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
4662
4868
  // this is really a flat string in a cons string). If that is not
4663
4869
  // the case we would rather go to the runtime system now to flatten
4664
4870
  // the string.
4871
+ Label assure_seq_string;
4665
4872
  __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
4666
4873
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
4667
4874
  __ cmp(result_, Operand(ip));
4668
4875
  __ b(ne, &call_runtime_);
4669
4876
  // Get the first of the two strings and load its instance type.
4670
4877
  __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
4878
+ __ jmp(&assure_seq_string);
4879
+
4880
+ // SlicedString, unpack and add offset.
4881
+ __ bind(&sliced_string);
4882
+ __ ldr(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset));
4883
+ __ add(scratch_, scratch_, result_);
4884
+ __ ldr(object_, FieldMemOperand(object_, SlicedString::kParentOffset));
4885
+
4886
+ // Assure that we are dealing with a sequential string. Go to runtime if not.
4887
+ __ bind(&assure_seq_string);
4671
4888
  __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
4672
4889
  __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
4673
- // If the first cons component is also non-flat, then go to runtime.
4890
+ // Check that parent is not an external string. Go to runtime otherwise.
4674
4891
  STATIC_ASSERT(kSeqStringTag == 0);
4675
4892
  __ tst(result_, Operand(kStringRepresentationMask));
4676
4893
  __ b(ne, &call_runtime_);
@@ -5250,10 +5467,17 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5250
5467
  // Check bounds and smi-ness.
5251
5468
  Register to = r6;
5252
5469
  Register from = r7;
5470
+
5471
+ if (FLAG_string_slices) {
5472
+ __ nop(0); // Jumping as first instruction would crash the code generation.
5473
+ __ jmp(&runtime);
5474
+ }
5475
+
5253
5476
  __ Ldrd(to, from, MemOperand(sp, kToOffset));
5254
5477
  STATIC_ASSERT(kFromOffset == kToOffset + 4);
5255
5478
  STATIC_ASSERT(kSmiTag == 0);
5256
5479
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5480
+
5257
5481
  // I.e., arithmetic shift right by one un-smi-tags.
5258
5482
  __ mov(r2, Operand(to, ASR, 1), SetCC);
5259
5483
  __ mov(r3, Operand(from, ASR, 1), SetCC, cc);
@@ -5262,7 +5486,6 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5262
5486
  __ b(mi, &runtime); // From is negative.
5263
5487
 
5264
5488
  // Both to and from are smis.
5265
-
5266
5489
  __ sub(r2, r2, Operand(r3), SetCC);
5267
5490
  __ b(mi, &runtime); // Fail if from > to.
5268
5491
  // Special handling of sub-strings of length 1 and 2. One character strings
@@ -5279,8 +5502,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
5279
5502
  // Make sure first argument is a sequential (or flat) string.
5280
5503
  __ ldr(r5, MemOperand(sp, kStringOffset));
5281
5504
  STATIC_ASSERT(kSmiTag == 0);
5282
- __ tst(r5, Operand(kSmiTagMask));
5283
- __ b(eq, &runtime);
5505
+ __ JumpIfSmi(r5, &runtime);
5284
5506
  Condition is_string = masm->IsObjectStringType(r5, r1);
5285
5507
  __ b(NegateCondition(is_string), &runtime);
5286
5508
 
@@ -5518,7 +5740,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
5518
5740
  Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5519
5741
  __ add(left, left, Operand(scratch1));
5520
5742
  __ add(right, right, Operand(scratch1));
5521
- __ rsb(length, length, Operand(0));
5743
+ __ rsb(length, length, Operand::Zero());
5522
5744
  Register index = length; // index = -length;
5523
5745
 
5524
5746
  // Compare loop.
@@ -5917,8 +6139,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
5917
6139
  ASSERT(state_ == CompareIC::SMIS);
5918
6140
  Label miss;
5919
6141
  __ orr(r2, r1, r0);
5920
- __ tst(r2, Operand(kSmiTagMask));
5921
- __ b(ne, &miss);
6142
+ __ JumpIfNotSmi(r2, &miss);
5922
6143
 
5923
6144
  if (GetCondition() == eq) {
5924
6145
  // For equality we do not care about the sign of the result.
@@ -5942,8 +6163,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
5942
6163
  Label unordered;
5943
6164
  Label miss;
5944
6165
  __ and_(r2, r1, Operand(r0));
5945
- __ tst(r2, Operand(kSmiTagMask));
5946
- __ b(eq, &generic_stub);
6166
+ __ JumpIfSmi(r2, &generic_stub);
5947
6167
 
5948
6168
  __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE);
5949
6169
  __ b(ne, &miss);
@@ -6092,8 +6312,7 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
6092
6312
  ASSERT(state_ == CompareIC::OBJECTS);
6093
6313
  Label miss;
6094
6314
  __ and_(r2, r1, Operand(r0));
6095
- __ tst(r2, Operand(kSmiTagMask));
6096
- __ b(eq, &miss);
6315
+ __ JumpIfSmi(r2, &miss);
6097
6316
 
6098
6317
  __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE);
6099
6318
  __ b(ne, &miss);
@@ -6139,12 +6358,8 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
6139
6358
 
6140
6359
  void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
6141
6360
  ExternalReference function) {
6142
- __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
6143
- RelocInfo::CODE_TARGET));
6144
6361
  __ mov(r2, Operand(function));
6145
- // Push return address (accessible to GC through exit frame pc).
6146
- __ str(pc, MemOperand(sp, 0));
6147
- __ Jump(r2); // Call the api function.
6362
+ GenerateCall(masm, r2);
6148
6363
  }
6149
6364
 
6150
6365
 
@@ -6153,8 +6368,14 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
6153
6368
  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
6154
6369
  RelocInfo::CODE_TARGET));
6155
6370
  // Push return address (accessible to GC through exit frame pc).
6156
- __ str(pc, MemOperand(sp, 0));
6371
+ // Note that using pc with str is deprecated.
6372
+ Label start;
6373
+ __ bind(&start);
6374
+ __ add(ip, pc, Operand(Assembler::kInstrSize));
6375
+ __ str(ip, MemOperand(sp, 0));
6157
6376
  __ Jump(target); // Call the C++ function.
6377
+ ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta,
6378
+ masm->SizeOfCodeGeneratedSince(&start));
6158
6379
  }
6159
6380
 
6160
6381
 
@@ -6377,7 +6598,7 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
6377
6598
  // treated as a lookup success. For positive lookup probing failure
6378
6599
  // should be treated as lookup failure.
6379
6600
  if (mode_ == POSITIVE_LOOKUP) {
6380
- __ mov(result, Operand(0));
6601
+ __ mov(result, Operand::Zero());
6381
6602
  __ Ret();
6382
6603
  }
6383
6604
 
@@ -6386,7 +6607,7 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
6386
6607
  __ Ret();
6387
6608
 
6388
6609
  __ bind(&not_in_dictionary);
6389
- __ mov(result, Operand(0));
6610
+ __ mov(result, Operand::Zero());
6390
6611
  __ Ret();
6391
6612
  }
6392
6613