libv8 3.3.10.4 → 3.5.10.beta1

Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -90,19 +90,21 @@ class MacroAssembler: public Assembler {
90
90
 
91
91
  // Jump, Call, and Ret pseudo instructions implementing inter-working.
92
92
  void Jump(Register target, Condition cond = al);
93
- void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
93
+ void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
94
94
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
95
- int CallSize(Register target, Condition cond = al);
95
+ static int CallSize(Register target, Condition cond = al);
96
96
  void Call(Register target, Condition cond = al);
97
- int CallSize(byte* target, RelocInfo::Mode rmode, Condition cond = al);
98
- void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
99
- int CallSize(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
97
+ static int CallSize(Address target,
98
+ RelocInfo::Mode rmode,
99
+ Condition cond = al);
100
+ void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
101
+ static int CallSize(Handle<Code> code,
102
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
103
+ unsigned ast_id = kNoASTId,
104
+ Condition cond = al);
100
105
  void Call(Handle<Code> code,
101
- RelocInfo::Mode rmode,
102
- Condition cond = al);
103
- void CallWithAstId(Handle<Code> code,
104
- RelocInfo::Mode rmode,
105
- unsigned ast_id,
106
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
107
+ unsigned ast_id = kNoASTId,
106
108
  Condition cond = al);
107
109
  void Ret(Condition cond = al);
108
110
 
@@ -143,11 +145,9 @@ class MacroAssembler: public Assembler {
143
145
 
144
146
  // Register move. May do nothing if the registers are identical.
145
147
  void Move(Register dst, Handle<Object> value);
146
- void Move(Register dst, Register src);
148
+ void Move(Register dst, Register src, Condition cond = al);
147
149
  void Move(DoubleRegister dst, DoubleRegister src);
148
150
 
149
- // Jumps to the label at the index given by the Smi in "index".
150
- void SmiJumpTable(Register index, Vector<Label*> targets);
151
151
  // Load an object from the root table.
152
152
  void LoadRoot(Register destination,
153
153
  Heap::RootListIndex index,
@@ -192,6 +192,9 @@ class MacroAssembler: public Assembler {
192
192
  Register address,
193
193
  Register scratch);
194
194
 
195
+ // Push a handle.
196
+ void Push(Handle<Object> handle);
197
+
195
198
  // Push two registers. Pushes leftmost register first (to highest address).
196
199
  void Push(Register src1, Register src2, Condition cond = al) {
197
200
  ASSERT(!src1.is(src2));
@@ -311,6 +314,10 @@ class MacroAssembler: public Assembler {
311
314
  const Register fpscr_flags,
312
315
  const Condition cond = al);
313
316
 
317
+ void Vmov(const DwVfpRegister dst,
318
+ const double imm,
319
+ const Condition cond = al);
320
+
314
321
 
315
322
  // ---------------------------------------------------------------------------
316
323
  // Activation frames
@@ -356,27 +363,28 @@ class MacroAssembler: public Assembler {
356
363
  const ParameterCount& expected,
357
364
  const ParameterCount& actual,
358
365
  InvokeFlag flag,
359
- const CallWrapper& call_wrapper = NullCallWrapper(),
360
- CallKind call_kind = CALL_AS_METHOD);
366
+ const CallWrapper& call_wrapper,
367
+ CallKind call_kind);
361
368
 
362
369
  void InvokeCode(Handle<Code> code,
363
370
  const ParameterCount& expected,
364
371
  const ParameterCount& actual,
365
372
  RelocInfo::Mode rmode,
366
373
  InvokeFlag flag,
367
- CallKind call_kind = CALL_AS_METHOD);
374
+ CallKind call_kind);
368
375
 
369
376
  // Invoke the JavaScript function in the given register. Changes the
370
377
  // current context to the context in the function before invoking.
371
378
  void InvokeFunction(Register function,
372
379
  const ParameterCount& actual,
373
380
  InvokeFlag flag,
374
- const CallWrapper& call_wrapper = NullCallWrapper(),
375
- CallKind call_kind = CALL_AS_METHOD);
381
+ const CallWrapper& call_wrapper,
382
+ CallKind call_kind);
376
383
 
377
384
  void InvokeFunction(JSFunction* function,
378
385
  const ParameterCount& actual,
379
- InvokeFlag flag);
386
+ InvokeFlag flag,
387
+ CallKind call_kind);
380
388
 
381
389
  void IsObjectJSObjectType(Register heap_object,
382
390
  Register map,
@@ -427,6 +435,16 @@ class MacroAssembler: public Assembler {
427
435
  Register scratch,
428
436
  Label* miss);
429
437
 
438
+
439
+ void LoadFromNumberDictionary(Label* miss,
440
+ Register elements,
441
+ Register key,
442
+ Register result,
443
+ Register t0,
444
+ Register t1,
445
+ Register t2);
446
+
447
+
430
448
  inline void MarkCode(NopMarkerTypes type) {
431
449
  nop(type);
432
450
  }
@@ -576,6 +594,12 @@ class MacroAssembler: public Assembler {
576
594
  InstanceType type);
577
595
 
578
596
 
597
+ // Check if a map for a JSObject indicates that the object has fast elements.
598
+ // Jump to the specified label if it does not.
599
+ void CheckFastElements(Register map,
600
+ Register scratch,
601
+ Label* fail);
602
+
579
603
  // Check if the map of an object is equal to a specified map (either
580
604
  // given directly or as an index into the root list) and branch to
581
605
  // label if not. Skip the smi check if not required (object is known
@@ -1024,10 +1048,6 @@ class MacroAssembler: public Assembler {
1024
1048
  int num_double_arguments);
1025
1049
 
1026
1050
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
1027
- int CallSize(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
1028
- void Call(intptr_t target,
1029
- RelocInfo::Mode rmode,
1030
- Condition cond = al);
1031
1051
 
1032
1052
  // Helper functions for generating invokes.
1033
1053
  void InvokePrologue(const ParameterCount& expected,
@@ -1036,8 +1056,8 @@ class MacroAssembler: public Assembler {
1036
1056
  Register code_reg,
1037
1057
  Label* done,
1038
1058
  InvokeFlag flag,
1039
- const CallWrapper& call_wrapper = NullCallWrapper(),
1040
- CallKind call_kind = CALL_AS_METHOD);
1059
+ const CallWrapper& call_wrapper,
1060
+ CallKind call_kind);
1041
1061
 
1042
1062
  // Activation support.
1043
1063
  void EnterFrame(StackFrame::Type type);
@@ -899,13 +899,12 @@ void RegExpMacroAssemblerARM::PushBacktrack(Label* label) {
899
899
  constant_offset - offset_of_pc_register_read;
900
900
  ASSERT(pc_offset_of_constant < 0);
901
901
  if (is_valid_memory_offset(pc_offset_of_constant)) {
902
- masm_->BlockConstPoolBefore(masm_->pc_offset() + Assembler::kInstrSize);
902
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
903
903
  __ ldr(r0, MemOperand(pc, pc_offset_of_constant));
904
904
  } else {
905
905
  // Not a 12-bit offset, so it needs to be loaded from the constant
906
906
  // pool.
907
- masm_->BlockConstPoolBefore(
908
- masm_->pc_offset() + 2 * Assembler::kInstrSize);
907
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
909
908
  __ mov(r0, Operand(pc_offset_of_constant + Assembler::kInstrSize));
910
909
  __ ldr(r0, MemOperand(pc, r0));
911
910
  }
@@ -1035,12 +1034,13 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
1035
1034
  }
1036
1035
 
1037
1036
  // Prepare for possible GC.
1038
- HandleScope handles;
1037
+ HandleScope handles(isolate);
1039
1038
  Handle<Code> code_handle(re_code);
1040
1039
 
1041
1040
  Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
1041
+
1042
1042
  // Current string.
1043
- bool is_ascii = subject->IsAsciiRepresentation();
1043
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
1044
1044
 
1045
1045
  ASSERT(re_code->instruction_start() <= *return_address);
1046
1046
  ASSERT(*return_address <=
@@ -1049,7 +1049,7 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
1049
1049
  MaybeObject* result = Execution::HandleStackGuardInterrupt();
1050
1050
 
1051
1051
  if (*code_handle != re_code) { // Return address no longer valid
1052
- int delta = *code_handle - re_code;
1052
+ int delta = code_handle->address() - re_code->address();
1053
1053
  // Overwrite the return address on the stack.
1054
1054
  *return_address += delta;
1055
1055
  }
@@ -1058,8 +1058,20 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
1058
1058
  return EXCEPTION;
1059
1059
  }
1060
1060
 
1061
+ Handle<String> subject_tmp = subject;
1062
+ int slice_offset = 0;
1063
+
1064
+ // Extract the underlying string and the slice offset.
1065
+ if (StringShape(*subject_tmp).IsCons()) {
1066
+ subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
1067
+ } else if (StringShape(*subject_tmp).IsSliced()) {
1068
+ SlicedString* slice = SlicedString::cast(*subject_tmp);
1069
+ subject_tmp = Handle<String>(slice->parent());
1070
+ slice_offset = slice->offset();
1071
+ }
1072
+
1061
1073
  // String might have changed.
1062
- if (subject->IsAsciiRepresentation() != is_ascii) {
1074
+ if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
1063
1075
  // If we changed between an ASCII and an UC16 string, the specialized
1064
1076
  // code cannot be used, and we need to restart regexp matching from
1065
1077
  // scratch (including, potentially, compiling a new version of the code).
@@ -1070,8 +1082,8 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
1070
1082
  // be a sequential or external string with the same content.
1071
1083
  // Update the start and end pointers in the stack frame to the current
1072
1084
  // location (whether it has actually moved or not).
1073
- ASSERT(StringShape(*subject).IsSequential() ||
1074
- StringShape(*subject).IsExternal());
1085
+ ASSERT(StringShape(*subject_tmp).IsSequential() ||
1086
+ StringShape(*subject_tmp).IsExternal());
1075
1087
 
1076
1088
  // The original start address of the characters to match.
1077
1089
  const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
@@ -1079,13 +1091,14 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
1079
1091
  // Find the current start address of the same character at the current string
1080
1092
  // position.
1081
1093
  int start_index = frame_entry<int>(re_frame, kStartIndex);
1082
- const byte* new_address = StringCharacterPosition(*subject, start_index);
1094
+ const byte* new_address = StringCharacterPosition(*subject_tmp,
1095
+ start_index + slice_offset);
1083
1096
 
1084
1097
  if (start_address != new_address) {
1085
1098
  // If there is a difference, update the object pointer and start and end
1086
1099
  // addresses in the RegExp stack frame to match the new value.
1087
1100
  const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
1088
- int byte_length = end_address - start_address;
1101
+ int byte_length = static_cast<int>(end_address - start_address);
1089
1102
  frame_entry<const String*>(re_frame, kInputString) = *subject;
1090
1103
  frame_entry<const byte*>(re_frame, kInputStart) = new_address;
1091
1104
  frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
@@ -1185,8 +1198,7 @@ void RegExpMacroAssemblerARM::CheckStackLimit() {
1185
1198
 
1186
1199
  void RegExpMacroAssemblerARM::EmitBacktrackConstantPool() {
1187
1200
  __ CheckConstPool(false, false);
1188
- __ BlockConstPoolBefore(
1189
- masm_->pc_offset() + kBacktrackConstantPoolSize * Assembler::kInstrSize);
1201
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
1190
1202
  backtrack_constant_pool_offset_ = masm_->pc_offset();
1191
1203
  for (int i = 0; i < kBacktrackConstantPoolSize; i++) {
1192
1204
  __ emit(0);
@@ -28,6 +28,9 @@
28
28
  #ifndef V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_
29
29
  #define V8_ARM_REGEXP_MACRO_ASSEMBLER_ARM_H_
30
30
 
31
+ #include "arm/assembler-arm.h"
32
+ #include "arm/assembler-arm-inl.h"
33
+
31
34
  namespace v8 {
32
35
  namespace internal {
33
36
 
@@ -121,7 +121,7 @@ MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
121
121
 
122
122
  // Check that receiver is a JSObject.
123
123
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
124
- __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
124
+ __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
125
125
  __ b(lt, miss_label);
126
126
 
127
127
  // Load properties array.
@@ -189,8 +189,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
189
189
  ASSERT(!extra2.is(no_reg));
190
190
 
191
191
  // Check that the receiver isn't a smi.
192
- __ tst(receiver, Operand(kSmiTagMask));
193
- __ b(eq, &miss);
192
+ __ JumpIfSmi(receiver, &miss);
194
193
 
195
194
  // Get the map of the receiver and compute the hash.
196
195
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
@@ -282,8 +281,7 @@ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
282
281
  Register scratch,
283
282
  Label* miss_label) {
284
283
  // Check that the receiver isn't a smi.
285
- __ tst(receiver, Operand(kSmiTagMask));
286
- __ b(eq, miss_label);
284
+ __ JumpIfSmi(receiver, miss_label);
287
285
 
288
286
  // Check that the object is a JS array.
289
287
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
@@ -305,8 +303,7 @@ static void GenerateStringCheck(MacroAssembler* masm,
305
303
  Label* smi,
306
304
  Label* non_string_object) {
307
305
  // Check that the receiver isn't a smi.
308
- __ tst(receiver, Operand(kSmiTagMask));
309
- __ b(eq, smi);
306
+ __ JumpIfSmi(receiver, smi);
310
307
 
311
308
  // Check that the object is a string.
312
309
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
@@ -381,8 +378,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
381
378
  Label exit;
382
379
 
383
380
  // Check that the receiver isn't a smi.
384
- __ tst(receiver_reg, Operand(kSmiTagMask));
385
- __ b(eq, miss_label);
381
+ __ JumpIfSmi(receiver_reg, miss_label);
386
382
 
387
383
  // Check that the map of the receiver hasn't changed.
388
384
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
@@ -431,8 +427,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
431
427
  __ str(r0, FieldMemOperand(receiver_reg, offset));
432
428
 
433
429
  // Skip updating write barrier if storing a smi.
434
- __ tst(r0, Operand(kSmiTagMask));
435
- __ b(eq, &exit);
430
+ __ JumpIfSmi(r0, &exit);
436
431
 
437
432
  // Update the write barrier for the array address.
438
433
  // Pass the now unused name_reg as a scratch register.
@@ -445,8 +440,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
445
440
  __ str(r0, FieldMemOperand(scratch, offset));
446
441
 
447
442
  // Skip updating write barrier if storing a smi.
448
- __ tst(r0, Operand(kSmiTagMask));
449
- __ b(eq, &exit);
443
+ __ JumpIfSmi(r0, &exit);
450
444
 
451
445
  // Update the write barrier for the array address.
452
446
  // Ok to clobber receiver_reg and name_reg, since we return.
@@ -476,7 +470,8 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
476
470
  static void GenerateCallFunction(MacroAssembler* masm,
477
471
  Object* object,
478
472
  const ParameterCount& arguments,
479
- Label* miss) {
473
+ Label* miss,
474
+ Code::ExtraICState extra_ic_state) {
480
475
  // ----------- S t a t e -------------
481
476
  // -- r0: receiver
482
477
  // -- r1: function to call
@@ -495,7 +490,10 @@ static void GenerateCallFunction(MacroAssembler* masm,
495
490
  }
496
491
 
497
492
  // Invoke the function.
498
- __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
493
+ CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
494
+ ? CALL_AS_FUNCTION
495
+ : CALL_AS_METHOD;
496
+ __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
499
497
  }
500
498
 
501
499
 
@@ -625,10 +623,12 @@ class CallInterceptorCompiler BASE_EMBEDDED {
625
623
  public:
626
624
  CallInterceptorCompiler(StubCompiler* stub_compiler,
627
625
  const ParameterCount& arguments,
628
- Register name)
626
+ Register name,
627
+ Code::ExtraICState extra_ic_state)
629
628
  : stub_compiler_(stub_compiler),
630
629
  arguments_(arguments),
631
- name_(name) {}
630
+ name_(name),
631
+ extra_ic_state_(extra_ic_state) {}
632
632
 
633
633
  MaybeObject* Compile(MacroAssembler* masm,
634
634
  JSObject* object,
@@ -756,8 +756,11 @@ class CallInterceptorCompiler BASE_EMBEDDED {
756
756
  arguments_.immediate());
757
757
  if (result->IsFailure()) return result;
758
758
  } else {
759
+ CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
760
+ ? CALL_AS_FUNCTION
761
+ : CALL_AS_METHOD;
759
762
  __ InvokeFunction(optimization.constant_function(), arguments_,
760
- JUMP_FUNCTION);
763
+ JUMP_FUNCTION, call_kind);
761
764
  }
762
765
 
763
766
  // Deferred code for fast API call case---clean preallocated space.
@@ -839,6 +842,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
839
842
  StubCompiler* stub_compiler_;
840
843
  const ParameterCount& arguments_;
841
844
  Register name_;
845
+ Code::ExtraICState extra_ic_state_;
842
846
  };
843
847
 
844
848
 
@@ -1155,8 +1159,7 @@ void StubCompiler::GenerateLoadField(JSObject* object,
1155
1159
  String* name,
1156
1160
  Label* miss) {
1157
1161
  // Check that the receiver isn't a smi.
1158
- __ tst(receiver, Operand(kSmiTagMask));
1159
- __ b(eq, miss);
1162
+ __ JumpIfSmi(receiver, miss);
1160
1163
 
1161
1164
  // Check that the maps haven't changed.
1162
1165
  Register reg =
@@ -1177,13 +1180,11 @@ void StubCompiler::GenerateLoadConstant(JSObject* object,
1177
1180
  String* name,
1178
1181
  Label* miss) {
1179
1182
  // Check that the receiver isn't a smi.
1180
- __ tst(receiver, Operand(kSmiTagMask));
1181
- __ b(eq, miss);
1183
+ __ JumpIfSmi(receiver, miss);
1182
1184
 
1183
1185
  // Check that the maps haven't changed.
1184
- Register reg =
1185
- CheckPrototypes(object, receiver, holder,
1186
- scratch1, scratch2, scratch3, name, miss);
1186
+ CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name,
1187
+ miss);
1187
1188
 
1188
1189
  // Return the constant value.
1189
1190
  __ mov(r0, Operand(Handle<Object>(value)));
@@ -1202,8 +1203,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1202
1203
  String* name,
1203
1204
  Label* miss) {
1204
1205
  // Check that the receiver isn't a smi.
1205
- __ tst(receiver, Operand(kSmiTagMask));
1206
- __ b(eq, miss);
1206
+ __ JumpIfSmi(receiver, miss);
1207
1207
 
1208
1208
  // Check that the maps haven't changed.
1209
1209
  Register reg =
@@ -1416,8 +1416,7 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1416
1416
  // object which can only happen for contextual calls. In this case,
1417
1417
  // the receiver cannot be a smi.
1418
1418
  if (object != holder) {
1419
- __ tst(r0, Operand(kSmiTagMask));
1420
- __ b(eq, miss);
1419
+ __ JumpIfSmi(r0, miss);
1421
1420
  }
1422
1421
 
1423
1422
  // Check that the maps haven't changed.
@@ -1439,8 +1438,7 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1439
1438
  // the nice side effect that multiple closures based on the same
1440
1439
  // function can all use this call IC. Before we load through the
1441
1440
  // function, we have to verify that it still is a function.
1442
- __ tst(r1, Operand(kSmiTagMask));
1443
- __ b(eq, miss);
1441
+ __ JumpIfSmi(r1, miss);
1444
1442
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
1445
1443
  __ b(ne, miss);
1446
1444
 
@@ -1485,14 +1483,13 @@ MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1485
1483
  // Get the receiver of the function from the stack into r0.
1486
1484
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1487
1485
  // Check that the receiver isn't a smi.
1488
- __ tst(r0, Operand(kSmiTagMask));
1489
- __ b(eq, &miss);
1486
+ __ JumpIfSmi(r0, &miss);
1490
1487
 
1491
1488
  // Do the right check and compute the holder register.
1492
1489
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
1493
1490
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
1494
1491
 
1495
- GenerateCallFunction(masm(), object, arguments(), &miss);
1492
+ GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
1496
1493
 
1497
1494
  // Handle call cache miss.
1498
1495
  __ bind(&miss);
@@ -1957,8 +1954,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1957
1954
  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
1958
1955
 
1959
1956
  STATIC_ASSERT(kSmiTag == 0);
1960
- __ tst(r1, Operand(kSmiTagMask));
1961
- __ b(eq, &miss);
1957
+ __ JumpIfSmi(r1, &miss);
1962
1958
 
1963
1959
  CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
1964
1960
  &miss);
@@ -1975,8 +1971,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1975
1971
  // Check the code is a smi.
1976
1972
  Label slow;
1977
1973
  STATIC_ASSERT(kSmiTag == 0);
1978
- __ tst(code, Operand(kSmiTagMask));
1979
- __ b(ne, &slow);
1974
+ __ JumpIfNotSmi(code, &slow);
1980
1975
 
1981
1976
  // Convert the smi code to uint16.
1982
1977
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));
@@ -1992,7 +1987,7 @@ MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1992
1987
  // Tail call the full function. We do not have to patch the receiver
1993
1988
  // because the function makes no use of it.
1994
1989
  __ bind(&slow);
1995
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1990
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
1996
1991
 
1997
1992
  __ bind(&miss);
1998
1993
  // r2: function name.
@@ -2140,7 +2135,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2140
2135
  __ bind(&slow);
2141
2136
  // Tail call the full function. We do not have to patch the receiver
2142
2137
  // because the function makes no use of it.
2143
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2138
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
2144
2139
 
2145
2140
  __ bind(&miss);
2146
2141
  // r2: function name.
@@ -2178,8 +2173,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2178
2173
  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2179
2174
 
2180
2175
  STATIC_ASSERT(kSmiTag == 0);
2181
- __ tst(r1, Operand(kSmiTagMask));
2182
- __ b(eq, &miss);
2176
+ __ JumpIfSmi(r1, &miss);
2183
2177
 
2184
2178
  CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
2185
2179
  &miss);
@@ -2242,7 +2236,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2242
2236
  // Tail call the full function. We do not have to patch the receiver
2243
2237
  // because the function makes no use of it.
  __ bind(&slow);
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
@@ -2282,8 +2276,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &miss_before_stack_reserved);
+ __ JumpIfSmi(r1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, r0, r3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
@@ -2337,8 +2330,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &miss);
+ __ JumpIfSmi(r1, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
@@ -2371,7 +2363,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
  } else {
  // Check that the object is a two-byte string or a symbol.
  __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
- __ b(hs, &miss);
+ __ b(ge, &miss);
  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(
  masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
@@ -2388,8 +2380,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
  } else {
  Label fast;
  // Check that the object is a smi or a heap number.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &fast);
+ __ JumpIfSmi(r1, &fast);
  __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
  __ b(ne, &miss);
  __ bind(&fast);
@@ -2430,7 +2421,10 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
  UNREACHABLE();
  }

- __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
+ CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
+ ? CALL_AS_FUNCTION
+ : CALL_AS_METHOD;
+ __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);

  // Handle call cache miss.
  __ bind(&miss);
@@ -2463,7 +2457,7 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

- CallInterceptorCompiler compiler(this, arguments(), r2);
+ CallInterceptorCompiler compiler(this, arguments(), r2, extra_ic_state_);
  MaybeObject* result = compiler.Compile(masm(),
  object,
  holder,
@@ -2483,7 +2477,7 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

- GenerateCallFunction(masm(), object, arguments(), &miss);
+ GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);

  // Handle call cache miss.
  __ bind(&miss);
@@ -2495,13 +2489,11 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
  }


- MaybeObject* CallStubCompiler::CompileCallGlobal(
- JSObject* object,
- GlobalObject* holder,
- JSGlobalPropertyCell* cell,
- JSFunction* function,
- String* name,
- Code::ExtraICState extra_ic_state) {
+ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
+ GlobalObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
  // ----------- S t a t e -------------
  // -- r2 : name
  // -- lr : return address
@@ -2543,7 +2535,7 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
- CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
+ CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
  ? CALL_AS_FUNCTION
  : CALL_AS_METHOD;
  if (V8::UseCrankshaft()) {
@@ -2608,8 +2600,7 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
  Label miss;

  // Check that the object isn't a smi.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &miss);
+ __ JumpIfSmi(r1, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -2656,8 +2647,7 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
  Label miss;

  // Check that the object isn't a smi.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &miss);
+ __ JumpIfSmi(r1, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -2748,8 +2738,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
  Label miss;

  // Check that receiver is not a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &miss);
+ __ JumpIfSmi(r0, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);
@@ -2893,8 +2882,7 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &miss);
+ __ JumpIfSmi(r0, &miss);
  }

  // Check that the map of the global has not changed.
@@ -3104,14 +3092,15 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  }


- MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
+ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  // -- lr : return address
  // -- r0 : key
  // -- r1 : receiver
  // -----------------------------------
- MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
  Code* stub;
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
+ MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(r1,
  r2,
@@ -3195,8 +3184,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
  }


- MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
- Map* receiver_map) {
+ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  // -- r0 : value
  // -- r1 : key
@@ -3204,10 +3192,11 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
  // -- lr : return address
  // -- r3 : scratch
  // -----------------------------------
+ Code* stub;
+ JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
  MaybeObject* maybe_stub =
- KeyedStoreFastElementStub(is_js_array).TryGetCode();
- Code* stub;
+ KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(r2,
  r3,
@@ -3281,8 +3270,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ tst(r2, Operand(kSmiTagMask));
- __ b(eq, &generic_stub_call);
+ __ JumpIfSmi(r2, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

@@ -3399,82 +3387,86 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  }


- MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
- JSObject*receiver, ExternalArrayType array_type) {
- // ----------- S t a t e -------------
- // -- lr : return address
- // -- r0 : key
- // -- r1 : receiver
+ #undef __
+ #define __ ACCESS_MASM(masm)
+
+
+ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+ MacroAssembler* masm) {
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
  // -----------------------------------
- MaybeObject* maybe_stub =
- KeyedLoadExternalArrayStub(array_type).TryGetCode();
- Code* stub;
- if (!maybe_stub->To(&stub)) return maybe_stub;
- __ DispatchMap(r1,
- r2,
- Handle<Map>(receiver->map()),
- Handle<Code>(stub),
- DO_SMI_CHECK);
+ Label slow, miss_force_generic;

- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
- __ Jump(ic, RelocInfo::CODE_TARGET);
+ Register key = r0;
+ Register receiver = r1;

- // Return the generated code.
- return GetCode();
- }
+ __ JumpIfNotSmi(key, &miss_force_generic);
+ __ mov(r2, Operand(key, ASR, kSmiTagSize));
+ __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
+ __ Ret();

+ __ bind(&slow);
+ __ IncrementCounter(
+ masm->isolate()->counters()->keyed_load_external_array_slow(),
+ 1, r2, r3);

- MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
- JSObject* receiver, ExternalArrayType array_type) {
- // ----------- S t a t e -------------
- // -- r0 : value
- // -- r1 : name
- // -- r2 : receiver
- // -- lr : return address
+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
  // -----------------------------------
- MaybeObject* maybe_stub =
- KeyedStoreExternalArrayStub(array_type).TryGetCode();
- Code* stub;
- if (!maybe_stub->To(&stub)) return maybe_stub;
- __ DispatchMap(r2,
- r3,
- Handle<Map>(receiver->map()),
- Handle<Code>(stub),
- DO_SMI_CHECK);
-
- Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
- __ Jump(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ Jump(slow_ic, RelocInfo::CODE_TARGET);

- return GetCode();
- }
+ // Miss case, call the runtime.
+ __ bind(&miss_force_generic);

+ // ---------- S t a t e --------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------

- #undef __
- #define __ ACCESS_MASM(masm)
+ Handle<Code> miss_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+ }


- static bool IsElementTypeSigned(ExternalArrayType array_type) {
- switch (array_type) {
- case kExternalByteArray:
- case kExternalShortArray:
- case kExternalIntArray:
+ static bool IsElementTypeSigned(JSObject::ElementsKind elements_kind) {
+ switch (elements_kind) {
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_INT_ELEMENTS:
  return true;

- case kExternalUnsignedByteArray:
- case kExternalUnsignedShortArray:
- case kExternalUnsignedIntArray:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
  return false;

- default:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::FAST_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ case JSObject::DICTIONARY_ELEMENTS:
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
  UNREACHABLE();
  return false;
  }
+ return false;
  }


  void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  MacroAssembler* masm,
- ExternalArrayType array_type) {
+ JSObject::ElementsKind elements_kind) {
  // ---------- S t a t e --------------
  // -- lr : return address
  // -- r0 : key
@@ -3496,37 +3488,37 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(

  // Check that the index is in range.
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
- __ cmp(ip, Operand(key, ASR, kSmiTagSize));
+ __ cmp(key, ip);
  // Unsigned comparison catches both negative and too-large values.
- __ b(lo, &miss_force_generic);
+ __ b(hs, &miss_force_generic);

  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
  // r3: base pointer of external storage

  // We are not untagging smi key and instead work with it
  // as if it was premultiplied by 2.
- ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
+ STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));

  Register value = r2;
- switch (array_type) {
- case kExternalByteArray:
+ switch (elements_kind) {
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
  __ ldrsb(value, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalPixelArray:
- case kExternalUnsignedByteArray:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
  __ ldrb(value, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalShortArray:
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
  __ ldrsh(value, MemOperand(r3, key, LSL, 0));
  break;
- case kExternalUnsignedShortArray:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
  __ ldrh(value, MemOperand(r3, key, LSL, 0));
  break;
- case kExternalIntArray:
- case kExternalUnsignedIntArray:
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
  __ ldr(value, MemOperand(r3, key, LSL, 1));
  break;
- case kExternalFloatArray:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
  if (CpuFeatures::IsSupported(VFP3)) {
  CpuFeatures::Scope scope(VFP3);
  __ add(r2, r3, Operand(key, LSL, 1));
@@ -3535,7 +3527,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  __ ldr(value, MemOperand(r3, key, LSL, 1));
  }
  break;
- case kExternalDoubleArray:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
  if (CpuFeatures::IsSupported(VFP3)) {
  CpuFeatures::Scope scope(VFP3);
  __ add(r2, r3, Operand(key, LSL, 2));
@@ -3547,7 +3539,10 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
  }
  break;
- default:
+ case JSObject::FAST_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ case JSObject::DICTIONARY_ELEMENTS:
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
  UNREACHABLE();
  break;
  }
@@ -3561,7 +3556,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  // d0: value (if VFP3 is supported)
  // r2/r3: value (if VFP3 is not supported)

- if (array_type == kExternalIntArray) {
+ if (elements_kind == JSObject::EXTERNAL_INT_ELEMENTS) {
  // For the Int and UnsignedInt array types, we need to see whether
  // the value can be represented in a Smi. If not, we need to convert
  // it to a HeapNumber.
@@ -3605,7 +3600,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ Ret();
  }
- } else if (array_type == kExternalUnsignedIntArray) {
+ } else if (elements_kind == JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS) {
  // The test is different for unsigned int values. Since we need
  // the value to be in the range of a positive smi, we can't
  // handle either of the top two bits being set in the value.
@@ -3670,7 +3665,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  __ mov(r0, r4);
  __ Ret();
  }
- } else if (array_type == kExternalFloatArray) {
+ } else if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
  // For the floating-point array type, we need to always allocate a
  // HeapNumber.
  if (CpuFeatures::IsSupported(VFP3)) {
@@ -3740,7 +3735,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
  __ mov(r0, r3);
  __ Ret();
  }
- } else if (array_type == kExternalDoubleArray) {
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
  if (CpuFeatures::IsSupported(VFP3)) {
  CpuFeatures::Scope scope(VFP3);
  // Allocate a HeapNumber for the result. Don't use r0 and r1 as
@@ -3797,7 +3792,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(

  void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  MacroAssembler* masm,
- ExternalArrayType array_type) {
+ JSObject::ElementsKind elements_kind) {
  // ---------- S t a t e --------------
  // -- r0 : value
  // -- r1 : key
@@ -3815,23 +3810,21 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

- __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
-
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &miss_force_generic);

+ __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
+
  // Check that the index is in range
- __ SmiUntag(r4, key);
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
- __ cmp(r4, ip);
+ __ cmp(key, ip);
  // Unsigned comparison catches both negative and too-large values.
  __ b(hs, &miss_force_generic);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // r3: external array.
- // r4: key (integer).
- if (array_type == kExternalPixelArray) {
+ if (elements_kind == JSObject::EXTERNAL_PIXEL_ELEMENTS) {
  // Double to pixel conversion is only implemented in the runtime for now.
  __ JumpIfNotSmi(value, &slow);
  } else {
@@ -3841,32 +3834,32 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
- // r4: key (integer).
  // r5: value (integer).
- switch (array_type) {
- case kExternalPixelArray:
+ switch (elements_kind) {
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
  // Clamp the value to [0..255].
  __ Usat(r5, 8, Operand(r5));
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalByteArray:
- case kExternalUnsignedByteArray:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalShortArray:
- case kExternalUnsignedShortArray:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
  break;
- case kExternalIntArray:
- case kExternalUnsignedIntArray:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ __ str(r5, MemOperand(r3, key, LSL, 1));
  break;
- case kExternalFloatArray:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
  // Perform int-to-float conversion and store to memory.
+ __ SmiUntag(r4, key);
  StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
  break;
- case kExternalDoubleArray:
- __ add(r3, r3, Operand(r4, LSL, 3));
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ __ add(r3, r3, Operand(key, LSL, 2));
  // r3: effective address of the double element
  FloatingPointHelper::Destination destination;
  if (CpuFeatures::IsSupported(VFP3)) {
@@ -3886,7 +3879,10 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ str(r7, MemOperand(r3, Register::kSizeInBytes));
  }
  break;
- default:
+ case JSObject::FAST_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ case JSObject::DICTIONARY_ELEMENTS:
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
  UNREACHABLE();
  break;
  }
@@ -3894,9 +3890,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  // Entry registers are intact, r0 holds the value which is the return value.
  __ Ret();

- if (array_type != kExternalPixelArray) {
+ if (elements_kind != JSObject::EXTERNAL_PIXEL_ELEMENTS) {
  // r3: external array.
- // r4: index (integer).
  __ bind(&check_heap_number);
  __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
  __ b(ne, &slow);
@@ -3904,7 +3899,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // r3: base pointer of external storage.
- // r4: key (integer).

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
@@ -3912,56 +3906,46 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  if (CpuFeatures::IsSupported(VFP3)) {
  CpuFeatures::Scope scope(VFP3);

- if (array_type == kExternalFloatArray) {
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
  // vldr requires offset to be a multiple of 4 so we can not
  // include -kHeapObjectTag into it.
  __ sub(r5, r0, Operand(kHeapObjectTag));
  __ vldr(d0, r5, HeapNumber::kValueOffset);
- __ add(r5, r3, Operand(r4, LSL, 2));
+ __ add(r5, r3, Operand(key, LSL, 1));
  __ vcvt_f32_f64(s0, d0);
  __ vstr(s0, r5, 0);
- } else if (array_type == kExternalDoubleArray) {
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
  __ sub(r5, r0, Operand(kHeapObjectTag));
  __ vldr(d0, r5, HeapNumber::kValueOffset);
- __ add(r5, r3, Operand(r4, LSL, 3));
+ __ add(r5, r3, Operand(key, LSL, 2));
  __ vstr(d0, r5, 0);
  } else {
- // Need to perform float-to-int conversion.
- // Test for NaN or infinity (both give zero).
- __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));
-
  // Hoisted load. vldr requires offset to be a multiple of 4 so we can
  // not include -kHeapObjectTag into it.
  __ sub(r5, value, Operand(kHeapObjectTag));
  __ vldr(d0, r5, HeapNumber::kValueOffset);
+ __ EmitECMATruncate(r5, d0, s2, r6, r7, r9);

- __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
- // NaNs and Infinities have all-one exponents so they sign extend to -1.
- __ cmp(r6, Operand(-1));
- __ mov(r5, Operand(0), LeaveCC, eq);
-
- // Not infinity or NaN simply convert to int.
- if (IsElementTypeSigned(array_type)) {
- __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
- } else {
- __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
- }
- __ vmov(r5, s0, ne);
-
- switch (array_type) {
- case kExternalByteArray:
- case kExternalUnsignedByteArray:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ switch (elements_kind) {
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalShortArray:
- case kExternalUnsignedShortArray:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
  break;
- case kExternalIntArray:
- case kExternalUnsignedIntArray:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ __ str(r5, MemOperand(r3, key, LSL, 1));
  break;
- default:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::FAST_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ case JSObject::DICTIONARY_ELEMENTS:
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
  UNREACHABLE();
  break;
  }
@@ -3975,7 +3959,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
  __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));

- if (array_type == kExternalFloatArray) {
+ if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
  Label done, nan_or_infinity_or_zero;
  static const int kMantissaInHiWordShift =
  kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
@@ -4015,7 +3999,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

  __ bind(&done);
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ __ str(r5, MemOperand(r3, key, LSL, 1));
  // Entry registers are intact, r0 holds the value which is the return
  // value.
  __ Ret();
@@ -4027,14 +4011,14 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
  __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
  __ b(&done);
- } else if (array_type == kExternalDoubleArray) {
- __ add(r7, r3, Operand(r4, LSL, 3));
+ } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
+ __ add(r7, r3, Operand(key, LSL, 2));
  // r7: effective address of destination element.
  __ str(r6, MemOperand(r7, 0));
  __ str(r5, MemOperand(r7, Register::kSizeInBytes));
  __ Ret();
  } else {
- bool is_signed_type = IsElementTypeSigned(array_type);
+ bool is_signed_type = IsElementTypeSigned(elements_kind);
  int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
  int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

@@ -4081,20 +4065,26 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
  __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);

  __ bind(&done);
- switch (array_type) {
- case kExternalByteArray:
- case kExternalUnsignedByteArray:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ switch (elements_kind) {
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
  break;
- case kExternalShortArray:
- case kExternalUnsignedShortArray:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
  break;
- case kExternalIntArray:
- case kExternalUnsignedIntArray:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ __ str(r5, MemOperand(r3, key, LSL, 1));
  break;
- default:
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::FAST_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ case JSObject::DICTIONARY_ELEMENTS:
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
  UNREACHABLE();
  break;
  }
@@ -4157,7 +4147,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {

  // Load the result and make sure it's not the hole.
  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(r4,
  MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -4173,6 +4163,77 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  }


+ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
+ MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss_force_generic, slow_allocate_heapnumber;
+
+ Register key_reg = r0;
+ Register receiver_reg = r1;
+ Register elements_reg = r2;
+ Register heap_number_reg = r2;
+ Register indexed_double_offset = r3;
+ Register scratch = r4;
+ Register scratch2 = r5;
+ Register scratch3 = r6;
+ Register heap_number_map = r7;
+
+ // This stub is meant to be tail-jumped to, the receiver must already
+ // have been verified by the caller to not be a smi.
+
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+ // Get the elements array.
+ __ ldr(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+
+ // Check that the key is within bounds.
+ __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ __ cmp(key_reg, Operand(scratch));
+ __ b(hs, &miss_force_generic);
+
+ // Load the upper word of the double in the fixed array and test for NaN.
+ __ add(indexed_double_offset, elements_reg,
+ Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
+ uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
+ __ ldr(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
+ __ cmp(scratch, Operand(kHoleNanUpper32));
+ __ b(&miss_force_generic, eq);
+
+ // Non-NaN. Allocate a new heap number and copy the double value into it.
+ __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+ __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
+ heap_number_map, &slow_allocate_heapnumber);
+
+ // Don't need to reload the upper 32 bits of the double, it's already in
+ // scratch.
+ __ str(scratch, FieldMemOperand(heap_number_reg,
+ HeapNumber::kExponentOffset));
+ __ ldr(scratch, FieldMemOperand(indexed_double_offset,
+ FixedArray::kHeaderSize));
+ __ str(scratch, FieldMemOperand(heap_number_reg,
+ HeapNumber::kMantissaOffset));
+
+ __ mov(r0, heap_number_reg);
+ __ Ret();
+
+ __ bind(&slow_allocate_heapnumber);
+ Handle<Code> slow_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Slow();
+ __ Jump(slow_ic, RelocInfo::CODE_TARGET);
+
+ __ bind(&miss_force_generic);
+ Handle<Code> miss_ic =
+ masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
+ __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+ }
+
+
  void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
  bool is_js_array) {
  // ----------- S t a t e -------------
@@ -4195,7 +4256,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
- __ JumpIfNotSmi(r0, &miss_force_generic);
+ __ JumpIfNotSmi(key_reg, &miss_force_generic);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ ldr(elements_reg,
@@ -4218,7 +4279,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,

  __ add(scratch,
  elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ str(value_reg,
  MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ RecordWrite(scratch,
@@ -4236,6 +4297,132 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
  }


+ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
+ MacroAssembler* masm,
+ bool is_js_array) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : key
+ // -- r2 : receiver
+ // -- lr : return address
+ // -- r3 : scratch
+ // -- r4 : scratch
+ // -- r5 : scratch
+ // -----------------------------------
+ Label miss_force_generic, smi_value, is_nan, maybe_nan, have_double_value;
+
+ Register value_reg = r0;
+ Register key_reg = r1;
+ Register receiver_reg = r2;
+ Register scratch = r3;
+ Register elements_reg = r4;
+ Register mantissa_reg = r5;
+ Register exponent_reg = r6;
+ Register scratch4 = r7;
+
+ // This stub is meant to be tail-jumped to, the receiver must already
+ // have been verified by the caller to not be a smi.
+ __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+ __ ldr(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+
+ // Check that the key is within bounds.
+ if (is_js_array) {
+ __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ } else {
+ __ ldr(scratch,
+ FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ }
+ // Compare smis, unsigned compare catches both negative and out-of-bound
+ // indexes.
+ __ cmp(key_reg, scratch);
+ __ b(hs, &miss_force_generic);
+
+ // Handle smi values specially.
+ __ JumpIfSmi(value_reg, &smi_value);
+
+ // Ensure that the object is a heap number
+ __ CheckMap(value_reg,
+ scratch,
+ masm->isolate()->factory()->heap_number_map(),
+ &miss_force_generic,
+ DONT_DO_SMI_CHECK);
+
+ // Check for nan: all NaN values have a value greater (signed) than 0x7ff00000
+ // in the exponent.
+ __ mov(scratch, Operand(kNaNOrInfinityLowerBoundUpper32));
+ __ ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
+ __ cmp(exponent_reg, scratch);
+ __ b(ge, &maybe_nan);
+
+ __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
+
+ __ bind(&have_double_value);
+ __ add(scratch, elements_reg,
+ Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
+ __ str(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize));
+ uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
+ __ str(exponent_reg, FieldMemOperand(scratch, offset));
+ __ Ret();
+
+ __ bind(&maybe_nan);
+ // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
+ // it's an Infinity, and the non-NaN code path applies.
+ __ b(gt, &is_nan);
+ __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
+ __ cmp(mantissa_reg, Operand(0));
+ __ b(eq, &have_double_value);
+ __ bind(&is_nan);
+ // Load canonical NaN for storing into the double array.
+ uint64_t nan_int64 = BitCast<uint64_t>(
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+ __ mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64)));
+ __ mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32)));
+ __ jmp(&have_double_value);
+
+ __ bind(&smi_value);
+ __ add(scratch, elements_reg,
+ Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ __ add(scratch, scratch,
+ Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
+ // scratch is now effective address of the double element
+
+ FloatingPointHelper::Destination destination;
+ if (CpuFeatures::IsSupported(VFP3)) {
+ destination = FloatingPointHelper::kVFPRegisters;
+ } else {
+ destination = FloatingPointHelper::kCoreRegisters;
+ }
+
+ Register untagged_value = receiver_reg;
+ __ SmiUntag(untagged_value, value_reg);
+ FloatingPointHelper::ConvertIntToDouble(
+ masm,
+ untagged_value,
+ destination,
+ d0,
+ mantissa_reg,
+ exponent_reg,
+ scratch4,
+ s2);
+ if (destination == FloatingPointHelper::kVFPRegisters) {
+ CpuFeatures::Scope scope(VFP3);
+ __ vstr(d0, scratch, 0);
+ } else {
+ __ str(mantissa_reg, MemOperand(scratch, 0));
+ __ str(exponent_reg, MemOperand(scratch, Register::kSizeInBytes));
+ }
+ __ Ret();
+
+ // Handle store cache miss, replacing the ic with the generic stub.
+ __ bind(&miss_force_generic);
+ Handle<Code> ic =
+ masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+ }
+
+
  #undef __

  } } // namespace v8::internal