libv8 3.3.10.4 → 3.5.10.beta1
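To pull the prerelease side of this comparison into a Bundler-managed project, a Gemfile entry along the following lines should work. The gem name and version are taken from this page; the surrounding Gemfile is only an assumed minimal example, and prerelease versions generally have to be pinned explicitly because the resolver will not pick them up on its own.

    # Gemfile: hypothetical project opting into the libv8 prerelease
    source 'https://rubygems.org'
    gem 'libv8', '3.5.10.beta1'  # exact pin; prereleases are not selected automatically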

Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -58,35 +58,14 @@ class TranscendentalCacheStub: public CodeStub {
  };
 
 
- class ToBooleanStub: public CodeStub {
- public:
- explicit ToBooleanStub(Register tos) : tos_(tos) { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Register tos_;
- Major MajorKey() { return ToBoolean; }
- int MinorKey() { return tos_.code(); }
- };
-
-
  class UnaryOpStub: public CodeStub {
  public:
- UnaryOpStub(Token::Value op, UnaryOverwriteMode mode)
+ UnaryOpStub(Token::Value op,
+ UnaryOverwriteMode mode,
+ UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
  : op_(op),
  mode_(mode),
- operand_type_(UnaryOpIC::UNINITIALIZED),
- name_(NULL) {
- }
-
- UnaryOpStub(
- int key,
- UnaryOpIC::TypeInfo operand_type)
- : op_(OpBits::decode(key)),
- mode_(ModeBits::decode(key)),
- operand_type_(operand_type),
- name_(NULL) {
+ operand_type_(operand_type) {
  }
 
  private:
@@ -96,20 +75,7 @@ class UnaryOpStub: public CodeStub {
  // Operand type information determined at runtime.
  UnaryOpIC::TypeInfo operand_type_;
 
- char* name_;
-
- const char* GetName();
-
- #ifdef DEBUG
- void Print() {
- PrintF("UnaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- UnaryOpIC::GetName(operand_type_));
- }
- #endif
+ virtual void PrintName(StringStream* stream);
 
  class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
  class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -163,8 +129,7 @@ class BinaryOpStub: public CodeStub {
  : op_(op),
  mode_(mode),
  operands_type_(BinaryOpIC::UNINITIALIZED),
- result_type_(BinaryOpIC::UNINITIALIZED),
- name_(NULL) {
+ result_type_(BinaryOpIC::UNINITIALIZED) {
  use_vfp3_ = CpuFeatures::IsSupported(VFP3);
  ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
  }
@@ -177,8 +142,7 @@ class BinaryOpStub: public CodeStub {
  mode_(ModeBits::decode(key)),
  use_vfp3_(VFP3Bits::decode(key)),
  operands_type_(operands_type),
- result_type_(result_type),
- name_(NULL) { }
+ result_type_(result_type) { }
 
  private:
  enum SmiCodeGenerateHeapNumberResults {
@@ -194,20 +158,7 @@ class BinaryOpStub: public CodeStub {
  BinaryOpIC::TypeInfo operands_type_;
  BinaryOpIC::TypeInfo result_type_;
 
- char* name_;
-
- const char* GetName();
-
- #ifdef DEBUG
- void Print() {
- PrintF("BinaryOpStub %d (op %s), "
- "(mode %d, runtime_type_info %s)\n",
- MinorKey(),
- Token::String(op_),
- static_cast<int>(mode_),
- BinaryOpIC::GetName(operands_type_));
- }
- #endif
+ virtual void PrintName(StringStream* stream);
 
  // Minor key encoding in 16 bits RRRTTTVOOOOOOOMM.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -391,12 +342,6 @@ class WriteInt32ToHeapNumberStub : public CodeStub {
  }
 
  void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "WriteInt32ToHeapNumberStub"; }
-
- #ifdef DEBUG
- void Print() { PrintF("WriteInt32ToHeapNumberStub\n"); }
- #endif
  };
 
 
@@ -423,8 +368,6 @@ class NumberToStringStub: public CodeStub {
  int MinorKey() { return 0; }
 
  void Generate(MacroAssembler* masm);
-
- const char* GetName() { return "NumberToStringStub"; }
  };
 
 
@@ -442,8 +385,6 @@ class RegExpCEntryStub: public CodeStub {
  int MinorKey() { return 0; }
 
  bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "RegExpCEntryStub"; }
  };
 
 
@@ -464,14 +405,11 @@ class DirectCEntryStub: public CodeStub {
  int MinorKey() { return 0; }
 
  bool NeedsImmovableCode() { return true; }
-
- const char* GetName() { return "DirectCEntryStub"; }
  };
 
 
  class FloatingPointHelper : public AllStatic {
  public:
-
  enum Destination {
  kVFPRegisters,
  kCoreRegisters
@@ -649,13 +587,6 @@ class StringDictionaryLookupStub: public CodeStub {
  StringDictionary::kHeaderSize +
  StringDictionary::kElementsStartIndex * kPointerSize;
 
-
- #ifdef DEBUG
- void Print() {
- PrintF("StringDictionaryLookupStub\n");
- }
- #endif
-
  Major MajorKey() { return StringDictionaryNegativeLookup; }
 
  int MinorKey() {
@@ -58,9 +58,7 @@ class CodeGenerator: public AstVisitor {
  // Print the code after compiling it.
  static void PrintCode(Handle<Code> code, CompilationInfo* info);
 
- #ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
- #endif
 
  static void SetFunctionInfo(Handle<JSFunction> fun,
  FunctionLiteral* lit,
@@ -35,7 +35,7 @@
  namespace v8 {
  namespace internal {
 
- int Deoptimizer::table_entry_size_ = 16;
+ const int Deoptimizer::table_entry_size_ = 16;
 
 
  int Deoptimizer::patch_size() {
@@ -65,8 +65,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 
  // For each return after a safepoint insert an absolute call to the
  // corresponding deoptimization entry.
- ASSERT(patch_size() % Assembler::kInstrSize == 0);
- int call_size_in_words = patch_size() / Assembler::kInstrSize;
  unsigned last_pc_offset = 0;
  SafepointTable table(function->code());
  for (unsigned i = 0; i < table.length(); i++) {
@@ -87,13 +85,18 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  #endif
  last_pc_offset = pc_offset;
  if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
+ Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
+ deoptimization_index, Deoptimizer::LAZY);
  last_pc_offset += gap_code_size;
+ int call_size_in_bytes = MacroAssembler::CallSize(deoptimization_entry,
+ RelocInfo::NONE);
+ int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
+ ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
+ ASSERT(call_size_in_bytes <= patch_size());
  CodePatcher patcher(code->instruction_start() + last_pc_offset,
  call_size_in_words);
- Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
- deoptimization_index, Deoptimizer::LAZY);
  patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE);
- last_pc_offset += patch_size();
+ last_pc_offset += call_size_in_bytes;
  }
  }
 
@@ -267,6 +270,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
  output_ = new FrameDescription*[1];
  output_[0] = new(output_frame_size) FrameDescription(
  output_frame_size, function_);
+ #ifdef DEBUG
+ output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
+ #endif
 
  // Clear the incoming parameters in the optimized frame to avoid
  // confusing the garbage collector.
@@ -382,6 +388,9 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
  // Allocate and store the output frame description.
  FrameDescription* output_frame =
  new(output_frame_size) FrameDescription(output_frame_size, function);
+ #ifdef DEBUG
+ output_frame->SetKind(Code::FUNCTION);
+ #endif
 
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
@@ -516,7 +525,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
 
 
  // Set the continuation for the topmost frame.
- if (is_topmost) {
+ if (is_topmost && bailout_type_ != DEBUGGER) {
  Builtins* builtins = isolate_->builtins();
  Code* continuation = (bailout_type_ == EAGER)
  ? builtins->builtin(Builtins::kNotifyDeoptimized)
@@ -524,14 +533,32 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
  output_frame->SetContinuation(
  reinterpret_cast<uint32_t>(continuation->entry()));
  }
+ }
+
+
+ void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
+ // Set the register values. The values are not important as there are no
+ // callee saved registers in JavaScript frames, so all registers are
+ // spilled. Registers fp and sp are set to the correct values though.
+
+ for (int i = 0; i < Register::kNumRegisters; i++) {
+ input_->SetRegister(i, i * 4);
+ }
+ input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp()));
+ input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
+ for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
+ input_->SetDoubleRegister(i, 0.0);
+ }
 
- if (output_count_ - 1 == frame_index) iterator->Done();
+ // Fill the frame content from the actual data on the frame.
+ for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
+ input_->SetFrameSlot(i, Memory::uint32_at(tos + i));
+ }
  }
 
 
  #define __ masm()->
 
-
  // This code tries to be close to ia32 code so that any changes can be
  // easily ported.
  void Deoptimizer::EntryGenerator::Generate() {
@@ -566,6 +593,8 @@ void Deoptimizer::EntryGenerator::Generate() {
  __ vstm(db_w, sp, first, last);
 
  // Push all 16 registers (needed to populate FrameDescription::registers_).
+ // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
+ // handle this a bit differently.
  __ stm(db_w, sp, restored_regs | sp.bit() | lr.bit() | pc.bit());
 
  const int kSavedRegistersAreaSize =
@@ -611,30 +640,21 @@ void Deoptimizer::EntryGenerator::Generate() {
 
  // Copy core registers into FrameDescription::registers_[kNumRegisters].
  ASSERT(Register::kNumRegisters == kNumberOfRegisters);
- ASSERT(kNumberOfRegisters % 2 == 0);
-
- Label arm_loop;
- __ add(r6, r1, Operand(FrameDescription::registers_offset()));
- __ mov(r5, Operand(sp));
- __ mov(r4, Operand(kNumberOfRegisters / 2));
-
- __ bind(&arm_loop);
- __ Ldrd(r2, r3, MemOperand(r5, kPointerSize * 2, PostIndex));
- __ sub(r4, r4, Operand(1), SetCC);
- __ Strd(r2, r3, MemOperand(r6, kPointerSize * 2, PostIndex));
- __ b(gt, &arm_loop);
+ for (int i = 0; i < kNumberOfRegisters; i++) {
+ int offset = (i * kPointerSize) + FrameDescription::registers_offset();
+ __ ldr(r2, MemOperand(sp, i * kPointerSize));
+ __ str(r2, MemOperand(r1, offset));
+ }
 
  // Copy VFP registers to
  // double_registers_[DoubleRegister::kNumAllocatableRegisters]
- Label vfp_loop;
- __ add(r6, r1, Operand(FrameDescription::double_registers_offset()));
- __ mov(r4, Operand(DwVfpRegister::kNumAllocatableRegisters));
-
- __ bind(&vfp_loop);
- __ Ldrd(r2, r3, MemOperand(r5, kDoubleSize, PostIndex));
- __ sub(r4, r4, Operand(1), SetCC);
- __ Strd(r2, r3, MemOperand(r6, kDoubleSize, PostIndex));
- __ b(gt, &vfp_loop);
+ int double_regs_offset = FrameDescription::double_registers_offset();
+ for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; ++i) {
+ int dst_offset = i * kDoubleSize + double_regs_offset;
+ int src_offset = i * kDoubleSize + kNumberOfRegisters * kPointerSize;
+ __ vldr(d0, sp, src_offset);
+ __ vstr(d0, r1, dst_offset);
+ }
 
  // Remove the bailout id, eventually return address, and the saved registers
  // from the stack.
@@ -200,7 +200,7 @@ void Decoder::PrintDRegister(int reg) {
 
  // These shift names are defined in a way to match the native disassembler
  // formatting. See for example the command "objdump -d <binary file>".
- static const char* shift_names[kNumberOfShifts] = {
+ static const char* const shift_names[kNumberOfShifts] = {
  "lsl", "lsr", "asr", "ror"
  };
 
@@ -1,4 +1,4 @@
- // Copyright 2006-2008 the V8 project authors. All rights reserved.
+ // Copyright 2011 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
@@ -72,6 +72,9 @@ static const RegList kCalleeSaved =
 
  static const int kNumCalleeSaved = 7 + kR9Available;
 
+ // Double registers d8 to d15 are callee-saved.
+ static const int kNumDoubleCalleeSaved = 8;
+
 
  // Number of registers for which space is reserved in safepoints. Must be a
  // multiple of 8.
@@ -90,10 +93,11 @@ static const int kNumSafepointSavedRegisters =
 
  class StackHandlerConstants : public AllStatic {
  public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kStateOffset = 1 * kPointerSize;
- static const int kFPOffset = 2 * kPointerSize;
- static const int kPCOffset = 3 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kStateOffset = 1 * kPointerSize;
+ static const int kContextOffset = 2 * kPointerSize;
+ static const int kFPOffset = 3 * kPointerSize;
+ static const int kPCOffset = 4 * kPointerSize;
 
  static const int kSize = kPCOffset + kPointerSize;
  };
@@ -47,7 +47,6 @@ namespace internal {
 
 
  static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
  return property->id();
  }
 
@@ -92,17 +91,19 @@ class JumpPatchSite BASE_EMBEDDED {
  }
 
  void EmitPatchInfo() {
- int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
- Register reg;
- reg.set_code(delta_to_patch_site / kOff12Mask);
- __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
+ if (patch_site_.is_bound()) {
+ int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
+ Register reg;
+ reg.set_code(delta_to_patch_site / kOff12Mask);
+ __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
  #ifdef DEBUG
- info_emitted_ = true;
+ info_emitted_ = true;
  #endif
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
  }
 
- bool is_bound() const { return patch_site_.is_bound(); }
-
  private:
  MacroAssembler* masm_;
  Label patch_site_;
@@ -129,6 +130,7 @@ class JumpPatchSite BASE_EMBEDDED {
  void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
+ scope_ = info->scope();
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");
 
@@ -139,21 +141,21 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  }
  #endif
 
- // Strict mode functions need to replace the receiver with undefined
- // when called as functions (without an explicit receiver
- // object). r5 is zero for method calls and non-zero for function
- // calls.
- if (info->is_strict_mode()) {
+ // Strict mode functions and builtins need to replace the receiver
+ // with undefined when called as functions (without an explicit
+ // receiver object). r5 is zero for method calls and non-zero for
+ // function calls.
+ if (info->is_strict_mode() || info->is_native()) {
  Label ok;
  __ cmp(r5, Operand(0));
  __ b(eq, &ok);
- int receiver_offset = scope()->num_parameters() * kPointerSize;
+ int receiver_offset = info->scope()->num_parameters() * kPointerSize;
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ str(r2, MemOperand(sp, receiver_offset));
  __ bind(&ok);
  }
 
- int locals_count = scope()->num_stack_slots();
+ int locals_count = info->scope()->num_stack_slots();
 
  __ Push(lr, fp, cp, r1);
  if (locals_count > 0) {
@@ -173,7 +175,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  bool function_in_register = true;
 
  // Possibly allocate a local context.
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+ int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
  Comment cmnt(masm_, "[ Allocate local context");
  // Argument to NewContext is the function, which is in r1.
@@ -182,14 +184,14 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  FastNewContextStub stub(heap_slots);
  __ CallStub(&stub);
  } else {
- __ CallRuntime(Runtime::kNewContext, 1);
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
  }
  function_in_register = false;
  // Context is returned in both r0 and cp. It replaces the context
  // passed to us. It's saved in the stack and kept live in cp.
  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // Copy any necessary parameters into the context.
- int num_parameters = scope()->num_parameters();
+ int num_parameters = info->scope()->num_parameters();
  for (int i = 0; i < num_parameters; i++) {
  Slot* slot = scope()->parameter(i)->AsSlot();
  if (slot != NULL && slot->type() == Slot::CONTEXT) {
@@ -220,27 +222,28 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  __ mov(r3, r1);
  }
  // Receiver is just before the parameters on the caller's stack.
- int offset = scope()->num_parameters() * kPointerSize;
+ int num_parameters = info->scope()->num_parameters();
+ int offset = num_parameters * kPointerSize;
  __ add(r2, fp,
  Operand(StandardFrameConstants::kCallerSPOffset + offset));
- __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
+ __ mov(r1, Operand(Smi::FromInt(num_parameters)));
  __ Push(r3, r2, r1);
 
  // Arguments to ArgumentsAccessStub:
  // function, receiver address, parameter count.
  // The stub will rewrite receiever and parameter count if the previous
  // stack frame was an arguments adapter frame.
- ArgumentsAccessStub stub(
- is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
- : ArgumentsAccessStub::NEW_NON_STRICT);
+ ArgumentsAccessStub::Type type;
+ if (is_strict_mode()) {
+ type = ArgumentsAccessStub::NEW_STRICT;
+ } else if (function()->has_duplicate_parameters()) {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+ } else {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+ }
+ ArgumentsAccessStub stub(type);
  __ CallStub(&stub);
 
- Variable* arguments_shadow = scope()->arguments_shadow();
- if (arguments_shadow != NULL) {
- // Duplicate the value; move-to-slot operation might clobber registers.
- __ mov(r3, r0);
- Move(arguments_shadow->AsSlot(), r3, r1, r2);
- }
  Move(arguments->AsSlot(), r0, r1, r2);
  }
 
@@ -345,7 +348,7 @@ void FullCodeGenerator::EmitReturnSequence() {
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
  // Here we use masm_-> instead of the __ macro to avoid the code coverage
  // tool from instrumenting as we rely on the code size here.
- int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
+ int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
  __ RecordJSReturn();
  masm_->mov(sp, fp);
@@ -383,7 +386,7 @@ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  // For simplicity we always test the accumulator register.
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
  }
 
 
@@ -417,7 +420,7 @@ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  if (true_label_ != fall_through_) __ b(true_label_);
  } else {
  __ LoadRoot(result_register(), index);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
  }
  }
 
@@ -464,7 +467,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  } else {
  // For simplicity we always test the accumulator register.
  __ mov(result_register(), Operand(lit));
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
  }
  }
 
@@ -500,7 +503,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
  }
 
 
@@ -578,7 +581,7 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
  }
  }
 
- void FullCodeGenerator::DoTest(Label* if_true,
+ void FullCodeGenerator::DoTest(Expression* condition,
+ Label* if_true,
  Label* if_false,
  Label* fall_through) {
  if (CpuFeatures::IsSupported(VFP3)) {
@@ -689,102 +693,73 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL); // Must have been resolved.
  Slot* slot = variable->AsSlot();
- Property* prop = variable->AsProperty();
-
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, MemOperand(fp, SlotOffset(slot)));
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- }
- break;
-
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a 'with'.
- __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
- __ cmp(r1, cp);
- __ Check(eq, "Unexpected declaration in current context.");
- }
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, ContextOperand(cp, slot->index()));
- // No write barrier since the_hole_value is in old space.
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ str(result_register(), ContextOperand(cp, slot->index()));
- int offset = Context::SlotOffset(slot->index());
- // We know that we have written a function, which is not a smi.
- __ mov(r1, Operand(cp));
- __ RecordWrite(r1, Operand(offset), r2, result_register());
- }
- break;
-
- case Slot::LOOKUP: {
- __ mov(r2, Operand(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR ||
- mode == Variable::CONST);
- PropertyAttributes attr =
- (mode == Variable::VAR) ? NONE : READ_ONLY;
- __ mov(r1, Operand(Smi::FromInt(attr)));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (mode == Variable::CONST) {
- __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
- __ Push(cp, r2, r1, r0);
- } else if (function != NULL) {
- __ Push(cp, r2, r1);
- // Push initial value for function declaration.
- VisitForStackValue(function);
- } else {
- __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
- __ Push(cp, r2, r1, r0);
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
+ ASSERT(slot != NULL);
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, MemOperand(fp, SlotOffset(slot)));
  }
- }
+ break;
 
- } else if (prop != NULL) {
- // A const declaration aliasing a parameter is an illegal redeclaration.
- ASSERT(mode != Variable::CONST);
- if (function != NULL) {
- // We are declaring a function that rewrites to a property.
- // Use (keyed) IC to set the initial value. We cannot visit the
- // rewrite because it's shared and we risk recording duplicate AST
- // IDs for bailouts from optimized code.
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ case Slot::CONTEXT:
+ // We bypass the general EmitSlotSearch because we know more about
+ // this specific context.
+
+ // The variable in the decl always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.");
+ __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.");
  }
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ str(result_register(), ContextOperand(cp, slot->index()));
+ int offset = Context::SlotOffset(slot->index());
+ // We know that we have written a function, which is not a smi.
+ __ mov(r1, Operand(cp));
+ __ RecordWrite(r1, Operand(offset), r2, result_register());
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, ContextOperand(cp, slot->index()));
+ // No write barrier since the_hole_value is in old space.
+ }
+ break;
 
- __ push(r0);
- VisitForAccumulatorValue(function);
- __ pop(r2);
-
- ASSERT(prop->key()->AsLiteral() != NULL &&
- prop->key()->AsLiteral()->handle()->IsSmi());
- __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
-
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
- // Value in r0 is ignored (declarations are statements).
+ case Slot::LOOKUP: {
+ __ mov(r2, Operand(variable->name()));
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+ __ mov(r1, Operand(Smi::FromInt(attr)));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ if (function != NULL) {
+ __ Push(cp, r2, r1);
+ // Push initial value for function declaration.
+ VisitForStackValue(function);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+ __ Push(cp, r2, r1, r0);
+ } else {
+ __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
+ __ Push(cp, r2, r1, r0);
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
  }
  }
  }
@@ -857,7 +832,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  // Record position before stub call for type feedback.
  SetSourcePosition(clause->position());
  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- EmitCallIC(ic, &patch_site, clause->CompareId());
+ __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ patch_site.EmitPatchInfo();
 
  __ cmp(r0, Operand(0));
  __ b(ne, &next_test);
@@ -870,7 +846,7 @@
  __ bind(&next_test);
871
847
  __ Drop(1); // Switch value is no longer needed.
872
848
  if (default_clause == NULL) {
873
- __ b(nested_statement.break_target());
849
+ __ b(nested_statement.break_label());
874
850
  } else {
875
851
  __ b(default_clause->body_target());
876
852
  }
@@ -884,7 +860,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
884
860
  VisitStatements(clause->statements());
885
861
  }
886
862
 
887
- __ bind(nested_statement.break_target());
863
+ __ bind(nested_statement.break_label());
888
864
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
889
865
  }
890
866
 
@@ -912,8 +888,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
912
888
  // Convert the object to a JS object.
913
889
  Label convert, done_convert;
914
890
  __ JumpIfSmi(r0, &convert);
915
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
916
- __ b(hs, &done_convert);
891
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
892
+ __ b(ge, &done_convert);
917
893
  __ bind(&convert);
918
894
  __ push(r0);
919
895
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
@@ -1015,7 +991,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1015
991
  // Load the current count to r0, load the length to r1.
1016
992
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1017
993
  __ cmp(r0, r1); // Compare to the array length.
1018
- __ b(hs, loop_statement.break_target());
994
+ __ b(hs, loop_statement.break_label());
1019
995
 
1020
996
  // Get the current entry of the array into register r3.
1021
997
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
@@ -1041,7 +1017,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1041
1017
  __ push(r3); // Current entry.
1042
1018
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1043
1019
  __ mov(r3, Operand(r0), SetCC);
1044
- __ b(eq, loop_statement.continue_target());
1020
+ __ b(eq, loop_statement.continue_label());
1045
1021
 
1046
1022
  // Update the 'each' property or variable from the possibly filtered
1047
1023
  // entry in register r3.
@@ -1057,7 +1033,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1057
1033
 
1058
1034
  // Generate code for the going to the next element by incrementing
1059
1035
  // the index (smi) stored on top of the stack.
1060
- __ bind(loop_statement.continue_target());
1036
+ __ bind(loop_statement.continue_label());
1061
1037
  __ pop(r0);
1062
1038
  __ add(r0, r0, Operand(Smi::FromInt(1)));
1063
1039
  __ push(r0);
@@ -1066,7 +1042,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1066
1042
  __ b(&loop);
1067
1043
 
1068
1044
  // Remove the pointers stored on the stack.
1069
- __ bind(loop_statement.break_target());
1045
+ __ bind(loop_statement.break_label());
1070
1046
  __ Drop(5);
1071
1047
 
1072
1048
  // Exit and decrement the loop depth.
@@ -1105,7 +1081,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1105
1081
 
1106
1082
  void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1107
1083
  Comment cmnt(masm_, "[ VariableProxy");
1108
- EmitVariableLoad(expr->var());
1084
+ EmitVariableLoad(expr);
1109
1085
  }
1110
1086
 
1111
1087
 
@@ -1127,8 +1103,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1127
1103
  __ b(ne, slow);
1128
1104
  }
1129
1105
  // Load next context in chain.
1130
- __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
1131
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1106
+ __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1132
1107
  // Walk the rest of the chain without clobbering cp.
1133
1108
  current = next;
1134
1109
  }
@@ -1154,8 +1129,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1154
1129
  __ tst(temp, temp);
1155
1130
  __ b(ne, slow);
1156
1131
  // Load next context in chain.
1157
- __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
1158
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1132
+ __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1159
1133
  __ b(&loop);
1160
1134
  __ bind(&fast);
1161
1135
  }
@@ -1166,7 +1140,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1166
1140
  ? RelocInfo::CODE_TARGET
1167
1141
  : RelocInfo::CODE_TARGET_CONTEXT;
1168
1142
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1169
- EmitCallIC(ic, mode, AstNode::kNoNumber);
1143
+ __ Call(ic, mode);
1170
1144
  }
1171
1145
 
1172
1146
 
@@ -1186,8 +1160,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1186
1160
  __ tst(temp, temp);
1187
1161
  __ b(ne, slow);
1188
1162
  }
1189
- __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
1190
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
1163
+ __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1191
1164
  // Walk the rest of the chain without clobbering cp.
1192
1165
  context = next;
1193
1166
  }
@@ -1248,7 +1221,7 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1248
1221
  __ mov(r0, Operand(key_literal->handle()));
1249
1222
  Handle<Code> ic =
1250
1223
  isolate()->builtins()->KeyedLoadIC_Initialize();
1251
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1224
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1252
1225
  __ jmp(done);
1253
1226
  }
1254
1227
  }
@@ -1257,24 +1230,27 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1257
1230
  }
1258
1231
 
1259
1232
 
1260
- void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1261
- // Four cases: non-this global variables, lookup slots, all other
1262
- // types of slots, and parameters that rewrite to explicit property
1263
- // accesses on the arguments object.
1233
+ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1234
+ // Record position before possible IC call.
1235
+ SetSourcePosition(proxy->position());
1236
+ Variable* var = proxy->var();
1237
+
1238
+ // Three cases: non-this global variables, lookup slots, and all other
1239
+ // types of slots.
1264
1240
  Slot* slot = var->AsSlot();
1265
- Property* property = var->AsProperty();
1241
+ ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
1266
1242
 
1267
- if (var->is_global() && !var->is_this()) {
1243
+ if (slot == NULL) {
1268
1244
  Comment cmnt(masm_, "Global variable");
1269
1245
  // Use inline caching. Variable name is passed in r2 and the global
1270
1246
  // object (receiver) in r0.
1271
1247
  __ ldr(r0, GlobalObjectOperand());
1272
1248
  __ mov(r2, Operand(var->name()));
1273
1249
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1274
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1250
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1275
1251
  context()->Plug(r0);
1276
1252
 
1277
- } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
1253
+ } else if (slot->type() == Slot::LOOKUP) {
1278
1254
  Label done, slow;
1279
1255
 
1280
1256
  // Generate code for loading from variables potentially shadowed
@@ -1290,7 +1266,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1290
1266
 
1291
1267
  context()->Plug(r0);
1292
1268
 
1293
- } else if (slot != NULL) {
1269
+ } else {
1294
1270
  Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1295
1271
  ? "Context slot"
1296
1272
  : "Stack slot");
@@ -1303,35 +1279,23 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1303
1279
  __ cmp(r0, ip);
1304
1280
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1305
1281
  context()->Plug(r0);
1282
+ } else if (var->mode() == Variable::LET) {
1283
+ // Let bindings may be the hole value if they have not been initialized.
1284
+ // Throw a type error in this case.
1285
+ Label done;
1286
+ MemOperand slot_operand = EmitSlotSearch(slot, r0);
1287
+ __ ldr(r0, slot_operand);
1288
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1289
+ __ cmp(r0, ip);
1290
+ __ b(ne, &done);
1291
+ __ mov(r0, Operand(var->name()));
1292
+ __ push(r0);
1293
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1294
+ __ bind(&done);
1295
+ context()->Plug(r0);
1306
1296
  } else {
1307
1297
  context()->Plug(slot);
1308
1298
  }
1309
- } else {
1310
- Comment cmnt(masm_, "Rewritten parameter");
1311
- ASSERT_NOT_NULL(property);
1312
- // Rewritten parameter accesses are of the form "slot[literal]".
1313
-
1314
- // Assert that the object is in a slot.
1315
- Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1316
- ASSERT_NOT_NULL(object_var);
1317
- Slot* object_slot = object_var->AsSlot();
1318
- ASSERT_NOT_NULL(object_slot);
1319
-
1320
- // Load the object.
1321
- Move(r1, object_slot);
1322
-
1323
- // Assert that the key is a smi.
1324
- Literal* key_literal = property->key()->AsLiteral();
1325
- ASSERT_NOT_NULL(key_literal);
1326
- ASSERT(key_literal->handle()->IsSmi());
1327
-
1328
- // Load the key.
1329
- __ mov(r0, Operand(key_literal->handle()));
1330
-
1331
- // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1332
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1333
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1334
- context()->Plug(r0);
1335
1299
  }
1336
1300
  }
1337
1301
 
@@ -1441,7 +1405,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1441
1405
  Handle<Code> ic = is_strict_mode()
1442
1406
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1443
1407
  : isolate()->builtins()->StoreIC_Initialize();
1444
- EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
1408
+ __ Call(ic, RelocInfo::CODE_TARGET, key->id());
1445
1409
  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1446
1410
  } else {
1447
1411
  VisitForEffect(value);
@@ -1572,7 +1536,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1572
1536
  }
1573
1537
 
1574
1538
  // Left-hand side can only be a property, a global or a (parameter or local)
1575
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1539
+ // slot.
1576
1540
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1577
1541
  LhsKind assign_type = VARIABLE;
1578
1542
  Property* property = expr->target()->AsProperty();
@@ -1598,27 +1562,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1598
1562
  break;
1599
1563
  case KEYED_PROPERTY:
1600
1564
  if (expr->is_compound()) {
1601
- if (property->is_arguments_access()) {
1602
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1603
- __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
1604
- __ push(r0);
1605
- __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
1606
- } else {
1607
- VisitForStackValue(property->obj());
1608
- VisitForAccumulatorValue(property->key());
1609
- }
1565
+ VisitForStackValue(property->obj());
1566
+ VisitForAccumulatorValue(property->key());
1610
1567
  __ ldr(r1, MemOperand(sp, 0));
1611
1568
  __ push(r0);
1612
1569
  } else {
1613
- if (property->is_arguments_access()) {
1614
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1615
- __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
1616
- __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
1617
- __ Push(r1, r0);
1618
- } else {
1619
- VisitForStackValue(property->obj());
1620
- VisitForStackValue(property->key());
1621
- }
1570
+ VisitForStackValue(property->obj());
1571
+ VisitForStackValue(property->key());
1622
1572
  }
1623
1573
  break;
1624
1574
  }
@@ -1629,7 +1579,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1629
1579
  { AccumulatorValueContext context(this);
1630
1580
  switch (assign_type) {
1631
1581
  case VARIABLE:
1632
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1582
+ EmitVariableLoad(expr->target()->AsVariableProxy());
1633
1583
  PrepareForBailout(expr->target(), TOS_REG);
1634
1584
  break;
1635
1585
  case NAMED_PROPERTY:
@@ -1695,7 +1645,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1695
1645
  __ mov(r2, Operand(key->handle()));
1696
1646
  // Call load IC. It has arguments receiver and property name r0 and r2.
1697
1647
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1698
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1648
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1699
1649
  }
1700
1650
 
1701
1651
 
@@ -1703,7 +1653,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1703
1653
  SetSourcePosition(prop->position());
1704
1654
  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1705
1655
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1706
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1656
+ __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1707
1657
  }
1708
1658
 
1709
1659
 
@@ -1730,7 +1680,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1730
1680
 
1731
1681
  __ bind(&stub_call);
1732
1682
  BinaryOpStub stub(op, mode);
1733
- EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1683
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1684
+ patch_site.EmitPatchInfo();
1734
1685
  __ jmp(&done);
1735
1686
 
1736
1687
  __ bind(&smi_case);
@@ -1811,7 +1762,9 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1811
1762
  OverwriteMode mode) {
1812
1763
  __ pop(r1);
1813
1764
  BinaryOpStub stub(op, mode);
1814
- EmitCallIC(stub.GetCode(), NULL, expr->id());
1765
+ JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1766
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1767
+ patch_site.EmitPatchInfo();
1815
1768
  context()->Plug(r0);
1816
1769
  }
1817
1770
 
@@ -1825,7 +1778,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1825
1778
  }
1826
1779
 
1827
1780
  // Left-hand side can only be a property, a global or a (parameter or local)
1828
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1781
+ // slot.
1829
1782
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1830
1783
  LhsKind assign_type = VARIABLE;
1831
1784
  Property* prop = expr->AsProperty();
@@ -1851,30 +1804,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1851
1804
  Handle<Code> ic = is_strict_mode()
1852
1805
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1853
1806
  : isolate()->builtins()->StoreIC_Initialize();
1854
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1807
+ __ Call(ic);
1855
1808
  break;
1856
1809
  }
1857
1810
  case KEYED_PROPERTY: {
1858
1811
  __ push(r0); // Preserve value.
1859
- if (prop->is_synthetic()) {
1860
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
1861
- ASSERT(prop->key()->AsLiteral() != NULL);
1862
- { AccumulatorValueContext for_object(this);
1863
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1864
- }
1865
- __ mov(r2, r0);
1866
- __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
1867
- } else {
1868
- VisitForStackValue(prop->obj());
1869
- VisitForAccumulatorValue(prop->key());
1870
- __ mov(r1, r0);
1871
- __ pop(r2);
1872
- }
1812
+ VisitForStackValue(prop->obj());
1813
+ VisitForAccumulatorValue(prop->key());
1814
+ __ mov(r1, r0);
1815
+ __ pop(r2);
1873
1816
  __ pop(r0); // Restore value.
1874
1817
  Handle<Code> ic = is_strict_mode()
1875
1818
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1876
1819
  : isolate()->builtins()->KeyedStoreIC_Initialize();
1877
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1820
+ __ Call(ic);
1878
1821
  break;
1879
1822
  }
1880
1823
  }
@@ -1885,8 +1828,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1885
1828
 
1886
1829
  void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1887
1830
  Token::Value op) {
1888
- // Left-hand sides that rewrite to explicit property accesses do not reach
1889
- // here.
1890
1831
  ASSERT(var != NULL);
1891
1832
  ASSERT(var->is_global() || var->AsSlot() != NULL);
1892
1833
 
@@ -1900,7 +1841,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1900
1841
  Handle<Code> ic = is_strict_mode()
1901
1842
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1902
1843
  : isolate()->builtins()->StoreIC_Initialize();
1903
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1844
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1904
1845
 
1905
1846
  } else if (op == Token::INIT_CONST) {
1906
1847
  // Like var declarations, const declarations are hoisted to function
@@ -1922,18 +1863,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1922
1863
  __ b(ne, &skip);
1923
1864
  __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
1924
1865
  break;
1925
- case Slot::CONTEXT: {
1926
- __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
1927
- __ ldr(r2, ContextOperand(r1, slot->index()));
1928
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1929
- __ cmp(r2, ip);
1930
- __ b(ne, &skip);
1931
- __ str(r0, ContextOperand(r1, slot->index()));
1932
- int offset = Context::SlotOffset(slot->index());
1933
- __ mov(r3, r0); // Preserve the stored value in r0.
1934
- __ RecordWrite(r1, Operand(offset), r3, r2);
1935
- break;
1936
- }
1866
+ case Slot::CONTEXT:
1937
1867
  case Slot::LOOKUP:
1938
1868
  __ push(r0);
1939
1869
  __ mov(r0, Operand(slot->var()->name()));
@@ -1943,6 +1873,59 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1943
1873
  }
1944
1874
  __ bind(&skip);
1945
1875
 
1876
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1877
+ // Perform the assignment for non-const variables. Const assignments
1878
+ // are simply skipped.
1879
+ Slot* slot = var->AsSlot();
1880
+ switch (slot->type()) {
1881
+ case Slot::PARAMETER:
1882
+ case Slot::LOCAL: {
1883
+ Label assign;
1884
+ // Check for an initialized let binding.
1885
+ __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
1886
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1887
+ __ cmp(r1, ip);
1888
+ __ b(ne, &assign);
1889
+ __ mov(r1, Operand(var->name()));
1890
+ __ push(r1);
1891
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1892
+ // Perform the assignment.
1893
+ __ bind(&assign);
1894
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
1895
+ break;
1896
+ }
1897
+ case Slot::CONTEXT: {
1898
+ // Let variables may be the hole value if they have not been
1899
+ // initialized. Throw a type error in this case.
1900
+ Label assign;
1901
+ MemOperand target = EmitSlotSearch(slot, r1);
1902
+ // Check for an initialized let binding.
1903
+ __ ldr(r3, target);
1904
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1905
+ __ cmp(r3, ip);
1906
+ __ b(ne, &assign);
1907
+ __ mov(r3, Operand(var->name()));
1908
+ __ push(r3);
1909
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1910
+ // Perform the assignment.
1911
+ __ bind(&assign);
1912
+ __ str(result_register(), target);
1913
+ // RecordWrite may destroy all its register arguments.
1914
+ __ mov(r3, result_register());
1915
+ int offset = Context::SlotOffset(slot->index());
1916
+ __ RecordWrite(r1, Operand(offset), r2, r3);
1917
+ break;
1918
+ }
1919
+ case Slot::LOOKUP:
1920
+ // Call the runtime for the assignment.
1921
+ __ push(r0); // Value.
1922
+ __ mov(r1, Operand(slot->var()->name()));
1923
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
1924
+ __ Push(cp, r1, r0); // Context, name, strict mode.
1925
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
1926
+ break;
1927
+ }
1928
+
1946
1929
  } else if (var->mode() != Variable::CONST) {
1947
1930
  // Perform the assignment for non-const variables. Const assignments
1948
1931
  // are simply skipped.
@@ -2009,7 +1992,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2009
1992
  Handle<Code> ic = is_strict_mode()
2010
1993
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
2011
1994
  : isolate()->builtins()->StoreIC_Initialize();
2012
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1995
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
2013
1996
 
2014
1997
  // If the assignment ends an initialization block, revert to fast case.
2015
1998
  if (expr->ends_initialization_block()) {
@@ -2055,7 +2038,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2055
2038
  Handle<Code> ic = is_strict_mode()
2056
2039
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
2057
2040
  : isolate()->builtins()->KeyedStoreIC_Initialize();
2058
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2041
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
2059
2042
 
2060
2043
  // If the assignment ends an initialization block, revert to fast case.
2061
2044
  if (expr->ends_initialization_block()) {
@@ -2107,7 +2090,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
2107
2090
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2108
2091
  Handle<Code> ic =
2109
2092
  isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
2110
- EmitCallIC(ic, mode, expr->id());
2093
+ __ Call(ic, mode, expr->id());
2111
2094
  RecordJSReturnSite(expr);
2112
2095
  // Restore context register.
2113
2096
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2141,7 +2124,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2141
2124
  Handle<Code> ic =
2142
2125
  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
2143
2126
  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2144
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2127
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
2145
2128
  RecordJSReturnSite(expr);
2146
2129
  // Restore context register.
2147
2130
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2181,7 +2164,8 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2181
2164
  __ push(r1);
2182
2165
 
2183
2166
  // Push the receiver of the enclosing function and do runtime call.
2184
- __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
2167
+ int receiver_offset = 2 + info_->scope()->num_parameters();
2168
+ __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2185
2169
  __ push(r1);
2186
2170
  // Push the strict mode flag.
2187
2171
  __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
@@ -2300,9 +2284,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2300
2284
  __ bind(&done);
2301
2285
  // Push function.
2302
2286
  __ push(r0);
2303
- // Push global receiver.
2304
- __ ldr(r1, GlobalObjectOperand());
2305
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2287
+ // The receiver is implicitly the global receiver. Indicate this
2288
+ // by passing the hole to the call function stub.
2289
+ __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
2306
2290
  __ push(r1);
2307
2291
  __ bind(&call);
2308
2292
  }
@@ -2323,36 +2307,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2323
2307
  EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2324
2308
  } else {
2325
2309
  // Call to a keyed property.
2326
- // For a synthetic property use keyed load IC followed by function call,
2327
- // for a regular property use keyed EmitCallIC.
2328
- if (prop->is_synthetic()) {
2329
- // Do not visit the object and key subexpressions (they are shared
2330
- // by all occurrences of the same rewritten parameter).
2331
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
2332
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2333
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2334
- MemOperand operand = EmitSlotSearch(slot, r1);
2335
- __ ldr(r1, operand);
2336
-
2337
- ASSERT(prop->key()->AsLiteral() != NULL);
2338
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2339
- __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
2340
-
2341
- // Record source code position for IC call.
2342
- SetSourcePosition(prop->position());
2343
-
2344
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2345
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2346
- __ ldr(r1, GlobalObjectOperand());
2347
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2348
- __ Push(r0, r1); // Function, receiver.
2349
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2350
- } else {
2351
- { PreservePositionScope scope(masm()->positions_recorder());
2352
- VisitForStackValue(prop->obj());
2353
- }
2354
- EmitKeyedCallWithIC(expr, prop->key());
2310
+ { PreservePositionScope scope(masm()->positions_recorder());
2311
+ VisitForStackValue(prop->obj());
2355
2312
  }
2313
+ EmitKeyedCallWithIC(expr, prop->key());
2356
2314
  }
2357
2315
  } else {
2358
2316
  { PreservePositionScope scope(masm()->positions_recorder());
@@ -2468,9 +2426,9 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2468
2426
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
2469
2427
  __ b(ne, if_false);
2470
2428
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2471
- __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
2429
+ __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2472
2430
  __ b(lt, if_false);
2473
- __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
2431
+ __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2474
2432
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2475
2433
  Split(le, if_true, if_false, fall_through);
2476
2434
 
@@ -2491,7 +2449,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2491
2449
  &if_true, &if_false, &fall_through);
2492
2450
 
2493
2451
  __ JumpIfSmi(r0, if_false);
2494
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2452
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2495
2453
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2496
2454
  Split(ge, if_true, if_false, fall_through);
2497
2455
 
@@ -2588,8 +2546,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2588
2546
  // If a valueOf property is not found on the object check that it's
2589
2547
  // prototype is the un-modified String prototype. If not result is false.
2590
2548
  __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
2591
- __ tst(r2, Operand(kSmiTagMask));
2592
- __ b(eq, if_false);
2549
+ __ JumpIfSmi(r2, if_false);
2593
2550
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2594
2551
  __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
2595
2552
  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
@@ -2734,7 +2691,7 @@ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2734
2691
  // parameter count in r0.
2735
2692
  VisitForAccumulatorValue(args->at(0));
2736
2693
  __ mov(r1, r0);
2737
- __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2694
+ __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2738
2695
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2739
2696
  __ CallStub(&stub);
2740
2697
  context()->Plug(r0);
@@ -2746,7 +2703,7 @@ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2746
2703
 
2747
2704
  Label exit;
2748
2705
  // Get the number of formal parameters.
2749
- __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2706
+ __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2750
2707
 
2751
2708
  // Check if the calling frame is an arguments adaptor frame.
2752
2709
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@@ -2774,16 +2731,18 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2774
2731
 
2775
2732
  // Check that the object is a JS object but take special care of JS
2776
2733
  // functions to make sure they have 'Function' as their class.
2777
- __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0.
2734
+ __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
2735
+ // Map is now in r0.
2778
2736
  __ b(lt, &null);
2779
2737
 
2780
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
2781
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2782
- // LAST_JS_OBJECT_TYPE.
2783
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2784
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2785
- __ cmp(r1, Operand(JS_FUNCTION_TYPE));
2786
- __ b(eq, &function);
2738
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
2739
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
2740
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
2741
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
2742
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
2743
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
2744
+ __ cmp(r1, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));
2745
+ __ b(ge, &function);
2787
2746
 
2788
2747
  // Check if the constructor in the map is a function.
2789
2748
  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
@@ -2803,7 +2762,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2803
2762
 
2804
2763
  // Objects with a non-function constructor have class 'Object'.
2805
2764
  __ bind(&non_function_constructor);
2806
- __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2765
+ __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2807
2766
  __ jmp(&done);
2808
2767
 
2809
2768
  // Non-JS objects have class null.
@@ -2826,13 +2785,12 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2826
2785
  // with '%2s' (see Logger::LogRuntime for all the formats).
2827
2786
  // 2 (array): Arguments to the format string.
2828
2787
  ASSERT_EQ(args->length(), 3);
2829
- #ifdef ENABLE_LOGGING_AND_PROFILING
2830
2788
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2831
2789
  VisitForStackValue(args->at(1));
2832
2790
  VisitForStackValue(args->at(2));
2833
2791
  __ CallRuntime(Runtime::kLog, 2);
2834
2792
  }
2835
- #endif
2793
+
2836
2794
  // Finally, we're expected to leave a value on the top of the stack.
2837
2795
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2838
2796
  context()->Plug(r0);
@@ -3175,7 +3133,8 @@ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3175
3133
  // InvokeFunction requires the function in r1. Move it in there.
3176
3134
  __ mov(r1, result_register());
3177
3135
  ParameterCount count(arg_count);
3178
- __ InvokeFunction(r1, count, CALL_FUNCTION);
3136
+ __ InvokeFunction(r1, count, CALL_FUNCTION,
3137
+ NullCallWrapper(), CALL_AS_METHOD);
3179
3138
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3180
3139
  context()->Plug(r0);
3181
3140
  }
@@ -3302,7 +3261,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
3302
3261
 
3303
3262
  Label done, not_found;
3304
3263
  // tmp now holds finger offset as a smi.
3305
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3264
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3306
3265
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3307
3266
  // r2 now holds finger offset as a smi.
3308
3267
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -3342,8 +3301,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3342
3301
  __ b(eq, &ok);
3343
3302
  // Fail if either is a non-HeapObject.
3344
3303
  __ and_(tmp, left, Operand(right));
3345
- __ tst(tmp, Operand(kSmiTagMask));
3346
- __ b(eq, &fail);
3304
+ __ JumpIfSmi(tmp, &fail);
3347
3305
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3348
3306
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3349
3307
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
@@ -3433,9 +3391,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3433
3391
  __ b(ne, &bailout);
3434
3392
 
3435
3393
  // Check that the array has fast elements.
3436
- __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
3437
- __ tst(scratch2, Operand(1 << Map::kHasFastElements));
3438
- __ b(eq, &bailout);
3394
+ __ CheckFastElements(scratch1, scratch2, &bailout);
3439
3395
 
3440
3396
  // If the array has length zero, return the empty string.
3441
3397
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
@@ -3665,7 +3621,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3665
3621
  isolate()->stub_cache()->ComputeCallInitialize(arg_count,
3666
3622
  NOT_IN_LOOP,
3667
3623
  mode);
3668
- EmitCallIC(ic, mode, expr->id());
3624
+ __ Call(ic, mode, expr->id());
3669
3625
  // Restore context register.
3670
3626
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3671
3627
  } else {
@@ -3684,18 +3640,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3684
3640
  Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3685
3641
 
3686
3642
  if (prop != NULL) {
3687
- if (prop->is_synthetic()) {
3688
- // Result of deleting parameters is false, even when they rewrite
3689
- // to accesses on the arguments object.
3690
- context()->Plug(false);
3691
- } else {
3692
- VisitForStackValue(prop->obj());
3693
- VisitForStackValue(prop->key());
3694
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
3695
- __ push(r1);
3696
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3697
- context()->Plug(r0);
3698
- }
3643
+ VisitForStackValue(prop->obj());
3644
+ VisitForStackValue(prop->key());
3645
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
3646
+ __ push(r1);
3647
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3648
+ context()->Plug(r0);
3699
3649
  } else if (var != NULL) {
3700
3650
  // Delete of an unqualified identifier is disallowed in strict mode
3701
3651
  // but "delete this" is.
@@ -3773,8 +3723,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3773
3723
  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3774
3724
  VisitForAccumulatorValue(expr->expression());
3775
3725
  Label no_conversion;
3776
- __ tst(result_register(), Operand(kSmiTagMask));
3777
- __ b(eq, &no_conversion);
3726
+ __ JumpIfSmi(result_register(), &no_conversion);
3778
3727
  ToNumberStub convert_stub;
3779
3728
  __ CallStub(&convert_stub);
3780
3729
  __ bind(&no_conversion);
@@ -3808,7 +3757,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3808
3757
  // accumulator register r0.
3809
3758
  VisitForAccumulatorValue(expr->expression());
3810
3759
  SetSourcePosition(expr->position());
3811
- EmitCallIC(stub.GetCode(), NULL, expr->id());
3760
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3812
3761
  context()->Plug(r0);
3813
3762
  }
3814
3763
 
@@ -3825,7 +3774,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3825
3774
  }
3826
3775
 
3827
3776
  // Expression can only be a property, a global or a (parameter or local)
3828
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3777
+ // slot.
3829
3778
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3830
3779
  LhsKind assign_type = VARIABLE;
3831
3780
  Property* prop = expr->expression()->AsProperty();
@@ -3840,7 +3789,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3840
3789
  if (assign_type == VARIABLE) {
3841
3790
  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3842
3791
  AccumulatorValueContext context(this);
3843
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3792
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
3844
3793
  } else {
3845
3794
  // Reserve space for result of postfix operation.
3846
3795
  if (expr->is_postfix() && !context()->IsEffect()) {
@@ -3853,15 +3802,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3853
3802
  __ push(r0);
3854
3803
  EmitNamedPropertyLoad(prop);
3855
3804
  } else {
3856
- if (prop->is_arguments_access()) {
3857
- VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3858
- __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
3859
- __ push(r0);
3860
- __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
3861
- } else {
3862
- VisitForStackValue(prop->obj());
3863
- VisitForAccumulatorValue(prop->key());
3864
- }
3805
+ VisitForStackValue(prop->obj());
3806
+ VisitForAccumulatorValue(prop->key());
3865
3807
  __ ldr(r1, MemOperand(sp, 0));
3866
3808
  __ push(r0);
3867
3809
  EmitKeyedPropertyLoad(prop);
@@ -3926,7 +3868,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3926
3868
  SetSourcePosition(expr->position());
3927
3869
 
3928
3870
  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
3929
- EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
3871
+ __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
3872
+ patch_site.EmitPatchInfo();
3930
3873
  __ bind(&done);
3931
3874
 
3932
3875
  // Store the value returned in r0.
@@ -3957,7 +3900,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3957
3900
  Handle<Code> ic = is_strict_mode()
3958
3901
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
3959
3902
  : isolate()->builtins()->StoreIC_Initialize();
3960
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3903
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
3961
3904
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3962
3905
  if (expr->is_postfix()) {
3963
3906
  if (!context()->IsEffect()) {
@@ -3974,7 +3917,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3974
3917
  Handle<Code> ic = is_strict_mode()
3975
3918
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3976
3919
  : isolate()->builtins()->KeyedStoreIC_Initialize();
3977
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3920
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
3978
3921
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3979
3922
  if (expr->is_postfix()) {
3980
3923
  if (!context()->IsEffect()) {
@@ -4000,7 +3943,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4000
3943
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4001
3944
  // Use a regular load, not a contextual load, to avoid a reference
4002
3945
  // error.
4003
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
3946
+ __ Call(ic);
4004
3947
  PrepareForBailout(expr, TOS_REG);
4005
3948
  context()->Plug(r0);
4006
3949
  } else if (proxy != NULL &&
@@ -4023,30 +3966,18 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4023
3966
  context()->Plug(r0);
4024
3967
  } else {
4025
3968
  // This expression cannot throw a reference error at the top level.
4026
- context()->HandleExpression(expr);
3969
+ VisitInCurrentContext(expr);
4027
3970
  }
4028
3971
  }
4029
3972
 
4030
3973
 
4031
- bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4032
- Expression* left,
4033
- Expression* right,
4034
- Label* if_true,
4035
- Label* if_false,
4036
- Label* fall_through) {
4037
- if (op != Token::EQ && op != Token::EQ_STRICT) return false;
4038
-
4039
- // Check for the pattern: typeof <expression> == <string literal>.
4040
- Literal* right_literal = right->AsLiteral();
4041
- if (right_literal == NULL) return false;
4042
- Handle<Object> right_literal_value = right_literal->handle();
4043
- if (!right_literal_value->IsString()) return false;
4044
- UnaryOperation* left_unary = left->AsUnaryOperation();
4045
- if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4046
- Handle<String> check = Handle<String>::cast(right_literal_value);
4047
-
3974
+ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3975
+ Handle<String> check,
3976
+ Label* if_true,
3977
+ Label* if_false,
3978
+ Label* fall_through) {
4048
3979
  { AccumulatorValueContext context(this);
4049
- VisitForTypeofValue(left_unary->expression());
3980
+ VisitForTypeofValue(expr);
4050
3981
  }
4051
3982
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4052
3983
 
@@ -4069,6 +4000,10 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4069
4000
  __ b(eq, if_true);
4070
4001
  __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4071
4002
  Split(eq, if_true, if_false, fall_through);
4003
+ } else if (FLAG_harmony_typeof &&
4004
+ check->Equals(isolate()->heap()->null_symbol())) {
4005
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
4006
+ Split(eq, if_true, if_false, fall_through);
4072
4007
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4073
4008
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4074
4009
  __ b(eq, if_true);
@@ -4081,18 +4016,20 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4081
4016
 
4082
4017
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4083
4018
  __ JumpIfSmi(r0, if_false);
4084
- __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
4019
+ __ CompareObjectType(r0, r1, r0, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
4085
4020
  Split(ge, if_true, if_false, fall_through);
4086
4021
 
4087
4022
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4088
4023
  __ JumpIfSmi(r0, if_false);
4089
- __ CompareRoot(r0, Heap::kNullValueRootIndex);
4090
- __ b(eq, if_true);
4024
+ if (!FLAG_harmony_typeof) {
4025
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
4026
+ __ b(eq, if_true);
4027
+ }
4091
4028
  // Check for JS objects => true.
4092
- __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
4093
- __ b(lo, if_false);
4094
- __ CompareInstanceType(r0, r1, FIRST_FUNCTION_CLASS_TYPE);
4095
- __ b(hs, if_false);
4029
+ __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4030
+ __ b(lt, if_false);
4031
+ __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4032
+ __ b(gt, if_false);
4096
4033
  // Check for undetectable objects => false.
4097
4034
  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4098
4035
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
@@ -4100,8 +4037,18 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4100
4037
  } else {
4101
4038
  if (if_false != fall_through) __ jmp(if_false);
4102
4039
  }
4040
+ }
4041
+
4103
4042
 
4104
- return true;
4043
+ void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
4044
+ Label* if_true,
4045
+ Label* if_false,
4046
+ Label* fall_through) {
4047
+ VisitForAccumulatorValue(expr);
4048
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4049
+
4050
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4051
+ Split(eq, if_true, if_false, fall_through);
4105
4052
  }
4106
4053
 
4107
4054
 
@@ -4121,14 +4068,12 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4121
4068
 
4122
4069
  // First we try a fast inlined version of the compare when one of
4123
4070
  // the operands is a literal.
4124
- Token::Value op = expr->op();
4125
- Expression* left = expr->left();
4126
- Expression* right = expr->right();
4127
- if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4071
+ if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
4128
4072
  context()->Plug(if_true, if_false);
4129
4073
  return;
4130
4074
  }
4131
4075
 
4076
+ Token::Value op = expr->op();
4132
4077
  VisitForStackValue(expr->left());
4133
4078
  switch (op) {
4134
4079
  case Token::IN:
@@ -4154,11 +4099,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4154
4099
  default: {
4155
4100
  VisitForAccumulatorValue(expr->right());
4156
4101
  Condition cond = eq;
4157
- bool strict = false;
4158
4102
  switch (op) {
4159
4103
  case Token::EQ_STRICT:
4160
- strict = true;
4161
- // Fall through
4162
4104
  case Token::EQ:
4163
4105
  cond = eq;
4164
4106
  __ pop(r1);
@@ -4203,7 +4145,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4203
4145
  // Record position and call the compare IC.
4204
4146
  SetSourcePosition(expr->position());
4205
4147
  Handle<Code> ic = CompareIC::GetUninitialized(op);
4206
- EmitCallIC(ic, &patch_site, expr->id());
4148
+ __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
4149
+ patch_site.EmitPatchInfo();
4207
4150
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4208
4151
  __ cmp(r0, Operand(0));
4209
4152
  Split(cond, if_true, if_false, fall_through);
@@ -4236,8 +4179,7 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4236
4179
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
4237
4180
  __ cmp(r0, r1);
4238
4181
  __ b(eq, if_true);
4239
- __ tst(r0, Operand(kSmiTagMask));
4240
- __ b(eq, if_false);
4182
+ __ JumpIfSmi(r0, if_false);
4241
4183
  // It can be an undetectable object.
4242
4184
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4243
4185
  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
@@ -4265,70 +4207,6 @@ Register FullCodeGenerator::context_register() {
4265
4207
  }
4266
4208
 
4267
4209
 
4268
- void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4269
- RelocInfo::Mode mode,
4270
- unsigned ast_id) {
4271
- ASSERT(mode == RelocInfo::CODE_TARGET ||
4272
- mode == RelocInfo::CODE_TARGET_CONTEXT);
4273
- Counters* counters = isolate()->counters();
4274
- switch (ic->kind()) {
4275
- case Code::LOAD_IC:
4276
- __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
4277
- break;
4278
- case Code::KEYED_LOAD_IC:
4279
- __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
4280
- break;
4281
- case Code::STORE_IC:
4282
- __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
4283
- break;
4284
- case Code::KEYED_STORE_IC:
4285
- __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
4286
- default:
4287
- break;
4288
- }
4289
- if (ast_id == kNoASTId || mode == RelocInfo::CODE_TARGET_CONTEXT) {
4290
- __ Call(ic, mode);
4291
- } else {
4292
- ASSERT(mode == RelocInfo::CODE_TARGET);
4293
- mode = RelocInfo::CODE_TARGET_WITH_ID;
4294
- __ CallWithAstId(ic, mode, ast_id);
4295
- }
4296
- }
4297
-
4298
-
4299
- void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4300
- JumpPatchSite* patch_site,
4301
- unsigned ast_id) {
4302
- Counters* counters = isolate()->counters();
4303
- switch (ic->kind()) {
4304
- case Code::LOAD_IC:
4305
- __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
4306
- break;
4307
- case Code::KEYED_LOAD_IC:
4308
- __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
4309
- break;
4310
- case Code::STORE_IC:
4311
- __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
4312
- break;
4313
- case Code::KEYED_STORE_IC:
4314
- __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
4315
- default:
4316
- break;
4317
- }
4318
-
4319
- if (ast_id == kNoASTId) {
4320
- __ Call(ic, RelocInfo::CODE_TARGET);
4321
- } else {
4322
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id);
4323
- }
4324
- if (patch_site != NULL && patch_site->is_bound()) {
4325
- patch_site->EmitPatchInfo();
4326
- } else {
4327
- __ nop(); // Signals no inlined code.
4328
- }
4329
- }
4330
-
4331
-
4332
4210
  void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4333
4211
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4334
4212
  __ str(value, MemOperand(fp, frame_offset));
@@ -4340,6 +4218,27 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4340
4218
  }
4341
4219
 
4342
4220
 
4221
+ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4222
+ Scope* declaration_scope = scope()->DeclarationScope();
4223
+ if (declaration_scope->is_global_scope()) {
4224
+ // Contexts nested in the global context have a canonical empty function
4225
+ // as their closure, not the anonymous closure containing the global
4226
+ // code. Pass a smi sentinel and let the runtime look up the empty
4227
+ // function.
4228
+ __ mov(ip, Operand(Smi::FromInt(0)));
4229
+ } else if (declaration_scope->is_eval_scope()) {
4230
+ // Contexts created by a call to eval have the same closure as the
4231
+ // context calling eval, not the anonymous closure containing the eval
4232
+ // code. Fetch it from the context.
4233
+ __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4234
+ } else {
4235
+ ASSERT(declaration_scope->is_function_scope());
4236
+ __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4237
+ }
4238
+ __ push(ip);
4239
+ }
4240
+
4241
+
4343
4242
  // ----------------------------------------------------------------------------
4344
4243
  // Non-local control flow support.
4345
4244
 
@@ -4350,7 +4249,7 @@ void FullCodeGenerator::EnterFinallyBlock() {
4350
4249
  // Cook return address in link register to stack (smi encoded Code* delta)
4351
4250
  __ sub(r1, lr, Operand(masm_->CodeObject()));
4352
4251
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4353
- ASSERT_EQ(0, kSmiTag);
4252
+ STATIC_ASSERT(kSmiTag == 0);
4354
4253
  __ add(r1, r1, Operand(r1)); // Convert to smi.
4355
4254
  __ push(r1);
4356
4255
  }