libv8 3.3.10.4 → 3.5.10.beta1

Sign up to get free protection for your applications and access to all the features.
Files changed (538)
  1. data/lib/libv8/scons/CHANGES.txt +24 -231
  2. data/lib/libv8/scons/LICENSE.txt +1 -1
  3. data/lib/libv8/scons/MANIFEST +0 -1
  4. data/lib/libv8/scons/PKG-INFO +1 -1
  5. data/lib/libv8/scons/README.txt +9 -9
  6. data/lib/libv8/scons/RELEASE.txt +75 -77
  7. data/lib/libv8/scons/engine/SCons/Action.py +6 -22
  8. data/lib/libv8/scons/engine/SCons/Builder.py +2 -2
  9. data/lib/libv8/scons/engine/SCons/CacheDir.py +2 -2
  10. data/lib/libv8/scons/engine/SCons/Debug.py +2 -2
  11. data/lib/libv8/scons/engine/SCons/Defaults.py +10 -24
  12. data/lib/libv8/scons/engine/SCons/Environment.py +19 -118
  13. data/lib/libv8/scons/engine/SCons/Errors.py +2 -2
  14. data/lib/libv8/scons/engine/SCons/Executor.py +2 -2
  15. data/lib/libv8/scons/engine/SCons/Job.py +2 -2
  16. data/lib/libv8/scons/engine/SCons/Memoize.py +2 -2
  17. data/lib/libv8/scons/engine/SCons/Node/Alias.py +2 -2
  18. data/lib/libv8/scons/engine/SCons/Node/FS.py +121 -281
  19. data/lib/libv8/scons/engine/SCons/Node/Python.py +2 -2
  20. data/lib/libv8/scons/engine/SCons/Node/__init__.py +5 -6
  21. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +2 -2
  22. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +2 -2
  23. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +2 -2
  24. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +2 -2
  25. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +2 -2
  26. data/lib/libv8/scons/engine/SCons/Options/__init__.py +2 -2
  27. data/lib/libv8/scons/engine/SCons/PathList.py +2 -2
  28. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +2 -2
  29. data/lib/libv8/scons/engine/SCons/Platform/aix.py +2 -2
  30. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +2 -2
  31. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +3 -27
  32. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +2 -2
  33. data/lib/libv8/scons/engine/SCons/Platform/irix.py +2 -2
  34. data/lib/libv8/scons/engine/SCons/Platform/os2.py +2 -2
  35. data/lib/libv8/scons/engine/SCons/Platform/posix.py +2 -2
  36. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +2 -2
  37. data/lib/libv8/scons/engine/SCons/Platform/win32.py +2 -2
  38. data/lib/libv8/scons/engine/SCons/SConf.py +2 -2
  39. data/lib/libv8/scons/engine/SCons/SConsign.py +3 -9
  40. data/lib/libv8/scons/engine/SCons/Scanner/C.py +2 -2
  41. data/lib/libv8/scons/engine/SCons/Scanner/D.py +2 -2
  42. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +2 -2
  43. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +2 -2
  44. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +2 -2
  45. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +2 -5
  46. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +2 -2
  47. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +3 -3
  48. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +2 -2
  49. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +2 -2
  50. data/lib/libv8/scons/engine/SCons/Script/Main.py +11 -82
  51. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +5 -5
  52. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +2 -2
  53. data/lib/libv8/scons/engine/SCons/Script/__init__.py +2 -2
  54. data/lib/libv8/scons/engine/SCons/Sig.py +2 -2
  55. data/lib/libv8/scons/engine/SCons/Subst.py +2 -2
  56. data/lib/libv8/scons/engine/SCons/Taskmaster.py +2 -10
  57. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +2 -2
  58. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +2 -2
  59. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +2 -2
  60. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +2 -19
  61. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +2 -2
  62. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +2 -2
  63. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +2 -2
  64. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +2 -2
  65. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +2 -2
  66. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +2 -2
  67. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +6 -9
  68. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +2 -29
  69. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +2 -2
  70. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +2 -2
  71. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +2 -2
  72. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +2 -2
  73. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +2 -2
  74. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +3 -3
  75. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +2 -2
  76. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +2 -2
  77. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +2 -2
  78. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +2 -2
  79. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +2 -2
  80. data/lib/libv8/scons/engine/SCons/Tool/ar.py +2 -2
  81. data/lib/libv8/scons/engine/SCons/Tool/as.py +2 -2
  82. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +2 -2
  83. data/lib/libv8/scons/engine/SCons/Tool/c++.py +2 -2
  84. data/lib/libv8/scons/engine/SCons/Tool/cc.py +2 -2
  85. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +2 -2
  86. data/lib/libv8/scons/engine/SCons/Tool/default.py +2 -2
  87. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +7 -24
  88. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +2 -2
  89. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +2 -3
  90. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +2 -3
  91. data/lib/libv8/scons/engine/SCons/Tool/f77.py +2 -2
  92. data/lib/libv8/scons/engine/SCons/Tool/f90.py +2 -2
  93. data/lib/libv8/scons/engine/SCons/Tool/f95.py +2 -2
  94. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +2 -2
  95. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +2 -2
  96. data/lib/libv8/scons/engine/SCons/Tool/g++.py +2 -2
  97. data/lib/libv8/scons/engine/SCons/Tool/g77.py +2 -2
  98. data/lib/libv8/scons/engine/SCons/Tool/gas.py +2 -2
  99. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +2 -2
  100. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +3 -3
  101. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +3 -2
  102. data/lib/libv8/scons/engine/SCons/Tool/gs.py +2 -2
  103. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +2 -2
  104. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +2 -2
  105. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +2 -2
  106. data/lib/libv8/scons/engine/SCons/Tool/icc.py +2 -2
  107. data/lib/libv8/scons/engine/SCons/Tool/icl.py +2 -2
  108. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +2 -2
  109. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +2 -2
  110. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +2 -2
  111. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +2 -2
  112. data/lib/libv8/scons/engine/SCons/Tool/install.py +3 -57
  113. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +25 -65
  114. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +2 -2
  115. data/lib/libv8/scons/engine/SCons/Tool/jar.py +3 -9
  116. data/lib/libv8/scons/engine/SCons/Tool/javac.py +2 -2
  117. data/lib/libv8/scons/engine/SCons/Tool/javah.py +2 -2
  118. data/lib/libv8/scons/engine/SCons/Tool/latex.py +2 -3
  119. data/lib/libv8/scons/engine/SCons/Tool/lex.py +2 -2
  120. data/lib/libv8/scons/engine/SCons/Tool/link.py +5 -6
  121. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +2 -2
  122. data/lib/libv8/scons/engine/SCons/Tool/m4.py +2 -2
  123. data/lib/libv8/scons/engine/SCons/Tool/masm.py +2 -2
  124. data/lib/libv8/scons/engine/SCons/Tool/midl.py +2 -2
  125. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +10 -31
  126. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +2 -2
  127. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +9 -61
  128. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +2 -2
  129. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +11 -21
  130. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +59 -477
  131. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +2 -2
  132. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +2 -2
  133. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +2 -2
  134. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +2 -2
  135. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +2 -2
  136. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +2 -2
  137. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +2 -2
  138. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +2 -2
  139. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +2 -2
  140. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +2 -2
  141. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +2 -2
  142. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +2 -2
  143. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +2 -2
  144. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +2 -2
  145. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +2 -3
  146. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +2 -3
  147. data/lib/libv8/scons/engine/SCons/Tool/qt.py +2 -2
  148. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +3 -9
  149. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +2 -2
  150. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +2 -2
  151. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +2 -2
  152. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +2 -2
  153. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +2 -2
  154. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +3 -2
  155. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +2 -2
  156. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +2 -2
  157. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +2 -2
  158. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +2 -2
  159. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +2 -2
  160. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +2 -2
  161. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +3 -2
  162. data/lib/libv8/scons/engine/SCons/Tool/swig.py +5 -6
  163. data/lib/libv8/scons/engine/SCons/Tool/tar.py +2 -2
  164. data/lib/libv8/scons/engine/SCons/Tool/tex.py +43 -96
  165. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +2 -2
  166. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +2 -2
  167. data/lib/libv8/scons/engine/SCons/Tool/wix.py +2 -2
  168. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +2 -12
  169. data/lib/libv8/scons/engine/SCons/Tool/zip.py +2 -2
  170. data/lib/libv8/scons/engine/SCons/Util.py +3 -3
  171. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +2 -2
  172. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +3 -3
  173. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +2 -2
  174. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +2 -2
  175. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +2 -2
  176. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +2 -2
  177. data/lib/libv8/scons/engine/SCons/Warnings.py +2 -2
  178. data/lib/libv8/scons/engine/SCons/__init__.py +6 -6
  179. data/lib/libv8/scons/engine/SCons/compat/__init__.py +2 -2
  180. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +2 -2
  181. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +2 -2
  182. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +2 -2
  183. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +2 -2
  184. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +2 -2
  185. data/lib/libv8/scons/engine/SCons/cpp.py +2 -2
  186. data/lib/libv8/scons/engine/SCons/dblite.py +1 -4
  187. data/lib/libv8/scons/engine/SCons/exitfuncs.py +2 -2
  188. data/lib/libv8/scons/scons-time.1 +3 -3
  189. data/lib/libv8/scons/scons.1 +1164 -1170
  190. data/lib/libv8/scons/sconsign.1 +3 -3
  191. data/lib/libv8/scons/script/scons +22 -22
  192. data/lib/libv8/scons/script/scons-time +2 -2
  193. data/lib/libv8/scons/script/scons.bat +4 -7
  194. data/lib/libv8/scons/script/sconsign +20 -21
  195. data/lib/libv8/scons/setup.cfg +1 -0
  196. data/lib/libv8/scons/setup.py +40 -38
  197. data/lib/libv8/v8/.gitignore +1 -1
  198. data/lib/libv8/v8/AUTHORS +2 -0
  199. data/lib/libv8/v8/ChangeLog +387 -0
  200. data/lib/libv8/v8/Makefile +171 -0
  201. data/lib/libv8/v8/SConstruct +124 -51
  202. data/lib/libv8/v8/build/README.txt +31 -14
  203. data/lib/libv8/v8/build/all.gyp +11 -4
  204. data/lib/libv8/v8/build/armu.gypi +6 -2
  205. data/lib/libv8/v8/build/common.gypi +240 -94
  206. data/lib/libv8/v8/build/gyp_v8 +32 -4
  207. data/lib/libv8/v8/build/standalone.gypi +200 -0
  208. data/lib/libv8/v8/include/v8-debug.h +0 -0
  209. data/lib/libv8/v8/include/v8-profiler.h +8 -11
  210. data/lib/libv8/v8/include/v8.h +191 -108
  211. data/lib/libv8/v8/preparser/SConscript +2 -2
  212. data/lib/libv8/v8/preparser/preparser-process.cc +3 -3
  213. data/lib/libv8/v8/preparser/preparser.gyp +42 -0
  214. data/lib/libv8/v8/src/SConscript +33 -8
  215. data/lib/libv8/v8/src/accessors.cc +77 -43
  216. data/lib/libv8/v8/src/api.cc +393 -191
  217. data/lib/libv8/v8/src/api.h +4 -8
  218. data/lib/libv8/v8/src/apinatives.js +15 -3
  219. data/lib/libv8/v8/src/arguments.h +8 -0
  220. data/lib/libv8/v8/src/arm/assembler-arm.cc +120 -120
  221. data/lib/libv8/v8/src/arm/assembler-arm.h +92 -43
  222. data/lib/libv8/v8/src/arm/builtins-arm.cc +32 -39
  223. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +572 -351
  224. data/lib/libv8/v8/src/arm/code-stubs-arm.h +8 -77
  225. data/lib/libv8/v8/src/arm/codegen-arm.h +0 -2
  226. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +50 -30
  227. data/lib/libv8/v8/src/arm/disasm-arm.cc +1 -1
  228. data/lib/libv8/v8/src/arm/frames-arm.h +9 -5
  229. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +331 -432
  230. data/lib/libv8/v8/src/arm/ic-arm.cc +192 -124
  231. data/lib/libv8/v8/src/arm/lithium-arm.cc +216 -232
  232. data/lib/libv8/v8/src/arm/lithium-arm.h +106 -259
  233. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +633 -642
  234. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +4 -4
  235. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +1 -3
  236. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +260 -185
  237. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +45 -25
  238. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +25 -13
  239. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +3 -0
  240. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +413 -226
  241. data/lib/libv8/v8/src/array.js +38 -18
  242. data/lib/libv8/v8/src/assembler.cc +12 -5
  243. data/lib/libv8/v8/src/assembler.h +15 -9
  244. data/lib/libv8/v8/src/ast-inl.h +34 -25
  245. data/lib/libv8/v8/src/ast.cc +141 -72
  246. data/lib/libv8/v8/src/ast.h +255 -181
  247. data/lib/libv8/v8/src/bignum.cc +3 -4
  248. data/lib/libv8/v8/src/bootstrapper.cc +55 -11
  249. data/lib/libv8/v8/src/bootstrapper.h +3 -2
  250. data/lib/libv8/v8/src/builtins.cc +8 -2
  251. data/lib/libv8/v8/src/builtins.h +4 -0
  252. data/lib/libv8/v8/src/cached-powers.cc +8 -4
  253. data/lib/libv8/v8/src/checks.h +3 -3
  254. data/lib/libv8/v8/src/code-stubs.cc +173 -28
  255. data/lib/libv8/v8/src/code-stubs.h +104 -148
  256. data/lib/libv8/v8/src/codegen.cc +8 -8
  257. data/lib/libv8/v8/src/compilation-cache.cc +2 -47
  258. data/lib/libv8/v8/src/compilation-cache.h +0 -10
  259. data/lib/libv8/v8/src/compiler.cc +27 -16
  260. data/lib/libv8/v8/src/compiler.h +13 -18
  261. data/lib/libv8/v8/src/contexts.cc +107 -72
  262. data/lib/libv8/v8/src/contexts.h +70 -34
  263. data/lib/libv8/v8/src/conversions-inl.h +572 -14
  264. data/lib/libv8/v8/src/conversions.cc +9 -707
  265. data/lib/libv8/v8/src/conversions.h +23 -12
  266. data/lib/libv8/v8/src/cpu-profiler-inl.h +2 -19
  267. data/lib/libv8/v8/src/cpu-profiler.cc +4 -21
  268. data/lib/libv8/v8/src/cpu-profiler.h +8 -17
  269. data/lib/libv8/v8/src/d8-debug.cc +5 -3
  270. data/lib/libv8/v8/src/d8-debug.h +6 -7
  271. data/lib/libv8/v8/src/d8-posix.cc +1 -10
  272. data/lib/libv8/v8/src/d8.cc +721 -219
  273. data/lib/libv8/v8/src/d8.gyp +37 -12
  274. data/lib/libv8/v8/src/d8.h +141 -19
  275. data/lib/libv8/v8/src/d8.js +17 -8
  276. data/lib/libv8/v8/src/date.js +16 -5
  277. data/lib/libv8/v8/src/dateparser-inl.h +242 -39
  278. data/lib/libv8/v8/src/dateparser.cc +38 -4
  279. data/lib/libv8/v8/src/dateparser.h +170 -28
  280. data/lib/libv8/v8/src/debug-agent.cc +5 -3
  281. data/lib/libv8/v8/src/debug-agent.h +11 -7
  282. data/lib/libv8/v8/src/debug-debugger.js +65 -34
  283. data/lib/libv8/v8/src/debug.cc +30 -60
  284. data/lib/libv8/v8/src/debug.h +5 -3
  285. data/lib/libv8/v8/src/deoptimizer.cc +227 -10
  286. data/lib/libv8/v8/src/deoptimizer.h +133 -9
  287. data/lib/libv8/v8/src/disassembler.cc +22 -14
  288. data/lib/libv8/v8/src/diy-fp.cc +4 -3
  289. data/lib/libv8/v8/src/diy-fp.h +3 -3
  290. data/lib/libv8/v8/src/elements.cc +634 -0
  291. data/lib/libv8/v8/src/elements.h +95 -0
  292. data/lib/libv8/v8/src/execution.cc +5 -21
  293. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +3 -1
  294. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +1 -1
  295. data/lib/libv8/v8/src/extensions/experimental/collator.cc +6 -2
  296. data/lib/libv8/v8/src/extensions/experimental/collator.h +1 -2
  297. data/lib/libv8/v8/src/extensions/experimental/datetime-format.cc +384 -0
  298. data/lib/libv8/v8/src/extensions/experimental/datetime-format.h +83 -0
  299. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +18 -7
  300. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +12 -16
  301. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +1 -1
  302. data/lib/libv8/v8/src/extensions/experimental/i18n-js2c.py +126 -0
  303. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +3 -4
  304. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +1 -1
  305. data/lib/libv8/v8/src/{shell.h → extensions/experimental/i18n-natives.h} +8 -20
  306. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +45 -1
  307. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +21 -1
  308. data/lib/libv8/v8/src/extensions/experimental/i18n.js +211 -11
  309. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +4 -3
  310. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +1 -1
  311. data/lib/libv8/v8/src/extensions/experimental/number-format.cc +374 -0
  312. data/lib/libv8/v8/src/extensions/experimental/number-format.h +71 -0
  313. data/lib/libv8/v8/src/factory.cc +89 -18
  314. data/lib/libv8/v8/src/factory.h +36 -8
  315. data/lib/libv8/v8/src/flag-definitions.h +11 -44
  316. data/lib/libv8/v8/src/frames-inl.h +8 -1
  317. data/lib/libv8/v8/src/frames.cc +39 -3
  318. data/lib/libv8/v8/src/frames.h +10 -3
  319. data/lib/libv8/v8/src/full-codegen.cc +311 -293
  320. data/lib/libv8/v8/src/full-codegen.h +183 -143
  321. data/lib/libv8/v8/src/func-name-inferrer.cc +29 -15
  322. data/lib/libv8/v8/src/func-name-inferrer.h +19 -9
  323. data/lib/libv8/v8/src/gdb-jit.cc +658 -55
  324. data/lib/libv8/v8/src/gdb-jit.h +6 -2
  325. data/lib/libv8/v8/src/global-handles.cc +368 -312
  326. data/lib/libv8/v8/src/global-handles.h +29 -36
  327. data/lib/libv8/v8/src/globals.h +3 -1
  328. data/lib/libv8/v8/src/handles.cc +43 -69
  329. data/lib/libv8/v8/src/handles.h +21 -16
  330. data/lib/libv8/v8/src/heap-inl.h +11 -13
  331. data/lib/libv8/v8/src/heap-profiler.cc +0 -999
  332. data/lib/libv8/v8/src/heap-profiler.h +0 -303
  333. data/lib/libv8/v8/src/heap.cc +366 -141
  334. data/lib/libv8/v8/src/heap.h +87 -26
  335. data/lib/libv8/v8/src/hydrogen-instructions.cc +192 -81
  336. data/lib/libv8/v8/src/hydrogen-instructions.h +711 -482
  337. data/lib/libv8/v8/src/hydrogen.cc +1146 -629
  338. data/lib/libv8/v8/src/hydrogen.h +100 -64
  339. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +19 -0
  340. data/lib/libv8/v8/src/ia32/assembler-ia32.h +15 -2
  341. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +34 -39
  342. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +675 -377
  343. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +8 -69
  344. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +1 -0
  345. data/lib/libv8/v8/src/ia32/codegen-ia32.h +0 -2
  346. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +3 -2
  347. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +28 -3
  348. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +21 -10
  349. data/lib/libv8/v8/src/ia32/frames-ia32.h +6 -5
  350. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +459 -465
  351. data/lib/libv8/v8/src/ia32/ic-ia32.cc +196 -147
  352. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +575 -650
  353. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +19 -21
  354. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +7 -2
  355. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +261 -256
  356. data/lib/libv8/v8/src/ia32/lithium-ia32.h +234 -335
  357. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +224 -67
  358. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +63 -19
  359. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +22 -8
  360. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +3 -0
  361. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +380 -239
  362. data/lib/libv8/v8/src/ic.cc +198 -234
  363. data/lib/libv8/v8/src/ic.h +32 -30
  364. data/lib/libv8/v8/src/interpreter-irregexp.cc +6 -4
  365. data/lib/libv8/v8/src/isolate.cc +112 -95
  366. data/lib/libv8/v8/src/isolate.h +55 -71
  367. data/lib/libv8/v8/src/json-parser.h +486 -48
  368. data/lib/libv8/v8/src/json.js +28 -23
  369. data/lib/libv8/v8/src/jsregexp.cc +163 -208
  370. data/lib/libv8/v8/src/jsregexp.h +0 -1
  371. data/lib/libv8/v8/src/lithium-allocator-inl.h +29 -27
  372. data/lib/libv8/v8/src/lithium-allocator.cc +22 -17
  373. data/lib/libv8/v8/src/lithium-allocator.h +8 -8
  374. data/lib/libv8/v8/src/lithium.cc +16 -11
  375. data/lib/libv8/v8/src/lithium.h +31 -34
  376. data/lib/libv8/v8/src/liveedit.cc +111 -15
  377. data/lib/libv8/v8/src/liveedit.h +3 -4
  378. data/lib/libv8/v8/src/liveobjectlist.cc +116 -80
  379. data/lib/libv8/v8/src/liveobjectlist.h +2 -2
  380. data/lib/libv8/v8/src/log-inl.h +0 -4
  381. data/lib/libv8/v8/src/log-utils.cc +25 -143
  382. data/lib/libv8/v8/src/log-utils.h +13 -92
  383. data/lib/libv8/v8/src/log.cc +26 -249
  384. data/lib/libv8/v8/src/log.h +6 -17
  385. data/lib/libv8/v8/src/macros.py +9 -6
  386. data/lib/libv8/v8/src/mark-compact.cc +276 -56
  387. data/lib/libv8/v8/src/mark-compact.h +20 -0
  388. data/lib/libv8/v8/src/messages.js +93 -39
  389. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +9 -3
  390. data/lib/libv8/v8/src/mips/assembler-mips.cc +297 -189
  391. data/lib/libv8/v8/src/mips/assembler-mips.h +121 -54
  392. data/lib/libv8/v8/src/mips/builtins-mips.cc +23 -24
  393. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +484 -263
  394. data/lib/libv8/v8/src/mips/code-stubs-mips.h +8 -83
  395. data/lib/libv8/v8/src/mips/codegen-mips.h +0 -2
  396. data/lib/libv8/v8/src/mips/constants-mips.h +37 -11
  397. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +6 -1
  398. data/lib/libv8/v8/src/mips/frames-mips.h +8 -7
  399. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +258 -419
  400. data/lib/libv8/v8/src/mips/ic-mips.cc +181 -121
  401. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +640 -382
  402. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +94 -89
  403. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +23 -10
  404. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +6 -1
  405. data/lib/libv8/v8/src/mips/simulator-mips.cc +249 -49
  406. data/lib/libv8/v8/src/mips/simulator-mips.h +25 -1
  407. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +373 -161
  408. data/lib/libv8/v8/src/mirror-debugger.js +55 -8
  409. data/lib/libv8/v8/src/misc-intrinsics.h +89 -0
  410. data/lib/libv8/v8/src/mksnapshot.cc +36 -4
  411. data/lib/libv8/v8/src/natives.h +5 -2
  412. data/lib/libv8/v8/src/objects-debug.cc +73 -6
  413. data/lib/libv8/v8/src/objects-inl.h +529 -164
  414. data/lib/libv8/v8/src/objects-printer.cc +67 -12
  415. data/lib/libv8/v8/src/objects-visiting.cc +13 -2
  416. data/lib/libv8/v8/src/objects-visiting.h +41 -1
  417. data/lib/libv8/v8/src/objects.cc +2200 -1177
  418. data/lib/libv8/v8/src/objects.h +912 -283
  419. data/lib/libv8/v8/src/parser.cc +566 -371
  420. data/lib/libv8/v8/src/parser.h +35 -33
  421. data/lib/libv8/v8/src/platform-cygwin.cc +10 -25
  422. data/lib/libv8/v8/src/platform-freebsd.cc +4 -29
  423. data/lib/libv8/v8/src/platform-linux.cc +60 -57
  424. data/lib/libv8/v8/src/platform-macos.cc +4 -27
  425. data/lib/libv8/v8/src/platform-nullos.cc +3 -16
  426. data/lib/libv8/v8/src/platform-openbsd.cc +247 -85
  427. data/lib/libv8/v8/src/platform-posix.cc +43 -1
  428. data/lib/libv8/v8/src/platform-solaris.cc +151 -112
  429. data/lib/libv8/v8/src/platform-tls.h +1 -1
  430. data/lib/libv8/v8/src/platform-win32.cc +65 -39
  431. data/lib/libv8/v8/src/platform.h +17 -14
  432. data/lib/libv8/v8/src/preparse-data-format.h +2 -2
  433. data/lib/libv8/v8/src/preparse-data.h +8 -2
  434. data/lib/libv8/v8/src/preparser-api.cc +2 -18
  435. data/lib/libv8/v8/src/preparser.cc +106 -65
  436. data/lib/libv8/v8/src/preparser.h +26 -5
  437. data/lib/libv8/v8/src/prettyprinter.cc +25 -43
  438. data/lib/libv8/v8/src/profile-generator-inl.h +0 -4
  439. data/lib/libv8/v8/src/profile-generator.cc +213 -34
  440. data/lib/libv8/v8/src/profile-generator.h +9 -9
  441. data/lib/libv8/v8/src/property.h +1 -0
  442. data/lib/libv8/v8/src/proxy.js +74 -4
  443. data/lib/libv8/v8/src/regexp-macro-assembler.cc +10 -6
  444. data/lib/libv8/v8/src/regexp.js +16 -11
  445. data/lib/libv8/v8/src/rewriter.cc +24 -133
  446. data/lib/libv8/v8/src/runtime-profiler.cc +27 -151
  447. data/lib/libv8/v8/src/runtime-profiler.h +5 -31
  448. data/lib/libv8/v8/src/runtime.cc +1450 -681
  449. data/lib/libv8/v8/src/runtime.h +47 -31
  450. data/lib/libv8/v8/src/runtime.js +2 -1
  451. data/lib/libv8/v8/src/scanner-base.cc +358 -220
  452. data/lib/libv8/v8/src/scanner-base.h +30 -138
  453. data/lib/libv8/v8/src/scanner.cc +0 -18
  454. data/lib/libv8/v8/src/scanner.h +0 -15
  455. data/lib/libv8/v8/src/scopeinfo.cc +3 -1
  456. data/lib/libv8/v8/src/scopeinfo.h +1 -6
  457. data/lib/libv8/v8/src/scopes.cc +243 -253
  458. data/lib/libv8/v8/src/scopes.h +58 -109
  459. data/lib/libv8/v8/src/serialize.cc +12 -54
  460. data/lib/libv8/v8/src/serialize.h +47 -0
  461. data/lib/libv8/v8/src/small-pointer-list.h +25 -0
  462. data/lib/libv8/v8/src/spaces-inl.h +4 -50
  463. data/lib/libv8/v8/src/spaces.cc +64 -131
  464. data/lib/libv8/v8/src/spaces.h +19 -70
  465. data/lib/libv8/v8/src/string-stream.cc +3 -1
  466. data/lib/libv8/v8/src/string.js +10 -6
  467. data/lib/libv8/v8/src/strtod.cc +7 -3
  468. data/lib/libv8/v8/src/stub-cache.cc +59 -129
  469. data/lib/libv8/v8/src/stub-cache.h +42 -54
  470. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +1447 -1339
  471. data/lib/libv8/v8/src/token.cc +4 -4
  472. data/lib/libv8/v8/src/token.h +6 -5
  473. data/lib/libv8/v8/src/type-info.cc +173 -129
  474. data/lib/libv8/v8/src/type-info.h +40 -22
  475. data/lib/libv8/v8/src/utils.cc +25 -304
  476. data/lib/libv8/v8/src/utils.h +118 -3
  477. data/lib/libv8/v8/src/v8-counters.h +3 -6
  478. data/lib/libv8/v8/src/v8.cc +34 -27
  479. data/lib/libv8/v8/src/v8.h +7 -7
  480. data/lib/libv8/v8/src/v8conversions.cc +129 -0
  481. data/lib/libv8/v8/src/v8conversions.h +60 -0
  482. data/lib/libv8/v8/src/v8globals.h +15 -6
  483. data/lib/libv8/v8/src/v8natives.js +300 -78
  484. data/lib/libv8/v8/src/v8threads.cc +14 -6
  485. data/lib/libv8/v8/src/v8threads.h +4 -1
  486. data/lib/libv8/v8/src/v8utils.cc +360 -0
  487. data/lib/libv8/v8/src/v8utils.h +17 -66
  488. data/lib/libv8/v8/src/variables.cc +7 -12
  489. data/lib/libv8/v8/src/variables.h +12 -10
  490. data/lib/libv8/v8/src/version.cc +2 -2
  491. data/lib/libv8/v8/src/vm-state-inl.h +0 -41
  492. data/lib/libv8/v8/src/vm-state.h +0 -11
  493. data/lib/libv8/v8/src/weakmap.js +103 -0
  494. data/lib/libv8/v8/src/x64/assembler-x64.h +6 -3
  495. data/lib/libv8/v8/src/x64/builtins-x64.cc +25 -22
  496. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +523 -250
  497. data/lib/libv8/v8/src/x64/code-stubs-x64.h +8 -71
  498. data/lib/libv8/v8/src/x64/codegen-x64.cc +1 -0
  499. data/lib/libv8/v8/src/x64/codegen-x64.h +0 -2
  500. data/lib/libv8/v8/src/x64/cpu-x64.cc +2 -1
  501. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +40 -8
  502. data/lib/libv8/v8/src/x64/disasm-x64.cc +12 -10
  503. data/lib/libv8/v8/src/x64/frames-x64.h +7 -6
  504. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +310 -415
  505. data/lib/libv8/v8/src/x64/ic-x64.cc +180 -117
  506. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +411 -523
  507. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +11 -6
  508. data/lib/libv8/v8/src/x64/lithium-x64.cc +191 -216
  509. data/lib/libv8/v8/src/x64/lithium-x64.h +112 -263
  510. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +177 -61
  511. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +23 -7
  512. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +21 -9
  513. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +6 -0
  514. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +273 -107
  515. data/lib/libv8/v8/src/zone.cc +31 -22
  516. data/lib/libv8/v8/src/zone.h +12 -6
  517. data/lib/libv8/v8/tools/codemap.js +8 -0
  518. data/lib/libv8/v8/tools/gcmole/Makefile +43 -0
  519. data/lib/libv8/v8/tools/gcmole/gcmole.lua +0 -2
  520. data/lib/libv8/v8/tools/gdb-v8-support.py +154 -0
  521. data/lib/libv8/v8/tools/grokdump.py +44 -35
  522. data/lib/libv8/v8/tools/gyp/v8.gyp +94 -248
  523. data/lib/libv8/v8/tools/js2c.py +83 -52
  524. data/lib/libv8/v8/tools/linux-tick-processor +4 -6
  525. data/lib/libv8/v8/tools/ll_prof.py +3 -3
  526. data/lib/libv8/v8/tools/oom_dump/README +3 -1
  527. data/lib/libv8/v8/tools/presubmit.py +11 -4
  528. data/lib/libv8/v8/tools/profile.js +46 -2
  529. data/lib/libv8/v8/tools/splaytree.js +11 -0
  530. data/lib/libv8/v8/tools/stats-viewer.py +15 -11
  531. data/lib/libv8/v8/tools/test-wrapper-gypbuild.py +227 -0
  532. data/lib/libv8/v8/tools/test.py +28 -8
  533. data/lib/libv8/v8/tools/tickprocessor.js +0 -16
  534. data/lib/libv8/version.rb +1 -1
  535. data/libv8.gemspec +2 -2
  536. metadata +31 -19
  537. data/lib/libv8/scons/engine/SCons/Tool/f03.py +0 -63
  538. data/lib/libv8/v8/src/json-parser.cc +0 -504
@@ -59,34 +59,14 @@ class TranscendentalCacheStub: public CodeStub {
59
59
  };
60
60
 
61
61
 
62
- class ToBooleanStub: public CodeStub {
63
- public:
64
- ToBooleanStub() { }
65
-
66
- void Generate(MacroAssembler* masm);
67
-
68
- private:
69
- Major MajorKey() { return ToBoolean; }
70
- int MinorKey() { return 0; }
71
- };
72
-
73
-
74
62
  class UnaryOpStub: public CodeStub {
75
63
  public:
76
- UnaryOpStub(Token::Value op, UnaryOverwriteMode mode)
64
+ UnaryOpStub(Token::Value op,
65
+ UnaryOverwriteMode mode,
66
+ UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
77
67
  : op_(op),
78
68
  mode_(mode),
79
- operand_type_(UnaryOpIC::UNINITIALIZED),
80
- name_(NULL) {
81
- }
82
-
83
- UnaryOpStub(
84
- int key,
85
- UnaryOpIC::TypeInfo operand_type)
86
- : op_(OpBits::decode(key)),
87
- mode_(ModeBits::decode(key)),
88
- operand_type_(operand_type),
89
- name_(NULL) {
69
+ operand_type_(operand_type) {
90
70
  }
91
71
 
92
72
  private:
@@ -96,20 +76,7 @@ class UnaryOpStub: public CodeStub {
96
76
  // Operand type information determined at runtime.
97
77
  UnaryOpIC::TypeInfo operand_type_;
98
78
 
99
- char* name_;
100
-
101
- const char* GetName();
102
-
103
- #ifdef DEBUG
104
- void Print() {
105
- PrintF("UnaryOpStub %d (op %s), "
106
- "(mode %d, runtime_type_info %s)\n",
107
- MinorKey(),
108
- Token::String(op_),
109
- static_cast<int>(mode_),
110
- UnaryOpIC::GetName(operand_type_));
111
- }
112
- #endif
79
+ virtual void PrintName(StringStream* stream);
113
80
 
114
81
  class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
115
82
  class OpBits: public BitField<Token::Value, 1, 7> {};
@@ -169,8 +136,7 @@ class BinaryOpStub: public CodeStub {
169
136
  : op_(op),
170
137
  mode_(mode),
171
138
  operands_type_(BinaryOpIC::UNINITIALIZED),
172
- result_type_(BinaryOpIC::UNINITIALIZED),
173
- name_(NULL) {
139
+ result_type_(BinaryOpIC::UNINITIALIZED) {
174
140
  ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
175
141
  }
176
142
 
@@ -181,8 +147,7 @@ class BinaryOpStub: public CodeStub {
181
147
  : op_(OpBits::decode(key)),
182
148
  mode_(ModeBits::decode(key)),
183
149
  operands_type_(operands_type),
184
- result_type_(result_type),
185
- name_(NULL) { }
150
+ result_type_(result_type) { }
186
151
 
187
152
  private:
188
153
  enum SmiCodeGenerateHeapNumberResults {
@@ -197,20 +162,7 @@ class BinaryOpStub: public CodeStub {
197
162
  BinaryOpIC::TypeInfo operands_type_;
198
163
  BinaryOpIC::TypeInfo result_type_;
199
164
 
200
- char* name_;
201
-
202
- const char* GetName();
203
-
204
- #ifdef DEBUG
205
- void Print() {
206
- PrintF("BinaryOpStub %d (op %s), "
207
- "(mode %d, runtime_type_info %s)\n",
208
- MinorKey(),
209
- Token::String(op_),
210
- static_cast<int>(mode_),
211
- BinaryOpIC::GetName(operands_type_));
212
- }
213
- #endif
165
+ virtual void PrintName(StringStream* stream);
214
166
 
215
167
  // Minor key encoding in 15 bits RRRTTTOOOOOOOMM.
216
168
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
@@ -430,14 +382,6 @@ class NumberToStringStub: public CodeStub {
430
382
  int MinorKey() { return 0; }
431
383
 
432
384
  void Generate(MacroAssembler* masm);
433
-
434
- const char* GetName() { return "NumberToStringStub"; }
435
-
436
- #ifdef DEBUG
437
- void Print() {
438
- PrintF("NumberToStringStub\n");
439
- }
440
- #endif
441
385
  };
442
386
 
443
387
 
@@ -481,13 +425,6 @@ class StringDictionaryLookupStub: public CodeStub {
481
425
  StringDictionary::kHeaderSize +
482
426
  StringDictionary::kElementsStartIndex * kPointerSize;
483
427
 
484
-
485
- #ifdef DEBUG
486
- void Print() {
487
- PrintF("StringDictionaryLookupStub\n");
488
- }
489
- #endif
490
-
491
428
  Major MajorKey() { return StringDictionaryNegativeLookup; }
492
429
 
493
430
  int MinorKey() {
@@ -132,6 +132,7 @@ ModuloFunction CreateModuloFunction() {
132
132
 
133
133
  CodeDesc desc;
134
134
  masm.GetCode(&desc);
135
+ OS::ProtectCode(buffer, actual_size);
135
136
  // Call the function from C++ through this pointer.
136
137
  return FUNCTION_CAST<ModuloFunction>(buffer);
137
138
  }
@@ -58,9 +58,7 @@ class CodeGenerator: public AstVisitor {
58
58
  // Print the code after compiling it.
59
59
  static void PrintCode(Handle<Code> code, CompilationInfo* info);
60
60
 
61
- #ifdef ENABLE_LOGGING_AND_PROFILING
62
61
  static bool ShouldGenerateLog(Expression* type);
63
- #endif
64
62
 
65
63
  static bool RecordPositions(MacroAssembler* masm,
66
64
  int pos,
@@ -67,7 +67,8 @@ void CPU::FlushICache(void* start, size_t size) {
67
67
  // solution is to run valgrind with --smc-check=all, but this comes at a big
68
68
  // performance cost. We can notify valgrind to invalidate its cache.
69
69
  #ifdef VALGRIND_DISCARD_TRANSLATIONS
70
- VALGRIND_DISCARD_TRANSLATIONS(start, size);
70
+ unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size);
71
+ USE(res);
71
72
  #endif
72
73
  }
73
74
 
@@ -38,7 +38,7 @@ namespace v8 {
38
38
  namespace internal {
39
39
 
40
40
 
41
- int Deoptimizer::table_entry_size_ = 10;
41
+ const int Deoptimizer::table_entry_size_ = 10;
42
42
 
43
43
 
44
44
  int Deoptimizer::patch_size() {
@@ -128,7 +128,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
128
128
  Address instruction_start = function->code()->instruction_start();
129
129
  Address jump_table_address =
130
130
  instruction_start + function->code()->safepoint_table_offset();
131
+ #ifdef DEBUG
131
132
  Address previous_pc = instruction_start;
133
+ #endif
132
134
 
133
135
  SafepointTableDeoptimiztionEntryIterator deoptimizations(function->code());
134
136
  Address entry_pc = NULL;
@@ -157,12 +159,16 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
157
159
  CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
158
160
  patcher.masm()->Call(GetDeoptimizationEntry(deoptimization_index, LAZY),
159
161
  RelocInfo::NONE);
162
+ #ifdef DEBUG
160
163
  previous_pc = call_end_address;
164
+ #endif
161
165
  } else {
162
166
  // Not room enough for a long Call instruction. Write a short call
163
167
  // instruction to a long jump placed elsewhere in the code.
168
+ #ifdef DEBUG
164
169
  Address short_call_end_address =
165
170
  call_address + MacroAssembler::kShortCallInstructionLength;
171
+ #endif
166
172
  ASSERT(next_pc >= short_call_end_address);
167
173
 
168
174
  // Write jump in jump-table.
@@ -177,7 +183,9 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
177
183
  CodePatcher call_patcher(call_address,
178
184
  MacroAssembler::kShortCallInstructionLength);
179
185
  call_patcher.masm()->call(jump_table_address);
186
+ #ifdef DEBUG
180
187
  previous_pc = short_call_end_address;
188
+ #endif
181
189
  }
182
190
 
183
191
  // Continue with next deoptimization entry.
@@ -316,7 +324,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
316
324
  USE(height_in_bytes);
317
325
 
318
326
  unsigned fixed_size = ComputeFixedSize(function_);
319
- unsigned input_frame_size = static_cast<unsigned>(input_->GetFrameSize());
327
+ unsigned input_frame_size = input_->GetFrameSize();
320
328
  ASSERT(fixed_size + height_in_bytes == input_frame_size);
321
329
 
322
330
  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
@@ -340,6 +348,9 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
340
348
  output_ = new FrameDescription*[1];
341
349
  output_[0] = new(output_frame_size) FrameDescription(
342
350
  output_frame_size, function_);
351
+ #ifdef DEBUG
352
+ output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
353
+ #endif
343
354
 
344
355
  // Clear the incoming parameters in the optimized frame to avoid
345
356
  // confusing the garbage collector.
@@ -448,12 +459,15 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
448
459
  // The 'fixed' part of the frame consists of the incoming parameters and
449
460
  // the part described by JavaScriptFrameConstants.
450
461
  unsigned fixed_frame_size = ComputeFixedSize(function);
451
- unsigned input_frame_size = static_cast<unsigned>(input_->GetFrameSize());
462
+ unsigned input_frame_size = input_->GetFrameSize();
452
463
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
453
464
 
454
465
  // Allocate and store the output frame description.
455
466
  FrameDescription* output_frame =
456
467
  new(output_frame_size) FrameDescription(output_frame_size, function);
468
+ #ifdef DEBUG
469
+ output_frame->SetKind(Code::FUNCTION);
470
+ #endif
457
471
 
458
472
  bool is_bottommost = (0 == frame_index);
459
473
  bool is_topmost = (output_count_ - 1 == frame_index);
@@ -584,15 +598,33 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
584
598
  output_frame->SetState(Smi::FromInt(state));
585
599
 
586
600
  // Set the continuation for the topmost frame.
587
- if (is_topmost) {
601
+ if (is_topmost && bailout_type_ != DEBUGGER) {
588
602
  Code* continuation = (bailout_type_ == EAGER)
589
603
  ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
590
604
  : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
591
605
  output_frame->SetContinuation(
592
606
  reinterpret_cast<intptr_t>(continuation->entry()));
593
607
  }
608
+ }
609
+
594
610
 
595
- if (output_count_ - 1 == frame_index) iterator->Done();
611
+ void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
612
+ // Set the register values. The values are not important as there are no
613
+ // callee saved registers in JavaScript frames, so all registers are
614
+ // spilled. Registers rbp and rsp are set to the correct values though.
615
+ for (int i = 0; i < Register::kNumRegisters; i++) {
616
+ input_->SetRegister(i, i * 4);
617
+ }
618
+ input_->SetRegister(rsp.code(), reinterpret_cast<intptr_t>(frame->sp()));
619
+ input_->SetRegister(rbp.code(), reinterpret_cast<intptr_t>(frame->fp()));
620
+ for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
621
+ input_->SetDoubleRegister(i, 0.0);
622
+ }
623
+
624
+ // Fill the frame content from the actual data on the frame.
625
+ for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
626
+ input_->SetFrameSlot(i, Memory::uint64_at(tos + i));
627
+ }
596
628
  }
597
629
 
598
630
 
@@ -617,7 +649,7 @@ void Deoptimizer::EntryGenerator::Generate() {
617
649
  // We push all registers onto the stack, even though we do not need
618
650
  // to restore all later.
619
651
  for (int i = 0; i < kNumberOfRegisters; i++) {
620
- Register r = Register::toRegister(i);
652
+ Register r = Register::from_code(i);
621
653
  __ push(r);
622
654
  }
623
655
 
@@ -775,12 +807,12 @@ void Deoptimizer::EntryGenerator::Generate() {
775
807
 
776
808
  // Restore the registers from the stack.
777
809
  for (int i = kNumberOfRegisters - 1; i >= 0 ; i--) {
778
- Register r = Register::toRegister(i);
810
+ Register r = Register::from_code(i);
779
811
  // Do not restore rsp, simply pop the value into the next register
780
812
  // and overwrite this afterwards.
781
813
  if (r.is(rsp)) {
782
814
  ASSERT(i > 0);
783
- r = Register::toRegister(i - 1);
815
+ r = Register::from_code(i - 1);
784
816
  }
785
817
  __ pop(r);
786
818
  }
@@ -58,7 +58,7 @@ struct ByteMnemonic {
58
58
  };
59
59
 
60
60
 
61
- static ByteMnemonic two_operands_instr[] = {
61
+ static const ByteMnemonic two_operands_instr[] = {
62
62
  { 0x00, BYTE_OPER_REG_OP_ORDER, "add" },
63
63
  { 0x01, OPER_REG_OP_ORDER, "add" },
64
64
  { 0x02, BYTE_REG_OPER_OP_ORDER, "add" },
@@ -105,7 +105,7 @@ static ByteMnemonic two_operands_instr[] = {
105
105
  };
106
106
 
107
107
 
108
- static ByteMnemonic zero_operands_instr[] = {
108
+ static const ByteMnemonic zero_operands_instr[] = {
109
109
  { 0xC3, UNSET_OP_ORDER, "ret" },
110
110
  { 0xC9, UNSET_OP_ORDER, "leave" },
111
111
  { 0xF4, UNSET_OP_ORDER, "hlt" },
@@ -125,14 +125,14 @@ static ByteMnemonic zero_operands_instr[] = {
125
125
  };
126
126
 
127
127
 
128
- static ByteMnemonic call_jump_instr[] = {
128
+ static const ByteMnemonic call_jump_instr[] = {
129
129
  { 0xE8, UNSET_OP_ORDER, "call" },
130
130
  { 0xE9, UNSET_OP_ORDER, "jmp" },
131
131
  { -1, UNSET_OP_ORDER, "" }
132
132
  };
133
133
 
134
134
 
135
- static ByteMnemonic short_immediate_instr[] = {
135
+ static const ByteMnemonic short_immediate_instr[] = {
136
136
  { 0x05, UNSET_OP_ORDER, "add" },
137
137
  { 0x0D, UNSET_OP_ORDER, "or" },
138
138
  { 0x15, UNSET_OP_ORDER, "adc" },
@@ -145,7 +145,7 @@ static ByteMnemonic short_immediate_instr[] = {
145
145
  };
146
146
 
147
147
 
148
- static const char* conditional_code_suffix[] = {
148
+ static const char* const conditional_code_suffix[] = {
149
149
  "o", "no", "c", "nc", "z", "nz", "na", "a",
150
150
  "s", "ns", "pe", "po", "l", "ge", "le", "g"
151
151
  };
@@ -193,7 +193,7 @@ class InstructionTable {
193
193
  InstructionDesc instructions_[256];
194
194
  void Clear();
195
195
  void Init();
196
- void CopyTable(ByteMnemonic bm[], InstructionType type);
196
+ void CopyTable(const ByteMnemonic bm[], InstructionType type);
197
197
  void SetTableRange(InstructionType type, byte start, byte end, bool byte_size,
198
198
  const char* mnem);
199
199
  void AddJumpConditionalShort();
@@ -228,7 +228,8 @@ void InstructionTable::Init() {
228
228
  }
229
229
 
230
230
 
231
- void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
231
+ void InstructionTable::CopyTable(const ByteMnemonic bm[],
232
+ InstructionType type) {
232
233
  for (int i = 0; bm[i].b >= 0; i++) {
233
234
  InstructionDesc* id = &instructions_[bm[i].b];
234
235
  id->mnem = bm[i].mnem;
@@ -1062,11 +1063,12 @@ int DisassemblerX64::TwoByteOpcodeInstruction(byte* data) {
1062
1063
  AppendToBuffer("movq ");
1063
1064
  current += PrintRightXMMOperand(current);
1064
1065
  AppendToBuffer(", %s", NameOfXMMRegister(regop));
1066
+ } else if (opcode == 0x50) {
1067
+ AppendToBuffer("movmskpd %s,", NameOfCPURegister(regop));
1068
+ current += PrintRightXMMOperand(current);
1065
1069
  } else {
1066
1070
  const char* mnemonic = "?";
1067
- if (opcode == 0x50) {
1068
- mnemonic = "movmskpd";
1069
- } else if (opcode == 0x54) {
1071
+ if (opcode == 0x54) {
1070
1072
  mnemonic = "andpd";
1071
1073
  } else if (opcode == 0x56) {
1072
1074
  mnemonic = "orpd";
@@ -1,4 +1,4 @@
1
- // Copyright 2010 the V8 project authors. All rights reserved.
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
2
  // Redistribution and use in source and binary forms, with or without
3
3
  // modification, are permitted provided that the following conditions are
4
4
  // met:
@@ -50,12 +50,13 @@ static const int kNumSafepointRegisters = 16;
50
50
 
51
51
  class StackHandlerConstants : public AllStatic {
52
52
  public:
53
- static const int kNextOffset = 0 * kPointerSize;
54
- static const int kFPOffset = 1 * kPointerSize;
55
- static const int kStateOffset = 2 * kPointerSize;
56
- static const int kPCOffset = 3 * kPointerSize;
53
+ static const int kNextOffset = 0 * kPointerSize;
54
+ static const int kContextOffset = 1 * kPointerSize;
55
+ static const int kFPOffset = 2 * kPointerSize;
56
+ static const int kStateOffset = 3 * kPointerSize;
57
+ static const int kPCOffset = 4 * kPointerSize;
57
58
 
58
- static const int kSize = 4 * kPointerSize;
59
+ static const int kSize = kPCOffset + kPointerSize;
59
60
  };
60
61
 
61
62
 
@@ -45,7 +45,6 @@ namespace internal {
45
45
 
46
46
 
47
47
  static unsigned GetPropertyId(Property* property) {
48
- if (property->is_synthetic()) return AstNode::kNoNumber;
49
48
  return property->id();
50
49
  }
51
50
 
@@ -78,16 +77,18 @@ class JumpPatchSite BASE_EMBEDDED {
78
77
  }
79
78
 
80
79
  void EmitPatchInfo() {
81
- int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
82
- ASSERT(is_int8(delta_to_patch_site));
83
- __ testl(rax, Immediate(delta_to_patch_site));
80
+ if (patch_site_.is_bound()) {
81
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
82
+ ASSERT(is_int8(delta_to_patch_site));
83
+ __ testl(rax, Immediate(delta_to_patch_site));
84
84
  #ifdef DEBUG
85
- info_emitted_ = true;
85
+ info_emitted_ = true;
86
86
  #endif
87
+ } else {
88
+ __ nop(); // Signals no inlined code.
89
+ }
87
90
  }
88
91
 
89
- bool is_bound() const { return patch_site_.is_bound(); }
90
-
91
92
  private:
92
93
  // jc will be patched with jz, jnc will become jnz.
93
94
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
@@ -121,6 +122,7 @@ class JumpPatchSite BASE_EMBEDDED {
121
122
  void FullCodeGenerator::Generate(CompilationInfo* info) {
122
123
  ASSERT(info_ == NULL);
123
124
  info_ = info;
125
+ scope_ = info->scope();
124
126
  SetFunctionPosition(function());
125
127
  Comment cmnt(masm_, "[ function compiled by full code generator");
126
128
 
@@ -131,16 +133,16 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
131
133
  }
132
134
  #endif
133
135
 
134
- // Strict mode functions need to replace the receiver with undefined
135
- // when called as functions (without an explicit receiver
136
- // object). rcx is zero for method calls and non-zero for function
137
- // calls.
138
- if (info->is_strict_mode()) {
136
+ // Strict mode functions and builtins need to replace the receiver
137
+ // with undefined when called as functions (without an explicit
138
+ // receiver object). rcx is zero for method calls and non-zero for
139
+ // function calls.
140
+ if (info->is_strict_mode() || info->is_native()) {
139
141
  Label ok;
140
142
  __ testq(rcx, rcx);
141
143
  __ j(zero, &ok, Label::kNear);
142
144
  // +1 for return address.
143
- int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
145
+ int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
144
146
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
145
147
  __ movq(Operand(rsp, receiver_offset), kScratchRegister);
146
148
  __ bind(&ok);
@@ -152,7 +154,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
152
154
  __ push(rdi); // Callee's JS Function.
153
155
 
154
156
  { Comment cmnt(masm_, "[ Allocate locals");
155
- int locals_count = scope()->num_stack_slots();
157
+ int locals_count = info->scope()->num_stack_slots();
156
158
  if (locals_count == 1) {
157
159
  __ PushRoot(Heap::kUndefinedValueRootIndex);
158
160
  } else if (locals_count > 1) {
@@ -166,7 +168,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
166
168
  bool function_in_register = true;
167
169
 
168
170
  // Possibly allocate a local context.
169
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
171
+ int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
170
172
  if (heap_slots > 0) {
171
173
  Comment cmnt(masm_, "[ Allocate local context");
172
174
  // Argument to NewContext is the function, which is still in rdi.
@@ -175,7 +177,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
175
177
  FastNewContextStub stub(heap_slots);
176
178
  __ CallStub(&stub);
177
179
  } else {
178
- __ CallRuntime(Runtime::kNewContext, 1);
180
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
179
181
  }
180
182
  function_in_register = false;
181
183
  // Context is returned in both rax and rsi. It replaces the context
@@ -183,7 +185,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
183
185
  __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
184
186
 
185
187
  // Copy any necessary parameters into the context.
186
- int num_parameters = scope()->num_parameters();
188
+ int num_parameters = info->scope()->num_parameters();
187
189
  for (int i = 0; i < num_parameters; i++) {
188
190
  Slot* slot = scope()->parameter(i)->AsSlot();
189
191
  if (slot != NULL && slot->type() == Slot::CONTEXT) {
@@ -215,26 +217,21 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
215
217
  __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
216
218
  }
217
219
  // The receiver is just before the parameters on the caller's stack.
218
- int offset = scope()->num_parameters() * kPointerSize;
220
+ int num_parameters = info->scope()->num_parameters();
221
+ int offset = num_parameters * kPointerSize;
219
222
  __ lea(rdx,
220
223
  Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
221
224
  __ push(rdx);
222
- __ Push(Smi::FromInt(scope()->num_parameters()));
225
+ __ Push(Smi::FromInt(num_parameters));
223
226
  // Arguments to ArgumentsAccessStub:
224
227
  // function, receiver address, parameter count.
225
228
  // The stub will rewrite receiver and parameter count if the previous
226
229
  // stack frame was an arguments adapter frame.
227
230
  ArgumentsAccessStub stub(
228
231
  is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
229
- : ArgumentsAccessStub::NEW_NON_STRICT);
232
+ : ArgumentsAccessStub::NEW_NON_STRICT_SLOW);
230
233
  __ CallStub(&stub);
231
234
 
232
- Variable* arguments_shadow = scope()->arguments_shadow();
233
- if (arguments_shadow != NULL) {
234
- // Store new arguments object in both "arguments" and ".arguments" slots.
235
- __ movq(rcx, rax);
236
- Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
237
- }
238
235
  Move(arguments->AsSlot(), rax, rbx, rdx);
239
236
  }
240
237
 
@@ -338,7 +335,7 @@ void FullCodeGenerator::EmitReturnSequence() {
338
335
  __ movq(rsp, rbp);
339
336
  __ pop(rbp);
340
337
 
341
- int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
338
+ int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
342
339
  __ Ret(arguments_bytes, rcx);
343
340
 
344
341
  #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -377,7 +374,7 @@ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
377
374
  void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
378
375
  codegen()->Move(result_register(), slot);
379
376
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
380
- codegen()->DoTest(true_label_, false_label_, fall_through_);
377
+ codegen()->DoTest(this);
381
378
  }
382
379
 
383
380
 
@@ -410,7 +407,7 @@ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
410
407
  if (true_label_ != fall_through_) __ jmp(true_label_);
411
408
  } else {
412
409
  __ LoadRoot(result_register(), index);
413
- codegen()->DoTest(true_label_, false_label_, fall_through_);
410
+ codegen()->DoTest(this);
414
411
  }
415
412
  }
416
413
 
@@ -455,7 +452,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
455
452
  } else {
456
453
  // For simplicity we always test the accumulator register.
457
454
  __ Move(result_register(), lit);
458
- codegen()->DoTest(true_label_, false_label_, fall_through_);
455
+ codegen()->DoTest(this);
459
456
  }
460
457
  }
461
458
 
@@ -491,7 +488,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
491
488
  __ Drop(count);
492
489
  __ Move(result_register(), reg);
493
490
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
494
- codegen()->DoTest(true_label_, false_label_, fall_through_);
491
+ codegen()->DoTest(this);
495
492
  }
496
493
 
497
494
 
@@ -566,13 +563,14 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
566
563
  }
567
564
 
568
565
 
569
- void FullCodeGenerator::DoTest(Label* if_true,
566
+ void FullCodeGenerator::DoTest(Expression* condition,
567
+ Label* if_true,
570
568
  Label* if_false,
571
569
  Label* fall_through) {
572
- ToBooleanStub stub;
570
+ ToBooleanStub stub(result_register());
573
571
  __ push(result_register());
574
572
  __ CallStub(&stub);
575
- __ testq(rax, rax);
573
+ __ testq(result_register(), result_register());
576
574
  // The stub returns nonzero for true.
577
575
  Split(not_zero, if_true, if_false, fall_through);
578
576
  }
@@ -666,92 +664,69 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
666
664
  Comment cmnt(masm_, "[ Declaration");
667
665
  ASSERT(variable != NULL); // Must have been resolved.
668
666
  Slot* slot = variable->AsSlot();
669
- Property* prop = variable->AsProperty();
670
-
671
- if (slot != NULL) {
672
- switch (slot->type()) {
673
- case Slot::PARAMETER:
674
- case Slot::LOCAL:
675
- if (mode == Variable::CONST) {
676
- __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
677
- __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
678
- } else if (function != NULL) {
679
- VisitForAccumulatorValue(function);
680
- __ movq(Operand(rbp, SlotOffset(slot)), result_register());
681
- }
682
- break;
683
-
684
- case Slot::CONTEXT:
685
- // We bypass the general EmitSlotSearch because we know more about
686
- // this specific context.
687
-
688
- // The variable in the decl always resides in the current context.
689
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
690
- if (FLAG_debug_code) {
691
- // Check if we have the correct context pointer.
692
- __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
693
- __ cmpq(rbx, rsi);
694
- __ Check(equal, "Unexpected declaration in current context.");
695
- }
696
- if (mode == Variable::CONST) {
697
- __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
698
- __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
699
- // No write barrier since the hole value is in old space.
700
- } else if (function != NULL) {
701
- VisitForAccumulatorValue(function);
702
- __ movq(ContextOperand(rsi, slot->index()), result_register());
703
- int offset = Context::SlotOffset(slot->index());
704
- __ movq(rbx, rsi);
705
- __ RecordWrite(rbx, offset, result_register(), rcx);
706
- }
707
- break;
708
-
709
- case Slot::LOOKUP: {
710
- __ push(rsi);
711
- __ Push(variable->name());
712
- // Declaration nodes are always introduced in one of two modes.
713
- ASSERT(mode == Variable::VAR || mode == Variable::CONST);
714
- PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
715
- __ Push(Smi::FromInt(attr));
716
- // Push initial value, if any.
717
- // Note: For variables we must not push an initial value (such as
718
- // 'undefined') because we may have a (legal) redeclaration and we
719
- // must not destroy the current value.
720
- if (mode == Variable::CONST) {
721
- __ PushRoot(Heap::kTheHoleValueRootIndex);
722
- } else if (function != NULL) {
723
- VisitForStackValue(function);
724
- } else {
725
- __ Push(Smi::FromInt(0)); // no initial value!
726
- }
727
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
728
- break;
667
+ ASSERT(slot != NULL);
668
+ switch (slot->type()) {
669
+ case Slot::PARAMETER:
670
+ case Slot::LOCAL:
671
+ if (function != NULL) {
672
+ VisitForAccumulatorValue(function);
673
+ __ movq(Operand(rbp, SlotOffset(slot)), result_register());
674
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
675
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
676
+ __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
729
677
  }
730
- }
678
+ break;
731
679
 
732
- } else if (prop != NULL) {
733
- // A const declaration aliasing a parameter is an illegal redeclaration.
734
- ASSERT(mode != Variable::CONST);
735
- if (function != NULL) {
736
- // We are declaring a function that rewrites to a property.
737
- // Use (keyed) IC to set the initial value. We cannot visit the
738
- // rewrite because it's shared and we risk recording duplicate AST
739
- // IDs for bailouts from optimized code.
740
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
741
- { AccumulatorValueContext for_object(this);
742
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
680
+ case Slot::CONTEXT:
681
+ // We bypass the general EmitSlotSearch because we know more about
682
+ // this specific context.
683
+
684
+ // The variable in the decl always resides in the current function
685
+ // context.
686
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
687
+ if (FLAG_debug_code) {
688
+ // Check that we're not inside a with or catch context.
689
+ __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
690
+ __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
691
+ __ Check(not_equal, "Declaration in with context.");
692
+ __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
693
+ __ Check(not_equal, "Declaration in catch context.");
743
694
  }
744
- __ push(rax);
745
- VisitForAccumulatorValue(function);
746
- __ pop(rdx);
747
- ASSERT(prop->key()->AsLiteral() != NULL &&
748
- prop->key()->AsLiteral()->handle()->IsSmi());
749
- __ Move(rcx, prop->key()->AsLiteral()->handle());
695
+ if (function != NULL) {
696
+ VisitForAccumulatorValue(function);
697
+ __ movq(ContextOperand(rsi, slot->index()), result_register());
698
+ int offset = Context::SlotOffset(slot->index());
699
+ __ movq(rbx, rsi);
700
+ __ RecordWrite(rbx, offset, result_register(), rcx);
701
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
702
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
703
+ __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
704
+ // No write barrier since the hole value is in old space.
705
+ }
706
+ break;
750
707
 
751
- Handle<Code> ic = is_strict_mode()
752
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
753
- : isolate()->builtins()->KeyedStoreIC_Initialize();
754
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
708
+ case Slot::LOOKUP: {
709
+ __ push(rsi);
710
+ __ Push(variable->name());
711
+ // Declaration nodes are always introduced in one of two modes.
712
+ ASSERT(mode == Variable::VAR ||
713
+ mode == Variable::CONST ||
714
+ mode == Variable::LET);
715
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
716
+ __ Push(Smi::FromInt(attr));
717
+ // Push initial value, if any.
718
+ // Note: For variables we must not push an initial value (such as
719
+ // 'undefined') because we may have a (legal) redeclaration and we
720
+ // must not destroy the current value.
721
+ if (function != NULL) {
722
+ VisitForStackValue(function);
723
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
724
+ __ PushRoot(Heap::kTheHoleValueRootIndex);
725
+ } else {
726
+ __ Push(Smi::FromInt(0)); // no initial value!
727
+ }
728
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
729
+ break;
755
730
  }
756
731
  }
757
732
  }
@@ -824,7 +799,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
824
799
  // Record position before stub call for type feedback.
825
800
  SetSourcePosition(clause->position());
826
801
  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
827
- EmitCallIC(ic, &patch_site, clause->CompareId());
802
+ __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
803
+ patch_site.EmitPatchInfo();
828
804
 
829
805
  __ testq(rax, rax);
830
806
  __ j(not_equal, &next_test);
@@ -837,7 +813,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
837
813
  __ bind(&next_test);
838
814
  __ Drop(1); // Switch value is no longer needed.
839
815
  if (default_clause == NULL) {
840
- __ jmp(nested_statement.break_target());
816
+ __ jmp(nested_statement.break_label());
841
817
  } else {
842
818
  __ jmp(default_clause->body_target());
843
819
  }
@@ -851,7 +827,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
851
827
  VisitStatements(clause->statements());
852
828
  }
853
829
 
854
- __ bind(nested_statement.break_target());
830
+ __ bind(nested_statement.break_label());
855
831
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
856
832
  }
857
833
 
@@ -878,7 +854,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
878
854
  // Convert the object to a JS object.
879
855
  Label convert, done_convert;
880
856
  __ JumpIfSmi(rax, &convert);
881
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
857
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
882
858
  __ j(above_equal, &done_convert);
883
859
  __ bind(&convert);
884
860
  __ push(rax);
@@ -977,7 +953,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
977
953
  __ bind(&loop);
978
954
  __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
979
955
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
980
- __ j(above_equal, loop_statement.break_target());
956
+ __ j(above_equal, loop_statement.break_label());
981
957
 
982
958
  // Get the current entry of the array into register rbx.
983
959
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
@@ -1005,7 +981,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1005
981
  __ push(rbx); // Current entry.
1006
982
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1007
983
  __ Cmp(rax, Smi::FromInt(0));
1008
- __ j(equal, loop_statement.continue_target());
984
+ __ j(equal, loop_statement.continue_label());
1009
985
  __ movq(rbx, rax);
1010
986
 
1011
987
  // Update the 'each' property or variable from the possibly filtered
@@ -1022,14 +998,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1022
998
 
1023
999
  // Generate code for going to the next element by incrementing the
1024
1000
  // index (smi) stored on top of the stack.
1025
- __ bind(loop_statement.continue_target());
1001
+ __ bind(loop_statement.continue_label());
1026
1002
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1027
1003
 
1028
1004
  EmitStackCheck(stmt);
1029
1005
  __ jmp(&loop);
1030
1006
 
1031
1007
  // Remove the pointers stored on the stack.
1032
- __ bind(loop_statement.break_target());
1008
+ __ bind(loop_statement.break_label());
1033
1009
  __ addq(rsp, Immediate(5 * kPointerSize));
1034
1010
 
1035
1011
  // Exit and decrement the loop depth.
@@ -1068,7 +1044,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1068
1044
 
1069
1045
  void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1070
1046
  Comment cmnt(masm_, "[ VariableProxy");
1071
- EmitVariableLoad(expr->var());
1047
+ EmitVariableLoad(expr);
1072
1048
  }
1073
1049
 
1074
1050
 
@@ -1089,8 +1065,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1089
1065
  __ j(not_equal, slow);
1090
1066
  }
1091
1067
  // Load next context in chain.
1092
- __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1093
- __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1068
+ __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1094
1069
  // Walk the rest of the chain without clobbering rsi.
1095
1070
  context = temp;
1096
1071
  }
@@ -1118,8 +1093,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1118
1093
  __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1119
1094
  __ j(not_equal, slow);
1120
1095
  // Load next context in chain.
1121
- __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
1122
- __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1096
+ __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1123
1097
  __ jmp(&next);
1124
1098
  __ bind(&fast);
1125
1099
  }
@@ -1132,7 +1106,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1132
1106
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1133
1107
  ? RelocInfo::CODE_TARGET
1134
1108
  : RelocInfo::CODE_TARGET_CONTEXT;
1135
- EmitCallIC(ic, mode, AstNode::kNoNumber);
1109
+ __ call(ic, mode);
1136
1110
  }
1137
1111
 
1138
1112
 
@@ -1151,8 +1125,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1151
1125
  Immediate(0));
1152
1126
  __ j(not_equal, slow);
1153
1127
  }
1154
- __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1155
- __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1128
+ __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1156
1129
  // Walk the rest of the chain without clobbering rsi.
1157
1130
  context = temp;
1158
1131
  }
@@ -1213,7 +1186,7 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1213
1186
  __ Move(rax, key_literal->handle());
1214
1187
  Handle<Code> ic =
1215
1188
  isolate()->builtins()->KeyedLoadIC_Initialize();
1216
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1189
+ __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1217
1190
  __ jmp(done);
1218
1191
  }
1219
1192
  }
@@ -1222,21 +1195,24 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1222
1195
  }
1223
1196
 
1224
1197
 
1225
- void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1226
- // Four cases: non-this global variables, lookup slots, all other
1227
- // types of slots, and parameters that rewrite to explicit property
1228
- // accesses on the arguments object.
1198
+ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1199
+ // Record position before possible IC call.
1200
+ SetSourcePosition(proxy->position());
1201
+ Variable* var = proxy->var();
1202
+
1203
+ // Three cases: non-this global variables, lookup slots, and all other
1204
+ // types of slots.
1229
1205
  Slot* slot = var->AsSlot();
1230
- Property* property = var->AsProperty();
1206
+ ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
1231
1207
 
1232
- if (var->is_global() && !var->is_this()) {
1208
+ if (slot == NULL) {
1233
1209
  Comment cmnt(masm_, "Global variable");
1234
1210
  // Use inline caching. Variable name is passed in rcx and the global
1235
1211
  // object on the stack.
1236
1212
  __ Move(rcx, var->name());
1237
1213
  __ movq(rax, GlobalObjectOperand());
1238
1214
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1239
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1215
+ __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1240
1216
  context()->Plug(rax);
1241
1217
 
1242
1218
  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
@@ -1255,7 +1231,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1255
1231
 
1256
1232
  context()->Plug(rax);
1257
1233
 
1258
- } else if (slot != NULL) {
1234
+ } else {
1259
1235
  Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1260
1236
  ? "Context slot"
1261
1237
  : "Stack slot");
@@ -1270,37 +1246,21 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1270
1246
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1271
1247
  __ bind(&done);
1272
1248
  context()->Plug(rax);
1249
+ } else if (var->mode() == Variable::LET) {
1250
+ // Let bindings may be the hole value if they have not been initialized.
1251
+ // Throw a type error in this case.
1252
+ Label done;
1253
+ MemOperand slot_operand = EmitSlotSearch(slot, rax);
1254
+ __ movq(rax, slot_operand);
1255
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1256
+ __ j(not_equal, &done, Label::kNear);
1257
+ __ Push(var->name());
1258
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1259
+ __ bind(&done);
1260
+ context()->Plug(rax);
1273
1261
  } else {
1274
1262
  context()->Plug(slot);
1275
1263
  }
1276
-
1277
- } else {
1278
- Comment cmnt(masm_, "Rewritten parameter");
1279
- ASSERT_NOT_NULL(property);
1280
- // Rewritten parameter accesses are of the form "slot[literal]".
1281
-
1282
- // Assert that the object is in a slot.
1283
- Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1284
- ASSERT_NOT_NULL(object_var);
1285
- Slot* object_slot = object_var->AsSlot();
1286
- ASSERT_NOT_NULL(object_slot);
1287
-
1288
- // Load the object.
1289
- MemOperand object_loc = EmitSlotSearch(object_slot, rax);
1290
- __ movq(rdx, object_loc);
1291
-
1292
- // Assert that the key is a smi.
1293
- Literal* key_literal = property->key()->AsLiteral();
1294
- ASSERT_NOT_NULL(key_literal);
1295
- ASSERT(key_literal->handle()->IsSmi());
1296
-
1297
- // Load the key.
1298
- __ Move(rax, key_literal->handle());
1299
-
1300
- // Do a keyed property load.
1301
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1302
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
1303
- context()->Plug(rax);
1304
1264
  }
1305
1265
  }
1306
1266
 
@@ -1412,7 +1372,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1412
1372
  Handle<Code> ic = is_strict_mode()
1413
1373
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1414
1374
  : isolate()->builtins()->StoreIC_Initialize();
1415
- EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
1375
+ __ call(ic, RelocInfo::CODE_TARGET, key->id());
1416
1376
  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1417
1377
  } else {
1418
1378
  VisitForEffect(value);
@@ -1533,7 +1493,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1533
1493
  }
1534
1494
 
1535
1495
  // Left-hand side can only be a property, a global or a (parameter or local)
1536
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1496
+ // slot.
1537
1497
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1538
1498
  LhsKind assign_type = VARIABLE;
1539
1499
  Property* property = expr->target()->AsProperty();
@@ -1559,29 +1519,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1559
1519
  break;
1560
1520
  case KEYED_PROPERTY: {
1561
1521
  if (expr->is_compound()) {
1562
- if (property->is_arguments_access()) {
1563
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1564
- MemOperand slot_operand =
1565
- EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1566
- __ push(slot_operand);
1567
- __ Move(rax, property->key()->AsLiteral()->handle());
1568
- } else {
1569
- VisitForStackValue(property->obj());
1570
- VisitForAccumulatorValue(property->key());
1571
- }
1522
+ VisitForStackValue(property->obj());
1523
+ VisitForAccumulatorValue(property->key());
1572
1524
  __ movq(rdx, Operand(rsp, 0));
1573
1525
  __ push(rax);
1574
1526
  } else {
1575
- if (property->is_arguments_access()) {
1576
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1577
- MemOperand slot_operand =
1578
- EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1579
- __ push(slot_operand);
1580
- __ Push(property->key()->AsLiteral()->handle());
1581
- } else {
1582
- VisitForStackValue(property->obj());
1583
- VisitForStackValue(property->key());
1584
- }
1527
+ VisitForStackValue(property->obj());
1528
+ VisitForStackValue(property->key());
1585
1529
  }
1586
1530
  break;
1587
1531
  }
@@ -1593,7 +1537,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1593
1537
  { AccumulatorValueContext context(this);
1594
1538
  switch (assign_type) {
1595
1539
  case VARIABLE:
1596
- EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1540
+ EmitVariableLoad(expr->target()->AsVariableProxy());
1597
1541
  PrepareForBailout(expr->target(), TOS_REG);
1598
1542
  break;
1599
1543
  case NAMED_PROPERTY:
@@ -1657,14 +1601,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1657
1601
  Literal* key = prop->key()->AsLiteral();
1658
1602
  __ Move(rcx, key->handle());
1659
1603
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1660
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1604
+ __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1661
1605
  }
1662
1606
 
1663
1607
 
1664
1608
  void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1665
1609
  SetSourcePosition(prop->position());
1666
1610
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1667
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1611
+ __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
1668
1612
  }
1669
1613
 
1670
1614
 
@@ -1686,7 +1630,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1686
1630
  __ bind(&stub_call);
1687
1631
  __ movq(rax, rcx);
1688
1632
  BinaryOpStub stub(op, mode);
1689
- EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1633
+ __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1634
+ patch_site.EmitPatchInfo();
1690
1635
  __ jmp(&done, Label::kNear);
1691
1636
 
1692
1637
  __ bind(&smi_case);
@@ -1733,8 +1678,9 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1733
1678
  OverwriteMode mode) {
1734
1679
  __ pop(rdx);
1735
1680
  BinaryOpStub stub(op, mode);
1736
- // NULL signals no inlined smi code.
1737
- EmitCallIC(stub.GetCode(), NULL, expr->id());
1681
+ JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1682
+ __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1683
+ patch_site.EmitPatchInfo();
1738
1684
  context()->Plug(rax);
1739
1685
  }
1740
1686
 
@@ -1748,7 +1694,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1748
1694
  }
1749
1695
 
1750
1696
  // Left-hand side can only be a property, a global or a (parameter or local)
1751
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1697
+ // slot.
1752
1698
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1753
1699
  LhsKind assign_type = VARIABLE;
1754
1700
  Property* prop = expr->AsProperty();
@@ -1774,30 +1720,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1774
1720
  Handle<Code> ic = is_strict_mode()
1775
1721
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1776
1722
  : isolate()->builtins()->StoreIC_Initialize();
1777
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1723
+ __ call(ic);
1778
1724
  break;
1779
1725
  }
1780
1726
  case KEYED_PROPERTY: {
1781
1727
  __ push(rax); // Preserve value.
1782
- if (prop->is_synthetic()) {
1783
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
1784
- ASSERT(prop->key()->AsLiteral() != NULL);
1785
- { AccumulatorValueContext for_object(this);
1786
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1787
- }
1788
- __ movq(rdx, rax);
1789
- __ Move(rcx, prop->key()->AsLiteral()->handle());
1790
- } else {
1791
- VisitForStackValue(prop->obj());
1792
- VisitForAccumulatorValue(prop->key());
1793
- __ movq(rcx, rax);
1794
- __ pop(rdx);
1795
- }
1728
+ VisitForStackValue(prop->obj());
1729
+ VisitForAccumulatorValue(prop->key());
1730
+ __ movq(rcx, rax);
1731
+ __ pop(rdx);
1796
1732
  __ pop(rax); // Restore value.
1797
1733
  Handle<Code> ic = is_strict_mode()
1798
1734
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1799
1735
  : isolate()->builtins()->KeyedStoreIC_Initialize();
1800
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
1736
+ __ call(ic);
1801
1737
  break;
1802
1738
  }
1803
1739
  }
@@ -1808,8 +1744,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1808
1744
 
1809
1745
  void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1810
1746
  Token::Value op) {
1811
- // Left-hand sides that rewrite to explicit property accesses do not reach
1812
- // here.
1813
1747
  ASSERT(var != NULL);
1814
1748
  ASSERT(var->is_global() || var->AsSlot() != NULL);
1815
1749
 
@@ -1823,7 +1757,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1823
1757
  Handle<Code> ic = is_strict_mode()
1824
1758
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1825
1759
  : isolate()->builtins()->StoreIC_Initialize();
1826
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
1760
+ __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1827
1761
 
1828
1762
  } else if (op == Token::INIT_CONST) {
1829
1763
  // Like var declarations, const declarations are hoisted to function
@@ -1843,26 +1777,67 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1843
1777
  __ j(not_equal, &skip);
1844
1778
  __ movq(Operand(rbp, SlotOffset(slot)), rax);
1845
1779
  break;
1780
+ case Slot::CONTEXT:
1781
+ case Slot::LOOKUP:
1782
+ __ push(rax);
1783
+ __ push(rsi);
1784
+ __ Push(var->name());
1785
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1786
+ break;
1787
+ }
1788
+ __ bind(&skip);
1789
+
1790
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
1791
+ // Perform the assignment for non-const variables. Const assignments
1792
+ // are simply skipped.
1793
+ Slot* slot = var->AsSlot();
1794
+ switch (slot->type()) {
1795
+ case Slot::PARAMETER:
1796
+ case Slot::LOCAL: {
1797
+ Label assign;
1798
+ // Check for an initialized let binding.
1799
+ __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1800
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1801
+ __ j(not_equal, &assign);
1802
+ __ Push(var->name());
1803
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1804
+ // Perform the assignment.
1805
+ __ bind(&assign);
1806
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
1807
+ break;
1808
+ }
1809
+
1846
1810
  case Slot::CONTEXT: {
1847
- __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
1848
- __ movq(rdx, ContextOperand(rcx, slot->index()));
1811
+ // Let variables may be the hole value if they have not been
1812
+ // initialized. Throw a type error in this case.
1813
+ Label assign;
1814
+ MemOperand target = EmitSlotSearch(slot, rcx);
1815
+ // Check for an initialized let binding.
1816
+ __ movq(rdx, target);
1849
1817
  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1850
- __ j(not_equal, &skip);
1851
- __ movq(ContextOperand(rcx, slot->index()), rax);
1818
+ __ j(not_equal, &assign, Label::kNear);
1819
+ __ Push(var->name());
1820
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
1821
+ // Perform the assignment.
1822
+ __ bind(&assign);
1823
+ __ movq(target, rax);
1824
+ // The value of the assignment is in eax. RecordWrite clobbers its
1825
+ // register arguments.
1826
+ __ movq(rdx, rax);
1852
1827
  int offset = Context::SlotOffset(slot->index());
1853
- __ movq(rdx, rax); // Preserve the stored value in eax.
1854
1828
  __ RecordWrite(rcx, offset, rdx, rbx);
1855
1829
  break;
1856
1830
  }
1831
+
1857
1832
  case Slot::LOOKUP:
1858
- __ push(rax);
1859
- __ push(rsi);
1833
+ // Call the runtime for the assignment.
1834
+ __ push(rax); // Value.
1835
+ __ push(rsi); // Context.
1860
1836
  __ Push(var->name());
1861
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1837
+ __ Push(Smi::FromInt(strict_mode_flag()));
1838
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
1862
1839
  break;
1863
1840
  }
1864
- __ bind(&skip);
1865
-
1866
1841
  } else if (var->mode() != Variable::CONST) {
1867
1842
  // Perform the assignment for non-const variables. Const assignments
1868
1843
  // are simply skipped.
@@ -1926,7 +1901,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1926
1901
  Handle<Code> ic = is_strict_mode()
1927
1902
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
1928
1903
  : isolate()->builtins()->StoreIC_Initialize();
1929
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1904
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
1930
1905
 
1931
1906
  // If the assignment ends an initialization block, revert to fast case.
1932
1907
  if (expr->ends_initialization_block()) {
@@ -1966,7 +1941,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1966
1941
  Handle<Code> ic = is_strict_mode()
1967
1942
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1968
1943
  : isolate()->builtins()->KeyedStoreIC_Initialize();
1969
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1944
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
1970
1945
 
1971
1946
  // If the assignment ends an initialization block, revert to fast case.
1972
1947
  if (expr->ends_initialization_block()) {
@@ -2018,7 +1993,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
2018
1993
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2019
1994
  Handle<Code> ic =
2020
1995
  ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
2021
- EmitCallIC(ic, mode, expr->id());
1996
+ __ call(ic, mode, expr->id());
2022
1997
  RecordJSReturnSite(expr);
2023
1998
  // Restore context register.
2024
1999
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2052,7 +2027,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2052
2027
  Handle<Code> ic =
2053
2028
  ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
2054
2029
  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2055
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2030
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
2056
2031
  RecordJSReturnSite(expr);
2057
2032
  // Restore context register.
2058
2033
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2092,7 +2067,7 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2092
2067
  }
2093
2068
 
2094
2069
  // Push the receiver of the enclosing function and do runtime call.
2095
- __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2070
+ __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2096
2071
 
2097
2072
  // Push the strict mode flag.
2098
2073
  __ Push(Smi::FromInt(strict_mode_flag()));
@@ -2206,9 +2181,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2206
2181
  __ bind(&done);
2207
2182
  // Push function.
2208
2183
  __ push(rax);
2209
- // Push global receiver.
2210
- __ movq(rbx, GlobalObjectOperand());
2211
- __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2184
+ // The receiver is implicitly the global receiver. Indicate this
2185
+ // by passing the hole to the call function stub.
2186
+ __ PushRoot(Heap::kTheHoleValueRootIndex);
2212
2187
  __ bind(&call);
2213
2188
  }
2214
2189
 
@@ -2228,38 +2203,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
2228
2203
  EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2229
2204
  } else {
2230
2205
  // Call to a keyed property.
2231
- // For a synthetic property use keyed load IC followed by function call,
2232
- // for a regular property use keyed EmitCallIC.
2233
- if (prop->is_synthetic()) {
2234
- // Do not visit the object and key subexpressions (they are shared
2235
- // by all occurrences of the same rewritten parameter).
2236
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
2237
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2238
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2239
- MemOperand operand = EmitSlotSearch(slot, rdx);
2240
- __ movq(rdx, operand);
2241
-
2242
- ASSERT(prop->key()->AsLiteral() != NULL);
2243
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2244
- __ Move(rax, prop->key()->AsLiteral()->handle());
2245
-
2246
- // Record source code position for IC call.
2247
- SetSourcePosition(prop->position());
2248
-
2249
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2250
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
2251
- // Push result (function).
2252
- __ push(rax);
2253
- // Push Global receiver.
2254
- __ movq(rcx, GlobalObjectOperand());
2255
- __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
2256
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2257
- } else {
2258
- { PreservePositionScope scope(masm()->positions_recorder());
2259
- VisitForStackValue(prop->obj());
2260
- }
2261
- EmitKeyedCallWithIC(expr, prop->key());
2206
+ { PreservePositionScope scope(masm()->positions_recorder());
2207
+ VisitForStackValue(prop->obj());
2262
2208
  }
2209
+ EmitKeyedCallWithIC(expr, prop->key());
2263
2210
  }
2264
2211
  } else {
2265
2212
  { PreservePositionScope scope(masm()->positions_recorder());
@@ -2373,9 +2320,9 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2373
2320
  Immediate(1 << Map::kIsUndetectable));
2374
2321
  __ j(not_zero, if_false);
2375
2322
  __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2376
- __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
2323
+ __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2377
2324
  __ j(below, if_false);
2378
- __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
2325
+ __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2379
2326
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2380
2327
  Split(below_equal, if_true, if_false, fall_through);
2381
2328
 
@@ -2396,7 +2343,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2396
2343
  &if_true, &if_false, &fall_through);
2397
2344
 
2398
2345
  __ JumpIfSmi(rax, if_false);
2399
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2346
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2400
2347
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2401
2348
  Split(above_equal, if_true, if_false, fall_through);
2402
2349
 
@@ -2634,7 +2581,7 @@ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2634
2581
  // parameter count in rax.
2635
2582
  VisitForAccumulatorValue(args->at(0));
2636
2583
  __ movq(rdx, rax);
2637
- __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2584
+ __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2638
2585
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2639
2586
  __ CallStub(&stub);
2640
2587
  context()->Plug(rax);
@@ -2646,7 +2593,7 @@ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2646
2593
 
2647
2594
  Label exit;
2648
2595
  // Get the number of formal parameters.
2649
- __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2596
+ __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2650
2597
 
2651
2598
  // Check if the calling frame is an arguments adaptor frame.
2652
2599
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
@@ -2675,16 +2622,18 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2675
2622
 
2676
2623
  // Check that the object is a JS object but take special care of JS
2677
2624
  // functions to make sure they have 'Function' as their class.
2678
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); // Map is now in rax.
2625
+ __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2626
+ // Map is now in rax.
2679
2627
  __ j(below, &null);
2680
2628
 
2681
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
2682
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2683
- // LAST_JS_OBJECT_TYPE.
2684
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2685
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2686
- __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
2687
- __ j(equal, &function);
2629
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
2630
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
2631
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
2632
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
2633
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
2634
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
2635
+ __ CmpInstanceType(rax, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
2636
+ __ j(above_equal, &function);
2688
2637
 
2689
2638
  // Check if the constructor in the map is a function.
2690
2639
  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
@@ -2727,13 +2676,11 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2727
2676
  // with '%2s' (see Logger::LogRuntime for all the formats).
2728
2677
  // 2 (array): Arguments to the format string.
2729
2678
  ASSERT_EQ(args->length(), 3);
2730
- #ifdef ENABLE_LOGGING_AND_PROFILING
2731
2679
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2732
2680
  VisitForStackValue(args->at(1));
2733
2681
  VisitForStackValue(args->at(2));
2734
2682
  __ CallRuntime(Runtime::kLog, 2);
2735
2683
  }
2736
- #endif
2737
2684
  // Finally, we're expected to leave a value on the top of the stack.
2738
2685
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2739
2686
  context()->Plug(rax);
@@ -3067,7 +3014,8 @@ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3067
3014
  // InvokeFunction requires the function in rdi. Move it in there.
3068
3015
  __ movq(rdi, result_register());
3069
3016
  ParameterCount count(arg_count);
3070
- __ InvokeFunction(rdi, count, CALL_FUNCTION);
3017
+ __ InvokeFunction(rdi, count, CALL_FUNCTION,
3018
+ NullCallWrapper(), CALL_AS_METHOD);
3071
3019
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3072
3020
  context()->Plug(rax);
3073
3021
  }
@@ -3190,7 +3138,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
3190
3138
 
3191
3139
  Label done, not_found;
3192
3140
  // tmp now holds finger offset as a smi.
3193
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3141
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3194
3142
  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3195
3143
  SmiIndex index =
3196
3144
  __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
@@ -3330,9 +3278,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3330
3278
  __ j(not_equal, &bailout);
3331
3279
 
3332
3280
  // Check that the array has fast elements.
3333
- __ testb(FieldOperand(scratch, Map::kBitField2Offset),
3334
- Immediate(1 << Map::kHasFastElements));
3335
- __ j(zero, &bailout);
3281
+ __ CheckFastElements(scratch, &bailout);
3336
3282
 
3337
3283
  // Array has fast elements, so its length must be a smi.
3338
3284
  // If the array has length zero, return the empty string.
@@ -3608,7 +3554,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3608
3554
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3609
3555
  Handle<Code> ic =
3610
3556
  ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
3611
- EmitCallIC(ic, mode, expr->id());
3557
+ __ call(ic, mode, expr->id());
3612
3558
  // Restore context register.
3613
3559
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3614
3560
  } else {
@@ -3626,17 +3572,11 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3626
3572
  Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3627
3573
 
3628
3574
  if (prop != NULL) {
3629
- if (prop->is_synthetic()) {
3630
- // Result of deleting parameters is false, even when they rewrite
3631
- // to accesses on the arguments object.
3632
- context()->Plug(false);
3633
- } else {
3634
- VisitForStackValue(prop->obj());
3635
- VisitForStackValue(prop->key());
3636
- __ Push(Smi::FromInt(strict_mode_flag()));
3637
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3638
- context()->Plug(rax);
3639
- }
3575
+ VisitForStackValue(prop->obj());
3576
+ VisitForStackValue(prop->key());
3577
+ __ Push(Smi::FromInt(strict_mode_flag()));
3578
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3579
+ context()->Plug(rax);
3640
3580
  } else if (var != NULL) {
3641
3581
  // Delete of an unqualified identifier is disallowed in strict mode
3642
3582
  // but "delete this" is.
@@ -3711,8 +3651,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3711
3651
  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3712
3652
  VisitForAccumulatorValue(expr->expression());
3713
3653
  Label no_conversion;
3714
- Condition is_smi = masm_->CheckSmi(result_register());
3715
- __ j(is_smi, &no_conversion);
3654
+ __ JumpIfSmi(result_register(), &no_conversion);
3716
3655
  ToNumberStub convert_stub;
3717
3656
  __ CallStub(&convert_stub);
3718
3657
  __ bind(&no_conversion);
@@ -3746,7 +3685,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3746
3685
  // accumulator register rax.
3747
3686
  VisitForAccumulatorValue(expr->expression());
3748
3687
  SetSourcePosition(expr->position());
3749
- EmitCallIC(stub.GetCode(), NULL, expr->id());
3688
+ __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3750
3689
  context()->Plug(rax);
3751
3690
  }
3752
3691
 
@@ -3763,7 +3702,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3763
3702
  }
3764
3703
 
3765
3704
  // Expression can only be a property, a global or a (parameter or local)
3766
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3705
+ // slot.
3767
3706
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3768
3707
  LhsKind assign_type = VARIABLE;
3769
3708
  Property* prop = expr->expression()->AsProperty();
@@ -3778,7 +3717,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3778
3717
  if (assign_type == VARIABLE) {
3779
3718
  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3780
3719
  AccumulatorValueContext context(this);
3781
- EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3720
+ EmitVariableLoad(expr->expression()->AsVariableProxy());
3782
3721
  } else {
3783
3722
  // Reserve space for result of postfix operation.
3784
3723
  if (expr->is_postfix() && !context()->IsEffect()) {
@@ -3789,16 +3728,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3789
3728
  __ push(rax); // Copy of receiver, needed for later store.
3790
3729
  EmitNamedPropertyLoad(prop);
3791
3730
  } else {
3792
- if (prop->is_arguments_access()) {
3793
- VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3794
- MemOperand slot_operand =
3795
- EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
3796
- __ push(slot_operand);
3797
- __ Move(rax, prop->key()->AsLiteral()->handle());
3798
- } else {
3799
- VisitForStackValue(prop->obj());
3800
- VisitForAccumulatorValue(prop->key());
3801
- }
3731
+ VisitForStackValue(prop->obj());
3732
+ VisitForAccumulatorValue(prop->key());
3802
3733
  __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
3803
3734
  __ push(rax); // Copy of key, needed for later store.
3804
3735
  EmitKeyedPropertyLoad(prop);
@@ -3815,9 +3746,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3815
3746
 
3816
3747
  // Call ToNumber only if operand is not a smi.
3817
3748
  Label no_conversion;
3818
- Condition is_smi;
3819
- is_smi = masm_->CheckSmi(rax);
3820
- __ j(is_smi, &no_conversion, Label::kNear);
3749
+ __ JumpIfSmi(rax, &no_conversion, Label::kNear);
3821
3750
  ToNumberStub convert_stub;
3822
3751
  __ CallStub(&convert_stub);
3823
3752
  __ bind(&no_conversion);
@@ -3877,7 +3806,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3877
3806
  __ movq(rdx, rax);
3878
3807
  __ Move(rax, Smi::FromInt(1));
3879
3808
  }
3880
- EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
3809
+ __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
3810
+ patch_site.EmitPatchInfo();
3881
3811
  __ bind(&done);
3882
3812
 
3883
3813
  // Store the value returned in rax.
@@ -3910,7 +3840,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3910
3840
  Handle<Code> ic = is_strict_mode()
3911
3841
  ? isolate()->builtins()->StoreIC_Initialize_Strict()
3912
3842
  : isolate()->builtins()->StoreIC_Initialize();
3913
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3843
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
3914
3844
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3915
3845
  if (expr->is_postfix()) {
3916
3846
  if (!context()->IsEffect()) {
@@ -3927,7 +3857,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3927
3857
  Handle<Code> ic = is_strict_mode()
3928
3858
  ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3929
3859
  : isolate()->builtins()->KeyedStoreIC_Initialize();
3930
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
3860
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
3931
3861
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3932
3862
  if (expr->is_postfix()) {
3933
3863
  if (!context()->IsEffect()) {
@@ -3954,7 +3884,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3954
3884
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3955
3885
  // Use a regular load, not a contextual load, to avoid a reference
3956
3886
  // error.
3957
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
3887
+ __ call(ic);
3958
3888
  PrepareForBailout(expr, TOS_REG);
3959
3889
  context()->Plug(rax);
3960
3890
  } else if (proxy != NULL &&
@@ -3977,30 +3907,18 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3977
3907
  context()->Plug(rax);
3978
3908
  } else {
3979
3909
  // This expression cannot throw a reference error at the top level.
3980
- context()->HandleExpression(expr);
3910
+ VisitInCurrentContext(expr);
3981
3911
  }
3982
3912
  }
3983
3913
 
3984
3914
 
3985
- bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3986
- Expression* left,
3987
- Expression* right,
3988
- Label* if_true,
3989
- Label* if_false,
3990
- Label* fall_through) {
3991
- if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3992
-
3993
- // Check for the pattern: typeof <expression> == <string literal>.
3994
- Literal* right_literal = right->AsLiteral();
3995
- if (right_literal == NULL) return false;
3996
- Handle<Object> right_literal_value = right_literal->handle();
3997
- if (!right_literal_value->IsString()) return false;
3998
- UnaryOperation* left_unary = left->AsUnaryOperation();
3999
- if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4000
- Handle<String> check = Handle<String>::cast(right_literal_value);
4001
-
3915
+ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3916
+ Handle<String> check,
3917
+ Label* if_true,
3918
+ Label* if_false,
3919
+ Label* fall_through) {
4002
3920
  { AccumulatorValueContext context(this);
4003
- VisitForTypeofValue(left_unary->expression());
3921
+ VisitForTypeofValue(expr);
4004
3922
  }
4005
3923
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4006
3924
 
@@ -4022,6 +3940,10 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4022
3940
  __ j(equal, if_true);
4023
3941
  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4024
3942
  Split(equal, if_true, if_false, fall_through);
3943
+ } else if (FLAG_harmony_typeof &&
3944
+ check->Equals(isolate()->heap()->null_symbol())) {
3945
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
3946
+ Split(equal, if_true, if_false, fall_through);
4025
3947
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4026
3948
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4027
3949
  __ j(equal, if_true);
@@ -4033,16 +3955,19 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4033
3955
  Split(not_zero, if_true, if_false, fall_through);
4034
3956
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4035
3957
  __ JumpIfSmi(rax, if_false);
4036
- __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
3958
+ STATIC_ASSERT(LAST_CALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE);
3959
+ __ CmpObjectType(rax, FIRST_CALLABLE_SPEC_OBJECT_TYPE, rdx);
4037
3960
  Split(above_equal, if_true, if_false, fall_through);
4038
3961
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4039
3962
  __ JumpIfSmi(rax, if_false);
4040
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
4041
- __ j(equal, if_true);
4042
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
3963
+ if (!FLAG_harmony_typeof) {
3964
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
3965
+ __ j(equal, if_true);
3966
+ }
3967
+ __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4043
3968
  __ j(below, if_false);
4044
- __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
4045
- __ j(above_equal, if_false);
3969
+ __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3970
+ __ j(above, if_false);
4046
3971
  // Check for undetectable objects => false.
4047
3972
  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4048
3973
  Immediate(1 << Map::kIsUndetectable));
@@ -4050,8 +3975,18 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
4050
3975
  } else {
4051
3976
  if (if_false != fall_through) __ jmp(if_false);
4052
3977
  }
3978
+ }
4053
3979
 
4054
- return true;
3980
+
3981
+ void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
3982
+ Label* if_true,
3983
+ Label* if_false,
3984
+ Label* fall_through) {
3985
+ VisitForAccumulatorValue(expr);
3986
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3987
+
3988
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
3989
+ Split(equal, if_true, if_false, fall_through);
4055
3990
  }
4056
3991
 
4057
3992
 
@@ -4070,14 +4005,12 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4070
4005
 
4071
4006
  // First we try a fast inlined version of the compare when one of
4072
4007
  // the operands is a literal.
4073
- Token::Value op = expr->op();
4074
- Expression* left = expr->left();
4075
- Expression* right = expr->right();
4076
- if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4008
+ if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
4077
4009
  context()->Plug(if_true, if_false);
4078
4010
  return;
4079
4011
  }
4080
4012
 
4013
+ Token::Value op = expr->op();
4081
4014
  VisitForStackValue(expr->left());
4082
4015
  switch (op) {
4083
4016
  case Token::IN:
@@ -4102,10 +4035,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4102
4035
  default: {
4103
4036
  VisitForAccumulatorValue(expr->right());
4104
4037
  Condition cc = no_condition;
4105
- bool strict = false;
4106
4038
  switch (op) {
4107
4039
  case Token::EQ_STRICT:
4108
- strict = true;
4109
4040
  // Fall through.
4110
4041
  case Token::EQ:
4111
4042
  cc = equal;
@@ -4152,7 +4083,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4152
4083
  // Record position and call the compare IC.
4153
4084
  SetSourcePosition(expr->position());
4154
4085
  Handle<Code> ic = CompareIC::GetUninitialized(op);
4155
- EmitCallIC(ic, &patch_site, expr->id());
4086
+ __ call(ic, RelocInfo::CODE_TARGET, expr->id());
4087
+ patch_site.EmitPatchInfo();
4156
4088
 
4157
4089
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4158
4090
  __ testq(rax, rax);
@@ -4184,8 +4116,7 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4184
4116
  __ j(equal, if_true);
4185
4117
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4186
4118
  __ j(equal, if_true);
4187
- Condition is_smi = masm_->CheckSmi(rax);
4188
- __ j(is_smi, if_false);
4119
+ __ JumpIfSmi(rax, if_false);
4189
4120
  // It can be an undetectable object.
4190
4121
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4191
4122
  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
@@ -4212,59 +4143,6 @@ Register FullCodeGenerator::context_register() {
4212
4143
  }
4213
4144
 
4214
4145
 
4215
- void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4216
- RelocInfo::Mode mode,
4217
- unsigned ast_id) {
4218
- ASSERT(mode == RelocInfo::CODE_TARGET ||
4219
- mode == RelocInfo::CODE_TARGET_CONTEXT);
4220
- Counters* counters = isolate()->counters();
4221
- switch (ic->kind()) {
4222
- case Code::LOAD_IC:
4223
- __ IncrementCounter(counters->named_load_full(), 1);
4224
- break;
4225
- case Code::KEYED_LOAD_IC:
4226
- __ IncrementCounter(counters->keyed_load_full(), 1);
4227
- break;
4228
- case Code::STORE_IC:
4229
- __ IncrementCounter(counters->named_store_full(), 1);
4230
- break;
4231
- case Code::KEYED_STORE_IC:
4232
- __ IncrementCounter(counters->keyed_store_full(), 1);
4233
- default:
4234
- break;
4235
- }
4236
- __ call(ic, mode, ast_id);
4237
- }
4238
-
4239
-
4240
- void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4241
- JumpPatchSite* patch_site,
4242
- unsigned ast_id) {
4243
- Counters* counters = isolate()->counters();
4244
- switch (ic->kind()) {
4245
- case Code::LOAD_IC:
4246
- __ IncrementCounter(counters->named_load_full(), 1);
4247
- break;
4248
- case Code::KEYED_LOAD_IC:
4249
- __ IncrementCounter(counters->keyed_load_full(), 1);
4250
- break;
4251
- case Code::STORE_IC:
4252
- __ IncrementCounter(counters->named_store_full(), 1);
4253
- break;
4254
- case Code::KEYED_STORE_IC:
4255
- __ IncrementCounter(counters->keyed_store_full(), 1);
4256
- default:
4257
- break;
4258
- }
4259
- __ call(ic, RelocInfo::CODE_TARGET, ast_id);
4260
- if (patch_site != NULL && patch_site->is_bound()) {
4261
- patch_site->EmitPatchInfo();
4262
- } else {
4263
- __ nop(); // Signals no inlined code.
4264
- }
4265
- }
4266
-
4267
-
4268
4146
  void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4269
4147
  ASSERT(IsAligned(frame_offset, kPointerSize));
4270
4148
  __ movq(Operand(rbp, frame_offset), value);
@@ -4276,6 +4154,26 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4276
4154
  }
4277
4155
 
4278
4156
 
4157
+ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4158
+ Scope* declaration_scope = scope()->DeclarationScope();
4159
+ if (declaration_scope->is_global_scope()) {
4160
+ // Contexts nested in the global context have a canonical empty function
4161
+ // as their closure, not the anonymous closure containing the global
4162
+ // code. Pass a smi sentinel and let the runtime look up the empty
4163
+ // function.
4164
+ __ Push(Smi::FromInt(0));
4165
+ } else if (declaration_scope->is_eval_scope()) {
4166
+ // Contexts created by a call to eval have the same closure as the
4167
+ // context calling eval, not the anonymous closure containing the eval
4168
+ // code. Fetch it from the context.
4169
+ __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4170
+ } else {
4171
+ ASSERT(declaration_scope->is_function_scope());
4172
+ __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4173
+ }
4174
+ }
4175
+
4176
+
4279
4177
  // ----------------------------------------------------------------------------
4280
4178
  // Non-local control flow support.
4281
4179
 
@@ -4284,11 +4182,11 @@ void FullCodeGenerator::EnterFinallyBlock() {
4284
4182
  ASSERT(!result_register().is(rdx));
4285
4183
  ASSERT(!result_register().is(rcx));
4286
4184
  // Cook return address on top of stack (smi encoded Code* delta)
4287
- __ movq(rdx, Operand(rsp, 0));
4185
+ __ pop(rdx);
4288
4186
  __ Move(rcx, masm_->CodeObject());
4289
4187
  __ subq(rdx, rcx);
4290
4188
  __ Integer32ToSmi(rdx, rdx);
4291
- __ movq(Operand(rsp, 0), rdx);
4189
+ __ push(rdx);
4292
4190
  // Store result register while executing finally block.
4293
4191
  __ push(result_register());
4294
4192
  }
@@ -4297,16 +4195,13 @@ void FullCodeGenerator::EnterFinallyBlock() {
4297
4195
  void FullCodeGenerator::ExitFinallyBlock() {
4298
4196
  ASSERT(!result_register().is(rdx));
4299
4197
  ASSERT(!result_register().is(rcx));
4300
- // Restore result register from stack.
4301
4198
  __ pop(result_register());
4302
4199
  // Uncook return address.
4303
- __ movq(rdx, Operand(rsp, 0));
4200
+ __ pop(rdx);
4304
4201
  __ SmiToInteger32(rdx, rdx);
4305
4202
  __ Move(rcx, masm_->CodeObject());
4306
4203
  __ addq(rdx, rcx);
4307
- __ movq(Operand(rsp, 0), rdx);
4308
- // And return.
4309
- __ ret(0);
4204
+ __ jmp(rdx);
4310
4205
  }
4311
4206
 
4312
4207