libv8 3.3.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (700)
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +44 -0
  5. data/Rakefile +73 -0
  6. data/ext/libv8/extconf.rb +9 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +38 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/scons/CHANGES.txt +5334 -0
  12. data/lib/libv8/scons/LICENSE.txt +20 -0
  13. data/lib/libv8/scons/MANIFEST +199 -0
  14. data/lib/libv8/scons/PKG-INFO +13 -0
  15. data/lib/libv8/scons/README.txt +243 -0
  16. data/lib/libv8/scons/RELEASE.txt +98 -0
  17. data/lib/libv8/scons/engine/SCons/Action.py +1241 -0
  18. data/lib/libv8/scons/engine/SCons/Builder.py +877 -0
  19. data/lib/libv8/scons/engine/SCons/CacheDir.py +216 -0
  20. data/lib/libv8/scons/engine/SCons/Conftest.py +793 -0
  21. data/lib/libv8/scons/engine/SCons/Debug.py +220 -0
  22. data/lib/libv8/scons/engine/SCons/Defaults.py +480 -0
  23. data/lib/libv8/scons/engine/SCons/Environment.py +2318 -0
  24. data/lib/libv8/scons/engine/SCons/Errors.py +205 -0
  25. data/lib/libv8/scons/engine/SCons/Executor.py +633 -0
  26. data/lib/libv8/scons/engine/SCons/Job.py +435 -0
  27. data/lib/libv8/scons/engine/SCons/Memoize.py +244 -0
  28. data/lib/libv8/scons/engine/SCons/Node/Alias.py +152 -0
  29. data/lib/libv8/scons/engine/SCons/Node/FS.py +3142 -0
  30. data/lib/libv8/scons/engine/SCons/Node/Python.py +128 -0
  31. data/lib/libv8/scons/engine/SCons/Node/__init__.py +1328 -0
  32. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +50 -0
  33. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +50 -0
  34. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +50 -0
  35. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +50 -0
  36. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +76 -0
  37. data/lib/libv8/scons/engine/SCons/Options/__init__.py +67 -0
  38. data/lib/libv8/scons/engine/SCons/PathList.py +231 -0
  39. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +241 -0
  40. data/lib/libv8/scons/engine/SCons/Platform/aix.py +69 -0
  41. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +55 -0
  42. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +46 -0
  43. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +46 -0
  44. data/lib/libv8/scons/engine/SCons/Platform/irix.py +44 -0
  45. data/lib/libv8/scons/engine/SCons/Platform/os2.py +58 -0
  46. data/lib/libv8/scons/engine/SCons/Platform/posix.py +263 -0
  47. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +50 -0
  48. data/lib/libv8/scons/engine/SCons/Platform/win32.py +385 -0
  49. data/lib/libv8/scons/engine/SCons/SConf.py +1030 -0
  50. data/lib/libv8/scons/engine/SCons/SConsign.py +383 -0
  51. data/lib/libv8/scons/engine/SCons/Scanner/C.py +132 -0
  52. data/lib/libv8/scons/engine/SCons/Scanner/D.py +73 -0
  53. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +109 -0
  54. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +316 -0
  55. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +48 -0
  56. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +384 -0
  57. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +101 -0
  58. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +55 -0
  59. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +413 -0
  60. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +384 -0
  61. data/lib/libv8/scons/engine/SCons/Script/Main.py +1334 -0
  62. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +939 -0
  63. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +640 -0
  64. data/lib/libv8/scons/engine/SCons/Script/__init__.py +412 -0
  65. data/lib/libv8/scons/engine/SCons/Sig.py +63 -0
  66. data/lib/libv8/scons/engine/SCons/Subst.py +904 -0
  67. data/lib/libv8/scons/engine/SCons/Taskmaster.py +1017 -0
  68. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +61 -0
  69. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +67 -0
  70. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +73 -0
  71. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +246 -0
  72. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +323 -0
  73. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
  74. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
  75. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +240 -0
  76. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +82 -0
  77. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +391 -0
  78. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +456 -0
  79. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +499 -0
  80. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +103 -0
  81. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +137 -0
  82. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +64 -0
  83. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +64 -0
  84. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +71 -0
  85. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +681 -0
  86. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +82 -0
  87. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +74 -0
  88. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +80 -0
  89. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +76 -0
  90. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +71 -0
  91. data/lib/libv8/scons/engine/SCons/Tool/ar.py +63 -0
  92. data/lib/libv8/scons/engine/SCons/Tool/as.py +78 -0
  93. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +81 -0
  94. data/lib/libv8/scons/engine/SCons/Tool/c++.py +99 -0
  95. data/lib/libv8/scons/engine/SCons/Tool/cc.py +102 -0
  96. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +58 -0
  97. data/lib/libv8/scons/engine/SCons/Tool/default.py +50 -0
  98. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +223 -0
  99. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +64 -0
  100. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +124 -0
  101. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +94 -0
  102. data/lib/libv8/scons/engine/SCons/Tool/f77.py +62 -0
  103. data/lib/libv8/scons/engine/SCons/Tool/f90.py +62 -0
  104. data/lib/libv8/scons/engine/SCons/Tool/f95.py +63 -0
  105. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +98 -0
  106. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +62 -0
  107. data/lib/libv8/scons/engine/SCons/Tool/g++.py +90 -0
  108. data/lib/libv8/scons/engine/SCons/Tool/g77.py +73 -0
  109. data/lib/libv8/scons/engine/SCons/Tool/gas.py +53 -0
  110. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +80 -0
  111. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +64 -0
  112. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +63 -0
  113. data/lib/libv8/scons/engine/SCons/Tool/gs.py +81 -0
  114. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +84 -0
  115. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +53 -0
  116. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +77 -0
  117. data/lib/libv8/scons/engine/SCons/Tool/icc.py +59 -0
  118. data/lib/libv8/scons/engine/SCons/Tool/icl.py +52 -0
  119. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +72 -0
  120. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +88 -0
  121. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +59 -0
  122. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +60 -0
  123. data/lib/libv8/scons/engine/SCons/Tool/install.py +229 -0
  124. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +482 -0
  125. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +67 -0
  126. data/lib/libv8/scons/engine/SCons/Tool/jar.py +110 -0
  127. data/lib/libv8/scons/engine/SCons/Tool/javac.py +230 -0
  128. data/lib/libv8/scons/engine/SCons/Tool/javah.py +137 -0
  129. data/lib/libv8/scons/engine/SCons/Tool/latex.py +79 -0
  130. data/lib/libv8/scons/engine/SCons/Tool/lex.py +97 -0
  131. data/lib/libv8/scons/engine/SCons/Tool/link.py +121 -0
  132. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +112 -0
  133. data/lib/libv8/scons/engine/SCons/Tool/m4.py +63 -0
  134. data/lib/libv8/scons/engine/SCons/Tool/masm.py +77 -0
  135. data/lib/libv8/scons/engine/SCons/Tool/midl.py +88 -0
  136. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +158 -0
  137. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +64 -0
  138. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +266 -0
  139. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +50 -0
  140. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +268 -0
  141. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +1388 -0
  142. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +207 -0
  143. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +107 -0
  144. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +72 -0
  145. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +312 -0
  146. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
  147. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +527 -0
  148. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +365 -0
  149. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
  150. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
  151. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
  152. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
  153. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +44 -0
  154. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +44 -0
  155. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +78 -0
  156. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +83 -0
  157. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +108 -0
  158. data/lib/libv8/scons/engine/SCons/Tool/qt.py +336 -0
  159. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +120 -0
  160. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +70 -0
  161. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +132 -0
  162. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +68 -0
  163. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +58 -0
  164. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +53 -0
  165. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +63 -0
  166. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +67 -0
  167. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +142 -0
  168. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +58 -0
  169. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +63 -0
  170. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +64 -0
  171. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +64 -0
  172. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +77 -0
  173. data/lib/libv8/scons/engine/SCons/Tool/swig.py +182 -0
  174. data/lib/libv8/scons/engine/SCons/Tool/tar.py +73 -0
  175. data/lib/libv8/scons/engine/SCons/Tool/tex.py +813 -0
  176. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +175 -0
  177. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +53 -0
  178. data/lib/libv8/scons/engine/SCons/Tool/wix.py +99 -0
  179. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +130 -0
  180. data/lib/libv8/scons/engine/SCons/Tool/zip.py +99 -0
  181. data/lib/libv8/scons/engine/SCons/Util.py +1492 -0
  182. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +89 -0
  183. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +103 -0
  184. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +135 -0
  185. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +106 -0
  186. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +147 -0
  187. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +312 -0
  188. data/lib/libv8/scons/engine/SCons/Warnings.py +246 -0
  189. data/lib/libv8/scons/engine/SCons/__init__.py +49 -0
  190. data/lib/libv8/scons/engine/SCons/compat/__init__.py +237 -0
  191. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +150 -0
  192. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +45 -0
  193. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +45 -0
  194. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +76 -0
  195. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +45 -0
  196. data/lib/libv8/scons/engine/SCons/compat/_scons_sets.py +563 -0
  197. data/lib/libv8/scons/engine/SCons/compat/_scons_subprocess.py +1281 -0
  198. data/lib/libv8/scons/engine/SCons/cpp.py +589 -0
  199. data/lib/libv8/scons/engine/SCons/dblite.py +251 -0
  200. data/lib/libv8/scons/engine/SCons/exitfuncs.py +77 -0
  201. data/lib/libv8/scons/os_spawnv_fix.diff +83 -0
  202. data/lib/libv8/scons/scons-time.1 +1017 -0
  203. data/lib/libv8/scons/scons.1 +15219 -0
  204. data/lib/libv8/scons/sconsign.1 +208 -0
  205. data/lib/libv8/scons/script/scons +196 -0
  206. data/lib/libv8/scons/script/scons-time +1544 -0
  207. data/lib/libv8/scons/script/scons.bat +31 -0
  208. data/lib/libv8/scons/script/sconsign +513 -0
  209. data/lib/libv8/scons/setup.cfg +6 -0
  210. data/lib/libv8/scons/setup.py +425 -0
  211. data/lib/libv8/v8/.gitignore +35 -0
  212. data/lib/libv8/v8/AUTHORS +44 -0
  213. data/lib/libv8/v8/ChangeLog +2839 -0
  214. data/lib/libv8/v8/LICENSE +52 -0
  215. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  216. data/lib/libv8/v8/LICENSE.v8 +26 -0
  217. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  218. data/lib/libv8/v8/SConstruct +1478 -0
  219. data/lib/libv8/v8/build/README.txt +49 -0
  220. data/lib/libv8/v8/build/all.gyp +18 -0
  221. data/lib/libv8/v8/build/armu.gypi +32 -0
  222. data/lib/libv8/v8/build/common.gypi +144 -0
  223. data/lib/libv8/v8/build/gyp_v8 +145 -0
  224. data/lib/libv8/v8/include/v8-debug.h +395 -0
  225. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  226. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  227. data/lib/libv8/v8/include/v8-testing.h +104 -0
  228. data/lib/libv8/v8/include/v8.h +4124 -0
  229. data/lib/libv8/v8/include/v8stdint.h +53 -0
  230. data/lib/libv8/v8/preparser/SConscript +38 -0
  231. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  232. data/lib/libv8/v8/src/SConscript +368 -0
  233. data/lib/libv8/v8/src/accessors.cc +767 -0
  234. data/lib/libv8/v8/src/accessors.h +123 -0
  235. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  236. data/lib/libv8/v8/src/allocation.cc +122 -0
  237. data/lib/libv8/v8/src/allocation.h +143 -0
  238. data/lib/libv8/v8/src/api.cc +5845 -0
  239. data/lib/libv8/v8/src/api.h +574 -0
  240. data/lib/libv8/v8/src/apinatives.js +110 -0
  241. data/lib/libv8/v8/src/apiutils.h +73 -0
  242. data/lib/libv8/v8/src/arguments.h +118 -0
  243. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  244. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  245. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  246. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  247. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  248. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  249. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  250. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  251. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  252. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  253. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  254. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  255. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  256. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  257. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  258. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  259. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  260. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  261. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  262. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  263. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  264. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  265. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  266. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  267. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  268. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  269. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  270. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  271. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  272. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  273. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  274. data/lib/libv8/v8/src/array.js +1366 -0
  275. data/lib/libv8/v8/src/assembler.cc +1207 -0
  276. data/lib/libv8/v8/src/assembler.h +858 -0
  277. data/lib/libv8/v8/src/ast-inl.h +112 -0
  278. data/lib/libv8/v8/src/ast.cc +1146 -0
  279. data/lib/libv8/v8/src/ast.h +2188 -0
  280. data/lib/libv8/v8/src/atomicops.h +167 -0
  281. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  282. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  283. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  284. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  285. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  286. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  287. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  288. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  289. data/lib/libv8/v8/src/bignum.cc +768 -0
  290. data/lib/libv8/v8/src/bignum.h +140 -0
  291. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  292. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  293. data/lib/libv8/v8/src/builtins.cc +1707 -0
  294. data/lib/libv8/v8/src/builtins.h +371 -0
  295. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  296. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  297. data/lib/libv8/v8/src/cached-powers.h +65 -0
  298. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  299. data/lib/libv8/v8/src/char-predicates.h +67 -0
  300. data/lib/libv8/v8/src/checks.cc +110 -0
  301. data/lib/libv8/v8/src/checks.h +296 -0
  302. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  303. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  304. data/lib/libv8/v8/src/circular-queue.h +103 -0
  305. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  306. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  307. data/lib/libv8/v8/src/code.h +70 -0
  308. data/lib/libv8/v8/src/codegen.cc +231 -0
  309. data/lib/libv8/v8/src/codegen.h +84 -0
  310. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  311. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  312. data/lib/libv8/v8/src/compiler.cc +786 -0
  313. data/lib/libv8/v8/src/compiler.h +312 -0
  314. data/lib/libv8/v8/src/contexts.cc +347 -0
  315. data/lib/libv8/v8/src/contexts.h +391 -0
  316. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  317. data/lib/libv8/v8/src/conversions.cc +1131 -0
  318. data/lib/libv8/v8/src/conversions.h +135 -0
  319. data/lib/libv8/v8/src/counters.cc +93 -0
  320. data/lib/libv8/v8/src/counters.h +254 -0
  321. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  322. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  323. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  324. data/lib/libv8/v8/src/cpu.h +69 -0
  325. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  326. data/lib/libv8/v8/src/d8-debug.h +158 -0
  327. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  328. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  329. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  330. data/lib/libv8/v8/src/d8.cc +803 -0
  331. data/lib/libv8/v8/src/d8.gyp +91 -0
  332. data/lib/libv8/v8/src/d8.h +235 -0
  333. data/lib/libv8/v8/src/d8.js +2798 -0
  334. data/lib/libv8/v8/src/data-flow.cc +66 -0
  335. data/lib/libv8/v8/src/data-flow.h +205 -0
  336. data/lib/libv8/v8/src/date.js +1103 -0
  337. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  338. data/lib/libv8/v8/src/dateparser.cc +178 -0
  339. data/lib/libv8/v8/src/dateparser.h +266 -0
  340. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  341. data/lib/libv8/v8/src/debug-agent.h +129 -0
  342. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  343. data/lib/libv8/v8/src/debug.cc +3165 -0
  344. data/lib/libv8/v8/src/debug.h +1057 -0
  345. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  346. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  347. data/lib/libv8/v8/src/disasm.h +80 -0
  348. data/lib/libv8/v8/src/disassembler.cc +343 -0
  349. data/lib/libv8/v8/src/disassembler.h +58 -0
  350. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  351. data/lib/libv8/v8/src/diy-fp.h +117 -0
  352. data/lib/libv8/v8/src/double.h +238 -0
  353. data/lib/libv8/v8/src/dtoa.cc +103 -0
  354. data/lib/libv8/v8/src/dtoa.h +85 -0
  355. data/lib/libv8/v8/src/execution.cc +849 -0
  356. data/lib/libv8/v8/src/execution.h +297 -0
  357. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  358. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  359. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  360. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  361. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  362. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  363. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  364. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  365. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  366. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  367. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  368. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  369. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  370. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  371. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  372. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  373. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  374. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  375. data/lib/libv8/v8/src/factory.cc +1222 -0
  376. data/lib/libv8/v8/src/factory.h +442 -0
  377. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  378. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  379. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  380. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  381. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  382. data/lib/libv8/v8/src/flags.cc +551 -0
  383. data/lib/libv8/v8/src/flags.h +79 -0
  384. data/lib/libv8/v8/src/frames-inl.h +247 -0
  385. data/lib/libv8/v8/src/frames.cc +1243 -0
  386. data/lib/libv8/v8/src/frames.h +870 -0
  387. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  388. data/lib/libv8/v8/src/full-codegen.h +771 -0
  389. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  390. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  391. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  392. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  393. data/lib/libv8/v8/src/global-handles.cc +665 -0
  394. data/lib/libv8/v8/src/global-handles.h +284 -0
  395. data/lib/libv8/v8/src/globals.h +325 -0
  396. data/lib/libv8/v8/src/handles-inl.h +177 -0
  397. data/lib/libv8/v8/src/handles.cc +987 -0
  398. data/lib/libv8/v8/src/handles.h +382 -0
  399. data/lib/libv8/v8/src/hashmap.cc +230 -0
  400. data/lib/libv8/v8/src/hashmap.h +123 -0
  401. data/lib/libv8/v8/src/heap-inl.h +704 -0
  402. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  403. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  404. data/lib/libv8/v8/src/heap.cc +5930 -0
  405. data/lib/libv8/v8/src/heap.h +2268 -0
  406. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  407. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  408. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  409. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  410. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  411. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  412. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  413. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  414. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  415. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  416. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  417. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  418. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  419. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  420. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  421. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  422. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  423. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  424. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  425. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  426. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  427. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  428. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  429. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  430. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  431. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  432. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  433. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  434. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  435. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  436. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  437. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  438. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  439. data/lib/libv8/v8/src/ic-inl.h +130 -0
  440. data/lib/libv8/v8/src/ic.cc +2577 -0
  441. data/lib/libv8/v8/src/ic.h +736 -0
  442. data/lib/libv8/v8/src/inspector.cc +63 -0
  443. data/lib/libv8/v8/src/inspector.h +62 -0
  444. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  445. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  446. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  447. data/lib/libv8/v8/src/isolate.cc +1869 -0
  448. data/lib/libv8/v8/src/isolate.h +1382 -0
  449. data/lib/libv8/v8/src/json-parser.cc +504 -0
  450. data/lib/libv8/v8/src/json-parser.h +161 -0
  451. data/lib/libv8/v8/src/json.js +342 -0
  452. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  453. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  454. data/lib/libv8/v8/src/list-inl.h +212 -0
  455. data/lib/libv8/v8/src/list.h +174 -0
  456. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  457. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  458. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  459. data/lib/libv8/v8/src/lithium.cc +190 -0
  460. data/lib/libv8/v8/src/lithium.h +597 -0
  461. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  462. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  463. data/lib/libv8/v8/src/liveedit.h +180 -0
  464. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  465. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  466. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  467. data/lib/libv8/v8/src/log-inl.h +59 -0
  468. data/lib/libv8/v8/src/log-utils.cc +428 -0
  469. data/lib/libv8/v8/src/log-utils.h +231 -0
  470. data/lib/libv8/v8/src/log.cc +1993 -0
  471. data/lib/libv8/v8/src/log.h +476 -0
  472. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  473. data/lib/libv8/v8/src/macros.py +178 -0
  474. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  475. data/lib/libv8/v8/src/mark-compact.h +506 -0
  476. data/lib/libv8/v8/src/math.js +264 -0
  477. data/lib/libv8/v8/src/messages.cc +179 -0
  478. data/lib/libv8/v8/src/messages.h +113 -0
  479. data/lib/libv8/v8/src/messages.js +1096 -0
  480. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  481. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  482. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  483. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  484. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  485. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  486. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  487. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  488. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  489. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  490. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  491. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  492. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  493. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  494. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  495. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  496. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  497. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  498. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  499. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  500. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  501. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  502. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  503. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  504. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  505. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  506. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  507. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  508. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  509. data/lib/libv8/v8/src/natives.h +64 -0
  510. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  511. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  512. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  513. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  514. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  515. data/lib/libv8/v8/src/objects.cc +10585 -0
  516. data/lib/libv8/v8/src/objects.h +6838 -0
  517. data/lib/libv8/v8/src/parser.cc +4997 -0
  518. data/lib/libv8/v8/src/parser.h +765 -0
  519. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  520. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  521. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  522. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  523. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  524. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  525. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  526. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  527. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  528. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  529. data/lib/libv8/v8/src/platform-tls.h +50 -0
  530. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  531. data/lib/libv8/v8/src/platform.h +667 -0
  532. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  533. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  534. data/lib/libv8/v8/src/preparse-data.h +225 -0
  535. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  536. data/lib/libv8/v8/src/preparser.cc +1450 -0
  537. data/lib/libv8/v8/src/preparser.h +493 -0
  538. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  539. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  540. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  541. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  542. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  543. data/lib/libv8/v8/src/property.cc +105 -0
  544. data/lib/libv8/v8/src/property.h +365 -0
  545. data/lib/libv8/v8/src/proxy.js +83 -0
  546. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  547. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  548. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  549. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  550. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  551. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  552. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  553. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  554. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  555. data/lib/libv8/v8/src/regexp.js +483 -0
  556. data/lib/libv8/v8/src/rewriter.cc +360 -0
  557. data/lib/libv8/v8/src/rewriter.h +50 -0
  558. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  559. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  560. data/lib/libv8/v8/src/runtime.cc +12227 -0
  561. data/lib/libv8/v8/src/runtime.h +652 -0
  562. data/lib/libv8/v8/src/runtime.js +649 -0
  563. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  564. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  565. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  566. data/lib/libv8/v8/src/scanner-base.h +670 -0
  567. data/lib/libv8/v8/src/scanner.cc +345 -0
  568. data/lib/libv8/v8/src/scanner.h +146 -0
  569. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  570. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  571. data/lib/libv8/v8/src/scopes.cc +1150 -0
  572. data/lib/libv8/v8/src/scopes.h +507 -0
  573. data/lib/libv8/v8/src/serialize.cc +1574 -0
  574. data/lib/libv8/v8/src/serialize.h +589 -0
  575. data/lib/libv8/v8/src/shell.h +55 -0
  576. data/lib/libv8/v8/src/simulator.h +43 -0
  577. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  578. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  579. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  580. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  581. data/lib/libv8/v8/src/snapshot.h +91 -0
  582. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  583. data/lib/libv8/v8/src/spaces.cc +3145 -0
  584. data/lib/libv8/v8/src/spaces.h +2369 -0
  585. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  586. data/lib/libv8/v8/src/splay-tree.h +205 -0
  587. data/lib/libv8/v8/src/string-search.cc +41 -0
  588. data/lib/libv8/v8/src/string-search.h +568 -0
  589. data/lib/libv8/v8/src/string-stream.cc +592 -0
  590. data/lib/libv8/v8/src/string-stream.h +191 -0
  591. data/lib/libv8/v8/src/string.js +994 -0
  592. data/lib/libv8/v8/src/strtod.cc +440 -0
  593. data/lib/libv8/v8/src/strtod.h +40 -0
  594. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  595. data/lib/libv8/v8/src/stub-cache.h +924 -0
  596. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  597. data/lib/libv8/v8/src/token.cc +63 -0
  598. data/lib/libv8/v8/src/token.h +288 -0
  599. data/lib/libv8/v8/src/type-info.cc +507 -0
  600. data/lib/libv8/v8/src/type-info.h +272 -0
  601. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  602. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  603. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  604. data/lib/libv8/v8/src/unicode.cc +1624 -0
  605. data/lib/libv8/v8/src/unicode.h +280 -0
  606. data/lib/libv8/v8/src/uri.js +408 -0
  607. data/lib/libv8/v8/src/utils-inl.h +48 -0
  608. data/lib/libv8/v8/src/utils.cc +371 -0
  609. data/lib/libv8/v8/src/utils.h +800 -0
  610. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  611. data/lib/libv8/v8/src/v8-counters.h +314 -0
  612. data/lib/libv8/v8/src/v8.cc +213 -0
  613. data/lib/libv8/v8/src/v8.h +131 -0
  614. data/lib/libv8/v8/src/v8checks.h +64 -0
  615. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  616. data/lib/libv8/v8/src/v8globals.h +512 -0
  617. data/lib/libv8/v8/src/v8memory.h +82 -0
  618. data/lib/libv8/v8/src/v8natives.js +1310 -0
  619. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  620. data/lib/libv8/v8/src/v8threads.cc +464 -0
  621. data/lib/libv8/v8/src/v8threads.h +165 -0
  622. data/lib/libv8/v8/src/v8utils.h +319 -0
  623. data/lib/libv8/v8/src/variables.cc +114 -0
  624. data/lib/libv8/v8/src/variables.h +167 -0
  625. data/lib/libv8/v8/src/version.cc +116 -0
  626. data/lib/libv8/v8/src/version.h +68 -0
  627. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  628. data/lib/libv8/v8/src/vm-state.h +71 -0
  629. data/lib/libv8/v8/src/win32-headers.h +96 -0
  630. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  631. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  632. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  633. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  634. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  635. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  636. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  637. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  638. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  639. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  640. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  641. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  642. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  643. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  644. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  645. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  646. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  647. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  648. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  649. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  650. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  651. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  652. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  653. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  654. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  655. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  656. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  657. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  658. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  659. data/lib/libv8/v8/src/zone-inl.h +140 -0
  660. data/lib/libv8/v8/src/zone.cc +196 -0
  661. data/lib/libv8/v8/src/zone.h +240 -0
  662. data/lib/libv8/v8/tools/codemap.js +265 -0
  663. data/lib/libv8/v8/tools/consarray.js +93 -0
  664. data/lib/libv8/v8/tools/csvparser.js +78 -0
  665. data/lib/libv8/v8/tools/disasm.py +92 -0
  666. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  667. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  668. data/lib/libv8/v8/tools/gcmole/README +62 -0
  669. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  670. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  671. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  672. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  673. data/lib/libv8/v8/tools/grokdump.py +841 -0
  674. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  675. data/lib/libv8/v8/tools/js2c.py +364 -0
  676. data/lib/libv8/v8/tools/jsmin.py +280 -0
  677. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  678. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  679. data/lib/libv8/v8/tools/logreader.js +185 -0
  680. data/lib/libv8/v8/tools/mac-nm +18 -0
  681. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  682. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  683. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  684. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  685. data/lib/libv8/v8/tools/presubmit.py +305 -0
  686. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  687. data/lib/libv8/v8/tools/profile.js +751 -0
  688. data/lib/libv8/v8/tools/profile_view.js +219 -0
  689. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  690. data/lib/libv8/v8/tools/splaytree.js +316 -0
  691. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  692. data/lib/libv8/v8/tools/test.py +1510 -0
  693. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  694. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  695. data/lib/libv8/v8/tools/utils.py +96 -0
  696. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  697. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  698. data/lib/libv8/version.rb +4 -0
  699. data/libv8.gemspec +31 -0
  700. metadata +800 -0
@@ -0,0 +1,2136 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #if defined(V8_TARGET_ARCH_IA32)
31
+
32
+ #include "bootstrapper.h"
33
+ #include "codegen.h"
34
+ #include "debug.h"
35
+ #include "runtime.h"
36
+ #include "serialize.h"
37
+
38
+ namespace v8 {
39
+ namespace internal {
40
+
41
+ // -------------------------------------------------------------------------
42
+ // MacroAssembler implementation.
43
+
44
// Construct a MacroAssembler emitting into |buffer| of |size| bytes.
// |arg_isolate| may be NULL during early bootstrapping; in that case the
// code object handle is left unset because no heap is available yet.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true) {
  if (isolate() != NULL) {
    // Placeholder; patched to the real code object once it exists
    // (checked against undefined in EnterFrame's debug code).
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
53
+
54
+
55
// Write-barrier core: marks the page region containing |addr| dirty.
// |object| must point into the same page as |addr|; both |object| and
// |addr| are clobbered. Callers must already have excluded new-space
// objects (debug builds verify this and abort otherwise).
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute number of region covering addr. See Page::GetRegionNumberForAddress
  // method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set dirty mark for region.
  // Bit tests with a memory operand should be avoided on Intel processors,
  // as they usually have long latency and multiple uops. We load the bit base
  // operand to a register at first and store it back after bit set.
  mov(scratch, Operand(object, Page::kDirtyFlagOffset));
  bts(Operand(scratch), addr);
  mov(Operand(object, Page::kDirtyFlagOffset), scratch);
}
83
+
84
+
85
// Clamp the double in |input_reg| into [0, 255] and leave the integer
// result in |result_reg|. Rounding is "add 0.5 then truncate"; values
// below zero yield 0 and values that overflow the byte range yield 255.
// NOTE(review): ucomisd sets CF on unordered, so NaN presumably takes
// the below-zero path and maps to 0 — confirm against spec users.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  // result = 0 if input < 0 (or unordered).
  ExternalReference zero_ref = ExternalReference::address_of_zero();
  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  // Round to nearest by adding 0.5 and truncating toward zero.
  ExternalReference half_ref = ExternalReference::address_of_one_half();
  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
  addsd(scratch_reg, input_reg);
  cvttsd2si(result_reg, Operand(scratch_reg));
  // Any bits above the low byte mean the value exceeded 255: saturate.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}
103
+
104
+
105
// Clamp the signed 32-bit integer in |reg| into [0, 255] in place.
// Uses a branch-light trick: setcc/dec produce 0x00 for negative inputs
// and 0xFF for positive out-of-range inputs.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already fits in a byte.
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
113
+
114
+
115
// Branch to |branch| if |object| is (cc == equal) / is not
// (cc == not_equal) in the new space. |scratch| is clobbered.
// Two encodings exist because code that may be serialized into a
// snapshot cannot bake in the new-space address as a raw constant.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance branch_near) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch),
         Immediate(ExternalReference::new_space_mask(isolate())));
    cmp(Operand(scratch),
        Immediate(ExternalReference::new_space_start(isolate())));
    j(cc, branch, branch_near);
  } else {
    // Fast path: fold subtract-and-mask into a single lea + and.
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start(isolate()).address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, isolate()->heap()->NewSpaceMask());
    j(cc, branch, branch_near);
  }
}
140
+
141
+
142
// Record a write of |value| into the field at |object|+|offset|.
// When |offset| is 0, |scratch| is instead expected to hold the smi
// array index of the written element (it is used as the lea index
// below). All three registers are clobbered (zapped in debug builds).
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done, Label::kNear);

  InNewSpace(object, value, equal, &done, Label::kNear);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
187
+
188
+
189
// Record a write of |value| into the slot whose absolute address is
// already in |address|. Skips the barrier for smi values and new-space
// objects. All three registers are clobbered (zapped in debug builds).
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
215
+
216
+
217
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emit a call into the runtime's debug-break handler via CEntryStub.
// eax carries the (zero) argument count, ebx the runtime entry point.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
225
+
226
+
227
+ void MacroAssembler::Set(Register dst, const Immediate& x) {
228
+ if (x.is_zero()) {
229
+ xor_(dst, Operand(dst)); // Shorter than mov.
230
+ } else {
231
+ mov(dst, x);
232
+ }
233
+ }
234
+
235
+
236
// Store immediate |x| into memory operand |dst|. No xor shortcut is
// possible for a memory destination.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
239
+
240
+
241
+ bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
242
+ static const int kMaxImmediateBits = 17;
243
+ if (x.rmode_ != RelocInfo::NONE) return false;
244
+ return !is_intn(x.x_, kMaxImmediateBits);
245
+ }
246
+
247
+
248
+ void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
249
+ if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
250
+ Set(dst, Immediate(x.x_ ^ jit_cookie()));
251
+ xor_(dst, jit_cookie());
252
+ } else {
253
+ Set(dst, x);
254
+ }
255
+ }
256
+
257
+
258
// Push |x|; like SafeSet, large immediates are pushed XOR-masked with
// the JIT cookie and then unmasked in place on the stack.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));  // Unmask on the stack.
  } else {
    push(x);
  }
}
266
+
267
+
268
// Compare the instance type of |heap_object| against |type|, leaving
// the object's map in |map| for further use. Sets flags for j(...).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
274
+
275
+
276
// Compare the instance-type byte of |map| against |type| (byte compare;
// instance types fit in 8 bits). Sets flags for a following j(...).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
280
+
281
+
282
// Jump to |fail| if |obj|'s map is not |map|. With DO_SMI_CHECK, smis
// also branch to |fail| (a smi has no map to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
292
+
293
+
294
// Jump to code |success| if |obj|'s map is |map|; otherwise fall
// through. The inverse of CheckMap: the match is the branch taken.
void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
307
+
308
+
309
// Test whether |heap_object| is a string; returns the condition (zero)
// that holds when it is. Leaves the map in |map| and the instance type
// in |instance_type| for callers that need them.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  // Strings have the not-string bit clear, so "zero" means string.
  return zero;
}
318
+
319
+
320
// Jump to |fail| unless |heap_object| is a JSObject. Loads the map
// into |map| and delegates the type-range check.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
327
+
328
+
329
// Jump to |fail| unless |map|'s instance type lies in
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. Uses the classic
// subtract-then-unsigned-compare trick to do the range test in one cmp.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
337
+
338
+
339
// Compare the two values on top of the x87 FPU stack and pop both,
// leaving the result in EFLAGS. Uses fucomip when CMOV (and thus
// fcomi-family instructions) is available; otherwise falls back to
// fnstsw/sahf, preserving eax around the status-word shuffle.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    // fucomip popped one operand; drop the remaining one.
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
352
+
353
+
354
// Debug helper: abort unless |object| is a number (smi or heap number).
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);  // Smis are numbers.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
363
+
364
+
365
// Debug helper: abort unless |object| carries the smi tag.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
369
+
370
+
371
// Debug helper: abort unless |object| is a string. First rules out
// smis, then checks the instance type is below FIRST_NONSTRING_TYPE.
// |object| is preserved via push/pop around the map load.
void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");  // A smi is not a string.
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
380
+
381
+
382
// Debug helper: abort if |object| carries the smi tag.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
386
+
387
+
388
// Build a standard internal frame: saved ebp, context (esi), a smi
// frame-type marker, and the code object. Debug builds verify the code
// object placeholder was patched (see the constructor).
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
399
+
400
+
401
// Tear down a frame built by EnterFrame. Debug builds check the marker
// slot to catch mismatched Enter/Leave frame types.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();  // mov esp, ebp; pop ebp.
}
409
+
410
+
411
// First half of exit-frame construction (shared by EnterExitFrame and
// EnterApiExitFrame): lays out ebp/sp slots, pushes the code object,
// and records ebp and esi in the isolate's top-frame slots so the
// runtime can walk back into JS.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  ExternalReference context_address(Isolate::k_context_address,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
432
+
433
+
434
// Second half of exit-frame construction: optionally spills every XMM
// register below the frame, reserves |argc| argument slots, aligns esp
// to the OS frame alignment, and patches the saved entry-sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(Operand(esp), Immediate(space));
    // Doubles live just below the two fixed slots pushed in the prologue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(Operand(esp), Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
459
+
460
+
461
// Enter an exit frame for a call to C code from JS. On entry eax holds
// the argument count; edi/esi are set up as callee-saved argc/argv.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
472
+
473
+
474
// Enter an exit frame for an API (callback) call with |argc| argument
// slots; XMM registers are never saved on this path.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
478
+
479
+
480
// Tear down a frame built by EnterExitFrame: optionally restores XMM
// registers, drops the caller's arguments and receiver (esi points at
// them), and re-pushes the return address. |save_doubles| must match
// the value passed to EnterExitFrame.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}
503
+
504
// Shared exit-frame teardown tail: restores esi from the isolate's
// saved context slot and clears the top-frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  // Poison the slot so stale reads are caught in debug builds.
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
517
+
518
+
519
// Tear down a frame built by EnterApiExitFrame. Unlike LeaveExitFrame
// this does not pop caller arguments; it simply unwinds to saved ebp.
void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, Operand(ebp));
  pop(ebp);

  LeaveExitFrameEpilogue();
}
525
+
526
+
527
// Push a stack handler (state, ebp, next-handler link; pc is already on
// top) and make it the isolate's current handler. JS-entry handlers
// store a NULL frame pointer so Throw can detect them.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                 isolate())));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())),
      esp);
}
555
+
556
+
557
// Unlink the current stack handler: restore the next-handler link into
// the isolate's handler slot and drop the rest of the handler frame.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
                                                isolate())));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
563
+
564
+
565
// Throw the exception in |value|: unwind to the current stack handler,
// relink the handler chain, restore the context (or NULL at a JS entry
// frame), and return into the handler's saved pc. Does not return to
// the caller.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Restore next handler and frame pointer, discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // Remove state.

  // Before returning we restore the context from the frame pointer if
  // not NULL. The frame pointer is NULL in the exception handler of
  // a JS entry frame.
  Set(esi, Immediate(0));  // Tentatively set context pointer to NULL.
  Label skip;
  cmp(ebp, 0);
  j(equal, &skip, Label::kNear);
  mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  bind(&skip);

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);  // "Return" into the handler's saved pc.
}
599
+
600
+
601
// Throw an uncatchable exception (termination or OOM): unwind past all
// TRY handlers to the innermost ENTRY handler and return into it with
// a NULL context. For OUT_OF_MEMORY, also records the pending OOM
// failure in the isolate. Does not return to the caller.
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done, Label::kNear);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  mov(esp, Operand(esp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::k_external_caught_exception_address,
        isolate());
    mov(eax, false);
    mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
    mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Clear the context pointer.
  Set(esi, Immediate(0));

  // Restore fp from handler and discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  pop(ebp);
  pop(edx);  // State.

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  ret(0);  // "Return" into the ENTRY handler's saved pc.
}
659
+
660
+
661
// Security check for accessing |holder_reg| (a global proxy) from the
// currently executing context: jump to |miss| unless the two contexts
// are identical or share the same security token. |scratch| is
// clobbered; |holder_reg| is preserved via push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  // Compare the security tokens of the two contexts.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}
727
+
728
+
729
// Loads the new-space allocation top into |result|. If the caller passed
// RESULT_CONTAINS_TOP in |flags|, |result| already holds the top (verified
// in debug builds) and |scratch| must be no_reg. Otherwise, when |scratch|
// is supplied it is left holding the address of the allocation-top
// variable so UpdateAllocationTopHelper can store through it later.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    // Keep the address in scratch so the later top update can reuse it.
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
755
+
756
+
757
// Stores |result_end| as the new new-space allocation top. If |scratch| is
// valid it must already hold the address of the allocation-top variable
// (set up by LoadAllocationTopHelper); otherwise the store goes through
// the external reference directly. Debug builds verify object alignment.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
774
+
775
+
776
// Allocates |object_size| bytes in new space, leaving the (optionally
// heap-tagged, per TAG_OBJECT) object address in |result|. Jumps to
// |gc_required| on exhaustion or when inline allocation is disabled.
// |result_end| and |scratch| may be no_reg; when result_end is invalid,
// |result| itself is used to compute the new top and then rewound.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // If no separate end register was given, result doubles as the top
  // register and is corrected after the top update below.
  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(Operand(top_reg), Immediate(object_size));
  // The add may wrap around the address space; treat carry as exhaustion.
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; subtract back to the object
    // start, folding in the heap-object tag when requested.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
829
+
830
+
831
// Allocates header_size + element_count * element_size bytes in new space.
// The (optionally tagged) object address ends up in |result| and the
// allocation end in |result_end|. Jumps to |gc_required| on failure.
// |element_count| is not modified. The caller is responsible for ensuring
// the size expression does not overflow (see comment below).
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
877
+
878
+
879
// Allocates |object_size| (a register holding a byte count) in new space.
// The (optionally tagged) object address ends up in |result|, the new top
// in |result_end|. Jumps to |gc_required| on failure. |object_size| is
// left unchanged unless it aliases |result_end|.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  // Carry means the addition wrapped: definitely out of space.
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
922
+
923
+
924
// Undoes the most recent new-space allocation by resetting the allocation
// top to |object|'s (untagged) address. Only valid when |object| was the
// last object allocated; debug builds check it lies below the current top.
// Note: |object| is clobbered (its tag bits are masked off).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
936
+
937
+
938
// Allocates a HeapNumber in new space and installs its map. The value
// field is left uninitialized; the tagged object is returned in |result|.
// Jumps to |gc_required| when allocation fails.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
954
+
955
+
956
// Allocates a sequential two-byte string of |length| characters in new
// space, initializing its map, smi-tagged length, and empty hash field.
// Character storage is left uninitialized. |length| is preserved;
// scratch1-3 are clobbered. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  // (length, length, times_1) computes length * 2 in a single lea.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
989
+
990
+
991
// Allocates a sequential ASCII string of |length| (register) characters in
// new space, initializing its map, smi-tagged length, and empty hash field.
// Character storage is left uninitialized. |length| is preserved;
// scratch1-3 are clobbered. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1024
+
1025
+
1026
// Allocates a sequential ASCII string whose length is a compile-time
// constant (must be > 0). Size is computed statically via SizeFor, so no
// runtime rounding is needed. Map, smi length, and hash field are set;
// character storage is left uninitialized.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1049
+
1050
+
1051
// Allocates a (two-byte) ConsString cell in new space and installs its
// map. First/second/length/hash fields are left uninitialized and must be
// filled in by the caller. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1067
+
1068
+
1069
// Allocates an ASCII ConsString cell in new space and installs its map.
// All other fields are left uninitialized for the caller to fill in.
// Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}
1085
+
1086
+
1087
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
//
// Register requirements (for the rep_movs fast path): source must be esi,
// destination edi, length ecx — enforced by the ASSERTs below. The fast
// path also requires source to be 4-byte aligned and length >= 4 (the tail
// load at offset -4 reads the last dword of the range).
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(Operand(length), Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  // Advance destination past the odd tail bytes handled above so the
  // "incremented by length" postcondition holds.
  and_(Operand(scratch), Immediate(0x3));
  add(destination, Operand(scratch));
  jmp(&done);

  bind(&short_string);
  test(length, Operand(length));
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1134
+
1135
+
1136
// Jumps to |then_label| when a multiplication produced -0: i.e. when
// |result| is zero and the (surviving) operand |op| is negative.
// Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);
  // Result is zero: the product is -0 iff the operand was negative.
  test(op, Operand(op));
  j(sign, then_label);
  bind(&ok);
}
1146
+
1147
+
1148
// Jumps to |then_label| when a zero |result| could represent -0, i.e. when
// either of the two original operands was negative (their sign bits are
// combined with an OR into |scratch|). Falls through otherwise.
// |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);
  // Result is zero; -0 iff either operand had its sign bit set.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label);
  bind(&ok);
}
1161
+
1162
+
1163
// Loads the prototype of JSFunction |function| into |result|, jumping to
// |miss| when |function| is a smi, not a function, or its prototype is the
// hole (not yet materialized). Handles the non-instance-prototype case by
// reading the prototype from the initial map's constructor field.
// |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  // (CmpObjectType leaves the map in |result| for the bit-field probe.)
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1208
+
1209
+
1210
// Emits a call to |stub|'s generated code, recording |ast_id| with the
// relocation info. Not permitted while generating certain stubs
// (guarded by allow_stub_calls()).
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1214
+
1215
+
1216
// Like CallStub, but uses TryGetCode so that a failed stub-code allocation
// is propagated as a failure MaybeObject instead of crashing. Returns the
// stub code object on success.
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1225
+
1226
+
1227
// Emits a tail call (jump) to |stub|'s generated code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1231
+
1232
+
1233
// Like TailCallStub, but propagates a stub-code allocation failure as a
// failure MaybeObject. Returns the stub code object on success.
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1242
+
1243
+
1244
// Returns from a stub, popping |argc| - 1 arguments off the stack (the
// receiver slot is popped by ret itself via the return address mechanics).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1248
+
1249
+
1250
// Handles an illegal runtime-call arity mismatch: drops |num_arguments|
// stack slots and leaves undefined in eax as the call result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}
1256
+
1257
+
1258
// Extracts the cached array index from a string hash-field value in |hash|
// and leaves it as a smi in |index|. |hash| is clobbered; |index| may
// alias |hash|.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift right so exactly kSmiTagSize low bits remain: the result is the
  // index already in smi form.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1275
+
1276
+
1277
// Convenience overload: resolves the runtime function for |id| and
// delegates to CallRuntime(const Runtime::Function*, int).
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1280
+
1281
+
1282
// Calls the runtime function |id| through a CEntryStub configured to save
// double registers across the call. Argument count is taken from the
// runtime function's declared nargs. Clobbers eax and ebx (CEntry calling
// convention: eax = argc, ebx = entry point).
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}
1290
+
1291
+
1292
// Convenience overload of TryCallRuntime taking a function id; propagates
// stub-code allocation failures via the MaybeObject return.
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1296
+
1297
+
1298
// Calls runtime function |f| with |num_arguments| stack arguments through
// the CEntry stub (eax = argc, ebx = C entry point). If the declared arity
// does not match, emits the illegal-operation fallback instead of calling.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}
1317
+
1318
+
1319
// Allocation-failure-propagating variant of CallRuntime: returns a failure
// MaybeObject when the CEntry stub code cannot be allocated, and some
// non-failure object (undefined) when the call was emitted or degraded to
// the illegal-operation fallback.
MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return isolate()->heap()->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1337
+
1338
+
1339
// Calls the C function behind |ref| with |num_arguments| stack arguments
// through the CEntry stub (eax = argc, ebx = target address).
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1347
+
1348
+
1349
// Tail-calls the external reference |ext| with |num_arguments| arguments.
// |result_size| is unused on ia32 (results fit in registers) but kept for
// signature parity with other platforms.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1359
+
1360
+
1361
// Allocation-failure-propagating variant of TailCallExternalReference.
// |result_size| is unused on ia32; kept for cross-platform signature parity.
MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}
1370
+
1371
+
1372
// Tail-calls the runtime function |fid| by delegating to
// TailCallExternalReference with the resolved external reference.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
1379
+
1380
+
1381
// Allocation-failure-propagating variant of TailCallRuntime.
MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(
      ExternalReference(fid, isolate()), num_arguments, result_size);
}
1387
+
1388
+
1389
// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as a value of location_ field in a
// register eax.
// If false, it is returned as a pointer to a preallocated by caller memory
// region. Pointer to this region should be passed to a function as an
// implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


// Returns the stack operand for API-call argument |index|, accounting for
// the extra hidden out-pointer slot that is present at esp[0] when handles
// are returned indirectly (see kReturnHandlesDirectly above).
Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}
1406
+
1407
+
1408
// Sets up an API exit frame for a call with |argc| arguments. When handles
// are returned indirectly, two extra slots are reserved (the return-value
// cell and a pointer to it at esp[0]) and |scratch| is clobbered wiring
// them up; otherwise |scratch| is untouched.
void MacroAssembler::PrepareCallApiFunction(int argc, Register scratch) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output cell
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects
    // so the out cell will have to be popped explicitly after returning
    // from the function. The out cell contains Handle.

    // pointer to out cell.
    lea(scratch, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), scratch);  // output.
    if (emit_debug_code()) {
      mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // out cell.
    }
  }
}
1437
+
1438
+
1439
// Calls an API function, handling the HandleScope bookkeeping around the
// call: bumps the scope level, calls |function|, unwraps the returned
// handle into eax (undefined when the handle is empty), restores the
// previous scope (deleting extensions if the limit moved), promotes any
// scheduled exception, then leaves the exit frame and returns, dropping
// |stack_space| stack slots. Returns a failure MaybeObject only when
// emitting the promote-exception tail call itself fails.
// Clobbers ebx and edi (used as callee-save scratch across the C call).
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle);
  // It was non-zero. Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the call result in edi across the extension-deletion call.
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1516
+
1517
+
1518
// Tail-calls into the runtime through the CEntry stub with the entry
// point |ext| in ebx. eax (argc) must already be set by the caller.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1524
+
1525
+
1526
// Allocation-failure-propagating variant of JumpToExternalReference.
MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  return TryTailCallStub(&ces);
}
1533
+
1534
+
1535
// Materializes the call-kind marker smi expected by invoked code:
// a non-zero smi for CALL_AS_FUNCTION, smi zero otherwise. |dst| must be
// ecx per the calling convention.
void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte. (mov_b only touches the low byte; the rest of ecx is
    // assumed already cleared/ignored by the callee's smi check.)
    mov_b(Operand(dst), 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, Operand(dst));
  }
}
1550
+
1551
+
1552
// Emits the argument-count check preceding a function invocation. When
// expected and actual counts provably match, falls straight through.
// Otherwise wires up eax (actual) / ebx (expected) / edx (code entry) and
// calls or jumps to the ArgumentsAdaptorTrampoline; after a call-flavored
// adaptation it jumps to |done|. |call_wrapper| brackets the adaptor call,
// and |call_kind| is forwarded in ecx via SetCallKind.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    // The adaptor expects the code entry in edx.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      jmp(done, done_near);
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1622
+
1623
+
1624
// Invoke (call or tail-jump) the code at |code| with argument-count
// adaptation handled by InvokePrologue. The call-kind marker is always
// loaded into ecx immediately before transferring control.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, Label::kNear, call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(ecx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code);
  }
  // Target for the prologue's shortcut when the adaptor already completed
  // the invocation.
  bind(&done);
}
1646
+
1647
+
1648
// Same as the Operand overload, but the target is a Code handle invoked with
// the given relocation mode. The code operand passed to InvokePrologue is a
// dummy; the prologue uses the |code| constant instead (non-null handle).
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  Operand dummy(eax);  // Ignored: code_constant is non-null.
  InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code, rmode));
    SetCallKind(ecx, call_kind);
    call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code, rmode);
  }
  bind(&done);
}
1671
+
1672
+
1673
// Invoke the JSFunction in |fun| (must be edi per the calling convention).
// Loads the context into esi, the formal parameter count into ebx, and
// dispatches through the function's code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The count is stored as a smi; untag it to get the raw integer.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
1688
+
1689
+
1690
// Invoke a statically-known (already compiled) JSFunction. The function
// object is materialized in edi and its context in esi, matching the
// register-based overload's convention.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, call_wrapper);
  } else {
    // Without Crankshaft the code object is fixed, so call it directly.
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, call_wrapper);
  }
}
1712
+
1713
+
1714
// Invoke a JavaScript builtin by id. The builtin function object is loaded
// into edi and invoked through its code entry.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper);
}
1728
+
1729
// Load the builtin JSFunction with the given id into |target|, reached via
// the current context's global object and its builtins object.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
1737
+
1738
// Load the code entry point of the builtin with the given id into |target|.
// Clobbers edi (holds the builtin function object), so target must differ.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1745
+
1746
+
1747
// Load into |dst| the context that is |context_chain_length| levels up from
// the current context (esi). Length 0 yields the current context itself.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a 'with' context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
    Check(equal, "Yo dawg, I heard you liked function contexts "
                 "so I put function contexts in all your contexts");
  }
}
1774
+
1775
+
1776
// Load the global-context function at slot |index| into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1784
+
1785
+
1786
// Load the initial map of the global function |function| into |map|.
// In debug code, verifies the loaded value is actually a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else is not a map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1799
+
1800
+
1801
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
1806
+
1807
+
1808
// Store the immediate |src| into the safepoint stack slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
1811
+
1812
+
1813
// Load into |dst| the value saved in the safepoint stack slot for |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
1816
+
1817
+
1818
// Return the esp-relative operand addressing |reg|'s safepoint stack slot.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
1821
+
1822
+
1823
+ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
1824
+ // The registers are pushed starting with the lowest encoding,
1825
+ // which means that lowest encodings are furthest away from
1826
+ // the stack pointer.
1827
+ ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1828
+ return kNumSafepointRegisters - reg_code - 1;
1829
+ }
1830
+
1831
+
1832
// Emit a plain return that pops no arguments off the stack.
void MacroAssembler::Ret() {
  ret(0);
}
1835
+
1836
+
1837
// Emit a return that additionally drops |bytes_dropped| bytes of arguments.
// Uses `ret imm16` when the amount fits; otherwise moves the return address
// past the dropped bytes via |scratch| (which is clobbered).
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    // Pop the return address, free the argument space, push the return
    // address back, and do a plain return.
    pop(scratch);
    add(Operand(esp), Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
1847
+
1848
+
1849
+
1850
+
1851
// Emit code to drop |stack_elements| words from the stack. No code is
// emitted for a non-positive count.
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}
1856
+
1857
+
1858
// Emit a register-to-register move; emits nothing when src and dst coincide.
void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}
1863
+
1864
+
1865
// Emit code loading the object handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1868
+
1869
+
1870
// Emit code setting a native stats counter to |value|; a no-op unless
// native code counters are enabled at compile time and the counter is on.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1875
+
1876
+
1877
// Emit code incrementing a native stats counter by |value| (> 0).
// Uses `inc` for the common +1 case, `add` otherwise.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
1888
+
1889
+
1890
// Emit code decrementing a native stats counter by |value| (> 0).
// Uses `dec` for the common -1 case, `sub` otherwise.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
1901
+
1902
+
1903
// Emit code incrementing a native stats counter only when condition |cc|
// holds. The flags register is saved/restored (pushfd/popfd) around the
// update so the caller's condition codes survive the inc/add.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1916
+
1917
+
1918
// Emit code decrementing a native stats counter only when condition |cc|
// holds; preserves the flags register around the update (see the matching
// conditional IncrementCounter).
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1931
+
1932
+
1933
// Emit a runtime check that aborts with |msg| unless |cc| holds — but only
// in builds that emit debug code; otherwise emits nothing.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}
1936
+
1937
+
1938
// Debug-only check that |elements| is a fast-elements backing store, i.e.
// its map is either the fixed array map or the copy-on-write array map.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1952
+
1953
+
1954
// Emit an unconditional runtime check: aborts with |msg| unless |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}
1961
+
1962
+
1963
// Emit code verifying esp meets the OS activation-frame alignment; traps
// with int3 on misalignment. Emits nothing when no extra alignment beyond
// a pointer is required.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1976
+
1977
+
1978
// Emit code that aborts execution via Runtime::kAbort, passing the message
// pointer smuggled past the GC as two smis (see comment below), then traps.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  // eax is pushed as filler; the runtime call receives the aligned pointer
  // and the alignment delta (encoded as a smi).
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
2003
+
2004
+
2005
// Load the instance descriptor array of |map| into |descriptors|. The field
// holds either a descriptor array or a smi (bit field 3); in the smi case
// substitute the canonical empty descriptor array.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}
2014
+
2015
+
2016
// Emit code materializing 2^power as a double in XMM register |dst| by
// building the IEEE-754 exponent directly: biased exponent in |scratch|,
// then shifted into position above the mantissa bits.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
2025
+
2026
+
2027
// Jump to |failure| unless |instance_type| describes a sequential ASCII
// string. |scratch| is clobbered (may alias instance_type).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits that distinguish string-ness, representation
  // (sequential vs. cons/external) and encoding (ASCII vs. two-byte).
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
2039
+
2040
+
2041
// Jump to |failure| unless both object1 and object2 are flat (sequential)
// ASCII strings. Both scratch registers are clobbered. The smi check is
// combined: and-ing the two values has the smi tag clear iff at least one
// is a smi.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  // The assert guarantees the two 3-bit-shifted fields cannot overlap.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // lea computes scratch1 + scratch2 * 8, packing both masked types into
  // one register for a single comparison.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
2071
+
2072
+
2073
// Reserve and align stack space for a C call with |num_arguments| word-sized
// arguments. When the OS requires frame alignment, the original esp is saved
// in the extra slot so CallCFunction can restore it; |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
2087
+
2088
+
2089
// Call the C function referenced by |function|. The address is materialized
// in eax and dispatched through the register overload.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
2095
+
2096
+
2097
// Call the C function whose address is in |function|, then unwind the
// argument space set up by PrepareCallCFunction. With frame alignment in
// effect the saved original esp is reloaded; otherwise the argument words
// are simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value PrepareCallCFunction stashed past the args.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
2111
+
2112
+
2113
// Set up a macro assembler that writes directly over existing code at
// |address|, allowing up to |size| bytes of replacement instructions.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2122
+
2123
+
2124
// Finish patching: flush the instruction cache for the modified range and
// verify that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2132
+
2133
+
2134
+ } } // namespace v8::internal
2135
+
2136
+ #endif // V8_TARGET_ARCH_IA32