libv8 3.3.10.2

Files changed (700)
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +44 -0
  5. data/Rakefile +73 -0
  6. data/ext/libv8/extconf.rb +9 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +38 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/scons/CHANGES.txt +5334 -0
  12. data/lib/libv8/scons/LICENSE.txt +20 -0
  13. data/lib/libv8/scons/MANIFEST +199 -0
  14. data/lib/libv8/scons/PKG-INFO +13 -0
  15. data/lib/libv8/scons/README.txt +243 -0
  16. data/lib/libv8/scons/RELEASE.txt +98 -0
  17. data/lib/libv8/scons/engine/SCons/Action.py +1241 -0
  18. data/lib/libv8/scons/engine/SCons/Builder.py +877 -0
  19. data/lib/libv8/scons/engine/SCons/CacheDir.py +216 -0
  20. data/lib/libv8/scons/engine/SCons/Conftest.py +793 -0
  21. data/lib/libv8/scons/engine/SCons/Debug.py +220 -0
  22. data/lib/libv8/scons/engine/SCons/Defaults.py +480 -0
  23. data/lib/libv8/scons/engine/SCons/Environment.py +2318 -0
  24. data/lib/libv8/scons/engine/SCons/Errors.py +205 -0
  25. data/lib/libv8/scons/engine/SCons/Executor.py +633 -0
  26. data/lib/libv8/scons/engine/SCons/Job.py +435 -0
  27. data/lib/libv8/scons/engine/SCons/Memoize.py +244 -0
  28. data/lib/libv8/scons/engine/SCons/Node/Alias.py +152 -0
  29. data/lib/libv8/scons/engine/SCons/Node/FS.py +3142 -0
  30. data/lib/libv8/scons/engine/SCons/Node/Python.py +128 -0
  31. data/lib/libv8/scons/engine/SCons/Node/__init__.py +1328 -0
  32. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +50 -0
  33. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +50 -0
  34. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +50 -0
  35. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +50 -0
  36. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +76 -0
  37. data/lib/libv8/scons/engine/SCons/Options/__init__.py +67 -0
  38. data/lib/libv8/scons/engine/SCons/PathList.py +231 -0
  39. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +241 -0
  40. data/lib/libv8/scons/engine/SCons/Platform/aix.py +69 -0
  41. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +55 -0
  42. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +46 -0
  43. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +46 -0
  44. data/lib/libv8/scons/engine/SCons/Platform/irix.py +44 -0
  45. data/lib/libv8/scons/engine/SCons/Platform/os2.py +58 -0
  46. data/lib/libv8/scons/engine/SCons/Platform/posix.py +263 -0
  47. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +50 -0
  48. data/lib/libv8/scons/engine/SCons/Platform/win32.py +385 -0
  49. data/lib/libv8/scons/engine/SCons/SConf.py +1030 -0
  50. data/lib/libv8/scons/engine/SCons/SConsign.py +383 -0
  51. data/lib/libv8/scons/engine/SCons/Scanner/C.py +132 -0
  52. data/lib/libv8/scons/engine/SCons/Scanner/D.py +73 -0
  53. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +109 -0
  54. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +316 -0
  55. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +48 -0
  56. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +384 -0
  57. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +101 -0
  58. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +55 -0
  59. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +413 -0
  60. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +384 -0
  61. data/lib/libv8/scons/engine/SCons/Script/Main.py +1334 -0
  62. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +939 -0
  63. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +640 -0
  64. data/lib/libv8/scons/engine/SCons/Script/__init__.py +412 -0
  65. data/lib/libv8/scons/engine/SCons/Sig.py +63 -0
  66. data/lib/libv8/scons/engine/SCons/Subst.py +904 -0
  67. data/lib/libv8/scons/engine/SCons/Taskmaster.py +1017 -0
  68. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +61 -0
  69. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +67 -0
  70. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +73 -0
  71. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +246 -0
  72. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +323 -0
  73. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
  74. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
  75. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +240 -0
  76. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +82 -0
  77. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +391 -0
  78. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +456 -0
  79. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +499 -0
  80. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +103 -0
  81. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +137 -0
  82. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +64 -0
  83. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +64 -0
  84. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +71 -0
  85. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +681 -0
  86. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +82 -0
  87. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +74 -0
  88. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +80 -0
  89. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +76 -0
  90. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +71 -0
  91. data/lib/libv8/scons/engine/SCons/Tool/ar.py +63 -0
  92. data/lib/libv8/scons/engine/SCons/Tool/as.py +78 -0
  93. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +81 -0
  94. data/lib/libv8/scons/engine/SCons/Tool/c++.py +99 -0
  95. data/lib/libv8/scons/engine/SCons/Tool/cc.py +102 -0
  96. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +58 -0
  97. data/lib/libv8/scons/engine/SCons/Tool/default.py +50 -0
  98. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +223 -0
  99. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +64 -0
  100. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +124 -0
  101. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +94 -0
  102. data/lib/libv8/scons/engine/SCons/Tool/f77.py +62 -0
  103. data/lib/libv8/scons/engine/SCons/Tool/f90.py +62 -0
  104. data/lib/libv8/scons/engine/SCons/Tool/f95.py +63 -0
  105. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +98 -0
  106. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +62 -0
  107. data/lib/libv8/scons/engine/SCons/Tool/g++.py +90 -0
  108. data/lib/libv8/scons/engine/SCons/Tool/g77.py +73 -0
  109. data/lib/libv8/scons/engine/SCons/Tool/gas.py +53 -0
  110. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +80 -0
  111. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +64 -0
  112. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +63 -0
  113. data/lib/libv8/scons/engine/SCons/Tool/gs.py +81 -0
  114. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +84 -0
  115. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +53 -0
  116. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +77 -0
  117. data/lib/libv8/scons/engine/SCons/Tool/icc.py +59 -0
  118. data/lib/libv8/scons/engine/SCons/Tool/icl.py +52 -0
  119. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +72 -0
  120. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +88 -0
  121. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +59 -0
  122. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +60 -0
  123. data/lib/libv8/scons/engine/SCons/Tool/install.py +229 -0
  124. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +482 -0
  125. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +67 -0
  126. data/lib/libv8/scons/engine/SCons/Tool/jar.py +110 -0
  127. data/lib/libv8/scons/engine/SCons/Tool/javac.py +230 -0
  128. data/lib/libv8/scons/engine/SCons/Tool/javah.py +137 -0
  129. data/lib/libv8/scons/engine/SCons/Tool/latex.py +79 -0
  130. data/lib/libv8/scons/engine/SCons/Tool/lex.py +97 -0
  131. data/lib/libv8/scons/engine/SCons/Tool/link.py +121 -0
  132. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +112 -0
  133. data/lib/libv8/scons/engine/SCons/Tool/m4.py +63 -0
  134. data/lib/libv8/scons/engine/SCons/Tool/masm.py +77 -0
  135. data/lib/libv8/scons/engine/SCons/Tool/midl.py +88 -0
  136. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +158 -0
  137. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +64 -0
  138. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +266 -0
  139. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +50 -0
  140. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +268 -0
  141. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +1388 -0
  142. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +207 -0
  143. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +107 -0
  144. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +72 -0
  145. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +312 -0
  146. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
  147. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +527 -0
  148. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +365 -0
  149. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
  150. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
  151. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
  152. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
  153. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +44 -0
  154. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +44 -0
  155. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +78 -0
  156. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +83 -0
  157. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +108 -0
  158. data/lib/libv8/scons/engine/SCons/Tool/qt.py +336 -0
  159. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +120 -0
  160. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +70 -0
  161. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +132 -0
  162. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +68 -0
  163. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +58 -0
  164. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +53 -0
  165. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +63 -0
  166. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +67 -0
  167. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +142 -0
  168. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +58 -0
  169. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +63 -0
  170. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +64 -0
  171. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +64 -0
  172. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +77 -0
  173. data/lib/libv8/scons/engine/SCons/Tool/swig.py +182 -0
  174. data/lib/libv8/scons/engine/SCons/Tool/tar.py +73 -0
  175. data/lib/libv8/scons/engine/SCons/Tool/tex.py +813 -0
  176. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +175 -0
  177. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +53 -0
  178. data/lib/libv8/scons/engine/SCons/Tool/wix.py +99 -0
  179. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +130 -0
  180. data/lib/libv8/scons/engine/SCons/Tool/zip.py +99 -0
  181. data/lib/libv8/scons/engine/SCons/Util.py +1492 -0
  182. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +89 -0
  183. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +103 -0
  184. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +135 -0
  185. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +106 -0
  186. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +147 -0
  187. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +312 -0
  188. data/lib/libv8/scons/engine/SCons/Warnings.py +246 -0
  189. data/lib/libv8/scons/engine/SCons/__init__.py +49 -0
  190. data/lib/libv8/scons/engine/SCons/compat/__init__.py +237 -0
  191. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +150 -0
  192. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +45 -0
  193. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +45 -0
  194. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +76 -0
  195. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +45 -0
  196. data/lib/libv8/scons/engine/SCons/compat/_scons_sets.py +563 -0
  197. data/lib/libv8/scons/engine/SCons/compat/_scons_subprocess.py +1281 -0
  198. data/lib/libv8/scons/engine/SCons/cpp.py +589 -0
  199. data/lib/libv8/scons/engine/SCons/dblite.py +251 -0
  200. data/lib/libv8/scons/engine/SCons/exitfuncs.py +77 -0
  201. data/lib/libv8/scons/os_spawnv_fix.diff +83 -0
  202. data/lib/libv8/scons/scons-time.1 +1017 -0
  203. data/lib/libv8/scons/scons.1 +15219 -0
  204. data/lib/libv8/scons/sconsign.1 +208 -0
  205. data/lib/libv8/scons/script/scons +196 -0
  206. data/lib/libv8/scons/script/scons-time +1544 -0
  207. data/lib/libv8/scons/script/scons.bat +31 -0
  208. data/lib/libv8/scons/script/sconsign +513 -0
  209. data/lib/libv8/scons/setup.cfg +6 -0
  210. data/lib/libv8/scons/setup.py +425 -0
  211. data/lib/libv8/v8/.gitignore +35 -0
  212. data/lib/libv8/v8/AUTHORS +44 -0
  213. data/lib/libv8/v8/ChangeLog +2839 -0
  214. data/lib/libv8/v8/LICENSE +52 -0
  215. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  216. data/lib/libv8/v8/LICENSE.v8 +26 -0
  217. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  218. data/lib/libv8/v8/SConstruct +1478 -0
  219. data/lib/libv8/v8/build/README.txt +49 -0
  220. data/lib/libv8/v8/build/all.gyp +18 -0
  221. data/lib/libv8/v8/build/armu.gypi +32 -0
  222. data/lib/libv8/v8/build/common.gypi +144 -0
  223. data/lib/libv8/v8/build/gyp_v8 +145 -0
  224. data/lib/libv8/v8/include/v8-debug.h +395 -0
  225. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  226. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  227. data/lib/libv8/v8/include/v8-testing.h +104 -0
  228. data/lib/libv8/v8/include/v8.h +4124 -0
  229. data/lib/libv8/v8/include/v8stdint.h +53 -0
  230. data/lib/libv8/v8/preparser/SConscript +38 -0
  231. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  232. data/lib/libv8/v8/src/SConscript +368 -0
  233. data/lib/libv8/v8/src/accessors.cc +767 -0
  234. data/lib/libv8/v8/src/accessors.h +123 -0
  235. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  236. data/lib/libv8/v8/src/allocation.cc +122 -0
  237. data/lib/libv8/v8/src/allocation.h +143 -0
  238. data/lib/libv8/v8/src/api.cc +5845 -0
  239. data/lib/libv8/v8/src/api.h +574 -0
  240. data/lib/libv8/v8/src/apinatives.js +110 -0
  241. data/lib/libv8/v8/src/apiutils.h +73 -0
  242. data/lib/libv8/v8/src/arguments.h +118 -0
  243. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  244. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  245. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  246. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  247. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  248. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  249. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  250. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  251. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  252. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  253. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  254. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  255. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  256. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  257. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  258. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  259. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  260. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  261. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  262. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  263. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  264. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  265. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  266. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  267. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  268. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  269. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  270. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  271. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  272. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  273. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  274. data/lib/libv8/v8/src/array.js +1366 -0
  275. data/lib/libv8/v8/src/assembler.cc +1207 -0
  276. data/lib/libv8/v8/src/assembler.h +858 -0
  277. data/lib/libv8/v8/src/ast-inl.h +112 -0
  278. data/lib/libv8/v8/src/ast.cc +1146 -0
  279. data/lib/libv8/v8/src/ast.h +2188 -0
  280. data/lib/libv8/v8/src/atomicops.h +167 -0
  281. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  282. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  283. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  284. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  285. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  286. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  287. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  288. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  289. data/lib/libv8/v8/src/bignum.cc +768 -0
  290. data/lib/libv8/v8/src/bignum.h +140 -0
  291. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  292. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  293. data/lib/libv8/v8/src/builtins.cc +1707 -0
  294. data/lib/libv8/v8/src/builtins.h +371 -0
  295. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  296. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  297. data/lib/libv8/v8/src/cached-powers.h +65 -0
  298. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  299. data/lib/libv8/v8/src/char-predicates.h +67 -0
  300. data/lib/libv8/v8/src/checks.cc +110 -0
  301. data/lib/libv8/v8/src/checks.h +296 -0
  302. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  303. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  304. data/lib/libv8/v8/src/circular-queue.h +103 -0
  305. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  306. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  307. data/lib/libv8/v8/src/code.h +70 -0
  308. data/lib/libv8/v8/src/codegen.cc +231 -0
  309. data/lib/libv8/v8/src/codegen.h +84 -0
  310. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  311. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  312. data/lib/libv8/v8/src/compiler.cc +786 -0
  313. data/lib/libv8/v8/src/compiler.h +312 -0
  314. data/lib/libv8/v8/src/contexts.cc +347 -0
  315. data/lib/libv8/v8/src/contexts.h +391 -0
  316. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  317. data/lib/libv8/v8/src/conversions.cc +1131 -0
  318. data/lib/libv8/v8/src/conversions.h +135 -0
  319. data/lib/libv8/v8/src/counters.cc +93 -0
  320. data/lib/libv8/v8/src/counters.h +254 -0
  321. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  322. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  323. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  324. data/lib/libv8/v8/src/cpu.h +69 -0
  325. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  326. data/lib/libv8/v8/src/d8-debug.h +158 -0
  327. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  328. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  329. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  330. data/lib/libv8/v8/src/d8.cc +803 -0
  331. data/lib/libv8/v8/src/d8.gyp +91 -0
  332. data/lib/libv8/v8/src/d8.h +235 -0
  333. data/lib/libv8/v8/src/d8.js +2798 -0
  334. data/lib/libv8/v8/src/data-flow.cc +66 -0
  335. data/lib/libv8/v8/src/data-flow.h +205 -0
  336. data/lib/libv8/v8/src/date.js +1103 -0
  337. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  338. data/lib/libv8/v8/src/dateparser.cc +178 -0
  339. data/lib/libv8/v8/src/dateparser.h +266 -0
  340. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  341. data/lib/libv8/v8/src/debug-agent.h +129 -0
  342. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  343. data/lib/libv8/v8/src/debug.cc +3165 -0
  344. data/lib/libv8/v8/src/debug.h +1057 -0
  345. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  346. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  347. data/lib/libv8/v8/src/disasm.h +80 -0
  348. data/lib/libv8/v8/src/disassembler.cc +343 -0
  349. data/lib/libv8/v8/src/disassembler.h +58 -0
  350. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  351. data/lib/libv8/v8/src/diy-fp.h +117 -0
  352. data/lib/libv8/v8/src/double.h +238 -0
  353. data/lib/libv8/v8/src/dtoa.cc +103 -0
  354. data/lib/libv8/v8/src/dtoa.h +85 -0
  355. data/lib/libv8/v8/src/execution.cc +849 -0
  356. data/lib/libv8/v8/src/execution.h +297 -0
  357. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  358. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  359. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  360. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  361. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  362. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  363. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  364. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  365. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  366. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  367. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  368. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  369. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  370. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  371. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  372. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  373. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  374. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  375. data/lib/libv8/v8/src/factory.cc +1222 -0
  376. data/lib/libv8/v8/src/factory.h +442 -0
  377. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  378. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  379. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  380. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  381. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  382. data/lib/libv8/v8/src/flags.cc +551 -0
  383. data/lib/libv8/v8/src/flags.h +79 -0
  384. data/lib/libv8/v8/src/frames-inl.h +247 -0
  385. data/lib/libv8/v8/src/frames.cc +1243 -0
  386. data/lib/libv8/v8/src/frames.h +870 -0
  387. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  388. data/lib/libv8/v8/src/full-codegen.h +771 -0
  389. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  390. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  391. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  392. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  393. data/lib/libv8/v8/src/global-handles.cc +665 -0
  394. data/lib/libv8/v8/src/global-handles.h +284 -0
  395. data/lib/libv8/v8/src/globals.h +325 -0
  396. data/lib/libv8/v8/src/handles-inl.h +177 -0
  397. data/lib/libv8/v8/src/handles.cc +987 -0
  398. data/lib/libv8/v8/src/handles.h +382 -0
  399. data/lib/libv8/v8/src/hashmap.cc +230 -0
  400. data/lib/libv8/v8/src/hashmap.h +123 -0
  401. data/lib/libv8/v8/src/heap-inl.h +704 -0
  402. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  403. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  404. data/lib/libv8/v8/src/heap.cc +5930 -0
  405. data/lib/libv8/v8/src/heap.h +2268 -0
  406. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  407. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  408. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  409. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  410. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  411. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  412. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  413. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  414. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  415. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  416. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  417. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  418. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  419. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  420. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  421. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  422. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  423. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  424. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  425. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  426. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  427. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  428. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  429. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  430. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  431. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  432. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  433. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  434. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  435. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  436. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  437. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  438. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  439. data/lib/libv8/v8/src/ic-inl.h +130 -0
  440. data/lib/libv8/v8/src/ic.cc +2577 -0
  441. data/lib/libv8/v8/src/ic.h +736 -0
  442. data/lib/libv8/v8/src/inspector.cc +63 -0
  443. data/lib/libv8/v8/src/inspector.h +62 -0
  444. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  445. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  446. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  447. data/lib/libv8/v8/src/isolate.cc +1869 -0
  448. data/lib/libv8/v8/src/isolate.h +1382 -0
  449. data/lib/libv8/v8/src/json-parser.cc +504 -0
  450. data/lib/libv8/v8/src/json-parser.h +161 -0
  451. data/lib/libv8/v8/src/json.js +342 -0
  452. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  453. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  454. data/lib/libv8/v8/src/list-inl.h +212 -0
  455. data/lib/libv8/v8/src/list.h +174 -0
  456. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  457. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  458. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  459. data/lib/libv8/v8/src/lithium.cc +190 -0
  460. data/lib/libv8/v8/src/lithium.h +597 -0
  461. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  462. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  463. data/lib/libv8/v8/src/liveedit.h +180 -0
  464. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  465. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  466. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  467. data/lib/libv8/v8/src/log-inl.h +59 -0
  468. data/lib/libv8/v8/src/log-utils.cc +428 -0
  469. data/lib/libv8/v8/src/log-utils.h +231 -0
  470. data/lib/libv8/v8/src/log.cc +1993 -0
  471. data/lib/libv8/v8/src/log.h +476 -0
  472. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  473. data/lib/libv8/v8/src/macros.py +178 -0
  474. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  475. data/lib/libv8/v8/src/mark-compact.h +506 -0
  476. data/lib/libv8/v8/src/math.js +264 -0
  477. data/lib/libv8/v8/src/messages.cc +179 -0
  478. data/lib/libv8/v8/src/messages.h +113 -0
  479. data/lib/libv8/v8/src/messages.js +1096 -0
  480. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  481. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  482. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  483. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  484. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  485. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  486. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  487. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  488. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  489. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  490. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  491. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  492. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  493. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  494. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  495. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  496. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  497. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  498. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  499. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  500. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  501. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  502. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  503. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  504. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  505. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  506. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  507. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  508. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  509. data/lib/libv8/v8/src/natives.h +64 -0
  510. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  511. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  512. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  513. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  514. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  515. data/lib/libv8/v8/src/objects.cc +10585 -0
  516. data/lib/libv8/v8/src/objects.h +6838 -0
  517. data/lib/libv8/v8/src/parser.cc +4997 -0
  518. data/lib/libv8/v8/src/parser.h +765 -0
  519. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  520. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  521. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  522. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  523. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  524. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  525. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  526. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  527. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  528. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  529. data/lib/libv8/v8/src/platform-tls.h +50 -0
  530. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  531. data/lib/libv8/v8/src/platform.h +667 -0
  532. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  533. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  534. data/lib/libv8/v8/src/preparse-data.h +225 -0
  535. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  536. data/lib/libv8/v8/src/preparser.cc +1450 -0
  537. data/lib/libv8/v8/src/preparser.h +493 -0
  538. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  539. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  540. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  541. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  542. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  543. data/lib/libv8/v8/src/property.cc +105 -0
  544. data/lib/libv8/v8/src/property.h +365 -0
  545. data/lib/libv8/v8/src/proxy.js +83 -0
  546. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  547. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  548. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  549. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  550. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  551. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  552. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  553. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  554. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  555. data/lib/libv8/v8/src/regexp.js +483 -0
  556. data/lib/libv8/v8/src/rewriter.cc +360 -0
  557. data/lib/libv8/v8/src/rewriter.h +50 -0
  558. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  559. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  560. data/lib/libv8/v8/src/runtime.cc +12227 -0
  561. data/lib/libv8/v8/src/runtime.h +652 -0
  562. data/lib/libv8/v8/src/runtime.js +649 -0
  563. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  564. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  565. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  566. data/lib/libv8/v8/src/scanner-base.h +670 -0
  567. data/lib/libv8/v8/src/scanner.cc +345 -0
  568. data/lib/libv8/v8/src/scanner.h +146 -0
  569. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  570. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  571. data/lib/libv8/v8/src/scopes.cc +1150 -0
  572. data/lib/libv8/v8/src/scopes.h +507 -0
  573. data/lib/libv8/v8/src/serialize.cc +1574 -0
  574. data/lib/libv8/v8/src/serialize.h +589 -0
  575. data/lib/libv8/v8/src/shell.h +55 -0
  576. data/lib/libv8/v8/src/simulator.h +43 -0
  577. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  578. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  579. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  580. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  581. data/lib/libv8/v8/src/snapshot.h +91 -0
  582. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  583. data/lib/libv8/v8/src/spaces.cc +3145 -0
  584. data/lib/libv8/v8/src/spaces.h +2369 -0
  585. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  586. data/lib/libv8/v8/src/splay-tree.h +205 -0
  587. data/lib/libv8/v8/src/string-search.cc +41 -0
  588. data/lib/libv8/v8/src/string-search.h +568 -0
  589. data/lib/libv8/v8/src/string-stream.cc +592 -0
  590. data/lib/libv8/v8/src/string-stream.h +191 -0
  591. data/lib/libv8/v8/src/string.js +994 -0
  592. data/lib/libv8/v8/src/strtod.cc +440 -0
  593. data/lib/libv8/v8/src/strtod.h +40 -0
  594. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  595. data/lib/libv8/v8/src/stub-cache.h +924 -0
  596. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  597. data/lib/libv8/v8/src/token.cc +63 -0
  598. data/lib/libv8/v8/src/token.h +288 -0
  599. data/lib/libv8/v8/src/type-info.cc +507 -0
  600. data/lib/libv8/v8/src/type-info.h +272 -0
  601. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  602. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  603. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  604. data/lib/libv8/v8/src/unicode.cc +1624 -0
  605. data/lib/libv8/v8/src/unicode.h +280 -0
  606. data/lib/libv8/v8/src/uri.js +408 -0
  607. data/lib/libv8/v8/src/utils-inl.h +48 -0
  608. data/lib/libv8/v8/src/utils.cc +371 -0
  609. data/lib/libv8/v8/src/utils.h +800 -0
  610. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  611. data/lib/libv8/v8/src/v8-counters.h +314 -0
  612. data/lib/libv8/v8/src/v8.cc +213 -0
  613. data/lib/libv8/v8/src/v8.h +131 -0
  614. data/lib/libv8/v8/src/v8checks.h +64 -0
  615. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  616. data/lib/libv8/v8/src/v8globals.h +512 -0
  617. data/lib/libv8/v8/src/v8memory.h +82 -0
  618. data/lib/libv8/v8/src/v8natives.js +1310 -0
  619. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  620. data/lib/libv8/v8/src/v8threads.cc +464 -0
  621. data/lib/libv8/v8/src/v8threads.h +165 -0
  622. data/lib/libv8/v8/src/v8utils.h +319 -0
  623. data/lib/libv8/v8/src/variables.cc +114 -0
  624. data/lib/libv8/v8/src/variables.h +167 -0
  625. data/lib/libv8/v8/src/version.cc +116 -0
  626. data/lib/libv8/v8/src/version.h +68 -0
  627. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  628. data/lib/libv8/v8/src/vm-state.h +71 -0
  629. data/lib/libv8/v8/src/win32-headers.h +96 -0
  630. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  631. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  632. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  633. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  634. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  635. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  636. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  637. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  638. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  639. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  640. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  641. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  642. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  643. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  644. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  645. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  646. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  647. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  648. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  649. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  650. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  651. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  652. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  653. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  654. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  655. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  656. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  657. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  658. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  659. data/lib/libv8/v8/src/zone-inl.h +140 -0
  660. data/lib/libv8/v8/src/zone.cc +196 -0
  661. data/lib/libv8/v8/src/zone.h +240 -0
  662. data/lib/libv8/v8/tools/codemap.js +265 -0
  663. data/lib/libv8/v8/tools/consarray.js +93 -0
  664. data/lib/libv8/v8/tools/csvparser.js +78 -0
  665. data/lib/libv8/v8/tools/disasm.py +92 -0
  666. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  667. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  668. data/lib/libv8/v8/tools/gcmole/README +62 -0
  669. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  670. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  671. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  672. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  673. data/lib/libv8/v8/tools/grokdump.py +841 -0
  674. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  675. data/lib/libv8/v8/tools/js2c.py +364 -0
  676. data/lib/libv8/v8/tools/jsmin.py +280 -0
  677. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  678. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  679. data/lib/libv8/v8/tools/logreader.js +185 -0
  680. data/lib/libv8/v8/tools/mac-nm +18 -0
  681. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  682. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  683. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  684. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  685. data/lib/libv8/v8/tools/presubmit.py +305 -0
  686. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  687. data/lib/libv8/v8/tools/profile.js +751 -0
  688. data/lib/libv8/v8/tools/profile_view.js +219 -0
  689. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  690. data/lib/libv8/v8/tools/splaytree.js +316 -0
  691. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  692. data/lib/libv8/v8/tools/test.py +1510 -0
  693. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  694. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  695. data/lib/libv8/v8/tools/utils.py +96 -0
  696. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  697. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  698. data/lib/libv8/version.rb +4 -0
  699. data/libv8.gemspec +31 -0
  700. metadata +800 -0
data/lib/libv8/v8/src/macro-assembler.h
@@ -0,0 +1,120 @@
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
+ // Redistribution and use in source and binary forms, with or without
+ // modification, are permitted provided that the following conditions are
+ // met:
+ //
+ //     * Redistributions of source code must retain the above copyright
+ //       notice, this list of conditions and the following disclaimer.
+ //     * Redistributions in binary form must reproduce the above
+ //       copyright notice, this list of conditions and the following
+ //       disclaimer in the documentation and/or other materials provided
+ //       with the distribution.
+ //     * Neither the name of Google Inc. nor the names of its
+ //       contributors may be used to endorse or promote products derived
+ //       from this software without specific prior written permission.
+ //
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #ifndef V8_MACRO_ASSEMBLER_H_
+ #define V8_MACRO_ASSEMBLER_H_
+
+
+ // Helper types to make boolean flag easier to read at call-site.
+ enum InvokeFlag {
+   CALL_FUNCTION,
+   JUMP_FUNCTION
+ };
+
+
+ enum CodeLocation {
+   IN_JAVASCRIPT,
+   IN_JS_ENTRY,
+   IN_C_ENTRY
+ };
+
+
+ enum HandlerType {
+   TRY_CATCH_HANDLER,
+   TRY_FINALLY_HANDLER,
+   JS_ENTRY_HANDLER
+ };
+
+
+ // Types of uncatchable exceptions.
+ enum UncatchableExceptionType {
+   OUT_OF_MEMORY,
+   TERMINATION
+ };
+
+
+ // Invalid depth in prototype chain.
+ const int kInvalidProtoDepth = -1;
+
+ #if V8_TARGET_ARCH_IA32
+ #include "assembler.h"
+ #include "ia32/assembler-ia32.h"
+ #include "ia32/assembler-ia32-inl.h"
+ #include "code.h"  // must be after assembler_*.h
+ #include "ia32/macro-assembler-ia32.h"
+ #elif V8_TARGET_ARCH_X64
+ #include "assembler.h"
+ #include "x64/assembler-x64.h"
+ #include "x64/assembler-x64-inl.h"
+ #include "code.h"  // must be after assembler_*.h
+ #include "x64/macro-assembler-x64.h"
+ #elif V8_TARGET_ARCH_ARM
+ #include "arm/constants-arm.h"
+ #include "assembler.h"
+ #include "arm/assembler-arm.h"
+ #include "arm/assembler-arm-inl.h"
+ #include "code.h"  // must be after assembler_*.h
+ #include "arm/macro-assembler-arm.h"
+ #elif V8_TARGET_ARCH_MIPS
+ #include "mips/constants-mips.h"
+ #include "assembler.h"
+ #include "mips/assembler-mips.h"
+ #include "mips/assembler-mips-inl.h"
+ #include "code.h"  // must be after assembler_*.h
+ #include "mips/macro-assembler-mips.h"
+ #else
+ #error Unsupported target architecture.
+ #endif
+
+ namespace v8 {
+ namespace internal {
+
+ // Support for "structured" code comments.
+ #ifdef DEBUG
+
+ class Comment {
+  public:
+   Comment(MacroAssembler* masm, const char* msg);
+   ~Comment();
+
+  private:
+   MacroAssembler* masm_;
+   const char* msg_;
+ };
+
+ #else
+
+ class Comment {
+  public:
+   Comment(MacroAssembler*, const char*) {}
+ };
+
+ #endif  // DEBUG
+
+ } }  // namespace v8::internal
+
+ #endif  // V8_MACRO_ASSEMBLER_H_
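
The Comment class above is a small RAII helper for "structured" code comments: in DEBUG builds its constructor and destructor (defined elsewhere in the codebase) bracket the emitted instructions with an annotation, while in release builds the empty inline constructor lets the whole thing compile away. A minimal usage sketch (the emitting function is hypothetical; only the Comment pattern comes from the header above):

    // Hypothetical code-generator fragment showing the RAII pattern.
    void EmitSomething(MacroAssembler* masm) {
      Comment cmnt(masm, "[ Something");  // annotation recorded in DEBUG only
      // ... emit architecture-specific instructions through masm ...
    }  // cmnt goes out of scope; in DEBUG builds its destructor closes the span
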
data/lib/libv8/v8/src/macros.py
@@ -0,0 +1,178 @@
+ # Copyright 2006-2009 the V8 project authors. All rights reserved.
+ # Redistribution and use in source and binary forms, with or without
+ # modification, are permitted provided that the following conditions are
+ # met:
+ #
+ #     * Redistributions of source code must retain the above copyright
+ #       notice, this list of conditions and the following disclaimer.
+ #     * Redistributions in binary form must reproduce the above
+ #       copyright notice, this list of conditions and the following
+ #       disclaimer in the documentation and/or other materials provided
+ #       with the distribution.
+ #     * Neither the name of Google Inc. nor the names of its
+ #       contributors may be used to endorse or promote products derived
+ #       from this software without specific prior written permission.
+ #
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ # Dictionary that is passed as defines for js2c.py.
+ # Used for defines that must be defined for all native js files.
+
+ const NONE = 0;
+ const READ_ONLY = 1;
+ const DONT_ENUM = 2;
+ const DONT_DELETE = 4;
+
+ # Constants used for getter and setter operations.
+ const GETTER = 0;
+ const SETTER = 1;
+
+ # These definitions must match the index of the properties in objects.h.
+ const kApiTagOffset = 0;
+ const kApiPropertyListOffset = 1;
+ const kApiSerialNumberOffset = 2;
+ const kApiConstructorOffset = 2;
+ const kApiPrototypeTemplateOffset = 5;
+ const kApiParentTemplateOffset = 6;
+
+ const NO_HINT = 0;
+ const NUMBER_HINT = 1;
+ const STRING_HINT = 2;
+
+ const kFunctionTag = 0;
+ const kNewObjectTag = 1;
+
+ # For date.js.
+ const HoursPerDay = 24;
+ const MinutesPerHour = 60;
+ const SecondsPerMinute = 60;
+ const msPerSecond = 1000;
+ const msPerMinute = 60000;
+ const msPerHour = 3600000;
+ const msPerDay = 86400000;
+ const msPerMonth = 2592000000;
+
+ # For apinatives.js
+ const kUninitialized = -1;
+
+ # Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1).
+ const kInvalidDate = 'Invalid Date';
+ const kDayZeroInJulianDay = 2440588;
+ const kMonthMask = 0x1e0;
+ const kDayMask = 0x01f;
+ const kYearShift = 9;
+ const kMonthShift = 5;
+
+ # Limits for parts of the date, so that we support all the dates that
+ # ECMA 262 - 15.9.1.1 requires us to, but at the same time be sure that
+ # the date (days since 1970) is in SMI range.
+ const kMinYear = -1000000;
+ const kMaxYear = 1000000;
+ const kMinMonth = -10000000;
+ const kMaxMonth = 10000000;
+ const kMinDate = -100000000;
+ const kMaxDate = 100000000;
+
+ # Native cache ids.
+ const STRING_TO_REGEXP_CACHE_ID = 0;
+
+ # Type query macros.
+ #
+ # Note: We have special support for typeof(foo) === 'bar' in the compiler.
+ #       It will *not* generate a runtime typeof call for the most important
+ #       values of 'bar'.
+ macro IS_NULL(arg) = (arg === null);
+ macro IS_NULL_OR_UNDEFINED(arg) = (arg == null);
+ macro IS_UNDEFINED(arg) = (typeof(arg) === 'undefined');
+ macro IS_NUMBER(arg) = (typeof(arg) === 'number');
+ macro IS_STRING(arg) = (typeof(arg) === 'string');
+ macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean');
+ macro IS_OBJECT(arg) = (%_IsObject(arg));
+ macro IS_ARRAY(arg) = (%_IsArray(arg));
+ macro IS_FUNCTION(arg) = (%_IsFunction(arg));
+ macro IS_REGEXP(arg) = (%_IsRegExp(arg));
+ macro IS_DATE(arg) = (%_ClassOf(arg) === 'Date');
+ macro IS_NUMBER_WRAPPER(arg) = (%_ClassOf(arg) === 'Number');
+ macro IS_STRING_WRAPPER(arg) = (%_ClassOf(arg) === 'String');
+ macro IS_BOOLEAN_WRAPPER(arg) = (%_ClassOf(arg) === 'Boolean');
+ macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
+ macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
+ macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments');
+ macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
+ macro IS_UNDETECTABLE(arg) = (%_IsUndetectableObject(arg));
+ macro FLOOR(arg) = $floor(arg);
+
+ # Macro for ECMAScript 5 queries of the type:
+ # "Type(O) is object."
+ # This is the same as being either a function or an object in V8 terminology.
+ # In addition, an undetectable object is also included by this.
+ macro IS_SPEC_OBJECT(arg) = (%_IsSpecObject(arg));
+
+ # Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
+ macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg));
+ macro NUMBER_IS_FINITE(arg) = (%_IsSmi(%IS_VAR(arg)) || arg - arg == 0);
+ macro TO_INTEGER(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : %NumberToInteger(ToNumber(arg)));
+ macro TO_INTEGER_MAP_MINUS_ZERO(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : %NumberToIntegerMapMinusZero(ToNumber(arg)));
+ macro TO_INT32(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : (arg >> 0));
+ macro TO_UINT32(arg) = (arg >>> 0);
+ macro TO_STRING_INLINE(arg) = (IS_STRING(%IS_VAR(arg)) ? arg : NonStringToString(arg));
+ macro TO_NUMBER_INLINE(arg) = (IS_NUMBER(%IS_VAR(arg)) ? arg : NonNumberToNumber(arg));
+ macro TO_OBJECT_INLINE(arg) = (IS_SPEC_OBJECT(%IS_VAR(arg)) ? arg : ToObject(arg));
+
+ # Macros implemented in Python.
+ python macro CHAR_CODE(str) = ord(str[1]);
+
+ # Constants used on an array to implement the properties of the RegExp object.
+ const REGEXP_NUMBER_OF_CAPTURES = 0;
+ const REGEXP_FIRST_CAPTURE = 3;
+
+ # We can't put macros in macros so we use constants here.
+ # REGEXP_NUMBER_OF_CAPTURES
+ macro NUMBER_OF_CAPTURES(array) = ((array)[0]);
+
+ # Limit according to ECMA 262 15.9.1.1
+ const MAX_TIME_MS = 8640000000000000;
+ # Limit which is MAX_TIME_MS + msPerMonth.
+ const MAX_TIME_BEFORE_UTC = 8640002592000000;
+
+ # Gets the value of a Date object. If arg is not a Date object
+ # a type error is thrown.
+ macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
+ macro DAY(time) = ($floor(time / 86400000));
+ macro NAN_OR_DATE_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : DateFromTime(time));
+ macro HOUR_FROM_TIME(time) = (Modulo($floor(time / 3600000), 24));
+ macro MIN_FROM_TIME(time) = (Modulo($floor(time / 60000), 60));
+ macro NAN_OR_MIN_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MIN_FROM_TIME(time));
+ macro SEC_FROM_TIME(time) = (Modulo($floor(time / 1000), 60));
+ macro NAN_OR_SEC_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : SEC_FROM_TIME(time));
+ macro MS_FROM_TIME(time) = (Modulo(time, 1000));
+ macro NAN_OR_MS_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MS_FROM_TIME(time));
+
+ # Last input and last subject of regexp matches.
+ macro LAST_SUBJECT(array) = ((array)[1]);
+ macro LAST_INPUT(array) = ((array)[2]);
+
+ # REGEXP_FIRST_CAPTURE
+ macro CAPTURE(index) = (3 + (index));
+ const CAPTURE0 = 3;
+ const CAPTURE1 = 4;
+
+ # PropertyDescriptor return value indices - must match
+ # PropertyDescriptorIndices in runtime.cc.
+ const IS_ACCESSOR_INDEX = 0;
+ const VALUE_INDEX = 1;
+ const GETTER_INDEX = 2;
+ const SETTER_INDEX = 3;
+ const WRITABLE_INDEX = 4;
+ const ENUMERABLE_INDEX = 5;
+ const CONFIGURABLE_INDEX = 6;
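
Note that macros.py is not executed as JavaScript: js2c.py reads these const and macro definitions and splices them into the native .js runtime files (date.js, string.js, regexp.js, and so on) by textual substitution before they are compiled in. A worked example under that reading (the call sites are hypothetical; the expansions follow mechanically from the definitions above):

    TO_UINT32(arg.length)   ->  (arg.length >>> 0)
    IS_STRING(subject)      ->  (typeof(subject) === 'string')
    DAY(90061000)           ->  ($floor(90061000 / 86400000))   // = 1

For t = 90061000 ms, i.e. one day, one hour, one minute and one second past the epoch, DAY(t), HOUR_FROM_TIME(t), MIN_FROM_TIME(t) and SEC_FROM_TIME(t) all evaluate to 1; this is the arithmetic date.js is built on.
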
@@ -0,0 +1,3143 @@
1
+ // Copyright 2011 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ #include "v8.h"
+
+ #include "compilation-cache.h"
+ #include "execution.h"
+ #include "heap-profiler.h"
+ #include "gdb-jit.h"
+ #include "global-handles.h"
+ #include "ic-inl.h"
+ #include "liveobjectlist-inl.h"
+ #include "mark-compact.h"
+ #include "objects-visiting.h"
+ #include "stub-cache.h"
+
+ namespace v8 {
+ namespace internal {
+
+ // -------------------------------------------------------------------------
+ // MarkCompactCollector
+
+ MarkCompactCollector::MarkCompactCollector() : // NOLINT
+ #ifdef DEBUG
+ state_(IDLE),
+ #endif
+ force_compaction_(false),
+ compacting_collection_(false),
+ compact_on_next_gc_(false),
+ previous_marked_count_(0),
+ tracer_(NULL),
+ #ifdef DEBUG
+ live_young_objects_size_(0),
+ live_old_pointer_objects_size_(0),
+ live_old_data_objects_size_(0),
+ live_code_objects_size_(0),
+ live_map_objects_size_(0),
+ live_cell_objects_size_(0),
+ live_lo_objects_size_(0),
+ live_bytes_(0),
+ #endif
+ heap_(NULL),
+ code_flusher_(NULL) { }
+
+
+ void MarkCompactCollector::CollectGarbage() {
+ // Make sure that Prepare() has been called. The individual steps below will
+ // update the state as they proceed.
+ ASSERT(state_ == PREPARE_GC);
+
+ // Prepare has selected whether to compact the old generation or not.
+ // Tell the tracer.
+ if (IsCompacting()) tracer_->set_is_compacting();
+
+ MarkLiveObjects();
+
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
+
+ SweepLargeObjectSpace();
+
+ if (IsCompacting()) {
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
+ EncodeForwardingAddresses();
+
+ heap()->MarkMapPointersAsEncoded(true);
+ UpdatePointers();
+ heap()->MarkMapPointersAsEncoded(false);
+ heap()->isolate()->pc_to_code_cache()->Flush();
+
+ RelocateObjects();
+ } else {
+ SweepSpaces();
+ heap()->isolate()->pc_to_code_cache()->Flush();
+ }
+
+ Finish();
+
+ // Save the count of marked objects remaining after the collection and
+ // null out the GC tracer.
+ previous_marked_count_ = tracer_->marked_count();
+ ASSERT(previous_marked_count_ == 0);
+ tracer_ = NULL;
+ }
+
+
+ void MarkCompactCollector::Prepare(GCTracer* tracer) {
+ // Rather than passing the tracer around we stash it in a static member
+ // variable.
+ tracer_ = tracer;
+
+ #ifdef DEBUG
+ ASSERT(state_ == IDLE);
+ state_ = PREPARE_GC;
+ #endif
+ ASSERT(!FLAG_always_compact || !FLAG_never_compact);
+
+ compacting_collection_ =
+ FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
+ compact_on_next_gc_ = false;
+
+ if (FLAG_never_compact) compacting_collection_ = false;
+ if (!heap()->map_space()->MapPointersEncodable())
+ compacting_collection_ = false;
+ if (FLAG_collect_maps) CreateBackPointers();
+ #ifdef ENABLE_GDB_JIT_INTERFACE
+ if (FLAG_gdbjit) {
+ // If GDBJIT interface is active disable compaction.
+ compacting_collection_ = false;
+ }
+ #endif
+
+ PagedSpaces spaces;
+ for (PagedSpace* space = spaces.next();
+ space != NULL; space = spaces.next()) {
+ space->PrepareForMarkCompact(compacting_collection_);
+ }
+
+ #ifdef DEBUG
+ live_bytes_ = 0;
+ live_young_objects_size_ = 0;
+ live_old_pointer_objects_size_ = 0;
+ live_old_data_objects_size_ = 0;
+ live_code_objects_size_ = 0;
+ live_map_objects_size_ = 0;
+ live_cell_objects_size_ = 0;
+ live_lo_objects_size_ = 0;
+ #endif
+ }
+
+
+ void MarkCompactCollector::Finish() {
+ #ifdef DEBUG
+ ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
+ state_ = IDLE;
+ #endif
+ // The stub cache is not traversed during GC; clear the cache to
+ // force lazy re-initialization of it. This must be done after the
+ // GC, because it relies on the new address of certain old space
+ // objects (empty string, illegal builtin).
+ heap()->isolate()->stub_cache()->Clear();
+
+ heap()->external_string_table_.CleanUp();
+
+ // If we've just compacted old space there's no reason to check the
+ // fragmentation limit. Just return.
+ if (HasCompacted()) return;
+
+ // We compact the old generation on the next GC if it has gotten too
+ // fragmented (ie, we could recover an expected amount of space by
+ // reclaiming the waste and free list blocks).
+ static const int kFragmentationLimit = 15; // Percent.
+ static const int kFragmentationAllowed = 1 * MB; // Absolute.
+ intptr_t old_gen_recoverable = 0;
+ intptr_t old_gen_used = 0;
+
+ OldSpaces spaces;
+ for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
+ old_gen_recoverable += space->Waste() + space->AvailableFree();
+ old_gen_used += space->Size();
+ }
+
+ int old_gen_fragmentation =
+ static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
+ if (old_gen_fragmentation > kFragmentationLimit &&
+ old_gen_recoverable > kFragmentationAllowed) {
+ compact_on_next_gc_ = true;
+ }
+ }
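+ // Illustrative example (not from the original source): with the thresholds
+ // above, an old generation using 20 MB of which 4 MB is waste and free-list
+ // blocks is (4 * 100) / 20 = 20 percent fragmented. Since 20 > 15
+ // (kFragmentationLimit) and 4 MB > 1 MB (kFragmentationAllowed), Finish()
+ // sets compact_on_next_gc_ and the next collection will compact.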
+
+
+ // -------------------------------------------------------------------------
+ // Phase 1: tracing and marking live objects.
+ // before: all objects are in normal state.
+ // after: a live object's map pointer is marked as '00'.
+
+ // Marking all live objects in the heap as part of mark-sweep or mark-compact
+ // collection. Before marking, all objects are in their normal state. After
+ // marking, live objects' map pointers are marked indicating that the object
+ // has been found reachable.
+ //
+ // The marking algorithm is a (mostly) depth-first (because of possible stack
+ // overflow) traversal of the graph of objects reachable from the roots. It
+ // uses an explicit stack of pointers rather than recursion. The young
+ // generation's inactive ('from') space is used as a marking stack. The
+ // objects in the marking stack are the ones that have been reached and marked
+ // but their children have not yet been visited.
+ //
+ // The marking stack can overflow during traversal. In that case, we set an
+ // overflow flag. When the overflow flag is set, we continue marking objects
+ // reachable from the objects on the marking stack, but no longer push them on
+ // the marking stack. Instead, we mark them as both marked and overflowed.
+ // When the stack is in the overflowed state, objects marked as overflowed
+ // have been reached and marked but their children have not been visited yet.
+ // After emptying the marking stack, we clear the overflow flag and traverse
+ // the heap looking for objects marked as overflowed, push them on the stack,
+ // and continue with marking. This process repeats until all reachable
+ // objects have been marked.
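+ //
+ // Condensed sketch of that fixed-point loop (illustrative only; the real
+ // implementation is ProcessMarkingStack() further below):
+ //
+ //   EmptyMarkingStack();                 // drain stack, may set overflow
+ //   while (marking_stack_.overflowed()) {
+ //     RefillMarkingStack();              // rescan heap for overflowed objects
+ //     EmptyMarkingStack();               // drain again
+ //   }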
+
+ class CodeFlusher {
+ public:
+ explicit CodeFlusher(Isolate* isolate)
+ : isolate_(isolate),
+ jsfunction_candidates_head_(NULL),
+ shared_function_info_candidates_head_(NULL) {}
+
+ void AddCandidate(SharedFunctionInfo* shared_info) {
+ SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+ shared_function_info_candidates_head_ = shared_info;
+ }
+
+ void AddCandidate(JSFunction* function) {
+ ASSERT(function->unchecked_code() ==
+ function->unchecked_shared()->unchecked_code());
+
+ SetNextCandidate(function, jsfunction_candidates_head_);
+ jsfunction_candidates_head_ = function;
+ }
+
+ void ProcessCandidates() {
+ ProcessSharedFunctionInfoCandidates();
+ ProcessJSFunctionCandidates();
+ }
+
+ private:
+ void ProcessJSFunctionCandidates() {
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+
+ JSFunction* candidate = jsfunction_candidates_head_;
+ JSFunction* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+
+ SharedFunctionInfo* shared = candidate->unchecked_shared();
+
+ Code* code = shared->unchecked_code();
+ if (!code->IsMarked()) {
+ shared->set_code(lazy_compile);
+ candidate->set_code(lazy_compile);
+ } else {
+ candidate->set_code(shared->unchecked_code());
+ }
+
+ candidate = next_candidate;
+ }
+
+ jsfunction_candidates_head_ = NULL;
+ }
+
+
+ void ProcessSharedFunctionInfoCandidates() {
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
+ SharedFunctionInfo* next_candidate;
+ while (candidate != NULL) {
+ next_candidate = GetNextCandidate(candidate);
+ SetNextCandidate(candidate, NULL);
+
+ Code* code = candidate->unchecked_code();
+ if (!code->IsMarked()) {
+ candidate->set_code(lazy_compile);
+ }
+
+ candidate = next_candidate;
+ }
+
+ shared_function_info_candidates_head_ = NULL;
+ }
+
+ static JSFunction** GetNextCandidateField(JSFunction* candidate) {
+ return reinterpret_cast<JSFunction**>(
+ candidate->address() + JSFunction::kCodeEntryOffset);
+ }
+
+ static JSFunction* GetNextCandidate(JSFunction* candidate) {
+ return *GetNextCandidateField(candidate);
+ }
+
+ static void SetNextCandidate(JSFunction* candidate,
+ JSFunction* next_candidate) {
+ *GetNextCandidateField(candidate) = next_candidate;
+ }
+
+ static SharedFunctionInfo** GetNextCandidateField(
+ SharedFunctionInfo* candidate) {
+ Code* code = candidate->unchecked_code();
+ return reinterpret_cast<SharedFunctionInfo**>(
+ code->address() + Code::kNextCodeFlushingCandidateOffset);
+ }
+
+ static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
+ return *GetNextCandidateField(candidate);
+ }
+
+ static void SetNextCandidate(SharedFunctionInfo* candidate,
+ SharedFunctionInfo* next_candidate) {
+ *GetNextCandidateField(candidate) = next_candidate;
+ }
+
+ Isolate* isolate_;
+ JSFunction* jsfunction_candidates_head_;
+ SharedFunctionInfo* shared_function_info_candidates_head_;
+
+ DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
+ };
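+ // Note on the lists above: CodeFlusher threads its candidate lists through
+ // fields that are not otherwise needed during marking instead of allocating
+ // side tables. For a JSFunction the link lives in the code-entry field at
+ // JSFunction::kCodeEntryOffset; for a SharedFunctionInfo it lives inside its
+ // Code object at Code::kNextCodeFlushingCandidateOffset. Illustrative use
+ // (not from the original source):
+ //
+ //   CodeFlusher flusher(isolate);
+ //   flusher.AddCandidate(function);      // linked via the code-entry field
+ //   flusher.AddCandidate(shared_info);   // linked via the code object
+ //   flusher.ProcessCandidates();         // unmarked code -> lazy-compile stub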
+
+
+ MarkCompactCollector::~MarkCompactCollector() {
+ if (code_flusher_ != NULL) {
+ delete code_flusher_;
+ code_flusher_ = NULL;
+ }
+ }
+
+
+ static inline HeapObject* ShortCircuitConsString(Object** p) {
+ // Optimization: If the heap object pointed to by p is a non-symbol
+ // cons string whose right substring is HEAP->empty_string, update
+ // it in place to its left substring. Return the updated value.
+ //
+ // Here we assume that if we change *p, we replace it with a heap object
+ // (ie, the left substring of a cons string is always a heap object).
+ //
+ // The check performed is:
+ // object->IsConsString() && !object->IsSymbol() &&
+ // (ConsString::cast(object)->second() == HEAP->empty_string())
+ // except the maps for the object and its possible substrings might be
+ // marked.
+ HeapObject* object = HeapObject::cast(*p);
+ MapWord map_word = object->map_word();
+ map_word.ClearMark();
+ InstanceType type = map_word.ToMap()->instance_type();
+ if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
+
+ Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
+ Heap* heap = map_word.ToMap()->heap();
+ if (second != heap->raw_unchecked_empty_string()) {
+ return object;
+ }
+
+ // Since we don't have the object's start, it is impossible to update the
+ // page dirty marks. Therefore, we only replace the string with its left
+ // substring when page dirty marks do not change.
+ Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
+ if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
+
+ *p = first;
+ return HeapObject::cast(first);
+ }
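+ // Illustrative example (not from the original source): consider a cons
+ // string whose first() is "foo" and whose second() is the empty string,
+ // e.g. left over from a concatenation whose right part became empty. For
+ // a slot holding such a string:
+ //
+ //   Object* slot = cons;                         // ConsString("foo", "")
+ //   HeapObject* obj = ShortCircuitConsString(&slot);
+ //   // slot now points directly at "foo", and obj == "foo".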
+
+
+ class StaticMarkingVisitor : public StaticVisitorBase {
+ public:
+ static inline void IterateBody(Map* map, HeapObject* obj) {
+ table_.GetVisitor(map)(map, obj);
+ }
+
+ static void Initialize() {
+ table_.Register(kVisitShortcutCandidate,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitConsString,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ ConsString::BodyDescriptor,
+ void>::Visit);
+
+
+ table_.Register(kVisitFixedArray,
+ &FlexibleBodyVisitor<StaticMarkingVisitor,
+ FixedArray::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitGlobalContext,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Context::MarkCompactBodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
+ table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
+ table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+
+ table_.Register(kVisitOddball,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Oddball::BodyDescriptor,
+ void>::Visit);
+ table_.Register(kVisitMap,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ Map::BodyDescriptor,
+ void>::Visit);
+
+ table_.Register(kVisitCode, &VisitCode);
+
+ table_.Register(kVisitSharedFunctionInfo,
+ &VisitSharedFunctionInfoAndFlushCode);
+
+ table_.Register(kVisitJSFunction,
+ &VisitJSFunctionAndFlushCode);
+
+ table_.Register(kVisitPropertyCell,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ JSGlobalPropertyCell::BodyDescriptor,
+ void>::Visit);
+
+ table_.RegisterSpecializations<DataObjectVisitor,
+ kVisitDataObject,
+ kVisitDataObjectGeneric>();
+
+ table_.RegisterSpecializations<JSObjectVisitor,
+ kVisitJSObject,
+ kVisitJSObjectGeneric>();
+
+ table_.RegisterSpecializations<StructObjectVisitor,
+ kVisitStruct,
+ kVisitStructGeneric>();
+ }
+
+ INLINE(static void VisitPointer(Heap* heap, Object** p)) {
+ MarkObjectByPointer(heap, p);
+ }
+
+ INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+ // Mark all objects pointed to in [start, end).
+ const int kMinRangeForMarkingRecursion = 64;
+ if (end - start >= kMinRangeForMarkingRecursion) {
+ if (VisitUnmarkedObjects(heap, start, end)) return;
+ // We are close to a stack overflow, so just mark the objects.
+ }
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
+ }
+
+ static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
+ ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
+ Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ if (FLAG_cleanup_code_caches_at_gc && code->is_inline_cache_stub()) {
+ IC::Clear(rinfo->pc());
+ // Please note targets for cleared inline caches do not have to be
+ // marked since they are contained in HEAP->non_monomorphic_cache().
+ } else {
+ heap->mark_compact_collector()->MarkObject(code);
+ }
+ }
+
+ static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
+ ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
+ Object* cell = rinfo->target_cell();
+ Object* old_cell = cell;
+ VisitPointer(heap, &cell);
+ if (cell != old_cell) {
+ rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
+ }
+ }
+
+ static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
+ ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
+ rinfo->IsPatchedReturnSequence()) ||
+ (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
+ rinfo->IsPatchedDebugBreakSlotSequence()));
+ HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
+ heap->mark_compact_collector()->MarkObject(code);
+ }
+
+ // Mark object pointed to by p.
+ INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
+ if (!(*p)->IsHeapObject()) return;
+ HeapObject* object = ShortCircuitConsString(p);
+ if (!object->IsMarked()) {
+ heap->mark_compact_collector()->MarkUnmarkedObject(object);
+ }
+ }
+
+
+ // Visit an unmarked object.
+ INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
+ HeapObject* obj)) {
+ #ifdef DEBUG
+ ASSERT(Isolate::Current()->heap()->Contains(obj));
+ ASSERT(!obj->IsMarked());
+ #endif
+ Map* map = obj->map();
+ collector->SetMark(obj);
+ // Mark the map pointer and the body.
+ if (!map->IsMarked()) collector->MarkUnmarkedObject(map);
+ IterateBody(map, obj);
+ }
+
+ // Visit all unmarked objects pointed to by [start, end).
+ // Returns false if the operation fails (lack of stack space).
+ static inline bool VisitUnmarkedObjects(Heap* heap,
+ Object** start,
+ Object** end) {
+ // Return false if we are close to the stack limit.
+ StackLimitCheck check(heap->isolate());
+ if (check.HasOverflowed()) return false;
+
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ // Visit the unmarked objects.
+ for (Object** p = start; p < end; p++) {
+ if (!(*p)->IsHeapObject()) continue;
+ HeapObject* obj = HeapObject::cast(*p);
+ if (obj->IsMarked()) continue;
+ VisitUnmarkedObject(collector, obj);
+ }
+ return true;
+ }
+
+ static inline void VisitExternalReference(Address* p) { }
+ static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
+
+ private:
+ class DataObjectVisitor {
+ public:
+ template<int size>
+ static void VisitSpecialized(Map* map, HeapObject* object) {
+ }
+
+ static void Visit(Map* map, HeapObject* object) {
+ }
+ };
+
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+ JSObject::BodyDescriptor,
+ void> JSObjectVisitor;
+
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
+ StructBodyDescriptor,
+ void> StructObjectVisitor;
+
+ static void VisitCode(Map* map, HeapObject* object) {
+ reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
+ map->heap());
+ }
+
+ // Code flushing support.
+
+ // How many collections a newly compiled code object will survive before
+ // being flushed.
+ static const int kCodeAgeThreshold = 5;
+
+ inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+ Object* undefined = heap->raw_unchecked_undefined_value();
+ return (info->script() != undefined) &&
+ (reinterpret_cast<Script*>(info->script())->source() != undefined);
+ }
+
+
+ inline static bool IsCompiled(JSFunction* function) {
+ return function->unchecked_code() !=
+ function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+ }
+
+ inline static bool IsCompiled(SharedFunctionInfo* function) {
+ return function->unchecked_code() !=
+ function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+ }
+
+ inline static bool IsFlushable(Heap* heap, JSFunction* function) {
+ SharedFunctionInfo* shared_info = function->unchecked_shared();
+
+ // Code is either on stack, in compilation cache or referenced
+ // by optimized version of function.
+ if (function->unchecked_code()->IsMarked()) {
+ shared_info->set_code_age(0);
+ return false;
+ }
+
+ // We do not flush code for optimized functions.
+ if (function->code() != shared_info->unchecked_code()) {
+ return false;
+ }
+
+ return IsFlushable(heap, shared_info);
+ }
+
+ inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
+ // Code is either on stack, in compilation cache or referenced
+ // by optimized version of function.
+ if (shared_info->unchecked_code()->IsMarked()) {
+ shared_info->set_code_age(0);
+ return false;
+ }
+
+ // The function must be compiled and have the source code available,
+ // to be able to recompile it in case we need the function again.
+ if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
+ return false;
+ }
+
+ // We never flush code for Api functions.
+ Object* function_data = shared_info->function_data();
+ if (function_data->IsHeapObject() &&
+ (SafeMap(function_data)->instance_type() ==
+ FUNCTION_TEMPLATE_INFO_TYPE)) {
+ return false;
+ }
+
+ // Only flush code for functions.
+ if (shared_info->code()->kind() != Code::FUNCTION) return false;
+
+ // Function must be lazy compilable.
+ if (!shared_info->allows_lazy_compilation()) return false;
+
+ // If this is a full script wrapped in a function we do not flush the code.
+ if (shared_info->is_toplevel()) return false;
+
+ // Age this shared function info.
+ if (shared_info->code_age() < kCodeAgeThreshold) {
+ shared_info->set_code_age(shared_info->code_age() + 1);
+ return false;
+ }
+
+ return true;
+ }
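+ // Illustrative timeline (not from the original source): a compiled,
+ // lazily-recompilable, non-toplevel function whose code is never found
+ // marked ages by one per full collection. With kCodeAgeThreshold == 5,
+ // IsFlushable() returns false for the first five such collections while
+ // the age climbs from 0 to 5, and true on the sixth, at which point the
+ // code can be flushed and replaced by the lazy-compile stub.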
+
+
+ static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
+ if (!IsFlushable(heap, function)) return false;
+
+ // This function's code looks flushable. But we have to postpone the
+ // decision until we see all functions that point to the same
+ // SharedFunctionInfo because some of them might be optimized.
+ // That would make the nonoptimized version of the code nonflushable,
+ // because it is required for bailing out from optimized code.
+ heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
+ return true;
+ }
+
+
+ static inline Map* SafeMap(Object* obj) {
+ MapWord map_word = HeapObject::cast(obj)->map_word();
+ map_word.ClearMark();
+ map_word.ClearOverflow();
+ return map_word.ToMap();
+ }
+
+
+ static inline bool IsJSBuiltinsObject(Object* obj) {
+ return obj->IsHeapObject() &&
+ (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE);
+ }
+
+
+ static inline bool IsValidNotBuiltinContext(Object* ctx) {
+ if (!ctx->IsHeapObject()) return false;
+
+ Map* map = SafeMap(ctx);
+ Heap* heap = map->heap();
+ if (!(map == heap->raw_unchecked_context_map() ||
+ map == heap->raw_unchecked_catch_context_map() ||
+ map == heap->raw_unchecked_global_context_map())) {
+ return false;
+ }
+
+ Context* context = reinterpret_cast<Context*>(ctx);
+
+ if (IsJSBuiltinsObject(context->global())) {
+ return false;
+ }
+
+ return true;
+ }
+
+
+ static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
+
+ if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
+
+ FixedBodyVisitor<StaticMarkingVisitor,
+ SharedFunctionInfo::BodyDescriptor,
+ void>::Visit(map, object);
+ }
+
+
+ static void VisitSharedFunctionInfoAndFlushCode(Map* map,
+ HeapObject* object) {
+ MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+ if (!collector->is_code_flushing_enabled()) {
+ VisitSharedFunctionInfoGeneric(map, object);
+ return;
+ }
+ VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
+ }
+
+
+ static void VisitSharedFunctionInfoAndFlushCodeGeneric(
+ Map* map, HeapObject* object, bool known_flush_code_candidate) {
+ Heap* heap = map->heap();
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
+
+ if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
+
+ if (!known_flush_code_candidate) {
+ known_flush_code_candidate = IsFlushable(heap, shared);
+ if (known_flush_code_candidate) {
+ heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
+ }
+ }
+
+ VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
+ }
+
+
+ static void VisitCodeEntry(Heap* heap, Address entry_address) {
+ Object* code = Code::GetObjectFromEntryAddress(entry_address);
+ Object* old_code = code;
+ VisitPointer(heap, &code);
+ if (code != old_code) {
+ Memory::Address_at(entry_address) =
+ reinterpret_cast<Code*>(code)->entry();
+ }
+ }
+
+
+ static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+ Heap* heap = map->heap();
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+ if (!collector->is_code_flushing_enabled()) {
+ VisitJSFunction(map, object);
+ return;
+ }
+
+ JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
+ // The function must have a valid context and not be a builtin.
+ bool flush_code_candidate = false;
+ if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
+ flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
+ }
+
+ if (!flush_code_candidate) {
+ collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());
+
+ if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
+ // For optimized functions we should retain both the non-optimized
+ // version of its code and the non-optimized version of all inlined
+ // functions. This is required to support bailing out from inlined code.
+ DeoptimizationInputData* data =
+ reinterpret_cast<DeoptimizationInputData*>(
+ jsfunction->unchecked_code()->unchecked_deoptimization_data());
+
+ FixedArray* literals = data->UncheckedLiteralArray();
+
+ for (int i = 0, count = data->InlinedFunctionCount()->value();
+ i < count;
+ i++) {
+ JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
+ collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
+ }
+ }
+ }
+
+ VisitJSFunctionFields(map,
+ reinterpret_cast<JSFunction*>(object),
+ flush_code_candidate);
+ }
+
+
+ static void VisitJSFunction(Map* map, HeapObject* object) {
+ VisitJSFunctionFields(map,
+ reinterpret_cast<JSFunction*>(object),
+ false);
+ }
+
+
+ #define SLOT_ADDR(obj, offset) \
+ reinterpret_cast<Object**>((obj)->address() + offset)
+
+
+ static inline void VisitJSFunctionFields(Map* map,
+ JSFunction* object,
+ bool flush_code_candidate) {
+ Heap* heap = map->heap();
+ MarkCompactCollector* collector = heap->mark_compact_collector();
+
+ VisitPointers(heap,
+ SLOT_ADDR(object, JSFunction::kPropertiesOffset),
+ SLOT_ADDR(object, JSFunction::kCodeEntryOffset));
+
+ if (!flush_code_candidate) {
+ VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+ } else {
+ // Don't visit code object.
+
+ // Visit shared function info to avoid double checking of its
+ // flushability.
+ SharedFunctionInfo* shared_info = object->unchecked_shared();
+ if (!shared_info->IsMarked()) {
+ Map* shared_info_map = shared_info->map();
+ collector->SetMark(shared_info);
+ collector->MarkObject(shared_info_map);
+ VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
+ shared_info,
+ true);
+ }
+ }
+
+ VisitPointers(heap,
+ SLOT_ADDR(object,
+ JSFunction::kCodeEntryOffset + kPointerSize),
+ SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));
+
+ // Don't visit the next function list field as it is a weak reference.
+ }
+
+
+ static void VisitSharedFunctionInfoFields(Heap* heap,
+ HeapObject* object,
+ bool flush_code_candidate) {
+ VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
+
+ if (!flush_code_candidate) {
+ VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
+ }
+
+ VisitPointers(heap,
+ SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
+ SLOT_ADDR(object, SharedFunctionInfo::kSize));
+ }
+
+ #undef SLOT_ADDR
+
+ typedef void (*Callback)(Map* map, HeapObject* object);
+
+ static VisitorDispatchTable<Callback> table_;
+ };
+
+
+ VisitorDispatchTable<StaticMarkingVisitor::Callback>
+ StaticMarkingVisitor::table_;
+
+
+ class MarkingVisitor : public ObjectVisitor {
+ public:
+ explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
+
+ void VisitPointer(Object** p) {
+ StaticMarkingVisitor::VisitPointer(heap_, p);
+ }
+
+ void VisitPointers(Object** start, Object** end) {
+ StaticMarkingVisitor::VisitPointers(heap_, start, end);
+ }
+
+ void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
+ StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
+ }
+
+ void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
+ StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
+ }
+
+ void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
+ StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
+ }
+
+ private:
+ Heap* heap_;
+ };
+
+
+ class CodeMarkingVisitor : public ThreadVisitor {
+ public:
+ explicit CodeMarkingVisitor(MarkCompactCollector* collector)
+ : collector_(collector) {}
+
+ void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
+ for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+ collector_->MarkObject(it.frame()->unchecked_code());
+ }
+ }
+
+ private:
+ MarkCompactCollector* collector_;
+ };
+
+
+ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
+ public:
+ explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
+ : collector_(collector) {}
+
+ void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) VisitPointer(p);
+ }
+
+ void VisitPointer(Object** slot) {
+ Object* obj = *slot;
+ if (obj->IsSharedFunctionInfo()) {
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
+ collector_->MarkObject(shared->unchecked_code());
+ collector_->MarkObject(shared);
+ }
+ }
+
+ private:
+ MarkCompactCollector* collector_;
+ };
+
+
+ void MarkCompactCollector::PrepareForCodeFlushing() {
+ ASSERT(heap() == Isolate::Current()->heap());
+
+ if (!FLAG_flush_code) {
+ EnableCodeFlushing(false);
+ return;
+ }
+
+ #ifdef ENABLE_DEBUGGER_SUPPORT
+ if (heap()->isolate()->debug()->IsLoaded() ||
+ heap()->isolate()->debug()->has_break_points()) {
+ EnableCodeFlushing(false);
+ return;
+ }
+ #endif
+ EnableCodeFlushing(true);
+
+ // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
+ // relies on it being marked before any other descriptor array.
+ MarkObject(heap()->raw_unchecked_empty_descriptor_array());
+
+ // Make sure we are not referencing the code from the stack.
+ ASSERT(this == heap()->mark_compact_collector());
+ for (StackFrameIterator it; !it.done(); it.Advance()) {
+ MarkObject(it.frame()->unchecked_code());
+ }
+
+ // Iterate the archived stacks in all threads to check if
+ // the code is referenced.
+ CodeMarkingVisitor code_marking_visitor(this);
+ heap()->isolate()->thread_manager()->IterateArchivedThreads(
+ &code_marking_visitor);
+
+ SharedFunctionInfoMarkingVisitor visitor(this);
+ heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
+ heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
+
+ ProcessMarkingStack();
+ }
+
+
+ // Visitor class for marking heap roots.
+ class RootMarkingVisitor : public ObjectVisitor {
+ public:
+ explicit RootMarkingVisitor(Heap* heap)
+ : collector_(heap->mark_compact_collector()) { }
+
+ void VisitPointer(Object** p) {
+ MarkObjectByPointer(p);
+ }
+
+ void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
+ }
+
+ private:
+ void MarkObjectByPointer(Object** p) {
+ if (!(*p)->IsHeapObject()) return;
+
+ // Replace flat cons strings in place.
+ HeapObject* object = ShortCircuitConsString(p);
+ if (object->IsMarked()) return;
+
+ Map* map = object->map();
+ // Mark the object.
+ collector_->SetMark(object);
+
+ // Mark the map pointer and body, and push them on the marking stack.
+ collector_->MarkObject(map);
+ StaticMarkingVisitor::IterateBody(map, object);
+
+ // Mark all the objects reachable from the map and body. May leave
+ // overflowed objects in the heap.
+ collector_->EmptyMarkingStack();
+ }
+
+ MarkCompactCollector* collector_;
+ };
+
+
+ // Helper class for pruning the symbol table.
+ class SymbolTableCleaner : public ObjectVisitor {
+ public:
+ explicit SymbolTableCleaner(Heap* heap)
+ : heap_(heap), pointers_removed_(0) { }
+
+ virtual void VisitPointers(Object** start, Object** end) {
+ // Visit all HeapObject pointers in [start, end).
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
+ // Check if the symbol being pruned is an external symbol. We need to
+ // delete the associated external data as this symbol is going away.
+
+ // Since no objects have yet been moved we can safely access the map of
+ // the object.
+ if ((*p)->IsExternalString()) {
+ heap_->FinalizeExternalString(String::cast(*p));
+ }
+ // Set the entry to null_value (as deleted).
+ *p = heap_->raw_unchecked_null_value();
+ pointers_removed_++;
+ }
+ }
+ }
+
+ int PointersRemoved() {
+ return pointers_removed_;
+ }
+ private:
+ Heap* heap_;
+ int pointers_removed_;
+ };
+
+
+ // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
+ // are retained.
+ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+ virtual Object* RetainAs(Object* object) {
+ MapWord first_word = HeapObject::cast(object)->map_word();
+ if (first_word.IsMarked()) {
+ return object;
+ } else {
+ return NULL;
+ }
+ }
+ };
+
+
+ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
+ ASSERT(!object->IsMarked());
+ ASSERT(HEAP->Contains(object));
+ if (object->IsMap()) {
+ Map* map = Map::cast(object);
+ if (FLAG_cleanup_code_caches_at_gc) {
+ map->ClearCodeCache(heap());
+ }
+ SetMark(map);
+ if (FLAG_collect_maps &&
+ map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
+ map->instance_type() <= JS_FUNCTION_TYPE) {
+ MarkMapContents(map);
+ } else {
+ marking_stack_.Push(map);
+ }
+ } else {
+ SetMark(object);
+ marking_stack_.Push(object);
+ }
+ }
+
+
+ void MarkCompactCollector::MarkMapContents(Map* map) {
+ // Mark prototype transitions array but don't push it into marking stack.
+ // This will make references from it weak. We will clean dead prototype
+ // transitions in ClearNonLiveTransitions.
+ FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
+ if (!prototype_transitions->IsMarked()) SetMark(prototype_transitions);
+
+ Object* raw_descriptor_array =
+ *HeapObject::RawField(map,
+ Map::kInstanceDescriptorsOrBitField3Offset);
+ if (!raw_descriptor_array->IsSmi()) {
+ MarkDescriptorArray(
+ reinterpret_cast<DescriptorArray*>(raw_descriptor_array));
+ }
+
+ // Mark the Object* fields of the Map.
+ // Since the descriptor array has been marked already, it is fine
+ // that one of these fields contains a pointer to it.
+ Object** start_slot = HeapObject::RawField(map,
+ Map::kPointerFieldsBeginOffset);
+
+ Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
+
+ StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
+ }
+
+
+ void MarkCompactCollector::MarkDescriptorArray(
+ DescriptorArray* descriptors) {
+ if (descriptors->IsMarked()) return;
+ // Empty descriptor array is marked as a root before any maps are marked.
+ ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
+ SetMark(descriptors);
+
+ FixedArray* contents = reinterpret_cast<FixedArray*>(
+ descriptors->get(DescriptorArray::kContentArrayIndex));
+ ASSERT(contents->IsHeapObject());
+ ASSERT(!contents->IsMarked());
+ ASSERT(contents->IsFixedArray());
+ ASSERT(contents->length() >= 2);
+ SetMark(contents);
+ // Contents contains (value, details) pairs. If the details say that the type
+ // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
+ // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
+ // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
+ // CONSTANT_TRANSITION is the value an Object* (a Map*).
+ for (int i = 0; i < contents->length(); i += 2) {
+ // If the pair (value, details) at index i, i+1 is not
+ // a transition or null descriptor, mark the value.
+ PropertyDetails details(Smi::cast(contents->get(i + 1)));
+ if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
+ HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
+ if (object->IsHeapObject() && !object->IsMarked()) {
+ SetMark(object);
+ marking_stack_.Push(object);
+ }
+ }
+ }
+ // The DescriptorArray descriptors contains a pointer to its contents array,
+ // but the contents array is already marked.
+ marking_stack_.Push(descriptors);
+ }
+
+
+ void MarkCompactCollector::CreateBackPointers() {
+ HeapObjectIterator iterator(heap()->map_space());
+ for (HeapObject* next_object = iterator.next();
+ next_object != NULL; next_object = iterator.next()) {
+ if (next_object->IsMap()) { // Could also be ByteArray on free list.
+ Map* map = Map::cast(next_object);
+ if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
+ map->instance_type() <= JS_FUNCTION_TYPE) {
+ map->CreateBackPointers();
+ } else {
+ ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
+ }
+ }
+ }
+ }
+
+
+ static int OverflowObjectSize(HeapObject* obj) {
+ // Recover the normal map pointer, it might be marked as live and
+ // overflowed.
+ MapWord map_word = obj->map_word();
+ map_word.ClearMark();
+ map_word.ClearOverflow();
+ return obj->SizeFromMap(map_word.ToMap());
+ }
+
+
+ class OverflowedObjectsScanner : public AllStatic {
+ public:
+ // Fill the marking stack with overflowed objects returned by the given
+ // iterator. Stop when the marking stack is filled or the end of the space
+ // is reached, whichever comes first.
+ template<class T>
+ static inline void ScanOverflowedObjects(MarkCompactCollector* collector,
+ T* it) {
+ // The caller should ensure that the marking stack is initially not full,
+ // so that we don't waste effort pointlessly scanning for objects.
+ ASSERT(!collector->marking_stack_.is_full());
+
+ for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
+ if (object->IsOverflowed()) {
+ object->ClearOverflow();
+ ASSERT(object->IsMarked());
+ ASSERT(HEAP->Contains(object));
+ collector->marking_stack_.Push(object);
+ if (collector->marking_stack_.is_full()) return;
+ }
+ }
+ }
+ };
+
+
+ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
+ return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
+ }
+
+
+ void MarkCompactCollector::MarkSymbolTable() {
+ SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
+ // Mark the symbol table itself.
+ SetMark(symbol_table);
+ // Explicitly mark the prefix.
+ MarkingVisitor marker(heap());
+ symbol_table->IteratePrefix(&marker);
+ ProcessMarkingStack();
+ }
+
+
+ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
+ // Mark the heap roots including global variables, stack variables,
+ // etc., and all objects reachable from them.
+ heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
+
+ // Handle the symbol table specially.
+ MarkSymbolTable();
+
+ // There may be overflowed objects in the heap. Visit them now.
+ while (marking_stack_.overflowed()) {
+ RefillMarkingStack();
+ EmptyMarkingStack();
+ }
+ }
+
+
+ void MarkCompactCollector::MarkObjectGroups() {
+ List<ObjectGroup*>* object_groups =
+ heap()->isolate()->global_handles()->object_groups();
+
+ int last = 0;
+ for (int i = 0; i < object_groups->length(); i++) {
+ ObjectGroup* entry = object_groups->at(i);
+ ASSERT(entry != NULL);
+
+ Object*** objects = entry->objects_;
+ bool group_marked = false;
+ for (size_t j = 0; j < entry->length_; j++) {
+ Object* object = *objects[j];
+ if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
+ group_marked = true;
+ break;
+ }
+ }
+
+ if (!group_marked) {
+ (*object_groups)[last++] = entry;
+ continue;
+ }
+
+ // An object in the group is marked, so mark all heap objects in
+ // the group.
+ for (size_t j = 0; j < entry->length_; ++j) {
+ if ((*objects[j])->IsHeapObject()) {
+ MarkObject(HeapObject::cast(*objects[j]));
+ }
+ }
+
+ // Once the entire group has been marked, dispose it because it's
+ // not needed anymore.
+ entry->Dispose();
+ }
+ object_groups->Rewind(last);
+ }
+
+
+ void MarkCompactCollector::MarkImplicitRefGroups() {
+ List<ImplicitRefGroup*>* ref_groups =
+ heap()->isolate()->global_handles()->implicit_ref_groups();
+
+ int last = 0;
+ for (int i = 0; i < ref_groups->length(); i++) {
+ ImplicitRefGroup* entry = ref_groups->at(i);
+ ASSERT(entry != NULL);
+
+ if (!(*entry->parent_)->IsMarked()) {
+ (*ref_groups)[last++] = entry;
+ continue;
+ }
+
+ Object*** children = entry->children_;
+ // A parent object is marked, so mark all child heap objects.
+ for (size_t j = 0; j < entry->length_; ++j) {
+ if ((*children[j])->IsHeapObject()) {
+ MarkObject(HeapObject::cast(*children[j]));
+ }
+ }
+
+ // Once the entire group has been marked, dispose it because it's
+ // not needed anymore.
+ entry->Dispose();
+ }
+ ref_groups->Rewind(last);
+ }
+
+
+ // Mark all objects reachable from the objects on the marking stack.
+ // Before: the marking stack contains zero or more heap object pointers.
+ // After: the marking stack is empty, and all objects reachable from the
+ // marking stack have been marked, or are overflowed in the heap.
+ void MarkCompactCollector::EmptyMarkingStack() {
+ while (!marking_stack_.is_empty()) {
+ HeapObject* object = marking_stack_.Pop();
+ ASSERT(object->IsHeapObject());
+ ASSERT(heap()->Contains(object));
+ ASSERT(object->IsMarked());
+ ASSERT(!object->IsOverflowed());
+
+ // Because the object is marked, we have to recover the original map
+ // pointer and use it to mark the object's body.
+ MapWord map_word = object->map_word();
+ map_word.ClearMark();
+ Map* map = map_word.ToMap();
+ MarkObject(map);
+
+ StaticMarkingVisitor::IterateBody(map, object);
+ }
+ }
+
+
+ // Sweep the heap for overflowed objects, clear their overflow bits, and
+ // push them on the marking stack. Stop early if the marking stack fills
+ // before sweeping completes. If sweeping completes, there are no remaining
+ // overflowed objects in the heap so the overflow flag on the marking stack
+ // is cleared.
+ void MarkCompactCollector::RefillMarkingStack() {
+ ASSERT(marking_stack_.overflowed());
+
+ SemiSpaceIterator new_it(heap()->new_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it);
+ if (marking_stack_.is_full()) return;
+
+ HeapObjectIterator old_pointer_it(heap()->old_pointer_space(),
+ &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it);
+ if (marking_stack_.is_full()) return;
+
+ HeapObjectIterator old_data_it(heap()->old_data_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it);
+ if (marking_stack_.is_full()) return;
+
+ HeapObjectIterator code_it(heap()->code_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it);
+ if (marking_stack_.is_full()) return;
+
+ HeapObjectIterator map_it(heap()->map_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it);
+ if (marking_stack_.is_full()) return;
+
+ HeapObjectIterator cell_it(heap()->cell_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it);
+ if (marking_stack_.is_full()) return;
+
+ LargeObjectIterator lo_it(heap()->lo_space(), &OverflowObjectSize);
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it);
+ if (marking_stack_.is_full()) return;
+
+ marking_stack_.clear_overflowed();
+ }
+
+
+ // Mark all objects reachable (transitively) from objects on the marking
+ // stack. Before: the marking stack contains zero or more heap object
+ // pointers. After: the marking stack is empty and there are no overflowed
+ // objects in the heap.
+ void MarkCompactCollector::ProcessMarkingStack() {
+ EmptyMarkingStack();
+ while (marking_stack_.overflowed()) {
+ RefillMarkingStack();
+ EmptyMarkingStack();
+ }
+ }
+
+
+ void MarkCompactCollector::ProcessExternalMarking() {
+ bool work_to_do = true;
+ ASSERT(marking_stack_.is_empty());
+ while (work_to_do) {
+ MarkObjectGroups();
+ MarkImplicitRefGroups();
+ work_to_do = !marking_stack_.is_empty();
+ ProcessMarkingStack();
+ }
+ }
+
+
+ void MarkCompactCollector::MarkLiveObjects() {
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
+ // The recursive GC marker detects when it is nearing stack overflow,
+ // and switches to a different marking system. JS interrupts interfere
+ // with the C stack limit check.
+ PostponeInterruptsScope postpone(heap()->isolate());
+
+ #ifdef DEBUG
+ ASSERT(state_ == PREPARE_GC);
+ state_ = MARK_LIVE_OBJECTS;
+ #endif
+ // The to space contains live objects, the from space is used as a marking
+ // stack.
+ marking_stack_.Initialize(heap()->new_space()->FromSpaceLow(),
+ heap()->new_space()->FromSpaceHigh());
+
+ ASSERT(!marking_stack_.overflowed());
+
+ PrepareForCodeFlushing();
+
+ RootMarkingVisitor root_visitor(heap());
+ MarkRoots(&root_visitor);
+
+ // The objects reachable from the roots are marked, yet unreachable
+ // objects are unmarked. Mark objects reachable due to host
+ // application specific logic.
+ ProcessExternalMarking();
+
+ // The objects reachable from the roots or object groups are marked,
+ // yet unreachable objects are unmarked. Mark objects reachable
+ // only from weak global handles.
+ //
+ // First we identify nonlive weak handles and mark them as pending
+ // destruction.
+ heap()->isolate()->global_handles()->IdentifyWeakHandles(
+ &IsUnmarkedHeapObject);
+ // Then we mark the objects and process the transitive closure.
+ heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
+ while (marking_stack_.overflowed()) {
+ RefillMarkingStack();
+ EmptyMarkingStack();
+ }
+
+ // Repeat host application specific marking to mark unmarked objects
+ // reachable from the weak roots.
+ ProcessExternalMarking();
+
+ // Prune the symbol table removing all symbols only pointed to by the
+ // symbol table. Cannot use symbol_table() here because the symbol
+ // table is marked.
+ SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
+ SymbolTableCleaner v(heap());
+ symbol_table->IterateElements(&v);
+ symbol_table->ElementsRemoved(v.PointersRemoved());
+ heap()->external_string_table_.Iterate(&v);
+ heap()->external_string_table_.CleanUp();
+
+ // Process the weak references.
+ MarkCompactWeakObjectRetainer mark_compact_object_retainer;
+ heap()->ProcessWeakReferences(&mark_compact_object_retainer);
+
+ // Remove object groups after marking phase.
+ heap()->isolate()->global_handles()->RemoveObjectGroups();
+ heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
+
+ // Flush code from collected candidates.
+ if (is_code_flushing_enabled()) {
+ code_flusher_->ProcessCandidates();
+ }
+
+ // Clean up dead objects from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ }
+
+
+ #ifdef DEBUG
+ void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
+ live_bytes_ += obj->Size();
+ if (heap()->new_space()->Contains(obj)) {
+ live_young_objects_size_ += obj->Size();
+ } else if (heap()->map_space()->Contains(obj)) {
+ ASSERT(obj->IsMap());
+ live_map_objects_size_ += obj->Size();
+ } else if (heap()->cell_space()->Contains(obj)) {
+ ASSERT(obj->IsJSGlobalPropertyCell());
+ live_cell_objects_size_ += obj->Size();
+ } else if (heap()->old_pointer_space()->Contains(obj)) {
+ live_old_pointer_objects_size_ += obj->Size();
+ } else if (heap()->old_data_space()->Contains(obj)) {
+ live_old_data_objects_size_ += obj->Size();
+ } else if (heap()->code_space()->Contains(obj)) {
+ live_code_objects_size_ += obj->Size();
+ } else if (heap()->lo_space()->Contains(obj)) {
+ live_lo_objects_size_ += obj->Size();
+ } else {
+ UNREACHABLE();
+ }
+ }
+ #endif // DEBUG
+
+
+ void MarkCompactCollector::SweepLargeObjectSpace() {
+ #ifdef DEBUG
+ ASSERT(state_ == MARK_LIVE_OBJECTS);
+ state_ =
+ compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
+ #endif
+ // Deallocate unmarked objects and clear marked bits for marked objects.
+ heap()->lo_space()->FreeUnmarkedObjects();
+ }
+
+
+ // Safe to use during marking phase only.
+ bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
+ MapWord metamap = object->map_word();
+ metamap.ClearMark();
+ return metamap.ToMap()->instance_type() == MAP_TYPE;
+ }
+
+
+ void MarkCompactCollector::ClearNonLiveTransitions() {
+ HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
+ // Iterate over the map space, setting map transitions that go from
+ // a marked map to an unmarked map to null transitions. At the same time,
+ // set all the prototype fields of maps back to their original value,
+ // dropping the back pointers temporarily stored in the prototype field.
+ // Setting the prototype field requires following the linked list of
+ // back pointers, reversing them all at once. This allows us to find
+ // those maps with map transitions that need to be nulled, and only
+ // scan the descriptor arrays of those maps, not all maps.
+ // All of these actions are carried out only on maps of JSObjects
+ // and related subtypes.
+ for (HeapObject* obj = map_iterator.next();
+ obj != NULL; obj = map_iterator.next()) {
+ Map* map = reinterpret_cast<Map*>(obj);
+ if (!map->IsMarked() && map->IsByteArray()) continue;
+
+ ASSERT(SafeIsMap(map));
+ // Only JSObject and subtypes have map transitions and back pointers.
+ if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
+ if (map->instance_type() > JS_FUNCTION_TYPE) continue;
+
+ if (map->IsMarked() && map->attached_to_shared_function_info()) {
+ // This map is used for inobject slack tracking and has been detached
+ // from SharedFunctionInfo during the mark phase.
+ // Since it survived the GC, reattach it now.
+ map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
+ }
+
+ // Clear dead prototype transitions.
+ FixedArray* prototype_transitions = map->unchecked_prototype_transitions();
+ if (prototype_transitions->length() > 0) {
+ int finger = Smi::cast(prototype_transitions->get(0))->value();
+ int new_finger = 1;
+ for (int i = 1; i < finger; i += 2) {
+ Object* prototype = prototype_transitions->get(i);
+ Object* cached_map = prototype_transitions->get(i + 1);
+ if (HeapObject::cast(prototype)->IsMarked() &&
+ HeapObject::cast(cached_map)->IsMarked()) {
+ if (new_finger != i) {
+ prototype_transitions->set_unchecked(heap_,
+ new_finger,
+ prototype,
+ UPDATE_WRITE_BARRIER);
+ prototype_transitions->set_unchecked(heap_,
+ new_finger + 1,
+ cached_map,
+ SKIP_WRITE_BARRIER);
+ }
+ new_finger += 2;
+ }
+ }
+
+ // Fill slots that became free with undefined value.
+ Object* undefined = heap()->raw_unchecked_undefined_value();
+ for (int i = new_finger; i < finger; i++) {
+ prototype_transitions->set_unchecked(heap_,
+ i,
+ undefined,
+ SKIP_WRITE_BARRIER);
+ }
+ prototype_transitions->set_unchecked(0, Smi::FromInt(new_finger));
+ }
+
+ // Follow the chain of back pointers to find the prototype.
+ Map* current = map;
+ while (SafeIsMap(current)) {
+ current = reinterpret_cast<Map*>(current->prototype());
+ ASSERT(current->IsHeapObject());
+ }
+ Object* real_prototype = current;
+
+ // Follow back pointers, setting them to prototype,
+ // clearing map transitions when necessary.
+ current = map;
+ bool on_dead_path = !current->IsMarked();
+ Object* next;
+ while (SafeIsMap(current)) {
+ next = current->prototype();
+ // There should never be a dead map above a live map.
+ ASSERT(on_dead_path || current->IsMarked());
+
+ // A live map above a dead map indicates a dead transition.
+ // This test will always be false on the first iteration.
+ if (on_dead_path && current->IsMarked()) {
+ on_dead_path = false;
+ current->ClearNonLiveTransitions(heap(), real_prototype);
+ }
+ *HeapObject::RawField(current, Map::kPrototypeOffset) =
+ real_prototype;
+ current = reinterpret_cast<Map*>(next);
+ }
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Phase 2: Encode forwarding addresses.
+ // When compacting, forwarding addresses for objects in old space and map
+ // space are encoded in their map pointer word (along with an encoding of
+ // their map pointers).
+ //
+ // The exact encoding is described in the comments for class MapWord in
+ // objects.h.
+ //
+ // An address range [start, end) can have both live and non-live objects.
+ // Maximal non-live regions are marked so they can be skipped on subsequent
+ // sweeps of the heap. A distinguished map-pointer encoding is used to mark
+ // free regions of one-word size (in which case the next word is the start
+ // of a live object). A second distinguished map-pointer encoding is used
+ // to mark free regions larger than one word, and the size of the free
+ // region (including the first word) is written to the second word of the
+ // region.
+ //
+ // Any valid map page offset must lie in the object area of the page, so map
+ // page offsets less than Page::kObjectStartOffset are invalid. We use a
+ // pair of distinguished invalid map encodings (for single word and multiple
+ // words) to indicate free regions in the page found during computation of
+ // forwarding addresses and skipped over in subsequent sweeps.
+
+
+ // Encode a free region, defined by the given start address and size, in the
+ // first word or two of the region.
+ void EncodeFreeRegion(Address free_start, int free_size) {
+ ASSERT(free_size >= kIntSize);
+ if (free_size == kIntSize) {
+ Memory::uint32_at(free_start) = MarkCompactCollector::kSingleFreeEncoding;
+ } else {
+ ASSERT(free_size >= 2 * kIntSize);
+ Memory::uint32_at(free_start) = MarkCompactCollector::kMultiFreeEncoding;
+ Memory::int_at(free_start + kIntSize) = free_size;
+ }
+
+ #ifdef DEBUG
+ // Zap the body of the free region.
+ if (FLAG_enable_slow_asserts) {
+ for (int offset = 2 * kIntSize;
+ offset < free_size;
+ offset += kPointerSize) {
+ Memory::Address_at(free_start + offset) = kZapValue;
+ }
+ }
+ #endif
+ }
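+ // Illustrative example (not from the original source), assuming 32-bit
+ // words (kIntSize == 4): a 4-byte hole is encoded as one word holding
+ // kSingleFreeEncoding; a 24-byte hole stores kMultiFreeEncoding in its
+ // first word and the size, 24, in its second word, letting later sweeps
+ // skip the whole region in one step.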
1648
+
1649
+
1650
+ // Try to promote all objects in new space. Heap numbers and sequential
1651
+ // strings are promoted to the code space, large objects to large object space,
1652
+ // and all others to the old space.
1653
+ inline MaybeObject* MCAllocateFromNewSpace(Heap* heap,
1654
+ HeapObject* object,
1655
+ int object_size) {
1656
+ MaybeObject* forwarded;
1657
+ if (object_size > heap->MaxObjectSizeInPagedSpace()) {
1658
+ forwarded = Failure::Exception();
1659
+ } else {
1660
+ OldSpace* target_space = heap->TargetSpace(object);
1661
+ ASSERT(target_space == heap->old_pointer_space() ||
1662
+ target_space == heap->old_data_space());
1663
+ forwarded = target_space->MCAllocateRaw(object_size);
1664
+ }
1665
+ Object* result;
1666
+ if (!forwarded->ToObject(&result)) {
1667
+ result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
1668
+ }
1669
+ return result;
1670
+ }
1671
+
1672
+
1673
+ // Allocation functions for the paged spaces call the space's MCAllocateRaw.
1674
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
1675
+ Heap *heap,
1676
+ HeapObject* ignore,
1677
+ int object_size) {
1678
+ return heap->old_pointer_space()->MCAllocateRaw(object_size);
1679
+ }
1680
+
1681
+
1682
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
1683
+ Heap* heap,
1684
+ HeapObject* ignore,
1685
+ int object_size) {
1686
+ return heap->old_data_space()->MCAllocateRaw(object_size);
1687
+ }
1688
+
1689
+
1690
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
1691
+ Heap* heap,
1692
+ HeapObject* ignore,
1693
+ int object_size) {
1694
+ return heap->code_space()->MCAllocateRaw(object_size);
1695
+ }
1696
+
1697
+
1698
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
1699
+ Heap* heap,
1700
+ HeapObject* ignore,
1701
+ int object_size) {
1702
+ return heap->map_space()->MCAllocateRaw(object_size);
1703
+ }
1704
+
1705
+
1706
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(
1707
+ Heap* heap, HeapObject* ignore, int object_size) {
1708
+ return heap->cell_space()->MCAllocateRaw(object_size);
1709
+ }
1710
+
1711
+
1712
+ // The forwarding address is encoded at the same offset as the current
1713
+ // to-space object, but in from space.
1714
+ inline void EncodeForwardingAddressInNewSpace(Heap* heap,
1715
+ HeapObject* old_object,
1716
+ int object_size,
1717
+ Object* new_object,
1718
+ int* ignored) {
1719
+ int offset =
1720
+ heap->new_space()->ToSpaceOffsetForAddress(old_object->address());
1721
+ Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) =
1722
+ HeapObject::cast(new_object)->address();
1723
+ }
1724
+
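A sketch of the mirror-slot idea, assuming two equally sized semispaces: because from space is unused while to space is being evacuated, the forwarding address for a to-space object can live at the same offset in from space, leaving the object itself untouched. All addresses below are toy values.

#include <cstddef>
#include <cstdio>

int main() {
  alignas(alignof(char*)) char to_space[64];
  alignas(alignof(char*)) char from_space[64];
  char* const kNewHome = to_space + 48;  // pretend relocation target

  char* object = to_space + 16;          // a live to-space object
  ptrdiff_t offset = object - to_space;

  // Record the forwarding address in the mirror slot in from space.
  *reinterpret_cast<char**>(from_space + offset) = kNewHome;

  // Later, a pointer to the object is redirected through the mirror slot.
  char* forwarded = *reinterpret_cast<char**>(from_space + offset);
  printf("to-space offset %td forwards to %p\n", offset, (void*)forwarded);
  return 0;
}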
1725
+
1726
+ // The forwarding address is encoded in the map pointer of the object as an
1727
+ // offset (in terms of live bytes) from the address of the first live object
1728
+ // in the page.
1729
+ inline void EncodeForwardingAddressInPagedSpace(Heap* heap,
1730
+ HeapObject* old_object,
1731
+ int object_size,
1732
+ Object* new_object,
1733
+ int* offset) {
1734
+ // Record the forwarding address of the first live object if necessary.
1735
+ if (*offset == 0) {
1736
+ Page::FromAddress(old_object->address())->mc_first_forwarded =
1737
+ HeapObject::cast(new_object)->address();
1738
+ }
1739
+
1740
+ MapWord encoding =
1741
+ MapWord::EncodeAddress(old_object->map()->address(), *offset);
1742
+ old_object->set_map_word(encoding);
1743
+ *offset += object_size;
1744
+ ASSERT(*offset <= Page::kObjectAreaSize);
1745
+ }
1746
+
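The per-page scheme above stores one full forwarding address (mc_first_forwarded) and, in each live object's map word, only that object's live-byte offset from the first live object. A minimal sketch under that assumption follows; the Page struct and Forwarded helper are hypothetical stand-ins, and the page-crossing case is handled later by GetForwardingAddressInOldSpace.

#include <cstdio>

struct Page {
  char* mc_first_forwarded;  // where the page's first live object will land
};

// Forwarding address of an object encoded as `offset` live bytes from the
// first live object of its page (same-page case only).
char* Forwarded(const Page& p, int offset) {
  return p.mc_first_forwarded + offset;
}

int main() {
  static char target[256];
  Page p = {target};
  // Three live objects of sizes 16, 24, and 8 accumulate offsets 0, 16, 40.
  for (int offset : {0, 16, 40}) {
    printf("live-byte offset %d -> %p\n", offset, (void*)Forwarded(p, offset));
  }
  return 0;
}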
1747
+
1748
+ // Most non-live objects are ignored.
1749
+ inline void IgnoreNonLiveObject(HeapObject* object, Isolate* isolate) {}
1750
+
1751
+
1752
+ // Function template that, given a range of addresses (e.g., a semispace or a
1753
+ // paged space page), iterates through the objects in the range to clear
1754
+ // mark bits and compute and encode forwarding addresses. As a side effect,
1755
+ // maximal free chunks are marked so that they can be skipped on subsequent
1756
+ // sweeps.
1757
+ //
1758
+ // The template parameters are an allocation function, a forwarding address
1759
+ // encoding function, and a function to process non-live objects.
1760
+ template<MarkCompactCollector::AllocationFunction Alloc,
1761
+ MarkCompactCollector::EncodingFunction Encode,
1762
+ MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1763
+ inline void EncodeForwardingAddressesInRange(MarkCompactCollector* collector,
1764
+ Address start,
1765
+ Address end,
1766
+ int* offset) {
1767
+ // The start address of the current free region while sweeping the space.
1768
+ // This address is set when a transition from live to non-live objects is
1769
+ // encountered. A value (the free-region encoding described above)
1770
+ // is written to memory at this address when a transition from non-live to
1771
+ // live objects is encountered.
1772
+ Address free_start = NULL;
1773
+
1774
+ // A flag giving the state of the previously swept object. Initially true
1775
+ // to ensure that free_start is initialized to a proper address before
1776
+ // trying to write to it.
1777
+ bool is_prev_alive = true;
1778
+
1779
+ int object_size; // Will be set on each iteration of the loop.
1780
+ for (Address current = start; current < end; current += object_size) {
1781
+ HeapObject* object = HeapObject::FromAddress(current);
1782
+ if (object->IsMarked()) {
1783
+ object->ClearMark();
1784
+ collector->tracer()->decrement_marked_count();
1785
+ object_size = object->Size();
1786
+
1787
+ Object* forwarded =
1788
+ Alloc(collector->heap(), object, object_size)->ToObjectUnchecked();
1789
+ Encode(collector->heap(), object, object_size, forwarded, offset);
1790
+
1791
+ #ifdef DEBUG
1792
+ if (FLAG_gc_verbose) {
1793
+ PrintF("forward %p -> %p.\n", object->address(),
1794
+ HeapObject::cast(forwarded)->address());
1795
+ }
1796
+ #endif
1797
+ if (!is_prev_alive) { // Transition from non-live to live.
1798
+ EncodeFreeRegion(free_start, static_cast<int>(current - free_start));
1799
+ is_prev_alive = true;
1800
+ }
1801
+ } else { // Non-live object.
1802
+ object_size = object->Size();
1803
+ ProcessNonLive(object, collector->heap()->isolate());
1804
+ if (is_prev_alive) { // Transition from live to non-live.
1805
+ free_start = current;
1806
+ is_prev_alive = false;
1807
+ }
1808
+ LiveObjectList::ProcessNonLive(object);
1809
+ }
1810
+ }
1811
+
1812
+ // If we ended on a free region, mark it.
1813
+ if (!is_prev_alive) {
1814
+ EncodeFreeRegion(free_start, static_cast<int>(end - free_start));
1815
+ }
1816
+ }
1817
+
1818
+
1819
+ // Functions to encode the forwarding pointers in each compactable space.
1820
+ void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
1821
+ int ignored;
1822
+ EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
1823
+ EncodeForwardingAddressInNewSpace,
1824
+ IgnoreNonLiveObject>(
1825
+ this,
1826
+ heap()->new_space()->bottom(),
1827
+ heap()->new_space()->top(),
1828
+ &ignored);
1829
+ }
1830
+
1831
+
1832
+ template<MarkCompactCollector::AllocationFunction Alloc,
1833
+ MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1834
+ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
1835
+ PagedSpace* space) {
1836
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
1837
+ while (it.has_next()) {
1838
+ Page* p = it.next();
1839
+
1840
+ // The offset of each live object in the page from the first live object
1841
+ // in the page.
1842
+ int offset = 0;
1843
+ EncodeForwardingAddressesInRange<Alloc,
1844
+ EncodeForwardingAddressInPagedSpace,
1845
+ ProcessNonLive>(
1846
+ this,
1847
+ p->ObjectAreaStart(),
1848
+ p->AllocationTop(),
1849
+ &offset);
1850
+ }
1851
+ }
1852
+
1853
+
1854
+ // We scavenge new space simultaneously with sweeping. This is done in two
1855
+ // passes.
1856
+ // The first pass migrates all alive objects from one semispace to another or
1857
+ // promotes them to old space. The forwarding address is written directly into
1858
+ // the first word of the object without any encoding. If the object is dead,
1859
+ // NULL is written as its forwarding address.
1860
+ // The second pass updates pointers to new space in all spaces. It is possible
1861
+ // to encounter pointers to dead objects during traversal of dirty regions;
1862
+ // these are cleared to avoid encountering them during the next dirty-region
1863
+ // iteration.
1864
+ static void MigrateObject(Heap* heap,
1865
+ Address dst,
1866
+ Address src,
1867
+ int size,
1868
+ bool to_old_space) {
1869
+ if (to_old_space) {
1870
+ heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
1871
+ } else {
1872
+ heap->CopyBlock(dst, src, size);
1873
+ }
1874
+
1875
+ Memory::Address_at(src) = dst;
1876
+ }
1877
+
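A standalone sketch of the two-pass scheme just described, with heap objects reduced to structs: pass one copies live objects and writes a forwarding pointer (or nullptr for a dead object) into the from-space slot, and pass two redirects references through those pointers. Everything here is a toy model, not V8 types.

#include <cstdio>

struct Slot {
  bool live;
  int payload;
  Slot* forwarded;  // written in pass 1; nullptr marks a dead object
};

int main() {
  Slot from[3] = {{true, 10, nullptr}, {false, 0, nullptr}, {true, 30, nullptr}};
  Slot to[3];
  int top = 0;

  // Pass 1: migrate live objects and record forwarding addresses.
  for (Slot& s : from) {
    if (s.live) {
      to[top] = s;
      s.forwarded = &to[top++];
    } else {
      s.forwarded = nullptr;  // a later pointer to this object must be cleared
    }
  }

  // Pass 2: redirect a reference into from space (or clear it if dead).
  Slot* ref = &from[2];
  ref = ref->forwarded;
  printf("payload after update: %d\n", ref ? ref->payload : -1);  // prints 30
  return 0;
}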
1878
+
1879
+ class StaticPointersToNewGenUpdatingVisitor : public
1880
+ StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
1881
+ public:
1882
+ static inline void VisitPointer(Heap* heap, Object** p) {
1883
+ if (!(*p)->IsHeapObject()) return;
1884
+
1885
+ HeapObject* obj = HeapObject::cast(*p);
1886
+ Address old_addr = obj->address();
1887
+
1888
+ if (heap->new_space()->Contains(obj)) {
1889
+ ASSERT(heap->InFromSpace(*p));
1890
+ *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
1891
+ }
1892
+ }
1893
+ };
1894
+
1895
+
1896
+ // Visitor for updating pointers from live objects in old spaces to new space.
1897
+ // It does not expect to encounter pointers to dead objects.
1898
+ class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
1899
+ public:
1900
+ explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }
1901
+
1902
+ void VisitPointer(Object** p) {
1903
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
1904
+ }
1905
+
1906
+ void VisitPointers(Object** start, Object** end) {
1907
+ for (Object** p = start; p < end; p++) {
1908
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
1909
+ }
1910
+ }
1911
+
1912
+ void VisitCodeTarget(RelocInfo* rinfo) {
1913
+ ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
1914
+ Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1915
+ VisitPointer(&target);
1916
+ rinfo->set_target_address(Code::cast(target)->instruction_start());
1917
+ }
1918
+
1919
+ void VisitDebugTarget(RelocInfo* rinfo) {
1920
+ ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
1921
+ rinfo->IsPatchedReturnSequence()) ||
1922
+ (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
1923
+ rinfo->IsPatchedDebugBreakSlotSequence()));
1924
+ Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
1925
+ VisitPointer(&target);
1926
+ rinfo->set_call_address(Code::cast(target)->instruction_start());
1927
+ }
1928
+ private:
1929
+ Heap* heap_;
1930
+ };
1931
+
1932
+
1933
+ // Function for updating pointers from live objects in old spaces to new space.
1934
+ // It can encounter pointers to dead objects in new space when traversing map
1935
+ // space (see comment for MigrateObject).
1936
+ static void UpdatePointerToNewGen(HeapObject** p) {
1937
+ if (!(*p)->IsHeapObject()) return;
1938
+
1939
+ Address old_addr = (*p)->address();
1940
+ ASSERT(HEAP->InFromSpace(*p));
1941
+
1942
+ Address new_addr = Memory::Address_at(old_addr);
1943
+
1944
+ if (new_addr == NULL) {
1945
+ // We encountered pointer to a dead object. Clear it so we will
1946
+ // not visit it again during next iteration of dirty regions.
1947
+ *p = NULL;
1948
+ } else {
1949
+ *p = HeapObject::FromAddress(new_addr);
1950
+ }
1951
+ }
1952
+
1953
+
1954
+ static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1955
+ Object** p) {
1956
+ Address old_addr = HeapObject::cast(*p)->address();
1957
+ Address new_addr = Memory::Address_at(old_addr);
1958
+ return String::cast(HeapObject::FromAddress(new_addr));
1959
+ }
1960
+
1961
+
1962
+ static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) {
1963
+ Object* result;
1964
+
1965
+ if (object_size > heap->MaxObjectSizeInPagedSpace()) {
1966
+ MaybeObject* maybe_result =
1967
+ heap->lo_space()->AllocateRawFixedArray(object_size);
1968
+ if (maybe_result->ToObject(&result)) {
1969
+ HeapObject* target = HeapObject::cast(result);
1970
+ MigrateObject(heap, target->address(), object->address(), object_size,
1971
+ true);
1972
+ heap->mark_compact_collector()->tracer()->
1973
+ increment_promoted_objects_size(object_size);
1974
+ return true;
1975
+ }
1976
+ } else {
1977
+ OldSpace* target_space = heap->TargetSpace(object);
1978
+
1979
+ ASSERT(target_space == heap->old_pointer_space() ||
1980
+ target_space == heap->old_data_space());
1981
+ MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
1982
+ if (maybe_result->ToObject(&result)) {
1983
+ HeapObject* target = HeapObject::cast(result);
1984
+ MigrateObject(heap,
1985
+ target->address(),
1986
+ object->address(),
1987
+ object_size,
1988
+ target_space == heap->old_pointer_space());
1989
+ heap->mark_compact_collector()->tracer()->
1990
+ increment_promoted_objects_size(object_size);
1991
+ return true;
1992
+ }
1993
+ }
1994
+
1995
+ return false;
1996
+ }
1997
+
1998
+
1999
+ static void SweepNewSpace(Heap* heap, NewSpace* space) {
2000
+ heap->CheckNewSpaceExpansionCriteria();
2001
+
2002
+ Address from_bottom = space->bottom();
2003
+ Address from_top = space->top();
2004
+
2005
+ // Flip the semispaces. After flipping, to space is empty, from space has
2006
+ // live objects.
2007
+ space->Flip();
2008
+ space->ResetAllocationInfo();
2009
+
2010
+ int size = 0;
2011
+ int survivors_size = 0;
2012
+
2013
+ // First pass: traverse all objects in inactive semispace, remove marks,
2014
+ // migrate live objects and write forwarding addresses.
2015
+ for (Address current = from_bottom; current < from_top; current += size) {
2016
+ HeapObject* object = HeapObject::FromAddress(current);
2017
+
2018
+ if (object->IsMarked()) {
2019
+ object->ClearMark();
2020
+ heap->mark_compact_collector()->tracer()->decrement_marked_count();
2021
+
2022
+ size = object->Size();
2023
+ survivors_size += size;
2024
+
2025
+ // Aggressively promote young survivors to the old space.
2026
+ if (TryPromoteObject(heap, object, size)) {
2027
+ continue;
2028
+ }
2029
+
2030
+ // Promotion failed. Just migrate object to another semispace.
2031
+ // Allocation cannot fail at this point: semispaces are of equal size.
2032
+ Object* target = space->AllocateRaw(size)->ToObjectUnchecked();
2033
+
2034
+ MigrateObject(heap,
2035
+ HeapObject::cast(target)->address(),
2036
+ current,
2037
+ size,
2038
+ false);
2039
+ } else {
2040
+ // Process the dead object before we write a NULL into its header.
2041
+ LiveObjectList::ProcessNonLive(object);
2042
+
2043
+ size = object->Size();
2044
+ Memory::Address_at(current) = NULL;
2045
+ }
2046
+ }
2047
+
2048
+ // Second pass: find pointers to new space and update them.
2049
+ PointersToNewGenUpdatingVisitor updating_visitor(heap);
2050
+
2051
+ // Update pointers in to space.
2052
+ Address current = space->bottom();
2053
+ while (current < space->top()) {
2054
+ HeapObject* object = HeapObject::FromAddress(current);
2055
+ current +=
2056
+ StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
2057
+ object);
2058
+ }
2059
+
2060
+ // Update roots.
2061
+ heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
2062
+ LiveObjectList::IterateElements(&updating_visitor);
2063
+
2064
+ // Update pointers in old spaces.
2065
+ heap->IterateDirtyRegions(heap->old_pointer_space(),
2066
+ &Heap::IteratePointersInDirtyRegion,
2067
+ &UpdatePointerToNewGen,
2068
+ heap->WATERMARK_SHOULD_BE_VALID);
2069
+
2070
+ heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
2071
+
2072
+ // Update pointers from cells.
2073
+ HeapObjectIterator cell_iterator(heap->cell_space());
2074
+ for (HeapObject* cell = cell_iterator.next();
2075
+ cell != NULL;
2076
+ cell = cell_iterator.next()) {
2077
+ if (cell->IsJSGlobalPropertyCell()) {
2078
+ Address value_address =
2079
+ reinterpret_cast<Address>(cell) +
2080
+ (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
2081
+ updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
2082
+ }
2083
+ }
2084
+
2085
+ // Update pointer from the global contexts list.
2086
+ updating_visitor.VisitPointer(heap->global_contexts_list_address());
2087
+
2088
+ // Update pointers from external string table.
2089
+ heap->UpdateNewSpaceReferencesInExternalStringTable(
2090
+ &UpdateNewSpaceReferenceInExternalStringTableEntry);
2091
+
2092
+ // All pointers were updated. Update auxiliary allocation info.
2093
+ heap->IncrementYoungSurvivorsCounter(survivors_size);
2094
+ space->set_age_mark(space->top());
2095
+
2096
+ // Update JSFunction pointers from the runtime profiler.
2097
+ heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
2098
+ }
2099
+
2100
+
2101
+ static void SweepSpace(Heap* heap, PagedSpace* space) {
2102
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
2103
+
2104
+ // During sweeping of a paged space we try to find the longest sequences
2105
+ // of pages without live objects and free them (instead of putting them on
2106
+ // the free list).
2107
+
2108
+ // Page preceding current.
2109
+ Page* prev = Page::FromAddress(NULL);
2110
+
2111
+ // First empty page in a sequence.
2112
+ Page* first_empty_page = Page::FromAddress(NULL);
2113
+
2114
+ // Page preceding first empty page.
2115
+ Page* prec_first_empty_page = Page::FromAddress(NULL);
2116
+
2117
+ // If the last used page of the space ends with a sequence of dead objects
2118
+ // we can adjust the allocation top instead of putting this free area into
2119
+ // the free list. Thus during sweeping we keep track of such areas
2120
+ // and defer their deallocation until the sweeping of the next page
2121
+ // is done: if one of the next pages contains live objects we have
2122
+ // to put such an area into the free list.
2123
+ Address last_free_start = NULL;
2124
+ int last_free_size = 0;
2125
+
2126
+ while (it.has_next()) {
2127
+ Page* p = it.next();
2128
+
2129
+ bool is_previous_alive = true;
2130
+ Address free_start = NULL;
2131
+ HeapObject* object;
2132
+
2133
+ for (Address current = p->ObjectAreaStart();
2134
+ current < p->AllocationTop();
2135
+ current += object->Size()) {
2136
+ object = HeapObject::FromAddress(current);
2137
+ if (object->IsMarked()) {
2138
+ object->ClearMark();
2139
+ heap->mark_compact_collector()->tracer()->decrement_marked_count();
2140
+
2141
+ if (!is_previous_alive) { // Transition from free to live.
2142
+ space->DeallocateBlock(free_start,
2143
+ static_cast<int>(current - free_start),
2144
+ true);
2145
+ is_previous_alive = true;
2146
+ }
2147
+ } else {
2148
+ heap->mark_compact_collector()->ReportDeleteIfNeeded(
2149
+ object, heap->isolate());
2150
+ if (is_previous_alive) { // Transition from live to free.
2151
+ free_start = current;
2152
+ is_previous_alive = false;
2153
+ }
2154
+ LiveObjectList::ProcessNonLive(object);
2155
+ }
2156
+ // The object is now unmarked for the call to Size() at the top of the
2157
+ // loop.
2158
+ }
2159
+
2160
+ bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop())
2161
+ || (!is_previous_alive && free_start == p->ObjectAreaStart());
2162
+
2163
+ if (page_is_empty) {
2164
+ // This page is empty. Check whether we are in the middle of
2165
+ // a sequence of empty pages and start one if not.
2166
+ if (!first_empty_page->is_valid()) {
2167
+ first_empty_page = p;
2168
+ prec_first_empty_page = prev;
2169
+ }
2170
+
2171
+ if (!is_previous_alive) {
2172
+ // There are dead objects on this page. Update space accounting stats
2173
+ // without putting anything into free list.
2174
+ int size_in_bytes = static_cast<int>(p->AllocationTop() - free_start);
2175
+ if (size_in_bytes > 0) {
2176
+ space->DeallocateBlock(free_start, size_in_bytes, false);
2177
+ }
2178
+ }
2179
+ } else {
2180
+ // This page is not empty. The sequence of empty pages ended on the previous
2181
+ // one.
2182
+ if (first_empty_page->is_valid()) {
2183
+ space->FreePages(prec_first_empty_page, prev);
2184
+ prec_first_empty_page = first_empty_page = Page::FromAddress(NULL);
2185
+ }
2186
+
2187
+ // If there is a free ending area on one of the previous pages we have
2188
+ // to deallocate that area and put it on the free list.
2189
+ if (last_free_size > 0) {
2190
+ Page::FromAddress(last_free_start)->
2191
+ SetAllocationWatermark(last_free_start);
2192
+ space->DeallocateBlock(last_free_start, last_free_size, true);
2193
+ last_free_start = NULL;
2194
+ last_free_size = 0;
2195
+ }
2196
+
2197
+ // If the last region of this page was not live we remember it.
2198
+ if (!is_previous_alive) {
2199
+ ASSERT(last_free_size == 0);
2200
+ last_free_size = static_cast<int>(p->AllocationTop() - free_start);
2201
+ last_free_start = free_start;
2202
+ }
2203
+ }
2204
+
2205
+ prev = p;
2206
+ }
2207
+
2208
+ // We reached end of space. See if we need to adjust allocation top.
2209
+ Address new_allocation_top = NULL;
2210
+
2211
+ if (first_empty_page->is_valid()) {
2212
+ // The last used pages in the space are empty. We can move the allocation top
2213
+ // backwards to the beginning of the first empty page.
2214
+ ASSERT(prev == space->AllocationTopPage());
2215
+
2216
+ new_allocation_top = first_empty_page->ObjectAreaStart();
2217
+ }
2218
+
2219
+ if (last_free_size > 0) {
2220
+ // There was a free ending area on the previous page.
2221
+ // Deallocate it without putting it into freelist and move allocation
2222
+ // top to the beginning of this free area.
2223
+ space->DeallocateBlock(last_free_start, last_free_size, false);
2224
+ new_allocation_top = last_free_start;
2225
+ }
2226
+
2227
+ if (new_allocation_top != NULL) {
2228
+ #ifdef DEBUG
2229
+ Page* new_allocation_top_page = Page::FromAllocationTop(new_allocation_top);
2230
+ if (!first_empty_page->is_valid()) {
2231
+ ASSERT(new_allocation_top_page == space->AllocationTopPage());
2232
+ } else if (last_free_size > 0) {
2233
+ ASSERT(new_allocation_top_page == prec_first_empty_page);
2234
+ } else {
2235
+ ASSERT(new_allocation_top_page == first_empty_page);
2236
+ }
2237
+ #endif
2238
+
2239
+ space->SetTop(new_allocation_top);
2240
+ }
2241
+ }
2242
+
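The net effect of the bookkeeping in SweepSpace can be sketched with each page reduced to a live-byte prefix (an assumption; real pages interleave live and dead regions): trailing fully dead pages are released wholesale, and the dead tail of the last non-empty page becomes the new allocation top rather than a free-list entry.

#include <cstdio>
#include <vector>

int main() {
  // live_prefix[i] = live bytes at the start of page i; 0 = fully dead page.
  std::vector<int> live_prefix = {100, 60, 0, 0};

  int last_nonempty = -1;
  for (int i = 0; i < static_cast<int>(live_prefix.size()); ++i) {
    if (live_prefix[i] > 0) last_nonempty = i;
  }
  if (last_nonempty < 0) return 0;  // nothing live at all

  int freed_pages = static_cast<int>(live_prefix.size()) - (last_nonempty + 1);
  printf("release %d trailing page(s); top moves to page %d, offset %d\n",
         freed_pages, last_nonempty, live_prefix[last_nonempty]);
  return 0;
}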
2243
+
2244
+ void MarkCompactCollector::EncodeForwardingAddresses() {
2245
+ ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2246
+ // Objects in the active semispace of the young generation may be
2247
+ // relocated to the inactive semispace (if not promoted). Set the
2248
+ // relocation info to the beginning of the inactive semispace.
2249
+ heap()->new_space()->MCResetRelocationInfo();
2250
+
2251
+ // Compute the forwarding pointers in each space.
2252
+ EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
2253
+ ReportDeleteIfNeeded>(
2254
+ heap()->old_pointer_space());
2255
+
2256
+ EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
2257
+ IgnoreNonLiveObject>(
2258
+ heap()->old_data_space());
2259
+
2260
+ EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
2261
+ ReportDeleteIfNeeded>(
2262
+ heap()->code_space());
2263
+
2264
+ EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
2265
+ IgnoreNonLiveObject>(
2266
+ heap()->cell_space());
2267
+
2268
+
2269
+ // Compute new space next to last after the old and code spaces have been
2270
+ // compacted. Objects in new space can be promoted to old or code space.
2271
+ EncodeForwardingAddressesInNewSpace();
2272
+
2273
+ // Compute map space last because computing forwarding addresses
2274
+ // overwrites non-live objects. Objects in the other spaces rely on
2275
+ // non-live map pointers to get the sizes of non-live objects.
2276
+ EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
2277
+ IgnoreNonLiveObject>(
2278
+ heap()->map_space());
2279
+
2280
+ // Write relocation info to the top page, so we can use it later. This is
2281
+ // done after promoting objects from the new space so we get the correct
2282
+ // allocation top.
2283
+ heap()->old_pointer_space()->MCWriteRelocationInfoToPage();
2284
+ heap()->old_data_space()->MCWriteRelocationInfoToPage();
2285
+ heap()->code_space()->MCWriteRelocationInfoToPage();
2286
+ heap()->map_space()->MCWriteRelocationInfoToPage();
2287
+ heap()->cell_space()->MCWriteRelocationInfoToPage();
2288
+ }
2289
+
2290
+
2291
+ class MapIterator : public HeapObjectIterator {
2292
+ public:
2293
+ explicit MapIterator(Heap* heap)
2294
+ : HeapObjectIterator(heap->map_space(), &SizeCallback) { }
2295
+
2296
+ MapIterator(Heap* heap, Address start)
2297
+ : HeapObjectIterator(heap->map_space(), start, &SizeCallback) { }
2298
+
2299
+ private:
2300
+ static int SizeCallback(HeapObject* unused) {
2301
+ USE(unused);
2302
+ return Map::kSize;
2303
+ }
2304
+ };
2305
+
2306
+
2307
+ class MapCompact {
2308
+ public:
2309
+ explicit MapCompact(Heap* heap, int live_maps)
2310
+ : heap_(heap),
2311
+ live_maps_(live_maps),
2312
+ to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
2313
+ vacant_map_it_(heap),
2314
+ map_to_evacuate_it_(heap, to_evacuate_start_),
2315
+ first_map_to_evacuate_(
2316
+ reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
2317
+ }
2318
+
2319
+ void CompactMaps() {
2320
+ // As we know the number of maps to evacuate beforehand,
2321
+ // we stop when there are no more vacant maps.
2322
+ for (Map* next_vacant_map = NextVacantMap();
2323
+ next_vacant_map;
2324
+ next_vacant_map = NextVacantMap()) {
2325
+ EvacuateMap(next_vacant_map, NextMapToEvacuate());
2326
+ }
2327
+
2328
+ #ifdef DEBUG
2329
+ CheckNoMapsToEvacuate();
2330
+ #endif
2331
+ }
2332
+
2333
+ void UpdateMapPointersInRoots() {
2334
+ MapUpdatingVisitor map_updating_visitor;
2335
+ heap()->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
2336
+ heap()->isolate()->global_handles()->IterateWeakRoots(
2337
+ &map_updating_visitor);
2338
+ LiveObjectList::IterateElements(&map_updating_visitor);
2339
+ }
2340
+
2341
+ void UpdateMapPointersInPagedSpace(PagedSpace* space) {
2342
+ ASSERT(space != heap()->map_space());
2343
+
2344
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
2345
+ while (it.has_next()) {
2346
+ Page* p = it.next();
2347
+ UpdateMapPointersInRange(heap(),
2348
+ p->ObjectAreaStart(),
2349
+ p->AllocationTop());
2350
+ }
2351
+ }
2352
+
2353
+ void UpdateMapPointersInNewSpace() {
2354
+ NewSpace* space = heap()->new_space();
2355
+ UpdateMapPointersInRange(heap(), space->bottom(), space->top());
2356
+ }
2357
+
2358
+ void UpdateMapPointersInLargeObjectSpace() {
2359
+ LargeObjectIterator it(heap()->lo_space());
2360
+ for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
2361
+ UpdateMapPointersInObject(heap(), obj);
2362
+ }
2363
+
2364
+ void Finish() {
2365
+ heap()->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
2366
+ }
2367
+
2368
+ inline Heap* heap() const { return heap_; }
2369
+
2370
+ private:
2371
+ Heap* heap_;
2372
+ int live_maps_;
2373
+ Address to_evacuate_start_;
2374
+ MapIterator vacant_map_it_;
2375
+ MapIterator map_to_evacuate_it_;
2376
+ Map* first_map_to_evacuate_;
2377
+
2378
+ // Helper class for updating map pointers in HeapObjects.
2379
+ class MapUpdatingVisitor: public ObjectVisitor {
2380
+ public:
2381
+ MapUpdatingVisitor() {}
2382
+
2383
+ void VisitPointer(Object** p) {
2384
+ UpdateMapPointer(p);
2385
+ }
2386
+
2387
+ void VisitPointers(Object** start, Object** end) {
2388
+ for (Object** p = start; p < end; p++) UpdateMapPointer(p);
2389
+ }
2390
+
2391
+ private:
2392
+ void UpdateMapPointer(Object** p) {
2393
+ if (!(*p)->IsHeapObject()) return;
2394
+ HeapObject* old_map = reinterpret_cast<HeapObject*>(*p);
2395
+
2396
+ // Moved maps are tagged with an overflowed map word. They are the only
2397
+ // objects whose map word is overflowed, as marking is already complete.
2398
+ MapWord map_word = old_map->map_word();
2399
+ if (!map_word.IsOverflowed()) return;
2400
+
2401
+ *p = GetForwardedMap(map_word);
2402
+ }
2403
+ };
2404
+
2405
+ static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
2406
+ while (true) {
2407
+ HeapObject* next = it->next();
2408
+ ASSERT(next != NULL);
2409
+ if (next == last)
2410
+ return NULL;
2411
+ ASSERT(!next->IsOverflowed());
2412
+ ASSERT(!next->IsMarked());
2413
+ ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next));
2414
+ if (next->IsMap() == live)
2415
+ return reinterpret_cast<Map*>(next);
2416
+ }
2417
+ }
2418
+
2419
+ Map* NextVacantMap() {
2420
+ Map* map = NextMap(&vacant_map_it_, first_map_to_evacuate_, false);
2421
+ ASSERT(map == NULL || FreeListNode::IsFreeListNode(map));
2422
+ return map;
2423
+ }
2424
+
2425
+ Map* NextMapToEvacuate() {
2426
+ Map* map = NextMap(&map_to_evacuate_it_, NULL, true);
2427
+ ASSERT(map != NULL);
2428
+ ASSERT(map->IsMap());
2429
+ return map;
2430
+ }
2431
+
2432
+ static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) {
2433
+ ASSERT(FreeListNode::IsFreeListNode(vacant_map));
2434
+ ASSERT(map_to_evacuate->IsMap());
2435
+
2436
+ ASSERT(Map::kSize % 4 == 0);
2437
+
2438
+ map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(
2439
+ vacant_map->address(), map_to_evacuate->address(), Map::kSize);
2440
+
2441
+ ASSERT(vacant_map->IsMap()); // Due to memcpy above.
2442
+
2443
+ MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
2444
+ forwarding_map_word.SetOverflow();
2445
+ map_to_evacuate->set_map_word(forwarding_map_word);
2446
+
2447
+ ASSERT(map_to_evacuate->map_word().IsOverflowed());
2448
+ ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map);
2449
+ }
2450
+
2451
+ static Map* GetForwardedMap(MapWord map_word) {
2452
+ ASSERT(map_word.IsOverflowed());
2453
+ map_word.ClearOverflow();
2454
+ Map* new_map = map_word.ToMap();
2455
+ ASSERT_MAP_ALIGNED(new_map->address());
2456
+ return new_map;
2457
+ }
2458
+
2459
+ static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) {
2460
+ ASSERT(!obj->IsMarked());
2461
+ Map* map = obj->map();
2462
+ ASSERT(heap->map_space()->Contains(map));
2463
+ MapWord map_word = map->map_word();
2464
+ ASSERT(!map_word.IsMarked());
2465
+ if (map_word.IsOverflowed()) {
2466
+ Map* new_map = GetForwardedMap(map_word);
2467
+ ASSERT(heap->map_space()->Contains(new_map));
2468
+ obj->set_map(new_map);
2469
+
2470
+ #ifdef DEBUG
2471
+ if (FLAG_gc_verbose) {
2472
+ PrintF("update %p : %p -> %p\n",
2473
+ obj->address(),
2474
+ reinterpret_cast<void*>(map),
2475
+ reinterpret_cast<void*>(new_map));
2476
+ }
2477
+ #endif
2478
+ }
2479
+
2480
+ int size = obj->SizeFromMap(map);
2481
+ MapUpdatingVisitor map_updating_visitor;
2482
+ obj->IterateBody(map->instance_type(), size, &map_updating_visitor);
2483
+ return size;
2484
+ }
2485
+
2486
+ static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) {
2487
+ HeapObject* object;
2488
+ int size;
2489
+ for (Address current = start; current < end; current += size) {
2490
+ object = HeapObject::FromAddress(current);
2491
+ size = UpdateMapPointersInObject(heap, object);
2492
+ ASSERT(size > 0);
2493
+ }
2494
+ }
2495
+
2496
+ #ifdef DEBUG
2497
+ void CheckNoMapsToEvacuate() {
2498
+ if (!FLAG_enable_slow_asserts)
2499
+ return;
2500
+
2501
+ for (HeapObject* obj = map_to_evacuate_it_.next();
2502
+ obj != NULL; obj = map_to_evacuate_it_.next())
2503
+ ASSERT(FreeListNode::IsFreeListNode(obj));
2504
+ }
2505
+ #endif
2506
+ };
2507
+
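MapCompact is essentially a two-finger compactor over fixed-size map slots: one cursor walks vacant slots up from the bottom, another walks live maps from the end of the compacted area, and each evacuated map leaves a forwarding mark (the overflowed map word) behind. A minimal sketch of that shape, with slots as booleans and forwarding marks as indices; all names here are toy stand-ins, not V8 code.

#include <cstdio>
#include <vector>

int main() {
  // true = live map, false = vacant slot; 6 slots, 4 live.
  std::vector<bool> live = {true, false, true, false, true, true};
  std::vector<int> forward(live.size(), -1);  // stands in for overflowed words
  int live_count = 0;
  for (bool l : live) live_count += l;

  int vacant = 0;         // bottom cursor over vacant slots
  int evac = live_count;  // cursor over maps past the compacted area
  while (true) {
    while (vacant < live_count && live[vacant]) ++vacant;            // hole
    while (evac < (int)live.size() && !live[evac]) ++evac;           // live map
    if (vacant >= live_count) break;
    live[vacant] = true;
    live[evac] = false;
    forward[evac] = vacant;  // forwarding mark left in the evacuated slot
    ++vacant;
    ++evac;
  }
  for (int i = 0; i < (int)forward.size(); ++i) {
    if (forward[i] >= 0) printf("map %d -> slot %d\n", i, forward[i]);
  }
  return 0;
}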
2508
+
2509
+ void MarkCompactCollector::SweepSpaces() {
2510
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
2511
+
2512
+ ASSERT(state_ == SWEEP_SPACES);
2513
+ ASSERT(!IsCompacting());
2514
+ // Noncompacting collections simply sweep the spaces to clear the mark
2515
+ // bits and free the nonlive blocks (for old and map spaces). We sweep
2516
+ // the map space last because freeing non-live maps overwrites them and
2517
+ // the other spaces rely on possibly non-live maps to get the sizes for
2518
+ // non-live objects.
2519
+ SweepSpace(heap(), heap()->old_pointer_space());
2520
+ SweepSpace(heap(), heap()->old_data_space());
2521
+ SweepSpace(heap(), heap()->code_space());
2522
+ SweepSpace(heap(), heap()->cell_space());
2523
+ { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
2524
+ SweepNewSpace(heap(), heap()->new_space());
2525
+ }
2526
+ SweepSpace(heap(), heap()->map_space());
2527
+
2528
+ heap()->IterateDirtyRegions(heap()->map_space(),
2529
+ &heap()->IteratePointersInDirtyMapsRegion,
2530
+ &UpdatePointerToNewGen,
2531
+ heap()->WATERMARK_SHOULD_BE_VALID);
2532
+
2533
+ intptr_t live_maps_size = heap()->map_space()->Size();
2534
+ int live_maps = static_cast<int>(live_maps_size / Map::kSize);
2535
+ ASSERT(live_map_objects_size_ == live_maps_size);
2536
+
2537
+ if (heap()->map_space()->NeedsCompaction(live_maps)) {
2538
+ MapCompact map_compact(heap(), live_maps);
2539
+
2540
+ map_compact.CompactMaps();
2541
+ map_compact.UpdateMapPointersInRoots();
2542
+
2543
+ PagedSpaces spaces;
2544
+ for (PagedSpace* space = spaces.next();
2545
+ space != NULL; space = spaces.next()) {
2546
+ if (space == heap()->map_space()) continue;
2547
+ map_compact.UpdateMapPointersInPagedSpace(space);
2548
+ }
2549
+ map_compact.UpdateMapPointersInNewSpace();
2550
+ map_compact.UpdateMapPointersInLargeObjectSpace();
2551
+
2552
+ map_compact.Finish();
2553
+ }
2554
+ }
2555
+
2556
+
2557
+ // Iterate the live objects in a range of addresses (e.g., a page or a
2558
+ // semispace). Free regions in the range carry the distinguished map
2559
+ // encodings written while computing forwarding addresses, so they can be
2560
+ // skipped here. The callback function is used to get the size of each
2561
+ // live object.
2562
+ int MarkCompactCollector::IterateLiveObjectsInRange(
2563
+ Address start,
2564
+ Address end,
2565
+ LiveObjectCallback size_func) {
2566
+ int live_objects_size = 0;
2567
+ Address current = start;
2568
+ while (current < end) {
2569
+ uint32_t encoded_map = Memory::uint32_at(current);
2570
+ if (encoded_map == kSingleFreeEncoding) {
2571
+ current += kPointerSize;
2572
+ } else if (encoded_map == kMultiFreeEncoding) {
2573
+ current += Memory::int_at(current + kIntSize);
2574
+ } else {
2575
+ int size = (this->*size_func)(HeapObject::FromAddress(current));
2576
+ current += size;
2577
+ live_objects_size += size;
2578
+ }
2579
+ }
2580
+ return live_objects_size;
2581
+ }
2582
+
2583
+
2584
+ int MarkCompactCollector::IterateLiveObjects(
2585
+ NewSpace* space, LiveObjectCallback size_f) {
2586
+ ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2587
+ return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
2588
+ }
2589
+
2590
+
2591
+ int MarkCompactCollector::IterateLiveObjects(
2592
+ PagedSpace* space, LiveObjectCallback size_f) {
2593
+ ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2594
+ int total = 0;
2595
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
2596
+ while (it.has_next()) {
2597
+ Page* p = it.next();
2598
+ total += IterateLiveObjectsInRange(p->ObjectAreaStart(),
2599
+ p->AllocationTop(),
2600
+ size_f);
2601
+ }
2602
+ return total;
2603
+ }
2604
+
2605
+
2606
+ // -------------------------------------------------------------------------
2607
+ // Phase 3: Update pointers
2608
+
2609
+ // Helper class for updating pointers in HeapObjects.
2610
+ class UpdatingVisitor: public ObjectVisitor {
2611
+ public:
2612
+ explicit UpdatingVisitor(Heap* heap) : heap_(heap) {}
2613
+
2614
+ void VisitPointer(Object** p) {
2615
+ UpdatePointer(p);
2616
+ }
2617
+
2618
+ void VisitPointers(Object** start, Object** end) {
2619
+ // Update all HeapObject pointers in [start, end).
2620
+ for (Object** p = start; p < end; p++) UpdatePointer(p);
2621
+ }
2622
+
2623
+ void VisitCodeTarget(RelocInfo* rinfo) {
2624
+ ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
2625
+ Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
2626
+ VisitPointer(&target);
2627
+ rinfo->set_target_address(
2628
+ reinterpret_cast<Code*>(target)->instruction_start());
2629
+ }
2630
+
2631
+ void VisitDebugTarget(RelocInfo* rinfo) {
2632
+ ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
2633
+ rinfo->IsPatchedReturnSequence()) ||
2634
+ (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
2635
+ rinfo->IsPatchedDebugBreakSlotSequence()));
2636
+ Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
2637
+ VisitPointer(&target);
2638
+ rinfo->set_call_address(
2639
+ reinterpret_cast<Code*>(target)->instruction_start());
2640
+ }
2641
+
2642
+ inline Heap* heap() const { return heap_; }
2643
+
2644
+ private:
2645
+ void UpdatePointer(Object** p) {
2646
+ if (!(*p)->IsHeapObject()) return;
2647
+
2648
+ HeapObject* obj = HeapObject::cast(*p);
2649
+ Address old_addr = obj->address();
2650
+ Address new_addr;
2651
+ ASSERT(!heap()->InFromSpace(obj));
2652
+
2653
+ if (heap()->new_space()->Contains(obj)) {
2654
+ Address forwarding_pointer_addr =
2655
+ heap()->new_space()->FromSpaceLow() +
2656
+ heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
2657
+ new_addr = Memory::Address_at(forwarding_pointer_addr);
2658
+
2659
+ #ifdef DEBUG
2660
+ ASSERT(heap()->old_pointer_space()->Contains(new_addr) ||
2661
+ heap()->old_data_space()->Contains(new_addr) ||
2662
+ heap()->new_space()->FromSpaceContains(new_addr) ||
2663
+ heap()->lo_space()->Contains(HeapObject::FromAddress(new_addr)));
2664
+
2665
+ if (heap()->new_space()->FromSpaceContains(new_addr)) {
2666
+ ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
2667
+ heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
2668
+ }
2669
+ #endif
2670
+
2671
+ } else if (heap()->lo_space()->Contains(obj)) {
2672
+ // Don't move objects in the large object space.
2673
+ return;
2674
+
2675
+ } else {
2676
+ #ifdef DEBUG
2677
+ PagedSpaces spaces;
2678
+ PagedSpace* original_space = spaces.next();
2679
+ while (original_space != NULL) {
2680
+ if (original_space->Contains(obj)) break;
2681
+ original_space = spaces.next();
2682
+ }
2683
+ ASSERT(original_space != NULL);
2684
+ #endif
2685
+ new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
2686
+ ASSERT(original_space->Contains(new_addr));
2687
+ ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
2688
+ original_space->MCSpaceOffsetForAddress(old_addr));
2689
+ }
2690
+
2691
+ *p = HeapObject::FromAddress(new_addr);
2692
+
2693
+ #ifdef DEBUG
2694
+ if (FLAG_gc_verbose) {
2695
+ PrintF("update %p : %p -> %p\n",
2696
+ reinterpret_cast<Address>(p), old_addr, new_addr);
2697
+ }
2698
+ #endif
2699
+ }
2700
+
2701
+ Heap* heap_;
2702
+ };
2703
+
2704
+
2705
+ void MarkCompactCollector::UpdatePointers() {
2706
+ #ifdef DEBUG
2707
+ ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2708
+ state_ = UPDATE_POINTERS;
2709
+ #endif
2710
+ UpdatingVisitor updating_visitor(heap());
2711
+ heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
2712
+ &updating_visitor);
2713
+ heap()->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
2714
+ heap()->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);
2715
+
2716
+ // Update the pointer to the head of the weak list of global contexts.
2717
+ updating_visitor.VisitPointer(&heap()->global_contexts_list_);
2718
+
2719
+ LiveObjectList::IterateElements(&updating_visitor);
2720
+
2721
+ int live_maps_size = IterateLiveObjects(
2722
+ heap()->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2723
+ int live_pointer_olds_size = IterateLiveObjects(
2724
+ heap()->old_pointer_space(),
2725
+ &MarkCompactCollector::UpdatePointersInOldObject);
2726
+ int live_data_olds_size = IterateLiveObjects(
2727
+ heap()->old_data_space(),
2728
+ &MarkCompactCollector::UpdatePointersInOldObject);
2729
+ int live_codes_size = IterateLiveObjects(
2730
+ heap()->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2731
+ int live_cells_size = IterateLiveObjects(
2732
+ heap()->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
2733
+ int live_news_size = IterateLiveObjects(
2734
+ heap()->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);
2735
+
2736
+ // Large objects do not move, so the map word can be updated directly.
2737
+ LargeObjectIterator it(heap()->lo_space());
2738
+ for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2739
+ UpdatePointersInNewObject(obj);
2740
+ }
2741
+
2742
+ USE(live_maps_size);
2743
+ USE(live_pointer_olds_size);
2744
+ USE(live_data_olds_size);
2745
+ USE(live_codes_size);
2746
+ USE(live_cells_size);
2747
+ USE(live_news_size);
2748
+ ASSERT(live_maps_size == live_map_objects_size_);
2749
+ ASSERT(live_data_olds_size == live_old_data_objects_size_);
2750
+ ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
2751
+ ASSERT(live_codes_size == live_code_objects_size_);
2752
+ ASSERT(live_cells_size == live_cell_objects_size_);
2753
+ ASSERT(live_news_size == live_young_objects_size_);
2754
+ }
2755
+
2756
+
2757
+ int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
2758
+ // Keep the old map pointer.
2759
+ Map* old_map = obj->map();
2760
+ ASSERT(old_map->IsHeapObject());
2761
+
2762
+ Address forwarded = GetForwardingAddressInOldSpace(old_map);
2763
+
2764
+ ASSERT(heap()->map_space()->Contains(old_map));
2765
+ ASSERT(heap()->map_space()->Contains(forwarded));
2766
+ #ifdef DEBUG
2767
+ if (FLAG_gc_verbose) {
2768
+ PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
2769
+ forwarded);
2770
+ }
2771
+ #endif
2772
+ // Update the map pointer.
2773
+ obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));
2774
+
2775
+ // We have to compute the object size relying on the old map because
2776
+ // map objects are not relocated yet.
2777
+ int obj_size = obj->SizeFromMap(old_map);
2778
+
2779
+ // Update pointers in the object body.
2780
+ UpdatingVisitor updating_visitor(heap());
2781
+ obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
2782
+ return obj_size;
2783
+ }
2784
+
2785
+
2786
+ int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
2787
+ // Decode the map pointer.
2788
+ MapWord encoding = obj->map_word();
2789
+ Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2790
+ ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2791
+
2792
+ // At this point, the first word of map_addr is also encoded, so we cannot
2793
+ // cast it to Map* using Map::cast.
2794
+ Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
2795
+ int obj_size = obj->SizeFromMap(map);
2796
+ InstanceType type = map->instance_type();
2797
+
2798
+ // Update map pointer.
2799
+ Address new_map_addr = GetForwardingAddressInOldSpace(map);
2800
+ int offset = encoding.DecodeOffset();
2801
+ obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));
2802
+
2803
+ #ifdef DEBUG
2804
+ if (FLAG_gc_verbose) {
2805
+ PrintF("update %p : %p -> %p\n", obj->address(),
2806
+ map_addr, new_map_addr);
2807
+ }
2808
+ #endif
2809
+
2810
+ // Update pointers in the object body.
2811
+ UpdatingVisitor updating_visitor(heap());
2812
+ obj->IterateBody(type, obj_size, &updating_visitor);
2813
+ return obj_size;
2814
+ }
2815
+
2816
+
2817
+ Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
2818
+ // The object should be in either old space or map space.
2819
+ MapWord encoding = obj->map_word();
2820
+
2821
+ // Offset (in live bytes) from the first live object's forwarding address.
2822
+ int offset = encoding.DecodeOffset();
2823
+ Address obj_addr = obj->address();
2824
+
2825
+ // Find the first live object's forwarding address.
2826
+ Page* p = Page::FromAddress(obj_addr);
2827
+ Address first_forwarded = p->mc_first_forwarded;
2828
+
2829
+ // The page containing the first forwarded address, and its offset within it.
2830
+ Page* forwarded_page = Page::FromAddress(first_forwarded);
2831
+ int forwarded_offset = forwarded_page->Offset(first_forwarded);
2832
+
2833
+ // Find end of allocation in the page of first_forwarded.
2834
+ int mc_top_offset = forwarded_page->AllocationWatermarkOffset();
2835
+
2836
+ // Check if the current object's forwarding address is on the same page
2837
+ // as the first live object's forwarding address.
2838
+ if (forwarded_offset + offset < mc_top_offset) {
2839
+ // In the same page.
2840
+ return first_forwarded + offset;
2841
+ }
2842
+
2843
+ // It must be on the next page. NOTE: this may cross chunks.
2844
+ Page* next_page = forwarded_page->next_page();
2845
+ ASSERT(next_page->is_valid());
2846
+
2847
+ offset -= (mc_top_offset - forwarded_offset);
2848
+ offset += Page::kObjectStartOffset;
2849
+
2850
+ ASSERT_PAGE_OFFSET(offset);
2851
+ ASSERT(next_page->OffsetToAddress(offset) < next_page->AllocationTop());
2852
+
2853
+ return next_page->OffsetToAddress(offset);
2854
+ }
2855
+
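A sketch of the page-crossing arithmetic above: if the live-byte offset runs past the allocation top of the page holding first_forwarded, the remainder continues at the object-area start of the next page. The constants and the Forwarded helper below are hypothetical, not V8 values.

#include <cstdio>

const int kObjectStartOffset = 32;  // hypothetical per-page header size

// first_fwd_off: offset of the first live object's new address in its page;
// top_off: allocation-top offset of that page; off: live-byte offset.
void Forwarded(int page, int first_fwd_off, int top_off, int off) {
  if (first_fwd_off + off < top_off) {
    printf("page %d, offset %d\n", page, first_fwd_off + off);  // same page
  } else {
    off -= top_off - first_fwd_off;  // live bytes past this page's top
    printf("page %d, offset %d\n", page + 1, kObjectStartOffset + off);
  }
}

int main() {
  Forwarded(0, 40, 200, 100);  // fits: page 0, offset 140
  Forwarded(0, 40, 200, 180);  // crosses: page 1, offset 32 + 20 = 52
  return 0;
}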
2856
+
2857
+ // -------------------------------------------------------------------------
2858
+ // Phase 4: Relocate objects
2859
+
2860
+ void MarkCompactCollector::RelocateObjects() {
2861
+ #ifdef DEBUG
2862
+ ASSERT(state_ == UPDATE_POINTERS);
2863
+ state_ = RELOCATE_OBJECTS;
2864
+ #endif
2865
+ // Relocate objects, always relocating map objects first: relocating
2866
+ // objects in the other spaces relies on map objects to get object sizes.
2867
+ int live_maps_size = IterateLiveObjects(
2868
+ heap()->map_space(), &MarkCompactCollector::RelocateMapObject);
2869
+ int live_pointer_olds_size = IterateLiveObjects(
2870
+ heap()->old_pointer_space(),
2871
+ &MarkCompactCollector::RelocateOldPointerObject);
2872
+ int live_data_olds_size = IterateLiveObjects(
2873
+ heap()->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
2874
+ int live_codes_size = IterateLiveObjects(
2875
+ heap()->code_space(), &MarkCompactCollector::RelocateCodeObject);
2876
+ int live_cells_size = IterateLiveObjects(
2877
+ heap()->cell_space(), &MarkCompactCollector::RelocateCellObject);
2878
+ int live_news_size = IterateLiveObjects(
2879
+ heap()->new_space(), &MarkCompactCollector::RelocateNewObject);
2880
+
2881
+ USE(live_maps_size);
2882
+ USE(live_pointer_olds_size);
2883
+ USE(live_data_olds_size);
2884
+ USE(live_codes_size);
2885
+ USE(live_cells_size);
2886
+ USE(live_news_size);
2887
+ ASSERT(live_maps_size == live_map_objects_size_);
2888
+ ASSERT(live_data_olds_size == live_old_data_objects_size_);
2889
+ ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
2890
+ ASSERT(live_codes_size == live_code_objects_size_);
2891
+ ASSERT(live_cells_size == live_cell_objects_size_);
2892
+ ASSERT(live_news_size == live_young_objects_size_);
2893
+
2894
+ // Flip the from and to spaces.
2895
+ heap()->new_space()->Flip();
2896
+
2897
+ heap()->new_space()->MCCommitRelocationInfo();
2898
+
2899
+ // Set the age mark to the bottom of to space.
2900
+ Address mark = heap()->new_space()->bottom();
2901
+ heap()->new_space()->set_age_mark(mark);
2902
+
2903
+ PagedSpaces spaces;
2904
+ for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
2905
+ space->MCCommitRelocationInfo();
2906
+
2907
+ heap()->CheckNewSpaceExpansionCriteria();
2908
+ heap()->IncrementYoungSurvivorsCounter(live_news_size);
2909
+ }
2910
+
2911
+
2912
+ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
2913
+ // Recover map pointer.
2914
+ MapWord encoding = obj->map_word();
2915
+ Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2916
+ ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
2917
+
2918
+ // Get the forwarding address before resetting the map pointer.
2919
+ Address new_addr = GetForwardingAddressInOldSpace(obj);
2920
+
2921
+ // Reset map pointer. The meta map object may not be copied yet so
2922
+ // Map::cast does not yet work.
2923
+ obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));
2924
+
2925
+ Address old_addr = obj->address();
2926
+
2927
+ if (new_addr != old_addr) {
2928
+ // Move contents.
2929
+ heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2930
+ old_addr,
2931
+ Map::kSize);
2932
+ }
2933
+
2934
+ #ifdef DEBUG
2935
+ if (FLAG_gc_verbose) {
2936
+ PrintF("relocate %p -> %p\n", old_addr, new_addr);
2937
+ }
2938
+ #endif
2939
+
2940
+ return Map::kSize;
2941
+ }
2942
+
2943
+
2944
+ static inline int RestoreMap(HeapObject* obj,
2945
+ PagedSpace* space,
2946
+ Address new_addr,
2947
+ Address map_addr) {
2948
+ // This must be a non-map object, and the function relies on the
2949
+ // assumption that the Map space is compacted before the other paged
2950
+ // spaces (see RelocateObjects).
2951
+
2952
+ // Reset map pointer.
2953
+ obj->set_map(Map::cast(HeapObject::FromAddress(map_addr)));
2954
+
2955
+ int obj_size = obj->Size();
2956
+ ASSERT_OBJECT_SIZE(obj_size);
2957
+
2958
+ ASSERT(space->MCSpaceOffsetForAddress(new_addr) <=
2959
+ space->MCSpaceOffsetForAddress(obj->address()));
2960
+
2961
+ #ifdef DEBUG
2962
+ if (FLAG_gc_verbose) {
2963
+ PrintF("relocate %p -> %p\n", obj->address(), new_addr);
2964
+ }
2965
+ #endif
2966
+
2967
+ return obj_size;
2968
+ }
2969
+
2970
+
2971
+ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
2972
+ PagedSpace* space) {
2973
+ // Recover map pointer.
2974
+ MapWord encoding = obj->map_word();
2975
+ Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
2976
+ ASSERT(heap()->map_space()->Contains(map_addr));
2977
+
2978
+ // Get forwarding address before resetting map pointer.
2979
+ Address new_addr = GetForwardingAddressInOldSpace(obj);
2980
+
2981
+ // Reset the map pointer.
2982
+ int obj_size = RestoreMap(obj, space, new_addr, map_addr);
2983
+
2984
+ Address old_addr = obj->address();
2985
+
2986
+ if (new_addr != old_addr) {
2987
+ // Move contents.
2988
+ if (space == heap()->old_data_space()) {
2989
+ heap()->MoveBlock(new_addr, old_addr, obj_size);
2990
+ } else {
2991
+ heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2992
+ old_addr,
2993
+ obj_size);
2994
+ }
2995
+ }
2996
+
2997
+ ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());
2998
+
2999
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
3000
+ if (copied_to->IsSharedFunctionInfo()) {
3001
+ PROFILE(heap()->isolate(),
3002
+ SharedFunctionInfoMoveEvent(old_addr, new_addr));
3003
+ }
3004
+ HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3005
+
3006
+ return obj_size;
3007
+ }
3008
+
3009
+
3010
+ int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
3011
+ return RelocateOldNonCodeObject(obj, heap()->old_pointer_space());
3012
+ }
3013
+
3014
+
3015
+ int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
3016
+ return RelocateOldNonCodeObject(obj, heap()->old_data_space());
3017
+ }
3018
+
3019
+
3020
+ int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
3021
+ return RelocateOldNonCodeObject(obj, heap()->cell_space());
3022
+ }
3023
+
3024
+
3025
+ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
3026
+ // Recover map pointer.
3027
+ MapWord encoding = obj->map_word();
3028
+ Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
3029
+ ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));
3030
+
3031
+ // Get the forwarding address before resetting the map pointer.
3032
+ Address new_addr = GetForwardingAddressInOldSpace(obj);
3033
+
3034
+ // Reset the map pointer.
3035
+ int obj_size = RestoreMap(obj, heap()->code_space(), new_addr, map_addr);
3036
+
3037
+ Address old_addr = obj->address();
3038
+
3039
+ if (new_addr != old_addr) {
3040
+ // Move contents.
3041
+ heap()->MoveBlock(new_addr, old_addr, obj_size);
3042
+ }
3043
+
3044
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
3045
+ if (copied_to->IsCode()) {
3046
+ // May also update inline cache target.
3047
+ Code::cast(copied_to)->Relocate(new_addr - old_addr);
3048
+ // Notify the logger that compiled code has moved.
3049
+ PROFILE(heap()->isolate(), CodeMoveEvent(old_addr, new_addr));
3050
+ }
3051
+ HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3052
+
3053
+ return obj_size;
3054
+ }
3055
+
3056
+
3057
+ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
3058
+ int obj_size = obj->Size();
3059
+
3060
+ // Get the forwarding address.
3061
+ Address old_addr = obj->address();
3062
+ int offset = heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
3063
+
3064
+ Address new_addr =
3065
+ Memory::Address_at(heap()->new_space()->FromSpaceLow() + offset);
3066
+
3067
+ #ifdef DEBUG
3068
+ if (heap()->new_space()->FromSpaceContains(new_addr)) {
3069
+ ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
3070
+ heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
3071
+ } else {
3072
+ ASSERT(heap()->TargetSpace(obj) == heap()->old_pointer_space() ||
3073
+ heap()->TargetSpace(obj) == heap()->old_data_space());
3074
+ }
3075
+ #endif
3076
+
3077
+ // New and old addresses cannot overlap.
3078
+ if (heap()->InNewSpace(HeapObject::FromAddress(new_addr))) {
3079
+ heap()->CopyBlock(new_addr, old_addr, obj_size);
3080
+ } else {
3081
+ heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
3082
+ old_addr,
3083
+ obj_size);
3084
+ }
3085
+
3086
+ #ifdef DEBUG
3087
+ if (FLAG_gc_verbose) {
3088
+ PrintF("relocate %p -> %p\n", old_addr, new_addr);
3089
+ }
3090
+ #endif
3091
+
3092
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
3093
+ if (copied_to->IsSharedFunctionInfo()) {
3094
+ PROFILE(heap()->isolate(),
3095
+ SharedFunctionInfoMoveEvent(old_addr, new_addr));
3096
+ }
3097
+ HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));
3098
+
3099
+ return obj_size;
3100
+ }
3101
+
3102
+
3103
+ void MarkCompactCollector::EnableCodeFlushing(bool enable) {
3104
+ if (enable) {
3105
+ if (code_flusher_ != NULL) return;
3106
+ code_flusher_ = new CodeFlusher(heap()->isolate());
3107
+ } else {
3108
+ if (code_flusher_ == NULL) return;
3109
+ delete code_flusher_;
3110
+ code_flusher_ = NULL;
3111
+ }
3112
+ }
3113
+
3114
+
3115
+ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
3116
+ Isolate* isolate) {
3117
+ #ifdef ENABLE_GDB_JIT_INTERFACE
3118
+ if (obj->IsCode()) {
3119
+ GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
3120
+ }
3121
+ #endif
3122
+ #ifdef ENABLE_LOGGING_AND_PROFILING
3123
+ if (obj->IsCode()) {
3124
+ PROFILE(isolate, CodeDeleteEvent(obj->address()));
3125
+ }
3126
+ #endif
3127
+ }
3128
+
3129
+
3130
+ int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
3131
+ MapWord map_word = obj->map_word();
3132
+ map_word.ClearMark();
3133
+ return obj->SizeFromMap(map_word.ToMap());
3134
+ }
3135
+
3136
+
3137
+ void MarkCompactCollector::Initialize() {
3138
+ StaticPointersToNewGenUpdatingVisitor::Initialize();
3139
+ StaticMarkingVisitor::Initialize();
3140
+ }
3141
+
3142
+
3143
+ } } // namespace v8::internal