therubyracer 0.9.0beta2 → 0.9.0beta3

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -0,0 +1,446 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_LOG_H_
29
+ #define V8_LOG_H_
30
+
31
+ #include "platform.h"
32
+ #include "log-utils.h"
33
+
34
+ namespace v8 {
35
+ namespace internal {
36
+
37
+ // Logger is used for collecting logging information from V8 during
38
+ // execution. The result is dumped to a file.
39
+ //
40
+ // Available command line flags:
41
+ //
42
+ // --log
43
+ // Minimal logging (no API, code, or GC sample events), default is off.
44
+ //
45
+ // --log-all
46
+ // Log all events to the file, default is off. This is the same as combining
47
+ // --log-api, --log-code, --log-gc, and --log-regexp.
48
+ //
49
+ // --log-api
50
+ // Log API events to the logfile, default is off. --log-api implies --log.
51
+ //
52
+ // --log-code
53
+ // Log code (create, move, and delete) events to the logfile, default is off.
54
+ // --log-code implies --log.
55
+ //
56
+ // --log-gc
57
+ // Log GC heap samples after each GC that can be processed by hp2ps, default
58
+ // is off. --log-gc implies --log.
59
+ //
60
+ // --log-regexp
61
+ // Log creation and use of regular expressions, Default is off.
62
+ // --log-regexp implies --log.
63
+ //
64
+ // --logfile <filename>
65
+ // Specify the name of the logfile, default is "v8.log".
66
+ //
67
+ // --prof
68
+ // Collect statistical profiling information (ticks), default is off. The
69
+ // tick profiler requires code events, so --prof implies --log-code.
70
+
71
+ // Forward declarations.
72
+ class Ticker;
73
+ class Profiler;
74
+ class Semaphore;
75
+ class SlidingStateWindow;
76
+ class LogMessageBuilder;
77
+
78
+ #undef LOG
79
+ #ifdef ENABLE_LOGGING_AND_PROFILING
80
+ #define LOG(isolate, Call) \
81
+ do { \
82
+ v8::internal::Logger* logger = \
83
+ (isolate)->logger(); \
84
+ if (logger->is_logging()) \
85
+ logger->Call; \
86
+ } while (false)
87
+ #else
88
+ #define LOG(isolate, Call) ((void) 0)
89
+ #endif
90
+
91
+ #define LOG_EVENTS_AND_TAGS_LIST(V) \
92
+ V(CODE_CREATION_EVENT, "code-creation") \
93
+ V(CODE_MOVE_EVENT, "code-move") \
94
+ V(CODE_DELETE_EVENT, "code-delete") \
95
+ V(CODE_MOVING_GC, "code-moving-gc") \
96
+ V(SHARED_FUNC_MOVE_EVENT, "sfi-move") \
97
+ V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \
98
+ V(TICK_EVENT, "tick") \
99
+ V(REPEAT_META_EVENT, "repeat") \
100
+ V(BUILTIN_TAG, "Builtin") \
101
+ V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \
102
+ V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \
103
+ V(CALL_IC_TAG, "CallIC") \
104
+ V(CALL_INITIALIZE_TAG, "CallInitialize") \
105
+ V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \
106
+ V(CALL_MISS_TAG, "CallMiss") \
107
+ V(CALL_NORMAL_TAG, "CallNormal") \
108
+ V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \
109
+ V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \
110
+ V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \
111
+ "KeyedCallDebugPrepareStepIn") \
112
+ V(KEYED_CALL_IC_TAG, "KeyedCallIC") \
113
+ V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \
114
+ V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \
115
+ V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \
116
+ V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \
117
+ V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \
118
+ V(CALLBACK_TAG, "Callback") \
119
+ V(EVAL_TAG, "Eval") \
120
+ V(FUNCTION_TAG, "Function") \
121
+ V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
122
+ V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \
123
+ V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
124
+ V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC")\
125
+ V(LAZY_COMPILE_TAG, "LazyCompile") \
126
+ V(LOAD_IC_TAG, "LoadIC") \
127
+ V(REG_EXP_TAG, "RegExp") \
128
+ V(SCRIPT_TAG, "Script") \
129
+ V(STORE_IC_TAG, "StoreIC") \
130
+ V(STUB_TAG, "Stub") \
131
+ V(NATIVE_FUNCTION_TAG, "Function") \
132
+ V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \
133
+ V(NATIVE_SCRIPT_TAG, "Script")
134
+ // Note that 'NATIVE_' cases for functions and scripts are mapped onto
135
+ // original tags when writing to the log.
136
+
137
+
138
+ class Sampler;
139
+
140
+
141
+ class Logger {
142
+ public:
143
+ #define DECLARE_ENUM(enum_item, ignore) enum_item,
144
+ enum LogEventsAndTags {
145
+ LOG_EVENTS_AND_TAGS_LIST(DECLARE_ENUM)
146
+ NUMBER_OF_LOG_EVENTS
147
+ };
148
+ #undef DECLARE_ENUM
149
+
150
+ // Acquires resources for logging if the right flags are set.
151
+ bool Setup();
152
+
153
+ void EnsureTickerStarted();
154
+ void EnsureTickerStopped();
155
+
156
+ Sampler* sampler();
157
+
158
+ // Frees resources acquired in Setup.
159
+ void TearDown();
160
+
161
+ // Enable the computation of a sliding window of states.
162
+ void EnableSlidingStateWindow();
163
+
164
+ // Emits an event with a string value -> (name, value).
165
+ void StringEvent(const char* name, const char* value);
166
+
167
+ // Emits an event with an int value -> (name, value).
168
+ void IntEvent(const char* name, int value);
169
+ void IntPtrTEvent(const char* name, intptr_t value);
170
+
171
+ // Emits an event with an handle value -> (name, location).
172
+ void HandleEvent(const char* name, Object** location);
173
+
174
+ // Emits memory management events for C allocated structures.
175
+ void NewEvent(const char* name, void* object, size_t size);
176
+ void DeleteEvent(const char* name, void* object);
177
+
178
+ // Static versions of the above, operate on current isolate's logger.
179
+ // Used in TRACK_MEMORY(TypeName) defined in globals.h
180
+ static void NewEventStatic(const char* name, void* object, size_t size);
181
+ static void DeleteEventStatic(const char* name, void* object);
182
+
183
+ // Emits an event with a tag, and some resource usage information.
184
+ // -> (name, tag, <rusage information>).
185
+ // Currently, the resource usage information is a process time stamp
186
+ // and a real time timestamp.
187
+ void ResourceEvent(const char* name, const char* tag);
188
+
189
+ // Emits an event that an undefined property was read from an
190
+ // object.
191
+ void SuspectReadEvent(String* name, Object* obj);
192
+
193
+ // Emits an event when a message is put on or read from a debugging queue.
194
+ // DebugTag lets us put a call-site specific label on the event.
195
+ void DebugTag(const char* call_site_tag);
196
+ void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
197
+
198
+
199
+ // ==== Events logged by --log-api. ====
200
+ void ApiNamedSecurityCheck(Object* key);
201
+ void ApiIndexedSecurityCheck(uint32_t index);
202
+ void ApiNamedPropertyAccess(const char* tag, JSObject* holder, Object* name);
203
+ void ApiIndexedPropertyAccess(const char* tag,
204
+ JSObject* holder,
205
+ uint32_t index);
206
+ void ApiObjectAccess(const char* tag, JSObject* obj);
207
+ void ApiEntryCall(const char* name);
208
+
209
+
210
+ // ==== Events logged by --log-code. ====
211
+ // Emits a code event for a callback function.
212
+ void CallbackEvent(String* name, Address entry_point);
213
+ void GetterCallbackEvent(String* name, Address entry_point);
214
+ void SetterCallbackEvent(String* name, Address entry_point);
215
+ // Emits a code create event.
216
+ void CodeCreateEvent(LogEventsAndTags tag,
217
+ Code* code, const char* source);
218
+ void CodeCreateEvent(LogEventsAndTags tag,
219
+ Code* code, String* name);
220
+ void CodeCreateEvent(LogEventsAndTags tag,
221
+ Code* code,
222
+ SharedFunctionInfo* shared,
223
+ String* name);
224
+ void CodeCreateEvent(LogEventsAndTags tag,
225
+ Code* code,
226
+ SharedFunctionInfo* shared,
227
+ String* source, int line);
228
+ void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
229
+ void CodeMovingGCEvent();
230
+ // Emits a code create event for a RegExp.
231
+ void RegExpCodeCreateEvent(Code* code, String* source);
232
+ // Emits a code move event.
233
+ void CodeMoveEvent(Address from, Address to);
234
+ // Emits a code delete event.
235
+ void CodeDeleteEvent(Address from);
236
+
237
+ void SharedFunctionInfoMoveEvent(Address from, Address to);
238
+
239
+ void SnapshotPositionEvent(Address addr, int pos);
240
+
241
+ // ==== Events logged by --log-gc. ====
242
+ // Heap sampling events: start, end, and individual types.
243
+ void HeapSampleBeginEvent(const char* space, const char* kind);
244
+ void HeapSampleEndEvent(const char* space, const char* kind);
245
+ void HeapSampleItemEvent(const char* type, int number, int bytes);
246
+ void HeapSampleJSConstructorEvent(const char* constructor,
247
+ int number, int bytes);
248
+ void HeapSampleJSRetainersEvent(const char* constructor,
249
+ const char* event);
250
+ void HeapSampleJSProducerEvent(const char* constructor,
251
+ Address* stack);
252
+ void HeapSampleStats(const char* space, const char* kind,
253
+ intptr_t capacity, intptr_t used);
254
+
255
+ void SharedLibraryEvent(const char* library_path,
256
+ uintptr_t start,
257
+ uintptr_t end);
258
+ void SharedLibraryEvent(const wchar_t* library_path,
259
+ uintptr_t start,
260
+ uintptr_t end);
261
+
262
+ // ==== Events logged by --log-regexp ====
263
+ // Regexp compilation and execution events.
264
+
265
+ void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
266
+
267
+ // Log an event reported from generated code
268
+ void LogRuntime(Vector<const char> format, JSArray* args);
269
+
270
+ #ifdef ENABLE_LOGGING_AND_PROFILING
271
+ bool is_logging() {
272
+ return logging_nesting_ > 0;
273
+ }
274
+
275
+ // Pause/Resume collection of profiling data.
276
+ // When data collection is paused, CPU Tick events are discarded until
277
+ // data collection is Resumed.
278
+ void PauseProfiler(int flags, int tag);
279
+ void ResumeProfiler(int flags, int tag);
280
+ int GetActiveProfilerModules();
281
+
282
+ // If logging is performed into a memory buffer, allows to
283
+ // retrieve previously written messages. See v8.h.
284
+ int GetLogLines(int from_pos, char* dest_buf, int max_size);
285
+
286
+ // Logs all compiled functions found in the heap.
287
+ void LogCompiledFunctions();
288
+ // Logs all accessor callbacks found in the heap.
289
+ void LogAccessorCallbacks();
290
+ // Used for logging stubs found in the snapshot.
291
+ void LogCodeObjects();
292
+
293
+ // Converts tag to a corresponding NATIVE_... if the script is native.
294
+ INLINE(static LogEventsAndTags ToNativeByScript(LogEventsAndTags, Script*));
295
+
296
+ // Profiler's sampling interval (in milliseconds).
297
+ static const int kSamplingIntervalMs = 1;
298
+
299
+ // Callback from Log, stops profiling in case of insufficient resources.
300
+ void LogFailure();
301
+
302
+ private:
303
+ Logger();
304
+ ~Logger();
305
+
306
+ // Emits the profiler's first message.
307
+ void ProfilerBeginEvent();
308
+
309
+ // Emits callback event messages.
310
+ void CallbackEventInternal(const char* prefix,
311
+ const char* name,
312
+ Address entry_point);
313
+
314
+ // Internal configurable move event.
315
+ void MoveEventInternal(LogEventsAndTags event, Address from, Address to);
316
+
317
+ // Internal configurable move event.
318
+ void DeleteEventInternal(LogEventsAndTags event, Address from);
319
+
320
+ // Emits the source code of a regexp. Used by regexp events.
321
+ void LogRegExpSource(Handle<JSRegExp> regexp);
322
+
323
+ // Used for logging stubs found in the snapshot.
324
+ void LogCodeObject(Object* code_object);
325
+
326
+ // Emits general information about generated code.
327
+ void LogCodeInfo();
328
+
329
+ // Handles code creation when low-level profiling is active.
330
+ void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
331
+
332
+ // Emits a profiler tick event. Used by the profiler thread.
333
+ void TickEvent(TickSample* sample, bool overflow);
334
+
335
+ void ApiEvent(const char* name, ...);
336
+
337
+ // Logs a StringEvent regardless of whether FLAG_log is true.
338
+ void UncheckedStringEvent(const char* name, const char* value);
339
+
340
+ // Logs an IntEvent regardless of whether FLAG_log is true.
341
+ void UncheckedIntEvent(const char* name, int value);
342
+ void UncheckedIntPtrTEvent(const char* name, intptr_t value);
343
+
344
+ // Returns whether profiler's sampler is active.
345
+ bool IsProfilerSamplerActive();
346
+
347
+ // The sampler used by the profiler and the sliding state window.
348
+ Ticker* ticker_;
349
+
350
+ // When the statistical profile is active, profiler_
351
+ // points to a Profiler, that handles collection
352
+ // of samples.
353
+ Profiler* profiler_;
354
+
355
+ // SlidingStateWindow instance keeping a sliding window of the most
356
+ // recent VM states.
357
+ SlidingStateWindow* sliding_state_window_;
358
+
359
+ // An array of log events names.
360
+ const char* const* log_events_;
361
+
362
+ // Internal implementation classes with access to
363
+ // private members.
364
+ friend class EventLog;
365
+ friend class Isolate;
366
+ friend class LogMessageBuilder;
367
+ friend class TimeLog;
368
+ friend class Profiler;
369
+ friend class SlidingStateWindow;
370
+ friend class StackTracer;
371
+ friend class VMState;
372
+
373
+ friend class LoggerTestHelper;
374
+
375
+
376
+ int logging_nesting_;
377
+ int cpu_profiler_nesting_;
378
+ int heap_profiler_nesting_;
379
+
380
+ Log* log_;
381
+
382
+ // Guards against multiple calls to TearDown() that can happen in some tests.
383
+ // 'true' between Setup() and TearDown().
384
+ bool is_initialized_;
385
+
386
+ // Support for 'incremental addresses' in compressed logs:
387
+ // LogMessageBuilder::AppendAddress(Address addr)
388
+ Address last_address_;
389
+ // Logger::TickEvent(...)
390
+ Address prev_sp_;
391
+ Address prev_function_;
392
+ // Logger::MoveEventInternal(...)
393
+ Address prev_to_;
394
+ // Logger::FunctionCreateEvent(...)
395
+ Address prev_code_;
396
+
397
+ friend class CpuProfiler;
398
+ #else
399
+ bool is_logging() { return false; }
400
+ #endif
401
+ };
402
+
403
+
404
+ // Process wide registry of samplers.
405
+ class SamplerRegistry : public AllStatic {
406
+ public:
407
+ enum State {
408
+ HAS_NO_SAMPLERS,
409
+ HAS_SAMPLERS,
410
+ HAS_CPU_PROFILING_SAMPLERS
411
+ };
412
+
413
+ typedef void (*VisitSampler)(Sampler*, void*);
414
+
415
+ static State GetState();
416
+
417
+ // Iterates over all active samplers keeping the internal lock held.
418
+ // Returns whether there are any active samplers.
419
+ static bool IterateActiveSamplers(VisitSampler func, void* param);
420
+
421
+ // Adds/Removes an active sampler.
422
+ static void AddActiveSampler(Sampler* sampler);
423
+ static void RemoveActiveSampler(Sampler* sampler);
424
+
425
+ private:
426
+ static bool ActiveSamplersExist() {
427
+ return active_samplers_ != NULL && !active_samplers_->is_empty();
428
+ }
429
+
430
+ static Mutex* mutex_; // Protects the state below.
431
+ static List<Sampler*>* active_samplers_;
432
+
433
+ DISALLOW_IMPLICIT_CONSTRUCTORS(SamplerRegistry);
434
+ };
435
+
436
+
437
+ // Class that extracts stack trace, used for profiling.
438
+ class StackTracer : public AllStatic {
439
+ public:
440
+ static void Trace(Isolate* isolate, TickSample* sample);
441
+ };
442
+
443
+ } } // namespace v8::internal
444
+
445
+
446
+ #endif // V8_LOG_H_
File without changes
File without changes
@@ -0,0 +1,3092 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #include "compilation-cache.h"
31
+ #include "execution.h"
32
+ #include "heap-profiler.h"
33
+ #include "gdb-jit.h"
34
+ #include "global-handles.h"
35
+ #include "ic-inl.h"
36
+ #include "liveobjectlist-inl.h"
37
+ #include "mark-compact.h"
38
+ #include "objects-visiting.h"
39
+ #include "stub-cache.h"
40
+
41
+ namespace v8 {
42
+ namespace internal {
43
+
44
+ // -------------------------------------------------------------------------
45
+ // MarkCompactCollector
46
+
47
+ MarkCompactCollector::MarkCompactCollector() : // NOLINT
48
+ #ifdef DEBUG
49
+ state_(IDLE),
50
+ #endif
51
+ force_compaction_(false),
52
+ compacting_collection_(false),
53
+ compact_on_next_gc_(false),
54
+ previous_marked_count_(0),
55
+ tracer_(NULL),
56
+ #ifdef DEBUG
57
+ live_young_objects_size_(0),
58
+ live_old_pointer_objects_size_(0),
59
+ live_old_data_objects_size_(0),
60
+ live_code_objects_size_(0),
61
+ live_map_objects_size_(0),
62
+ live_cell_objects_size_(0),
63
+ live_lo_objects_size_(0),
64
+ live_bytes_(0),
65
+ #endif
66
+ heap_(NULL),
67
+ code_flusher_(NULL) { }
68
+
69
+
70
+ void MarkCompactCollector::CollectGarbage() {
71
+ // Make sure that Prepare() has been called. The individual steps below will
72
+ // update the state as they proceed.
73
+ ASSERT(state_ == PREPARE_GC);
74
+
75
+ // Prepare has selected whether to compact the old generation or not.
76
+ // Tell the tracer.
77
+ if (IsCompacting()) tracer_->set_is_compacting();
78
+
79
+ MarkLiveObjects();
80
+
81
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
82
+
83
+ SweepLargeObjectSpace();
84
+
85
+ if (IsCompacting()) {
86
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
87
+ EncodeForwardingAddresses();
88
+
89
+ heap()->MarkMapPointersAsEncoded(true);
90
+ UpdatePointers();
91
+ heap()->MarkMapPointersAsEncoded(false);
92
+ heap()->isolate()->pc_to_code_cache()->Flush();
93
+
94
+ RelocateObjects();
95
+ } else {
96
+ SweepSpaces();
97
+ heap()->isolate()->pc_to_code_cache()->Flush();
98
+ }
99
+
100
+ Finish();
101
+
102
+ // Save the count of marked objects remaining after the collection and
103
+ // null out the GC tracer.
104
+ previous_marked_count_ = tracer_->marked_count();
105
+ ASSERT(previous_marked_count_ == 0);
106
+ tracer_ = NULL;
107
+ }
108
+
109
+
110
+ void MarkCompactCollector::Prepare(GCTracer* tracer) {
111
+ // Rather than passing the tracer around we stash it in a static member
112
+ // variable.
113
+ tracer_ = tracer;
114
+
115
+ #ifdef DEBUG
116
+ ASSERT(state_ == IDLE);
117
+ state_ = PREPARE_GC;
118
+ #endif
119
+ ASSERT(!FLAG_always_compact || !FLAG_never_compact);
120
+
121
+ compacting_collection_ =
122
+ FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
123
+ compact_on_next_gc_ = false;
124
+
125
+ if (FLAG_never_compact) compacting_collection_ = false;
126
+ if (!heap()->map_space()->MapPointersEncodable())
127
+ compacting_collection_ = false;
128
+ if (FLAG_collect_maps) CreateBackPointers();
129
+ #ifdef ENABLE_GDB_JIT_INTERFACE
130
+ if (FLAG_gdbjit) {
131
+ // If GDBJIT interface is active disable compaction.
132
+ compacting_collection_ = false;
133
+ }
134
+ #endif
135
+
136
+ PagedSpaces spaces;
137
+ for (PagedSpace* space = spaces.next();
138
+ space != NULL; space = spaces.next()) {
139
+ space->PrepareForMarkCompact(compacting_collection_);
140
+ }
141
+
142
+ #ifdef DEBUG
143
+ live_bytes_ = 0;
144
+ live_young_objects_size_ = 0;
145
+ live_old_pointer_objects_size_ = 0;
146
+ live_old_data_objects_size_ = 0;
147
+ live_code_objects_size_ = 0;
148
+ live_map_objects_size_ = 0;
149
+ live_cell_objects_size_ = 0;
150
+ live_lo_objects_size_ = 0;
151
+ #endif
152
+ }
153
+
154
+
155
+ void MarkCompactCollector::Finish() {
156
+ #ifdef DEBUG
157
+ ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
158
+ state_ = IDLE;
159
+ #endif
160
+ // The stub cache is not traversed during GC; clear the cache to
161
+ // force lazy re-initialization of it. This must be done after the
162
+ // GC, because it relies on the new address of certain old space
163
+ // objects (empty string, illegal builtin).
164
+ heap()->isolate()->stub_cache()->Clear();
165
+
166
+ heap()->external_string_table_.CleanUp();
167
+
168
+ // If we've just compacted old space there's no reason to check the
169
+ // fragmentation limit. Just return.
170
+ if (HasCompacted()) return;
171
+
172
+ // We compact the old generation on the next GC if it has gotten too
173
+ // fragmented (ie, we could recover an expected amount of space by
174
+ // reclaiming the waste and free list blocks).
175
+ static const int kFragmentationLimit = 15; // Percent.
176
+ static const int kFragmentationAllowed = 1 * MB; // Absolute.
177
+ intptr_t old_gen_recoverable = 0;
178
+ intptr_t old_gen_used = 0;
179
+
180
+ OldSpaces spaces;
181
+ for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
182
+ old_gen_recoverable += space->Waste() + space->AvailableFree();
183
+ old_gen_used += space->Size();
184
+ }
185
+
186
+ int old_gen_fragmentation =
187
+ static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
188
+ if (old_gen_fragmentation > kFragmentationLimit &&
189
+ old_gen_recoverable > kFragmentationAllowed) {
190
+ compact_on_next_gc_ = true;
191
+ }
192
+ }
193
+
194
+
195
+ // -------------------------------------------------------------------------
196
+ // Phase 1: tracing and marking live objects.
197
+ // before: all objects are in normal state.
198
+ // after: a live object's map pointer is marked as '00'.
199
+
200
+ // Marking all live objects in the heap as part of mark-sweep or mark-compact
201
+ // collection. Before marking, all objects are in their normal state. After
202
+ // marking, live objects' map pointers are marked indicating that the object
203
+ // has been found reachable.
204
+ //
205
+ // The marking algorithm is a (mostly) depth-first (because of possible stack
206
+ // overflow) traversal of the graph of objects reachable from the roots. It
207
+ // uses an explicit stack of pointers rather than recursion. The young
208
+ // generation's inactive ('from') space is used as a marking stack. The
209
+ // objects in the marking stack are the ones that have been reached and marked
210
+ // but their children have not yet been visited.
211
+ //
212
+ // The marking stack can overflow during traversal. In that case, we set an
213
+ // overflow flag. When the overflow flag is set, we continue marking objects
214
+ // reachable from the objects on the marking stack, but no longer push them on
215
+ // the marking stack. Instead, we mark them as both marked and overflowed.
216
+ // When the stack is in the overflowed state, objects marked as overflowed
217
+ // have been reached and marked but their children have not been visited yet.
218
+ // After emptying the marking stack, we clear the overflow flag and traverse
219
+ // the heap looking for objects marked as overflowed, push them on the stack,
220
+ // and continue with marking. This process repeats until all reachable
221
+ // objects have been marked.
222
+
223
+ class CodeFlusher {
224
+ public:
225
+ explicit CodeFlusher(Isolate* isolate)
226
+ : isolate_(isolate),
227
+ jsfunction_candidates_head_(NULL),
228
+ shared_function_info_candidates_head_(NULL) {}
229
+
230
+ void AddCandidate(SharedFunctionInfo* shared_info) {
231
+ SetNextCandidate(shared_info, shared_function_info_candidates_head_);
232
+ shared_function_info_candidates_head_ = shared_info;
233
+ }
234
+
235
+ void AddCandidate(JSFunction* function) {
236
+ ASSERT(function->unchecked_code() ==
237
+ function->unchecked_shared()->unchecked_code());
238
+
239
+ SetNextCandidate(function, jsfunction_candidates_head_);
240
+ jsfunction_candidates_head_ = function;
241
+ }
242
+
243
+ void ProcessCandidates() {
244
+ ProcessSharedFunctionInfoCandidates();
245
+ ProcessJSFunctionCandidates();
246
+ }
247
+
248
+ private:
249
+ void ProcessJSFunctionCandidates() {
250
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
251
+
252
+ JSFunction* candidate = jsfunction_candidates_head_;
253
+ JSFunction* next_candidate;
254
+ while (candidate != NULL) {
255
+ next_candidate = GetNextCandidate(candidate);
256
+
257
+ SharedFunctionInfo* shared = candidate->unchecked_shared();
258
+
259
+ Code* code = shared->unchecked_code();
260
+ if (!code->IsMarked()) {
261
+ shared->set_code(lazy_compile);
262
+ candidate->set_code(lazy_compile);
263
+ } else {
264
+ candidate->set_code(shared->unchecked_code());
265
+ }
266
+
267
+ candidate = next_candidate;
268
+ }
269
+
270
+ jsfunction_candidates_head_ = NULL;
271
+ }
272
+
273
+
274
+ void ProcessSharedFunctionInfoCandidates() {
275
+ Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
276
+
277
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
278
+ SharedFunctionInfo* next_candidate;
279
+ while (candidate != NULL) {
280
+ next_candidate = GetNextCandidate(candidate);
281
+ SetNextCandidate(candidate, NULL);
282
+
283
+ Code* code = candidate->unchecked_code();
284
+ if (!code->IsMarked()) {
285
+ candidate->set_code(lazy_compile);
286
+ }
287
+
288
+ candidate = next_candidate;
289
+ }
290
+
291
+ shared_function_info_candidates_head_ = NULL;
292
+ }
293
+
294
+ static JSFunction** GetNextCandidateField(JSFunction* candidate) {
295
+ return reinterpret_cast<JSFunction**>(
296
+ candidate->address() + JSFunction::kCodeEntryOffset);
297
+ }
298
+
299
+ static JSFunction* GetNextCandidate(JSFunction* candidate) {
300
+ return *GetNextCandidateField(candidate);
301
+ }
302
+
303
+ static void SetNextCandidate(JSFunction* candidate,
304
+ JSFunction* next_candidate) {
305
+ *GetNextCandidateField(candidate) = next_candidate;
306
+ }
307
+
308
+ STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
309
+
310
+ static SharedFunctionInfo** GetNextCandidateField(
311
+ SharedFunctionInfo* candidate) {
312
+ Code* code = candidate->unchecked_code();
313
+ return reinterpret_cast<SharedFunctionInfo**>(
314
+ code->address() + Code::kHeaderPaddingStart);
315
+ }
316
+
317
+ static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
318
+ return *GetNextCandidateField(candidate);
319
+ }
320
+
321
+ static void SetNextCandidate(SharedFunctionInfo* candidate,
322
+ SharedFunctionInfo* next_candidate) {
323
+ *GetNextCandidateField(candidate) = next_candidate;
324
+ }
325
+
326
+ Isolate* isolate_;
327
+ JSFunction* jsfunction_candidates_head_;
328
+ SharedFunctionInfo* shared_function_info_candidates_head_;
329
+
330
+ DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
331
+ };
332
+
333
+
334
+ MarkCompactCollector::~MarkCompactCollector() {
335
+ if (code_flusher_ != NULL) {
336
+ delete code_flusher_;
337
+ code_flusher_ = NULL;
338
+ }
339
+ }
340
+
341
+
342
+ static inline HeapObject* ShortCircuitConsString(Object** p) {
343
+ // Optimization: If the heap object pointed to by p is a non-symbol
344
+ // cons string whose right substring is HEAP->empty_string, update
345
+ // it in place to its left substring. Return the updated value.
346
+ //
347
+ // Here we assume that if we change *p, we replace it with a heap object
348
+ // (ie, the left substring of a cons string is always a heap object).
349
+ //
350
+ // The check performed is:
351
+ // object->IsConsString() && !object->IsSymbol() &&
352
+ // (ConsString::cast(object)->second() == HEAP->empty_string())
353
+ // except the maps for the object and its possible substrings might be
354
+ // marked.
355
+ HeapObject* object = HeapObject::cast(*p);
356
+ MapWord map_word = object->map_word();
357
+ map_word.ClearMark();
358
+ InstanceType type = map_word.ToMap()->instance_type();
359
+ if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
360
+
361
+ Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
362
+ Heap* heap = map_word.ToMap()->heap();
363
+ if (second != heap->raw_unchecked_empty_string()) {
364
+ return object;
365
+ }
366
+
367
+ // Since we don't have the object's start, it is impossible to update the
368
+ // page dirty marks. Therefore, we only replace the string with its left
369
+ // substring when page dirty marks do not change.
370
+ Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
371
+ if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object;
372
+
373
+ *p = first;
374
+ return HeapObject::cast(first);
375
+ }
376
+
377
+
378
// Static visitor used during the marking phase of mark-compact GC.  Dispatch
// happens through a table indexed by the visitor id cached in each object's
// map, so marking an object's body is a single indirect call.  This class
// also implements the code-flushing policy: compiled code of functions that
// have not run for kCodeAgeThreshold collections may be dropped and lazily
// recompiled later.
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Mark the body of |obj| using the visitor registered for its map.
  static inline void IterateBody(Map* map, HeapObject* obj) {
    table_.GetVisitor(map)(map, obj);
  }

  // Populate the dispatch table.  Must run before any marking starts.
  static void Initialize() {
    table_.Register(kVisitShortcutCandidate,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      ConsString::BodyDescriptor,
                                      void>::Visit);

    table_.Register(kVisitConsString,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      ConsString::BodyDescriptor,
                                      void>::Visit);


    table_.Register(kVisitFixedArray,
                    &FlexibleBodyVisitor<StaticMarkingVisitor,
                                         FixedArray::BodyDescriptor,
                                         void>::Visit);

    table_.Register(kVisitGlobalContext,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      Context::MarkCompactBodyDescriptor,
                                      void>::Visit);

    // Pure data objects contain no pointers; their bodies need no visit.
    table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
    table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
    table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

    table_.Register(kVisitOddball,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      Oddball::BodyDescriptor,
                                      void>::Visit);
    table_.Register(kVisitMap,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      Map::BodyDescriptor,
                                      void>::Visit);

    table_.Register(kVisitCode, &VisitCode);

    // SharedFunctionInfo and JSFunction get dedicated visitors so that
    // flushable code can be identified while marking.
    table_.Register(kVisitSharedFunctionInfo,
                    &VisitSharedFunctionInfoAndFlushCode);

    table_.Register(kVisitJSFunction,
                    &VisitJSFunctionAndFlushCode);

    table_.Register(kVisitPropertyCell,
                    &FixedBodyVisitor<StaticMarkingVisitor,
                                      JSGlobalPropertyCell::BodyDescriptor,
                                      void>::Visit);

    table_.RegisterSpecializations<DataObjectVisitor,
                                   kVisitDataObject,
                                   kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<JSObjectVisitor,
                                   kVisitJSObject,
                                   kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<StructObjectVisitor,
                                   kVisitStruct,
                                   kVisitStructGeneric>();
  }

  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
    MarkObjectByPointer(heap, p);
  }

  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    // Mark all objects pointed to in [start, end).  Large ranges are marked
    // recursively (faster) unless we are near the C stack limit.
    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {
      if (VisitUnmarkedObjects(heap, start, end)) return;
      // We are close to a stack overflow, so just mark the objects.
    }
    for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
  }

  // Mark code referenced from a call site, clearing inline caches first when
  // the flag requests it (cleared IC targets live in the non-monomorphic
  // cache and need no marking here).
  static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
    if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
      IC::Clear(rinfo->pc());
      // Please note targets for cleared inline caches do not have to be
      // marked since they are contained in HEAP->non_monomorphic_cache().
    } else {
      heap->mark_compact_collector()->MarkObject(code);
    }
  }

  // Mark the JSGlobalPropertyCell embedded in code, rewriting the reloc slot
  // if marking short-circuited a cons string (changed the pointer).
  static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
    ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
    Object* cell = rinfo->target_cell();
    Object* old_cell = cell;
    VisitPointer(heap, &cell);
    if (cell != old_cell) {
      rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
    }
  }

  // Mark the code a debug break / patched return sequence jumps to.
  static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
    heap->mark_compact_collector()->MarkObject(code);
  }

  // Mark object pointed to by p.
  INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
    if (!(*p)->IsHeapObject()) return;
    HeapObject* object = ShortCircuitConsString(p);
    if (!object->IsMarked()) {
      heap->mark_compact_collector()->MarkUnmarkedObject(object);
    }
  }


  // Visit an unmarked object: set its mark bit, mark its map, then recurse
  // into its body directly (no marking-stack round trip).
  INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
                                         HeapObject* obj)) {
#ifdef DEBUG
    ASSERT(Isolate::Current()->heap()->Contains(obj));
    ASSERT(!obj->IsMarked());
#endif
    Map* map = obj->map();
    collector->SetMark(obj);
    // Mark the map pointer and the body.
    if (!map->IsMarked()) collector->MarkUnmarkedObject(map);
    IterateBody(map, obj);
  }

  // Visit all unmarked objects pointed to by [start, end).
  // Returns false if the operation fails (lack of stack space).
  static inline bool VisitUnmarkedObjects(Heap* heap,
                                          Object** start,
                                          Object** end) {
    // Return false if we are close to the stack limit.
    StackLimitCheck check(heap->isolate());
    if (check.HasOverflowed()) return false;

    MarkCompactCollector* collector = heap->mark_compact_collector();
    // Visit the unmarked objects.
    for (Object** p = start; p < end; p++) {
      if (!(*p)->IsHeapObject()) continue;
      HeapObject* obj = HeapObject::cast(*p);
      if (obj->IsMarked()) continue;
      VisitUnmarkedObject(collector, obj);
    }
    return true;
  }

  // External references and runtime entries are not heap pointers: no-ops.
  static inline void VisitExternalReference(Address* p) { }
  static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }

 private:
  // Visitor for objects that contain no pointers at all.
  class DataObjectVisitor {
   public:
    template<int size>
    static void VisitSpecialized(Map* map, HeapObject* object) {
    }

    static void Visit(Map* map, HeapObject* object) {
    }
  };

  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticMarkingVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  // Code objects have their own body-iteration protocol (reloc info).
  static void VisitCode(Map* map, HeapObject* object) {
    reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
        map->heap());
  }

  // Code flushing support.

  // How many collections newly compiled code object will survive before being
  // flushed.
  static const int kCodeAgeThreshold = 5;

  // True when the function's script and its source are both available, i.e.
  // the function could be recompiled after its code is flushed.
  inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
    Object* undefined = heap->raw_unchecked_undefined_value();
    return (info->script() != undefined) &&
        (reinterpret_cast<Script*>(info->script())->source() != undefined);
  }


  inline static bool IsCompiled(JSFunction* function) {
    return function->unchecked_code() !=
        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
  }

  inline static bool IsCompiled(SharedFunctionInfo* function) {
    return function->unchecked_code() !=
        function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
  }

  // Decide whether this particular function's code may be flushed.
  inline static bool IsFlushable(Heap* heap, JSFunction* function) {
    SharedFunctionInfo* shared_info = function->unchecked_shared();

    // Code is either on stack, in compilation cache or referenced
    // by optimized version of function.
    if (function->unchecked_code()->IsMarked()) {
      shared_info->set_code_age(0);
      return false;
    }

    // We do not flush code for optimized functions.
    if (function->code() != shared_info->unchecked_code()) {
      return false;
    }

    return IsFlushable(heap, shared_info);
  }

  // Decide whether a SharedFunctionInfo's code may be flushed; also ages the
  // info (flushing only happens after kCodeAgeThreshold unused collections).
  inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) {
    // Code is either on stack, in compilation cache or referenced
    // by optimized version of function.
    if (shared_info->unchecked_code()->IsMarked()) {
      shared_info->set_code_age(0);
      return false;
    }

    // The function must be compiled and have the source code available,
    // to be able to recompile it in case we need the function again.
    if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
      return false;
    }

    // We never flush code for Api functions.
    Object* function_data = shared_info->function_data();
    if (function_data->IsHeapObject() &&
        (SafeMap(function_data)->instance_type() ==
         FUNCTION_TEMPLATE_INFO_TYPE)) {
      return false;
    }

    // Only flush code for functions.
    if (shared_info->code()->kind() != Code::FUNCTION) return false;

    // Function must be lazy compilable.
    if (!shared_info->allows_lazy_compilation()) return false;

    // If this is a full script wrapped in a function we do not flush the code.
    if (shared_info->is_toplevel()) return false;

    // Age this shared function info.
    if (shared_info->code_age() < kCodeAgeThreshold) {
      shared_info->set_code_age(shared_info->code_age() + 1);
      return false;
    }

    return true;
  }


  static bool FlushCodeForFunction(Heap* heap, JSFunction* function) {
    if (!IsFlushable(heap, function)) return false;

    // This function's code looks flushable. But we have to postpone the
    // decision until we see all functions that point to the same
    // SharedFunctionInfo because some of them might be optimized.
    // That would make the nonoptimized version of the code nonflushable,
    // because it is required for bailing out from optimized code.
    heap->mark_compact_collector()->code_flusher()->AddCandidate(function);
    return true;
  }


  // Recover the real map of a possibly marked/overflowed object (the mark
  // and overflow bits are stored in the map word).
  static inline Map* SafeMap(Object* obj) {
    MapWord map_word = HeapObject::cast(obj)->map_word();
    map_word.ClearMark();
    map_word.ClearOverflow();
    return map_word.ToMap();
  }


  static inline bool IsJSBuiltinsObject(Object* obj) {
    return obj->IsHeapObject() &&
        (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE);
  }


  // A function is only a flush candidate if it has a real (non-builtins)
  // context; builtins must keep their code.
  static inline bool IsValidNotBuiltinContext(Object* ctx) {
    if (!ctx->IsHeapObject()) return false;

    Map* map = SafeMap(ctx);
    Heap* heap = map->heap();
    if (!(map == heap->raw_unchecked_context_map() ||
          map == heap->raw_unchecked_catch_context_map() ||
          map == heap->raw_unchecked_global_context_map())) {
      return false;
    }

    Context* context = reinterpret_cast<Context*>(ctx);

    if (IsJSBuiltinsObject(context->global())) {
      return false;
    }

    return true;
  }


  // Plain SFI visit used when code flushing is disabled.
  static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

    FixedBodyVisitor<StaticMarkingVisitor,
                     SharedFunctionInfo::BodyDescriptor,
                     void>::Visit(map, object);
  }


  static void VisitSharedFunctionInfoAndFlushCode(Map* map,
                                                  HeapObject* object) {
    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitSharedFunctionInfoGeneric(map, object);
      return;
    }
    VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
  }


  // |known_flush_code_candidate| is true when the caller (a JSFunction visit)
  // already registered this SFI as a candidate, to avoid double registration.
  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
      Map* map, HeapObject* object, bool known_flush_code_candidate) {
    Heap* heap = map->heap();
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

    if (!known_flush_code_candidate) {
      known_flush_code_candidate = IsFlushable(heap, shared);
      if (known_flush_code_candidate) {
        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
      }
    }

    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
  }


  // Mark the Code reachable through a raw code-entry slot, updating the slot
  // if the pointer changed (cons string short-circuiting).
  static void VisitCodeEntry(Heap* heap, Address entry_address) {
    Object* code = Code::GetObjectFromEntryAddress(entry_address);
    Object* old_code = code;
    VisitPointer(heap, &code);
    if (code != old_code) {
      Memory::Address_at(entry_address) =
          reinterpret_cast<Code*>(code)->entry();
    }
  }


  static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
    Heap* heap = map->heap();
    MarkCompactCollector* collector = heap->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitJSFunction(map, object);
      return;
    }

    JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
    // The function must have a valid context and not be a builtin.
    bool flush_code_candidate = false;
    if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
    }

    if (!flush_code_candidate) {
      collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());

      if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
        // For optimized functions we should retain both non-optimized version
        // of its code and non-optimized version of all inlined functions.
        // This is required to support bailing out from inlined code.
        DeoptimizationInputData* data =
            reinterpret_cast<DeoptimizationInputData*>(
                jsfunction->unchecked_code()->unchecked_deoptimization_data());

        FixedArray* literals = data->UncheckedLiteralArray();

        for (int i = 0, count = data->InlinedFunctionCount()->value();
             i < count;
             i++) {
          JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
          collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
        }
      }
    }

    VisitJSFunctionFields(map,
                          reinterpret_cast<JSFunction*>(object),
                          flush_code_candidate);
  }


  static void VisitJSFunction(Map* map, HeapObject* object) {
    VisitJSFunctionFields(map,
                          reinterpret_cast<JSFunction*>(object),
                          false);
  }


#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)


  // Visit a JSFunction's fields, skipping the code entry when the function is
  // a flush candidate and skipping the weak next-function-link field always.
  static inline void VisitJSFunctionFields(Map* map,
                                           JSFunction* object,
                                           bool flush_code_candidate) {
    Heap* heap = map->heap();
    MarkCompactCollector* collector = heap->mark_compact_collector();

    VisitPointers(heap,
                  SLOT_ADDR(object, JSFunction::kPropertiesOffset),
                  SLOT_ADDR(object, JSFunction::kCodeEntryOffset));

    if (!flush_code_candidate) {
      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
    } else {
      // Don't visit code object.

      // Visit shared function info to avoid double checking of its
      // flushability.
      SharedFunctionInfo* shared_info = object->unchecked_shared();
      if (!shared_info->IsMarked()) {
        Map* shared_info_map = shared_info->map();
        collector->SetMark(shared_info);
        collector->MarkObject(shared_info_map);
        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
                                                   shared_info,
                                                   true);
      }
    }

    VisitPointers(heap,
                  SLOT_ADDR(object,
                            JSFunction::kCodeEntryOffset + kPointerSize),
                  SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));

    // Don't visit the next function list field as it is a weak reference.
  }


  // Visit an SFI's fields; the code slot is skipped for flush candidates so
  // the (dead) code is not kept alive by this visit.
  static void VisitSharedFunctionInfoFields(Heap* heap,
                                            HeapObject* object,
                                            bool flush_code_candidate) {
    VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));

    if (!flush_code_candidate) {
      VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
    }

    VisitPointers(heap,
                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
                  SLOT_ADDR(object, SharedFunctionInfo::kSize));
  }

#undef SLOT_ADDR

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
853
+
854
+
855
// Storage for StaticMarkingVisitor's dispatch table (filled by Initialize()).
VisitorDispatchTable<StaticMarkingVisitor::Callback>
    StaticMarkingVisitor::table_;
857
+
858
+
859
// Dynamic (virtual) adapter over StaticMarkingVisitor for code paths that
// require an ObjectVisitor instance (e.g. symbol table prefix iteration).
// All methods simply forward to the static visitor.
class MarkingVisitor : public ObjectVisitor {
 public:
  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }

  void VisitPointer(Object** p) {
    StaticMarkingVisitor::VisitPointer(heap_, p);
  }

  void VisitPointers(Object** start, Object** end) {
    StaticMarkingVisitor::VisitPointers(heap_, start, end);
  }

  void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
  }

  void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
  }

  void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
  }

 private:
  Heap* heap_;  // Heap this visitor marks into; not owned.
};
886
+
887
+
888
+ class CodeMarkingVisitor : public ThreadVisitor {
889
+ public:
890
+ explicit CodeMarkingVisitor(MarkCompactCollector* collector)
891
+ : collector_(collector) {}
892
+
893
+ void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
894
+ for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
895
+ collector_->MarkObject(it.frame()->unchecked_code());
896
+ }
897
+ }
898
+
899
+ private:
900
+ MarkCompactCollector* collector_;
901
+ };
902
+
903
+
904
+ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
905
+ public:
906
+ explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector)
907
+ : collector_(collector) {}
908
+
909
+ void VisitPointers(Object** start, Object** end) {
910
+ for (Object** p = start; p < end; p++) VisitPointer(p);
911
+ }
912
+
913
+ void VisitPointer(Object** slot) {
914
+ Object* obj = *slot;
915
+ if (obj->IsSharedFunctionInfo()) {
916
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
917
+ collector_->MarkObject(shared->unchecked_code());
918
+ collector_->MarkObject(shared);
919
+ }
920
+ }
921
+
922
+ private:
923
+ MarkCompactCollector* collector_;
924
+ };
925
+
926
+
927
// Decide whether code flushing is safe for this collection and, when it is,
// pre-mark all code that must never be flushed: code on the current stack,
// code on archived thread stacks, and functions in the compilation cache.
void MarkCompactCollector::PrepareForCodeFlushing() {
  ASSERT(heap() == Isolate::Current()->heap());

  if (!FLAG_flush_code) {
    EnableCodeFlushing(false);
    return;
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The debugger can hold references into compiled code; never flush while
  // it is loaded or has active break points.
  if (heap()->isolate()->debug()->IsLoaded() ||
      heap()->isolate()->debug()->has_break_points()) {
    EnableCodeFlushing(false);
    return;
  }
#endif
  EnableCodeFlushing(true);

  // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
  // relies on it being marked before any other descriptor array.
  MarkObject(heap()->raw_unchecked_empty_descriptor_array());

  // Make sure we are not referencing the code from the stack.
  ASSERT(this == heap()->mark_compact_collector());
  for (StackFrameIterator it; !it.done(); it.Advance()) {
    MarkObject(it.frame()->unchecked_code());
  }

  // Iterate the archived stacks in all threads to check if
  // the code is referenced.
  CodeMarkingVisitor code_marking_visitor(this);
  heap()->isolate()->thread_manager()->IterateArchivedThreads(
      &code_marking_visitor);

  SharedFunctionInfoMarkingVisitor visitor(this);
  heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
  heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);

  // Drain the marking stack now so all of the above code is black before
  // flush candidates are evaluated.
  ProcessMarkingStack();
}
966
+
967
+
968
+ // Visitor class for marking heap roots.
969
+ class RootMarkingVisitor : public ObjectVisitor {
970
+ public:
971
+ explicit RootMarkingVisitor(Heap* heap)
972
+ : collector_(heap->mark_compact_collector()) { }
973
+
974
+ void VisitPointer(Object** p) {
975
+ MarkObjectByPointer(p);
976
+ }
977
+
978
+ void VisitPointers(Object** start, Object** end) {
979
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
980
+ }
981
+
982
+ private:
983
+ void MarkObjectByPointer(Object** p) {
984
+ if (!(*p)->IsHeapObject()) return;
985
+
986
+ // Replace flat cons strings in place.
987
+ HeapObject* object = ShortCircuitConsString(p);
988
+ if (object->IsMarked()) return;
989
+
990
+ Map* map = object->map();
991
+ // Mark the object.
992
+ collector_->SetMark(object);
993
+
994
+ // Mark the map pointer and body, and push them on the marking stack.
995
+ collector_->MarkObject(map);
996
+ StaticMarkingVisitor::IterateBody(map, object);
997
+
998
+ // Mark all the objects reachable from the map and body. May leave
999
+ // overflowed objects in the heap.
1000
+ collector_->EmptyMarkingStack();
1001
+ }
1002
+
1003
+ MarkCompactCollector* collector_;
1004
+ };
1005
+
1006
+
1007
+ // Helper class for pruning the symbol table.
1008
+ class SymbolTableCleaner : public ObjectVisitor {
1009
+ public:
1010
+ explicit SymbolTableCleaner(Heap* heap)
1011
+ : heap_(heap), pointers_removed_(0) { }
1012
+
1013
+ virtual void VisitPointers(Object** start, Object** end) {
1014
+ // Visit all HeapObject pointers in [start, end).
1015
+ for (Object** p = start; p < end; p++) {
1016
+ if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
1017
+ // Check if the symbol being pruned is an external symbol. We need to
1018
+ // delete the associated external data as this symbol is going away.
1019
+
1020
+ // Since no objects have yet been moved we can safely access the map of
1021
+ // the object.
1022
+ if ((*p)->IsExternalString()) {
1023
+ heap_->FinalizeExternalString(String::cast(*p));
1024
+ }
1025
+ // Set the entry to null_value (as deleted).
1026
+ *p = heap_->raw_unchecked_null_value();
1027
+ pointers_removed_++;
1028
+ }
1029
+ }
1030
+ }
1031
+
1032
+ int PointersRemoved() {
1033
+ return pointers_removed_;
1034
+ }
1035
+ private:
1036
+ Heap* heap_;
1037
+ int pointers_removed_;
1038
+ };
1039
+
1040
+
1041
+ // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
1042
+ // are retained.
1043
+ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
1044
+ public:
1045
+ virtual Object* RetainAs(Object* object) {
1046
+ MapWord first_word = HeapObject::cast(object)->map_word();
1047
+ if (first_word.IsMarked()) {
1048
+ return object;
1049
+ } else {
1050
+ return NULL;
1051
+ }
1052
+ }
1053
+ };
1054
+
1055
+
1056
// Mark a white object and schedule its body for scanning.  Maps receive
// special treatment: their code caches may be cleared, and (when map
// collection is enabled) JS object maps have their contents traced eagerly
// so that map transitions stay weak and dead maps can be collected.
void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
  ASSERT(!object->IsMarked());
  ASSERT(HEAP->Contains(object));
  if (object->IsMap()) {
    Map* map = Map::cast(object);
    if (FLAG_cleanup_caches_in_maps_at_gc) {
      map->ClearCodeCache(heap());
    }
    SetMark(map);
    if (FLAG_collect_maps &&
        map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
        map->instance_type() <= JS_FUNCTION_TYPE) {
      // JS object maps are traced specially instead of being pushed whole.
      MarkMapContents(map);
    } else {
      marking_stack_.Push(map);
    }
  } else {
    SetMark(object);
    marking_stack_.Push(object);
  }
}
1077
+
1078
+
1079
// Mark the contents of a JS object map without pushing the map itself on the
// marking stack: the descriptor array is traced specially (transitions stay
// weak) and the remaining pointer fields are visited directly.
void MarkCompactCollector::MarkMapContents(Map* map) {
  MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
      *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));

  // Mark the Object* fields of the Map.
  // Since the descriptor array has been marked already, it is fine
  // that one of these fields contains a pointer to it.
  Object** start_slot = HeapObject::RawField(map,
                                             Map::kPointerFieldsBeginOffset);

  Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);

  StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
}
1093
+
1094
+
1095
// Mark a descriptor array and its contents, deliberately NOT marking the
// targets of transition descriptors so that unused map transitions can be
// collected.
void MarkCompactCollector::MarkDescriptorArray(
    DescriptorArray* descriptors) {
  if (descriptors->IsMarked()) return;
  // Empty descriptor array is marked as a root before any maps are marked.
  ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
  SetMark(descriptors);

  FixedArray* contents = reinterpret_cast<FixedArray*>(
      descriptors->get(DescriptorArray::kContentArrayIndex));
  ASSERT(contents->IsHeapObject());
  ASSERT(!contents->IsMarked());
  ASSERT(contents->IsFixedArray());
  ASSERT(contents->length() >= 2);
  SetMark(contents);
  // Contents contains (value, details) pairs. If the details say that the type
  // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
  // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
  // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
  // CONSTANT_TRANSITION is the value an Object* (a Map*).
  for (int i = 0; i < contents->length(); i += 2) {
    // If the pair (value, details) at index i, i+1 is not
    // a transition or null descriptor, mark the value.
    PropertyDetails details(Smi::cast(contents->get(i + 1)));
    if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
      HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
      if (object->IsHeapObject() && !object->IsMarked()) {
        SetMark(object);
        marking_stack_.Push(object);
      }
    }
  }
  // The DescriptorArray descriptors contains a pointer to its contents array,
  // but the contents array is already marked.
  marking_stack_.Push(descriptors);
}
1130
+
1131
+
1132
+ void MarkCompactCollector::CreateBackPointers() {
1133
+ HeapObjectIterator iterator(heap()->map_space());
1134
+ for (HeapObject* next_object = iterator.next();
1135
+ next_object != NULL; next_object = iterator.next()) {
1136
+ if (next_object->IsMap()) { // Could also be ByteArray on free list.
1137
+ Map* map = Map::cast(next_object);
1138
+ if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1139
+ map->instance_type() <= JS_FUNCTION_TYPE) {
1140
+ map->CreateBackPointers();
1141
+ } else {
1142
+ ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
1143
+ }
1144
+ }
1145
+ }
1146
+ }
1147
+
1148
+
1149
+ static int OverflowObjectSize(HeapObject* obj) {
1150
+ // Recover the normal map pointer, it might be marked as live and
1151
+ // overflowed.
1152
+ MapWord map_word = obj->map_word();
1153
+ map_word.ClearMark();
1154
+ map_word.ClearOverflow();
1155
+ return obj->SizeFromMap(map_word.ToMap());
1156
+ }
1157
+
1158
+
1159
// Helper for recovering from marking-stack overflow.
class OverflowedObjectsScanner : public AllStatic {
 public:
  // Fill the marking stack with overflowed objects returned by the given
  // iterator. Stop when the marking stack is filled or the end of the space
  // is reached, whichever comes first.
  template<class T>
  static inline void ScanOverflowedObjects(MarkCompactCollector* collector,
                                           T* it) {
    // The caller should ensure that the marking stack is initially not full,
    // so that we don't waste effort pointlessly scanning for objects.
    ASSERT(!collector->marking_stack_.is_full());

    for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
      if (object->IsOverflowed()) {
        // An overflowed object is marked but its body was never scanned;
        // clear the overflow bit and re-queue it for body scanning.
        object->ClearOverflow();
        ASSERT(object->IsMarked());
        ASSERT(HEAP->Contains(object));
        collector->marking_stack_.Push(object);
        if (collector->marking_stack_.is_full()) return;
      }
    }
  }
};
1182
+
1183
+
1184
+ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
1185
+ return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
1186
+ }
1187
+
1188
+
1189
// Mark the symbol table structure and its prefix, but NOT its element
// entries: symbols themselves are weak and get pruned by SymbolTableCleaner.
void MarkCompactCollector::MarkSymbolTable() {
  SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
  // Mark the symbol table itself.
  SetMark(symbol_table);
  // Explicitly mark the prefix.
  MarkingVisitor marker(heap());
  symbol_table->IteratePrefix(&marker);
  ProcessMarkingStack();
}
1198
+
1199
+
1200
// Mark everything reachable from the strong roots, then recover from any
// marking-stack overflow left behind by per-root draining.
void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
  // Mark the heap roots including global variables, stack variables,
  // etc., and all objects reachable from them.
  heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);

  // Handle the symbol table specially.
  MarkSymbolTable();

  // There may be overflowed objects in the heap. Visit them now.
  while (marking_stack_.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack();
  }
}
1214
+
1215
+
1216
+ void MarkCompactCollector::MarkObjectGroups() {
1217
+ List<ObjectGroup*>* object_groups =
1218
+ heap()->isolate()->global_handles()->object_groups();
1219
+
1220
+ for (int i = 0; i < object_groups->length(); i++) {
1221
+ ObjectGroup* entry = object_groups->at(i);
1222
+ if (entry == NULL) continue;
1223
+
1224
+ List<Object**>& objects = entry->objects_;
1225
+ bool group_marked = false;
1226
+ for (int j = 0; j < objects.length(); j++) {
1227
+ Object* object = *objects[j];
1228
+ if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
1229
+ group_marked = true;
1230
+ break;
1231
+ }
1232
+ }
1233
+
1234
+ if (!group_marked) continue;
1235
+
1236
+ // An object in the group is marked, so mark as gray all white heap
1237
+ // objects in the group.
1238
+ for (int j = 0; j < objects.length(); ++j) {
1239
+ if ((*objects[j])->IsHeapObject()) {
1240
+ MarkObject(HeapObject::cast(*objects[j]));
1241
+ }
1242
+ }
1243
+
1244
+ // Once the entire group has been colored gray, set the object group
1245
+ // to NULL so it won't be processed again.
1246
+ delete entry;
1247
+ object_groups->at(i) = NULL;
1248
+ }
1249
+ }
1250
+
1251
+
1252
+ void MarkCompactCollector::MarkImplicitRefGroups() {
1253
+ List<ImplicitRefGroup*>* ref_groups =
1254
+ heap()->isolate()->global_handles()->implicit_ref_groups();
1255
+
1256
+ for (int i = 0; i < ref_groups->length(); i++) {
1257
+ ImplicitRefGroup* entry = ref_groups->at(i);
1258
+ if (entry == NULL) continue;
1259
+
1260
+ if (!entry->parent_->IsMarked()) continue;
1261
+
1262
+ List<Object**>& children = entry->children_;
1263
+ // A parent object is marked, so mark as gray all child white heap
1264
+ // objects.
1265
+ for (int j = 0; j < children.length(); ++j) {
1266
+ if ((*children[j])->IsHeapObject()) {
1267
+ MarkObject(HeapObject::cast(*children[j]));
1268
+ }
1269
+ }
1270
+
1271
+ // Once the entire group has been colored gray, set the group
1272
+ // to NULL so it won't be processed again.
1273
+ delete entry;
1274
+ ref_groups->at(i) = NULL;
1275
+ }
1276
+ }
1277
+
1278
+
1279
// Mark all objects reachable from the objects on the marking stack.
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
  while (!marking_stack_.is_empty()) {
    HeapObject* object = marking_stack_.Pop();
    ASSERT(object->IsHeapObject());
    ASSERT(heap()->Contains(object));
    ASSERT(object->IsMarked());
    ASSERT(!object->IsOverflowed());

    // Because the object is marked, we have to recover the original map
    // pointer and use it to mark the object's body.
    MapWord map_word = object->map_word();
    map_word.ClearMark();
    Map* map = map_word.ToMap();
    MarkObject(map);

    StaticMarkingVisitor::IterateBody(map, object);
  }
}
1301
+
1302
+
1303
+ // Sweep the heap for overflowed objects, clear their overflow bits, and
1304
+ // push them on the marking stack. Stop early if the marking stack fills
1305
+ // before sweeping completes. If sweeping completes, there are no remaining
1306
+ // overflowed objects in the heap so the overflow flag on the markings stack
1307
+ // is cleared.
1308
+ void MarkCompactCollector::RefillMarkingStack() {
1309
+ ASSERT(marking_stack_.overflowed());
1310
+
1311
+ SemiSpaceIterator new_it(heap()->new_space(), &OverflowObjectSize);
1312
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it);
1313
+ if (marking_stack_.is_full()) return;
1314
+
1315
+ HeapObjectIterator old_pointer_it(heap()->old_pointer_space(),
1316
+ &OverflowObjectSize);
1317
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it);
1318
+ if (marking_stack_.is_full()) return;
1319
+
1320
+ HeapObjectIterator old_data_it(heap()->old_data_space(), &OverflowObjectSize);
1321
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it);
1322
+ if (marking_stack_.is_full()) return;
1323
+
1324
+ HeapObjectIterator code_it(heap()->code_space(), &OverflowObjectSize);
1325
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it);
1326
+ if (marking_stack_.is_full()) return;
1327
+
1328
+ HeapObjectIterator map_it(heap()->map_space(), &OverflowObjectSize);
1329
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it);
1330
+ if (marking_stack_.is_full()) return;
1331
+
1332
+ HeapObjectIterator cell_it(heap()->cell_space(), &OverflowObjectSize);
1333
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it);
1334
+ if (marking_stack_.is_full()) return;
1335
+
1336
+ LargeObjectIterator lo_it(heap()->lo_space(), &OverflowObjectSize);
1337
+ OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it);
1338
+ if (marking_stack_.is_full()) return;
1339
+
1340
+ marking_stack_.clear_overflowed();
1341
+ }
1342
+
1343
+
1344
// Mark all objects reachable (transitively) from objects on the marking
// stack.  Before: the marking stack contains zero or more heap object
// pointers.  After: the marking stack is empty and there are no overflowed
// objects in the heap.
void MarkCompactCollector::ProcessMarkingStack() {
  EmptyMarkingStack();
  // Emptying the stack may have overflowed it again; iterate until the
  // refill/empty cycle reaches a fixpoint with no overflow.
  while (marking_stack_.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack();
  }
}
1355
+
1356
+
1357
// Mark objects kept alive by the embedder: object groups and implicit
// reference groups registered through the global handles.  Marking a group
// can make further groups eligible, so iterate until no new objects are
// pushed on the marking stack.
void MarkCompactCollector::ProcessExternalMarking() {
  bool work_to_do = true;
  ASSERT(marking_stack_.is_empty());
  while (work_to_do) {
    MarkObjectGroups();
    MarkImplicitRefGroups();
    // If the group passes pushed anything, drain the stack and try again.
    work_to_do = !marking_stack_.is_empty();
    ProcessMarkingStack();
  }
}
1367
+
1368
+
1369
// Marking phase of the mark-compact collector: computes the transitive
// closure of live objects from the roots, weak handles, and host
// application groups, then prunes tables that only weakly reference
// their entries (symbol table, external strings, code candidates).
void MarkCompactCollector::MarkLiveObjects() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
  // The recursive GC marker detects when it is nearing stack overflow,
  // and switches to a different marking system.  JS interrupts interfere
  // with the C stack limit check.
  PostponeInterruptsScope postpone(heap()->isolate());

#ifdef DEBUG
  ASSERT(state_ == PREPARE_GC);
  state_ = MARK_LIVE_OBJECTS;
#endif
  // The to space contains live objects, the from space is used as a marking
  // stack.
  marking_stack_.Initialize(heap()->new_space()->FromSpaceLow(),
                            heap()->new_space()->FromSpaceHigh());

  ASSERT(!marking_stack_.overflowed());

  PrepareForCodeFlushing();

  RootMarkingVisitor root_visitor(heap());
  MarkRoots(&root_visitor);

  // The objects reachable from the roots are marked, yet unreachable
  // objects are unmarked.  Mark objects reachable due to host
  // application specific logic.
  ProcessExternalMarking();

  // The objects reachable from the roots or object groups are marked,
  // yet unreachable objects are unmarked.  Mark objects reachable
  // only from weak global handles.
  //
  // First we identify nonlive weak handles and mark them as pending
  // destruction.
  heap()->isolate()->global_handles()->IdentifyWeakHandles(
      &IsUnmarkedHeapObject);
  // Then we mark the objects and process the transitive closure.
  heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
  while (marking_stack_.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack();
  }

  // Repeat host application specific marking to mark unmarked objects
  // reachable from the weak roots.
  ProcessExternalMarking();

  // Prune the symbol table removing all symbols only pointed to by the
  // symbol table.  Cannot use symbol_table() here because the symbol
  // table is marked.
  SymbolTable* symbol_table = heap()->raw_unchecked_symbol_table();
  SymbolTableCleaner v(heap());
  symbol_table->IterateElements(&v);
  symbol_table->ElementsRemoved(v.PointersRemoved());
  heap()->external_string_table_.Iterate(&v);
  heap()->external_string_table_.CleanUp();

  // Process the weak references.
  MarkCompactWeakObjectRetainer mark_compact_object_retainer;
  heap()->ProcessWeakReferences(&mark_compact_object_retainer);

  // Remove object groups after marking phase.
  heap()->isolate()->global_handles()->RemoveObjectGroups();
  heap()->isolate()->global_handles()->RemoveImplicitRefGroups();

  // Flush code from collected candidates.
  if (is_code_flushing_enabled()) {
    code_flusher_->ProcessCandidates();
  }

  // Clean up dead objects from the runtime profiler.
  heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
}
1442
+
1443
+
1444
#ifdef DEBUG
// Debug-only accounting: attributes the size of a live object to the
// per-space live-byte counters, based on which space contains it.
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
  live_bytes_ += obj->Size();
  if (heap()->new_space()->Contains(obj)) {
    live_young_objects_size_ += obj->Size();
  } else if (heap()->map_space()->Contains(obj)) {
    ASSERT(obj->IsMap());
    live_map_objects_size_ += obj->Size();
  } else if (heap()->cell_space()->Contains(obj)) {
    ASSERT(obj->IsJSGlobalPropertyCell());
    live_cell_objects_size_ += obj->Size();
  } else if (heap()->old_pointer_space()->Contains(obj)) {
    live_old_pointer_objects_size_ += obj->Size();
  } else if (heap()->old_data_space()->Contains(obj)) {
    live_old_data_objects_size_ += obj->Size();
  } else if (heap()->code_space()->Contains(obj)) {
    live_code_objects_size_ += obj->Size();
  } else if (heap()->lo_space()->Contains(obj)) {
    live_lo_objects_size_ += obj->Size();
  } else {
    // Every heap object must be in exactly one of the spaces above.
    UNREACHABLE();
  }
}
#endif  // DEBUG
1468
+
1469
+
1470
// Sweep the large object space: free every unmarked large object and clear
// the mark bits of the survivors.  Also advances the collector's debug
// state machine out of the marking phase.
void MarkCompactCollector::SweepLargeObjectSpace() {
#ifdef DEBUG
  ASSERT(state_ == MARK_LIVE_OBJECTS);
  state_ =
      compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
#endif
  // Deallocate unmarked objects and clear marked bits for marked objects.
  heap()->lo_space()->FreeUnmarkedObjects();
}
1479
+
1480
+
1481
// Safe to use during marking phase only: reads the object's map through
// its (possibly mark-bit-encoded) map word instead of map(), so it works
// even while the object is marked.
bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
  MapWord metamap = object->map_word();
  metamap.ClearMark();
  return metamap.ToMap()->instance_type() == MAP_TYPE;
}
1487
+
1488
+
1489
void MarkCompactCollector::ClearNonLiveTransitions() {
  HeapObjectIterator map_iterator(heap()->map_space(), &SizeOfMarkedObject);
  // Iterate over the map space, setting map transitions that go from
  // a marked map to an unmarked map to null transitions.  At the same time,
  // set all the prototype fields of maps back to their original value,
  // dropping the back pointers temporarily stored in the prototype field.
  // Setting the prototype field requires following the linked list of
  // back pointers, reversing them all at once.  This allows us to find
  // those maps with map transitions that need to be nulled, and only
  // scan the descriptor arrays of those maps, not all maps.
  // All of these actions are carried out only on maps of JSObjects
  // and related subtypes.
  for (HeapObject* obj = map_iterator.next();
       obj != NULL; obj = map_iterator.next()) {
    Map* map = reinterpret_cast<Map*>(obj);
    // NOTE(review): an unmarked map-space slot that reads as a byte array
    // is presumably a free-space filler rather than a map -- confirm.
    if (!map->IsMarked() && map->IsByteArray()) continue;

    ASSERT(SafeIsMap(map));
    // Only JSObject and subtypes have map transitions and back pointers.
    if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
    if (map->instance_type() > JS_FUNCTION_TYPE) continue;

    if (map->IsMarked() && map->attached_to_shared_function_info()) {
      // This map is used for inobject slack tracking and has been detached
      // from SharedFunctionInfo during the mark phase.
      // Since it survived the GC, reattach it now.
      map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
    }

    // Follow the chain of back pointers to find the prototype.
    Map* current = map;
    while (SafeIsMap(current)) {
      current = reinterpret_cast<Map*>(current->prototype());
      ASSERT(current->IsHeapObject());
    }
    Object* real_prototype = current;

    // Follow back pointers, setting them to prototype,
    // clearing map transitions when necessary.
    current = map;
    bool on_dead_path = !current->IsMarked();
    Object* next;
    while (SafeIsMap(current)) {
      next = current->prototype();
      // There should never be a dead map above a live map.
      ASSERT(on_dead_path || current->IsMarked());

      // A live map above a dead map indicates a dead transition.
      // This test will always be false on the first iteration.
      if (on_dead_path && current->IsMarked()) {
        on_dead_path = false;
        current->ClearNonLiveTransitions(heap(), real_prototype);
      }
      *HeapObject::RawField(current, Map::kPrototypeOffset) =
          real_prototype;
      current = reinterpret_cast<Map*>(next);
    }
  }
}
1548
+
1549
+ // -------------------------------------------------------------------------
1550
+ // Phase 2: Encode forwarding addresses.
1551
+ // When compacting, forwarding addresses for objects in old space and map
1552
+ // space are encoded in their map pointer word (along with an encoding of
1553
+ // their map pointers).
1554
+ //
1555
+ // The exact encoding is described in the comments for class MapWord in
1556
+ // objects.h.
1557
+ //
1558
+ // An address range [start, end) can have both live and non-live objects.
1559
+ // Maximal non-live regions are marked so they can be skipped on subsequent
1560
+ // sweeps of the heap. A distinguished map-pointer encoding is used to mark
1561
+ // free regions of one-word size (in which case the next word is the start
1562
+ // of a live object). A second distinguished map-pointer encoding is used
1563
+ // to mark free regions larger than one word, and the size of the free
1564
+ // region (including the first word) is written to the second word of the
1565
+ // region.
1566
+ //
1567
+ // Any valid map page offset must lie in the object area of the page, so map
1568
+ // page offsets less than Page::kObjectStartOffset are invalid. We use a
1569
+ // pair of distinguished invalid map encodings (for single word and multiple
1570
+ // words) to indicate free regions in the page found during computation of
1571
+ // forwarding addresses and skipped over in subsequent sweeps.
1572
+
1573
+
1574
// Encode a free region, defined by the given start address and size, in the
// first word or two of the region.  A one-word region gets the distinguished
// single-free encoding; a larger region gets the multi-free encoding in the
// first word and its total size (including that word) in the second.
void EncodeFreeRegion(Address free_start, int free_size) {
  ASSERT(free_size >= kIntSize);
  if (free_size == kIntSize) {
    Memory::uint32_at(free_start) = MarkCompactCollector::kSingleFreeEncoding;
  } else {
    ASSERT(free_size >= 2 * kIntSize);
    Memory::uint32_at(free_start) = MarkCompactCollector::kMultiFreeEncoding;
    Memory::int_at(free_start + kIntSize) = free_size;
  }

#ifdef DEBUG
  // Zap the body of the free region so stale pointers are easy to spot.
  if (FLAG_enable_slow_asserts) {
    for (int offset = 2 * kIntSize;
         offset < free_size;
         offset += kPointerSize) {
      Memory::Address_at(free_start + offset) = kZapValue;
    }
  }
#endif
}
1597
+
1598
+
1599
// Try to promote all objects in new space.  Objects are promoted to the
// old pointer space or the old data space, as chosen by Heap::TargetSpace.
// Objects too large for a paged space -- or objects whose promotion
// allocation fails -- are instead allocated back in the new space.
inline MaybeObject* MCAllocateFromNewSpace(Heap* heap,
                                           HeapObject* object,
                                           int object_size) {
  MaybeObject* forwarded;
  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
    // Too big for a paged space; force the new-space fallback below.
    forwarded = Failure::Exception();
  } else {
    OldSpace* target_space = heap->TargetSpace(object);
    ASSERT(target_space == heap->old_pointer_space() ||
           target_space == heap->old_data_space());
    forwarded = target_space->MCAllocateRaw(object_size);
  }
  Object* result;
  if (!forwarded->ToObject(&result)) {
    // Promotion failed: allocate in the new space.  This cannot fail
    // because the semispaces are of equal size.
    result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
  }
  return result;
}
1620
+
1621
+
1622
// Allocation functions for the paged spaces call the space's MCAllocateRaw.
MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
    Heap *heap,
    HeapObject* ignore,
    int object_size) {
  return heap->old_pointer_space()->MCAllocateRaw(object_size);
}
1629
+
1630
+
1631
// Mark-compact allocation in the old data space (see
// MCAllocateFromOldPointerSpace above; the object argument is unused).
MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
    Heap* heap,
    HeapObject* ignore,
    int object_size) {
  return heap->old_data_space()->MCAllocateRaw(object_size);
}
1637
+
1638
+
1639
// Mark-compact allocation in the code space (object argument unused).
MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
    Heap* heap,
    HeapObject* ignore,
    int object_size) {
  return heap->code_space()->MCAllocateRaw(object_size);
}
1645
+
1646
+
1647
// Mark-compact allocation in the map space (object argument unused).
MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
    Heap* heap,
    HeapObject* ignore,
    int object_size) {
  return heap->map_space()->MCAllocateRaw(object_size);
}
1653
+
1654
+
1655
// Mark-compact allocation in the cell space (object argument unused).
MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(
    Heap* heap, HeapObject* ignore, int object_size) {
  return heap->cell_space()->MCAllocateRaw(object_size);
}
1659
+
1660
+
1661
// The forwarding address is encoded at the same offset as the current
// to-space object, but in from space.  (From space is free during this
// phase; it is reused as a table of forwarding addresses.)
inline void EncodeForwardingAddressInNewSpace(Heap* heap,
                                              HeapObject* old_object,
                                              int object_size,
                                              Object* new_object,
                                              int* ignored) {
  int offset =
      heap->new_space()->ToSpaceOffsetForAddress(old_object->address());
  Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) =
      HeapObject::cast(new_object)->address();
}
1673
+
1674
+
1675
// The forwarding address is encoded in the map pointer of the object as an
// offset (in terms of live bytes) from the address of the first live object
// in the page.  *offset is the running live-byte count for the current page
// and is advanced by this object's size.
inline void EncodeForwardingAddressInPagedSpace(Heap* heap,
                                                HeapObject* old_object,
                                                int object_size,
                                                Object* new_object,
                                                int* offset) {
  // Record the forwarding address of the first live object if necessary.
  if (*offset == 0) {
    Page::FromAddress(old_object->address())->mc_first_forwarded =
        HeapObject::cast(new_object)->address();
  }

  MapWord encoding =
      MapWord::EncodeAddress(old_object->map()->address(), *offset);
  old_object->set_map_word(encoding);
  *offset += object_size;
  ASSERT(*offset <= Page::kObjectAreaSize);
}
1695
+
1696
+
1697
// Most non-live objects are ignored.  (No-op ProcessNonLiveFunction used
// as a template argument for EncodeForwardingAddressesInRange.)
inline void IgnoreNonLiveObject(HeapObject* object, Isolate* isolate) {}
1699
+
1700
+
1701
// Function template that, given a range of addresses (eg, a semispace or a
// paged space page), iterates through the objects in the range to clear
// mark bits and compute and encode forwarding addresses.  As a side effect,
// maximal free chunks are marked so that they can be skipped on subsequent
// sweeps.
//
// The template parameters are an allocation function, a forwarding address
// encoding function, and a function to process non-live objects.
template<MarkCompactCollector::AllocationFunction Alloc,
         MarkCompactCollector::EncodingFunction Encode,
         MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
inline void EncodeForwardingAddressesInRange(MarkCompactCollector* collector,
                                             Address start,
                                             Address end,
                                             int* offset) {
  // The start address of the current free region while sweeping the space.
  // This address is set when a transition from live to non-live objects is
  // encountered.  A value (an encoding of the 'next free region' pointer)
  // is written to memory at this address when a transition from non-live to
  // live objects is encountered.
  Address free_start = NULL;

  // A flag giving the state of the previously swept object.  Initially true
  // to ensure that free_start is initialized to a proper address before
  // trying to write to it.
  bool is_prev_alive = true;

  int object_size;  // Will be set on each iteration of the loop.
  for (Address current = start; current < end; current += object_size) {
    HeapObject* object = HeapObject::FromAddress(current);
    if (object->IsMarked()) {
      // Live object: clear its mark (Size() must be queried afterwards,
      // once the map word is a plain map pointer again).
      object->ClearMark();
      collector->tracer()->decrement_marked_count();
      object_size = object->Size();

      Object* forwarded =
          Alloc(collector->heap(), object, object_size)->ToObjectUnchecked();
      Encode(collector->heap(), object, object_size, forwarded, offset);

#ifdef DEBUG
      if (FLAG_gc_verbose) {
        PrintF("forward %p -> %p.\n", object->address(),
               HeapObject::cast(forwarded)->address());
      }
#endif
      if (!is_prev_alive) {  // Transition from non-live to live.
        EncodeFreeRegion(free_start, static_cast<int>(current - free_start));
        is_prev_alive = true;
      }
    } else {  // Non-live object.
      object_size = object->Size();
      ProcessNonLive(object, collector->heap()->isolate());
      if (is_prev_alive) {  // Transition from live to non-live.
        free_start = current;
        is_prev_alive = false;
      }
      LiveObjectList::ProcessNonLive(object);
    }
  }

  // If we ended on a free region, mark it.
  if (!is_prev_alive) {
    EncodeFreeRegion(free_start, static_cast<int>(end - free_start));
  }
}
1766
+
1767
+
1768
// Functions to encode the forwarding pointers in each compactable space.
void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
  // New-space encoding stores addresses in from space, so the per-page
  // live-byte offset is unused.
  int ignored;
  EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
                                   EncodeForwardingAddressInNewSpace,
                                   IgnoreNonLiveObject>(
      this,
      heap()->new_space()->bottom(),
      heap()->new_space()->top(),
      &ignored);
}
1779
+
1780
+
1781
// Encode forwarding addresses for every used page of a paged space,
// parameterized over the allocation function for the target space and the
// handler for non-live objects.
template<MarkCompactCollector::AllocationFunction Alloc,
         MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
    PagedSpace* space) {
  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* p = it.next();

    // The offset of each live object in the page from the first live object
    // in the page.
    int offset = 0;
    EncodeForwardingAddressesInRange<Alloc,
                                     EncodeForwardingAddressInPagedSpace,
                                     ProcessNonLive>(
        this,
        p->ObjectAreaStart(),
        p->AllocationTop(),
        &offset);
  }
}
1801
+
1802
+
1803
// We scavenge new space simultaneously with sweeping.  This is done in two
// passes.
// The first pass migrates all alive objects from one semispace to another or
// promotes them to old space.  The forwarding address is written directly
// into the first word of the object without any encoding.  If the object is
// dead we write NULL as the forwarding address.
// The second pass updates pointers to new space in all spaces.  It is
// possible to encounter pointers to dead objects during traversal of dirty
// regions, so we should clear them to avoid encountering them during the
// next dirty-regions iteration.
static void MigrateObject(Heap* heap,
                          Address dst,
                          Address src,
                          int size,
                          bool to_old_space) {
  if (to_old_space) {
    // Old-space destination: also update the region (dirty) marks.
    heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
  } else {
    heap->CopyBlock(dst, src, size);
  }

  // Leave the forwarding address in the first word of the old copy.
  Memory::Address_at(src) = dst;
}
1826
+
1827
+
1828
// Static visitor that rewrites a single slot: if it points into new space,
// replace it with the forwarding address stored in the first word of the
// old copy (written by MigrateObject).
class StaticPointersToNewGenUpdatingVisitor : public
  StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
 public:
  static inline void VisitPointer(Heap* heap, Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* obj = HeapObject::cast(*p);
    Address old_addr = obj->address();

    if (heap->new_space()->Contains(obj)) {
      ASSERT(heap->InFromSpace(*p));
      *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
    }
  }
};
1843
+
1844
+
1845
// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
 public:
  explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }

  // Rewrite a single slot via the static helper.
  void VisitPointer(Object** p) {
    StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
  }

  // Rewrite a contiguous range of slots.
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
    }
  }

  // Update a code-target reloc entry: resolve the Code object, update the
  // pointer, and write back the (possibly moved) instruction start.
  void VisitCodeTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    VisitPointer(&target);
    rinfo->set_target_address(Code::cast(target)->instruction_start());
  }

  // Update a debug break / JS return patch site the same way.
  void VisitDebugTarget(RelocInfo* rinfo) {
    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
    VisitPointer(&target);
    rinfo->set_call_address(Code::cast(target)->instruction_start());
  }
 private:
  Heap* heap_;
};
1880
+
1881
+
1882
// Visitor for updating pointers from live objects in old spaces to new space.
// It can encounter pointers to dead objects in new space when traversing map
// space (see comment for MigrateObject).
static void UpdatePointerToNewGen(HeapObject** p) {
  if (!(*p)->IsHeapObject()) return;

  Address old_addr = (*p)->address();
  ASSERT(HEAP->InFromSpace(*p));

  // MigrateObject left the forwarding address (or NULL for dead objects)
  // in the first word of the old copy.
  Address new_addr = Memory::Address_at(old_addr);

  if (new_addr == NULL) {
    // We encountered pointer to a dead object.  Clear it so we will
    // not visit it again during next iteration of dirty regions.
    *p = NULL;
  } else {
    *p = HeapObject::FromAddress(new_addr);
  }
}
1901
+
1902
+
1903
// Resolve an external-string-table entry to its post-scavenge location by
// reading the forwarding address written into the old object's first word.
static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                                 Object** p) {
  Address old_addr = HeapObject::cast(*p)->address();
  Address new_addr = Memory::Address_at(old_addr);
  return String::cast(HeapObject::FromAddress(new_addr));
}
1909
+
1910
+
1911
// Try to move a live new-space object into old generation: large objects go
// to the large object space, others to the old pointer or old data space as
// chosen by Heap::TargetSpace.  Returns true and migrates the object on
// success; returns false (object stays in new space) if allocation fails.
static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) {
  Object* result;

  if (object_size > heap->MaxObjectSizeInPagedSpace()) {
    MaybeObject* maybe_result =
        heap->lo_space()->AllocateRawFixedArray(object_size);
    if (maybe_result->ToObject(&result)) {
      HeapObject* target = HeapObject::cast(result);
      MigrateObject(heap, target->address(), object->address(), object_size,
                    true);
      heap->mark_compact_collector()->tracer()->
          increment_promoted_objects_size(object_size);
      return true;
    }
  } else {
    OldSpace* target_space = heap->TargetSpace(object);

    ASSERT(target_space == heap->old_pointer_space() ||
           target_space == heap->old_data_space());
    MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
    if (maybe_result->ToObject(&result)) {
      HeapObject* target = HeapObject::cast(result);
      // Only the old pointer space needs its region marks updated.
      MigrateObject(heap,
                    target->address(),
                    object->address(),
                    object_size,
                    target_space == heap->old_pointer_space());
      heap->mark_compact_collector()->tracer()->
          increment_promoted_objects_size(object_size);
      return true;
    }
  }

  return false;
}
1946
+
1947
+
1948
// Scavenge the new space during sweeping: flip the semispaces, migrate or
// promote all live objects (first pass), then update every pointer into new
// space throughout the heap (second pass).  See comment at MigrateObject.
static void SweepNewSpace(Heap* heap, NewSpace* space) {
  heap->CheckNewSpaceExpansionCriteria();

  Address from_bottom = space->bottom();
  Address from_top = space->top();

  // Flip the semispaces.  After flipping, to space is empty, from space has
  // live objects.
  space->Flip();
  space->ResetAllocationInfo();

  int size = 0;
  int survivors_size = 0;

  // First pass: traverse all objects in inactive semispace, remove marks,
  // migrate live objects and write forwarding addresses.
  for (Address current = from_bottom; current < from_top; current += size) {
    HeapObject* object = HeapObject::FromAddress(current);

    if (object->IsMarked()) {
      object->ClearMark();
      heap->mark_compact_collector()->tracer()->decrement_marked_count();

      size = object->Size();
      survivors_size += size;

      // Aggressively promote young survivors to the old space.
      if (TryPromoteObject(heap, object, size)) {
        continue;
      }

      // Promotion failed.  Just migrate object to another semispace.
      // Allocation cannot fail at this point: semispaces are of equal size.
      Object* target = space->AllocateRaw(size)->ToObjectUnchecked();

      MigrateObject(heap,
                    HeapObject::cast(target)->address(),
                    current,
                    size,
                    false);
    } else {
      // Process the dead object before we write a NULL into its header.
      LiveObjectList::ProcessNonLive(object);

      size = object->Size();
      // NULL marks the object dead for UpdatePointerToNewGen.
      Memory::Address_at(current) = NULL;
    }
  }

  // Second pass: find pointers to new space and update them.
  PointersToNewGenUpdatingVisitor updating_visitor(heap);

  // Update pointers in to space.
  Address current = space->bottom();
  while (current < space->top()) {
    HeapObject* object = HeapObject::FromAddress(current);
    current +=
        StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
                                                           object);
  }

  // Update roots.
  heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
  LiveObjectList::IterateElements(&updating_visitor);

  // Update pointers in old spaces.
  heap->IterateDirtyRegions(heap->old_pointer_space(),
                            &Heap::IteratePointersInDirtyRegion,
                            &UpdatePointerToNewGen,
                            heap->WATERMARK_SHOULD_BE_VALID);

  heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);

  // Update pointers from cells.
  HeapObjectIterator cell_iterator(heap->cell_space());
  for (HeapObject* cell = cell_iterator.next();
       cell != NULL;
       cell = cell_iterator.next()) {
    if (cell->IsJSGlobalPropertyCell()) {
      // Visit the cell's value slot directly (raw offset, untagged).
      Address value_address =
          reinterpret_cast<Address>(cell) +
          (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
      updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
    }
  }

  // Update pointer from the global contexts list.
  updating_visitor.VisitPointer(heap->global_contexts_list_address());

  // Update pointers from external string table.
  heap->UpdateNewSpaceReferencesInExternalStringTable(
      &UpdateNewSpaceReferenceInExternalStringTableEntry);

  // All pointers were updated.  Update auxiliary allocation info.
  heap->IncrementYoungSurvivorsCounter(survivors_size);
  space->set_age_mark(space->top());

  // Update JSFunction pointers from the runtime profiler.
  heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
}
2048
+
2049
+
2050
+ static void SweepSpace(Heap* heap, PagedSpace* space) {
2051
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
2052
+
2053
+ // During sweeping of paged space we are trying to find longest sequences
2054
+ // of pages without live objects and free them (instead of putting them on
2055
+ // the free list).
2056
+
2057
+ // Page preceding current.
2058
+ Page* prev = Page::FromAddress(NULL);
2059
+
2060
+ // First empty page in a sequence.
2061
+ Page* first_empty_page = Page::FromAddress(NULL);
2062
+
2063
+ // Page preceding first empty page.
2064
+ Page* prec_first_empty_page = Page::FromAddress(NULL);
2065
+
2066
+ // If last used page of space ends with a sequence of dead objects
2067
+ // we can adjust allocation top instead of putting this free area into
2068
+ // the free list. Thus during sweeping we keep track of such areas
2069
+ // and defer their deallocation until the sweeping of the next page
2070
+ // is done: if one of the next pages contains live objects we have
2071
+ // to put such area into the free list.
2072
+ Address last_free_start = NULL;
2073
+ int last_free_size = 0;
2074
+
2075
+ while (it.has_next()) {
2076
+ Page* p = it.next();
2077
+
2078
+ bool is_previous_alive = true;
2079
+ Address free_start = NULL;
2080
+ HeapObject* object;
2081
+
2082
+ for (Address current = p->ObjectAreaStart();
2083
+ current < p->AllocationTop();
2084
+ current += object->Size()) {
2085
+ object = HeapObject::FromAddress(current);
2086
+ if (object->IsMarked()) {
2087
+ object->ClearMark();
2088
+ heap->mark_compact_collector()->tracer()->decrement_marked_count();
2089
+
2090
+ if (!is_previous_alive) { // Transition from free to live.
2091
+ space->DeallocateBlock(free_start,
2092
+ static_cast<int>(current - free_start),
2093
+ true);
2094
+ is_previous_alive = true;
2095
+ }
2096
+ } else {
2097
+ heap->mark_compact_collector()->ReportDeleteIfNeeded(
2098
+ object, heap->isolate());
2099
+ if (is_previous_alive) { // Transition from live to free.
2100
+ free_start = current;
2101
+ is_previous_alive = false;
2102
+ }
2103
+ LiveObjectList::ProcessNonLive(object);
2104
+ }
2105
+ // The object is now unmarked for the call to Size() at the top of the
2106
+ // loop.
2107
+ }
2108
+
2109
+ bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop())
2110
+ || (!is_previous_alive && free_start == p->ObjectAreaStart());
2111
+
2112
+ if (page_is_empty) {
2113
+ // This page is empty. Check whether we are in the middle of
2114
+ // sequence of empty pages and start one if not.
2115
+ if (!first_empty_page->is_valid()) {
2116
+ first_empty_page = p;
2117
+ prec_first_empty_page = prev;
2118
+ }
2119
+
2120
+ if (!is_previous_alive) {
2121
+ // There are dead objects on this page. Update space accounting stats
2122
+ // without putting anything into free list.
2123
+ int size_in_bytes = static_cast<int>(p->AllocationTop() - free_start);
2124
+ if (size_in_bytes > 0) {
2125
+ space->DeallocateBlock(free_start, size_in_bytes, false);
2126
+ }
2127
+ }
2128
+ } else {
2129
+ // This page is not empty. Sequence of empty pages ended on the previous
2130
+ // one.
2131
+ if (first_empty_page->is_valid()) {
2132
+ space->FreePages(prec_first_empty_page, prev);
2133
+ prec_first_empty_page = first_empty_page = Page::FromAddress(NULL);
2134
+ }
2135
+
2136
+ // If there is a free ending area on one of the previous pages we have
2137
+ // deallocate that area and put it on the free list.
2138
+ if (last_free_size > 0) {
2139
+ Page::FromAddress(last_free_start)->
2140
+ SetAllocationWatermark(last_free_start);
2141
+ space->DeallocateBlock(last_free_start, last_free_size, true);
2142
+ last_free_start = NULL;
2143
+ last_free_size = 0;
2144
+ }
2145
+
2146
+ // If the last region of this page was not live we remember it.
2147
+ if (!is_previous_alive) {
2148
+ ASSERT(last_free_size == 0);
2149
+ last_free_size = static_cast<int>(p->AllocationTop() - free_start);
2150
+ last_free_start = free_start;
2151
+ }
2152
+ }
2153
+
2154
+ prev = p;
2155
+ }
2156
+
2157
+ // We reached end of space. See if we need to adjust allocation top.
2158
+ Address new_allocation_top = NULL;
2159
+
2160
+ if (first_empty_page->is_valid()) {
2161
+ // Last used pages in space are empty. We can move allocation top backwards
2162
+ // to the beginning of first empty page.
2163
+ ASSERT(prev == space->AllocationTopPage());
2164
+
2165
+ new_allocation_top = first_empty_page->ObjectAreaStart();
2166
+ }
2167
+
2168
+ if (last_free_size > 0) {
2169
+ // There was a free ending area on the previous page.
2170
+ // Deallocate it without putting it into freelist and move allocation
2171
+ // top to the beginning of this free area.
2172
+ space->DeallocateBlock(last_free_start, last_free_size, false);
2173
+ new_allocation_top = last_free_start;
2174
+ }
2175
+
2176
+ if (new_allocation_top != NULL) {
2177
+ #ifdef DEBUG
2178
+ Page* new_allocation_top_page = Page::FromAllocationTop(new_allocation_top);
2179
+ if (!first_empty_page->is_valid()) {
2180
+ ASSERT(new_allocation_top_page == space->AllocationTopPage());
2181
+ } else if (last_free_size > 0) {
2182
+ ASSERT(new_allocation_top_page == prec_first_empty_page);
2183
+ } else {
2184
+ ASSERT(new_allocation_top_page == first_empty_page);
2185
+ }
2186
+ #endif
2187
+
2188
+ space->SetTop(new_allocation_top);
2189
+ }
2190
+ }
2191
+
2192
+
2193
// Compaction pass 2: compute a forwarding (relocation) address for every
// live object and record it.  Paged-space forwarding info is encoded into
// object map words; new-space forwarding pointers are written into the
// inactive semispace.  The order of spaces below is significant.
void MarkCompactCollector::EncodeForwardingAddresses() {
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  // Objects in the active semispace of the young generation may be
  // relocated to the inactive semispace (if not promoted).  Set the
  // relocation info to the beginning of the inactive semispace.
  heap()->new_space()->MCResetRelocationInfo();

  // Compute the forwarding pointers in each space.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
                                        ReportDeleteIfNeeded>(
      heap()->old_pointer_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
                                        IgnoreNonLiveObject>(
      heap()->old_data_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
                                        ReportDeleteIfNeeded>(
      heap()->code_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
                                        IgnoreNonLiveObject>(
      heap()->cell_space());


  // Compute new space next to last after the old and code spaces have been
  // compacted.  Objects in new space can be promoted to old or code space.
  EncodeForwardingAddressesInNewSpace();

  // Compute map space last because computing forwarding addresses
  // overwrites non-live objects.  Objects in the other spaces rely on
  // non-live map pointers to get the sizes of non-live objects.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
                                        IgnoreNonLiveObject>(
      heap()->map_space());

  // Write relocation info to the top page, so we can use it later.  This is
  // done after promoting objects from the new space so we get the correct
  // allocation top.
  heap()->old_pointer_space()->MCWriteRelocationInfoToPage();
  heap()->old_data_space()->MCWriteRelocationInfoToPage();
  heap()->code_space()->MCWriteRelocationInfoToPage();
  heap()->map_space()->MCWriteRelocationInfoToPage();
  heap()->cell_space()->MCWriteRelocationInfoToPage();
}
2238
+
2239
+
2240
+ class MapIterator : public HeapObjectIterator {
2241
+ public:
2242
+ explicit MapIterator(Heap* heap)
2243
+ : HeapObjectIterator(heap->map_space(), &SizeCallback) { }
2244
+
2245
+ MapIterator(Heap* heap, Address start)
2246
+ : HeapObjectIterator(heap->map_space(), start, &SizeCallback) { }
2247
+
2248
+ private:
2249
+ static int SizeCallback(HeapObject* unused) {
2250
+ USE(unused);
2251
+ return Map::kSize;
2252
+ }
2253
+ };
2254
+
2255
+
2256
// Compacts the map space: live maps beyond the post-compaction top are
// copied into vacant (free-list) slots before it.  Each evacuated map is
// left with a forwarding map word tagged via the overflow bit, which the
// UpdateMapPointers* methods then use to rewrite map pointers everywhere.
class MapCompact {
 public:
  // |live_maps| is the number of live maps, used to compute where the map
  // space top will be after compaction.
  explicit MapCompact(Heap* heap, int live_maps)
    : heap_(heap),
      live_maps_(live_maps),
      to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)),
      vacant_map_it_(heap),
      map_to_evacuate_it_(heap, to_evacuate_start_),
      first_map_to_evacuate_(
          reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
  }

  // Move every live map above the compaction boundary into a vacant slot
  // below it, leaving forwarding map words behind.
  void CompactMaps() {
    // As we know the number of maps to evacuate beforehand,
    // we stop when there are no more vacant maps.
    for (Map* next_vacant_map = NextVacantMap();
         next_vacant_map;
         next_vacant_map = NextVacantMap()) {
      EvacuateMap(next_vacant_map, NextMapToEvacuate());
    }

#ifdef DEBUG
    CheckNoMapsToEvacuate();
#endif
  }

  // Rewrite map pointers reachable from the strong roots, weak global
  // handles, and the live object list.
  void UpdateMapPointersInRoots() {
    MapUpdatingVisitor map_updating_visitor;
    heap()->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG);
    heap()->isolate()->global_handles()->IterateWeakRoots(
        &map_updating_visitor);
    LiveObjectList::IterateElements(&map_updating_visitor);
  }

  // Rewrite map pointers for all objects in a paged space (must not be the
  // map space itself).
  void UpdateMapPointersInPagedSpace(PagedSpace* space) {
    ASSERT(space != heap()->map_space());

    PageIterator it(space, PageIterator::PAGES_IN_USE);
    while (it.has_next()) {
      Page* p = it.next();
      UpdateMapPointersInRange(heap(),
                               p->ObjectAreaStart(),
                               p->AllocationTop());
    }
  }

  // Rewrite map pointers for all objects in the new space.
  void UpdateMapPointersInNewSpace() {
    NewSpace* space = heap()->new_space();
    UpdateMapPointersInRange(heap(), space->bottom(), space->top());
  }

  // Rewrite map pointers for all objects in the large object space.
  void UpdateMapPointersInLargeObjectSpace() {
    LargeObjectIterator it(heap()->lo_space());
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
      UpdateMapPointersInObject(heap(), obj);
  }

  // Tell the map space compaction is done so it can reset its bookkeeping.
  void Finish() {
    heap()->map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
  }

  inline Heap* heap() const { return heap_; }

 private:
  Heap* heap_;
  int live_maps_;                 // Number of live maps in the map space.
  Address to_evacuate_start_;     // Map space top after compaction; maps at
                                  // or above this address get evacuated.
  MapIterator vacant_map_it_;     // Scans for free slots below the boundary.
  MapIterator map_to_evacuate_it_;  // Scans live maps above the boundary.
  Map* first_map_to_evacuate_;    // Sentinel ending the vacant-slot scan.

  // Helper class for updating map pointers in HeapObjects.
  class MapUpdatingVisitor: public ObjectVisitor {
   public:
    MapUpdatingVisitor() {}

    void VisitPointer(Object** p) {
      UpdateMapPointer(p);
    }

    void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) UpdateMapPointer(p);
    }

   private:
    // If *p points at an evacuated map (identified by its overflowed map
    // word), replace it with the map's new location.
    void UpdateMapPointer(Object** p) {
      if (!(*p)->IsHeapObject()) return;
      HeapObject* old_map = reinterpret_cast<HeapObject*>(*p);

      // Moved maps are tagged with an overflowed map word.  They are the
      // only objects whose map word is overflowed as marking is already
      // complete.
      MapWord map_word = old_map->map_word();
      if (!map_word.IsOverflowed()) return;

      *p = GetForwardedMap(map_word);
    }
  };

  // Advance |it| until it yields either a live map (live == true) or a
  // free-list slot (live == false); returns NULL on reaching |last|.
  static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
    while (true) {
      HeapObject* next = it->next();
      ASSERT(next != NULL);
      if (next == last)
        return NULL;
      ASSERT(!next->IsOverflowed());
      ASSERT(!next->IsMarked());
      ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next));
      if (next->IsMap() == live)
        return reinterpret_cast<Map*>(next);
    }
  }

  // Next free slot below the compaction boundary, or NULL when exhausted.
  Map* NextVacantMap() {
    Map* map = NextMap(&vacant_map_it_, first_map_to_evacuate_, false);
    ASSERT(map == NULL || FreeListNode::IsFreeListNode(map));
    return map;
  }

  // Next live map above the compaction boundary.  Never NULL: the number
  // of vacant slots bounds the number of calls.
  Map* NextMapToEvacuate() {
    Map* map = NextMap(&map_to_evacuate_it_, NULL, true);
    ASSERT(map != NULL);
    ASSERT(map->IsMap());
    return map;
  }

  // Copy |map_to_evacuate| into |vacant_map| and store a forwarding map
  // word (overflow-tagged pointer to the new location) in the old slot.
  static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) {
    ASSERT(FreeListNode::IsFreeListNode(vacant_map));
    ASSERT(map_to_evacuate->IsMap());

    ASSERT(Map::kSize % 4 == 0);

    map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(
        vacant_map->address(), map_to_evacuate->address(), Map::kSize);

    ASSERT(vacant_map->IsMap());  // Due to memcpy above.

    MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
    forwarding_map_word.SetOverflow();
    map_to_evacuate->set_map_word(forwarding_map_word);

    ASSERT(map_to_evacuate->map_word().IsOverflowed());
    ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map);
  }

  // Decode a forwarding map word back into the map's new address.
  static Map* GetForwardedMap(MapWord map_word) {
    ASSERT(map_word.IsOverflowed());
    map_word.ClearOverflow();
    Map* new_map = map_word.ToMap();
    ASSERT_MAP_ALIGNED(new_map->address());
    return new_map;
  }

  // Rewrite the map pointer of |obj| (if its map was evacuated) and then
  // the pointers in its body.  Returns the object's size.
  static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) {
    ASSERT(!obj->IsMarked());
    Map* map = obj->map();
    ASSERT(heap->map_space()->Contains(map));
    MapWord map_word = map->map_word();
    ASSERT(!map_word.IsMarked());
    if (map_word.IsOverflowed()) {
      Map* new_map = GetForwardedMap(map_word);
      ASSERT(heap->map_space()->Contains(new_map));
      obj->set_map(new_map);

#ifdef DEBUG
      if (FLAG_gc_verbose) {
        PrintF("update %p : %p -> %p\n",
               obj->address(),
               reinterpret_cast<void*>(map),
               reinterpret_cast<void*>(new_map));
      }
#endif
    }

    int size = obj->SizeFromMap(map);
    MapUpdatingVisitor map_updating_visitor;
    obj->IterateBody(map->instance_type(), size, &map_updating_visitor);
    return size;
  }

  // Apply UpdateMapPointersInObject to every object in [start, end).
  static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) {
    HeapObject* object;
    int size;
    for (Address current = start; current < end; current += size) {
      object = HeapObject::FromAddress(current);
      size = UpdateMapPointersInObject(heap, object);
      ASSERT(size > 0);
    }
  }

#ifdef DEBUG
  // After compaction, everything left above the boundary must be free.
  void CheckNoMapsToEvacuate() {
    if (!FLAG_enable_slow_asserts)
      return;

    for (HeapObject* obj = map_to_evacuate_it_.next();
         obj != NULL; obj = map_to_evacuate_it_.next())
      ASSERT(FreeListNode::IsFreeListNode(obj));
  }
#endif
};
2456
+
2457
+
2458
// Non-compacting collection: sweep every space to clear mark bits and free
// dead blocks, then (optionally) compact just the map space.  Sweep order
// matters: the map space goes last because freeing dead maps overwrites
// them, and sweeping the other spaces reads possibly-dead maps for sizes.
void MarkCompactCollector::SweepSpaces() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);

  ASSERT(state_ == SWEEP_SPACES);
  ASSERT(!IsCompacting());
  // Noncompacting collections simply sweep the spaces to clear the mark
  // bits and free the nonlive blocks (for old and map spaces).  We sweep
  // the map space last because freeing non-live maps overwrites them and
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
  SweepSpace(heap(), heap()->old_pointer_space());
  SweepSpace(heap(), heap()->old_data_space());
  SweepSpace(heap(), heap()->code_space());
  SweepSpace(heap(), heap()->cell_space());
  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
    SweepNewSpace(heap(), heap()->new_space());
  }
  SweepSpace(heap(), heap()->map_space());

  heap()->IterateDirtyRegions(heap()->map_space(),
                              &heap()->IteratePointersInDirtyMapsRegion,
                              &UpdatePointerToNewGen,
                              heap()->WATERMARK_SHOULD_BE_VALID);

  // Every map-space slot is Map::kSize bytes, so size / kSize counts maps.
  intptr_t live_maps_size = heap()->map_space()->Size();
  int live_maps = static_cast<int>(live_maps_size / Map::kSize);
  ASSERT(live_map_objects_size_ == live_maps_size);

  if (heap()->map_space()->NeedsCompaction(live_maps)) {
    MapCompact map_compact(heap(), live_maps);

    map_compact.CompactMaps();
    map_compact.UpdateMapPointersInRoots();

    // Fix up map pointers in every other space; the map space itself is
    // skipped since its contents were just rewritten by the compaction.
    PagedSpaces spaces;
    for (PagedSpace* space = spaces.next();
         space != NULL; space = spaces.next()) {
      if (space == heap()->map_space()) continue;
      map_compact.UpdateMapPointersInPagedSpace(space);
    }
    map_compact.UpdateMapPointersInNewSpace();
    map_compact.UpdateMapPointersInLargeObjectSpace();

    map_compact.Finish();
  }
}
2504
+
2505
+
2506
// Iterate the live objects in a range of addresses (eg, a page or a
// semispace).  The live regions of the range have been linked into a list.
// The first live region is [first_live_start, first_live_end), and the last
// address in the range is top.  The callback function is used to get the
// size of each live object.
// Free gaps are recognized by the encoding written during sweeping:
// kSingleFreeEncoding marks a single free word, kMultiFreeEncoding marks a
// free block whose byte length is stored one int after the encoding word.
// Returns the total size in bytes of the live objects visited.
int MarkCompactCollector::IterateLiveObjectsInRange(
    Address start,
    Address end,
    LiveObjectCallback size_func) {
  int live_objects_size = 0;
  Address current = start;
  while (current < end) {
    uint32_t encoded_map = Memory::uint32_at(current);
    if (encoded_map == kSingleFreeEncoding) {
      // One free pointer-sized word; skip it.
      current += kPointerSize;
    } else if (encoded_map == kMultiFreeEncoding) {
      // Multi-word free block; its length is stored after the encoding.
      current += Memory::int_at(current + kIntSize);
    } else {
      // A live object: invoke the callback and advance by its size.
      int size = (this->*size_func)(HeapObject::FromAddress(current));
      current += size;
      live_objects_size += size;
    }
  }
  return live_objects_size;
}
2531
+
2532
+
2533
+ int MarkCompactCollector::IterateLiveObjects(
2534
+ NewSpace* space, LiveObjectCallback size_f) {
2535
+ ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2536
+ return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
2537
+ }
2538
+
2539
+
2540
// Visit the live objects of every in-use page of |space| with |size_f|;
// returns the total live bytes visited across all pages.
int MarkCompactCollector::IterateLiveObjects(
    PagedSpace* space, LiveObjectCallback size_f) {
  ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
  int total = 0;
  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* p = it.next();
    total += IterateLiveObjectsInRange(p->ObjectAreaStart(),
                                       p->AllocationTop(),
                                       size_f);
  }
  return total;
}
2553
+
2554
+
2555
+ // -------------------------------------------------------------------------
2556
+ // Phase 3: Update pointers
2557
+
2558
// Helper class for updating pointers in HeapObjects.  For each visited
// slot it decodes the target's forwarding address (from-space forwarding
// pointers for new-space objects, map-word encoding for paged-space
// objects) and rewrites the slot; large-object-space targets do not move.
class UpdatingVisitor: public ObjectVisitor {
 public:
  explicit UpdatingVisitor(Heap* heap) : heap_(heap) {}

  void VisitPointer(Object** p) {
    UpdatePointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    // Mark all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) UpdatePointer(p);
  }

  // Rewrite a code-target reloc entry to point at the target code's new
  // instruction start.
  void VisitCodeTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    VisitPointer(&target);
    rinfo->set_target_address(
        reinterpret_cast<Code*>(target)->instruction_start());
  }

  // Rewrite a debug break/return call site to the debug code's new
  // instruction start.
  void VisitDebugTarget(RelocInfo* rinfo) {
    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
    VisitPointer(&target);
    rinfo->set_call_address(
        reinterpret_cast<Code*>(target)->instruction_start());
  }

  inline Heap* heap() const { return heap_; }

 private:
  void UpdatePointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* obj = HeapObject::cast(*p);
    Address old_addr = obj->address();
    Address new_addr;
    ASSERT(!heap()->InFromSpace(obj));

    if (heap()->new_space()->Contains(obj)) {
      // New-space object: its forwarding address is stored at the mirror
      // offset in from-space.
      Address forwarding_pointer_addr =
          heap()->new_space()->FromSpaceLow() +
          heap()->new_space()->ToSpaceOffsetForAddress(old_addr);
      new_addr = Memory::Address_at(forwarding_pointer_addr);

#ifdef DEBUG
      ASSERT(heap()->old_pointer_space()->Contains(new_addr) ||
             heap()->old_data_space()->Contains(new_addr) ||
             heap()->new_space()->FromSpaceContains(new_addr) ||
             heap()->lo_space()->Contains(HeapObject::FromAddress(new_addr)));

      if (heap()->new_space()->FromSpaceContains(new_addr)) {
        ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
               heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
      }
#endif

    } else if (heap()->lo_space()->Contains(obj)) {
      // Don't move objects in the large object space.
      return;

    } else {
#ifdef DEBUG
      PagedSpaces spaces;
      PagedSpace* original_space = spaces.next();
      while (original_space != NULL) {
        if (original_space->Contains(obj)) break;
        original_space = spaces.next();
      }
      ASSERT(original_space != NULL);
#endif
      // Paged-space object: forwarding address is encoded via map words.
      new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
      ASSERT(original_space->Contains(new_addr));
      ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
             original_space->MCSpaceOffsetForAddress(old_addr));
    }

    *p = HeapObject::FromAddress(new_addr);

#ifdef DEBUG
    if (FLAG_gc_verbose) {
      PrintF("update %p : %p -> %p\n",
             reinterpret_cast<Address>(p), old_addr, new_addr);
    }
#endif
  }

  Heap* heap_;
};
2652
+
2653
+
2654
// Compaction pass 3: rewrite every pointer in roots, global handles and
// all live objects to the forwarding address computed in pass 2.  Objects
// themselves are not moved yet (that is pass 4, RelocateObjects).
void MarkCompactCollector::UpdatePointers() {
#ifdef DEBUG
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  state_ = UPDATE_POINTERS;
#endif
  UpdatingVisitor updating_visitor(heap());
  heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
      &updating_visitor);
  heap()->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
  heap()->isolate()->global_handles()->IterateWeakRoots(&updating_visitor);

  // Update the pointer to the head of the weak list of global contexts.
  updating_visitor.VisitPointer(&heap()->global_contexts_list_);

  LiveObjectList::IterateElements(&updating_visitor);

  // Walk every live object in each space, updating its interior pointers.
  // The per-space live sizes are only used for the consistency asserts.
  int live_maps_size = IterateLiveObjects(
      heap()->map_space(), &MarkCompactCollector::UpdatePointersInOldObject);
  int live_pointer_olds_size = IterateLiveObjects(
      heap()->old_pointer_space(),
      &MarkCompactCollector::UpdatePointersInOldObject);
  int live_data_olds_size = IterateLiveObjects(
      heap()->old_data_space(),
      &MarkCompactCollector::UpdatePointersInOldObject);
  int live_codes_size = IterateLiveObjects(
      heap()->code_space(), &MarkCompactCollector::UpdatePointersInOldObject);
  int live_cells_size = IterateLiveObjects(
      heap()->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject);
  int live_news_size = IterateLiveObjects(
      heap()->new_space(), &MarkCompactCollector::UpdatePointersInNewObject);

  // Large objects do not move, the map word can be updated directly.
  LargeObjectIterator it(heap()->lo_space());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    UpdatePointersInNewObject(obj);
  }

  USE(live_maps_size);
  USE(live_pointer_olds_size);
  USE(live_data_olds_size);
  USE(live_codes_size);
  USE(live_cells_size);
  USE(live_news_size);
  ASSERT(live_maps_size == live_map_objects_size_);
  ASSERT(live_data_olds_size == live_old_data_objects_size_);
  ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
  ASSERT(live_codes_size == live_code_objects_size_);
  ASSERT(live_cells_size == live_cell_objects_size_);
  ASSERT(live_news_size == live_young_objects_size_);
}
2704
+
2705
+
2706
// Update the map pointer and interior pointers of an object whose map word
// is still a plain map pointer (new-space and large objects).  Returns the
// object's size, computed from the old (pre-relocation) map.
int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
  // Keep old map pointers
  Map* old_map = obj->map();
  ASSERT(old_map->IsHeapObject());

  Address forwarded = GetForwardingAddressInOldSpace(old_map);

  ASSERT(heap()->map_space()->Contains(old_map));
  ASSERT(heap()->map_space()->Contains(forwarded));
#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
           forwarded);
  }
#endif
  // Update the map pointer.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));

  // We have to compute the object size relying on the old map because
  // map objects are not relocated yet.
  int obj_size = obj->SizeFromMap(old_map);

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor(heap());
  obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
  return obj_size;
}
2733
+
2734
+
2735
// Update the (encoded) map word and interior pointers of a paged-space
// object.  The map word encodes both the map address and a forwarding
// offset; the offset is preserved while the map address is rewritten to
// the map's forwarding address.  Returns the object's size.
int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
  // Decode the map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // At this point, the first word of map_addr is also encoded, cannot
  // cast it to Map* using Map::cast.
  Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
  int obj_size = obj->SizeFromMap(map);
  InstanceType type = map->instance_type();

  // Update map pointer.
  Address new_map_addr = GetForwardingAddressInOldSpace(map);
  int offset = encoding.DecodeOffset();
  obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(),
           map_addr, new_map_addr);
  }
#endif

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor(heap());
  obj->IterateBody(type, obj_size, &updating_visitor);
  return obj_size;
}
2764
+
2765
+
2766
// Decode the forwarding address of a paged-space (old or map space)
// object.  The object's map word stores an offset relative to the first
// live object's forwarding address on its page; if that offset runs past
// the destination page's allocation watermark, the target spills onto the
// next page.
Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
  // Object should either in old or map space.
  MapWord encoding = obj->map_word();

  // Offset to the first live object's forwarding address.
  int offset = encoding.DecodeOffset();
  Address obj_addr = obj->address();

  // Find the first live object's forwarding address.
  Page* p = Page::FromAddress(obj_addr);
  Address first_forwarded = p->mc_first_forwarded;

  // Page start address of forwarded address.
  Page* forwarded_page = Page::FromAddress(first_forwarded);
  int forwarded_offset = forwarded_page->Offset(first_forwarded);

  // Find end of allocation in the page of first_forwarded.
  int mc_top_offset = forwarded_page->AllocationWatermarkOffset();

  // Check if current object's forward pointer is in the same page
  // as the first live object's forwarding pointer
  if (forwarded_offset + offset < mc_top_offset) {
    // In the same page.
    return first_forwarded + offset;
  }

  // Must be in the next page, NOTE: this may cross chunks.
  Page* next_page = forwarded_page->next_page();
  ASSERT(next_page->is_valid());

  // Carry the remainder of the offset over to the next page's object area.
  offset -= (mc_top_offset - forwarded_offset);
  offset += Page::kObjectStartOffset;

  ASSERT_PAGE_OFFSET(offset);
  ASSERT(next_page->OffsetToAddress(offset) < next_page->AllocationTop());

  return next_page->OffsetToAddress(offset);
}
2804
+
2805
+
2806
+ // -------------------------------------------------------------------------
2807
+ // Phase 4: Relocate objects
2808
+
2809
// Compaction pass 4: physically move every live object to its forwarding
// address.  Maps are relocated first because computing other objects'
// sizes requires their maps; afterwards the new-space semispaces are
// flipped and the relocation info is committed in every space.
void MarkCompactCollector::RelocateObjects() {
#ifdef DEBUG
  ASSERT(state_ == UPDATE_POINTERS);
  state_ = RELOCATE_OBJECTS;
#endif
  // Relocates objects, always relocate map objects first.  Relocating
  // objects in other space relies on map objects to get object size.
  int live_maps_size = IterateLiveObjects(
      heap()->map_space(), &MarkCompactCollector::RelocateMapObject);
  int live_pointer_olds_size = IterateLiveObjects(
      heap()->old_pointer_space(),
      &MarkCompactCollector::RelocateOldPointerObject);
  int live_data_olds_size = IterateLiveObjects(
      heap()->old_data_space(), &MarkCompactCollector::RelocateOldDataObject);
  int live_codes_size = IterateLiveObjects(
      heap()->code_space(), &MarkCompactCollector::RelocateCodeObject);
  int live_cells_size = IterateLiveObjects(
      heap()->cell_space(), &MarkCompactCollector::RelocateCellObject);
  int live_news_size = IterateLiveObjects(
      heap()->new_space(), &MarkCompactCollector::RelocateNewObject);

  USE(live_maps_size);
  USE(live_pointer_olds_size);
  USE(live_data_olds_size);
  USE(live_codes_size);
  USE(live_cells_size);
  USE(live_news_size);
  ASSERT(live_maps_size == live_map_objects_size_);
  ASSERT(live_data_olds_size == live_old_data_objects_size_);
  ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
  ASSERT(live_codes_size == live_code_objects_size_);
  ASSERT(live_cells_size == live_cell_objects_size_);
  ASSERT(live_news_size == live_young_objects_size_);

  // Flip from and to spaces
  heap()->new_space()->Flip();

  heap()->new_space()->MCCommitRelocationInfo();

  // Set age_mark to bottom in to space
  Address mark = heap()->new_space()->bottom();
  heap()->new_space()->set_age_mark(mark);

  // Commit the computed allocation info in every paged space.
  PagedSpaces spaces;
  for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
    space->MCCommitRelocationInfo();

  heap()->CheckNewSpaceExpansionCriteria();
  heap()->IncrementYoungSurvivorsCounter(live_news_size);
}
2859
+
2860
+
2861
// Relocate a single map object: restore its real map pointer from the
// encoded map word, then copy its fixed-size body to the forwarding
// address if it moved.  Returns Map::kSize.
int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // Get forwarding address before resetting map pointer
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset map pointer.  The meta map object may not be copied yet so
  // Map::cast does not yet work.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    // Move contents.
    heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
                                                    old_addr,
                                                    Map::kSize);
  }

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", old_addr, new_addr);
  }
#endif

  return Map::kSize;
}
2891
+
2892
+
2893
// Restore a non-map object's real map pointer (the map space has already
// been compacted, so map_addr is the map's final address) and return the
// object's size.  |new_addr| is used only for sanity checks and logging.
static inline int RestoreMap(HeapObject* obj,
                             PagedSpace* space,
                             Address new_addr,
                             Address map_addr) {
  // This must be a non-map object, and the function relies on the
  // assumption that the Map space is compacted before the other paged
  // spaces (see RelocateObjects).

  // Reset map pointer.
  obj->set_map(Map::cast(HeapObject::FromAddress(map_addr)));

  int obj_size = obj->Size();
  ASSERT_OBJECT_SIZE(obj_size);

  // Compaction only ever moves objects toward the start of the space.
  ASSERT(space->MCSpaceOffsetForAddress(new_addr) <=
         space->MCSpaceOffsetForAddress(obj->address()));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", obj->address(), new_addr);
  }
#endif

  return obj_size;
}
2918
+
2919
+
2920
// Relocate a non-code object in a paged space: restore its map, then move
// its body to the forwarding address.  Old-data-space objects contain no
// pointers, so region marks need no update there.  Returns the object's
// size and emits profiler move events.
int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
                                                   PagedSpace* space) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
  ASSERT(heap()->map_space()->Contains(map_addr));

  // Get forwarding address before resetting map pointer.
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset the map pointer.
  int obj_size = RestoreMap(obj, space, new_addr, map_addr);

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    // Move contents.
    if (space == heap()->old_data_space()) {
      heap()->MoveBlock(new_addr, old_addr, obj_size);
    } else {
      heap()->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
                                                      old_addr,
                                                      obj_size);
    }
  }

  ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());

  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
  if (copied_to->IsSharedFunctionInfo()) {
    PROFILE(heap()->isolate(),
            SharedFunctionInfoMoveEvent(old_addr, new_addr));
  }
  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));

  return obj_size;
}
2957
+
2958
+
2959
+ int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
2960
+ return RelocateOldNonCodeObject(obj, heap()->old_pointer_space());
2961
+ }
2962
+
2963
+
2964
+ int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
2965
+ return RelocateOldNonCodeObject(obj, heap()->old_data_space());
2966
+ }
2967
+
2968
+
2969
+ int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
2970
+ return RelocateOldNonCodeObject(obj, heap()->cell_space());
2971
+ }
2972
+
2973
+
2974
// Relocate a code-space object: restore its map, move its body, then fix
// up the relocated code (relocation info, possibly inline cache targets)
// and notify the profiler.  Returns the object's size.
int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(heap()->map_space());
  ASSERT(heap()->map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // Get forwarding address before resetting map pointer
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset the map pointer.
  int obj_size = RestoreMap(obj, heap()->code_space(), new_addr, map_addr);

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    // Move contents.
    heap()->MoveBlock(new_addr, old_addr, obj_size);
  }

  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
  if (copied_to->IsCode()) {
    // May also update inline cache target.
    Code::cast(copied_to)->Relocate(new_addr - old_addr);
    // Notify the logger that compiled code has moved.
    PROFILE(heap()->isolate(), CodeMoveEvent(old_addr, new_addr));
  }
  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));

  return obj_size;
}
3004
+
3005
+
3006
// Relocates one live object that currently resides in new space.
// The forwarding address was recorded (by an earlier phase) in from
// space at the same offset the object has in to space; it may point
// either back into new space or into an old space if the object is
// being promoted. Returns the object's size in bytes.
int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
  int obj_size = obj->Size();

  // Get forwarding address
  Address old_addr = obj->address();
  int offset = heap()->new_space()->ToSpaceOffsetForAddress(old_addr);

  // The forwarding pointer is stored in from space at the object's
  // to-space offset.
  Address new_addr =
      Memory::Address_at(heap()->new_space()->FromSpaceLow() + offset);

#ifdef DEBUG
  // Sanity-check the destination: either within from space (at an
  // offset no larger than the source offset, so copies never overlap
  // forward), or in one of the promotion target spaces.
  if (heap()->new_space()->FromSpaceContains(new_addr)) {
    ASSERT(heap()->new_space()->FromSpaceOffsetForAddress(new_addr) <=
           heap()->new_space()->ToSpaceOffsetForAddress(old_addr));
  } else {
    ASSERT(heap()->TargetSpace(obj) == heap()->old_pointer_space() ||
           heap()->TargetSpace(obj) == heap()->old_data_space());
  }
#endif

  // New and old addresses cannot overlap.
  if (heap()->InNewSpace(HeapObject::FromAddress(new_addr))) {
    heap()->CopyBlock(new_addr, old_addr, obj_size);
  } else {
    // Promotion into old space must also update the write-barrier
    // region marks for the destination pages.
    heap()->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
                                                   old_addr,
                                                   obj_size);
  }

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", old_addr, new_addr);
  }
#endif

  // Tell the profilers about the move so their address-keyed records
  // stay valid.
  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
  if (copied_to->IsSharedFunctionInfo()) {
    PROFILE(heap()->isolate(),
            SharedFunctionInfoMoveEvent(old_addr, new_addr));
  }
  HEAP_PROFILE(heap(), ObjectMoveEvent(old_addr, new_addr));

  return obj_size;
}
3050
+
3051
+
3052
+ void MarkCompactCollector::EnableCodeFlushing(bool enable) {
3053
+ if (enable) {
3054
+ if (code_flusher_ != NULL) return;
3055
+ code_flusher_ = new CodeFlusher(heap()->isolate());
3056
+ } else {
3057
+ if (code_flusher_ == NULL) return;
3058
+ delete code_flusher_;
3059
+ code_flusher_ = NULL;
3060
+ }
3061
+ }
3062
+
3063
+
3064
// Reports a dead object to external observers before its space is
// reclaimed. Only Code objects are of interest; under the GDB JIT
// build flag the code range is unregistered from the debugger
// interface, and under the logging/profiling flag a code-delete event
// is emitted for the profiler. With both flags off this is a no-op.
void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
                                                Isolate* isolate) {
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (obj->IsCode()) {
    GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
  }
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (obj->IsCode()) {
    PROFILE(isolate, CodeDeleteEvent(obj->address()));
  }
#endif
}
3077
+
3078
+
3079
+ int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
3080
+ MapWord map_word = obj->map_word();
3081
+ map_word.ClearMark();
3082
+ return obj->SizeFromMap(map_word.ToMap());
3083
+ }
3084
+
3085
+
3086
// One-time static initialization of the visitor dispatch tables used
// by the collector (marking and new-generation pointer updating).
// NOTE(review): presumably called once at VM startup before any GC —
// confirm against the caller.
void MarkCompactCollector::Initialize() {
  StaticPointersToNewGenUpdatingVisitor::Initialize();
  StaticMarkingVisitor::Initialize();
}
3090
+
3091
+
3092
+ } } // namespace v8::internal