therubyracer 0.9.0beta2 → 0.9.0beta3

Potentially problematic release: this version of therubyracer has been flagged as possibly problematic.
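If the flagged status is a concern, one option is to stay on the previous release by pinning the gem version in the application's Gemfile. A minimal sketch, assuming the version strings published on the gem index match the ones shown above:

  # Gemfile: pin therubyracer to the prior beta until 0.9.0beta3 has been reviewed
  source 'https://rubygems.org'     # assumed gem source
  gem 'therubyracer', '0.9.0beta2'  # previous release named in the title above

Running bundle install afterwards records the pinned version in the lockfile.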

Files changed (920)
  1. data/.gitmodules +3 -0
  2. data/ext/v8/upstream/Makefile +1 -2
  3. data/ext/v8/upstream/v8/.gitignore +33 -0
  4. data/ext/v8/upstream/v8/AUTHORS +42 -0
  5. data/ext/v8/upstream/v8/ChangeLog +2663 -0
  6. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE +0 -0
  7. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.strongtalk +0 -0
  8. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.v8 +0 -0
  9. data/ext/v8/upstream/{3.1.8 → v8}/LICENSE.valgrind +0 -0
  10. data/ext/v8/upstream/v8/SConstruct +1473 -0
  11. data/ext/v8/upstream/{3.1.8 → v8}/build/README.txt +0 -0
  12. data/ext/v8/upstream/{3.1.8 → v8}/build/all.gyp +0 -0
  13. data/ext/v8/upstream/{3.1.8 → v8}/build/armu.gypi +0 -0
  14. data/ext/v8/upstream/{3.1.8 → v8}/build/common.gypi +0 -0
  15. data/ext/v8/upstream/{3.1.8 → v8}/build/gyp_v8 +0 -0
  16. data/ext/v8/upstream/v8/include/v8-debug.h +394 -0
  17. data/ext/v8/upstream/v8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/v8/include/v8-profiler.h +505 -0
  19. data/ext/v8/upstream/v8/include/v8-testing.h +104 -0
  20. data/ext/v8/upstream/v8/include/v8.h +4000 -0
  21. data/ext/v8/upstream/{3.1.8 → v8}/include/v8stdint.h +0 -0
  22. data/ext/v8/upstream/v8/preparser/SConscript +38 -0
  23. data/ext/v8/upstream/v8/preparser/preparser-process.cc +169 -0
  24. data/ext/v8/upstream/v8/src/SConscript +380 -0
  25. data/ext/v8/upstream/v8/src/accessors.cc +766 -0
  26. data/ext/v8/upstream/{3.1.8 → v8}/src/accessors.h +0 -0
  27. data/ext/v8/upstream/v8/src/allocation-inl.h +49 -0
  28. data/ext/v8/upstream/v8/src/allocation.cc +122 -0
  29. data/ext/v8/upstream/v8/src/allocation.h +143 -0
  30. data/ext/v8/upstream/v8/src/api.cc +5678 -0
  31. data/ext/v8/upstream/v8/src/api.h +572 -0
  32. data/ext/v8/upstream/{3.1.8 → v8}/src/apinatives.js +0 -0
  33. data/ext/v8/upstream/v8/src/apiutils.h +73 -0
  34. data/ext/v8/upstream/v8/src/arguments.h +116 -0
  35. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +353 -0
  36. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +2877 -0
  37. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +1382 -0
  38. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +1634 -0
  39. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +6917 -0
  40. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +623 -0
  41. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/codegen-arm-inl.h +0 -0
  42. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +7437 -0
  43. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +595 -0
  44. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/constants-arm.cc +0 -0
  45. data/ext/v8/upstream/v8/src/arm/constants-arm.h +778 -0
  46. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +149 -0
  47. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +317 -0
  48. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +737 -0
  49. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +1503 -0
  50. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/frames-arm.cc +0 -0
  51. data/ext/v8/upstream/v8/src/arm/frames-arm.h +168 -0
  52. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +4374 -0
  53. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +1793 -0
  54. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/jump-target-arm.cc +0 -0
  55. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +2120 -0
  56. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +2179 -0
  57. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +4132 -0
  58. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +329 -0
  59. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  60. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/lithium-gap-resolver-arm.h +0 -0
  61. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +2939 -0
  62. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +1071 -0
  63. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  64. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  65. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm-inl.h +0 -0
  66. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.cc +0 -0
  67. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/register-allocator-arm.h +0 -0
  68. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +3288 -0
  69. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +413 -0
  70. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +4034 -0
  71. data/ext/v8/upstream/{3.1.8 → v8}/src/arm/virtual-frame-arm-inl.h +0 -0
  72. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +843 -0
  73. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +523 -0
  74. data/ext/v8/upstream/v8/src/array.js +1249 -0
  75. data/ext/v8/upstream/v8/src/assembler.cc +1067 -0
  76. data/ext/v8/upstream/v8/src/assembler.h +823 -0
  77. data/ext/v8/upstream/v8/src/ast-inl.h +112 -0
  78. data/ext/v8/upstream/v8/src/ast.cc +1078 -0
  79. data/ext/v8/upstream/v8/src/ast.h +2234 -0
  80. data/ext/v8/upstream/v8/src/atomicops.h +167 -0
  81. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_arm_gcc.h +0 -0
  82. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +169 -0
  83. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.cc +0 -0
  84. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_gcc.h +0 -0
  85. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_macosx.h +0 -0
  86. data/ext/v8/upstream/{3.1.8 → v8}/src/atomicops_internals_x86_msvc.h +0 -0
  87. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.cc +0 -0
  88. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum-dtoa.h +0 -0
  89. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.cc +0 -0
  90. data/ext/v8/upstream/{3.1.8 → v8}/src/bignum.h +0 -0
  91. data/ext/v8/upstream/v8/src/bootstrapper.cc +2138 -0
  92. data/ext/v8/upstream/v8/src/bootstrapper.h +185 -0
  93. data/ext/v8/upstream/v8/src/builtins.cc +1708 -0
  94. data/ext/v8/upstream/v8/src/builtins.h +368 -0
  95. data/ext/v8/upstream/{3.1.8 → v8}/src/bytecodes-irregexp.h +0 -0
  96. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.cc +0 -0
  97. data/ext/v8/upstream/{3.1.8 → v8}/src/cached-powers.h +0 -0
  98. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates-inl.h +0 -0
  99. data/ext/v8/upstream/{3.1.8 → v8}/src/char-predicates.h +0 -0
  100. data/ext/v8/upstream/v8/src/checks.cc +110 -0
  101. data/ext/v8/upstream/v8/src/checks.h +296 -0
  102. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue-inl.h +0 -0
  103. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.cc +0 -0
  104. data/ext/v8/upstream/{3.1.8 → v8}/src/circular-queue.h +0 -0
  105. data/ext/v8/upstream/v8/src/code-stubs.cc +240 -0
  106. data/ext/v8/upstream/v8/src/code-stubs.h +971 -0
  107. data/ext/v8/upstream/{3.1.8 → v8}/src/code.h +0 -0
  108. data/ext/v8/upstream/v8/src/codegen-inl.h +68 -0
  109. data/ext/v8/upstream/v8/src/codegen.cc +505 -0
  110. data/ext/v8/upstream/v8/src/codegen.h +245 -0
  111. data/ext/v8/upstream/v8/src/compilation-cache.cc +540 -0
  112. data/ext/v8/upstream/v8/src/compilation-cache.h +287 -0
  113. data/ext/v8/upstream/v8/src/compiler.cc +792 -0
  114. data/ext/v8/upstream/v8/src/compiler.h +307 -0
  115. data/ext/v8/upstream/v8/src/contexts.cc +327 -0
  116. data/ext/v8/upstream/v8/src/contexts.h +382 -0
  117. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions-inl.h +0 -0
  118. data/ext/v8/upstream/v8/src/conversions.cc +1125 -0
  119. data/ext/v8/upstream/{3.1.8 → v8}/src/conversions.h +0 -0
  120. data/ext/v8/upstream/v8/src/counters.cc +93 -0
  121. data/ext/v8/upstream/v8/src/counters.h +254 -0
  122. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +101 -0
  123. data/ext/v8/upstream/v8/src/cpu-profiler.cc +606 -0
  124. data/ext/v8/upstream/v8/src/cpu-profiler.h +305 -0
  125. data/ext/v8/upstream/v8/src/cpu.h +67 -0
  126. data/ext/v8/upstream/v8/src/d8-debug.cc +367 -0
  127. data/ext/v8/upstream/v8/src/d8-debug.h +158 -0
  128. data/ext/v8/upstream/v8/src/d8-posix.cc +695 -0
  129. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-readline.cc +0 -0
  130. data/ext/v8/upstream/{3.1.8 → v8}/src/d8-windows.cc +0 -0
  131. data/ext/v8/upstream/v8/src/d8.cc +796 -0
  132. data/ext/v8/upstream/v8/src/d8.gyp +88 -0
  133. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.h +0 -0
  134. data/ext/v8/upstream/{3.1.8 → v8}/src/d8.js +0 -0
  135. data/ext/v8/upstream/{3.1.8 → v8}/src/data-flow.cc +0 -0
  136. data/ext/v8/upstream/v8/src/data-flow.h +379 -0
  137. data/ext/v8/upstream/{3.1.8 → v8}/src/date.js +0 -0
  138. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser-inl.h +0 -0
  139. data/ext/v8/upstream/{3.1.8 → v8}/src/dateparser.cc +0 -0
  140. data/ext/v8/upstream/v8/src/dateparser.h +265 -0
  141. data/ext/v8/upstream/v8/src/debug-agent.cc +447 -0
  142. data/ext/v8/upstream/v8/src/debug-agent.h +129 -0
  143. data/ext/v8/upstream/{3.1.8 → v8}/src/debug-debugger.js +0 -0
  144. data/ext/v8/upstream/v8/src/debug.cc +3188 -0
  145. data/ext/v8/upstream/v8/src/debug.h +1055 -0
  146. data/ext/v8/upstream/v8/src/deoptimizer.cc +1296 -0
  147. data/ext/v8/upstream/v8/src/deoptimizer.h +629 -0
  148. data/ext/v8/upstream/v8/src/disasm.h +80 -0
  149. data/ext/v8/upstream/v8/src/disassembler.cc +339 -0
  150. data/ext/v8/upstream/{3.1.8 → v8}/src/disassembler.h +0 -0
  151. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.cc +0 -0
  152. data/ext/v8/upstream/{3.1.8 → v8}/src/diy-fp.h +0 -0
  153. data/ext/v8/upstream/{3.1.8 → v8}/src/double.h +0 -0
  154. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.cc +0 -0
  155. data/ext/v8/upstream/{3.1.8 → v8}/src/dtoa.h +0 -0
  156. data/ext/v8/upstream/v8/src/execution.cc +791 -0
  157. data/ext/v8/upstream/v8/src/execution.h +291 -0
  158. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +250 -0
  159. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +89 -0
  160. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +55 -0
  161. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +284 -0
  162. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/experimental/i18n-extension.h +0 -0
  163. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +141 -0
  164. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/externalize-string-extension.h +0 -0
  165. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +58 -0
  166. data/ext/v8/upstream/{3.1.8 → v8}/src/extensions/gc-extension.h +0 -0
  167. data/ext/v8/upstream/v8/src/factory.cc +1194 -0
  168. data/ext/v8/upstream/v8/src/factory.h +436 -0
  169. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.cc +0 -0
  170. data/ext/v8/upstream/{3.1.8 → v8}/src/fast-dtoa.h +0 -0
  171. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.cc +0 -0
  172. data/ext/v8/upstream/{3.1.8 → v8}/src/fixed-dtoa.h +0 -0
  173. data/ext/v8/upstream/v8/src/flag-definitions.h +556 -0
  174. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.cc +0 -0
  175. data/ext/v8/upstream/{3.1.8 → v8}/src/flags.h +0 -0
  176. data/ext/v8/upstream/v8/src/frame-element.cc +37 -0
  177. data/ext/v8/upstream/v8/src/frame-element.h +269 -0
  178. data/ext/v8/upstream/v8/src/frames-inl.h +236 -0
  179. data/ext/v8/upstream/v8/src/frames.cc +1273 -0
  180. data/ext/v8/upstream/v8/src/frames.h +854 -0
  181. data/ext/v8/upstream/v8/src/full-codegen.cc +1385 -0
  182. data/ext/v8/upstream/v8/src/full-codegen.h +753 -0
  183. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +91 -0
  184. data/ext/v8/upstream/v8/src/func-name-inferrer.h +111 -0
  185. data/ext/v8/upstream/v8/src/gdb-jit.cc +1548 -0
  186. data/ext/v8/upstream/{3.1.8 → v8}/src/gdb-jit.h +0 -0
  187. data/ext/v8/upstream/v8/src/global-handles.cc +596 -0
  188. data/ext/v8/upstream/v8/src/global-handles.h +239 -0
  189. data/ext/v8/upstream/v8/src/globals.h +325 -0
  190. data/ext/v8/upstream/v8/src/handles-inl.h +177 -0
  191. data/ext/v8/upstream/v8/src/handles.cc +965 -0
  192. data/ext/v8/upstream/v8/src/handles.h +372 -0
  193. data/ext/v8/upstream/{3.1.8 → v8}/src/hashmap.cc +0 -0
  194. data/ext/v8/upstream/v8/src/hashmap.h +121 -0
  195. data/ext/v8/upstream/v8/src/heap-inl.h +703 -0
  196. data/ext/v8/upstream/v8/src/heap-profiler.cc +1173 -0
  197. data/ext/v8/upstream/v8/src/heap-profiler.h +396 -0
  198. data/ext/v8/upstream/v8/src/heap.cc +5856 -0
  199. data/ext/v8/upstream/v8/src/heap.h +2264 -0
  200. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +1639 -0
  201. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +3657 -0
  202. data/ext/v8/upstream/v8/src/hydrogen.cc +6011 -0
  203. data/ext/v8/upstream/v8/src/hydrogen.h +1137 -0
  204. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +430 -0
  205. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +2846 -0
  206. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +1159 -0
  207. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +1596 -0
  208. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +6549 -0
  209. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +495 -0
  210. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/codegen-ia32-inl.h +0 -0
  211. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +10385 -0
  212. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +801 -0
  213. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +88 -0
  214. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +312 -0
  215. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  216. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +1620 -0
  217. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/frames-ia32.cc +0 -0
  218. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +140 -0
  219. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +4357 -0
  220. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +1779 -0
  221. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/jump-target-ia32.cc +0 -0
  222. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +4158 -0
  223. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +318 -0
  224. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +466 -0
  225. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/lithium-gap-resolver-ia32.h +0 -0
  226. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +2181 -0
  227. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +2235 -0
  228. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +2056 -0
  229. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +807 -0
  230. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +1264 -0
  231. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  232. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  233. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +157 -0
  234. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/register-allocator-ia32.h +0 -0
  235. data/ext/v8/upstream/{3.1.8 → v8}/src/ia32/simulator-ia32.cc +0 -0
  236. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +72 -0
  237. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +3711 -0
  238. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +1366 -0
  239. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +650 -0
  240. data/ext/v8/upstream/v8/src/ic-inl.h +130 -0
  241. data/ext/v8/upstream/v8/src/ic.cc +2389 -0
  242. data/ext/v8/upstream/v8/src/ic.h +675 -0
  243. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.cc +0 -0
  244. data/ext/v8/upstream/{3.1.8 → v8}/src/inspector.h +0 -0
  245. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +659 -0
  246. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +49 -0
  247. data/ext/v8/upstream/v8/src/isolate.cc +883 -0
  248. data/ext/v8/upstream/v8/src/isolate.h +1306 -0
  249. data/ext/v8/upstream/v8/src/json.js +342 -0
  250. data/ext/v8/upstream/v8/src/jsregexp.cc +5371 -0
  251. data/ext/v8/upstream/v8/src/jsregexp.h +1483 -0
  252. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-heavy-inl.h +0 -0
  253. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +427 -0
  254. data/ext/v8/upstream/v8/src/jump-target-heavy.h +238 -0
  255. data/ext/v8/upstream/v8/src/jump-target-inl.h +48 -0
  256. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light-inl.h +0 -0
  257. data/ext/v8/upstream/v8/src/jump-target-light.cc +111 -0
  258. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target-light.h +0 -0
  259. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.cc +0 -0
  260. data/ext/v8/upstream/{3.1.8 → v8}/src/jump-target.h +0 -0
  261. data/ext/v8/upstream/{3.1.8 → v8}/src/list-inl.h +0 -0
  262. data/ext/v8/upstream/{3.1.8 → v8}/src/list.h +0 -0
  263. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +142 -0
  264. data/ext/v8/upstream/v8/src/lithium-allocator.cc +2105 -0
  265. data/ext/v8/upstream/v8/src/lithium-allocator.h +630 -0
  266. data/ext/v8/upstream/v8/src/lithium.cc +169 -0
  267. data/ext/v8/upstream/{3.1.8 → v8}/src/lithium.h +0 -0
  268. data/ext/v8/upstream/{3.1.8 → v8}/src/liveedit-debugger.js +0 -0
  269. data/ext/v8/upstream/v8/src/liveedit.cc +1693 -0
  270. data/ext/v8/upstream/v8/src/liveedit.h +179 -0
  271. data/ext/v8/upstream/{3.1.8 → v8}/src/liveobjectlist-inl.h +0 -0
  272. data/ext/v8/upstream/v8/src/liveobjectlist.cc +2589 -0
  273. data/ext/v8/upstream/v8/src/liveobjectlist.h +322 -0
  274. data/ext/v8/upstream/{3.1.8 → v8}/src/log-inl.h +0 -0
  275. data/ext/v8/upstream/v8/src/log-utils.cc +423 -0
  276. data/ext/v8/upstream/v8/src/log-utils.h +229 -0
  277. data/ext/v8/upstream/v8/src/log.cc +1666 -0
  278. data/ext/v8/upstream/v8/src/log.h +446 -0
  279. data/ext/v8/upstream/{3.1.8 → v8}/src/macro-assembler.h +0 -0
  280. data/ext/v8/upstream/{3.1.8 → v8}/src/macros.py +0 -0
  281. data/ext/v8/upstream/v8/src/mark-compact.cc +3092 -0
  282. data/ext/v8/upstream/v8/src/mark-compact.h +506 -0
  283. data/ext/v8/upstream/{3.1.8 → v8}/src/math.js +0 -0
  284. data/ext/v8/upstream/v8/src/messages.cc +166 -0
  285. data/ext/v8/upstream/{3.1.8 → v8}/src/messages.h +0 -0
  286. data/ext/v8/upstream/v8/src/messages.js +1090 -0
  287. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +335 -0
  288. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +2093 -0
  289. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +1066 -0
  290. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +148 -0
  291. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +752 -0
  292. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +511 -0
  293. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +64 -0
  294. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +1213 -0
  295. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +633 -0
  296. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +352 -0
  297. data/ext/v8/upstream/v8/src/mips/constants-mips.h +723 -0
  298. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +90 -0
  299. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +155 -0
  300. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +91 -0
  301. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +1023 -0
  302. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +48 -0
  303. data/ext/v8/upstream/v8/src/mips/frames-mips.h +179 -0
  304. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +727 -0
  305. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +244 -0
  306. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +80 -0
  307. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +65 -0
  308. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +304 -0
  309. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +3327 -0
  310. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +1058 -0
  311. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  312. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  313. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +134 -0
  314. data/ext/v8/upstream/{3.1.8 → v8}/src/mips/register-allocator-mips.cc +0 -0
  315. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +47 -0
  316. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +2438 -0
  317. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +394 -0
  318. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +601 -0
  319. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +58 -0
  320. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +307 -0
  321. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +530 -0
  322. data/ext/v8/upstream/v8/src/mirror-debugger.js +2381 -0
  323. data/ext/v8/upstream/v8/src/mksnapshot.cc +256 -0
  324. data/ext/v8/upstream/{3.1.8 → v8}/src/natives.h +0 -0
  325. data/ext/v8/upstream/v8/src/objects-debug.cc +722 -0
  326. data/ext/v8/upstream/v8/src/objects-inl.h +4166 -0
  327. data/ext/v8/upstream/v8/src/objects-printer.cc +801 -0
  328. data/ext/v8/upstream/v8/src/objects-visiting.cc +142 -0
  329. data/ext/v8/upstream/v8/src/objects-visiting.h +422 -0
  330. data/ext/v8/upstream/v8/src/objects.cc +10296 -0
  331. data/ext/v8/upstream/v8/src/objects.h +6662 -0
  332. data/ext/v8/upstream/v8/src/parser.cc +5168 -0
  333. data/ext/v8/upstream/v8/src/parser.h +823 -0
  334. data/ext/v8/upstream/v8/src/platform-cygwin.cc +811 -0
  335. data/ext/v8/upstream/v8/src/platform-freebsd.cc +854 -0
  336. data/ext/v8/upstream/v8/src/platform-linux.cc +1120 -0
  337. data/ext/v8/upstream/v8/src/platform-macos.cc +865 -0
  338. data/ext/v8/upstream/v8/src/platform-nullos.cc +504 -0
  339. data/ext/v8/upstream/v8/src/platform-openbsd.cc +672 -0
  340. data/ext/v8/upstream/v8/src/platform-posix.cc +424 -0
  341. data/ext/v8/upstream/v8/src/platform-solaris.cc +796 -0
  342. data/ext/v8/upstream/v8/src/platform-tls-mac.h +62 -0
  343. data/ext/v8/upstream/v8/src/platform-tls-win32.h +62 -0
  344. data/ext/v8/upstream/v8/src/platform-tls.h +50 -0
  345. data/ext/v8/upstream/v8/src/platform-win32.cc +2072 -0
  346. data/ext/v8/upstream/v8/src/platform.h +693 -0
  347. data/ext/v8/upstream/v8/src/preparse-data.cc +185 -0
  348. data/ext/v8/upstream/{3.1.8 → v8}/src/preparse-data.h +0 -0
  349. data/ext/v8/upstream/v8/src/preparser-api.cc +219 -0
  350. data/ext/v8/upstream/v8/src/preparser.cc +1205 -0
  351. data/ext/v8/upstream/{3.1.8 → v8}/src/preparser.h +0 -0
  352. data/ext/v8/upstream/v8/src/prettyprinter.cc +1530 -0
  353. data/ext/v8/upstream/v8/src/prettyprinter.h +223 -0
  354. data/ext/v8/upstream/{3.1.8 → v8}/src/profile-generator-inl.h +0 -0
  355. data/ext/v8/upstream/v8/src/profile-generator.cc +3095 -0
  356. data/ext/v8/upstream/v8/src/profile-generator.h +1125 -0
  357. data/ext/v8/upstream/v8/src/property.cc +102 -0
  358. data/ext/v8/upstream/v8/src/property.h +348 -0
  359. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  360. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +470 -0
  361. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-irregexp.h +0 -0
  362. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.cc +0 -0
  363. data/ext/v8/upstream/{3.1.8 → v8}/src/regexp-macro-assembler-tracer.h +0 -0
  364. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +266 -0
  365. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +236 -0
  366. data/ext/v8/upstream/v8/src/regexp-stack.cc +111 -0
  367. data/ext/v8/upstream/v8/src/regexp-stack.h +147 -0
  368. data/ext/v8/upstream/v8/src/regexp.js +483 -0
  369. data/ext/v8/upstream/v8/src/register-allocator-inl.h +141 -0
  370. data/ext/v8/upstream/v8/src/register-allocator.cc +98 -0
  371. data/ext/v8/upstream/v8/src/register-allocator.h +310 -0
  372. data/ext/v8/upstream/v8/src/rewriter.cc +1024 -0
  373. data/ext/v8/upstream/{3.1.8 → v8}/src/rewriter.h +0 -0
  374. data/ext/v8/upstream/v8/src/runtime-profiler.cc +478 -0
  375. data/ext/v8/upstream/v8/src/runtime-profiler.h +192 -0
  376. data/ext/v8/upstream/v8/src/runtime.cc +11949 -0
  377. data/ext/v8/upstream/v8/src/runtime.h +643 -0
  378. data/ext/v8/upstream/{3.1.8 → v8}/src/runtime.js +0 -0
  379. data/ext/v8/upstream/v8/src/safepoint-table.cc +256 -0
  380. data/ext/v8/upstream/v8/src/safepoint-table.h +269 -0
  381. data/ext/v8/upstream/v8/src/scanner-base.cc +964 -0
  382. data/ext/v8/upstream/v8/src/scanner-base.h +664 -0
  383. data/ext/v8/upstream/v8/src/scanner.cc +584 -0
  384. data/ext/v8/upstream/v8/src/scanner.h +196 -0
  385. data/ext/v8/upstream/v8/src/scopeinfo.cc +631 -0
  386. data/ext/v8/upstream/v8/src/scopeinfo.h +249 -0
  387. data/ext/v8/upstream/v8/src/scopes.cc +1093 -0
  388. data/ext/v8/upstream/v8/src/scopes.h +508 -0
  389. data/ext/v8/upstream/v8/src/serialize.cc +1574 -0
  390. data/ext/v8/upstream/v8/src/serialize.h +589 -0
  391. data/ext/v8/upstream/{3.1.8 → v8}/src/shell.h +0 -0
  392. data/ext/v8/upstream/{3.1.8 → v8}/src/simulator.h +0 -0
  393. data/ext/v8/upstream/v8/src/small-pointer-list.h +163 -0
  394. data/ext/v8/upstream/{3.1.8 → v8}/src/smart-pointer.h +0 -0
  395. data/ext/v8/upstream/v8/src/snapshot-common.cc +82 -0
  396. data/ext/v8/upstream/{3.1.8 → v8}/src/snapshot-empty.cc +0 -0
  397. data/ext/v8/upstream/v8/src/snapshot.h +73 -0
  398. data/ext/v8/upstream/v8/src/spaces-inl.h +529 -0
  399. data/ext/v8/upstream/v8/src/spaces.cc +3147 -0
  400. data/ext/v8/upstream/v8/src/spaces.h +2368 -0
  401. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree-inl.h +0 -0
  402. data/ext/v8/upstream/{3.1.8 → v8}/src/splay-tree.h +0 -0
  403. data/ext/v8/upstream/v8/src/string-search.cc +41 -0
  404. data/ext/v8/upstream/v8/src/string-search.h +568 -0
  405. data/ext/v8/upstream/v8/src/string-stream.cc +592 -0
  406. data/ext/v8/upstream/{3.1.8 → v8}/src/string-stream.h +0 -0
  407. data/ext/v8/upstream/v8/src/string.js +915 -0
  408. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.cc +0 -0
  409. data/ext/v8/upstream/{3.1.8 → v8}/src/strtod.h +0 -0
  410. data/ext/v8/upstream/v8/src/stub-cache.cc +1940 -0
  411. data/ext/v8/upstream/v8/src/stub-cache.h +866 -0
  412. data/ext/v8/upstream/{3.1.8 → v8}/src/third_party/valgrind/valgrind.h +0 -0
  413. data/ext/v8/upstream/v8/src/token.cc +63 -0
  414. data/ext/v8/upstream/v8/src/token.h +288 -0
  415. data/ext/v8/upstream/v8/src/top.cc +983 -0
  416. data/ext/v8/upstream/v8/src/type-info.cc +472 -0
  417. data/ext/v8/upstream/v8/src/type-info.h +290 -0
  418. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue-inl.h +0 -0
  419. data/ext/v8/upstream/{3.1.8 → v8}/src/unbound-queue.h +0 -0
  420. data/ext/v8/upstream/{3.1.8 → v8}/src/unicode-inl.h +0 -0
  421. data/ext/v8/upstream/v8/src/unicode.cc +1624 -0
  422. data/ext/v8/upstream/v8/src/unicode.h +280 -0
  423. data/ext/v8/upstream/{3.1.8 → v8}/src/uri.js +0 -0
  424. data/ext/v8/upstream/{3.1.8 → v8}/src/utils.cc +0 -0
  425. data/ext/v8/upstream/v8/src/utils.h +796 -0
  426. data/ext/v8/upstream/v8/src/v8-counters.cc +62 -0
  427. data/ext/v8/upstream/v8/src/v8-counters.h +311 -0
  428. data/ext/v8/upstream/v8/src/v8.cc +215 -0
  429. data/ext/v8/upstream/v8/src/v8.h +130 -0
  430. data/ext/v8/upstream/{3.1.8 → v8}/src/v8checks.h +0 -0
  431. data/ext/v8/upstream/{3.1.8 → v8}/src/v8dll-main.cc +0 -0
  432. data/ext/v8/upstream/v8/src/v8globals.h +486 -0
  433. data/ext/v8/upstream/{3.1.8/src/memory.h → v8/src/v8memory.h} +0 -0
  434. data/ext/v8/upstream/v8/src/v8natives.js +1293 -0
  435. data/ext/v8/upstream/{3.1.8 → v8}/src/v8preparserdll-main.cc +0 -0
  436. data/ext/v8/upstream/v8/src/v8threads.cc +453 -0
  437. data/ext/v8/upstream/v8/src/v8threads.h +164 -0
  438. data/ext/v8/upstream/v8/src/v8utils.h +317 -0
  439. data/ext/v8/upstream/{3.1.8 → v8}/src/variables.cc +0 -0
  440. data/ext/v8/upstream/v8/src/variables.h +212 -0
  441. data/ext/v8/upstream/v8/src/version.cc +116 -0
  442. data/ext/v8/upstream/v8/src/version.h +68 -0
  443. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy-inl.h +0 -0
  444. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-heavy.cc +0 -0
  445. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-inl.h +0 -0
  446. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +171 -0
  447. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame-light.cc +0 -0
  448. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.cc +0 -0
  449. data/ext/v8/upstream/{3.1.8 → v8}/src/virtual-frame.h +0 -0
  450. data/ext/v8/upstream/v8/src/vm-state-inl.h +138 -0
  451. data/ext/v8/upstream/v8/src/vm-state.h +70 -0
  452. data/ext/v8/upstream/v8/src/win32-headers.h +96 -0
  453. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +456 -0
  454. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +2954 -0
  455. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +1630 -0
  456. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +1493 -0
  457. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +5132 -0
  458. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +477 -0
  459. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/codegen-x64-inl.h +0 -0
  460. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +8843 -0
  461. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +753 -0
  462. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +88 -0
  463. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +318 -0
  464. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +815 -0
  465. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +1752 -0
  466. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/frames-x64.cc +0 -0
  467. data/ext/v8/upstream/v8/src/x64/frames-x64.h +130 -0
  468. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +4339 -0
  469. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +1752 -0
  470. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/jump-target-x64.cc +0 -0
  471. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +3970 -0
  472. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +318 -0
  473. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.cc +0 -0
  474. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/lithium-gap-resolver-x64.h +0 -0
  475. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +2115 -0
  476. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +2161 -0
  477. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +2911 -0
  478. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +1984 -0
  479. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  480. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  481. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +87 -0
  482. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +95 -0
  483. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/register-allocator-x64.h +0 -0
  484. data/ext/v8/upstream/{3.1.8 → v8}/src/x64/simulator-x64.cc +0 -0
  485. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +71 -0
  486. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +3460 -0
  487. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +1296 -0
  488. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +597 -0
  489. data/ext/v8/upstream/v8/src/zone-inl.h +129 -0
  490. data/ext/v8/upstream/v8/src/zone.cc +196 -0
  491. data/ext/v8/upstream/v8/src/zone.h +236 -0
  492. data/ext/v8/upstream/{3.1.8 → v8}/tools/codemap.js +0 -0
  493. data/ext/v8/upstream/{3.1.8 → v8}/tools/consarray.js +0 -0
  494. data/ext/v8/upstream/{3.1.8 → v8}/tools/csvparser.js +0 -0
  495. data/ext/v8/upstream/{3.1.8 → v8}/tools/disasm.py +0 -0
  496. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +10 -0
  497. data/ext/v8/upstream/{3.1.8 → v8}/tools/gc-nvp-trace-processor.py +0 -0
  498. data/ext/v8/upstream/{3.1.8 → v8}/tools/generate-ten-powers.scm +0 -0
  499. data/ext/v8/upstream/{3.1.8 → v8}/tools/grokdump.py +0 -0
  500. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +844 -0
  501. data/ext/v8/upstream/{3.1.8 → v8}/tools/js2c.py +0 -0
  502. data/ext/v8/upstream/{3.1.8 → v8}/tools/jsmin.py +0 -0
  503. data/ext/v8/upstream/v8/tools/linux-tick-processor +35 -0
  504. data/ext/v8/upstream/{3.1.8 → v8}/tools/ll_prof.py +0 -0
  505. data/ext/v8/upstream/{3.1.8 → v8}/tools/logreader.js +0 -0
  506. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-nm +0 -0
  507. data/ext/v8/upstream/{3.1.8 → v8}/tools/mac-tick-processor +0 -0
  508. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/README +0 -0
  509. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/SConstruct +0 -0
  510. data/ext/v8/upstream/{3.1.8 → v8}/tools/oom_dump/oom_dump.cc +0 -0
  511. data/ext/v8/upstream/{3.1.8 → v8}/tools/presubmit.py +0 -0
  512. data/ext/v8/upstream/{3.1.8 → v8}/tools/process-heap-prof.py +0 -0
  513. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile.js +0 -0
  514. data/ext/v8/upstream/{3.1.8 → v8}/tools/profile_view.js +0 -0
  515. data/ext/v8/upstream/{3.1.8 → v8}/tools/run-valgrind.py +0 -0
  516. data/ext/v8/upstream/{3.1.8 → v8}/tools/splaytree.js +0 -0
  517. data/ext/v8/upstream/{3.1.8 → v8}/tools/stats-viewer.py +0 -0
  518. data/ext/v8/upstream/v8/tools/test.py +1490 -0
  519. data/ext/v8/upstream/{3.1.8 → v8}/tools/tickprocessor-driver.js +0 -0
  520. data/ext/v8/upstream/v8/tools/tickprocessor.js +877 -0
  521. data/ext/v8/upstream/{3.1.8 → v8}/tools/utils.py +0 -0
  522. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/README.txt +0 -0
  523. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/arm.vsprops +0 -0
  524. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/common.vsprops +0 -0
  525. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8.vcproj +0 -0
  526. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_arm.vcproj +0 -0
  527. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8_x64.vcproj +0 -0
  528. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/d8js2c.cmd +0 -0
  529. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/debug.vsprops +0 -0
  530. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/ia32.vsprops +0 -0
  531. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/js2c.cmd +0 -0
  532. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/release.vsprops +0 -0
  533. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.sln +0 -0
  534. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8.vcproj +0 -0
  535. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.sln +0 -0
  536. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_arm.vcproj +0 -0
  537. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +1308 -0
  538. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +1238 -0
  539. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +1300 -0
  540. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  541. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  542. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  543. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  544. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  545. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  546. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  547. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  548. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  549. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  550. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  551. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  552. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  553. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  554. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  555. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.sln +0 -0
  556. data/ext/v8/upstream/{3.1.8 → v8}/tools/visual_studio/v8_x64.vcproj +0 -0
  557. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +18 -0
  558. data/ext/v8/upstream/{3.1.8 → v8}/tools/windows-tick-processor.bat +0 -0
  559. data/ext/v8/v8_callbacks.cpp +52 -92
  560. data/ext/v8/v8_date.cpp +2 -3
  561. data/ext/v8/v8_object.cpp +4 -0
  562. data/ext/v8/v8_template.cpp +2 -2
  563. data/ext/v8/v8_try_catch.cpp +8 -38
  564. data/lib/v8/version.rb +1 -1
  565. data/spec/ext/ext_spec_helper.rb +2 -20
  566. data/spec/ext/object_spec.rb +0 -12
  567. data/spec/ext/try_catch_spec.rb +29 -1
  568. data/spec/spec_helper.rb +1 -0
  569. data/spec/v8/portal/proxies_spec.rb +1 -84
  570. data/specmem/handle_memspec.rb +41 -0
  571. data/specmem/object_memspec.rb +16 -0
  572. data/specmem/proxies_memspec.rb +86 -0
  573. data/specmem/spec_helper.rb +24 -0
  574. data/therubyracer.gemspec +7 -2
  575. metadata +564 -541
  576. data/ext/v8/upstream/3.1.8/.gitignore +0 -31
  577. data/ext/v8/upstream/3.1.8/AUTHORS +0 -40
  578. data/ext/v8/upstream/3.1.8/ChangeLog +0 -2566
  579. data/ext/v8/upstream/3.1.8/SConstruct +0 -1192
  580. data/ext/v8/upstream/3.1.8/include/v8-debug.h +0 -384
  581. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +0 -116
  582. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +0 -426
  583. data/ext/v8/upstream/3.1.8/include/v8-testing.h +0 -99
  584. data/ext/v8/upstream/3.1.8/include/v8.h +0 -3846
  585. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +0 -206
  586. data/ext/v8/upstream/3.1.8/src/SConscript +0 -356
  587. data/ext/v8/upstream/3.1.8/src/accessors.cc +0 -907
  588. data/ext/v8/upstream/3.1.8/src/allocation.cc +0 -204
  589. data/ext/v8/upstream/3.1.8/src/allocation.h +0 -176
  590. data/ext/v8/upstream/3.1.8/src/api.cc +0 -5191
  591. data/ext/v8/upstream/3.1.8/src/api.h +0 -508
  592. data/ext/v8/upstream/3.1.8/src/apiutils.h +0 -80
  593. data/ext/v8/upstream/3.1.8/src/arguments.h +0 -105
  594. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +0 -352
  595. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +0 -2756
  596. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +0 -1294
  597. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +0 -1628
  598. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +0 -6783
  599. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +0 -657
  600. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +0 -7403
  601. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +0 -595
  602. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +0 -769
  603. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +0 -147
  604. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +0 -315
  605. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +0 -700
  606. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +0 -1439
  607. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +0 -168
  608. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +0 -4230
  609. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +0 -1799
  610. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +0 -2041
  611. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +0 -2046
  612. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +0 -3822
  613. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +0 -312
  614. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +0 -303
  615. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +0 -2701
  616. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +0 -1015
  617. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +0 -1280
  618. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +0 -252
  619. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +0 -3165
  620. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +0 -402
  621. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +0 -4077
  622. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +0 -843
  623. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +0 -520
  624. data/ext/v8/upstream/3.1.8/src/array.js +0 -1231
  625. data/ext/v8/upstream/3.1.8/src/assembler.cc +0 -973
  626. data/ext/v8/upstream/3.1.8/src/assembler.h +0 -787
  627. data/ext/v8/upstream/3.1.8/src/ast-inl.h +0 -107
  628. data/ext/v8/upstream/3.1.8/src/ast.cc +0 -1067
  629. data/ext/v8/upstream/3.1.8/src/ast.h +0 -2177
  630. data/ext/v8/upstream/3.1.8/src/atomicops.h +0 -165
  631. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +0 -1888
  632. data/ext/v8/upstream/3.1.8/src/bootstrapper.h +0 -118
  633. data/ext/v8/upstream/3.1.8/src/builtins.cc +0 -1586
  634. data/ext/v8/upstream/3.1.8/src/builtins.h +0 -339
  635. data/ext/v8/upstream/3.1.8/src/checks.cc +0 -110
  636. data/ext/v8/upstream/3.1.8/src/checks.h +0 -292
  637. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +0 -230
  638. data/ext/v8/upstream/3.1.8/src/code-stubs.h +0 -950
  639. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +0 -64
  640. data/ext/v8/upstream/3.1.8/src/codegen.cc +0 -495
  641. data/ext/v8/upstream/3.1.8/src/codegen.h +0 -245
  642. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +0 -654
  643. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +0 -112
  644. data/ext/v8/upstream/3.1.8/src/compiler.cc +0 -806
  645. data/ext/v8/upstream/3.1.8/src/compiler.h +0 -290
  646. data/ext/v8/upstream/3.1.8/src/contexts.cc +0 -320
  647. data/ext/v8/upstream/3.1.8/src/contexts.h +0 -376
  648. data/ext/v8/upstream/3.1.8/src/conversions.cc +0 -1069
  649. data/ext/v8/upstream/3.1.8/src/counters.cc +0 -78
  650. data/ext/v8/upstream/3.1.8/src/counters.h +0 -242
  651. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +0 -100
  652. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +0 -554
  653. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +0 -291
  654. data/ext/v8/upstream/3.1.8/src/cpu.h +0 -65
  655. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +0 -367
  656. data/ext/v8/upstream/3.1.8/src/d8-debug.h +0 -157
  657. data/ext/v8/upstream/3.1.8/src/d8-posix.cc +0 -693
  658. data/ext/v8/upstream/3.1.8/src/d8.cc +0 -792
  659. data/ext/v8/upstream/3.1.8/src/d8.gyp +0 -85
  660. data/ext/v8/upstream/3.1.8/src/data-flow.h +0 -379
  661. data/ext/v8/upstream/3.1.8/src/dateparser.h +0 -263
  662. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +0 -446
  663. data/ext/v8/upstream/3.1.8/src/debug-agent.h +0 -131
  664. data/ext/v8/upstream/3.1.8/src/debug.cc +0 -3085
  665. data/ext/v8/upstream/3.1.8/src/debug.h +0 -1025
  666. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +0 -1185
  667. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +0 -529
  668. data/ext/v8/upstream/3.1.8/src/disasm.h +0 -77
  669. data/ext/v8/upstream/3.1.8/src/disassembler.cc +0 -338
  670. data/ext/v8/upstream/3.1.8/src/execution.cc +0 -735
  671. data/ext/v8/upstream/3.1.8/src/execution.h +0 -322
  672. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +0 -53
  673. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +0 -264
  674. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +0 -141
  675. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +0 -58
  676. data/ext/v8/upstream/3.1.8/src/factory.cc +0 -1087
  677. data/ext/v8/upstream/3.1.8/src/factory.h +0 -432
  678. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +0 -552
  679. data/ext/v8/upstream/3.1.8/src/frame-element.cc +0 -42
  680. data/ext/v8/upstream/3.1.8/src/frame-element.h +0 -277
  681. data/ext/v8/upstream/3.1.8/src/frames-inl.h +0 -210
  682. data/ext/v8/upstream/3.1.8/src/frames.cc +0 -1232
  683. data/ext/v8/upstream/3.1.8/src/frames.h +0 -826
  684. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +0 -1382
  685. data/ext/v8/upstream/3.1.8/src/full-codegen.h +0 -751
  686. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +0 -90
  687. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +0 -111
  688. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +0 -1547
  689. data/ext/v8/upstream/3.1.8/src/global-handles.cc +0 -534
  690. data/ext/v8/upstream/3.1.8/src/global-handles.h +0 -181
  691. data/ext/v8/upstream/3.1.8/src/globals.h +0 -325
  692. data/ext/v8/upstream/3.1.8/src/handles-inl.h +0 -80
  693. data/ext/v8/upstream/3.1.8/src/handles.cc +0 -910
  694. data/ext/v8/upstream/3.1.8/src/handles.h +0 -424
  695. data/ext/v8/upstream/3.1.8/src/hashmap.h +0 -121
  696. data/ext/v8/upstream/3.1.8/src/heap-inl.h +0 -587
  697. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +0 -1128
  698. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +0 -381
  699. data/ext/v8/upstream/3.1.8/src/heap.cc +0 -5610
  700. data/ext/v8/upstream/3.1.8/src/heap.h +0 -2218
  701. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +0 -1490
  702. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +0 -3493
  703. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +0 -6056
  704. data/ext/v8/upstream/3.1.8/src/hydrogen.h +0 -1091
  705. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +0 -429
  706. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +0 -2800
  707. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +0 -1093
  708. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +0 -1590
  709. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +0 -6624
  710. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +0 -536
  711. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +0 -10354
  712. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +0 -798
  713. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +0 -87
  714. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +0 -309
  715. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +0 -664
  716. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +0 -1597
  717. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +0 -140
  718. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +0 -4278
  719. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +0 -1786
  720. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +0 -3880
  721. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +0 -309
  722. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +0 -460
  723. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +0 -2095
  724. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +0 -2127
  725. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +0 -2031
  726. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +0 -798
  727. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1253
  728. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +0 -215
  729. data/ext/v8/upstream/3.1.8/src/ia32/register-allocator-ia32.cc +0 -157
  730. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +0 -72
  731. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +0 -3732
  732. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +0 -1360
  733. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +0 -646
  734. data/ext/v8/upstream/3.1.8/src/ic-inl.h +0 -129
  735. data/ext/v8/upstream/3.1.8/src/ic.cc +0 -2333
  736. data/ext/v8/upstream/3.1.8/src/ic.h +0 -639
  737. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +0 -655
  738. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.h +0 -48
  739. data/ext/v8/upstream/3.1.8/src/json.js +0 -342
  740. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +0 -5340
  741. data/ext/v8/upstream/3.1.8/src/jsregexp.h +0 -1484
  742. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +0 -430
  743. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +0 -244
  744. data/ext/v8/upstream/3.1.8/src/jump-target-inl.h +0 -48
  745. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +0 -111
  746. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +0 -140
  747. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +0 -2093
  748. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +0 -644
  749. data/ext/v8/upstream/3.1.8/src/lithium.cc +0 -168
  750. data/ext/v8/upstream/3.1.8/src/liveedit.cc +0 -1650
  751. data/ext/v8/upstream/3.1.8/src/liveedit.h +0 -174
  752. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +0 -2527
  753. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +0 -322
  754. data/ext/v8/upstream/3.1.8/src/log-utils.cc +0 -336
  755. data/ext/v8/upstream/3.1.8/src/log-utils.h +0 -232
  756. data/ext/v8/upstream/3.1.8/src/log.cc +0 -1608
  757. data/ext/v8/upstream/3.1.8/src/log.h +0 -379
  758. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +0 -2957
  759. data/ext/v8/upstream/3.1.8/src/mark-compact.h +0 -433
  760. data/ext/v8/upstream/3.1.8/src/messages.cc +0 -164
  761. data/ext/v8/upstream/3.1.8/src/messages.js +0 -1071
  762. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips-inl.h +0 -215
  763. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.cc +0 -1219
  764. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +0 -667
  765. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +0 -205
  766. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips-inl.h +0 -70
  767. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.cc +0 -1437
  768. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +0 -431
  769. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.cc +0 -328
  770. data/ext/v8/upstream/3.1.8/src/mips/constants-mips.h +0 -525
  771. data/ext/v8/upstream/3.1.8/src/mips/cpu-mips.cc +0 -73
  772. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +0 -127
  773. data/ext/v8/upstream/3.1.8/src/mips/disasm-mips.cc +0 -787
  774. data/ext/v8/upstream/3.1.8/src/mips/fast-codegen-mips.cc +0 -77
  775. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +0 -96
  776. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.h +0 -164
  777. data/ext/v8/upstream/3.1.8/src/mips/full-codegen-mips.cc +0 -277
  778. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +0 -208
  779. data/ext/v8/upstream/3.1.8/src/mips/jump-target-mips.cc +0 -175
  780. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.cc +0 -1326
  781. data/ext/v8/upstream/3.1.8/src/mips/macro-assembler-mips.h +0 -461
  782. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips-inl.h +0 -137
  783. data/ext/v8/upstream/3.1.8/src/mips/register-allocator-mips.h +0 -46
  784. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +0 -1650
  785. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +0 -311
  786. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +0 -418
  787. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.cc +0 -319
  788. data/ext/v8/upstream/3.1.8/src/mips/virtual-frame-mips.h +0 -548
  789. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +0 -2380
  790. data/ext/v8/upstream/3.1.8/src/mksnapshot.cc +0 -256
  791. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +0 -722
  792. data/ext/v8/upstream/3.1.8/src/objects-inl.h +0 -3946
  793. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +0 -801
  794. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +0 -142
  795. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +0 -401
  796. data/ext/v8/upstream/3.1.8/src/objects.cc +0 -10044
  797. data/ext/v8/upstream/3.1.8/src/objects.h +0 -6571
  798. data/ext/v8/upstream/3.1.8/src/parser.cc +0 -5165
  799. data/ext/v8/upstream/3.1.8/src/parser.h +0 -802
  800. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +0 -745
  801. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +0 -702
  802. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +0 -981
  803. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +0 -732
  804. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +0 -498
  805. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +0 -657
  806. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +0 -399
  807. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +0 -714
  808. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +0 -1974
  809. data/ext/v8/upstream/3.1.8/src/platform.h +0 -636
  810. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +0 -183
  811. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +0 -213
  812. data/ext/v8/upstream/3.1.8/src/preparser.cc +0 -1205
  813. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +0 -1539
  814. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +0 -223
  815. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +0 -2899
  816. data/ext/v8/upstream/3.1.8/src/profile-generator.h +0 -1151
  817. data/ext/v8/upstream/3.1.8/src/property.cc +0 -96
  818. data/ext/v8/upstream/3.1.8/src/property.h +0 -337
  819. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +0 -470
  820. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +0 -257
  821. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +0 -231
  822. data/ext/v8/upstream/3.1.8/src/regexp-stack.cc +0 -103
  823. data/ext/v8/upstream/3.1.8/src/regexp-stack.h +0 -123
  824. data/ext/v8/upstream/3.1.8/src/regexp.js +0 -483
  825. data/ext/v8/upstream/3.1.8/src/register-allocator-inl.h +0 -141
  826. data/ext/v8/upstream/3.1.8/src/register-allocator.cc +0 -104
  827. data/ext/v8/upstream/3.1.8/src/register-allocator.h +0 -320
  828. data/ext/v8/upstream/3.1.8/src/rewriter.cc +0 -1023
  829. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +0 -443
  830. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +0 -77
  831. data/ext/v8/upstream/3.1.8/src/runtime.cc +0 -11592
  832. data/ext/v8/upstream/3.1.8/src/runtime.h +0 -582
  833. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +0 -253
  834. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +0 -263
  835. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +0 -971
  836. data/ext/v8/upstream/3.1.8/src/scanner-base.h +0 -653
  837. data/ext/v8/upstream/3.1.8/src/scanner.cc +0 -586
  838. data/ext/v8/upstream/3.1.8/src/scanner.h +0 -194
  839. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +0 -636
  840. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +0 -238
  841. data/ext/v8/upstream/3.1.8/src/scopes.cc +0 -1063
  842. data/ext/v8/upstream/3.1.8/src/scopes.h +0 -494
  843. data/ext/v8/upstream/3.1.8/src/serialize.cc +0 -1535
  844. data/ext/v8/upstream/3.1.8/src/serialize.h +0 -584
  845. data/ext/v8/upstream/3.1.8/src/snapshot-common.cc +0 -82
  846. data/ext/v8/upstream/3.1.8/src/snapshot.h +0 -71
  847. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +0 -524
  848. data/ext/v8/upstream/3.1.8/src/spaces.cc +0 -3254
  849. data/ext/v8/upstream/3.1.8/src/spaces.h +0 -2362
  850. data/ext/v8/upstream/3.1.8/src/string-search.cc +0 -40
  851. data/ext/v8/upstream/3.1.8/src/string-search.h +0 -567
  852. data/ext/v8/upstream/3.1.8/src/string-stream.cc +0 -584
  853. data/ext/v8/upstream/3.1.8/src/string.js +0 -915
  854. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +0 -1878
  855. data/ext/v8/upstream/3.1.8/src/stub-cache.h +0 -849
  856. data/ext/v8/upstream/3.1.8/src/token.cc +0 -63
  857. data/ext/v8/upstream/3.1.8/src/token.h +0 -288
  858. data/ext/v8/upstream/3.1.8/src/top.cc +0 -1152
  859. data/ext/v8/upstream/3.1.8/src/top.h +0 -608
  860. data/ext/v8/upstream/3.1.8/src/type-info.cc +0 -406
  861. data/ext/v8/upstream/3.1.8/src/type-info.h +0 -283
  862. data/ext/v8/upstream/3.1.8/src/unicode.cc +0 -1624
  863. data/ext/v8/upstream/3.1.8/src/unicode.h +0 -280
  864. data/ext/v8/upstream/3.1.8/src/utils.h +0 -793
  865. data/ext/v8/upstream/3.1.8/src/v8-counters.cc +0 -55
  866. data/ext/v8/upstream/3.1.8/src/v8-counters.h +0 -290
  867. data/ext/v8/upstream/3.1.8/src/v8.cc +0 -270
  868. data/ext/v8/upstream/3.1.8/src/v8.h +0 -127
  869. data/ext/v8/upstream/3.1.8/src/v8globals.h +0 -480
  870. data/ext/v8/upstream/3.1.8/src/v8natives.js +0 -1252
  871. data/ext/v8/upstream/3.1.8/src/v8threads.cc +0 -440
  872. data/ext/v8/upstream/3.1.8/src/v8threads.h +0 -157
  873. data/ext/v8/upstream/3.1.8/src/v8utils.h +0 -354
  874. data/ext/v8/upstream/3.1.8/src/variables.h +0 -212
  875. data/ext/v8/upstream/3.1.8/src/version.cc +0 -95
  876. data/ext/v8/upstream/3.1.8/src/version.h +0 -64
  877. data/ext/v8/upstream/3.1.8/src/virtual-frame-light-inl.h +0 -170
  878. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +0 -134
  879. data/ext/v8/upstream/3.1.8/src/vm-state.h +0 -68
  880. data/ext/v8/upstream/3.1.8/src/win32-headers.h +0 -95
  881. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +0 -455
  882. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +0 -3162
  883. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +0 -1584
  884. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +0 -1492
  885. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +0 -5150
  886. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +0 -519
  887. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +0 -8835
  888. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +0 -750
  889. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +0 -86
  890. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +0 -316
  891. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +0 -781
  892. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +0 -1737
  893. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +0 -130
  894. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +0 -3984
  895. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +0 -1761
  896. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +0 -3639
  897. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +0 -305
  898. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +0 -2044
  899. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +0 -2052
  900. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +0 -2660
  901. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +0 -1852
  902. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +0 -1382
  903. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +0 -278
  904. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64-inl.h +0 -87
  905. data/ext/v8/upstream/3.1.8/src/x64/register-allocator-x64.cc +0 -91
  906. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +0 -71
  907. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +0 -3509
  908. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +0 -1292
  909. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +0 -593
  910. data/ext/v8/upstream/3.1.8/src/zone-inl.h +0 -83
  911. data/ext/v8/upstream/3.1.8/src/zone.cc +0 -195
  912. data/ext/v8/upstream/3.1.8/src/zone.h +0 -233
  913. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +0 -869
  914. data/ext/v8/upstream/3.1.8/tools/linux-tick-processor +0 -33
  915. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +0 -863
  916. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +0 -1296
  917. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +0 -1234
  918. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +0 -1296
  919. data/ext/v8/upstream/3.1.8/tools/visual_studio/x64.vsprops +0 -17
  920. data/spec/ext/mem_spec.rb +0 -42
@@ -1,379 +0,0 @@
1
- // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #ifndef V8_LOG_H_
29
- #define V8_LOG_H_
30
-
31
- #include "platform.h"
32
- #include "log-utils.h"
33
-
34
- namespace v8 {
35
- namespace internal {
36
-
37
- // Logger is used for collecting logging information from V8 during
38
- // execution. The result is dumped to a file.
39
- //
40
- // Available command line flags:
41
- //
42
- // --log
43
- // Minimal logging (no API, code, or GC sample events), default is off.
44
- //
45
- // --log-all
46
- // Log all events to the file, default is off. This is the same as combining
47
- // --log-api, --log-code, --log-gc, and --log-regexp.
48
- //
49
- // --log-api
50
- // Log API events to the logfile, default is off. --log-api implies --log.
51
- //
52
- // --log-code
53
- // Log code (create, move, and delete) events to the logfile, default is off.
54
- // --log-code implies --log.
55
- //
56
- // --log-gc
57
- // Log GC heap samples after each GC that can be processed by hp2ps, default
58
- // is off. --log-gc implies --log.
59
- //
60
- // --log-regexp
61
- // Log creation and use of regular expressions, default is off.
62
- // --log-regexp implies --log.
63
- //
64
- // --logfile <filename>
65
- // Specify the name of the logfile, default is "v8.log".
66
- //
67
- // --prof
68
- // Collect statistical profiling information (ticks), default is off. The
69
- // tick profiler requires code events, so --prof implies --log-code.
70
-
71
- // Forward declarations.
72
- class Ticker;
73
- class Profiler;
74
- class Semaphore;
75
- class SlidingStateWindow;
76
- class LogMessageBuilder;
77
-
78
- #undef LOG
79
- #ifdef ENABLE_LOGGING_AND_PROFILING
80
- #define LOG(Call) \
81
- do { \
82
- if (v8::internal::Logger::is_logging()) \
83
- v8::internal::Logger::Call; \
84
- } while (false)
85
- #else
86
- #define LOG(Call) ((void) 0)
87
- #endif
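// A minimal usage sketch of the LOG macro defined above (hypothetical call
// site, not a line from this file): with ENABLE_LOGGING_AND_PROFILING set,
//
//   LOG(IntEvent("gc-count", 42));
//
// expands to a call to v8::internal::Logger::IntEvent guarded by
// Logger::is_logging(); in the other configuration it compiles away to
// ((void) 0). IntEvent is declared further down in this header.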
88
-
89
- #define LOG_EVENTS_AND_TAGS_LIST(V) \
90
- V(CODE_CREATION_EVENT, "code-creation") \
91
- V(CODE_MOVE_EVENT, "code-move") \
92
- V(CODE_DELETE_EVENT, "code-delete") \
93
- V(CODE_MOVING_GC, "code-moving-gc") \
94
- V(SFI_MOVE_EVENT, "sfi-move") \
95
- V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \
96
- V(TICK_EVENT, "tick") \
97
- V(REPEAT_META_EVENT, "repeat") \
98
- V(BUILTIN_TAG, "Builtin") \
99
- V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \
100
- V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \
101
- V(CALL_IC_TAG, "CallIC") \
102
- V(CALL_INITIALIZE_TAG, "CallInitialize") \
103
- V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \
104
- V(CALL_MISS_TAG, "CallMiss") \
105
- V(CALL_NORMAL_TAG, "CallNormal") \
106
- V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \
107
- V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \
108
- V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \
109
- "KeyedCallDebugPrepareStepIn") \
110
- V(KEYED_CALL_IC_TAG, "KeyedCallIC") \
111
- V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \
112
- V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \
113
- V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \
114
- V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \
115
- V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \
116
- V(CALLBACK_TAG, "Callback") \
117
- V(EVAL_TAG, "Eval") \
118
- V(FUNCTION_TAG, "Function") \
119
- V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
120
- V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
121
- V(LAZY_COMPILE_TAG, "LazyCompile") \
122
- V(LOAD_IC_TAG, "LoadIC") \
123
- V(REG_EXP_TAG, "RegExp") \
124
- V(SCRIPT_TAG, "Script") \
125
- V(STORE_IC_TAG, "StoreIC") \
126
- V(STUB_TAG, "Stub") \
127
- V(NATIVE_FUNCTION_TAG, "Function") \
128
- V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \
129
- V(NATIVE_SCRIPT_TAG, "Script")
130
- // Note that 'NATIVE_' cases for functions and scripts are mapped onto
131
- // original tags when writing to the log.
132
-
133
-
134
- class Logger {
135
- public:
136
- #define DECLARE_ENUM(enum_item, ignore) enum_item,
137
- enum LogEventsAndTags {
138
- LOG_EVENTS_AND_TAGS_LIST(DECLARE_ENUM)
139
- NUMBER_OF_LOG_EVENTS
140
- };
141
- #undef DECLARE_ENUM
142
-
143
- // Acquires resources for logging if the right flags are set.
144
- static bool Setup();
145
-
146
- static void EnsureTickerStarted();
147
- static void EnsureTickerStopped();
148
-
149
- // Frees resources acquired in Setup.
150
- static void TearDown();
151
-
152
- // Enable the computation of a sliding window of states.
153
- static void EnableSlidingStateWindow();
154
-
155
- // Emits an event with a string value -> (name, value).
156
- static void StringEvent(const char* name, const char* value);
157
-
158
- // Emits an event with an int value -> (name, value).
159
- static void IntEvent(const char* name, int value);
160
- static void IntPtrTEvent(const char* name, intptr_t value);
161
-
162
- // Emits an event with a handle value -> (name, location).
163
- static void HandleEvent(const char* name, Object** location);
164
-
165
- // Emits memory management events for C allocated structures.
166
- static void NewEvent(const char* name, void* object, size_t size);
167
- static void DeleteEvent(const char* name, void* object);
168
-
169
- // Emits an event with a tag, and some resource usage information.
170
- // -> (name, tag, <rusage information>).
171
- // Currently, the resource usage information is a process time stamp
172
- // and a real time timestamp.
173
- static void ResourceEvent(const char* name, const char* tag);
174
-
175
- // Emits an event that an undefined property was read from an
176
- // object.
177
- static void SuspectReadEvent(String* name, Object* obj);
178
-
179
- // Emits an event when a message is put on or read from a debugging queue.
180
- // DebugTag lets us put a call-site specific label on the event.
181
- static void DebugTag(const char* call_site_tag);
182
- static void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
183
-
184
-
185
- // ==== Events logged by --log-api. ====
186
- static void ApiNamedSecurityCheck(Object* key);
187
- static void ApiIndexedSecurityCheck(uint32_t index);
188
- static void ApiNamedPropertyAccess(const char* tag,
189
- JSObject* holder,
190
- Object* name);
191
- static void ApiIndexedPropertyAccess(const char* tag,
192
- JSObject* holder,
193
- uint32_t index);
194
- static void ApiObjectAccess(const char* tag, JSObject* obj);
195
- static void ApiEntryCall(const char* name);
196
-
197
-
198
- // ==== Events logged by --log-code. ====
199
- // Emits a code event for a callback function.
200
- static void CallbackEvent(String* name, Address entry_point);
201
- static void GetterCallbackEvent(String* name, Address entry_point);
202
- static void SetterCallbackEvent(String* name, Address entry_point);
203
- // Emits a code create event.
204
- static void CodeCreateEvent(LogEventsAndTags tag,
205
- Code* code, const char* source);
206
- static void CodeCreateEvent(LogEventsAndTags tag,
207
- Code* code, String* name);
208
- static void CodeCreateEvent(LogEventsAndTags tag,
209
- Code* code,
210
- SharedFunctionInfo* shared,
211
- String* name);
212
- static void CodeCreateEvent(LogEventsAndTags tag,
213
- Code* code,
214
- SharedFunctionInfo* shared,
215
- String* source, int line);
216
- static void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
217
- static void CodeMovingGCEvent();
218
- // Emits a code create event for a RegExp.
219
- static void RegExpCodeCreateEvent(Code* code, String* source);
220
- // Emits a code move event.
221
- static void CodeMoveEvent(Address from, Address to);
222
- // Emits a code delete event.
223
- static void CodeDeleteEvent(Address from);
224
-
225
- static void SFIMoveEvent(Address from, Address to);
226
-
227
- static void SnapshotPositionEvent(Address addr, int pos);
228
-
229
- // ==== Events logged by --log-gc. ====
230
- // Heap sampling events: start, end, and individual types.
231
- static void HeapSampleBeginEvent(const char* space, const char* kind);
232
- static void HeapSampleEndEvent(const char* space, const char* kind);
233
- static void HeapSampleItemEvent(const char* type, int number, int bytes);
234
- static void HeapSampleJSConstructorEvent(const char* constructor,
235
- int number, int bytes);
236
- static void HeapSampleJSRetainersEvent(const char* constructor,
237
- const char* event);
238
- static void HeapSampleJSProducerEvent(const char* constructor,
239
- Address* stack);
240
- static void HeapSampleStats(const char* space, const char* kind,
241
- intptr_t capacity, intptr_t used);
242
-
243
- static void SharedLibraryEvent(const char* library_path,
244
- uintptr_t start,
245
- uintptr_t end);
246
- static void SharedLibraryEvent(const wchar_t* library_path,
247
- uintptr_t start,
248
- uintptr_t end);
249
-
250
- // ==== Events logged by --log-regexp ====
251
- // Regexp compilation and execution events.
252
-
253
- static void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
254
-
255
- // Log an event reported from generated code
256
- static void LogRuntime(Vector<const char> format, JSArray* args);
257
-
258
- #ifdef ENABLE_LOGGING_AND_PROFILING
259
- static bool is_logging() {
260
- return logging_nesting_ > 0;
261
- }
262
-
263
- // Pause/Resume collection of profiling data.
264
- // When data collection is paused, CPU Tick events are discarded until
265
- // data collection is resumed.
266
- static void PauseProfiler(int flags, int tag);
267
- static void ResumeProfiler(int flags, int tag);
268
- static int GetActiveProfilerModules();
269
-
270
- // If logging is performed into a memory buffer, this allows
271
- // retrieving previously written messages. See v8.h.
272
- static int GetLogLines(int from_pos, char* dest_buf, int max_size);
273
-
274
- // Logs all compiled functions found in the heap.
275
- static void LogCompiledFunctions();
276
- // Logs all accessor callbacks found in the heap.
277
- static void LogAccessorCallbacks();
278
- // Used for logging stubs found in the snapshot.
279
- static void LogCodeObjects();
280
-
281
- // Converts tag to a corresponding NATIVE_... if the script is native.
282
- INLINE(static LogEventsAndTags ToNativeByScript(LogEventsAndTags, Script*));
283
-
284
- // Profiler's sampling interval (in milliseconds).
285
- static const int kSamplingIntervalMs = 1;
286
-
287
- private:
288
-
289
- // Emits the profiler's first message.
290
- static void ProfilerBeginEvent();
291
-
292
- // Emits callback event messages.
293
- static void CallbackEventInternal(const char* prefix,
294
- const char* name,
295
- Address entry_point);
296
-
297
- // Internal configurable move event.
298
- static void MoveEventInternal(LogEventsAndTags event,
299
- Address from,
300
- Address to);
301
-
302
- // Internal configurable delete event.
303
- static void DeleteEventInternal(LogEventsAndTags event,
304
- Address from);
305
-
306
- // Emits the source code of a regexp. Used by regexp events.
307
- static void LogRegExpSource(Handle<JSRegExp> regexp);
308
-
309
- // Used for logging stubs found in the snapshot.
310
- static void LogCodeObject(Object* code_object);
311
-
312
- // Emits general information about generated code.
313
- static void LogCodeInfo();
314
-
315
- // Handles code creation when low-level profiling is active.
316
- static void LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg);
317
-
318
- // Emits a profiler tick event. Used by the profiler thread.
319
- static void TickEvent(TickSample* sample, bool overflow);
320
-
321
- static void ApiEvent(const char* name, ...);
322
-
323
- // Logs a StringEvent regardless of whether FLAG_log is true.
324
- static void UncheckedStringEvent(const char* name, const char* value);
325
-
326
- // Logs an IntEvent regardless of whether FLAG_log is true.
327
- static void UncheckedIntEvent(const char* name, int value);
328
- static void UncheckedIntPtrTEvent(const char* name, intptr_t value);
329
-
330
- // Stops logging and profiling in case of insufficient resources.
331
- static void StopLoggingAndProfiling();
332
-
333
- // Returns whether profiler's sampler is active.
334
- static bool IsProfilerSamplerActive();
335
-
336
- // The sampler used by the profiler and the sliding state window.
337
- static Ticker* ticker_;
338
-
339
- // When the statistical profile is active, profiler_
340
- // points to a Profiler that handles collection
341
- // of samples.
342
- static Profiler* profiler_;
343
-
344
- // SlidingStateWindow instance keeping a sliding window of the most
345
- // recent VM states.
346
- static SlidingStateWindow* sliding_state_window_;
347
-
348
- // Internal implementation classes with access to
349
- // private members.
350
- friend class EventLog;
351
- friend class TimeLog;
352
- friend class Profiler;
353
- friend class SlidingStateWindow;
354
- friend class StackTracer;
355
- friend class VMState;
356
-
357
- friend class LoggerTestHelper;
358
-
359
- static int logging_nesting_;
360
- static int cpu_profiler_nesting_;
361
- static int heap_profiler_nesting_;
362
-
363
- friend class CpuProfiler;
364
- #else
365
- static bool is_logging() { return false; }
366
- #endif
367
- };
368
-
369
-
370
- // Class that extracts stack trace, used for profiling.
371
- class StackTracer : public AllStatic {
372
- public:
373
- static void Trace(TickSample* sample);
374
- };
375
-
376
- } } // namespace v8::internal
377
-
378
-
379
- #endif // V8_LOG_H_
@@ -1,2957 +0,0 @@
1
- // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #include "compilation-cache.h"
31
- #include "execution.h"
32
- #include "heap-profiler.h"
33
- #include "gdb-jit.h"
34
- #include "global-handles.h"
35
- #include "ic-inl.h"
36
- #include "liveobjectlist-inl.h"
37
- #include "mark-compact.h"
38
- #include "objects-visiting.h"
39
- #include "stub-cache.h"
40
-
41
- namespace v8 {
42
- namespace internal {
43
-
44
- // -------------------------------------------------------------------------
45
- // MarkCompactCollector
46
-
47
- bool MarkCompactCollector::force_compaction_ = false;
48
- bool MarkCompactCollector::compacting_collection_ = false;
49
- bool MarkCompactCollector::compact_on_next_gc_ = false;
50
-
51
- int MarkCompactCollector::previous_marked_count_ = 0;
52
- GCTracer* MarkCompactCollector::tracer_ = NULL;
53
-
54
-
55
- #ifdef DEBUG
56
- MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
57
-
58
- // Counters used for debugging the marking phase of mark-compact or mark-sweep
59
- // collection.
60
- int MarkCompactCollector::live_bytes_ = 0;
61
- int MarkCompactCollector::live_young_objects_size_ = 0;
62
- int MarkCompactCollector::live_old_data_objects_size_ = 0;
63
- int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
64
- int MarkCompactCollector::live_code_objects_size_ = 0;
65
- int MarkCompactCollector::live_map_objects_size_ = 0;
66
- int MarkCompactCollector::live_cell_objects_size_ = 0;
67
- int MarkCompactCollector::live_lo_objects_size_ = 0;
68
- #endif
69
-
70
-
71
- void MarkCompactCollector::CollectGarbage() {
72
- // Make sure that Prepare() has been called. The individual steps below will
73
- // update the state as they proceed.
74
- ASSERT(state_ == PREPARE_GC);
75
-
76
- // Prepare has selected whether to compact the old generation or not.
77
- // Tell the tracer.
78
- if (IsCompacting()) tracer_->set_is_compacting();
79
-
80
- MarkLiveObjects();
81
-
82
- if (FLAG_collect_maps) ClearNonLiveTransitions();
83
-
84
- SweepLargeObjectSpace();
85
-
86
- if (IsCompacting()) {
87
- GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
88
- EncodeForwardingAddresses();
89
-
90
- Heap::MarkMapPointersAsEncoded(true);
91
- UpdatePointers();
92
- Heap::MarkMapPointersAsEncoded(false);
93
- PcToCodeCache::FlushPcToCodeCache();
94
-
95
- RelocateObjects();
96
- } else {
97
- SweepSpaces();
98
- PcToCodeCache::FlushPcToCodeCache();
99
- }
100
-
101
- Finish();
102
-
103
- // Save the count of marked objects remaining after the collection and
104
- // null out the GC tracer.
105
- previous_marked_count_ = tracer_->marked_count();
106
- ASSERT(previous_marked_count_ == 0);
107
- tracer_ = NULL;
108
- }
109
-
110
-
111
- void MarkCompactCollector::Prepare(GCTracer* tracer) {
112
- // Rather than passing the tracer around we stash it in a static member
113
- // variable.
114
- tracer_ = tracer;
115
-
116
- #ifdef DEBUG
117
- ASSERT(state_ == IDLE);
118
- state_ = PREPARE_GC;
119
- #endif
120
- ASSERT(!FLAG_always_compact || !FLAG_never_compact);
121
-
122
- compacting_collection_ =
123
- FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
124
- compact_on_next_gc_ = false;
125
-
126
- if (FLAG_never_compact) compacting_collection_ = false;
127
- if (!Heap::map_space()->MapPointersEncodable())
128
- compacting_collection_ = false;
129
- if (FLAG_collect_maps) CreateBackPointers();
130
- #ifdef ENABLE_GDB_JIT_INTERFACE
131
- if (FLAG_gdbjit) {
132
- // If GDBJIT interface is active disable compaction.
133
- compacting_collection_ = false;
134
- }
135
- #endif
136
-
137
- PagedSpaces spaces;
138
- for (PagedSpace* space = spaces.next();
139
- space != NULL; space = spaces.next()) {
140
- space->PrepareForMarkCompact(compacting_collection_);
141
- }
142
-
143
- #ifdef DEBUG
144
- live_bytes_ = 0;
145
- live_young_objects_size_ = 0;
146
- live_old_pointer_objects_size_ = 0;
147
- live_old_data_objects_size_ = 0;
148
- live_code_objects_size_ = 0;
149
- live_map_objects_size_ = 0;
150
- live_cell_objects_size_ = 0;
151
- live_lo_objects_size_ = 0;
152
- #endif
153
- }
154
-
155
-
156
- void MarkCompactCollector::Finish() {
157
- #ifdef DEBUG
158
- ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
159
- state_ = IDLE;
160
- #endif
161
- // The stub cache is not traversed during GC; clear the cache to
162
- // force lazy re-initialization of it. This must be done after the
163
- // GC, because it relies on the new address of certain old space
164
- // objects (empty string, illegal builtin).
165
- StubCache::Clear();
166
-
167
- ExternalStringTable::CleanUp();
168
-
169
- // If we've just compacted old space there's no reason to check the
170
- // fragmentation limit. Just return.
171
- if (HasCompacted()) return;
172
-
173
- // We compact the old generation on the next GC if it has gotten too
174
- // fragmented (ie, we could recover an expected amount of space by
175
- // reclaiming the waste and free list blocks).
176
- static const int kFragmentationLimit = 15; // Percent.
177
- static const int kFragmentationAllowed = 1 * MB; // Absolute.
178
- intptr_t old_gen_recoverable = 0;
179
- intptr_t old_gen_used = 0;
180
-
181
- OldSpaces spaces;
182
- for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
183
- old_gen_recoverable += space->Waste() + space->AvailableFree();
184
- old_gen_used += space->Size();
185
- }
186
-
187
- int old_gen_fragmentation =
188
- static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
189
- if (old_gen_fragmentation > kFragmentationLimit &&
190
- old_gen_recoverable > kFragmentationAllowed) {
191
- compact_on_next_gc_ = true;
192
- }
193
- }
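// A worked example of the fragmentation heuristic above (illustrative
// numbers, not taken from this diff): with old_gen_used = 40 MB and
// old_gen_recoverable = 8 MB, old_gen_fragmentation = 8 * 100 / 40 = 20,
// which exceeds kFragmentationLimit (15), and 8 MB also exceeds
// kFragmentationAllowed (1 MB), so compact_on_next_gc_ is set. With only
// 0.5 MB recoverable out of 2 MB the percentage is even higher (25) but the
// absolute threshold is not met, so no compaction is scheduled.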
194
-
195
-
196
- // -------------------------------------------------------------------------
197
- // Phase 1: tracing and marking live objects.
198
- // before: all objects are in normal state.
199
- // after: a live object's map pointer is marked as '00'.
200
-
201
- // Marking all live objects in the heap as part of mark-sweep or mark-compact
202
- // collection. Before marking, all objects are in their normal state. After
203
- // marking, live objects' map pointers are marked indicating that the object
204
- // has been found reachable.
205
- //
206
- // The marking algorithm is a (mostly) depth-first (because of possible stack
207
- // overflow) traversal of the graph of objects reachable from the roots. It
208
- // uses an explicit stack of pointers rather than recursion. The young
209
- // generation's inactive ('from') space is used as a marking stack. The
210
- // objects in the marking stack are the ones that have been reached and marked
211
- // but their children have not yet been visited.
212
- //
213
- // The marking stack can overflow during traversal. In that case, we set an
214
- // overflow flag. When the overflow flag is set, we continue marking objects
215
- // reachable from the objects on the marking stack, but no longer push them on
216
- // the marking stack. Instead, we mark them as both marked and overflowed.
217
- // When the stack is in the overflowed state, objects marked as overflowed
218
- // have been reached and marked but their children have not been visited yet.
219
- // After emptying the marking stack, we clear the overflow flag and traverse
220
- // the heap looking for objects marked as overflowed, push them on the stack,
221
- // and continue with marking. This process repeats until all reachable
222
- // objects have been marked.
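// A compact sketch of the drain loop this describes (it is the same
// pattern used by MarkRoots further down, repeated here only to summarize
// the comment above):
//
//   EmptyMarkingStack();                 // may leave overflowed objects
//   while (marking_stack.overflowed()) {
//     RefillMarkingStack();              // rescan heap, re-push overflowed objects
//     EmptyMarkingStack();
//   }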
223
-
224
- static MarkingStack marking_stack;
225
-
226
- class FlushCode : public AllStatic {
227
- public:
228
- static void AddCandidate(SharedFunctionInfo* shared_info) {
229
- SetNextCandidate(shared_info, shared_function_info_candidates_head_);
230
- shared_function_info_candidates_head_ = shared_info;
231
- }
232
-
233
-
234
- static void AddCandidate(JSFunction* function) {
235
- ASSERT(function->unchecked_code() ==
236
- function->unchecked_shared()->unchecked_code());
237
-
238
- SetNextCandidate(function, jsfunction_candidates_head_);
239
- jsfunction_candidates_head_ = function;
240
- }
241
-
242
-
243
- static void ProcessCandidates() {
244
- ProcessSharedFunctionInfoCandidates();
245
- ProcessJSFunctionCandidates();
246
- }
247
-
248
- private:
249
- static void ProcessJSFunctionCandidates() {
250
- Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
251
-
252
- JSFunction* candidate = jsfunction_candidates_head_;
253
- JSFunction* next_candidate;
254
- while (candidate != NULL) {
255
- next_candidate = GetNextCandidate(candidate);
256
-
257
- SharedFunctionInfo* shared = candidate->unchecked_shared();
258
-
259
- Code* code = shared->unchecked_code();
260
- if (!code->IsMarked()) {
261
- shared->set_code(lazy_compile);
262
- candidate->set_code(lazy_compile);
263
- } else {
264
- candidate->set_code(shared->unchecked_code());
265
- }
266
-
267
- candidate = next_candidate;
268
- }
269
-
270
- jsfunction_candidates_head_ = NULL;
271
- }
272
-
273
-
274
- static void ProcessSharedFunctionInfoCandidates() {
275
- Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
276
-
277
- SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
278
- SharedFunctionInfo* next_candidate;
279
- while (candidate != NULL) {
280
- next_candidate = GetNextCandidate(candidate);
281
- SetNextCandidate(candidate, NULL);
282
-
283
- Code* code = candidate->unchecked_code();
284
- if (!code->IsMarked()) {
285
- candidate->set_code(lazy_compile);
286
- }
287
-
288
- candidate = next_candidate;
289
- }
290
-
291
- shared_function_info_candidates_head_ = NULL;
292
- }
293
-
294
-
295
- static JSFunction** GetNextCandidateField(JSFunction* candidate) {
296
- return reinterpret_cast<JSFunction**>(
297
- candidate->address() + JSFunction::kCodeEntryOffset);
298
- }
299
-
300
-
301
- static JSFunction* GetNextCandidate(JSFunction* candidate) {
302
- return *GetNextCandidateField(candidate);
303
- }
304
-
305
-
306
- static void SetNextCandidate(JSFunction* candidate,
307
- JSFunction* next_candidate) {
308
- *GetNextCandidateField(candidate) = next_candidate;
309
- }
310
-
311
-
312
- STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
313
-
314
-
315
- static SharedFunctionInfo** GetNextCandidateField(
316
- SharedFunctionInfo* candidate) {
317
- Code* code = candidate->unchecked_code();
318
- return reinterpret_cast<SharedFunctionInfo**>(
319
- code->address() + Code::kHeaderPaddingStart);
320
- }
321
-
322
-
323
- static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
324
- return *GetNextCandidateField(candidate);
325
- }
326
-
327
-
328
- static void SetNextCandidate(SharedFunctionInfo* candidate,
329
- SharedFunctionInfo* next_candidate) {
330
- *GetNextCandidateField(candidate) = next_candidate;
331
- }
332
-
333
- static JSFunction* jsfunction_candidates_head_;
334
-
335
- static SharedFunctionInfo* shared_function_info_candidates_head_;
336
- };
337
-
338
- JSFunction* FlushCode::jsfunction_candidates_head_ = NULL;
339
-
340
- SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL;
341
-
342
- static inline HeapObject* ShortCircuitConsString(Object** p) {
343
- // Optimization: If the heap object pointed to by p is a non-symbol
344
- // cons string whose right substring is Heap::empty_string, update
345
- // it in place to its left substring. Return the updated value.
346
- //
347
- // Here we assume that if we change *p, we replace it with a heap object
348
- // (ie, the left substring of a cons string is always a heap object).
349
- //
350
- // The check performed is:
351
- // object->IsConsString() && !object->IsSymbol() &&
352
- // (ConsString::cast(object)->second() == Heap::empty_string())
353
- // except the maps for the object and its possible substrings might be
354
- // marked.
355
- HeapObject* object = HeapObject::cast(*p);
356
- MapWord map_word = object->map_word();
357
- map_word.ClearMark();
358
- InstanceType type = map_word.ToMap()->instance_type();
359
- if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
360
-
361
- Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
362
- if (second != Heap::raw_unchecked_empty_string()) {
363
- return object;
364
- }
365
-
366
- // Since we don't have the object's start, it is impossible to update the
367
- // page dirty marks. Therefore, we only replace the string with its left
368
- // substring when page dirty marks do not change.
369
- Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
370
- if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;
371
-
372
- *p = first;
373
- return HeapObject::cast(first);
374
- }
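// Illustration of the short-circuit above (conceptual example, not code
// from this file): a cons string whose second half is Heap::empty_string,
// e.g. the result of concatenating "foo" with "", carries exactly the
// characters of its first half, so the visited slot is rewritten to point
// at "foo" directly and later marking work skips the wrapper.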
375
-
376
-
377
- class StaticMarkingVisitor : public StaticVisitorBase {
378
- public:
379
- static inline void IterateBody(Map* map, HeapObject* obj) {
380
- table_.GetVisitor(map)(map, obj);
381
- }
382
-
383
- static void EnableCodeFlushing(bool enabled) {
384
- if (enabled) {
385
- table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
386
- table_.Register(kVisitSharedFunctionInfo,
387
- &VisitSharedFunctionInfoAndFlushCode);
388
-
389
- } else {
390
- table_.Register(kVisitJSFunction, &VisitJSFunction);
391
- table_.Register(kVisitSharedFunctionInfo,
392
- &VisitSharedFunctionInfoGeneric);
393
- }
394
- }
395
-
396
- static void Initialize() {
397
- table_.Register(kVisitShortcutCandidate,
398
- &FixedBodyVisitor<StaticMarkingVisitor,
399
- ConsString::BodyDescriptor,
400
- void>::Visit);
401
-
402
- table_.Register(kVisitConsString,
403
- &FixedBodyVisitor<StaticMarkingVisitor,
404
- ConsString::BodyDescriptor,
405
- void>::Visit);
406
-
407
-
408
- table_.Register(kVisitFixedArray,
409
- &FlexibleBodyVisitor<StaticMarkingVisitor,
410
- FixedArray::BodyDescriptor,
411
- void>::Visit);
412
-
413
- table_.Register(kVisitGlobalContext,
414
- &FixedBodyVisitor<StaticMarkingVisitor,
415
- Context::MarkCompactBodyDescriptor,
416
- void>::Visit);
417
-
418
- table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
419
- table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
420
- table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
421
-
422
- table_.Register(kVisitOddball,
423
- &FixedBodyVisitor<StaticMarkingVisitor,
424
- Oddball::BodyDescriptor,
425
- void>::Visit);
426
- table_.Register(kVisitMap,
427
- &FixedBodyVisitor<StaticMarkingVisitor,
428
- Map::BodyDescriptor,
429
- void>::Visit);
430
-
431
- table_.Register(kVisitCode, &VisitCode);
432
-
433
- table_.Register(kVisitSharedFunctionInfo,
434
- &VisitSharedFunctionInfoAndFlushCode);
435
-
436
- table_.Register(kVisitJSFunction,
437
- &VisitJSFunctionAndFlushCode);
438
-
439
- table_.Register(kVisitPropertyCell,
440
- &FixedBodyVisitor<StaticMarkingVisitor,
441
- JSGlobalPropertyCell::BodyDescriptor,
442
- void>::Visit);
443
-
444
- table_.RegisterSpecializations<DataObjectVisitor,
445
- kVisitDataObject,
446
- kVisitDataObjectGeneric>();
447
-
448
- table_.RegisterSpecializations<JSObjectVisitor,
449
- kVisitJSObject,
450
- kVisitJSObjectGeneric>();
451
-
452
- table_.RegisterSpecializations<StructObjectVisitor,
453
- kVisitStruct,
454
- kVisitStructGeneric>();
455
- }
456
-
457
- INLINE(static void VisitPointer(Object** p)) {
458
- MarkObjectByPointer(p);
459
- }
460
-
461
- INLINE(static void VisitPointers(Object** start, Object** end)) {
462
- // Mark all objects pointed to in [start, end).
463
- const int kMinRangeForMarkingRecursion = 64;
464
- if (end - start >= kMinRangeForMarkingRecursion) {
465
- if (VisitUnmarkedObjects(start, end)) return;
466
- // We are close to a stack overflow, so just mark the objects.
467
- }
468
- for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
469
- }
470
-
471
- static inline void VisitCodeTarget(RelocInfo* rinfo) {
472
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
473
- Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
474
- if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
475
- IC::Clear(rinfo->pc());
476
- // Please note targets for cleared inline caches do not have to be
477
- // marked since they are contained in Heap::non_monomorphic_cache().
478
- } else {
479
- MarkCompactCollector::MarkObject(code);
480
- }
481
- }
482
-
483
- static void VisitGlobalPropertyCell(RelocInfo* rinfo) {
484
- ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
485
- Object* cell = rinfo->target_cell();
486
- Object* old_cell = cell;
487
- VisitPointer(&cell);
488
- if (cell != old_cell) {
489
- rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
490
- }
491
- }
492
-
493
- static inline void VisitDebugTarget(RelocInfo* rinfo) {
494
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
495
- rinfo->IsPatchedReturnSequence()) ||
496
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
497
- rinfo->IsPatchedDebugBreakSlotSequence()));
498
- HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
499
- MarkCompactCollector::MarkObject(code);
500
- }
501
-
502
- // Mark object pointed to by p.
503
- INLINE(static void MarkObjectByPointer(Object** p)) {
504
- if (!(*p)->IsHeapObject()) return;
505
- HeapObject* object = ShortCircuitConsString(p);
506
- MarkCompactCollector::MarkObject(object);
507
- }
508
-
509
- // Visit an unmarked object.
510
- static inline void VisitUnmarkedObject(HeapObject* obj) {
511
- #ifdef DEBUG
512
- ASSERT(Heap::Contains(obj));
513
- ASSERT(!obj->IsMarked());
514
- #endif
515
- Map* map = obj->map();
516
- MarkCompactCollector::SetMark(obj);
517
- // Mark the map pointer and the body.
518
- MarkCompactCollector::MarkObject(map);
519
- IterateBody(map, obj);
520
- }
521
-
522
- // Visit all unmarked objects pointed to by [start, end).
523
- // Returns false if the operation fails (lack of stack space).
524
- static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
525
- // Return false if we are close to the stack limit.
526
- StackLimitCheck check;
527
- if (check.HasOverflowed()) return false;
528
-
529
- // Visit the unmarked objects.
530
- for (Object** p = start; p < end; p++) {
531
- if (!(*p)->IsHeapObject()) continue;
532
- HeapObject* obj = HeapObject::cast(*p);
533
- if (obj->IsMarked()) continue;
534
- VisitUnmarkedObject(obj);
535
- }
536
- return true;
537
- }
538
-
539
- static inline void VisitExternalReference(Address* p) { }
540
- static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
541
-
542
- private:
543
- class DataObjectVisitor {
544
- public:
545
- template<int size>
546
- static void VisitSpecialized(Map* map, HeapObject* object) {
547
- }
548
-
549
- static void Visit(Map* map, HeapObject* object) {
550
- }
551
- };
552
-
553
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
554
- JSObject::BodyDescriptor,
555
- void> JSObjectVisitor;
556
-
557
- typedef FlexibleBodyVisitor<StaticMarkingVisitor,
558
- StructBodyDescriptor,
559
- void> StructObjectVisitor;
560
-
561
- static void VisitCode(Map* map, HeapObject* object) {
562
- reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
563
- }
564
-
565
- // Code flushing support.
566
-
567
- // How many collections a newly compiled code object will survive before being
568
- // flushed.
569
- static const int kCodeAgeThreshold = 5;
570
-
571
- inline static bool HasSourceCode(SharedFunctionInfo* info) {
572
- Object* undefined = Heap::raw_unchecked_undefined_value();
573
- return (info->script() != undefined) &&
574
- (reinterpret_cast<Script*>(info->script())->source() != undefined);
575
- }
576
-
577
-
578
- inline static bool IsCompiled(JSFunction* function) {
579
- return
580
- function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
581
- }
582
-
583
-
584
- inline static bool IsCompiled(SharedFunctionInfo* function) {
585
- return
586
- function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
587
- }
588
-
589
- inline static bool IsFlushable(JSFunction* function) {
590
- SharedFunctionInfo* shared_info = function->unchecked_shared();
591
-
592
- // Code is either on the stack, in the compilation cache, or referenced
593
- // by an optimized version of the function.
594
- if (function->unchecked_code()->IsMarked()) {
595
- shared_info->set_code_age(0);
596
- return false;
597
- }
598
-
599
- // We do not flush code for optimized functions.
600
- if (function->code() != shared_info->unchecked_code()) {
601
- return false;
602
- }
603
-
604
- return IsFlushable(shared_info);
605
- }
606
-
607
- inline static bool IsFlushable(SharedFunctionInfo* shared_info) {
608
- // Code is either on the stack, in the compilation cache, or referenced
609
- // by an optimized version of the function.
610
- if (shared_info->unchecked_code()->IsMarked()) {
611
- shared_info->set_code_age(0);
612
- return false;
613
- }
614
-
615
- // The function must be compiled and have the source code available,
616
- // to be able to recompile it in case we need the function again.
617
- if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) {
618
- return false;
619
- }
620
-
621
- // We never flush code for Api functions.
622
- Object* function_data = shared_info->function_data();
623
- if (function_data->IsHeapObject() &&
624
- (SafeMap(function_data)->instance_type() ==
625
- FUNCTION_TEMPLATE_INFO_TYPE)) {
626
- return false;
627
- }
628
-
629
- // Only flush code for functions.
630
- if (shared_info->code()->kind() != Code::FUNCTION) return false;
631
-
632
- // Function must be lazy compilable.
633
- if (!shared_info->allows_lazy_compilation()) return false;
634
-
635
- // If this is a full script wrapped in a function, we do not flush the code.
636
- if (shared_info->is_toplevel()) return false;
637
-
638
- // Age this shared function info.
639
- if (shared_info->code_age() < kCodeAgeThreshold) {
640
- shared_info->set_code_age(shared_info->code_age() + 1);
641
- return false;
642
- }
643
-
644
- return true;
645
- }
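// Timing of the aging check above, spelled out (derived from
// kCodeAgeThreshold = 5): an otherwise-flushable SharedFunctionInfo has its
// code_age incremented once per collection while its code stays unmarked,
// so IsFlushable returns false for five collections and first returns true
// on the sixth; any collection that finds the code marked resets code_age
// to 0 and the countdown starts over.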
646
-
647
-
648
- static bool FlushCodeForFunction(JSFunction* function) {
649
- if (!IsFlushable(function)) return false;
650
-
651
- // This function's code looks flushable. But we have to postpone the
652
- // decision until we see all functions that point to the same
653
- // SharedFunctionInfo because some of them might be optimized.
654
- // That would make the nonoptimized version of the code nonflushable,
655
- // because it is required for bailing out from optimized code.
656
- FlushCode::AddCandidate(function);
657
- return true;
658
- }
659
-
660
-
661
- static inline Map* SafeMap(Object* obj) {
662
- MapWord map_word = HeapObject::cast(obj)->map_word();
663
- map_word.ClearMark();
664
- map_word.ClearOverflow();
665
- return map_word.ToMap();
666
- }
667
-
668
-
669
- static inline bool IsJSBuiltinsObject(Object* obj) {
670
- return obj->IsHeapObject() &&
671
- (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE);
672
- }
673
-
674
-
675
- static inline bool IsValidNotBuiltinContext(Object* ctx) {
676
- if (!ctx->IsHeapObject()) return false;
677
-
678
- Map* map = SafeMap(ctx);
679
- if (!(map == Heap::raw_unchecked_context_map() ||
680
- map == Heap::raw_unchecked_catch_context_map() ||
681
- map == Heap::raw_unchecked_global_context_map())) {
682
- return false;
683
- }
684
-
685
- Context* context = reinterpret_cast<Context*>(ctx);
686
-
687
- if (IsJSBuiltinsObject(context->global())) {
688
- return false;
689
- }
690
-
691
- return true;
692
- }
693
-
694
-
695
- static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
696
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
697
-
698
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
699
-
700
- FixedBodyVisitor<StaticMarkingVisitor,
701
- SharedFunctionInfo::BodyDescriptor,
702
- void>::Visit(map, object);
703
- }
704
-
705
-
706
- static void VisitSharedFunctionInfoAndFlushCode(Map* map,
707
- HeapObject* object) {
708
- VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
709
- }
710
-
711
-
712
- static void VisitSharedFunctionInfoAndFlushCodeGeneric(
713
- Map* map, HeapObject* object, bool known_flush_code_candidate) {
714
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
715
-
716
- if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
717
-
718
- if (!known_flush_code_candidate) {
719
- known_flush_code_candidate = IsFlushable(shared);
720
- if (known_flush_code_candidate) FlushCode::AddCandidate(shared);
721
- }
722
-
723
- VisitSharedFunctionInfoFields(object, known_flush_code_candidate);
724
- }
725
-
726
-
727
- static void VisitCodeEntry(Address entry_address) {
728
- Object* code = Code::GetObjectFromEntryAddress(entry_address);
729
- Object* old_code = code;
730
- VisitPointer(&code);
731
- if (code != old_code) {
732
- Memory::Address_at(entry_address) =
733
- reinterpret_cast<Code*>(code)->entry();
734
- }
735
- }
736
-
737
-
738
- static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
739
- JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
740
- // The function must have a valid context and not be a builtin.
741
- bool flush_code_candidate = false;
742
- if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
743
- flush_code_candidate = FlushCodeForFunction(jsfunction);
744
- }
745
-
746
- if (!flush_code_candidate) {
747
- MarkCompactCollector::MarkObject(
748
- jsfunction->unchecked_shared()->unchecked_code());
749
-
750
- if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
751
- // For optimized functions we should retain both the non-optimized version
752
- // of its code and the non-optimized versions of all inlined functions.
753
- // This is required to support bailing out from inlined code.
754
- DeoptimizationInputData* data =
755
- reinterpret_cast<DeoptimizationInputData*>(
756
- jsfunction->unchecked_code()->unchecked_deoptimization_data());
757
-
758
- FixedArray* literals = data->UncheckedLiteralArray();
759
-
760
- for (int i = 0, count = data->InlinedFunctionCount()->value();
761
- i < count;
762
- i++) {
763
- JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
764
- MarkCompactCollector::MarkObject(
765
- inlined->unchecked_shared()->unchecked_code());
766
- }
767
- }
768
- }
769
-
770
- VisitJSFunctionFields(map,
771
- reinterpret_cast<JSFunction*>(object),
772
- flush_code_candidate);
773
- }
774
-
775
-
776
- static void VisitJSFunction(Map* map, HeapObject* object) {
777
- VisitJSFunctionFields(map,
778
- reinterpret_cast<JSFunction*>(object),
779
- false);
780
- }
781
-
782
-
783
- #define SLOT_ADDR(obj, offset) \
784
- reinterpret_cast<Object**>((obj)->address() + offset)
785
-
786
-
787
- static inline void VisitJSFunctionFields(Map* map,
788
- JSFunction* object,
789
- bool flush_code_candidate) {
790
- VisitPointers(SLOT_ADDR(object, JSFunction::kPropertiesOffset),
791
- SLOT_ADDR(object, JSFunction::kCodeEntryOffset));
792
-
793
- if (!flush_code_candidate) {
794
- VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset);
795
- } else {
796
- // Don't visit code object.
797
-
798
- // Visit shared function info to avoid double checking of its
799
- // flushability.
800
- SharedFunctionInfo* shared_info = object->unchecked_shared();
801
- if (!shared_info->IsMarked()) {
802
- Map* shared_info_map = shared_info->map();
803
- MarkCompactCollector::SetMark(shared_info);
804
- MarkCompactCollector::MarkObject(shared_info_map);
805
- VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
806
- shared_info,
807
- true);
808
- }
809
- }
810
-
811
- VisitPointers(SLOT_ADDR(object,
812
- JSFunction::kCodeEntryOffset + kPointerSize),
813
- SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));
814
-
815
- // Don't visit the next function list field as it is a weak reference.
816
- }
817
-
818
-
819
- static void VisitSharedFunctionInfoFields(HeapObject* object,
820
- bool flush_code_candidate) {
821
- VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
822
-
823
- if (!flush_code_candidate) {
824
- VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
825
- }
826
-
827
- VisitPointers(SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
828
- SLOT_ADDR(object, SharedFunctionInfo::kSize));
829
- }
830
-
831
- #undef SLOT_ADDR
832
-
833
- typedef void (*Callback)(Map* map, HeapObject* object);
834
-
835
- static VisitorDispatchTable<Callback> table_;
836
- };
837
-
838
-
839
- VisitorDispatchTable<StaticMarkingVisitor::Callback>
840
- StaticMarkingVisitor::table_;
841
-
842
-
843
- class MarkingVisitor : public ObjectVisitor {
844
- public:
845
- void VisitPointer(Object** p) {
846
- StaticMarkingVisitor::VisitPointer(p);
847
- }
848
-
849
- void VisitPointers(Object** start, Object** end) {
850
- StaticMarkingVisitor::VisitPointers(start, end);
851
- }
852
-
853
- void VisitCodeTarget(RelocInfo* rinfo) {
854
- StaticMarkingVisitor::VisitCodeTarget(rinfo);
855
- }
856
-
857
- void VisitGlobalPropertyCell(RelocInfo* rinfo) {
858
- StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo);
859
- }
860
-
861
- void VisitDebugTarget(RelocInfo* rinfo) {
862
- StaticMarkingVisitor::VisitDebugTarget(rinfo);
863
- }
864
- };
865
-
866
-
867
- class CodeMarkingVisitor : public ThreadVisitor {
868
- public:
869
- void VisitThread(ThreadLocalTop* top) {
870
- for (StackFrameIterator it(top); !it.done(); it.Advance()) {
871
- MarkCompactCollector::MarkObject(it.frame()->unchecked_code());
872
- }
873
- }
874
- };
875
-
876
-
877
- class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
878
- public:
879
- void VisitPointers(Object** start, Object** end) {
880
- for (Object** p = start; p < end; p++) VisitPointer(p);
881
- }
882
-
883
- void VisitPointer(Object** slot) {
884
- Object* obj = *slot;
885
- if (obj->IsSharedFunctionInfo()) {
886
- SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
887
- MarkCompactCollector::MarkObject(shared->unchecked_code());
888
- MarkCompactCollector::MarkObject(shared);
889
- }
890
- }
891
- };
892
-
893
-
894
- void MarkCompactCollector::PrepareForCodeFlushing() {
895
- if (!FLAG_flush_code) {
896
- StaticMarkingVisitor::EnableCodeFlushing(false);
897
- return;
898
- }
899
-
900
- #ifdef ENABLE_DEBUGGER_SUPPORT
901
- if (Debug::IsLoaded() || Debug::has_break_points()) {
902
- StaticMarkingVisitor::EnableCodeFlushing(false);
903
- return;
904
- }
905
- #endif
906
- StaticMarkingVisitor::EnableCodeFlushing(true);
907
-
908
- // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
909
- // relies on it being marked before any other descriptor array.
910
- MarkObject(Heap::raw_unchecked_empty_descriptor_array());
911
-
912
- // Make sure we are not referencing the code from the stack.
913
- for (StackFrameIterator it; !it.done(); it.Advance()) {
914
- MarkObject(it.frame()->unchecked_code());
915
- }
916
-
917
- // Iterate the archived stacks in all threads to check if
918
- // the code is referenced.
919
- CodeMarkingVisitor code_marking_visitor;
920
- ThreadManager::IterateArchivedThreads(&code_marking_visitor);
921
-
922
- SharedFunctionInfoMarkingVisitor visitor;
923
- CompilationCache::IterateFunctions(&visitor);
924
- HandleScopeImplementer::Iterate(&visitor);
925
-
926
- ProcessMarkingStack();
927
- }
928
-
929
-
930
- // Visitor class for marking heap roots.
931
- class RootMarkingVisitor : public ObjectVisitor {
932
- public:
933
- void VisitPointer(Object** p) {
934
- MarkObjectByPointer(p);
935
- }
936
-
937
- void VisitPointers(Object** start, Object** end) {
938
- for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
939
- }
940
-
941
- private:
942
- void MarkObjectByPointer(Object** p) {
943
- if (!(*p)->IsHeapObject()) return;
944
-
945
- // Replace flat cons strings in place.
946
- HeapObject* object = ShortCircuitConsString(p);
947
- if (object->IsMarked()) return;
948
-
949
- Map* map = object->map();
950
- // Mark the object.
951
- MarkCompactCollector::SetMark(object);
952
-
953
- // Mark the map pointer and body, and push them on the marking stack.
954
- MarkCompactCollector::MarkObject(map);
955
- StaticMarkingVisitor::IterateBody(map, object);
956
-
957
- // Mark all the objects reachable from the map and body. May leave
958
- // overflowed objects in the heap.
959
- MarkCompactCollector::EmptyMarkingStack();
960
- }
961
- };
962
-
963
-
964
- // Helper class for pruning the symbol table.
965
- class SymbolTableCleaner : public ObjectVisitor {
966
- public:
967
- SymbolTableCleaner() : pointers_removed_(0) { }
968
-
969
- virtual void VisitPointers(Object** start, Object** end) {
970
- // Visit all HeapObject pointers in [start, end).
971
- for (Object** p = start; p < end; p++) {
972
- if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
973
- // Check if the symbol being pruned is an external symbol. We need to
974
- // delete the associated external data as this symbol is going away.
975
-
976
- // Since no objects have yet been moved we can safely access the map of
977
- // the object.
978
- if ((*p)->IsExternalString()) {
979
- Heap::FinalizeExternalString(String::cast(*p));
980
- }
981
- // Set the entry to null_value (as deleted).
982
- *p = Heap::raw_unchecked_null_value();
983
- pointers_removed_++;
984
- }
985
- }
986
- }
987
-
988
- int PointersRemoved() {
989
- return pointers_removed_;
990
- }
991
- private:
992
- int pointers_removed_;
993
- };
994
-
995
-
996
- // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
997
- // are retained.
998
- class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
999
- public:
1000
- virtual Object* RetainAs(Object* object) {
1001
- MapWord first_word = HeapObject::cast(object)->map_word();
1002
- if (first_word.IsMarked()) {
1003
- return object;
1004
- } else {
1005
- return NULL;
1006
- }
1007
- }
1008
- };
1009
-
1010
-
1011
- void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
1012
- ASSERT(!object->IsMarked());
1013
- ASSERT(Heap::Contains(object));
1014
- if (object->IsMap()) {
1015
- Map* map = Map::cast(object);
1016
- if (FLAG_cleanup_caches_in_maps_at_gc) {
1017
- map->ClearCodeCache();
1018
- }
1019
- SetMark(map);
1020
- if (FLAG_collect_maps &&
1021
- map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1022
- map->instance_type() <= JS_FUNCTION_TYPE) {
1023
- MarkMapContents(map);
1024
- } else {
1025
- marking_stack.Push(map);
1026
- }
1027
- } else {
1028
- SetMark(object);
1029
- marking_stack.Push(object);
1030
- }
1031
- }
1032
-
1033
-
1034
- void MarkCompactCollector::MarkMapContents(Map* map) {
1035
- MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
1036
- *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
1037
-
1038
- // Mark the Object* fields of the Map.
1039
- // Since the descriptor array has been marked already, it is fine
1040
- // that one of these fields contains a pointer to it.
1041
- Object** start_slot = HeapObject::RawField(map,
1042
- Map::kPointerFieldsBeginOffset);
1043
-
1044
- Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
1045
-
1046
- StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
1047
- }
1048
-
1049
-
1050
- void MarkCompactCollector::MarkDescriptorArray(
1051
- DescriptorArray* descriptors) {
1052
- if (descriptors->IsMarked()) return;
1053
- // Empty descriptor array is marked as a root before any maps are marked.
1054
- ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
1055
- SetMark(descriptors);
1056
-
1057
- FixedArray* contents = reinterpret_cast<FixedArray*>(
1058
- descriptors->get(DescriptorArray::kContentArrayIndex));
1059
- ASSERT(contents->IsHeapObject());
1060
- ASSERT(!contents->IsMarked());
1061
- ASSERT(contents->IsFixedArray());
1062
- ASSERT(contents->length() >= 2);
1063
- SetMark(contents);
1064
- // Contents contains (value, details) pairs. If the details say that
1065
- // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or
1066
- // NULL_DESCRIPTOR, we don't mark the value as live. Only for
1067
- // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a
1068
- // Map*).
1069
- for (int i = 0; i < contents->length(); i += 2) {
1070
- // If the pair (value, details) at index i, i+1 is not
1071
- // a transition or null descriptor, mark the value.
1072
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
1073
- if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
1074
- HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
1075
- if (object->IsHeapObject() && !object->IsMarked()) {
1076
- SetMark(object);
1077
- marking_stack.Push(object);
1078
- }
1079
- }
1080
- }
1081
- // The DescriptorArray descriptors contains a pointer to its contents array,
1082
- // but the contents array is already marked.
1083
- marking_stack.Push(descriptors);
1084
- }
1085
-
1086
-
1087
- void MarkCompactCollector::CreateBackPointers() {
1088
- HeapObjectIterator iterator(Heap::map_space());
1089
- for (HeapObject* next_object = iterator.next();
1090
- next_object != NULL; next_object = iterator.next()) {
1091
- if (next_object->IsMap()) { // Could also be ByteArray on free list.
1092
- Map* map = Map::cast(next_object);
1093
- if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1094
- map->instance_type() <= JS_FUNCTION_TYPE) {
1095
- map->CreateBackPointers();
1096
- } else {
1097
- ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array());
1098
- }
1099
- }
1100
- }
1101
- }
1102
-
1103
-
1104
- static int OverflowObjectSize(HeapObject* obj) {
1105
- // Recover the normal map pointer, it might be marked as live and
1106
- // overflowed.
1107
- MapWord map_word = obj->map_word();
1108
- map_word.ClearMark();
1109
- map_word.ClearOverflow();
1110
- return obj->SizeFromMap(map_word.ToMap());
1111
- }
1112
-
1113
-
1114
- // Fill the marking stack with overflowed objects returned by the given
1115
- // iterator. Stop when the marking stack is filled or the end of the space
1116
- // is reached, whichever comes first.
1117
- template<class T>
1118
- static void ScanOverflowedObjects(T* it) {
1119
- // The caller should ensure that the marking stack is initially not full,
1120
- // so that we don't waste effort pointlessly scanning for objects.
1121
- ASSERT(!marking_stack.is_full());
1122
-
1123
- for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
1124
- if (object->IsOverflowed()) {
1125
- object->ClearOverflow();
1126
- ASSERT(object->IsMarked());
1127
- ASSERT(Heap::Contains(object));
1128
- marking_stack.Push(object);
1129
- if (marking_stack.is_full()) return;
1130
- }
1131
- }
1132
- }
1133
-
1134
-
1135
- bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
1136
- return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
1137
- }
1138
-
1139
-
1140
- void MarkCompactCollector::MarkSymbolTable() {
1141
- SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
1142
- // Mark the symbol table itself.
1143
- SetMark(symbol_table);
1144
- // Explicitly mark the prefix.
1145
- MarkingVisitor marker;
1146
- symbol_table->IteratePrefix(&marker);
1147
- ProcessMarkingStack();
1148
- }
1149
-
1150
-
1151
- void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
1152
- // Mark the heap roots including global variables, stack variables,
1153
- // etc., and all objects reachable from them.
1154
- Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
1155
-
1156
- // Handle the symbol table specially.
1157
- MarkSymbolTable();
1158
-
1159
- // There may be overflowed objects in the heap. Visit them now.
1160
- while (marking_stack.overflowed()) {
1161
- RefillMarkingStack();
1162
- EmptyMarkingStack();
1163
- }
1164
- }
1165
-
1166
-
1167
- void MarkCompactCollector::MarkObjectGroups() {
1168
- List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();
1169
-
1170
- for (int i = 0; i < object_groups->length(); i++) {
1171
- ObjectGroup* entry = object_groups->at(i);
1172
- if (entry == NULL) continue;
1173
-
1174
- List<Object**>& objects = entry->objects_;
1175
- bool group_marked = false;
1176
- for (int j = 0; j < objects.length(); j++) {
1177
- Object* object = *objects[j];
1178
- if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
1179
- group_marked = true;
1180
- break;
1181
- }
1182
- }
1183
-
1184
- if (!group_marked) continue;
1185
-
1186
- // An object in the group is marked, so mark as gray all white heap
1187
- // objects in the group.
1188
- for (int j = 0; j < objects.length(); ++j) {
1189
- if ((*objects[j])->IsHeapObject()) {
1190
- MarkObject(HeapObject::cast(*objects[j]));
1191
- }
1192
- }
1193
- // Once the entire group has been colored gray, set the object group
1194
- // to NULL so it won't be processed again.
1195
- delete object_groups->at(i);
1196
- object_groups->at(i) = NULL;
1197
- }
1198
- }
1199
-
1200
-
1201
- // Mark all objects reachable from the objects on the marking stack.
1202
- // Before: the marking stack contains zero or more heap object pointers.
1203
- // After: the marking stack is empty, and all objects reachable from the
1204
- // marking stack have been marked, or are overflowed in the heap.
1205
- void MarkCompactCollector::EmptyMarkingStack() {
1206
- while (!marking_stack.is_empty()) {
1207
- HeapObject* object = marking_stack.Pop();
1208
- ASSERT(object->IsHeapObject());
1209
- ASSERT(Heap::Contains(object));
1210
- ASSERT(object->IsMarked());
1211
- ASSERT(!object->IsOverflowed());
1212
-
1213
- // Because the object is marked, we have to recover the original map
1214
- // pointer and use it to mark the object's body.
1215
- MapWord map_word = object->map_word();
1216
- map_word.ClearMark();
1217
- Map* map = map_word.ToMap();
1218
- MarkObject(map);
1219
-
1220
- StaticMarkingVisitor::IterateBody(map, object);
1221
- }
1222
- }
1223
-
1224
-
1225
- // Sweep the heap for overflowed objects, clear their overflow bits, and
1226
- // push them on the marking stack. Stop early if the marking stack fills
1227
- // before sweeping completes. If sweeping completes, there are no remaining
1228
- // overflowed objects in the heap, so the overflow flag on the marking stack
1229
- // is cleared.
1230
- void MarkCompactCollector::RefillMarkingStack() {
1231
- ASSERT(marking_stack.overflowed());
1232
-
1233
- SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize);
1234
- ScanOverflowedObjects(&new_it);
1235
- if (marking_stack.is_full()) return;
1236
-
1237
- HeapObjectIterator old_pointer_it(Heap::old_pointer_space(),
1238
- &OverflowObjectSize);
1239
- ScanOverflowedObjects(&old_pointer_it);
1240
- if (marking_stack.is_full()) return;
1241
-
1242
- HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize);
1243
- ScanOverflowedObjects(&old_data_it);
1244
- if (marking_stack.is_full()) return;
1245
-
1246
- HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize);
1247
- ScanOverflowedObjects(&code_it);
1248
- if (marking_stack.is_full()) return;
1249
-
1250
- HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize);
1251
- ScanOverflowedObjects(&map_it);
1252
- if (marking_stack.is_full()) return;
1253
-
1254
- HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize);
1255
- ScanOverflowedObjects(&cell_it);
1256
- if (marking_stack.is_full()) return;
1257
-
1258
- LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize);
1259
- ScanOverflowedObjects(&lo_it);
1260
- if (marking_stack.is_full()) return;
1261
-
1262
- marking_stack.clear_overflowed();
1263
- }
1264
-
1265
-
1266
- // Mark all objects reachable (transitively) from objects on the marking
1267
- // stack. Before: the marking stack contains zero or more heap object
1268
- // pointers. After: the marking stack is empty and there are no overflowed
1269
- // objects in the heap.
1270
- void MarkCompactCollector::ProcessMarkingStack() {
1271
- EmptyMarkingStack();
1272
- while (marking_stack.overflowed()) {
1273
- RefillMarkingStack();
1274
- EmptyMarkingStack();
1275
- }
1276
- }
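
The three routines above form a drain/refill loop: a bounded marking stack is drained, and whenever a push would not fit, the object is tagged as overflowed so a later linear heap scan can push it again. The following standalone sketch reproduces that pattern with hypothetical Node and Worklist types (illustrations of the idea, not V8 classes); the tiny capacity is chosen deliberately to force an overflow.

// Sketch of the drain/refill marking loop, with made-up types.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Node {
  bool marked = false;
  bool overflowed = false;   // set when the worklist was full at push time
  std::vector<Node*> children;
};

class Worklist {
 public:
  explicit Worklist(size_t capacity) : capacity_(capacity) {}
  bool Push(Node* n) {
    if (stack_.size() == capacity_) { overflowed_ = true; return false; }
    stack_.push_back(n);
    return true;
  }
  Node* Pop() { Node* n = stack_.back(); stack_.pop_back(); return n; }
  bool is_empty() const { return stack_.empty(); }
  bool overflowed() const { return overflowed_; }
  void clear_overflowed() { overflowed_ = false; }
 private:
  size_t capacity_;
  bool overflowed_ = false;
  std::vector<Node*> stack_;
};

// Mark a node and push it; if the worklist is full, remember it via the
// per-node overflow bit so a later heap scan can recover it.
static void MarkNode(Node* n, Worklist* wl) {
  if (n->marked) return;
  n->marked = true;
  if (!wl->Push(n)) n->overflowed = true;
}

static void EmptyWorklist(Worklist* wl) {
  while (!wl->is_empty()) {
    Node* n = wl->Pop();
    for (Node* c : n->children) MarkNode(c, wl);
  }
}

// "Refill": scan every object for the overflow bit, mirroring
// ScanOverflowedObjects over all spaces. Stops early if the worklist fills.
static void RefillWorklist(const std::vector<Node*>& heap, Worklist* wl) {
  for (Node* n : heap) {
    if (!n->overflowed) continue;
    if (!wl->Push(n)) return;   // full again; keep the node's overflow bit
    n->overflowed = false;
  }
  wl->clear_overflowed();
}

static void ProcessWorklist(const std::vector<Node*>& heap, Worklist* wl) {
  EmptyWorklist(wl);
  while (wl->overflowed()) {
    RefillWorklist(heap, wl);
    EmptyWorklist(wl);
  }
}

int main() {
  std::vector<Node*> heap;
  for (int i = 0; i < 5; ++i) heap.push_back(new Node);
  heap[0]->children = {heap[1], heap[2], heap[3]};
  heap[3]->children = {heap[4]};

  Worklist wl(/*capacity=*/2);   // deliberately tiny to force an overflow
  MarkNode(heap[0], &wl);        // mark the root
  ProcessWorklist(heap, &wl);

  for (size_t i = 0; i < heap.size(); ++i)
    std::printf("node %zu marked: %d\n", i, heap[i]->marked ? 1 : 0);
  for (Node* n : heap) delete n;
  return 0;
}
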
1277
-
1278
-
1279
- void MarkCompactCollector::ProcessObjectGroups() {
1280
- bool work_to_do = true;
1281
- ASSERT(marking_stack.is_empty());
1282
- while (work_to_do) {
1283
- MarkObjectGroups();
1284
- work_to_do = !marking_stack.is_empty();
1285
- ProcessMarkingStack();
1286
- }
1287
- }
1288
-
1289
-
1290
- void MarkCompactCollector::MarkLiveObjects() {
1291
- GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
1292
- // The recursive GC marker detects when it is nearing stack overflow,
1293
- // and switches to a different marking system. JS interrupts interfere
1294
- // with the C stack limit check.
1295
- PostponeInterruptsScope postpone;
1296
-
1297
- #ifdef DEBUG
1298
- ASSERT(state_ == PREPARE_GC);
1299
- state_ = MARK_LIVE_OBJECTS;
1300
- #endif
1301
- // The to space contains live objects, the from space is used as a marking
1302
- // stack.
1303
- marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
1304
- Heap::new_space()->FromSpaceHigh());
1305
-
1306
- ASSERT(!marking_stack.overflowed());
1307
-
1308
- PrepareForCodeFlushing();
1309
-
1310
- RootMarkingVisitor root_visitor;
1311
- MarkRoots(&root_visitor);
1312
-
1313
- // The objects reachable from the roots are marked, yet unreachable
1314
- // objects are unmarked. Mark objects reachable from object groups
1315
- // containing at least one marked object, and continue until no new
1316
- // objects are reachable from the object groups.
1317
- ProcessObjectGroups();
1318
-
1319
- // The objects reachable from the roots or object groups are marked,
1320
- // yet unreachable objects are unmarked. Mark objects reachable
1321
- // only from weak global handles.
1322
- //
1323
- // First we identify nonlive weak handles and mark them as pending
1324
- // destruction.
1325
- GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
1326
- // Then we mark the objects and process the transitive closure.
1327
- GlobalHandles::IterateWeakRoots(&root_visitor);
1328
- while (marking_stack.overflowed()) {
1329
- RefillMarkingStack();
1330
- EmptyMarkingStack();
1331
- }
1332
-
1333
- // Repeat the object groups to mark unmarked groups reachable from the
1334
- // weak roots.
1335
- ProcessObjectGroups();
1336
-
1337
- // Prune the symbol table removing all symbols only pointed to by the
1338
- // symbol table. Cannot use symbol_table() here because the symbol
1339
- // table is marked.
1340
- SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
1341
- SymbolTableCleaner v;
1342
- symbol_table->IterateElements(&v);
1343
- symbol_table->ElementsRemoved(v.PointersRemoved());
1344
- ExternalStringTable::Iterate(&v);
1345
- ExternalStringTable::CleanUp();
1346
-
1347
- // Process the weak references.
1348
- MarkCompactWeakObjectRetainer mark_compact_object_retainer;
1349
- Heap::ProcessWeakReferences(&mark_compact_object_retainer);
1350
-
1351
- // Remove object groups after marking phase.
1352
- GlobalHandles::RemoveObjectGroups();
1353
-
1354
- // Flush code from collected candidates.
1355
- FlushCode::ProcessCandidates();
1356
-
1357
- // Clean up dead objects from the runtime profiler.
1358
- RuntimeProfiler::RemoveDeadSamples();
1359
- }
1360
-
1361
-
1362
- #ifdef DEBUG
1363
- void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
1364
- live_bytes_ += obj->Size();
1365
- if (Heap::new_space()->Contains(obj)) {
1366
- live_young_objects_size_ += obj->Size();
1367
- } else if (Heap::map_space()->Contains(obj)) {
1368
- ASSERT(obj->IsMap());
1369
- live_map_objects_size_ += obj->Size();
1370
- } else if (Heap::cell_space()->Contains(obj)) {
1371
- ASSERT(obj->IsJSGlobalPropertyCell());
1372
- live_cell_objects_size_ += obj->Size();
1373
- } else if (Heap::old_pointer_space()->Contains(obj)) {
1374
- live_old_pointer_objects_size_ += obj->Size();
1375
- } else if (Heap::old_data_space()->Contains(obj)) {
1376
- live_old_data_objects_size_ += obj->Size();
1377
- } else if (Heap::code_space()->Contains(obj)) {
1378
- live_code_objects_size_ += obj->Size();
1379
- } else if (Heap::lo_space()->Contains(obj)) {
1380
- live_lo_objects_size_ += obj->Size();
1381
- } else {
1382
- UNREACHABLE();
1383
- }
1384
- }
1385
- #endif // DEBUG
1386
-
1387
-
1388
- void MarkCompactCollector::SweepLargeObjectSpace() {
1389
- #ifdef DEBUG
1390
- ASSERT(state_ == MARK_LIVE_OBJECTS);
1391
- state_ =
1392
- compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
1393
- #endif
1394
- // Deallocate unmarked objects and clear marked bits for marked objects.
1395
- Heap::lo_space()->FreeUnmarkedObjects();
1396
- }
1397
-
1398
-
1399
- // Safe to use during marking phase only.
1400
- bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
1401
- MapWord metamap = object->map_word();
1402
- metamap.ClearMark();
1403
- return metamap.ToMap()->instance_type() == MAP_TYPE;
1404
- }
1405
-
1406
-
1407
- void MarkCompactCollector::ClearNonLiveTransitions() {
1408
- HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject);
1409
- // Iterate over the map space, setting map transitions that go from
1410
- // a marked map to an unmarked map to null transitions. At the same time,
1411
- // set all the prototype fields of maps back to their original value,
1412
- // dropping the back pointers temporarily stored in the prototype field.
1413
- // Setting the prototype field requires following the linked list of
1414
- // back pointers, reversing them all at once. This allows us to find
1415
- // those maps with map transitions that need to be nulled, and only
1416
- // scan the descriptor arrays of those maps, not all maps.
1417
- // All of these actions are carried out only on maps of JSObjects
1418
- // and related subtypes.
1419
- for (HeapObject* obj = map_iterator.next();
1420
- obj != NULL; obj = map_iterator.next()) {
1421
- Map* map = reinterpret_cast<Map*>(obj);
1422
- if (!map->IsMarked() && map->IsByteArray()) continue;
1423
-
1424
- ASSERT(SafeIsMap(map));
1425
- // Only JSObject and subtypes have map transitions and back pointers.
1426
- if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
1427
- if (map->instance_type() > JS_FUNCTION_TYPE) continue;
1428
-
1429
- if (map->IsMarked() && map->attached_to_shared_function_info()) {
1430
- // This map is used for inobject slack tracking and has been detached
1431
- // from SharedFunctionInfo during the mark phase.
1432
- // Since it survived the GC, reattach it now.
1433
- map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
1434
- }
1435
-
1436
- // Follow the chain of back pointers to find the prototype.
1437
- Map* current = map;
1438
- while (SafeIsMap(current)) {
1439
- current = reinterpret_cast<Map*>(current->prototype());
1440
- ASSERT(current->IsHeapObject());
1441
- }
1442
- Object* real_prototype = current;
1443
-
1444
- // Follow back pointers, setting them to prototype,
1445
- // clearing map transitions when necessary.
1446
- current = map;
1447
- bool on_dead_path = !current->IsMarked();
1448
- Object* next;
1449
- while (SafeIsMap(current)) {
1450
- next = current->prototype();
1451
- // There should never be a dead map above a live map.
1452
- ASSERT(on_dead_path || current->IsMarked());
1453
-
1454
- // A live map above a dead map indicates a dead transition.
1455
- // This test will always be false on the first iteration.
1456
- if (on_dead_path && current->IsMarked()) {
1457
- on_dead_path = false;
1458
- current->ClearNonLiveTransitions(real_prototype);
1459
- }
1460
- *HeapObject::RawField(current, Map::kPrototypeOffset) =
1461
- real_prototype;
1462
- current = reinterpret_cast<Map*>(next);
1463
- }
1464
- }
1465
- }
1466
-
1467
- // -------------------------------------------------------------------------
1468
- // Phase 2: Encode forwarding addresses.
1469
- // When compacting, forwarding addresses for objects in old space and map
1470
- // space are encoded in their map pointer word (along with an encoding of
1471
- // their map pointers).
1472
- //
1473
- // The exact encoding is described in the comments for class MapWord in
1474
- // objects.h.
1475
- //
1476
- // An address range [start, end) can have both live and non-live objects.
1477
- // Maximal non-live regions are marked so they can be skipped on subsequent
1478
- // sweeps of the heap. A distinguished map-pointer encoding is used to mark
1479
- // free regions of one-word size (in which case the next word is the start
1480
- // of a live object). A second distinguished map-pointer encoding is used
1481
- // to mark free regions larger than one word, and the size of the free
1482
- // region (including the first word) is written to the second word of the
1483
- // region.
1484
- //
1485
- // Any valid map page offset must lie in the object area of the page, so map
1486
- // page offsets less than Page::kObjectStartOffset are invalid. We use a
1487
- // pair of distinguished invalid map encodings (for single word and multiple
1488
- // words) to indicate free regions in the page found during computation of
1489
- // forwarding addresses and skipped over in subsequent sweeps.
1490
-
1491
-
1492
- // Encode a free region, defined by the given start address and size, in the
1493
- // first word or two of the region.
1494
- void EncodeFreeRegion(Address free_start, int free_size) {
1495
- ASSERT(free_size >= kIntSize);
1496
- if (free_size == kIntSize) {
1497
- Memory::uint32_at(free_start) = MarkCompactCollector::kSingleFreeEncoding;
1498
- } else {
1499
- ASSERT(free_size >= 2 * kIntSize);
1500
- Memory::uint32_at(free_start) = MarkCompactCollector::kMultiFreeEncoding;
1501
- Memory::int_at(free_start + kIntSize) = free_size;
1502
- }
1503
-
1504
- #ifdef DEBUG
1505
- // Zap the body of the free region.
1506
- if (FLAG_enable_slow_asserts) {
1507
- for (int offset = 2 * kIntSize;
1508
- offset < free_size;
1509
- offset += kPointerSize) {
1510
- Memory::Address_at(free_start + offset) = kZapValue;
1511
- }
1512
- }
1513
- #endif
1514
- }
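
EncodeFreeRegion writes one of two distinguished values into the first word of a dead region so that later passes (see IterateLiveObjectsInRange further down) can step over it: a one-word region gets a single sentinel, while a larger region gets a second sentinel plus its size in the following word. Below is a minimal standalone sketch of the same tagging scheme over a plain word array; the sentinel constants and word-granularity sizes are invented for the example and are not V8's kSingleFreeEncoding/kMultiFreeEncoding values.

// Sketch of tagging free regions so a later scan can skip them.
#include <cassert>
#include <cstdint>
#include <cstdio>

static const uint32_t kSingleFree = 0xFFFFFFFEu;  // one-word free region
static const uint32_t kMultiFree  = 0xFFFFFFFCu;  // multi-word free region

// words[start .. start+size) becomes a free region.
static void EncodeFree(uint32_t* words, int start, int size) {
  assert(size >= 1);
  if (size == 1) {
    words[start] = kSingleFree;
  } else {
    words[start] = kMultiFree;
    words[start + 1] = static_cast<uint32_t>(size);  // size incl. first word
  }
}

// Walk the array, skipping free regions and visiting "live" words.
static void VisitLive(const uint32_t* words, int length) {
  for (int i = 0; i < length; ) {
    if (words[i] == kSingleFree) {
      i += 1;
    } else if (words[i] == kMultiFree) {
      i += static_cast<int>(words[i + 1]);
    } else {
      std::printf("live word at %d: %u\n", i, words[i]);
      i += 1;
    }
  }
}

int main() {
  uint32_t words[8] = {7, 0, 0, 0, 9, 0, 11, 13};
  EncodeFree(words, 1, 3);   // words 1..3 are free
  EncodeFree(words, 5, 1);   // word 5 is free
  VisitLive(words, 8);       // prints the live words 7, 9, 11, 13
  return 0;
}
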
1515
-
1516
-
1517
- // Try to promote all objects in new space. Heap numbers and sequential
1518
- // strings are promoted to the old data space, large objects to large object space,
1519
- // and all others to the old space.
1520
- inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object,
1521
- int object_size) {
1522
- MaybeObject* forwarded;
1523
- if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
1524
- forwarded = Failure::Exception();
1525
- } else {
1526
- OldSpace* target_space = Heap::TargetSpace(object);
1527
- ASSERT(target_space == Heap::old_pointer_space() ||
1528
- target_space == Heap::old_data_space());
1529
- forwarded = target_space->MCAllocateRaw(object_size);
1530
- }
1531
- Object* result;
1532
- if (!forwarded->ToObject(&result)) {
1533
- result = Heap::new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
1534
- }
1535
- return result;
1536
- }
1537
-
1538
-
1539
- // Allocation functions for the paged spaces call the space's MCAllocateRaw.
1540
- MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
1541
- HeapObject* ignore,
1542
- int object_size) {
1543
- return Heap::old_pointer_space()->MCAllocateRaw(object_size);
1544
- }
1545
-
1546
-
1547
- MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
1548
- HeapObject* ignore,
1549
- int object_size) {
1550
- return Heap::old_data_space()->MCAllocateRaw(object_size);
1551
- }
1552
-
1553
-
1554
- MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
1555
- HeapObject* ignore,
1556
- int object_size) {
1557
- return Heap::code_space()->MCAllocateRaw(object_size);
1558
- }
1559
-
1560
-
1561
- MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
1562
- HeapObject* ignore,
1563
- int object_size) {
1564
- return Heap::map_space()->MCAllocateRaw(object_size);
1565
- }
1566
-
1567
-
1568
- MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(HeapObject* ignore,
1569
- int object_size) {
1570
- return Heap::cell_space()->MCAllocateRaw(object_size);
1571
- }
1572
-
1573
-
1574
- // The forwarding address is encoded at the same offset as the current
1575
- // to-space object, but in from space.
1576
- inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
1577
- int object_size,
1578
- Object* new_object,
1579
- int* ignored) {
1580
- int offset =
1581
- Heap::new_space()->ToSpaceOffsetForAddress(old_object->address());
1582
- Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) =
1583
- HeapObject::cast(new_object)->address();
1584
- }
1585
-
1586
-
1587
- // The forwarding address is encoded in the map pointer of the object as an
1588
- // offset (in terms of live bytes) from the address of the first live object
1589
- // in the page.
1590
- inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,
1591
- int object_size,
1592
- Object* new_object,
1593
- int* offset) {
1594
- // Record the forwarding address of the first live object if necessary.
1595
- if (*offset == 0) {
1596
- Page::FromAddress(old_object->address())->mc_first_forwarded =
1597
- HeapObject::cast(new_object)->address();
1598
- }
1599
-
1600
- MapWord encoding =
1601
- MapWord::EncodeAddress(old_object->map()->address(), *offset);
1602
- old_object->set_map_word(encoding);
1603
- *offset += object_size;
1604
- ASSERT(*offset <= Page::kObjectAreaSize);
1605
- }
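
In the paged spaces the forwarding address is not stored verbatim: the page records where its first live object will land (mc_first_forwarded above), and every other live object keeps only its offset in live bytes from that first object, packed into its map word. The sketch below shows the same encode/decode idea with toy Page/Obj structs and a single page; the cross-page case that GetForwardingAddressInOldSpace handles later is deliberately left out, and all names here are illustrative only.

// Sketch of offset-based forwarding: the page records the forwarding address
// of its first live object; each live object stores only its live-byte offset.
#include <cassert>
#include <cstdio>
#include <vector>

struct Obj {
  int size;
  bool live;
  int forwarding_offset = 0;  // offset in live bytes from the first live object
};

struct Page {
  std::vector<Obj> objects;
  int first_forwarded = -1;   // new address of the first live object
};

// Encode: sweep the page once, accumulating live bytes.
static void EncodeForwardingOffsets(Page* p, int compaction_target) {
  int offset = 0;
  for (Obj& o : p->objects) {
    if (!o.live) continue;
    if (offset == 0) p->first_forwarded = compaction_target;
    o.forwarding_offset = offset;
    offset += o.size;
  }
}

// Decode: new address = first live object's new address + stored offset.
static int ForwardingAddress(const Page& p, const Obj& o) {
  assert(o.live && p.first_forwarded >= 0);
  return p.first_forwarded + o.forwarding_offset;
}

int main() {
  Page p;
  p.objects = { {16, true}, {8, false}, {24, true}, {8, true} };
  EncodeForwardingOffsets(&p, /*compaction_target=*/1000);
  for (const Obj& o : p.objects) {
    if (o.live)
      std::printf("object of size %d relocates to %d\n",
                  o.size, ForwardingAddress(p, o));
  }
  return 0;
}
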
1606
-
1607
-
1608
- // Most non-live objects are ignored.
1609
- inline void IgnoreNonLiveObject(HeapObject* object) {}
1610
-
1611
-
1612
- // Function template that, given a range of addresses (eg, a semispace or a
1613
- // paged space page), iterates through the objects in the range to clear
1614
- // mark bits and compute and encode forwarding addresses. As a side effect,
1615
- // maximal free chunks are marked so that they can be skipped on subsequent
1616
- // sweeps.
1617
- //
1618
- // The template parameters are an allocation function, a forwarding address
1619
- // encoding function, and a function to process non-live objects.
1620
- template<MarkCompactCollector::AllocationFunction Alloc,
1621
- MarkCompactCollector::EncodingFunction Encode,
1622
- MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1623
- inline void EncodeForwardingAddressesInRange(Address start,
1624
- Address end,
1625
- int* offset) {
1626
- // The start address of the current free region while sweeping the space.
1627
- // This address is set when a transition from live to non-live objects is
1628
- // encountered. A value (an encoding of the 'next free region' pointer)
1629
- // is written to memory at this address when a transition from non-live to
1630
- // live objects is encountered.
1631
- Address free_start = NULL;
1632
-
1633
- // A flag giving the state of the previously swept object. Initially true
1634
- // to ensure that free_start is initialized to a proper address before
1635
- // trying to write to it.
1636
- bool is_prev_alive = true;
1637
-
1638
- int object_size; // Will be set on each iteration of the loop.
1639
- for (Address current = start; current < end; current += object_size) {
1640
- HeapObject* object = HeapObject::FromAddress(current);
1641
- if (object->IsMarked()) {
1642
- object->ClearMark();
1643
- MarkCompactCollector::tracer()->decrement_marked_count();
1644
- object_size = object->Size();
1645
-
1646
- // Allocation cannot fail, because we are compacting the space.
1647
- Object* forwarded = Alloc(object, object_size)->ToObjectUnchecked();
1648
- Encode(object, object_size, forwarded, offset);
1649
-
1650
- #ifdef DEBUG
1651
- if (FLAG_gc_verbose) {
1652
- PrintF("forward %p -> %p.\n", object->address(),
1653
- HeapObject::cast(forwarded)->address());
1654
- }
1655
- #endif
1656
- if (!is_prev_alive) { // Transition from non-live to live.
1657
- EncodeFreeRegion(free_start, static_cast<int>(current - free_start));
1658
- is_prev_alive = true;
1659
- }
1660
- } else { // Non-live object.
1661
- object_size = object->Size();
1662
- ProcessNonLive(object);
1663
- if (is_prev_alive) { // Transition from live to non-live.
1664
- free_start = current;
1665
- is_prev_alive = false;
1666
- }
1667
- LiveObjectList::ProcessNonLive(object);
1668
- }
1669
- }
1670
-
1671
- // If we ended on a free region, mark it.
1672
- if (!is_prev_alive) {
1673
- EncodeFreeRegion(free_start, static_cast<int>(end - free_start));
1674
- }
1675
- }
1676
-
1677
-
1678
- // Functions to encode the forwarding pointers in each compactable space.
1679
- void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
1680
- int ignored;
1681
- EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
1682
- EncodeForwardingAddressInNewSpace,
1683
- IgnoreNonLiveObject>(
1684
- Heap::new_space()->bottom(),
1685
- Heap::new_space()->top(),
1686
- &ignored);
1687
- }
1688
-
1689
-
1690
- template<MarkCompactCollector::AllocationFunction Alloc,
1691
- MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1692
- void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
1693
- PagedSpace* space) {
1694
- PageIterator it(space, PageIterator::PAGES_IN_USE);
1695
- while (it.has_next()) {
1696
- Page* p = it.next();
1697
-
1698
- // The offset of each live object in the page from the first live object
1699
- // in the page.
1700
- int offset = 0;
1701
- EncodeForwardingAddressesInRange<Alloc,
1702
- EncodeForwardingAddressInPagedSpace,
1703
- ProcessNonLive>(
1704
- p->ObjectAreaStart(),
1705
- p->AllocationTop(),
1706
- &offset);
1707
- }
1708
- }
1709
-
1710
-
1711
- // We scavenge new space simultaneously with sweeping. This is done in two
1712
- // passes.
1713
- // The first pass migrates all alive objects from one semispace to another or
1714
- // promotes them to old space. The forwarding address is written directly
1715
- // into the first word of the object without any encoding. If the object is
1716
- // dead, we write NULL as its forwarding address.
1717
- // The second pass updates pointers to new space in all spaces. It is possible
1718
- // to encounter pointers to dead objects during traversal of dirty regions;
1719
- // we should clear them to avoid encountering them again during the next
1720
- // iteration over dirty regions.
1721
- static void MigrateObject(Address dst,
1722
- Address src,
1723
- int size,
1724
- bool to_old_space) {
1725
- if (to_old_space) {
1726
- Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
1727
- } else {
1728
- Heap::CopyBlock(dst, src, size);
1729
- }
1730
-
1731
- Memory::Address_at(src) = dst;
1732
- }
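
MigrateObject and the comment above it describe the scavenge-style protocol used for new space: pass one copies each survivor and stores its new address directly in the first word of the old copy (NULL for a dead object), and pass two rewrites every reference through that word, clearing references to objects that died. Here is a self-contained sketch of the two passes with toy Slot objects standing in for heap words; it illustrates the protocol only and is not V8's memory layout.

// Sketch of the two-pass new-space scavenge: pass one copies survivors and
// leaves a forwarding pointer (or null) in the old slot; pass two redirects
// references through those forwarding pointers.
#include <cstdio>
#include <vector>

struct Slot {
  int payload = 0;
  bool live = false;
  Slot* forwarded = nullptr;  // written into the "old" copy by pass one
};

// Pass one: evacuate live slots from 'from' into 'to', record forwarding.
static void Evacuate(std::vector<Slot>& from, std::vector<Slot>& to) {
  for (Slot& old_slot : from) {
    if (old_slot.live) {
      to.push_back(old_slot);          // copy the survivor
      old_slot.forwarded = &to.back(); // forwarding pointer in the old copy
    } else {
      old_slot.forwarded = nullptr;    // dead: pointer readers must clear it
    }
  }
}

// Pass two: update a reference that still points into the old semispace.
static void UpdateReference(Slot** ref) {
  if (*ref == nullptr) return;
  *ref = (*ref)->forwarded;  // may become nullptr if the target died
}

int main() {
  std::vector<Slot> from_space(3), to_space;
  to_space.reserve(3);                 // keep forwarding pointers stable
  from_space[0] = {1, true};
  from_space[1] = {2, false};
  from_space[2] = {3, true};

  Slot* root_a = &from_space[0];
  Slot* root_b = &from_space[1];       // points at an object that will die

  Evacuate(from_space, to_space);
  UpdateReference(&root_a);
  UpdateReference(&root_b);

  std::printf("root_a -> %d\n", root_a ? root_a->payload : -1);  // 1
  std::printf("root_b is %s\n", root_b ? "live" : "cleared");    // cleared
  return 0;
}
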
1733
-
1734
-
1735
- class StaticPointersToNewGenUpdatingVisitor : public
1736
- StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
1737
- public:
1738
- static inline void VisitPointer(Object** p) {
1739
- if (!(*p)->IsHeapObject()) return;
1740
-
1741
- HeapObject* obj = HeapObject::cast(*p);
1742
- Address old_addr = obj->address();
1743
-
1744
- if (Heap::new_space()->Contains(obj)) {
1745
- ASSERT(Heap::InFromSpace(*p));
1746
- *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
1747
- }
1748
- }
1749
- };
1750
-
1751
-
1752
- // Visitor for updating pointers from live objects in old spaces to new space.
1753
- // It does not expect to encounter pointers to dead objects.
1754
- class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
1755
- public:
1756
- void VisitPointer(Object** p) {
1757
- StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
1758
- }
1759
-
1760
- void VisitPointers(Object** start, Object** end) {
1761
- for (Object** p = start; p < end; p++) {
1762
- StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
1763
- }
1764
- }
1765
-
1766
- void VisitCodeTarget(RelocInfo* rinfo) {
1767
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
1768
- Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1769
- VisitPointer(&target);
1770
- rinfo->set_target_address(Code::cast(target)->instruction_start());
1771
- }
1772
-
1773
- void VisitDebugTarget(RelocInfo* rinfo) {
1774
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
1775
- rinfo->IsPatchedReturnSequence()) ||
1776
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
1777
- rinfo->IsPatchedDebugBreakSlotSequence()));
1778
- Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
1779
- VisitPointer(&target);
1780
- rinfo->set_call_address(Code::cast(target)->instruction_start());
1781
- }
1782
- };
1783
-
1784
-
1785
- // Visitor for updating pointers from live objects in old spaces to new space.
1786
- // It can encounter pointers to dead objects in new space when traversing map
1787
- // space (see comment for MigrateObject).
1788
- static void UpdatePointerToNewGen(HeapObject** p) {
1789
- if (!(*p)->IsHeapObject()) return;
1790
-
1791
- Address old_addr = (*p)->address();
1792
- ASSERT(Heap::InFromSpace(*p));
1793
-
1794
- Address new_addr = Memory::Address_at(old_addr);
1795
-
1796
- if (new_addr == NULL) {
1797
- // We encountered pointer to a dead object. Clear it so we will
1798
- // not visit it again during next iteration of dirty regions.
1799
- *p = NULL;
1800
- } else {
1801
- *p = HeapObject::FromAddress(new_addr);
1802
- }
1803
- }
1804
-
1805
-
1806
- static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) {
1807
- Address old_addr = HeapObject::cast(*p)->address();
1808
- Address new_addr = Memory::Address_at(old_addr);
1809
- return String::cast(HeapObject::FromAddress(new_addr));
1810
- }
1811
-
1812
-
1813
- static bool TryPromoteObject(HeapObject* object, int object_size) {
1814
- Object* result;
1815
-
1816
- if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
1817
- MaybeObject* maybe_result =
1818
- Heap::lo_space()->AllocateRawFixedArray(object_size);
1819
- if (maybe_result->ToObject(&result)) {
1820
- HeapObject* target = HeapObject::cast(result);
1821
- MigrateObject(target->address(), object->address(), object_size, true);
1822
- MarkCompactCollector::tracer()->
1823
- increment_promoted_objects_size(object_size);
1824
- return true;
1825
- }
1826
- } else {
1827
- OldSpace* target_space = Heap::TargetSpace(object);
1828
-
1829
- ASSERT(target_space == Heap::old_pointer_space() ||
1830
- target_space == Heap::old_data_space());
1831
- MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
1832
- if (maybe_result->ToObject(&result)) {
1833
- HeapObject* target = HeapObject::cast(result);
1834
- MigrateObject(target->address(),
1835
- object->address(),
1836
- object_size,
1837
- target_space == Heap::old_pointer_space());
1838
- MarkCompactCollector::tracer()->
1839
- increment_promoted_objects_size(object_size);
1840
- return true;
1841
- }
1842
- }
1843
-
1844
- return false;
1845
- }
1846
-
1847
-
1848
- static void SweepNewSpace(NewSpace* space) {
1849
- Heap::CheckNewSpaceExpansionCriteria();
1850
-
1851
- Address from_bottom = space->bottom();
1852
- Address from_top = space->top();
1853
-
1854
- // Flip the semispaces. After flipping, to space is empty, from space has
1855
- // live objects.
1856
- space->Flip();
1857
- space->ResetAllocationInfo();
1858
-
1859
- int size = 0;
1860
- int survivors_size = 0;
1861
-
1862
- // First pass: traverse all objects in inactive semispace, remove marks,
1863
- // migrate live objects and write forwarding addresses.
1864
- for (Address current = from_bottom; current < from_top; current += size) {
1865
- HeapObject* object = HeapObject::FromAddress(current);
1866
-
1867
- if (object->IsMarked()) {
1868
- object->ClearMark();
1869
- MarkCompactCollector::tracer()->decrement_marked_count();
1870
-
1871
- size = object->Size();
1872
- survivors_size += size;
1873
-
1874
- // Aggressively promote young survivors to the old space.
1875
- if (TryPromoteObject(object, size)) {
1876
- continue;
1877
- }
1878
-
1879
- // Promotion failed. Just migrate object to another semispace.
1880
- // Allocation cannot fail at this point: semispaces are of equal size.
1881
- Object* target = space->AllocateRaw(size)->ToObjectUnchecked();
1882
-
1883
- MigrateObject(HeapObject::cast(target)->address(),
1884
- current,
1885
- size,
1886
- false);
1887
- } else {
1888
- // Process the dead object before we write a NULL into its header.
1889
- LiveObjectList::ProcessNonLive(object);
1890
-
1891
- size = object->Size();
1892
- Memory::Address_at(current) = NULL;
1893
- }
1894
- }
1895
-
1896
- // Second pass: find pointers to new space and update them.
1897
- PointersToNewGenUpdatingVisitor updating_visitor;
1898
-
1899
- // Update pointers in to space.
1900
- Address current = space->bottom();
1901
- while (current < space->top()) {
1902
- HeapObject* object = HeapObject::FromAddress(current);
1903
- current +=
1904
- StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
1905
- object);
1906
- }
1907
-
1908
- // Update roots.
1909
- Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
1910
- LiveObjectList::IterateElements(&updating_visitor);
1911
-
1912
- // Update pointers in old spaces.
1913
- Heap::IterateDirtyRegions(Heap::old_pointer_space(),
1914
- &Heap::IteratePointersInDirtyRegion,
1915
- &UpdatePointerToNewGen,
1916
- Heap::WATERMARK_SHOULD_BE_VALID);
1917
-
1918
- Heap::lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);
1919
-
1920
- // Update pointers from cells.
1921
- HeapObjectIterator cell_iterator(Heap::cell_space());
1922
- for (HeapObject* cell = cell_iterator.next();
1923
- cell != NULL;
1924
- cell = cell_iterator.next()) {
1925
- if (cell->IsJSGlobalPropertyCell()) {
1926
- Address value_address =
1927
- reinterpret_cast<Address>(cell) +
1928
- (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
1929
- updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1930
- }
1931
- }
1932
-
1933
- // Update pointer from the global contexts list.
1934
- updating_visitor.VisitPointer(Heap::global_contexts_list_address());
1935
-
1936
- // Update pointers from external string table.
1937
- Heap::UpdateNewSpaceReferencesInExternalStringTable(
1938
- &UpdateNewSpaceReferenceInExternalStringTableEntry);
1939
-
1940
- // All pointers were updated. Update auxiliary allocation info.
1941
- Heap::IncrementYoungSurvivorsCounter(survivors_size);
1942
- space->set_age_mark(space->top());
1943
-
1944
- // Update JSFunction pointers from the runtime profiler.
1945
- RuntimeProfiler::UpdateSamplesAfterScavenge();
1946
- }
1947
-
1948
-
1949
- static void SweepSpace(PagedSpace* space) {
1950
- PageIterator it(space, PageIterator::PAGES_IN_USE);
1951
-
1952
- // During sweeping of a paged space we try to find the longest sequences
1953
- // of pages without live objects and free them (instead of putting them on
1954
- // the free list).
1955
-
1956
- // Page preceding current.
1957
- Page* prev = Page::FromAddress(NULL);
1958
-
1959
- // First empty page in a sequence.
1960
- Page* first_empty_page = Page::FromAddress(NULL);
1961
-
1962
- // Page preceding first empty page.
1963
- Page* prec_first_empty_page = Page::FromAddress(NULL);
1964
-
1965
- // If the last used page of the space ends with a sequence of dead objects
1966
- // we can adjust the allocation top instead of putting this free area into
1967
- // the free list. Thus during sweeping we keep track of such areas
1968
- // and defer their deallocation until the sweeping of the next page
1969
- // is done: if one of the next pages contains live objects we have
1970
- // to put such area into the free list.
1971
- Address last_free_start = NULL;
1972
- int last_free_size = 0;
1973
-
1974
- while (it.has_next()) {
1975
- Page* p = it.next();
1976
-
1977
- bool is_previous_alive = true;
1978
- Address free_start = NULL;
1979
- HeapObject* object;
1980
-
1981
- for (Address current = p->ObjectAreaStart();
1982
- current < p->AllocationTop();
1983
- current += object->Size()) {
1984
- object = HeapObject::FromAddress(current);
1985
- if (object->IsMarked()) {
1986
- object->ClearMark();
1987
- MarkCompactCollector::tracer()->decrement_marked_count();
1988
-
1989
- if (!is_previous_alive) { // Transition from free to live.
1990
- space->DeallocateBlock(free_start,
1991
- static_cast<int>(current - free_start),
1992
- true);
1993
- is_previous_alive = true;
1994
- }
1995
- } else {
1996
- MarkCompactCollector::ReportDeleteIfNeeded(object);
1997
- if (is_previous_alive) { // Transition from live to free.
1998
- free_start = current;
1999
- is_previous_alive = false;
2000
- }
2001
- LiveObjectList::ProcessNonLive(object);
2002
- }
2003
- // The object is now unmarked for the call to Size() at the top of the
2004
- // loop.
2005
- }
2006
-
2007
- bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop())
2008
- || (!is_previous_alive && free_start == p->ObjectAreaStart());
2009
-
2010
- if (page_is_empty) {
2011
- // This page is empty. Check whether we are in the middle of a
2012
- // sequence of empty pages and start one if not.
2013
- if (!first_empty_page->is_valid()) {
2014
- first_empty_page = p;
2015
- prec_first_empty_page = prev;
2016
- }
2017
-
2018
- if (!is_previous_alive) {
2019
- // There are dead objects on this page. Update space accounting stats
2020
- // without putting anything into free list.
2021
- int size_in_bytes = static_cast<int>(p->AllocationTop() - free_start);
2022
- if (size_in_bytes > 0) {
2023
- space->DeallocateBlock(free_start, size_in_bytes, false);
2024
- }
2025
- }
2026
- } else {
2027
- // This page is not empty. Sequence of empty pages ended on the previous
2028
- // one.
2029
- if (first_empty_page->is_valid()) {
2030
- space->FreePages(prec_first_empty_page, prev);
2031
- prec_first_empty_page = first_empty_page = Page::FromAddress(NULL);
2032
- }
2033
-
2034
- // If there is a free ending area on one of the previous pages, we have to
2035
- // deallocate that area and put it on the free list.
2036
- if (last_free_size > 0) {
2037
- Page::FromAddress(last_free_start)->
2038
- SetAllocationWatermark(last_free_start);
2039
- space->DeallocateBlock(last_free_start, last_free_size, true);
2040
- last_free_start = NULL;
2041
- last_free_size = 0;
2042
- }
2043
-
2044
- // If the last region of this page was not live we remember it.
2045
- if (!is_previous_alive) {
2046
- ASSERT(last_free_size == 0);
2047
- last_free_size = static_cast<int>(p->AllocationTop() - free_start);
2048
- last_free_start = free_start;
2049
- }
2050
- }
2051
-
2052
- prev = p;
2053
- }
2054
-
2055
- // We reached the end of the space. See if we need to adjust the allocation top.
2056
- Address new_allocation_top = NULL;
2057
-
2058
- if (first_empty_page->is_valid()) {
2059
- // Last used pages in space are empty. We can move allocation top backwards
2060
- // to the beginning of first empty page.
2061
- ASSERT(prev == space->AllocationTopPage());
2062
-
2063
- new_allocation_top = first_empty_page->ObjectAreaStart();
2064
- }
2065
-
2066
- if (last_free_size > 0) {
2067
- // There was a free ending area on the previous page.
2068
- // Deallocate it without putting it into freelist and move allocation
2069
- // top to the beginning of this free area.
2070
- space->DeallocateBlock(last_free_start, last_free_size, false);
2071
- new_allocation_top = last_free_start;
2072
- }
2073
-
2074
- if (new_allocation_top != NULL) {
2075
- #ifdef DEBUG
2076
- Page* new_allocation_top_page = Page::FromAllocationTop(new_allocation_top);
2077
- if (!first_empty_page->is_valid()) {
2078
- ASSERT(new_allocation_top_page == space->AllocationTopPage());
2079
- } else if (last_free_size > 0) {
2080
- ASSERT(new_allocation_top_page == prec_first_empty_page);
2081
- } else {
2082
- ASSERT(new_allocation_top_page == first_empty_page);
2083
- }
2084
- #endif
2085
-
2086
- space->SetTop(new_allocation_top);
2087
- }
2088
- }
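
SweepSpace keeps track of two things so that it can avoid free-listing memory at the end of the space: the first page of the current run of fully-empty pages, and a trailing free area on the last page that held live objects. If the space ends in such a run, the allocation top is simply pulled back. The sketch below condenses just the trailing-empty-pages part into a small function over hypothetical PageInfo records; the integer "addresses" are stand-ins for illustration only.

// Sketch of the trailing-free-pages idea: if the space ends in a run of
// fully-empty pages, the allocation top can be pulled back to the start of
// that run instead of free-listing the tail.
#include <cstdio>
#include <vector>

struct PageInfo {
  int start;      // "address" where the page's object area begins
  bool has_live;  // page still holds at least one live object after the sweep
};

// Returns the new allocation top, or -1 if the last page still has live
// objects and the top should stay where it is.
static int NewAllocationTop(const std::vector<PageInfo>& pages) {
  int new_top = -1;
  for (const PageInfo& p : pages) {
    if (p.has_live) {
      new_top = -1;          // run of empty pages was interrupted
    } else if (new_top == -1) {
      new_top = p.start;     // first page of a (possibly trailing) empty run
    }
  }
  return new_top;
}

int main() {
  std::vector<PageInfo> pages = {
      {0, true}, {100, false}, {200, true}, {300, false}, {400, false}};
  std::printf("new allocation top: %d\n", NewAllocationTop(pages));  // 300
  return 0;
}
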
2089
-
2090
-
2091
- void MarkCompactCollector::EncodeForwardingAddresses() {
2092
- ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2093
- // Objects in the active semispace of the young generation may be
2094
- // relocated to the inactive semispace (if not promoted). Set the
2095
- // relocation info to the beginning of the inactive semispace.
2096
- Heap::new_space()->MCResetRelocationInfo();
2097
-
2098
- // Compute the forwarding pointers in each space.
2099
- EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
2100
- ReportDeleteIfNeeded>(
2101
- Heap::old_pointer_space());
2102
-
2103
- EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
2104
- IgnoreNonLiveObject>(
2105
- Heap::old_data_space());
2106
-
2107
- EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
2108
- ReportDeleteIfNeeded>(
2109
- Heap::code_space());
2110
-
2111
- EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
2112
- IgnoreNonLiveObject>(
2113
- Heap::cell_space());
2114
-
2115
-
2116
- // Compute new space next to last after the old and code spaces have been
2117
- // compacted. Objects in new space can be promoted to old or code space.
2118
- EncodeForwardingAddressesInNewSpace();
2119
-
2120
- // Compute map space last because computing forwarding addresses
2121
- // overwrites non-live objects. Objects in the other spaces rely on
2122
- // non-live map pointers to get the sizes of non-live objects.
2123
- EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
2124
- IgnoreNonLiveObject>(
2125
- Heap::map_space());
2126
-
2127
- // Write relocation info to the top page, so we can use it later. This is
2128
- // done after promoting objects from the new space so we get the correct
2129
- // allocation top.
2130
- Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
2131
- Heap::old_data_space()->MCWriteRelocationInfoToPage();
2132
- Heap::code_space()->MCWriteRelocationInfoToPage();
2133
- Heap::map_space()->MCWriteRelocationInfoToPage();
2134
- Heap::cell_space()->MCWriteRelocationInfoToPage();
2135
- }
2136
-
2137
-
2138
- class MapIterator : public HeapObjectIterator {
2139
- public:
2140
- MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { }
2141
-
2142
- explicit MapIterator(Address start)
2143
- : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { }
2144
-
2145
- private:
2146
- static int SizeCallback(HeapObject* unused) {
2147
- USE(unused);
2148
- return Map::kSize;
2149
- }
2150
- };
2151
-
2152
-
2153
- class MapCompact {
2154
- public:
2155
- explicit MapCompact(int live_maps)
2156
- : live_maps_(live_maps),
2157
- to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)),
2158
- map_to_evacuate_it_(to_evacuate_start_),
2159
- first_map_to_evacuate_(
2160
- reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
2161
- }
2162
-
2163
- void CompactMaps() {
2164
- // As we know the number of maps to evacuate beforehand,
2165
- // we stop when there are no more vacant maps.
2166
- for (Map* next_vacant_map = NextVacantMap();
2167
- next_vacant_map;
2168
- next_vacant_map = NextVacantMap()) {
2169
- EvacuateMap(next_vacant_map, NextMapToEvacuate());
2170
- }
2171
-
2172
- #ifdef DEBUG
2173
- CheckNoMapsToEvacuate();
2174
- #endif
2175
- }
2176
-
2177
- void UpdateMapPointersInRoots() {
2178
- Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG);
2179
- GlobalHandles::IterateWeakRoots(&map_updating_visitor_);
2180
- LiveObjectList::IterateElements(&map_updating_visitor_);
2181
- }
2182
-
2183
- void UpdateMapPointersInPagedSpace(PagedSpace* space) {
2184
- ASSERT(space != Heap::map_space());
2185
-
2186
- PageIterator it(space, PageIterator::PAGES_IN_USE);
2187
- while (it.has_next()) {
2188
- Page* p = it.next();
2189
- UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop());
2190
- }
2191
- }
2192
-
2193
- void UpdateMapPointersInNewSpace() {
2194
- NewSpace* space = Heap::new_space();
2195
- UpdateMapPointersInRange(space->bottom(), space->top());
2196
- }
2197
-
2198
- void UpdateMapPointersInLargeObjectSpace() {
2199
- LargeObjectIterator it(Heap::lo_space());
2200
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
2201
- UpdateMapPointersInObject(obj);
2202
- }
2203
-
2204
- void Finish() {
2205
- Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
2206
- }
2207
-
2208
- private:
2209
- int live_maps_;
2210
- Address to_evacuate_start_;
2211
- MapIterator vacant_map_it_;
2212
- MapIterator map_to_evacuate_it_;
2213
- Map* first_map_to_evacuate_;
2214
-
2215
- // Helper class for updating map pointers in HeapObjects.
2216
- class MapUpdatingVisitor: public ObjectVisitor {
2217
- public:
2218
- void VisitPointer(Object** p) {
2219
- UpdateMapPointer(p);
2220
- }
2221
-
2222
- void VisitPointers(Object** start, Object** end) {
2223
- for (Object** p = start; p < end; p++) UpdateMapPointer(p);
2224
- }
2225
-
2226
- private:
2227
- void UpdateMapPointer(Object** p) {
2228
- if (!(*p)->IsHeapObject()) return;
2229
- HeapObject* old_map = reinterpret_cast<HeapObject*>(*p);
2230
-
2231
- // Moved maps are tagged with an overflowed map word. They are the only
2232
- // objects whose map word is overflowed, as marking is already complete.
2233
- MapWord map_word = old_map->map_word();
2234
- if (!map_word.IsOverflowed()) return;
2235
-
2236
- *p = GetForwardedMap(map_word);
2237
- }
2238
- };
2239
-
2240
- static MapUpdatingVisitor map_updating_visitor_;
2241
-
2242
- static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
2243
- while (true) {
2244
- HeapObject* next = it->next();
2245
- ASSERT(next != NULL);
2246
- if (next == last)
2247
- return NULL;
2248
- ASSERT(!next->IsOverflowed());
2249
- ASSERT(!next->IsMarked());
2250
- ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next));
2251
- if (next->IsMap() == live)
2252
- return reinterpret_cast<Map*>(next);
2253
- }
2254
- }
2255
-
2256
- Map* NextVacantMap() {
2257
- Map* map = NextMap(&vacant_map_it_, first_map_to_evacuate_, false);
2258
- ASSERT(map == NULL || FreeListNode::IsFreeListNode(map));
2259
- return map;
2260
- }
2261
-
2262
- Map* NextMapToEvacuate() {
2263
- Map* map = NextMap(&map_to_evacuate_it_, NULL, true);
2264
- ASSERT(map != NULL);
2265
- ASSERT(map->IsMap());
2266
- return map;
2267
- }
2268
-
2269
- static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) {
2270
- ASSERT(FreeListNode::IsFreeListNode(vacant_map));
2271
- ASSERT(map_to_evacuate->IsMap());
2272
-
2273
- ASSERT(Map::kSize % 4 == 0);
2274
-
2275
- Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(vacant_map->address(),
2276
- map_to_evacuate->address(),
2277
- Map::kSize);
2278
-
2279
- ASSERT(vacant_map->IsMap()); // Due to memcpy above.
2280
-
2281
- MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
2282
- forwarding_map_word.SetOverflow();
2283
- map_to_evacuate->set_map_word(forwarding_map_word);
2284
-
2285
- ASSERT(map_to_evacuate->map_word().IsOverflowed());
2286
- ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map);
2287
- }
2288
-
2289
- static Map* GetForwardedMap(MapWord map_word) {
2290
- ASSERT(map_word.IsOverflowed());
2291
- map_word.ClearOverflow();
2292
- Map* new_map = map_word.ToMap();
2293
- ASSERT_MAP_ALIGNED(new_map->address());
2294
- return new_map;
2295
- }
2296
-
2297
- static int UpdateMapPointersInObject(HeapObject* obj) {
2298
- ASSERT(!obj->IsMarked());
2299
- Map* map = obj->map();
2300
- ASSERT(Heap::map_space()->Contains(map));
2301
- MapWord map_word = map->map_word();
2302
- ASSERT(!map_word.IsMarked());
2303
- if (map_word.IsOverflowed()) {
2304
- Map* new_map = GetForwardedMap(map_word);
2305
- ASSERT(Heap::map_space()->Contains(new_map));
2306
- obj->set_map(new_map);
2307
-
2308
- #ifdef DEBUG
2309
- if (FLAG_gc_verbose) {
2310
- PrintF("update %p : %p -> %p\n",
2311
- obj->address(),
2312
- reinterpret_cast<void*>(map),
2313
- reinterpret_cast<void*>(new_map));
2314
- }
2315
- #endif
2316
- }
2317
-
2318
- int size = obj->SizeFromMap(map);
2319
- obj->IterateBody(map->instance_type(), size, &map_updating_visitor_);
2320
- return size;
2321
- }
2322
-
2323
- static void UpdateMapPointersInRange(Address start, Address end) {
2324
- HeapObject* object;
2325
- int size;
2326
- for (Address current = start; current < end; current += size) {
2327
- object = HeapObject::FromAddress(current);
2328
- size = UpdateMapPointersInObject(object);
2329
- ASSERT(size > 0);
2330
- }
2331
- }
2332
-
2333
- #ifdef DEBUG
2334
- void CheckNoMapsToEvacuate() {
2335
- if (!FLAG_enable_slow_asserts)
2336
- return;
2337
-
2338
- for (HeapObject* obj = map_to_evacuate_it_.next();
2339
- obj != NULL; obj = map_to_evacuate_it_.next())
2340
- ASSERT(FreeListNode::IsFreeListNode(obj));
2341
- }
2342
- #endif
2343
- };
2344
-
2345
- MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_;
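
MapCompact above packs the fixed-size maps toward the front of the map space by copying each map that lives past the post-compaction top into a vacant free-list slot below it, and tagging the old copy's map word as overflowed so that pointer updates can find the new location. A standalone sketch of the same idea over an array of fixed-size records follows; the vacant flag and forwarded_to index are simplified stand-ins for V8's free-list nodes and overflowed map words.

// Sketch of compacting fixed-size records: copy records from the tail into
// vacant slots near the front, leaving a forwarding index behind.
#include <cstdio>
#include <vector>

struct Record {
  bool vacant = true;
  int value = 0;
  int forwarded_to = -1;  // index of the slot this record was evacuated to
};

// Compact: scan vacant slots from the front and evacuees from the back.
static int Compact(std::vector<Record>& slots) {
  int live = 0;
  for (const Record& r : slots) live += r.vacant ? 0 : 1;

  int vacant = 0;
  int evacuee = static_cast<int>(slots.size()) - 1;
  while (true) {
    while (vacant < live && !slots[vacant].vacant) ++vacant;
    while (evacuee >= live && slots[evacuee].vacant) --evacuee;
    if (vacant >= live || evacuee < live) break;
    slots[vacant] = slots[evacuee];        // copy into the vacant slot
    slots[evacuee].vacant = true;
    slots[evacuee].forwarded_to = vacant;  // forwarding mark, playing the
                                           // role of the overflowed map word
  }
  return live;  // everything at index >= live can now be released
}

// Update a stale index through the forwarding mark, if any.
static int Forwarded(const std::vector<Record>& slots, int index) {
  return slots[index].forwarded_to >= 0 ? slots[index].forwarded_to : index;
}

int main() {
  std::vector<Record> slots(6);
  slots[0] = {false, 10};
  slots[2] = {false, 20};
  slots[5] = {false, 30};

  int live = Compact(slots);
  std::printf("live records: %d\n", live);                           // 3
  std::printf("record once at 5 now at %d\n", Forwarded(slots, 5));  // 1
  return 0;
}
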
2346
-
2347
-
2348
- void MarkCompactCollector::SweepSpaces() {
2349
- GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
2350
-
2351
- ASSERT(state_ == SWEEP_SPACES);
2352
- ASSERT(!IsCompacting());
2353
- // Noncompacting collections simply sweep the spaces to clear the mark
2354
- // bits and free the nonlive blocks (for old and map spaces). We sweep
2355
- // the map space last because freeing non-live maps overwrites them and
2356
- // the other spaces rely on possibly non-live maps to get the sizes for
2357
- // non-live objects.
2358
- SweepSpace(Heap::old_pointer_space());
2359
- SweepSpace(Heap::old_data_space());
2360
- SweepSpace(Heap::code_space());
2361
- SweepSpace(Heap::cell_space());
2362
- { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
2363
- SweepNewSpace(Heap::new_space());
2364
- }
2365
- SweepSpace(Heap::map_space());
2366
-
2367
- Heap::IterateDirtyRegions(Heap::map_space(),
2368
- &Heap::IteratePointersInDirtyMapsRegion,
2369
- &UpdatePointerToNewGen,
2370
- Heap::WATERMARK_SHOULD_BE_VALID);
2371
-
2372
- intptr_t live_maps_size = Heap::map_space()->Size();
2373
- int live_maps = static_cast<int>(live_maps_size / Map::kSize);
2374
- ASSERT(live_map_objects_size_ == live_maps_size);
2375
-
2376
- if (Heap::map_space()->NeedsCompaction(live_maps)) {
2377
- MapCompact map_compact(live_maps);
2378
-
2379
- map_compact.CompactMaps();
2380
- map_compact.UpdateMapPointersInRoots();
2381
-
2382
- PagedSpaces spaces;
2383
- for (PagedSpace* space = spaces.next();
2384
- space != NULL; space = spaces.next()) {
2385
- if (space == Heap::map_space()) continue;
2386
- map_compact.UpdateMapPointersInPagedSpace(space);
2387
- }
2388
- map_compact.UpdateMapPointersInNewSpace();
2389
- map_compact.UpdateMapPointersInLargeObjectSpace();
2390
-
2391
- map_compact.Finish();
2392
- }
2393
- }
2394
-
2395
-
2396
- // Iterate the live objects in a range of addresses (e.g., a page or a
2397
- // semispace), skipping over the free regions that were encoded during the
2398
- // forwarding phase via the distinguished single-word and multi-word free
2399
- // encodings. The callback function is used to get the size of each live
2400
- // object.
2401
- int MarkCompactCollector::IterateLiveObjectsInRange(
2402
- Address start,
2403
- Address end,
2404
- HeapObjectCallback size_func) {
2405
- int live_objects_size = 0;
2406
- Address current = start;
2407
- while (current < end) {
2408
- uint32_t encoded_map = Memory::uint32_at(current);
2409
- if (encoded_map == kSingleFreeEncoding) {
2410
- current += kPointerSize;
2411
- } else if (encoded_map == kMultiFreeEncoding) {
2412
- current += Memory::int_at(current + kIntSize);
2413
- } else {
2414
- int size = size_func(HeapObject::FromAddress(current));
2415
- current += size;
2416
- live_objects_size += size;
2417
- }
2418
- }
2419
- return live_objects_size;
2420
- }
2421
-
2422
-
2423
- int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
2424
- HeapObjectCallback size_f) {
2425
- ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2426
- return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
2427
- }
2428
-
2429
-
2430
- int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
2431
- HeapObjectCallback size_f) {
2432
- ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2433
- int total = 0;
2434
- PageIterator it(space, PageIterator::PAGES_IN_USE);
2435
- while (it.has_next()) {
2436
- Page* p = it.next();
2437
- total += IterateLiveObjectsInRange(p->ObjectAreaStart(),
2438
- p->AllocationTop(),
2439
- size_f);
2440
- }
2441
- return total;
2442
- }
2443
-
2444
-
2445
- // -------------------------------------------------------------------------
2446
- // Phase 3: Update pointers
2447
-
2448
- // Helper class for updating pointers in HeapObjects.
2449
- class UpdatingVisitor: public ObjectVisitor {
2450
- public:
2451
- void VisitPointer(Object** p) {
2452
- UpdatePointer(p);
2453
- }
2454
-
2455
- void VisitPointers(Object** start, Object** end) {
2456
- // Update all HeapObject pointers in [start, end).
2457
- for (Object** p = start; p < end; p++) UpdatePointer(p);
2458
- }
2459
-
2460
- void VisitCodeTarget(RelocInfo* rinfo) {
2461
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
2462
- Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
2463
- VisitPointer(&target);
2464
- rinfo->set_target_address(
2465
- reinterpret_cast<Code*>(target)->instruction_start());
2466
- }
2467
-
2468
- void VisitDebugTarget(RelocInfo* rinfo) {
2469
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
2470
- rinfo->IsPatchedReturnSequence()) ||
2471
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
2472
- rinfo->IsPatchedDebugBreakSlotSequence()));
2473
- Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
2474
- VisitPointer(&target);
2475
- rinfo->set_call_address(
2476
- reinterpret_cast<Code*>(target)->instruction_start());
2477
- }
2478
-
2479
- private:
2480
- void UpdatePointer(Object** p) {
2481
- if (!(*p)->IsHeapObject()) return;
2482
-
2483
- HeapObject* obj = HeapObject::cast(*p);
2484
- Address old_addr = obj->address();
2485
- Address new_addr;
2486
- ASSERT(!Heap::InFromSpace(obj));
2487
-
2488
- if (Heap::new_space()->Contains(obj)) {
2489
- Address forwarding_pointer_addr =
2490
- Heap::new_space()->FromSpaceLow() +
2491
- Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
2492
- new_addr = Memory::Address_at(forwarding_pointer_addr);
2493
-
2494
- #ifdef DEBUG
2495
- ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
2496
- Heap::old_data_space()->Contains(new_addr) ||
2497
- Heap::new_space()->FromSpaceContains(new_addr) ||
2498
- Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));
2499
-
2500
- if (Heap::new_space()->FromSpaceContains(new_addr)) {
2501
- ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
2502
- Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
2503
- }
2504
- #endif
2505
-
2506
- } else if (Heap::lo_space()->Contains(obj)) {
2507
- // Don't move objects in the large object space.
2508
- return;
2509
-
2510
- } else {
2511
- #ifdef DEBUG
2512
- PagedSpaces spaces;
2513
- PagedSpace* original_space = spaces.next();
2514
- while (original_space != NULL) {
2515
- if (original_space->Contains(obj)) break;
2516
- original_space = spaces.next();
2517
- }
2518
- ASSERT(original_space != NULL);
2519
- #endif
2520
- new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
2521
- ASSERT(original_space->Contains(new_addr));
2522
- ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
2523
- original_space->MCSpaceOffsetForAddress(old_addr));
2524
- }
2525
-
2526
- *p = HeapObject::FromAddress(new_addr);
2527
-
2528
- #ifdef DEBUG
2529
- if (FLAG_gc_verbose) {
2530
- PrintF("update %p : %p -> %p\n",
2531
- reinterpret_cast<Address>(p), old_addr, new_addr);
2532
- }
2533
- #endif
2534
- }
2535
- };
2536
-
2537
-
2538
- void MarkCompactCollector::UpdatePointers() {
2539
- #ifdef DEBUG
2540
- ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
2541
- state_ = UPDATE_POINTERS;
2542
- #endif
2543
- UpdatingVisitor updating_visitor;
2544
- RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor);
2545
- Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
2546
- GlobalHandles::IterateWeakRoots(&updating_visitor);
2547
-
2548
- // Update the pointer to the head of the weak list of global contexts.
2549
- updating_visitor.VisitPointer(&Heap::global_contexts_list_);
2550
-
2551
- LiveObjectList::IterateElements(&updating_visitor);
2552
-
2553
- int live_maps_size = IterateLiveObjects(Heap::map_space(),
2554
- &UpdatePointersInOldObject);
2555
- int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
2556
- &UpdatePointersInOldObject);
2557
- int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
2558
- &UpdatePointersInOldObject);
2559
- int live_codes_size = IterateLiveObjects(Heap::code_space(),
2560
- &UpdatePointersInOldObject);
2561
- int live_cells_size = IterateLiveObjects(Heap::cell_space(),
2562
- &UpdatePointersInOldObject);
2563
- int live_news_size = IterateLiveObjects(Heap::new_space(),
2564
- &UpdatePointersInNewObject);
2565
-
2566
- // Large objects do not move, so the map word can be updated directly.
2567
- LargeObjectIterator it(Heap::lo_space());
2568
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2569
- UpdatePointersInNewObject(obj);
2570
- }
2571
-
2572
- USE(live_maps_size);
2573
- USE(live_pointer_olds_size);
2574
- USE(live_data_olds_size);
2575
- USE(live_codes_size);
2576
- USE(live_cells_size);
2577
- USE(live_news_size);
2578
- ASSERT(live_maps_size == live_map_objects_size_);
2579
- ASSERT(live_data_olds_size == live_old_data_objects_size_);
2580
- ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
2581
- ASSERT(live_codes_size == live_code_objects_size_);
2582
- ASSERT(live_cells_size == live_cell_objects_size_);
2583
- ASSERT(live_news_size == live_young_objects_size_);
2584
- }
2585
-
2586
-
2587
- int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
2588
- // Keep old map pointers
2589
- Map* old_map = obj->map();
2590
- ASSERT(old_map->IsHeapObject());
2591
-
2592
- Address forwarded = GetForwardingAddressInOldSpace(old_map);
2593
-
2594
- ASSERT(Heap::map_space()->Contains(old_map));
2595
- ASSERT(Heap::map_space()->Contains(forwarded));
2596
- #ifdef DEBUG
2597
- if (FLAG_gc_verbose) {
2598
- PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
2599
- forwarded);
2600
- }
2601
- #endif
2602
- // Update the map pointer.
2603
- obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));
2604
-
2605
- // We have to compute the object size relying on the old map because
2606
- // map objects are not relocated yet.
2607
- int obj_size = obj->SizeFromMap(old_map);
2608
-
2609
- // Update pointers in the object body.
2610
- UpdatingVisitor updating_visitor;
2611
- obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
2612
- return obj_size;
2613
- }
2614
-
2615
-
2616
- int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
2617
- // Decode the map pointer.
2618
- MapWord encoding = obj->map_word();
2619
- Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
2620
- ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
2621
-
2622
- // At this point, the first word of map_addr is also encoded, so we cannot
2623
- // cast it to Map* using Map::cast.
2624
- Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
2625
- int obj_size = obj->SizeFromMap(map);
2626
- InstanceType type = map->instance_type();
2627
-
2628
- // Update map pointer.
2629
- Address new_map_addr = GetForwardingAddressInOldSpace(map);
2630
- int offset = encoding.DecodeOffset();
2631
- obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));
2632
-
2633
- #ifdef DEBUG
2634
- if (FLAG_gc_verbose) {
2635
- PrintF("update %p : %p -> %p\n", obj->address(),
2636
- map_addr, new_map_addr);
2637
- }
2638
- #endif
2639
-
2640
- // Update pointers in the object body.
2641
- UpdatingVisitor updating_visitor;
2642
- obj->IterateBody(type, obj_size, &updating_visitor);
2643
- return obj_size;
2644
- }
2645
-
2646
-
2647
- Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
-   // Object should either in old or map space.
-   MapWord encoding = obj->map_word();
-
-   // Offset to the first live object's forwarding address.
-   int offset = encoding.DecodeOffset();
-   Address obj_addr = obj->address();
-
-   // Find the first live object's forwarding address.
-   Page* p = Page::FromAddress(obj_addr);
-   Address first_forwarded = p->mc_first_forwarded;
-
-   // Page start address of forwarded address.
-   Page* forwarded_page = Page::FromAddress(first_forwarded);
-   int forwarded_offset = forwarded_page->Offset(first_forwarded);
-
-   // Find end of allocation in the page of first_forwarded.
-   int mc_top_offset = forwarded_page->AllocationWatermarkOffset();
-
-   // Check if current object's forward pointer is in the same page
-   // as the first live object's forwarding pointer
-   if (forwarded_offset + offset < mc_top_offset) {
-     // In the same page.
-     return first_forwarded + offset;
-   }
-
-   // Must be in the next page, NOTE: this may cross chunks.
-   Page* next_page = forwarded_page->next_page();
-   ASSERT(next_page->is_valid());
-
-   offset -= (mc_top_offset - forwarded_offset);
-   offset += Page::kObjectStartOffset;
-
-   ASSERT_PAGE_OFFSET(offset);
-   ASSERT(next_page->OffsetToAddress(offset) < next_page->AllocationTop());
-
-   return next_page->OffsetToAddress(offset);
- }
-
-
- // -------------------------------------------------------------------------
- // Phase 4: Relocate objects
-
- void MarkCompactCollector::RelocateObjects() {
- #ifdef DEBUG
-   ASSERT(state_ == UPDATE_POINTERS);
-   state_ = RELOCATE_OBJECTS;
- #endif
-   // Relocates objects, always relocate map objects first. Relocating
-   // objects in other space relies on map objects to get object size.
-   int live_maps_size = IterateLiveObjects(Heap::map_space(),
-       &RelocateMapObject);
-   int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
-       &RelocateOldPointerObject);
-   int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
-       &RelocateOldDataObject);
-   int live_codes_size = IterateLiveObjects(Heap::code_space(),
-       &RelocateCodeObject);
-   int live_cells_size = IterateLiveObjects(Heap::cell_space(),
-       &RelocateCellObject);
-   int live_news_size = IterateLiveObjects(Heap::new_space(),
-       &RelocateNewObject);
-
-   USE(live_maps_size);
-   USE(live_pointer_olds_size);
-   USE(live_data_olds_size);
-   USE(live_codes_size);
-   USE(live_cells_size);
-   USE(live_news_size);
-   ASSERT(live_maps_size == live_map_objects_size_);
-   ASSERT(live_data_olds_size == live_old_data_objects_size_);
-   ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
-   ASSERT(live_codes_size == live_code_objects_size_);
-   ASSERT(live_cells_size == live_cell_objects_size_);
-   ASSERT(live_news_size == live_young_objects_size_);
-
-   // Flip from and to spaces
-   Heap::new_space()->Flip();
-
-   Heap::new_space()->MCCommitRelocationInfo();
-
-   // Set age_mark to bottom in to space
-   Address mark = Heap::new_space()->bottom();
-   Heap::new_space()->set_age_mark(mark);
-
-   PagedSpaces spaces;
-   for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
-     space->MCCommitRelocationInfo();
-
-   Heap::CheckNewSpaceExpansionCriteria();
-   Heap::IncrementYoungSurvivorsCounter(live_news_size);
- }
-
-
- int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
-   // Recover map pointer.
-   MapWord encoding = obj->map_word();
-   Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-   ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
-
-   // Get forwarding address before resetting map pointer
-   Address new_addr = GetForwardingAddressInOldSpace(obj);
-
-   // Reset map pointer. The meta map object may not be copied yet so
-   // Map::cast does not yet work.
-   obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));
-
-   Address old_addr = obj->address();
-
-   if (new_addr != old_addr) {
-     // Move contents.
-     Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-         old_addr,
-         Map::kSize);
-   }
-
- #ifdef DEBUG
-   if (FLAG_gc_verbose) {
-     PrintF("relocate %p -> %p\n", old_addr, new_addr);
-   }
- #endif
-
-   return Map::kSize;
- }
-
-
- static inline int RestoreMap(HeapObject* obj,
-     PagedSpace* space,
-     Address new_addr,
-     Address map_addr) {
-   // This must be a non-map object, and the function relies on the
-   // assumption that the Map space is compacted before the other paged
-   // spaces (see RelocateObjects).
-
-   // Reset map pointer.
-   obj->set_map(Map::cast(HeapObject::FromAddress(map_addr)));
-
-   int obj_size = obj->Size();
-   ASSERT_OBJECT_SIZE(obj_size);
-
-   ASSERT(space->MCSpaceOffsetForAddress(new_addr) <=
-       space->MCSpaceOffsetForAddress(obj->address()));
-
- #ifdef DEBUG
-   if (FLAG_gc_verbose) {
-     PrintF("relocate %p -> %p\n", obj->address(), new_addr);
-   }
- #endif
-
-   return obj_size;
- }
-
-
- int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
-     PagedSpace* space) {
-   // Recover map pointer.
-   MapWord encoding = obj->map_word();
-   Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-   ASSERT(Heap::map_space()->Contains(map_addr));
-
-   // Get forwarding address before resetting map pointer.
-   Address new_addr = GetForwardingAddressInOldSpace(obj);
-
-   // Reset the map pointer.
-   int obj_size = RestoreMap(obj, space, new_addr, map_addr);
-
-   Address old_addr = obj->address();
-
-   if (new_addr != old_addr) {
-     // Move contents.
-     if (space == Heap::old_data_space()) {
-       Heap::MoveBlock(new_addr, old_addr, obj_size);
-     } else {
-       Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-           old_addr,
-           obj_size);
-     }
-   }
-
-   ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());
-
-   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
-   if (copied_to->IsSharedFunctionInfo()) {
-     PROFILE(SFIMoveEvent(old_addr, new_addr));
-   }
-   HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
-
-   return obj_size;
- }
-
-
- int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
-   return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
- }
-
-
- int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
-   return RelocateOldNonCodeObject(obj, Heap::old_data_space());
- }
-
-
- int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
-   return RelocateOldNonCodeObject(obj, Heap::cell_space());
- }
-
-
- int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
-   // Recover map pointer.
-   MapWord encoding = obj->map_word();
-   Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
-   ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
-
-   // Get forwarding address before resetting map pointer
-   Address new_addr = GetForwardingAddressInOldSpace(obj);
-
-   // Reset the map pointer.
-   int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
-
-   Address old_addr = obj->address();
-
-   if (new_addr != old_addr) {
-     // Move contents.
-     Heap::MoveBlock(new_addr, old_addr, obj_size);
-   }
-
-   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
-   if (copied_to->IsCode()) {
-     // May also update inline cache target.
-     Code::cast(copied_to)->Relocate(new_addr - old_addr);
-     // Notify the logger that compiled code has moved.
-     PROFILE(CodeMoveEvent(old_addr, new_addr));
-   }
-   HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
-
-   return obj_size;
- }
-
-
- int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
-   int obj_size = obj->Size();
-
-   // Get forwarding address
-   Address old_addr = obj->address();
-   int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
-
-   Address new_addr =
-       Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);
-
- #ifdef DEBUG
-   if (Heap::new_space()->FromSpaceContains(new_addr)) {
-     ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
-         Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
-   } else {
-     ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
-         Heap::TargetSpace(obj) == Heap::old_data_space());
-   }
- #endif
-
-   // New and old addresses cannot overlap.
-   if (Heap::InNewSpace(HeapObject::FromAddress(new_addr))) {
-     Heap::CopyBlock(new_addr, old_addr, obj_size);
-   } else {
-     Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
-         old_addr,
-         obj_size);
-   }
-
- #ifdef DEBUG
-   if (FLAG_gc_verbose) {
-     PrintF("relocate %p -> %p\n", old_addr, new_addr);
-   }
- #endif
-
-   HeapObject* copied_to = HeapObject::FromAddress(new_addr);
-   if (copied_to->IsSharedFunctionInfo()) {
-     PROFILE(SFIMoveEvent(old_addr, new_addr));
-   }
-   HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
-
-   return obj_size;
- }
-
-
- void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
- #ifdef ENABLE_GDB_JIT_INTERFACE
-   if (obj->IsCode()) {
-     GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
-   }
- #endif
- #ifdef ENABLE_LOGGING_AND_PROFILING
-   if (obj->IsCode()) {
-     PROFILE(CodeDeleteEvent(obj->address()));
-   }
- #endif
- }
-
-
- int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
-   MapWord map_word = obj->map_word();
-   map_word.ClearMark();
-   return obj->SizeFromMap(map_word.ToMap());
- }
-
-
- void MarkCompactCollector::Initialize() {
-   StaticPointersToNewGenUpdatingVisitor::Initialize();
-   StaticMarkingVisitor::Initialize();
- }
-
-
- } } // namespace v8::internal