therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer has been flagged as potentially problematic; see the advisory details for this release.

Files changed (877):
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -0,0 +1,178 @@
1
+ # Copyright 2006-2009 the V8 project authors. All rights reserved.
2
+ # Redistribution and use in source and binary forms, with or without
3
+ # modification, are permitted provided that the following conditions are
4
+ # met:
5
+ #
6
+ # * Redistributions of source code must retain the above copyright
7
+ # notice, this list of conditions and the following disclaimer.
8
+ # * Redistributions in binary form must reproduce the above
9
+ # copyright notice, this list of conditions and the following
10
+ # disclaimer in the documentation and/or other materials provided
11
+ # with the distribution.
12
+ # * Neither the name of Google Inc. nor the names of its
13
+ # contributors may be used to endorse or promote products derived
14
+ # from this software without specific prior written permission.
15
+ #
16
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ # Dictionary that is passed as defines for js2c.py.
29
+ # Used for defines that must be defined for all native js files.
30
+
31
+ const NONE = 0;
32
+ const READ_ONLY = 1;
33
+ const DONT_ENUM = 2;
34
+ const DONT_DELETE = 4;
35
+
36
+ # Constants used for getter and setter operations.
37
+ const GETTER = 0;
38
+ const SETTER = 1;
39
+
40
+ # These definitions must match the index of the properties in objects.h.
41
+ const kApiTagOffset = 0;
42
+ const kApiPropertyListOffset = 1;
43
+ const kApiSerialNumberOffset = 2;
44
+ const kApiConstructorOffset = 2;
45
+ const kApiPrototypeTemplateOffset = 5;
46
+ const kApiParentTemplateOffset = 6;
47
+
48
+ const NO_HINT = 0;
49
+ const NUMBER_HINT = 1;
50
+ const STRING_HINT = 2;
51
+
52
+ const kFunctionTag = 0;
53
+ const kNewObjectTag = 1;
54
+
55
+ # For date.js.
56
+ const HoursPerDay = 24;
57
+ const MinutesPerHour = 60;
58
+ const SecondsPerMinute = 60;
59
+ const msPerSecond = 1000;
60
+ const msPerMinute = 60000;
61
+ const msPerHour = 3600000;
62
+ const msPerDay = 86400000;
63
+ const msPerMonth = 2592000000;
64
+
65
+ # For apinatives.js
66
+ const kUninitialized = -1;
67
+
68
+ # Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1).
69
+ const kInvalidDate = 'Invalid Date';
70
+ const kDayZeroInJulianDay = 2440588;
71
+ const kMonthMask = 0x1e0;
72
+ const kDayMask = 0x01f;
73
+ const kYearShift = 9;
74
+ const kMonthShift = 5;
75
+
76
+ # Limits for parts of the date, so that we support all the dates that
77
+ # ECMA 262 - 15.9.1.1 requires us to, but at the same time be sure that
78
+ # the date (days since 1970) is in SMI range.
79
+ const kMinYear = -1000000;
80
+ const kMaxYear = 1000000;
81
+ const kMinMonth = -10000000;
82
+ const kMaxMonth = 10000000;
83
+ const kMinDate = -100000000;
84
+ const kMaxDate = 100000000;
85
+
86
+ # Native cache ids.
87
+ const STRING_TO_REGEXP_CACHE_ID = 0;
88
+
89
+ # Type query macros.
90
+ #
91
+ # Note: We have special support for typeof(foo) === 'bar' in the compiler.
92
+ # It will *not* generate a runtime typeof call for the most important
93
+ # values of 'bar'.
94
+ macro IS_NULL(arg) = (arg === null);
95
+ macro IS_NULL_OR_UNDEFINED(arg) = (arg == null);
96
+ macro IS_UNDEFINED(arg) = (typeof(arg) === 'undefined');
97
+ macro IS_NUMBER(arg) = (typeof(arg) === 'number');
98
+ macro IS_STRING(arg) = (typeof(arg) === 'string');
99
+ macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean');
100
+ macro IS_OBJECT(arg) = (%_IsObject(arg));
101
+ macro IS_ARRAY(arg) = (%_IsArray(arg));
102
+ macro IS_FUNCTION(arg) = (%_IsFunction(arg));
103
+ macro IS_REGEXP(arg) = (%_IsRegExp(arg));
104
+ macro IS_DATE(arg) = (%_ClassOf(arg) === 'Date');
105
+ macro IS_NUMBER_WRAPPER(arg) = (%_ClassOf(arg) === 'Number');
106
+ macro IS_STRING_WRAPPER(arg) = (%_ClassOf(arg) === 'String');
107
+ macro IS_BOOLEAN_WRAPPER(arg) = (%_ClassOf(arg) === 'Boolean');
108
+ macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
109
+ macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
110
+ macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments');
111
+ macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
112
+ macro IS_UNDETECTABLE(arg) = (%_IsUndetectableObject(arg));
113
+ macro FLOOR(arg) = $floor(arg);
114
+
115
+ # Macro for ECMAScript 5 queries of the type:
116
+ # "Type(O) is object."
117
+ # This is the same as being either a function or an object in V8 terminology.
118
+ # In addition, an undetectable object is also included by this.
119
+ macro IS_SPEC_OBJECT(arg) = (%_IsSpecObject(arg));
120
+
121
+ # Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
122
+ macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg));
123
+ macro NUMBER_IS_FINITE(arg) = (%_IsSmi(%IS_VAR(arg)) || arg - arg == 0);
124
+ macro TO_INTEGER(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : %NumberToInteger(ToNumber(arg)));
125
+ macro TO_INTEGER_MAP_MINUS_ZERO(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : %NumberToIntegerMapMinusZero(ToNumber(arg)));
126
+ macro TO_INT32(arg) = (%_IsSmi(%IS_VAR(arg)) ? arg : (arg >> 0));
127
+ macro TO_UINT32(arg) = (arg >>> 0);
128
+ macro TO_STRING_INLINE(arg) = (IS_STRING(%IS_VAR(arg)) ? arg : NonStringToString(arg));
129
+ macro TO_NUMBER_INLINE(arg) = (IS_NUMBER(%IS_VAR(arg)) ? arg : NonNumberToNumber(arg));
130
+
131
+
132
+ # Macros implemented in Python.
133
+ python macro CHAR_CODE(str) = ord(str[1]);
134
+
135
+ # Constants used on an array to implement the properties of the RegExp object.
136
+ const REGEXP_NUMBER_OF_CAPTURES = 0;
137
+ const REGEXP_FIRST_CAPTURE = 3;
138
+
139
+ # We can't put macros in macros so we use constants here.
140
+ # REGEXP_NUMBER_OF_CAPTURES
141
+ macro NUMBER_OF_CAPTURES(array) = ((array)[0]);
142
+
143
+ # Limit according to ECMA 262 15.9.1.1
144
+ const MAX_TIME_MS = 8640000000000000;
145
+ # Limit which is MAX_TIME_MS + msPerMonth.
146
+ const MAX_TIME_BEFORE_UTC = 8640002592000000;
147
+
148
+ # Gets the value of a Date object. If arg is not a Date object
149
+ # a type error is thrown.
150
+ macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
151
+ macro DAY(time) = ($floor(time / 86400000));
152
+ macro NAN_OR_DATE_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : DateFromTime(time));
153
+ macro HOUR_FROM_TIME(time) = (Modulo($floor(time / 3600000), 24));
154
+ macro MIN_FROM_TIME(time) = (Modulo($floor(time / 60000), 60));
155
+ macro NAN_OR_MIN_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MIN_FROM_TIME(time));
156
+ macro SEC_FROM_TIME(time) = (Modulo($floor(time / 1000), 60));
157
+ macro NAN_OR_SEC_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : SEC_FROM_TIME(time));
158
+ macro MS_FROM_TIME(time) = (Modulo(time, 1000));
159
+ macro NAN_OR_MS_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MS_FROM_TIME(time));
160
+
161
+ # Last input and last subject of regexp matches.
162
+ macro LAST_SUBJECT(array) = ((array)[1]);
163
+ macro LAST_INPUT(array) = ((array)[2]);
164
+
165
+ # REGEXP_FIRST_CAPTURE
166
+ macro CAPTURE(index) = (3 + (index));
167
+ const CAPTURE0 = 3;
168
+ const CAPTURE1 = 4;
169
+
170
+ # PropertyDescriptor return value indices - must match
171
+ # PropertyDescriptorIndices in runtime.cc.
172
+ const IS_ACCESSOR_INDEX = 0;
173
+ const VALUE_INDEX = 1;
174
+ const GETTER_INDEX = 2;
175
+ const SETTER_INDEX = 3;
176
+ const WRITABLE_INDEX = 4;
177
+ const ENUMERABLE_INDEX = 5;
178
+ const CONFIGURABLE_INDEX = 6;
@@ -0,0 +1,2957 @@
1
+ // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #include "compilation-cache.h"
31
+ #include "execution.h"
32
+ #include "heap-profiler.h"
33
+ #include "gdb-jit.h"
34
+ #include "global-handles.h"
35
+ #include "ic-inl.h"
36
+ #include "liveobjectlist-inl.h"
37
+ #include "mark-compact.h"
38
+ #include "objects-visiting.h"
39
+ #include "stub-cache.h"
40
+
41
+ namespace v8 {
42
+ namespace internal {
43
+
44
+ // -------------------------------------------------------------------------
45
+ // MarkCompactCollector
46
+
47
+ bool MarkCompactCollector::force_compaction_ = false;
48
+ bool MarkCompactCollector::compacting_collection_ = false;
49
+ bool MarkCompactCollector::compact_on_next_gc_ = false;
50
+
51
+ int MarkCompactCollector::previous_marked_count_ = 0;
52
+ GCTracer* MarkCompactCollector::tracer_ = NULL;
53
+
54
+
55
+ #ifdef DEBUG
56
+ MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
57
+
58
+ // Counters used for debugging the marking phase of mark-compact or mark-sweep
59
+ // collection.
60
+ int MarkCompactCollector::live_bytes_ = 0;
61
+ int MarkCompactCollector::live_young_objects_size_ = 0;
62
+ int MarkCompactCollector::live_old_data_objects_size_ = 0;
63
+ int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
64
+ int MarkCompactCollector::live_code_objects_size_ = 0;
65
+ int MarkCompactCollector::live_map_objects_size_ = 0;
66
+ int MarkCompactCollector::live_cell_objects_size_ = 0;
67
+ int MarkCompactCollector::live_lo_objects_size_ = 0;
68
+ #endif
69
+
70
+
71
+ void MarkCompactCollector::CollectGarbage() {
72
+ // Make sure that Prepare() has been called. The individual steps below will
73
+ // update the state as they proceed.
74
+ ASSERT(state_ == PREPARE_GC);
75
+
76
+ // Prepare has selected whether to compact the old generation or not.
77
+ // Tell the tracer.
78
+ if (IsCompacting()) tracer_->set_is_compacting();
79
+
80
+ MarkLiveObjects();
81
+
82
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
83
+
84
+ SweepLargeObjectSpace();
85
+
86
+ if (IsCompacting()) {
87
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT);
88
+ EncodeForwardingAddresses();
89
+
90
+ Heap::MarkMapPointersAsEncoded(true);
91
+ UpdatePointers();
92
+ Heap::MarkMapPointersAsEncoded(false);
93
+ PcToCodeCache::FlushPcToCodeCache();
94
+
95
+ RelocateObjects();
96
+ } else {
97
+ SweepSpaces();
98
+ PcToCodeCache::FlushPcToCodeCache();
99
+ }
100
+
101
+ Finish();
102
+
103
+ // Save the count of marked objects remaining after the collection and
104
+ // null out the GC tracer.
105
+ previous_marked_count_ = tracer_->marked_count();
106
+ ASSERT(previous_marked_count_ == 0);
107
+ tracer_ = NULL;
108
+ }
109
+
110
+
111
+ void MarkCompactCollector::Prepare(GCTracer* tracer) {
112
+ // Rather than passing the tracer around we stash it in a static member
113
+ // variable.
114
+ tracer_ = tracer;
115
+
116
+ #ifdef DEBUG
117
+ ASSERT(state_ == IDLE);
118
+ state_ = PREPARE_GC;
119
+ #endif
120
+ ASSERT(!FLAG_always_compact || !FLAG_never_compact);
121
+
122
+ compacting_collection_ =
123
+ FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
124
+ compact_on_next_gc_ = false;
125
+
126
+ if (FLAG_never_compact) compacting_collection_ = false;
127
+ if (!Heap::map_space()->MapPointersEncodable())
128
+ compacting_collection_ = false;
129
+ if (FLAG_collect_maps) CreateBackPointers();
130
+ #ifdef ENABLE_GDB_JIT_INTERFACE
131
+ if (FLAG_gdbjit) {
132
+ // If GDBJIT interface is active disable compaction.
133
+ compacting_collection_ = false;
134
+ }
135
+ #endif
136
+
137
+ PagedSpaces spaces;
138
+ for (PagedSpace* space = spaces.next();
139
+ space != NULL; space = spaces.next()) {
140
+ space->PrepareForMarkCompact(compacting_collection_);
141
+ }
142
+
143
+ #ifdef DEBUG
144
+ live_bytes_ = 0;
145
+ live_young_objects_size_ = 0;
146
+ live_old_pointer_objects_size_ = 0;
147
+ live_old_data_objects_size_ = 0;
148
+ live_code_objects_size_ = 0;
149
+ live_map_objects_size_ = 0;
150
+ live_cell_objects_size_ = 0;
151
+ live_lo_objects_size_ = 0;
152
+ #endif
153
+ }
154
+
155
+
156
+ void MarkCompactCollector::Finish() {
157
+ #ifdef DEBUG
158
+ ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
159
+ state_ = IDLE;
160
+ #endif
161
+ // The stub cache is not traversed during GC; clear the cache to
162
+ // force lazy re-initialization of it. This must be done after the
163
+ // GC, because it relies on the new address of certain old space
164
+ // objects (empty string, illegal builtin).
165
+ StubCache::Clear();
166
+
167
+ ExternalStringTable::CleanUp();
168
+
169
+ // If we've just compacted old space there's no reason to check the
170
+ // fragmentation limit. Just return.
171
+ if (HasCompacted()) return;
172
+
173
+ // We compact the old generation on the next GC if it has gotten too
174
+ // fragmented (ie, we could recover an expected amount of space by
175
+ // reclaiming the waste and free list blocks).
176
+ static const int kFragmentationLimit = 15; // Percent.
177
+ static const int kFragmentationAllowed = 1 * MB; // Absolute.
178
+ intptr_t old_gen_recoverable = 0;
179
+ intptr_t old_gen_used = 0;
180
+
181
+ OldSpaces spaces;
182
+ for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
183
+ old_gen_recoverable += space->Waste() + space->AvailableFree();
184
+ old_gen_used += space->Size();
185
+ }
186
+
187
+ int old_gen_fragmentation =
188
+ static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
189
+ if (old_gen_fragmentation > kFragmentationLimit &&
190
+ old_gen_recoverable > kFragmentationAllowed) {
191
+ compact_on_next_gc_ = true;
192
+ }
193
+ }
194
+
195
+
196
+ // -------------------------------------------------------------------------
197
+ // Phase 1: tracing and marking live objects.
198
+ // before: all objects are in normal state.
199
+ // after: a live object's map pointer is marked as '00'.
200
+
201
+ // Marking all live objects in the heap as part of mark-sweep or mark-compact
202
+ // collection. Before marking, all objects are in their normal state. After
203
+ // marking, live objects' map pointers are marked indicating that the object
204
+ // has been found reachable.
205
+ //
206
+ // The marking algorithm is a (mostly) depth-first (because of possible stack
207
+ // overflow) traversal of the graph of objects reachable from the roots. It
208
+ // uses an explicit stack of pointers rather than recursion. The young
209
+ // generation's inactive ('from') space is used as a marking stack. The
210
+ // objects in the marking stack are the ones that have been reached and marked
211
+ // but their children have not yet been visited.
212
+ //
213
+ // The marking stack can overflow during traversal. In that case, we set an
214
+ // overflow flag. When the overflow flag is set, we continue marking objects
215
+ // reachable from the objects on the marking stack, but no longer push them on
216
+ // the marking stack. Instead, we mark them as both marked and overflowed.
217
+ // When the stack is in the overflowed state, objects marked as overflowed
218
+ // have been reached and marked but their children have not been visited yet.
219
+ // After emptying the marking stack, we clear the overflow flag and traverse
220
+ // the heap looking for objects marked as overflowed, push them on the stack,
221
+ // and continue with marking. This process repeats until all reachable
222
+ // objects have been marked.
223
+
224
+ static MarkingStack marking_stack;
225
+
226
+ class FlushCode : public AllStatic {
227
+ public:
228
+ static void AddCandidate(SharedFunctionInfo* shared_info) {
229
+ SetNextCandidate(shared_info, shared_function_info_candidates_head_);
230
+ shared_function_info_candidates_head_ = shared_info;
231
+ }
232
+
233
+
234
+ static void AddCandidate(JSFunction* function) {
235
+ ASSERT(function->unchecked_code() ==
236
+ function->unchecked_shared()->unchecked_code());
237
+
238
+ SetNextCandidate(function, jsfunction_candidates_head_);
239
+ jsfunction_candidates_head_ = function;
240
+ }
241
+
242
+
243
+ static void ProcessCandidates() {
244
+ ProcessSharedFunctionInfoCandidates();
245
+ ProcessJSFunctionCandidates();
246
+ }
247
+
248
+ private:
249
+ static void ProcessJSFunctionCandidates() {
250
+ Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
251
+
252
+ JSFunction* candidate = jsfunction_candidates_head_;
253
+ JSFunction* next_candidate;
254
+ while (candidate != NULL) {
255
+ next_candidate = GetNextCandidate(candidate);
256
+
257
+ SharedFunctionInfo* shared = candidate->unchecked_shared();
258
+
259
+ Code* code = shared->unchecked_code();
260
+ if (!code->IsMarked()) {
261
+ shared->set_code(lazy_compile);
262
+ candidate->set_code(lazy_compile);
263
+ } else {
264
+ candidate->set_code(shared->unchecked_code());
265
+ }
266
+
267
+ candidate = next_candidate;
268
+ }
269
+
270
+ jsfunction_candidates_head_ = NULL;
271
+ }
272
+
273
+
274
+ static void ProcessSharedFunctionInfoCandidates() {
275
+ Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile);
276
+
277
+ SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
278
+ SharedFunctionInfo* next_candidate;
279
+ while (candidate != NULL) {
280
+ next_candidate = GetNextCandidate(candidate);
281
+ SetNextCandidate(candidate, NULL);
282
+
283
+ Code* code = candidate->unchecked_code();
284
+ if (!code->IsMarked()) {
285
+ candidate->set_code(lazy_compile);
286
+ }
287
+
288
+ candidate = next_candidate;
289
+ }
290
+
291
+ shared_function_info_candidates_head_ = NULL;
292
+ }
293
+
294
+
295
+ static JSFunction** GetNextCandidateField(JSFunction* candidate) {
296
+ return reinterpret_cast<JSFunction**>(
297
+ candidate->address() + JSFunction::kCodeEntryOffset);
298
+ }
299
+
300
+
301
+ static JSFunction* GetNextCandidate(JSFunction* candidate) {
302
+ return *GetNextCandidateField(candidate);
303
+ }
304
+
305
+
306
+ static void SetNextCandidate(JSFunction* candidate,
307
+ JSFunction* next_candidate) {
308
+ *GetNextCandidateField(candidate) = next_candidate;
309
+ }
310
+
311
+
312
+ STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart);
313
+
314
+
315
+ static SharedFunctionInfo** GetNextCandidateField(
316
+ SharedFunctionInfo* candidate) {
317
+ Code* code = candidate->unchecked_code();
318
+ return reinterpret_cast<SharedFunctionInfo**>(
319
+ code->address() + Code::kHeaderPaddingStart);
320
+ }
321
+
322
+
323
+ static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
324
+ return *GetNextCandidateField(candidate);
325
+ }
326
+
327
+
328
+ static void SetNextCandidate(SharedFunctionInfo* candidate,
329
+ SharedFunctionInfo* next_candidate) {
330
+ *GetNextCandidateField(candidate) = next_candidate;
331
+ }
332
+
333
+ static JSFunction* jsfunction_candidates_head_;
334
+
335
+ static SharedFunctionInfo* shared_function_info_candidates_head_;
336
+ };
337
+
338
+ JSFunction* FlushCode::jsfunction_candidates_head_ = NULL;
339
+
340
+ SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL;
341
+
342
+ static inline HeapObject* ShortCircuitConsString(Object** p) {
343
+ // Optimization: If the heap object pointed to by p is a non-symbol
344
+ // cons string whose right substring is Heap::empty_string, update
345
+ // it in place to its left substring. Return the updated value.
346
+ //
347
+ // Here we assume that if we change *p, we replace it with a heap object
348
+ // (ie, the left substring of a cons string is always a heap object).
349
+ //
350
+ // The check performed is:
351
+ // object->IsConsString() && !object->IsSymbol() &&
352
+ // (ConsString::cast(object)->second() == Heap::empty_string())
353
+ // except the maps for the object and its possible substrings might be
354
+ // marked.
355
+ HeapObject* object = HeapObject::cast(*p);
356
+ MapWord map_word = object->map_word();
357
+ map_word.ClearMark();
358
+ InstanceType type = map_word.ToMap()->instance_type();
359
+ if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
360
+
361
+ Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
362
+ if (second != Heap::raw_unchecked_empty_string()) {
363
+ return object;
364
+ }
365
+
366
+ // Since we don't have the object's start, it is impossible to update the
367
+ // page dirty marks. Therefore, we only replace the string with its left
368
+ // substring when page dirty marks do not change.
369
+ Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
370
+ if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;
371
+
372
+ *p = first;
373
+ return HeapObject::cast(first);
374
+ }
375
+
376
+
377
+ class StaticMarkingVisitor : public StaticVisitorBase {
378
+ public:
379
+ static inline void IterateBody(Map* map, HeapObject* obj) {
380
+ table_.GetVisitor(map)(map, obj);
381
+ }
382
+
383
+ static void EnableCodeFlushing(bool enabled) {
384
+ if (enabled) {
385
+ table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode);
386
+ table_.Register(kVisitSharedFunctionInfo,
387
+ &VisitSharedFunctionInfoAndFlushCode);
388
+
389
+ } else {
390
+ table_.Register(kVisitJSFunction, &VisitJSFunction);
391
+ table_.Register(kVisitSharedFunctionInfo,
392
+ &VisitSharedFunctionInfoGeneric);
393
+ }
394
+ }
395
+
396
+ static void Initialize() {
397
+ table_.Register(kVisitShortcutCandidate,
398
+ &FixedBodyVisitor<StaticMarkingVisitor,
399
+ ConsString::BodyDescriptor,
400
+ void>::Visit);
401
+
402
+ table_.Register(kVisitConsString,
403
+ &FixedBodyVisitor<StaticMarkingVisitor,
404
+ ConsString::BodyDescriptor,
405
+ void>::Visit);
406
+
407
+
408
+ table_.Register(kVisitFixedArray,
409
+ &FlexibleBodyVisitor<StaticMarkingVisitor,
410
+ FixedArray::BodyDescriptor,
411
+ void>::Visit);
412
+
413
+ table_.Register(kVisitGlobalContext,
414
+ &FixedBodyVisitor<StaticMarkingVisitor,
415
+ Context::MarkCompactBodyDescriptor,
416
+ void>::Visit);
417
+
418
+ table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
419
+ table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
420
+ table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
421
+
422
+ table_.Register(kVisitOddball,
423
+ &FixedBodyVisitor<StaticMarkingVisitor,
424
+ Oddball::BodyDescriptor,
425
+ void>::Visit);
426
+ table_.Register(kVisitMap,
427
+ &FixedBodyVisitor<StaticMarkingVisitor,
428
+ Map::BodyDescriptor,
429
+ void>::Visit);
430
+
431
+ table_.Register(kVisitCode, &VisitCode);
432
+
433
+ table_.Register(kVisitSharedFunctionInfo,
434
+ &VisitSharedFunctionInfoAndFlushCode);
435
+
436
+ table_.Register(kVisitJSFunction,
437
+ &VisitJSFunctionAndFlushCode);
438
+
439
+ table_.Register(kVisitPropertyCell,
440
+ &FixedBodyVisitor<StaticMarkingVisitor,
441
+ JSGlobalPropertyCell::BodyDescriptor,
442
+ void>::Visit);
443
+
444
+ table_.RegisterSpecializations<DataObjectVisitor,
445
+ kVisitDataObject,
446
+ kVisitDataObjectGeneric>();
447
+
448
+ table_.RegisterSpecializations<JSObjectVisitor,
449
+ kVisitJSObject,
450
+ kVisitJSObjectGeneric>();
451
+
452
+ table_.RegisterSpecializations<StructObjectVisitor,
453
+ kVisitStruct,
454
+ kVisitStructGeneric>();
455
+ }
456
+
457
+ INLINE(static void VisitPointer(Object** p)) {
458
+ MarkObjectByPointer(p);
459
+ }
460
+
461
+ INLINE(static void VisitPointers(Object** start, Object** end)) {
462
+ // Mark all objects pointed to in [start, end).
463
+ const int kMinRangeForMarkingRecursion = 64;
464
+ if (end - start >= kMinRangeForMarkingRecursion) {
465
+ if (VisitUnmarkedObjects(start, end)) return;
466
+ // We are close to a stack overflow, so just mark the objects.
467
+ }
468
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
469
+ }
470
+
471
+ static inline void VisitCodeTarget(RelocInfo* rinfo) {
472
+ ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
473
+ Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
474
+ if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
475
+ IC::Clear(rinfo->pc());
476
+ // Please note targets for cleared inline cached do not have to be
477
+ // marked since they are contained in Heap::non_monomorphic_cache().
478
+ } else {
479
+ MarkCompactCollector::MarkObject(code);
480
+ }
481
+ }
482
+
483
+ static void VisitGlobalPropertyCell(RelocInfo* rinfo) {
484
+ ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
485
+ Object* cell = rinfo->target_cell();
486
+ Object* old_cell = cell;
487
+ VisitPointer(&cell);
488
+ if (cell != old_cell) {
489
+ rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
490
+ }
491
+ }
492
+
493
+ static inline void VisitDebugTarget(RelocInfo* rinfo) {
494
+ ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
495
+ rinfo->IsPatchedReturnSequence()) ||
496
+ (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
497
+ rinfo->IsPatchedDebugBreakSlotSequence()));
498
+ HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
499
+ MarkCompactCollector::MarkObject(code);
500
+ }
501
+
502
+ // Mark object pointed to by p.
503
+ INLINE(static void MarkObjectByPointer(Object** p)) {
504
+ if (!(*p)->IsHeapObject()) return;
505
+ HeapObject* object = ShortCircuitConsString(p);
506
+ MarkCompactCollector::MarkObject(object);
507
+ }
508
+
509
+ // Visit an unmarked object.
510
+ static inline void VisitUnmarkedObject(HeapObject* obj) {
511
+ #ifdef DEBUG
512
+ ASSERT(Heap::Contains(obj));
513
+ ASSERT(!obj->IsMarked());
514
+ #endif
515
+ Map* map = obj->map();
516
+ MarkCompactCollector::SetMark(obj);
517
+ // Mark the map pointer and the body.
518
+ MarkCompactCollector::MarkObject(map);
519
+ IterateBody(map, obj);
520
+ }
521
+
522
+ // Visit all unmarked objects pointed to by [start, end).
523
+ // Returns false if the operation fails (lack of stack space).
524
+ static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
525
+ // Return false is we are close to the stack limit.
526
+ StackLimitCheck check;
527
+ if (check.HasOverflowed()) return false;
528
+
529
+ // Visit the unmarked objects.
530
+ for (Object** p = start; p < end; p++) {
531
+ if (!(*p)->IsHeapObject()) continue;
532
+ HeapObject* obj = HeapObject::cast(*p);
533
+ if (obj->IsMarked()) continue;
534
+ VisitUnmarkedObject(obj);
535
+ }
536
+ return true;
537
+ }
538
+
539
+ static inline void VisitExternalReference(Address* p) { }
540
+ static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
541
+
542
+ private:
543
+ class DataObjectVisitor {
544
+ public:
545
+ template<int size>
546
+ static void VisitSpecialized(Map* map, HeapObject* object) {
547
+ }
548
+
549
+ static void Visit(Map* map, HeapObject* object) {
550
+ }
551
+ };
552
+
553
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
554
+ JSObject::BodyDescriptor,
555
+ void> JSObjectVisitor;
556
+
557
+ typedef FlexibleBodyVisitor<StaticMarkingVisitor,
558
+ StructBodyDescriptor,
559
+ void> StructObjectVisitor;
560
+
561
+ static void VisitCode(Map* map, HeapObject* object) {
562
+ reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
563
+ }
564
+
565
+ // Code flushing support.
566
+
567
+ // How many collections newly compiled code object will survive before being
568
+ // flushed.
569
+ static const int kCodeAgeThreshold = 5;
570
+
571
+ inline static bool HasSourceCode(SharedFunctionInfo* info) {
572
+ Object* undefined = Heap::raw_unchecked_undefined_value();
573
+ return (info->script() != undefined) &&
574
+ (reinterpret_cast<Script*>(info->script())->source() != undefined);
575
+ }
576
+
577
+
578
+ inline static bool IsCompiled(JSFunction* function) {
579
+ return
580
+ function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
581
+ }
582
+
583
+
584
+ inline static bool IsCompiled(SharedFunctionInfo* function) {
585
+ return
586
+ function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
587
+ }
588
+
589
+ inline static bool IsFlushable(JSFunction* function) {
590
+ SharedFunctionInfo* shared_info = function->unchecked_shared();
591
+
592
+ // Code is either on stack, in compilation cache or referenced
593
+ // by optimized version of function.
594
+ if (function->unchecked_code()->IsMarked()) {
595
+ shared_info->set_code_age(0);
596
+ return false;
597
+ }
598
+
599
+ // We do not flush code for optimized functions.
600
+ if (function->code() != shared_info->unchecked_code()) {
601
+ return false;
602
+ }
603
+
604
+ return IsFlushable(shared_info);
605
+ }
606
+
607
+ inline static bool IsFlushable(SharedFunctionInfo* shared_info) {
608
+ // Code is either on stack, in compilation cache or referenced
609
+ // by optimized version of function.
610
+ if (shared_info->unchecked_code()->IsMarked()) {
611
+ shared_info->set_code_age(0);
612
+ return false;
613
+ }
614
+
615
+ // The function must be compiled and have the source code available,
616
+ // to be able to recompile it in case we need the function again.
617
+ if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) {
618
+ return false;
619
+ }
620
+
621
+ // We never flush code for Api functions.
622
+ Object* function_data = shared_info->function_data();
623
+ if (function_data->IsHeapObject() &&
624
+ (SafeMap(function_data)->instance_type() ==
625
+ FUNCTION_TEMPLATE_INFO_TYPE)) {
626
+ return false;
627
+ }
628
+
629
+ // Only flush code for functions.
630
+ if (shared_info->code()->kind() != Code::FUNCTION) return false;
631
+
632
+ // Function must be lazy compilable.
633
+ if (!shared_info->allows_lazy_compilation()) return false;
634
+
635
+ // If this is a full script wrapped in a function we do no flush the code.
636
+ if (shared_info->is_toplevel()) return false;
637
+
638
+ // Age this shared function info.
639
+ if (shared_info->code_age() < kCodeAgeThreshold) {
640
+ shared_info->set_code_age(shared_info->code_age() + 1);
641
+ return false;
642
+ }
643
+
644
+ return true;
645
+ }
646
+
647
+
648
+ static bool FlushCodeForFunction(JSFunction* function) {
649
+ if (!IsFlushable(function)) return false;
650
+
651
+ // This function's code looks flushable. But we have to postpone the
652
+ // decision until we see all functions that point to the same
653
+ // SharedFunctionInfo because some of them might be optimized.
654
+ // That would make the nonoptimized version of the code nonflushable,
655
+ // because it is required for bailing out from optimized code.
656
+ FlushCode::AddCandidate(function);
657
+ return true;
658
+ }
659
+
660
+
661
+ static inline Map* SafeMap(Object* obj) {
662
+ MapWord map_word = HeapObject::cast(obj)->map_word();
663
+ map_word.ClearMark();
664
+ map_word.ClearOverflow();
665
+ return map_word.ToMap();
666
+ }
667
+
668
+
669
+ static inline bool IsJSBuiltinsObject(Object* obj) {
670
+ return obj->IsHeapObject() &&
671
+ (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE);
672
+ }
673
+
674
+
675
+ static inline bool IsValidNotBuiltinContext(Object* ctx) {
676
+ if (!ctx->IsHeapObject()) return false;
677
+
678
+ Map* map = SafeMap(ctx);
679
+ if (!(map == Heap::raw_unchecked_context_map() ||
680
+ map == Heap::raw_unchecked_catch_context_map() ||
681
+ map == Heap::raw_unchecked_global_context_map())) {
682
+ return false;
683
+ }
684
+
685
+ Context* context = reinterpret_cast<Context*>(ctx);
686
+
687
+ if (IsJSBuiltinsObject(context->global())) {
688
+ return false;
689
+ }
690
+
691
+ return true;
692
+ }
693
+
694
+
695
+ static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) {
696
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
697
+
698
+ if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
699
+
700
+ FixedBodyVisitor<StaticMarkingVisitor,
701
+ SharedFunctionInfo::BodyDescriptor,
702
+ void>::Visit(map, object);
703
+ }
704
+
705
+
706
+ static void VisitSharedFunctionInfoAndFlushCode(Map* map,
707
+ HeapObject* object) {
708
+ VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false);
709
+ }
710
+
711
+
712
+ static void VisitSharedFunctionInfoAndFlushCodeGeneric(
713
+ Map* map, HeapObject* object, bool known_flush_code_candidate) {
714
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
715
+
716
+ if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
717
+
718
+ if (!known_flush_code_candidate) {
719
+ known_flush_code_candidate = IsFlushable(shared);
720
+ if (known_flush_code_candidate) FlushCode::AddCandidate(shared);
721
+ }
722
+
723
+ VisitSharedFunctionInfoFields(object, known_flush_code_candidate);
724
+ }
725
+
726
+
727
+ static void VisitCodeEntry(Address entry_address) {
728
+ Object* code = Code::GetObjectFromEntryAddress(entry_address);
729
+ Object* old_code = code;
730
+ VisitPointer(&code);
731
+ if (code != old_code) {
732
+ Memory::Address_at(entry_address) =
733
+ reinterpret_cast<Code*>(code)->entry();
734
+ }
735
+ }
736
+
737
+
738
+ static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
739
+ JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
740
+ // The function must have a valid context and not be a builtin.
741
+ bool flush_code_candidate = false;
742
+ if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
743
+ flush_code_candidate = FlushCodeForFunction(jsfunction);
744
+ }
745
+
746
+ if (!flush_code_candidate) {
747
+ MarkCompactCollector::MarkObject(
748
+ jsfunction->unchecked_shared()->unchecked_code());
749
+
750
+ if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
751
+ // For optimized functions we should retain both non-optimized version
752
+ // of it's code and non-optimized version of all inlined functions.
753
+ // This is required to support bailing out from inlined code.
754
+ DeoptimizationInputData* data =
755
+ reinterpret_cast<DeoptimizationInputData*>(
756
+ jsfunction->unchecked_code()->unchecked_deoptimization_data());
757
+
758
+ FixedArray* literals = data->UncheckedLiteralArray();
759
+
760
+ for (int i = 0, count = data->InlinedFunctionCount()->value();
761
+ i < count;
762
+ i++) {
763
+ JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
764
+ MarkCompactCollector::MarkObject(
765
+ inlined->unchecked_shared()->unchecked_code());
766
+ }
767
+ }
768
+ }
769
+
770
+ VisitJSFunctionFields(map,
771
+ reinterpret_cast<JSFunction*>(object),
772
+ flush_code_candidate);
773
+ }
774
+
775
+
776
+ static void VisitJSFunction(Map* map, HeapObject* object) {
777
+ VisitJSFunctionFields(map,
778
+ reinterpret_cast<JSFunction*>(object),
779
+ false);
780
+ }
781
+
782
+
783
+ #define SLOT_ADDR(obj, offset) \
784
+ reinterpret_cast<Object**>((obj)->address() + offset)
785
+
786
+
787
+ static inline void VisitJSFunctionFields(Map* map,
788
+ JSFunction* object,
789
+ bool flush_code_candidate) {
790
+ VisitPointers(SLOT_ADDR(object, JSFunction::kPropertiesOffset),
791
+ SLOT_ADDR(object, JSFunction::kCodeEntryOffset));
792
+
793
+ if (!flush_code_candidate) {
794
+ VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset);
795
+ } else {
796
+ // Don't visit code object.
797
+
798
+ // Visit shared function info to avoid double checking of it's
799
+ // flushability.
800
+ SharedFunctionInfo* shared_info = object->unchecked_shared();
801
+ if (!shared_info->IsMarked()) {
802
+ Map* shared_info_map = shared_info->map();
803
+ MarkCompactCollector::SetMark(shared_info);
804
+ MarkCompactCollector::MarkObject(shared_info_map);
805
+ VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
806
+ shared_info,
807
+ true);
808
+ }
809
+ }
810
+
811
+ VisitPointers(SLOT_ADDR(object,
812
+ JSFunction::kCodeEntryOffset + kPointerSize),
813
+ SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));
814
+
815
+ // Don't visit the next function list field as it is a weak reference.
816
+ }
817
+
818
+
819
+ static void VisitSharedFunctionInfoFields(HeapObject* object,
820
+ bool flush_code_candidate) {
821
+ VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));
822
+
823
+ if (!flush_code_candidate) {
824
+ VisitPointer(SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
825
+ }
826
+
827
+ VisitPointers(SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
828
+ SLOT_ADDR(object, SharedFunctionInfo::kSize));
829
+ }
830
+
831
+ #undef SLOT_ADDR
832
+
833
+ typedef void (*Callback)(Map* map, HeapObject* object);
834
+
835
+ static VisitorDispatchTable<Callback> table_;
836
+ };
837
+
838
+
839
+ VisitorDispatchTable<StaticMarkingVisitor::Callback>
840
+ StaticMarkingVisitor::table_;
841
+
842
+
843
+ class MarkingVisitor : public ObjectVisitor {
844
+ public:
845
+ void VisitPointer(Object** p) {
846
+ StaticMarkingVisitor::VisitPointer(p);
847
+ }
848
+
849
+ void VisitPointers(Object** start, Object** end) {
850
+ StaticMarkingVisitor::VisitPointers(start, end);
851
+ }
852
+
853
+ void VisitCodeTarget(RelocInfo* rinfo) {
854
+ StaticMarkingVisitor::VisitCodeTarget(rinfo);
855
+ }
856
+
857
+ void VisitGlobalPropertyCell(RelocInfo* rinfo) {
858
+ StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo);
859
+ }
860
+
861
+ void VisitDebugTarget(RelocInfo* rinfo) {
862
+ StaticMarkingVisitor::VisitDebugTarget(rinfo);
863
+ }
864
+ };
865
+
866
+
867
+ class CodeMarkingVisitor : public ThreadVisitor {
868
+ public:
869
+ void VisitThread(ThreadLocalTop* top) {
870
+ for (StackFrameIterator it(top); !it.done(); it.Advance()) {
871
+ MarkCompactCollector::MarkObject(it.frame()->unchecked_code());
872
+ }
873
+ }
874
+ };
875
+
876
+
877
+ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
878
+ public:
879
+ void VisitPointers(Object** start, Object** end) {
880
+ for (Object** p = start; p < end; p++) VisitPointer(p);
881
+ }
882
+
883
+ void VisitPointer(Object** slot) {
884
+ Object* obj = *slot;
885
+ if (obj->IsSharedFunctionInfo()) {
886
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
887
+ MarkCompactCollector::MarkObject(shared->unchecked_code());
888
+ MarkCompactCollector::MarkObject(shared);
889
+ }
890
+ }
891
+ };
892
+
893
+
894
+ void MarkCompactCollector::PrepareForCodeFlushing() {
895
+ if (!FLAG_flush_code) {
896
+ StaticMarkingVisitor::EnableCodeFlushing(false);
897
+ return;
898
+ }
899
+
900
+ #ifdef ENABLE_DEBUGGER_SUPPORT
901
+ if (Debug::IsLoaded() || Debug::has_break_points()) {
902
+ StaticMarkingVisitor::EnableCodeFlushing(false);
903
+ return;
904
+ }
905
+ #endif
906
+ StaticMarkingVisitor::EnableCodeFlushing(true);
907
+
908
+ // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
909
+ // relies on it being marked before any other descriptor array.
910
+ MarkObject(Heap::raw_unchecked_empty_descriptor_array());
911
+
912
+ // Make sure we are not referencing the code from the stack.
913
+ for (StackFrameIterator it; !it.done(); it.Advance()) {
914
+ MarkObject(it.frame()->unchecked_code());
915
+ }
916
+
917
+ // Iterate the archived stacks in all threads to check if
918
+ // the code is referenced.
919
+ CodeMarkingVisitor code_marking_visitor;
920
+ ThreadManager::IterateArchivedThreads(&code_marking_visitor);
921
+
922
+ SharedFunctionInfoMarkingVisitor visitor;
923
+ CompilationCache::IterateFunctions(&visitor);
924
+ HandleScopeImplementer::Iterate(&visitor);
925
+
926
+ ProcessMarkingStack();
927
+ }
928
+
929
+
930
+ // Visitor class for marking heap roots.
931
+ class RootMarkingVisitor : public ObjectVisitor {
932
+ public:
933
+ void VisitPointer(Object** p) {
934
+ MarkObjectByPointer(p);
935
+ }
936
+
937
+ void VisitPointers(Object** start, Object** end) {
938
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
939
+ }
940
+
941
+ private:
942
+ void MarkObjectByPointer(Object** p) {
943
+ if (!(*p)->IsHeapObject()) return;
944
+
945
+ // Replace flat cons strings in place.
946
+ HeapObject* object = ShortCircuitConsString(p);
947
+ if (object->IsMarked()) return;
948
+
949
+ Map* map = object->map();
950
+ // Mark the object.
951
+ MarkCompactCollector::SetMark(object);
952
+
953
+ // Mark the map pointer and body, and push them on the marking stack.
954
+ MarkCompactCollector::MarkObject(map);
955
+ StaticMarkingVisitor::IterateBody(map, object);
956
+
957
+ // Mark all the objects reachable from the map and body. May leave
958
+ // overflowed objects in the heap.
959
+ MarkCompactCollector::EmptyMarkingStack();
960
+ }
961
+ };
962
+
963
+
964
+ // Helper class for pruning the symbol table.
965
+ class SymbolTableCleaner : public ObjectVisitor {
966
+ public:
967
+ SymbolTableCleaner() : pointers_removed_(0) { }
968
+
969
+ virtual void VisitPointers(Object** start, Object** end) {
970
+ // Visit all HeapObject pointers in [start, end).
971
+ for (Object** p = start; p < end; p++) {
972
+ if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) {
973
+ // Check if the symbol being pruned is an external symbol. We need to
974
+ // delete the associated external data as this symbol is going away.
975
+
976
+ // Since no objects have yet been moved we can safely access the map of
977
+ // the object.
978
+ if ((*p)->IsExternalString()) {
979
+ Heap::FinalizeExternalString(String::cast(*p));
980
+ }
981
+ // Set the entry to null_value (as deleted).
982
+ *p = Heap::raw_unchecked_null_value();
983
+ pointers_removed_++;
984
+ }
985
+ }
986
+ }
987
+
988
+ int PointersRemoved() {
989
+ return pointers_removed_;
990
+ }
991
+ private:
992
+ int pointers_removed_;
993
+ };
994
+
995
+
996
+ // Implementation of WeakObjectRetainer for mark compact GCs. All marked objects
997
+ // are retained.
998
+ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
999
+ public:
1000
+ virtual Object* RetainAs(Object* object) {
1001
+ MapWord first_word = HeapObject::cast(object)->map_word();
1002
+ if (first_word.IsMarked()) {
1003
+ return object;
1004
+ } else {
1005
+ return NULL;
1006
+ }
1007
+ }
1008
+ };
1009
+
1010
+
1011
+ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
1012
+ ASSERT(!object->IsMarked());
1013
+ ASSERT(Heap::Contains(object));
1014
+ if (object->IsMap()) {
1015
+ Map* map = Map::cast(object);
1016
+ if (FLAG_cleanup_caches_in_maps_at_gc) {
1017
+ map->ClearCodeCache();
1018
+ }
1019
+ SetMark(map);
1020
+ if (FLAG_collect_maps &&
1021
+ map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1022
+ map->instance_type() <= JS_FUNCTION_TYPE) {
1023
+ MarkMapContents(map);
1024
+ } else {
1025
+ marking_stack.Push(map);
1026
+ }
1027
+ } else {
1028
+ SetMark(object);
1029
+ marking_stack.Push(object);
1030
+ }
1031
+ }
1032
+
1033
+
1034
+ void MarkCompactCollector::MarkMapContents(Map* map) {
1035
+ MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
1036
+ *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
1037
+
1038
+ // Mark the Object* fields of the Map.
1039
+ // Since the descriptor array has been marked already, it is fine
1040
+ // that one of these fields contains a pointer to it.
1041
+ Object** start_slot = HeapObject::RawField(map,
1042
+ Map::kPointerFieldsBeginOffset);
1043
+
1044
+ Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
1045
+
1046
+ StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
1047
+ }
1048
+
1049
+
1050
+ void MarkCompactCollector::MarkDescriptorArray(
1051
+ DescriptorArray* descriptors) {
1052
+ if (descriptors->IsMarked()) return;
1053
+ // Empty descriptor array is marked as a root before any maps are marked.
1054
+ ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
1055
+ SetMark(descriptors);
1056
+
1057
+ FixedArray* contents = reinterpret_cast<FixedArray*>(
1058
+ descriptors->get(DescriptorArray::kContentArrayIndex));
1059
+ ASSERT(contents->IsHeapObject());
1060
+ ASSERT(!contents->IsMarked());
1061
+ ASSERT(contents->IsFixedArray());
1062
+ ASSERT(contents->length() >= 2);
1063
+ SetMark(contents);
1064
+ // Contents contains (value, details) pairs. If the details say that
1065
+ // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or
1066
+ // NULL_DESCRIPTOR, we don't mark the value as live. Only for
1067
+ // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a
1068
+ // Map*).
1069
+ for (int i = 0; i < contents->length(); i += 2) {
1070
+ // If the pair (value, details) at index i, i+1 is not
1071
+ // a transition or null descriptor, mark the value.
1072
+ PropertyDetails details(Smi::cast(contents->get(i + 1)));
1073
+ if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
1074
+ HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
1075
+ if (object->IsHeapObject() && !object->IsMarked()) {
1076
+ SetMark(object);
1077
+ marking_stack.Push(object);
1078
+ }
1079
+ }
1080
+ }
1081
+ // The DescriptorArray descriptors contains a pointer to its contents array,
1082
+ // but the contents array is already marked.
1083
+ marking_stack.Push(descriptors);
1084
+ }
1085
+
1086
+
1087
+ void MarkCompactCollector::CreateBackPointers() {
1088
+ HeapObjectIterator iterator(Heap::map_space());
1089
+ for (HeapObject* next_object = iterator.next();
1090
+ next_object != NULL; next_object = iterator.next()) {
1091
+ if (next_object->IsMap()) { // Could also be ByteArray on free list.
1092
+ Map* map = Map::cast(next_object);
1093
+ if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
1094
+ map->instance_type() <= JS_FUNCTION_TYPE) {
1095
+ map->CreateBackPointers();
1096
+ } else {
1097
+ ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array());
1098
+ }
1099
+ }
1100
+ }
1101
+ }
1102
+
1103
+
1104
+ static int OverflowObjectSize(HeapObject* obj) {
1105
+ // Recover the normal map pointer, it might be marked as live and
1106
+ // overflowed.
1107
+ MapWord map_word = obj->map_word();
1108
+ map_word.ClearMark();
1109
+ map_word.ClearOverflow();
1110
+ return obj->SizeFromMap(map_word.ToMap());
1111
+ }
1112
+
1113
+
1114
+ // Fill the marking stack with overflowed objects returned by the given
1115
+ // iterator. Stop when the marking stack is filled or the end of the space
1116
+ // is reached, whichever comes first.
1117
+ template<class T>
1118
+ static void ScanOverflowedObjects(T* it) {
1119
+ // The caller should ensure that the marking stack is initially not full,
1120
+ // so that we don't waste effort pointlessly scanning for objects.
1121
+ ASSERT(!marking_stack.is_full());
1122
+
1123
+ for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
1124
+ if (object->IsOverflowed()) {
1125
+ object->ClearOverflow();
1126
+ ASSERT(object->IsMarked());
1127
+ ASSERT(Heap::Contains(object));
1128
+ marking_stack.Push(object);
1129
+ if (marking_stack.is_full()) return;
1130
+ }
1131
+ }
1132
+ }
1133
+
1134
+
1135
+ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
1136
+ return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
1137
+ }
1138
+
1139
+
1140
+ void MarkCompactCollector::MarkSymbolTable() {
1141
+ SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
1142
+ // Mark the symbol table itself.
1143
+ SetMark(symbol_table);
1144
+ // Explicitly mark the prefix.
1145
+ MarkingVisitor marker;
1146
+ symbol_table->IteratePrefix(&marker);
1147
+ ProcessMarkingStack();
1148
+ }
1149
+
1150
+
1151
+ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
1152
+ // Mark the heap roots including global variables, stack variables,
1153
+ // etc., and all objects reachable from them.
1154
+ Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
1155
+
1156
+ // Handle the symbol table specially.
1157
+ MarkSymbolTable();
1158
+
1159
+ // There may be overflowed objects in the heap. Visit them now.
1160
+ while (marking_stack.overflowed()) {
1161
+ RefillMarkingStack();
1162
+ EmptyMarkingStack();
1163
+ }
1164
+ }
1165
+
1166
+
1167
+ void MarkCompactCollector::MarkObjectGroups() {
1168
+ List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();
1169
+
1170
+ for (int i = 0; i < object_groups->length(); i++) {
1171
+ ObjectGroup* entry = object_groups->at(i);
1172
+ if (entry == NULL) continue;
1173
+
1174
+ List<Object**>& objects = entry->objects_;
1175
+ bool group_marked = false;
1176
+ for (int j = 0; j < objects.length(); j++) {
1177
+ Object* object = *objects[j];
1178
+ if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) {
1179
+ group_marked = true;
1180
+ break;
1181
+ }
1182
+ }
1183
+
1184
+ if (!group_marked) continue;
1185
+
1186
+ // An object in the group is marked, so mark as gray all white heap
1187
+ // objects in the group.
1188
+ for (int j = 0; j < objects.length(); ++j) {
1189
+ if ((*objects[j])->IsHeapObject()) {
1190
+ MarkObject(HeapObject::cast(*objects[j]));
1191
+ }
1192
+ }
1193
+ // Once the entire group has been colored gray, set the object group
1194
+ // to NULL so it won't be processed again.
1195
+ delete object_groups->at(i);
1196
+ object_groups->at(i) = NULL;
1197
+ }
1198
+ }
1199
+
1200
+
1201
+ // Mark all objects reachable from the objects on the marking stack.
1202
+ // Before: the marking stack contains zero or more heap object pointers.
1203
+ // After: the marking stack is empty, and all objects reachable from the
1204
+ // marking stack have been marked, or are overflowed in the heap.
1205
+ void MarkCompactCollector::EmptyMarkingStack() {
1206
+ while (!marking_stack.is_empty()) {
1207
+ HeapObject* object = marking_stack.Pop();
1208
+ ASSERT(object->IsHeapObject());
1209
+ ASSERT(Heap::Contains(object));
1210
+ ASSERT(object->IsMarked());
1211
+ ASSERT(!object->IsOverflowed());
1212
+
1213
+ // Because the object is marked, we have to recover the original map
1214
+ // pointer and use it to mark the object's body.
1215
+ MapWord map_word = object->map_word();
1216
+ map_word.ClearMark();
1217
+ Map* map = map_word.ToMap();
1218
+ MarkObject(map);
1219
+
1220
+ StaticMarkingVisitor::IterateBody(map, object);
1221
+ }
1222
+ }
1223
+
1224
+
1225
+ // Sweep the heap for overflowed objects, clear their overflow bits, and
1226
+ // push them on the marking stack. Stop early if the marking stack fills
1227
+ // before sweeping completes. If sweeping completes, there are no remaining
1228
+ // overflowed objects in the heap so the overflow flag on the markings stack
1229
+ // is cleared.
1230
+ void MarkCompactCollector::RefillMarkingStack() {
1231
+ ASSERT(marking_stack.overflowed());
1232
+
1233
+ SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize);
1234
+ ScanOverflowedObjects(&new_it);
1235
+ if (marking_stack.is_full()) return;
1236
+
1237
+ HeapObjectIterator old_pointer_it(Heap::old_pointer_space(),
1238
+ &OverflowObjectSize);
1239
+ ScanOverflowedObjects(&old_pointer_it);
1240
+ if (marking_stack.is_full()) return;
1241
+
1242
+ HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize);
1243
+ ScanOverflowedObjects(&old_data_it);
1244
+ if (marking_stack.is_full()) return;
1245
+
1246
+ HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize);
1247
+ ScanOverflowedObjects(&code_it);
1248
+ if (marking_stack.is_full()) return;
1249
+
1250
+ HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize);
1251
+ ScanOverflowedObjects(&map_it);
1252
+ if (marking_stack.is_full()) return;
1253
+
1254
+ HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize);
1255
+ ScanOverflowedObjects(&cell_it);
1256
+ if (marking_stack.is_full()) return;
1257
+
1258
+ LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize);
1259
+ ScanOverflowedObjects(&lo_it);
1260
+ if (marking_stack.is_full()) return;
1261
+
1262
+ marking_stack.clear_overflowed();
1263
+ }
1264
+
1265
+
1266
+ // Mark all objects reachable (transitively) from objects on the marking
1267
+ // stack. Before: the marking stack contains zero or more heap object
1268
+ // pointers. After: the marking stack is empty and there are no overflowed
1269
+ // objects in the heap.
1270
+ void MarkCompactCollector::ProcessMarkingStack() {
1271
+ EmptyMarkingStack();
1272
+ while (marking_stack.overflowed()) {
1273
+ RefillMarkingStack();
1274
+ EmptyMarkingStack();
1275
+ }
1276
+ }
1277
+
1278
+
1279
+ void MarkCompactCollector::ProcessObjectGroups() {
1280
+ bool work_to_do = true;
1281
+ ASSERT(marking_stack.is_empty());
1282
+ while (work_to_do) {
1283
+ MarkObjectGroups();
1284
+ work_to_do = !marking_stack.is_empty();
1285
+ ProcessMarkingStack();
1286
+ }
1287
+ }
1288
+
1289
+
1290
+ void MarkCompactCollector::MarkLiveObjects() {
1291
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
1292
+ // The recursive GC marker detects when it is nearing stack overflow,
1293
+ // and switches to a different marking system. JS interrupts interfere
1294
+ // with the C stack limit check.
1295
+ PostponeInterruptsScope postpone;
1296
+
1297
+ #ifdef DEBUG
1298
+ ASSERT(state_ == PREPARE_GC);
1299
+ state_ = MARK_LIVE_OBJECTS;
1300
+ #endif
1301
+ // The to space contains live objects, the from space is used as a marking
1302
+ // stack.
1303
+ marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
1304
+ Heap::new_space()->FromSpaceHigh());
1305
+
1306
+ ASSERT(!marking_stack.overflowed());
1307
+
1308
+ PrepareForCodeFlushing();
1309
+
1310
+ RootMarkingVisitor root_visitor;
1311
+ MarkRoots(&root_visitor);
1312
+
1313
+ // The objects reachable from the roots are marked, yet unreachable
1314
+ // objects are unmarked. Mark objects reachable from object groups
1315
+ // containing at least one marked object, and continue until no new
1316
+ // objects are reachable from the object groups.
1317
+ ProcessObjectGroups();
1318
+
1319
+ // The objects reachable from the roots or object groups are marked,
1320
+ // yet unreachable objects are unmarked. Mark objects reachable
1321
+ // only from weak global handles.
1322
+ //
1323
+ // First we identify nonlive weak handles and mark them as pending
1324
+ // destruction.
1325
+ GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
1326
+ // Then we mark the objects and process the transitive closure.
1327
+ GlobalHandles::IterateWeakRoots(&root_visitor);
1328
+ while (marking_stack.overflowed()) {
1329
+ RefillMarkingStack();
1330
+ EmptyMarkingStack();
1331
+ }
1332
+
1333
+ // Repeat the object groups to mark unmarked groups reachable from the
1334
+ // weak roots.
1335
+ ProcessObjectGroups();
1336
+
1337
+ // Prune the symbol table removing all symbols only pointed to by the
1338
+ // symbol table. Cannot use symbol_table() here because the symbol
1339
+ // table is marked.
1340
+ SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
1341
+ SymbolTableCleaner v;
1342
+ symbol_table->IterateElements(&v);
1343
+ symbol_table->ElementsRemoved(v.PointersRemoved());
1344
+ ExternalStringTable::Iterate(&v);
1345
+ ExternalStringTable::CleanUp();
1346
+
1347
+ // Process the weak references.
1348
+ MarkCompactWeakObjectRetainer mark_compact_object_retainer;
1349
+ Heap::ProcessWeakReferences(&mark_compact_object_retainer);
1350
+
1351
+ // Remove object groups after marking phase.
1352
+ GlobalHandles::RemoveObjectGroups();
1353
+
1354
+ // Flush code from collected candidates.
1355
+ FlushCode::ProcessCandidates();
1356
+
1357
+ // Clean up dead objects from the runtime profiler.
1358
+ RuntimeProfiler::RemoveDeadSamples();
1359
+ }
1360
+
1361
+
1362
#ifdef DEBUG
// Debug-only bookkeeping: accumulates the size of a live object into the
// per-space live-byte counters, classified by the space that contains it.
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
  live_bytes_ += obj->Size();
  if (Heap::new_space()->Contains(obj)) {
    live_young_objects_size_ += obj->Size();
  } else if (Heap::map_space()->Contains(obj)) {
    ASSERT(obj->IsMap());
    live_map_objects_size_ += obj->Size();
  } else if (Heap::cell_space()->Contains(obj)) {
    ASSERT(obj->IsJSGlobalPropertyCell());
    live_cell_objects_size_ += obj->Size();
  } else if (Heap::old_pointer_space()->Contains(obj)) {
    live_old_pointer_objects_size_ += obj->Size();
  } else if (Heap::old_data_space()->Contains(obj)) {
    live_old_data_objects_size_ += obj->Size();
  } else if (Heap::code_space()->Contains(obj)) {
    live_code_objects_size_ += obj->Size();
  } else if (Heap::lo_space()->Contains(obj)) {
    live_lo_objects_size_ += obj->Size();
  } else {
    UNREACHABLE();
  }
}
#endif  // DEBUG
1386
+
1387
+
1388
+ void MarkCompactCollector::SweepLargeObjectSpace() {
1389
+ #ifdef DEBUG
1390
+ ASSERT(state_ == MARK_LIVE_OBJECTS);
1391
+ state_ =
1392
+ compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
1393
+ #endif
1394
+ // Deallocate unmarked objects and clear marked bits for marked objects.
1395
+ Heap::lo_space()->FreeUnmarkedObjects();
1396
+ }
1397
+
1398
+
1399
+ // Safe to use during marking phase only.
1400
+ bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
1401
+ MapWord metamap = object->map_word();
1402
+ metamap.ClearMark();
1403
+ return metamap.ToMap()->instance_type() == MAP_TYPE;
1404
+ }
1405
+
1406
+
1407
+ void MarkCompactCollector::ClearNonLiveTransitions() {
1408
+ HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject);
1409
+ // Iterate over the map space, setting map transitions that go from
1410
+ // a marked map to an unmarked map to null transitions. At the same time,
1411
+ // set all the prototype fields of maps back to their original value,
1412
+ // dropping the back pointers temporarily stored in the prototype field.
1413
+ // Setting the prototype field requires following the linked list of
1414
+ // back pointers, reversing them all at once. This allows us to find
1415
+ // those maps with map transitions that need to be nulled, and only
1416
+ // scan the descriptor arrays of those maps, not all maps.
1417
+ // All of these actions are carried out only on maps of JSObjects
1418
+ // and related subtypes.
1419
+ for (HeapObject* obj = map_iterator.next();
1420
+ obj != NULL; obj = map_iterator.next()) {
1421
+ Map* map = reinterpret_cast<Map*>(obj);
1422
+ if (!map->IsMarked() && map->IsByteArray()) continue;
1423
+
1424
+ ASSERT(SafeIsMap(map));
1425
+ // Only JSObject and subtypes have map transitions and back pointers.
1426
+ if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
1427
+ if (map->instance_type() > JS_FUNCTION_TYPE) continue;
1428
+
1429
+ if (map->IsMarked() && map->attached_to_shared_function_info()) {
1430
+ // This map is used for inobject slack tracking and has been detached
1431
+ // from SharedFunctionInfo during the mark phase.
1432
+ // Since it survived the GC, reattach it now.
1433
+ map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
1434
+ }
1435
+
1436
+ // Follow the chain of back pointers to find the prototype.
1437
+ Map* current = map;
1438
+ while (SafeIsMap(current)) {
1439
+ current = reinterpret_cast<Map*>(current->prototype());
1440
+ ASSERT(current->IsHeapObject());
1441
+ }
1442
+ Object* real_prototype = current;
1443
+
1444
+ // Follow back pointers, setting them to prototype,
1445
+ // clearing map transitions when necessary.
1446
+ current = map;
1447
+ bool on_dead_path = !current->IsMarked();
1448
+ Object* next;
1449
+ while (SafeIsMap(current)) {
1450
+ next = current->prototype();
1451
+ // There should never be a dead map above a live map.
1452
+ ASSERT(on_dead_path || current->IsMarked());
1453
+
1454
+ // A live map above a dead map indicates a dead transition.
1455
+ // This test will always be false on the first iteration.
1456
+ if (on_dead_path && current->IsMarked()) {
1457
+ on_dead_path = false;
1458
+ current->ClearNonLiveTransitions(real_prototype);
1459
+ }
1460
+ *HeapObject::RawField(current, Map::kPrototypeOffset) =
1461
+ real_prototype;
1462
+ current = reinterpret_cast<Map*>(next);
1463
+ }
1464
+ }
1465
+ }
1466
+
1467
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map
// space are encoded in their map pointer word (along with an encoding of
// their map pointers).
//
// The exact encoding is described in the comments for class MapWord in
// objects.h.
//
// An address range [start, end) can have both live and non-live objects.
// Maximal non-live regions are marked so they can be skipped on subsequent
// sweeps of the heap.  A distinguished map-pointer encoding is used to mark
// free regions of one-word size (in which case the next word is the start
// of a live object).  A second distinguished map-pointer encoding is used
// to mark free regions larger than one word, and the size of the free
// region (including the first word) is written to the second word of the
// region.
//
// Any valid map page offset must lie in the object area of the page, so map
// page offsets less than Page::kObjectStartOffset are invalid.  We use a
// pair of distinguished invalid map encodings (for single word and multiple
// words) to indicate free regions in the page found during computation of
// forwarding addresses and skipped over in subsequent sweeps.
1490
+
1491
+
1492
+ // Encode a free region, defined by the given start address and size, in the
1493
+ // first word or two of the region.
1494
+ void EncodeFreeRegion(Address free_start, int free_size) {
1495
+ ASSERT(free_size >= kIntSize);
1496
+ if (free_size == kIntSize) {
1497
+ Memory::uint32_at(free_start) = MarkCompactCollector::kSingleFreeEncoding;
1498
+ } else {
1499
+ ASSERT(free_size >= 2 * kIntSize);
1500
+ Memory::uint32_at(free_start) = MarkCompactCollector::kMultiFreeEncoding;
1501
+ Memory::int_at(free_start + kIntSize) = free_size;
1502
+ }
1503
+
1504
+ #ifdef DEBUG
1505
+ // Zap the body of the free region.
1506
+ if (FLAG_enable_slow_asserts) {
1507
+ for (int offset = 2 * kIntSize;
1508
+ offset < free_size;
1509
+ offset += kPointerSize) {
1510
+ Memory::Address_at(free_start + offset) = kZapValue;
1511
+ }
1512
+ }
1513
+ #endif
1514
+ }
1515
+
1516
+
1517
+ // Try to promote all objects in new space. Heap numbers and sequential
1518
+ // strings are promoted to the code space, large objects to large object space,
1519
+ // and all others to the old space.
1520
+ inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object,
1521
+ int object_size) {
1522
+ MaybeObject* forwarded;
1523
+ if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
1524
+ forwarded = Failure::Exception();
1525
+ } else {
1526
+ OldSpace* target_space = Heap::TargetSpace(object);
1527
+ ASSERT(target_space == Heap::old_pointer_space() ||
1528
+ target_space == Heap::old_data_space());
1529
+ forwarded = target_space->MCAllocateRaw(object_size);
1530
+ }
1531
+ Object* result;
1532
+ if (!forwarded->ToObject(&result)) {
1533
+ result = Heap::new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked();
1534
+ }
1535
+ return result;
1536
+ }
1537
+
1538
+
1539
+ // Allocation functions for the paged spaces call the space's MCAllocateRaw.
1540
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace(
1541
+ HeapObject* ignore,
1542
+ int object_size) {
1543
+ return Heap::old_pointer_space()->MCAllocateRaw(object_size);
1544
+ }
1545
+
1546
+
1547
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace(
1548
+ HeapObject* ignore,
1549
+ int object_size) {
1550
+ return Heap::old_data_space()->MCAllocateRaw(object_size);
1551
+ }
1552
+
1553
+
1554
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace(
1555
+ HeapObject* ignore,
1556
+ int object_size) {
1557
+ return Heap::code_space()->MCAllocateRaw(object_size);
1558
+ }
1559
+
1560
+
1561
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace(
1562
+ HeapObject* ignore,
1563
+ int object_size) {
1564
+ return Heap::map_space()->MCAllocateRaw(object_size);
1565
+ }
1566
+
1567
+
1568
+ MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(HeapObject* ignore,
1569
+ int object_size) {
1570
+ return Heap::cell_space()->MCAllocateRaw(object_size);
1571
+ }
1572
+
1573
+
1574
+ // The forwarding address is encoded at the same offset as the current
1575
+ // to-space object, but in from space.
1576
+ inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object,
1577
+ int object_size,
1578
+ Object* new_object,
1579
+ int* ignored) {
1580
+ int offset =
1581
+ Heap::new_space()->ToSpaceOffsetForAddress(old_object->address());
1582
+ Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) =
1583
+ HeapObject::cast(new_object)->address();
1584
+ }
1585
+
1586
+
1587
+ // The forwarding address is encoded in the map pointer of the object as an
1588
+ // offset (in terms of live bytes) from the address of the first live object
1589
+ // in the page.
1590
+ inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,
1591
+ int object_size,
1592
+ Object* new_object,
1593
+ int* offset) {
1594
+ // Record the forwarding address of the first live object if necessary.
1595
+ if (*offset == 0) {
1596
+ Page::FromAddress(old_object->address())->mc_first_forwarded =
1597
+ HeapObject::cast(new_object)->address();
1598
+ }
1599
+
1600
+ MapWord encoding =
1601
+ MapWord::EncodeAddress(old_object->map()->address(), *offset);
1602
+ old_object->set_map_word(encoding);
1603
+ *offset += object_size;
1604
+ ASSERT(*offset <= Page::kObjectAreaSize);
1605
+ }
1606
+
1607
+
1608
+ // Most non-live objects are ignored.
1609
+ inline void IgnoreNonLiveObject(HeapObject* object) {}
1610
+
1611
+
1612
+ // Function template that, given a range of addresses (eg, a semispace or a
1613
+ // paged space page), iterates through the objects in the range to clear
1614
+ // mark bits and compute and encode forwarding addresses. As a side effect,
1615
+ // maximal free chunks are marked so that they can be skipped on subsequent
1616
+ // sweeps.
1617
+ //
1618
+ // The template parameters are an allocation function, a forwarding address
1619
+ // encoding function, and a function to process non-live objects.
1620
+ template<MarkCompactCollector::AllocationFunction Alloc,
1621
+ MarkCompactCollector::EncodingFunction Encode,
1622
+ MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1623
+ inline void EncodeForwardingAddressesInRange(Address start,
1624
+ Address end,
1625
+ int* offset) {
1626
+ // The start address of the current free region while sweeping the space.
1627
+ // This address is set when a transition from live to non-live objects is
1628
+ // encountered. A value (an encoding of the 'next free region' pointer)
1629
+ // is written to memory at this address when a transition from non-live to
1630
+ // live objects is encountered.
1631
+ Address free_start = NULL;
1632
+
1633
+ // A flag giving the state of the previously swept object. Initially true
1634
+ // to ensure that free_start is initialized to a proper address before
1635
+ // trying to write to it.
1636
+ bool is_prev_alive = true;
1637
+
1638
+ int object_size; // Will be set on each iteration of the loop.
1639
+ for (Address current = start; current < end; current += object_size) {
1640
+ HeapObject* object = HeapObject::FromAddress(current);
1641
+ if (object->IsMarked()) {
1642
+ object->ClearMark();
1643
+ MarkCompactCollector::tracer()->decrement_marked_count();
1644
+ object_size = object->Size();
1645
+
1646
+ // Allocation cannot fail, because we are compacting the space.
1647
+ Object* forwarded = Alloc(object, object_size)->ToObjectUnchecked();
1648
+ Encode(object, object_size, forwarded, offset);
1649
+
1650
+ #ifdef DEBUG
1651
+ if (FLAG_gc_verbose) {
1652
+ PrintF("forward %p -> %p.\n", object->address(),
1653
+ HeapObject::cast(forwarded)->address());
1654
+ }
1655
+ #endif
1656
+ if (!is_prev_alive) { // Transition from non-live to live.
1657
+ EncodeFreeRegion(free_start, static_cast<int>(current - free_start));
1658
+ is_prev_alive = true;
1659
+ }
1660
+ } else { // Non-live object.
1661
+ object_size = object->Size();
1662
+ ProcessNonLive(object);
1663
+ if (is_prev_alive) { // Transition from live to non-live.
1664
+ free_start = current;
1665
+ is_prev_alive = false;
1666
+ }
1667
+ LiveObjectList::ProcessNonLive(object);
1668
+ }
1669
+ }
1670
+
1671
+ // If we ended on a free region, mark it.
1672
+ if (!is_prev_alive) {
1673
+ EncodeFreeRegion(free_start, static_cast<int>(end - free_start));
1674
+ }
1675
+ }
1676
+
1677
+
1678
+ // Functions to encode the forwarding pointers in each compactable space.
1679
+ void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
1680
+ int ignored;
1681
+ EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
1682
+ EncodeForwardingAddressInNewSpace,
1683
+ IgnoreNonLiveObject>(
1684
+ Heap::new_space()->bottom(),
1685
+ Heap::new_space()->top(),
1686
+ &ignored);
1687
+ }
1688
+
1689
+
1690
+ template<MarkCompactCollector::AllocationFunction Alloc,
1691
+ MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
1692
+ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
1693
+ PagedSpace* space) {
1694
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
1695
+ while (it.has_next()) {
1696
+ Page* p = it.next();
1697
+
1698
+ // The offset of each live object in the page from the first live object
1699
+ // in the page.
1700
+ int offset = 0;
1701
+ EncodeForwardingAddressesInRange<Alloc,
1702
+ EncodeForwardingAddressInPagedSpace,
1703
+ ProcessNonLive>(
1704
+ p->ObjectAreaStart(),
1705
+ p->AllocationTop(),
1706
+ &offset);
1707
+ }
1708
+ }
1709
+
1710
+
1711
+ // We scavange new space simultaneously with sweeping. This is done in two
1712
+ // passes.
1713
+ // The first pass migrates all alive objects from one semispace to another or
1714
+ // promotes them to old space. Forwading address is written directly into
1715
+ // first word of object without any encoding. If object is dead we are writing
1716
+ // NULL as a forwarding address.
1717
+ // The second pass updates pointers to new space in all spaces. It is possible
1718
+ // to encounter pointers to dead objects during traversal of dirty regions we
1719
+ // should clear them to avoid encountering them during next dirty regions
1720
+ // iteration.
1721
+ static void MigrateObject(Address dst,
1722
+ Address src,
1723
+ int size,
1724
+ bool to_old_space) {
1725
+ if (to_old_space) {
1726
+ Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size);
1727
+ } else {
1728
+ Heap::CopyBlock(dst, src, size);
1729
+ }
1730
+
1731
+ Memory::Address_at(src) = dst;
1732
+ }
1733
+
1734
+
1735
+ class StaticPointersToNewGenUpdatingVisitor : public
1736
+ StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
1737
+ public:
1738
+ static inline void VisitPointer(Object** p) {
1739
+ if (!(*p)->IsHeapObject()) return;
1740
+
1741
+ HeapObject* obj = HeapObject::cast(*p);
1742
+ Address old_addr = obj->address();
1743
+
1744
+ if (Heap::new_space()->Contains(obj)) {
1745
+ ASSERT(Heap::InFromSpace(*p));
1746
+ *p = HeapObject::FromAddress(Memory::Address_at(old_addr));
1747
+ }
1748
+ }
1749
+ };
1750
+
1751
+
1752
// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
 public:
  void VisitPointer(Object** p) {
    StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
    }
  }

  // Code targets are stored as raw addresses in the instruction stream:
  // decode to a Code object, update it as a pointer, then re-encode.
  void VisitCodeTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    VisitPointer(&target);
    rinfo->set_target_address(Code::cast(target)->instruction_start());
  }

  // Same decode/update/re-encode dance for patched debugger call sites
  // (JS returns and debug break slots).
  void VisitDebugTarget(RelocInfo* rinfo) {
    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
    VisitPointer(&target);
    rinfo->set_call_address(Code::cast(target)->instruction_start());
  }
};
1783
+
1784
+
1785
+ // Visitor for updating pointers from live objects in old spaces to new space.
1786
+ // It can encounter pointers to dead objects in new space when traversing map
1787
+ // space (see comment for MigrateObject).
1788
+ static void UpdatePointerToNewGen(HeapObject** p) {
1789
+ if (!(*p)->IsHeapObject()) return;
1790
+
1791
+ Address old_addr = (*p)->address();
1792
+ ASSERT(Heap::InFromSpace(*p));
1793
+
1794
+ Address new_addr = Memory::Address_at(old_addr);
1795
+
1796
+ if (new_addr == NULL) {
1797
+ // We encountered pointer to a dead object. Clear it so we will
1798
+ // not visit it again during next iteration of dirty regions.
1799
+ *p = NULL;
1800
+ } else {
1801
+ *p = HeapObject::FromAddress(new_addr);
1802
+ }
1803
+ }
1804
+
1805
+
1806
+ static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) {
1807
+ Address old_addr = HeapObject::cast(*p)->address();
1808
+ Address new_addr = Memory::Address_at(old_addr);
1809
+ return String::cast(HeapObject::FromAddress(new_addr));
1810
+ }
1811
+
1812
+
1813
// Attempts to move a live new-space object of |object_size| bytes into old
// generation (large-object space, old-pointer space or old-data space).  On
// success the object is migrated -- its forwarding address written into its
// old first word -- and the promotion is recorded in the GC tracer.
// Returns false when no old-generation allocation succeeded; the caller
// must then relocate the object within new space instead.
static bool TryPromoteObject(HeapObject* object, int object_size) {
  Object* result;

  if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
    // Too big for a regular page: allocate in large-object space.
    MaybeObject* maybe_result =
        Heap::lo_space()->AllocateRawFixedArray(object_size);
    if (maybe_result->ToObject(&result)) {
      HeapObject* target = HeapObject::cast(result);
      // true: copy with region-mark updates (leaving new space).
      MigrateObject(target->address(), object->address(), object_size, true);
      MarkCompactCollector::tracer()->
          increment_promoted_objects_size(object_size);
      return true;
    }
  } else {
    OldSpace* target_space = Heap::TargetSpace(object);

    ASSERT(target_space == Heap::old_pointer_space() ||
           target_space == Heap::old_data_space());
    MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
    if (maybe_result->ToObject(&result)) {
      HeapObject* target = HeapObject::cast(result);
      // Only copies into old-pointer space need region marks maintained.
      MigrateObject(target->address(),
                    object->address(),
                    object_size,
                    target_space == Heap::old_pointer_space());
      MarkCompactCollector::tracer()->
          increment_promoted_objects_size(object_size);
      return true;
    }
  }

  return false;
}
1846
+
1847
+
1848
// Sweeps (and simultaneously scavenges) new space.  Pass one promotes or
// copies every live object and writes forwarding addresses (NULL for dead
// objects).  Pass two rewrites every pointer into new space: in the new to
// space itself, in roots, in old-space dirty regions, in global property
// cells, the global contexts list and the external string table.
static void SweepNewSpace(NewSpace* space) {
  Heap::CheckNewSpaceExpansionCriteria();

  Address from_bottom = space->bottom();
  Address from_top = space->top();

  // Flip the semispaces.  After flipping, to space is empty, from space has
  // live objects.
  space->Flip();
  space->ResetAllocationInfo();

  int size = 0;
  int survivors_size = 0;

  // First pass: traverse all objects in inactive semispace, remove marks,
  // migrate live objects and write forwarding addresses.
  for (Address current = from_bottom; current < from_top; current += size) {
    HeapObject* object = HeapObject::FromAddress(current);

    if (object->IsMarked()) {
      object->ClearMark();
      MarkCompactCollector::tracer()->decrement_marked_count();

      size = object->Size();
      survivors_size += size;

      // Aggressively promote young survivors to the old space.
      if (TryPromoteObject(object, size)) {
        continue;
      }

      // Promotion failed. Just migrate object to another semispace.
      // Allocation cannot fail at this point: semispaces are of equal size.
      Object* target = space->AllocateRaw(size)->ToObjectUnchecked();

      MigrateObject(HeapObject::cast(target)->address(),
                    current,
                    size,
                    false);
    } else {
      // Process the dead object before we write a NULL into its header.
      LiveObjectList::ProcessNonLive(object);

      // Size() must be read before the header word is clobbered; NULL marks
      // the object dead for the pointer-updating pass.
      size = object->Size();
      Memory::Address_at(current) = NULL;
    }
  }

  // Second pass: find pointers to new space and update them.
  PointersToNewGenUpdatingVisitor updating_visitor;

  // Update pointers in to space.
  Address current = space->bottom();
  while (current < space->top()) {
    HeapObject* object = HeapObject::FromAddress(current);
    current +=
        StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
                                                           object);
  }

  // Update roots.
  Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
  LiveObjectList::IterateElements(&updating_visitor);

  // Update pointers in old spaces.
  Heap::IterateDirtyRegions(Heap::old_pointer_space(),
                            &Heap::IteratePointersInDirtyRegion,
                            &UpdatePointerToNewGen,
                            Heap::WATERMARK_SHOULD_BE_VALID);

  Heap::lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen);

  // Update pointers from cells.
  HeapObjectIterator cell_iterator(Heap::cell_space());
  for (HeapObject* cell = cell_iterator.next();
       cell != NULL;
       cell = cell_iterator.next()) {
    if (cell->IsJSGlobalPropertyCell()) {
      // Visit the cell's value slot directly (untagged interior address).
      Address value_address =
          reinterpret_cast<Address>(cell) +
          (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
      updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
    }
  }

  // Update pointer from the global contexts list.
  updating_visitor.VisitPointer(Heap::global_contexts_list_address());

  // Update pointers from external string table.
  Heap::UpdateNewSpaceReferencesInExternalStringTable(
      &UpdateNewSpaceReferenceInExternalStringTableEntry);

  // All pointers were updated. Update auxiliary allocation info.
  Heap::IncrementYoungSurvivorsCounter(survivors_size);
  space->set_age_mark(space->top());

  // Update JSFunction pointers from the runtime profiler.
  RuntimeProfiler::UpdateSamplesAfterScavenge();
}
1947
+
1948
+
1949
// Sweeps a paged space without compaction: clears mark bits, returns dead
// regions to the free list, releases whole runs of empty trailing pages
// back to the space, and -- when the space ends in a free area -- lowers
// the allocation top instead of free-listing that area.
static void SweepSpace(PagedSpace* space) {
  PageIterator it(space, PageIterator::PAGES_IN_USE);

  // During sweeping of paged space we are trying to find longest sequences
  // of pages without live objects and free them (instead of putting them on
  // the free list).

  // Page preceding current.
  Page* prev = Page::FromAddress(NULL);

  // First empty page in a sequence.
  Page* first_empty_page = Page::FromAddress(NULL);

  // Page preceding first empty page.
  Page* prec_first_empty_page = Page::FromAddress(NULL);

  // If last used page of space ends with a sequence of dead objects
  // we can adjust allocation top instead of puting this free area into
  // the free list. Thus during sweeping we keep track of such areas
  // and defer their deallocation until the sweeping of the next page
  // is done: if one of the next pages contains live objects we have
  // to put such area into the free list.
  Address last_free_start = NULL;
  int last_free_size = 0;

  while (it.has_next()) {
    Page* p = it.next();

    // Tracks live/free run transitions within the current page.
    bool is_previous_alive = true;
    Address free_start = NULL;
    HeapObject* object;

    for (Address current = p->ObjectAreaStart();
         current < p->AllocationTop();
         current += object->Size()) {
      object = HeapObject::FromAddress(current);
      if (object->IsMarked()) {
        object->ClearMark();
        MarkCompactCollector::tracer()->decrement_marked_count();

        if (!is_previous_alive) {  // Transition from free to live.
          space->DeallocateBlock(free_start,
                                 static_cast<int>(current - free_start),
                                 true);
          is_previous_alive = true;
        }
      } else {
        MarkCompactCollector::ReportDeleteIfNeeded(object);
        if (is_previous_alive) {  // Transition from live to free.
          free_start = current;
          is_previous_alive = false;
        }
        LiveObjectList::ProcessNonLive(object);
      }
      // The object is now unmarked for the call to Size() at the top of the
      // loop.
    }

    bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop())
        || (!is_previous_alive && free_start == p->ObjectAreaStart());

    if (page_is_empty) {
      // This page is empty. Check whether we are in the middle of
      // sequence of empty pages and start one if not.
      if (!first_empty_page->is_valid()) {
        first_empty_page = p;
        prec_first_empty_page = prev;
      }

      if (!is_previous_alive) {
        // There are dead objects on this page. Update space accounting stats
        // without putting anything into free list.
        int size_in_bytes = static_cast<int>(p->AllocationTop() - free_start);
        if (size_in_bytes > 0) {
          space->DeallocateBlock(free_start, size_in_bytes, false);
        }
      }
    } else {
      // This page is not empty. Sequence of empty pages ended on the previous
      // one.
      if (first_empty_page->is_valid()) {
        space->FreePages(prec_first_empty_page, prev);
        prec_first_empty_page = first_empty_page = Page::FromAddress(NULL);
      }

      // If there is a free ending area on one of the previous pages we have
      // deallocate that area and put it on the free list.
      if (last_free_size > 0) {
        Page::FromAddress(last_free_start)->
            SetAllocationWatermark(last_free_start);
        space->DeallocateBlock(last_free_start, last_free_size, true);
        last_free_start = NULL;
        last_free_size = 0;
      }

      // If the last region of this page was not live we remember it.
      if (!is_previous_alive) {
        ASSERT(last_free_size == 0);
        last_free_size = static_cast<int>(p->AllocationTop() - free_start);
        last_free_start = free_start;
      }
    }

    prev = p;
  }

  // We reached end of space. See if we need to adjust allocation top.
  Address new_allocation_top = NULL;

  if (first_empty_page->is_valid()) {
    // Last used pages in space are empty. We can move allocation top backwards
    // to the beginning of first empty page.
    ASSERT(prev == space->AllocationTopPage());

    new_allocation_top = first_empty_page->ObjectAreaStart();
  }

  if (last_free_size > 0) {
    // There was a free ending area on the previous page.
    // Deallocate it without putting it into freelist and move allocation
    // top to the beginning of this free area.
    space->DeallocateBlock(last_free_start, last_free_size, false);
    new_allocation_top = last_free_start;
  }

  if (new_allocation_top != NULL) {
#ifdef DEBUG
    Page* new_allocation_top_page = Page::FromAllocationTop(new_allocation_top);
    if (!first_empty_page->is_valid()) {
      ASSERT(new_allocation_top_page == space->AllocationTopPage());
    } else if (last_free_size > 0) {
      ASSERT(new_allocation_top_page == prec_first_empty_page);
    } else {
      ASSERT(new_allocation_top_page == first_empty_page);
    }
#endif

    space->SetTop(new_allocation_top);
  }
}
2089
+
2090
+
2091
// Computes forwarding addresses for every live object in all compactable
// spaces.  The order is deliberate: old/code spaces first, then new space
// (whose objects may be promoted into them), and map space last, since
// encoding overwrites non-live objects whose map pointers are still needed
// for sizing.
void MarkCompactCollector::EncodeForwardingAddresses() {
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  // Objects in the active semispace of the young generation may be
  // relocated to the inactive semispace (if not promoted). Set the
  // relocation info to the beginning of the inactive semispace.
  Heap::new_space()->MCResetRelocationInfo();

  // Compute the forwarding pointers in each space.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
                                        ReportDeleteIfNeeded>(
      Heap::old_pointer_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
                                        IgnoreNonLiveObject>(
      Heap::old_data_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
                                        ReportDeleteIfNeeded>(
      Heap::code_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
                                        IgnoreNonLiveObject>(
      Heap::cell_space());


  // Compute new space next to last after the old and code spaces have been
  // compacted. Objects in new space can be promoted to old or code space.
  EncodeForwardingAddressesInNewSpace();

  // Compute map space last because computing forwarding addresses
  // overwrites non-live objects. Objects in the other spaces rely on
  // non-live map pointers to get the sizes of non-live objects.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
                                        IgnoreNonLiveObject>(
      Heap::map_space());

  // Write relocation info to the top page, so we can use it later. This is
  // done after promoting objects from the new space so we get the correct
  // allocation top.
  Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
  Heap::old_data_space()->MCWriteRelocationInfoToPage();
  Heap::code_space()->MCWriteRelocationInfoToPage();
  Heap::map_space()->MCWriteRelocationInfoToPage();
  Heap::cell_space()->MCWriteRelocationInfoToPage();
}
2136
+
2137
+
2138
+ class MapIterator : public HeapObjectIterator {
2139
+ public:
2140
+ MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { }
2141
+
2142
+ explicit MapIterator(Address start)
2143
+ : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { }
2144
+
2145
+ private:
2146
+ static int SizeCallback(HeapObject* unused) {
2147
+ USE(unused);
2148
+ return Map::kSize;
2149
+ }
2150
+ };
2151
+
2152
+
2153
// Compacts map space in place: live maps located above the post-compaction
// top are evacuated into vacant (free-list) slots below it, each evacuated
// map's old map word is tagged with the overflow bit and set to the
// forwarding location, and all map pointers heap-wide are then rewritten
// via the Update* methods before Finish() truncates the space.
class MapCompact {
 public:
  explicit MapCompact(int live_maps)
    : live_maps_(live_maps),
      to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)),
      map_to_evacuate_it_(to_evacuate_start_),
      first_map_to_evacuate_(
          reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) {
  }

  void CompactMaps() {
    // As we know the number of maps to evacuate beforehand,
    // we stop then there is no more vacant maps.
    for (Map* next_vacant_map = NextVacantMap();
         next_vacant_map;
         next_vacant_map = NextVacantMap()) {
      EvacuateMap(next_vacant_map, NextMapToEvacuate());
    }

#ifdef DEBUG
    CheckNoMapsToEvacuate();
#endif
  }

  void UpdateMapPointersInRoots() {
    Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG);
    GlobalHandles::IterateWeakRoots(&map_updating_visitor_);
    LiveObjectList::IterateElements(&map_updating_visitor_);
  }

  void UpdateMapPointersInPagedSpace(PagedSpace* space) {
    // Map space itself is handled by the evacuation loop, not here.
    ASSERT(space != Heap::map_space());

    PageIterator it(space, PageIterator::PAGES_IN_USE);
    while (it.has_next()) {
      Page* p = it.next();
      UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop());
    }
  }

  void UpdateMapPointersInNewSpace() {
    NewSpace* space = Heap::new_space();
    UpdateMapPointersInRange(space->bottom(), space->top());
  }

  void UpdateMapPointersInLargeObjectSpace() {
    LargeObjectIterator it(Heap::lo_space());
    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
      UpdateMapPointersInObject(obj);
  }

  void Finish() {
    Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_);
  }

 private:
  int live_maps_;
  // Map-space top after compaction; maps at or above it are evacuated.
  Address to_evacuate_start_;
  MapIterator vacant_map_it_;
  MapIterator map_to_evacuate_it_;
  Map* first_map_to_evacuate_;

  // Helper class for updating map pointers in HeapObjects.
  class MapUpdatingVisitor: public ObjectVisitor {
   public:
    void VisitPointer(Object** p) {
      UpdateMapPointer(p);
    }

    void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) UpdateMapPointer(p);
    }

   private:
    void UpdateMapPointer(Object** p) {
      if (!(*p)->IsHeapObject()) return;
      HeapObject* old_map = reinterpret_cast<HeapObject*>(*p);

      // Moved maps are tagged with overflowed map word. They are the only
      // objects those map word is overflowed as marking is already complete.
      MapWord map_word = old_map->map_word();
      if (!map_word.IsOverflowed()) return;

      *p = GetForwardedMap(map_word);
    }
  };

  static MapUpdatingVisitor map_updating_visitor_;

  // Advances |it| to the next map-space entry whose kind matches |live|
  // (live map vs free-list node); returns NULL once |last| is reached.
  static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
    while (true) {
      HeapObject* next = it->next();
      ASSERT(next != NULL);
      if (next == last)
        return NULL;
      ASSERT(!next->IsOverflowed());
      ASSERT(!next->IsMarked());
      ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next));
      if (next->IsMap() == live)
        return reinterpret_cast<Map*>(next);
    }
  }

  Map* NextVacantMap() {
    Map* map = NextMap(&vacant_map_it_, first_map_to_evacuate_, false);
    ASSERT(map == NULL || FreeListNode::IsFreeListNode(map));
    return map;
  }

  Map* NextMapToEvacuate() {
    Map* map = NextMap(&map_to_evacuate_it_, NULL, true);
    ASSERT(map != NULL);
    ASSERT(map->IsMap());
    return map;
  }

  // Copies |map_to_evacuate| into the vacant slot and tags the source's
  // map word (overflow bit) with the forwarding location.
  static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) {
    ASSERT(FreeListNode::IsFreeListNode(vacant_map));
    ASSERT(map_to_evacuate->IsMap());

    ASSERT(Map::kSize % 4 == 0);

    Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(vacant_map->address(),
                                                  map_to_evacuate->address(),
                                                  Map::kSize);

    ASSERT(vacant_map->IsMap());  // Due to memcpy above.

    MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
    forwarding_map_word.SetOverflow();
    map_to_evacuate->set_map_word(forwarding_map_word);

    ASSERT(map_to_evacuate->map_word().IsOverflowed());
    ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map);
  }

  // Decodes the forwarding location from an overflow-tagged map word.
  static Map* GetForwardedMap(MapWord map_word) {
    ASSERT(map_word.IsOverflowed());
    map_word.ClearOverflow();
    Map* new_map = map_word.ToMap();
    ASSERT_MAP_ALIGNED(new_map->address());
    return new_map;
  }

  // Rewrites the object's own map pointer (when its map was evacuated) and
  // all map pointers inside the object body; returns the object's size.
  static int UpdateMapPointersInObject(HeapObject* obj) {
    ASSERT(!obj->IsMarked());
    Map* map = obj->map();
    ASSERT(Heap::map_space()->Contains(map));
    MapWord map_word = map->map_word();
    ASSERT(!map_word.IsMarked());
    if (map_word.IsOverflowed()) {
      Map* new_map = GetForwardedMap(map_word);
      ASSERT(Heap::map_space()->Contains(new_map));
      obj->set_map(new_map);

#ifdef DEBUG
      if (FLAG_gc_verbose) {
        PrintF("update %p : %p -> %p\n",
               obj->address(),
               reinterpret_cast<void*>(map),
               reinterpret_cast<void*>(new_map));
      }
#endif
    }

    int size = obj->SizeFromMap(map);
    obj->IterateBody(map->instance_type(), size, &map_updating_visitor_);
    return size;
  }

  static void UpdateMapPointersInRange(Address start, Address end) {
    HeapObject* object;
    int size;
    for (Address current = start; current < end; current += size) {
      object = HeapObject::FromAddress(current);
      size = UpdateMapPointersInObject(object);
      ASSERT(size > 0);
    }
  }

#ifdef DEBUG
  // Verifies that everything past the evacuation start is a free-list node.
  void CheckNoMapsToEvacuate() {
    if (!FLAG_enable_slow_asserts)
      return;

    for (HeapObject* obj = map_to_evacuate_it_.next();
         obj != NULL; obj = map_to_evacuate_it_.next())
      ASSERT(FreeListNode::IsFreeListNode(obj));
  }
#endif
};
2344
+
2345
// Storage for MapCompact's shared, stateless map-updating visitor.
MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_;
2346
+
2347
+
2348
void MarkCompactCollector::SweepSpaces() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);

  ASSERT(state_ == SWEEP_SPACES);
  ASSERT(!IsCompacting());
  // Noncompacting collections simply sweep the spaces to clear the mark
  // bits and free the nonlive blocks (for old and map spaces). We sweep
  // the map space last because freeing non-live maps overwrites them and
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
  SweepSpace(Heap::old_pointer_space());
  SweepSpace(Heap::old_data_space());
  SweepSpace(Heap::code_space());
  SweepSpace(Heap::cell_space());
  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
    SweepNewSpace(Heap::new_space());
  }
  SweepSpace(Heap::map_space());

  // SweepNewSpace may have left stale pointers to relocated/dead new-space
  // objects in map space; walk the dirty map regions to fix them up.
  Heap::IterateDirtyRegions(Heap::map_space(),
                            &Heap::IteratePointersInDirtyMapsRegion,
                            &UpdatePointerToNewGen,
                            Heap::WATERMARK_SHOULD_BE_VALID);

  intptr_t live_maps_size = Heap::map_space()->Size();
  int live_maps = static_cast<int>(live_maps_size / Map::kSize);
  ASSERT(live_map_objects_size_ == live_maps_size);

  // Optionally compact map space and rewrite all map pointers heap-wide.
  if (Heap::map_space()->NeedsCompaction(live_maps)) {
    MapCompact map_compact(live_maps);

    map_compact.CompactMaps();
    map_compact.UpdateMapPointersInRoots();

    PagedSpaces spaces;
    for (PagedSpace* space = spaces.next();
         space != NULL; space = spaces.next()) {
      if (space == Heap::map_space()) continue;
      map_compact.UpdateMapPointersInPagedSpace(space);
    }
    map_compact.UpdateMapPointersInNewSpace();
    map_compact.UpdateMapPointersInLargeObjectSpace();

    map_compact.Finish();
  }
}
2394
+
2395
+
2396
// Iterate the live objects in a range of addresses (eg, a page or a
// semispace). The live regions of the range have been linked into a list.
// The first live region is [first_live_start, first_live_end), and the last
// address in the range is top. The callback function is used to get the
// size of each live object.
int MarkCompactCollector::IterateLiveObjectsInRange(
    Address start,
    Address end,
    HeapObjectCallback size_func) {
  int live_objects_size = 0;
  Address current = start;
  while (current < end) {
    uint32_t encoded_map = Memory::uint32_at(current);
    if (encoded_map == kSingleFreeEncoding) {
      // A single free word: skip one pointer.
      current += kPointerSize;
    } else if (encoded_map == kMultiFreeEncoding) {
      // A multi-word free region: its byte length is stored just past the
      // encoding word.
      current += Memory::int_at(current + kIntSize);
    } else {
      // A live object: accumulate its size and step over it.
      int size = size_func(HeapObject::FromAddress(current));
      current += size;
      live_objects_size += size;
    }
  }
  return live_objects_size;
}
2421
+
2422
+
2423
+ int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
2424
+ HeapObjectCallback size_f) {
2425
+ ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2426
+ return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
2427
+ }
2428
+
2429
+
2430
+ int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
2431
+ HeapObjectCallback size_f) {
2432
+ ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
2433
+ int total = 0;
2434
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
2435
+ while (it.has_next()) {
2436
+ Page* p = it.next();
2437
+ total += IterateLiveObjectsInRange(p->ObjectAreaStart(),
2438
+ p->AllocationTop(),
2439
+ size_f);
2440
+ }
2441
+ return total;
2442
+ }
2443
+
2444
+
2445
+ // -------------------------------------------------------------------------
2446
+ // Phase 3: Update pointers
2447
+
2448
// Helper class for updating pointers in HeapObjects.
class UpdatingVisitor: public ObjectVisitor {
 public:
  void VisitPointer(Object** p) {
    UpdatePointer(p);
  }

  void VisitPointers(Object** start, Object** end) {
    // Mark all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) UpdatePointer(p);
  }

  // Re-resolves a code target embedded in the instruction stream after the
  // target Code object has been relocated.
  void VisitCodeTarget(RelocInfo* rinfo) {
    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    VisitPointer(&target);
    rinfo->set_target_address(
        reinterpret_cast<Code*>(target)->instruction_start());
  }

  // Same for patched debugger call sites (JS returns, debug break slots).
  void VisitDebugTarget(RelocInfo* rinfo) {
    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
            rinfo->IsPatchedReturnSequence()) ||
           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence()));
    Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
    VisitPointer(&target);
    rinfo->set_call_address(
        reinterpret_cast<Code*>(target)->instruction_start());
  }

 private:
  // Computes the post-relocation location of *p and rewrites the slot.
  void UpdatePointer(Object** p) {
    if (!(*p)->IsHeapObject()) return;

    HeapObject* obj = HeapObject::cast(*p);
    Address old_addr = obj->address();
    Address new_addr;
    ASSERT(!Heap::InFromSpace(obj));

    if (Heap::new_space()->Contains(obj)) {
      // New-space forwarding addresses are stored in from space, at the
      // same offset the object occupies in to space.
      Address forwarding_pointer_addr =
          Heap::new_space()->FromSpaceLow() +
          Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
      new_addr = Memory::Address_at(forwarding_pointer_addr);

#ifdef DEBUG
      ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
             Heap::old_data_space()->Contains(new_addr) ||
             Heap::new_space()->FromSpaceContains(new_addr) ||
             Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));

      if (Heap::new_space()->FromSpaceContains(new_addr)) {
        ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
               Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
      }
#endif

    } else if (Heap::lo_space()->Contains(obj)) {
      // Don't move objects in the large object space.
      return;

    } else {
#ifdef DEBUG
      PagedSpaces spaces;
      PagedSpace* original_space = spaces.next();
      while (original_space != NULL) {
        if (original_space->Contains(obj)) break;
        original_space = spaces.next();
      }
      ASSERT(original_space != NULL);
#endif
      new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
      ASSERT(original_space->Contains(new_addr));
      ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
             original_space->MCSpaceOffsetForAddress(old_addr));
    }

    *p = HeapObject::FromAddress(new_addr);

#ifdef DEBUG
    if (FLAG_gc_verbose) {
      PrintF("update %p : %p -> %p\n",
             reinterpret_cast<Address>(p), old_addr, new_addr);
    }
#endif
  }
};
2536
+
2537
+
2538
// Phase 3 driver: rewrites every pointer in the heap (roots, weak handles,
// the global contexts list, all spaces and large objects) to the forwarding
// address computed during the encoding phase.
void MarkCompactCollector::UpdatePointers() {
#ifdef DEBUG
  ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
  state_ = UPDATE_POINTERS;
#endif
  UpdatingVisitor updating_visitor;
  RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor);
  Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
  GlobalHandles::IterateWeakRoots(&updating_visitor);

  // Update the pointer to the head of the weak list of global contexts.
  updating_visitor.VisitPointer(&Heap::global_contexts_list_);

  LiveObjectList::IterateElements(&updating_visitor);

  int live_maps_size = IterateLiveObjects(Heap::map_space(),
                                          &UpdatePointersInOldObject);
  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
                                                  &UpdatePointersInOldObject);
  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
                                               &UpdatePointersInOldObject);
  int live_codes_size = IterateLiveObjects(Heap::code_space(),
                                           &UpdatePointersInOldObject);
  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
                                           &UpdatePointersInOldObject);
  int live_news_size = IterateLiveObjects(Heap::new_space(),
                                          &UpdatePointersInNewObject);

  // Large objects do not move, the map word can be updated directly.
  LargeObjectIterator it(Heap::lo_space());
  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    UpdatePointersInNewObject(obj);
  }

  // The tallies are only consumed by the ASSERTs below; USE() silences
  // unused-variable warnings in release builds.
  USE(live_maps_size);
  USE(live_pointer_olds_size);
  USE(live_data_olds_size);
  USE(live_codes_size);
  USE(live_cells_size);
  USE(live_news_size);
  ASSERT(live_maps_size == live_map_objects_size_);
  ASSERT(live_data_olds_size == live_old_data_objects_size_);
  ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
  ASSERT(live_codes_size == live_code_objects_size_);
  ASSERT(live_cells_size == live_cell_objects_size_);
  ASSERT(live_news_size == live_young_objects_size_);
}
2585
+
2586
+
2587
// Updates the map pointer and all body pointers of an object whose map
// word still holds a plain (unencoded) map pointer; returns the object's
// size, computed from the old map.
int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
  // Keep old map pointers
  Map* old_map = obj->map();
  ASSERT(old_map->IsHeapObject());

  Address forwarded = GetForwardingAddressInOldSpace(old_map);

  // Maps only ever move within map space.
  ASSERT(Heap::map_space()->Contains(old_map));
  ASSERT(Heap::map_space()->Contains(forwarded));
#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
           forwarded);
  }
#endif
  // Update the map pointer.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded)));

  // We have to compute the object size relying on the old map because
  // map objects are not relocated yet.
  int obj_size = obj->SizeFromMap(old_map);

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor;
  obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor);
  return obj_size;
}
2614
+
2615
+
2616
// Updates the encoded map word and all body pointers of one live object in
// a paged (old pointer/old data/code/cell) space.  Returns the object size.
//
// During compaction the map word of such an object holds an encoded
// (map address, forwarding offset) pair instead of a plain map pointer, so
// the map must be decoded before use and re-encoded — with the map's own
// forwarding address — after the update.
int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
  // Decode the map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // At this point, the first word of map_addr is also encoded, cannot
  // cast it to Map* using Map::cast.
  Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr));
  // Size and instance type are still readable through the old map location.
  int obj_size = obj->SizeFromMap(map);
  InstanceType type = map->instance_type();

  // Update map pointer.
  Address new_map_addr = GetForwardingAddressInOldSpace(map);
  int offset = encoding.DecodeOffset();
  obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("update %p : %p -> %p\n", obj->address(),
           map_addr, new_map_addr);
  }
#endif

  // Update pointers in the object body.
  UpdatingVisitor updating_visitor;
  obj->IterateBody(type, obj_size, &updating_visitor);
  return obj_size;
}
2645
+
2646
+
2647
+ Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
2648
+ // Object should either in old or map space.
2649
+ MapWord encoding = obj->map_word();
2650
+
2651
+ // Offset to the first live object's forwarding address.
2652
+ int offset = encoding.DecodeOffset();
2653
+ Address obj_addr = obj->address();
2654
+
2655
+ // Find the first live object's forwarding address.
2656
+ Page* p = Page::FromAddress(obj_addr);
2657
+ Address first_forwarded = p->mc_first_forwarded;
2658
+
2659
+ // Page start address of forwarded address.
2660
+ Page* forwarded_page = Page::FromAddress(first_forwarded);
2661
+ int forwarded_offset = forwarded_page->Offset(first_forwarded);
2662
+
2663
+ // Find end of allocation in the page of first_forwarded.
2664
+ int mc_top_offset = forwarded_page->AllocationWatermarkOffset();
2665
+
2666
+ // Check if current object's forward pointer is in the same page
2667
+ // as the first live object's forwarding pointer
2668
+ if (forwarded_offset + offset < mc_top_offset) {
2669
+ // In the same page.
2670
+ return first_forwarded + offset;
2671
+ }
2672
+
2673
+ // Must be in the next page, NOTE: this may cross chunks.
2674
+ Page* next_page = forwarded_page->next_page();
2675
+ ASSERT(next_page->is_valid());
2676
+
2677
+ offset -= (mc_top_offset - forwarded_offset);
2678
+ offset += Page::kObjectStartOffset;
2679
+
2680
+ ASSERT_PAGE_OFFSET(offset);
2681
+ ASSERT(next_page->OffsetToAddress(offset) < next_page->AllocationTop());
2682
+
2683
+ return next_page->OffsetToAddress(offset);
2684
+ }
2685
+
2686
+
2687
+ // -------------------------------------------------------------------------
2688
+ // Phase 4: Relocate objects
2689
+
2690
// Phase 4 driver: copies every live object to its forwarding address,
// then commits the relocation info of every space and flips new space.
// Maps are relocated first because computing any other object's size
// requires an already-relocated map.
void MarkCompactCollector::RelocateObjects() {
#ifdef DEBUG
  ASSERT(state_ == UPDATE_POINTERS);
  state_ = RELOCATE_OBJECTS;
#endif
  // Relocates objects, always relocate map objects first. Relocating
  // objects in other space relies on map objects to get object size.
  int live_maps_size = IterateLiveObjects(Heap::map_space(),
                                          &RelocateMapObject);
  int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
                                                  &RelocateOldPointerObject);
  int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
                                               &RelocateOldDataObject);
  int live_codes_size = IterateLiveObjects(Heap::code_space(),
                                           &RelocateCodeObject);
  int live_cells_size = IterateLiveObjects(Heap::cell_space(),
                                           &RelocateCellObject);
  int live_news_size = IterateLiveObjects(Heap::new_space(),
                                          &RelocateNewObject);

  // The totals are only consumed by the ASSERTs below; USE() silences
  // unused-variable warnings in release builds.
  USE(live_maps_size);
  USE(live_pointer_olds_size);
  USE(live_data_olds_size);
  USE(live_codes_size);
  USE(live_cells_size);
  USE(live_news_size);
  // Cross-check against the live sizes recorded during marking.
  ASSERT(live_maps_size == live_map_objects_size_);
  ASSERT(live_data_olds_size == live_old_data_objects_size_);
  ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
  ASSERT(live_codes_size == live_code_objects_size_);
  ASSERT(live_cells_size == live_cell_objects_size_);
  ASSERT(live_news_size == live_young_objects_size_);

  // Flip from and to spaces
  Heap::new_space()->Flip();

  Heap::new_space()->MCCommitRelocationInfo();

  // Set age_mark to bottom in to space
  Address mark = Heap::new_space()->bottom();
  Heap::new_space()->set_age_mark(mark);

  // Commit relocation info in every paged space.
  PagedSpaces spaces;
  for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
    space->MCCommitRelocationInfo();

  Heap::CheckNewSpaceExpansionCriteria();
  Heap::IncrementYoungSurvivorsCounter(live_news_size);
}
2739
+
2740
+
2741
// Copies one live map object to its forwarding address and restores its
// map pointer.  Returns Map::kSize (all maps have the same size).
//
// The forwarding address must be read BEFORE the map pointer is reset:
// resetting the map word destroys the encoded forwarding information.
int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
  // Recover map pointer.
  MapWord encoding = obj->map_word();
  Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
  ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));

  // Get forwarding address before resetting map pointer
  Address new_addr = GetForwardingAddressInOldSpace(obj);

  // Reset map pointer. The meta map object may not be copied yet so
  // Map::cast does not yet work.
  obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)));

  Address old_addr = obj->address();

  if (new_addr != old_addr) {
    // Move contents.
    Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
                                                  old_addr,
                                                  Map::kSize);
  }

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", old_addr, new_addr);
  }
#endif

  return Map::kSize;
}
2771
+
2772
+
2773
// Restores the real map pointer of a non-map object whose map word still
// holds encoded forwarding information, and returns the object's size.
// |new_addr| is the object's forwarding address and is used only for
// debug checks and verbose logging here.
static inline int RestoreMap(HeapObject* obj,
                             PagedSpace* space,
                             Address new_addr,
                             Address map_addr) {
  // This must be a non-map object, and the function relies on the
  // assumption that the Map space is compacted before the other paged
  // spaces (see RelocateObjects).

  // Reset map pointer.  Map::cast is safe here because the map itself
  // has already been relocated to map_addr.
  obj->set_map(Map::cast(HeapObject::FromAddress(map_addr)));

  // Size() is only valid once the real map pointer has been restored.
  int obj_size = obj->Size();
  ASSERT_OBJECT_SIZE(obj_size);

  // The destination never lies past the source within the space.
  ASSERT(space->MCSpaceOffsetForAddress(new_addr) <=
         space->MCSpaceOffsetForAddress(obj->address()));

#ifdef DEBUG
  if (FLAG_gc_verbose) {
    PrintF("relocate %p -> %p\n", obj->address(), new_addr);
  }
#endif

  return obj_size;
}
2798
+
2799
+
2800
+ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
2801
+ PagedSpace* space) {
2802
+ // Recover map pointer.
2803
+ MapWord encoding = obj->map_word();
2804
+ Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
2805
+ ASSERT(Heap::map_space()->Contains(map_addr));
2806
+
2807
+ // Get forwarding address before resetting map pointer.
2808
+ Address new_addr = GetForwardingAddressInOldSpace(obj);
2809
+
2810
+ // Reset the map pointer.
2811
+ int obj_size = RestoreMap(obj, space, new_addr, map_addr);
2812
+
2813
+ Address old_addr = obj->address();
2814
+
2815
+ if (new_addr != old_addr) {
2816
+ // Move contents.
2817
+ if (space == Heap::old_data_space()) {
2818
+ Heap::MoveBlock(new_addr, old_addr, obj_size);
2819
+ } else {
2820
+ Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2821
+ old_addr,
2822
+ obj_size);
2823
+ }
2824
+ }
2825
+
2826
+ ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());
2827
+
2828
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
2829
+ if (copied_to->IsSharedFunctionInfo()) {
2830
+ PROFILE(SFIMoveEvent(old_addr, new_addr));
2831
+ }
2832
+ HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
2833
+
2834
+ return obj_size;
2835
+ }
2836
+
2837
+
2838
+ int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
2839
+ return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
2840
+ }
2841
+
2842
+
2843
+ int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
2844
+ return RelocateOldNonCodeObject(obj, Heap::old_data_space());
2845
+ }
2846
+
2847
+
2848
+ int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
2849
+ return RelocateOldNonCodeObject(obj, Heap::cell_space());
2850
+ }
2851
+
2852
+
2853
+ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
2854
+ // Recover map pointer.
2855
+ MapWord encoding = obj->map_word();
2856
+ Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
2857
+ ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
2858
+
2859
+ // Get forwarding address before resetting map pointer
2860
+ Address new_addr = GetForwardingAddressInOldSpace(obj);
2861
+
2862
+ // Reset the map pointer.
2863
+ int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
2864
+
2865
+ Address old_addr = obj->address();
2866
+
2867
+ if (new_addr != old_addr) {
2868
+ // Move contents.
2869
+ Heap::MoveBlock(new_addr, old_addr, obj_size);
2870
+ }
2871
+
2872
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
2873
+ if (copied_to->IsCode()) {
2874
+ // May also update inline cache target.
2875
+ Code::cast(copied_to)->Relocate(new_addr - old_addr);
2876
+ // Notify the logger that compiled code has moved.
2877
+ PROFILE(CodeMoveEvent(old_addr, new_addr));
2878
+ }
2879
+ HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
2880
+
2881
+ return obj_size;
2882
+ }
2883
+
2884
+
2885
+ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
2886
+ int obj_size = obj->Size();
2887
+
2888
+ // Get forwarding address
2889
+ Address old_addr = obj->address();
2890
+ int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
2891
+
2892
+ Address new_addr =
2893
+ Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);
2894
+
2895
+ #ifdef DEBUG
2896
+ if (Heap::new_space()->FromSpaceContains(new_addr)) {
2897
+ ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
2898
+ Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
2899
+ } else {
2900
+ ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
2901
+ Heap::TargetSpace(obj) == Heap::old_data_space());
2902
+ }
2903
+ #endif
2904
+
2905
+ // New and old addresses cannot overlap.
2906
+ if (Heap::InNewSpace(HeapObject::FromAddress(new_addr))) {
2907
+ Heap::CopyBlock(new_addr, old_addr, obj_size);
2908
+ } else {
2909
+ Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr,
2910
+ old_addr,
2911
+ obj_size);
2912
+ }
2913
+
2914
+ #ifdef DEBUG
2915
+ if (FLAG_gc_verbose) {
2916
+ PrintF("relocate %p -> %p\n", old_addr, new_addr);
2917
+ }
2918
+ #endif
2919
+
2920
+ HeapObject* copied_to = HeapObject::FromAddress(new_addr);
2921
+ if (copied_to->IsSharedFunctionInfo()) {
2922
+ PROFILE(SFIMoveEvent(old_addr, new_addr));
2923
+ }
2924
+ HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr));
2925
+
2926
+ return obj_size;
2927
+ }
2928
+
2929
+
2930
// Notifies external consumers (GDB JIT interface, profiler/logger) that a
// code object is being collected so they can drop their records for it.
// Each notification is independently compile-time gated, hence the two
// separate IsCode() checks.
void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (obj->IsCode()) {
    GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj));
  }
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (obj->IsCode()) {
    PROFILE(CodeDeleteEvent(obj->address()));
  }
#endif
}
2942
+
2943
+
2944
+ int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) {
2945
+ MapWord map_word = obj->map_word();
2946
+ map_word.ClearMark();
2947
+ return obj->SizeFromMap(map_word.ToMap());
2948
+ }
2949
+
2950
+
2951
// One-time static setup for the collector: initializes the static visitor
// classes used during pointer updating and marking.
void MarkCompactCollector::Initialize() {
  StaticPointersToNewGenUpdatingVisitor::Initialize();
  StaticMarkingVisitor::Initialize();
}
2955
+
2956
+
2957
+ } } // namespace v8::internal