therubyracer 0.8.1.pre2 → 0.8.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (877)
  1. data/Changelog.md +2 -1
  2. data/README.md +6 -3
  3. data/ext/v8/upstream/3.1.8/.gitignore +31 -0
  4. data/ext/v8/upstream/3.1.8/AUTHORS +40 -0
  5. data/ext/v8/upstream/3.1.8/ChangeLog +2566 -0
  6. data/ext/v8/upstream/3.1.8/LICENSE +52 -0
  7. data/ext/v8/upstream/3.1.8/LICENSE.strongtalk +29 -0
  8. data/ext/v8/upstream/3.1.8/LICENSE.v8 +26 -0
  9. data/ext/v8/upstream/3.1.8/LICENSE.valgrind +45 -0
  10. data/ext/v8/upstream/3.1.8/SConstruct +1192 -0
  11. data/ext/v8/upstream/3.1.8/build/README.txt +25 -0
  12. data/ext/v8/upstream/3.1.8/build/all.gyp +18 -0
  13. data/ext/v8/upstream/3.1.8/build/armu.gypi +32 -0
  14. data/ext/v8/upstream/3.1.8/build/common.gypi +82 -0
  15. data/ext/v8/upstream/3.1.8/build/gyp_v8 +145 -0
  16. data/ext/v8/upstream/3.1.8/include/v8-debug.h +384 -0
  17. data/ext/v8/upstream/3.1.8/include/v8-preparser.h +116 -0
  18. data/ext/v8/upstream/3.1.8/include/v8-profiler.h +426 -0
  19. data/ext/v8/upstream/3.1.8/include/v8-testing.h +99 -0
  20. data/ext/v8/upstream/3.1.8/include/v8.h +3846 -0
  21. data/ext/v8/upstream/3.1.8/include/v8stdint.h +53 -0
  22. data/ext/v8/upstream/3.1.8/preparser/preparser-process.cc +206 -0
  23. data/ext/v8/upstream/3.1.8/src/SConscript +356 -0
  24. data/ext/v8/upstream/3.1.8/src/accessors.cc +907 -0
  25. data/ext/v8/upstream/3.1.8/src/accessors.h +121 -0
  26. data/ext/v8/upstream/3.1.8/src/allocation.cc +204 -0
  27. data/ext/v8/upstream/3.1.8/src/allocation.h +176 -0
  28. data/ext/v8/upstream/3.1.8/src/api.cc +5191 -0
  29. data/ext/v8/upstream/3.1.8/src/api.h +508 -0
  30. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/apinatives.js +0 -0
  31. data/ext/v8/upstream/3.1.8/src/apiutils.h +80 -0
  32. data/ext/v8/upstream/3.1.8/src/arguments.h +105 -0
  33. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm-inl.h +352 -0
  34. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.cc +2756 -0
  35. data/ext/v8/upstream/3.1.8/src/arm/assembler-arm.h +1294 -0
  36. data/ext/v8/upstream/3.1.8/src/arm/builtins-arm.cc +1628 -0
  37. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.cc +6783 -0
  38. data/ext/v8/upstream/3.1.8/src/arm/code-stubs-arm.h +657 -0
  39. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm-inl.h +48 -0
  40. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.cc +7403 -0
  41. data/ext/v8/upstream/3.1.8/src/arm/codegen-arm.h +595 -0
  42. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.cc +152 -0
  43. data/ext/v8/upstream/3.1.8/src/arm/constants-arm.h +769 -0
  44. data/ext/v8/upstream/3.1.8/src/arm/cpu-arm.cc +147 -0
  45. data/ext/v8/upstream/3.1.8/src/arm/debug-arm.cc +315 -0
  46. data/ext/v8/upstream/3.1.8/src/arm/deoptimizer-arm.cc +700 -0
  47. data/ext/v8/upstream/3.1.8/src/arm/disasm-arm.cc +1439 -0
  48. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.cc +45 -0
  49. data/ext/v8/upstream/3.1.8/src/arm/frames-arm.h +168 -0
  50. data/ext/v8/upstream/3.1.8/src/arm/full-codegen-arm.cc +4230 -0
  51. data/ext/v8/upstream/3.1.8/src/arm/ic-arm.cc +1799 -0
  52. data/ext/v8/upstream/3.1.8/src/arm/jump-target-arm.cc +174 -0
  53. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.cc +2041 -0
  54. data/ext/v8/upstream/3.1.8/src/arm/lithium-arm.h +2046 -0
  55. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.cc +3822 -0
  56. data/ext/v8/upstream/3.1.8/src/arm/lithium-codegen-arm.h +312 -0
  57. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.cc +303 -0
  58. data/ext/v8/upstream/3.1.8/src/arm/lithium-gap-resolver-arm.h +84 -0
  59. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.cc +2701 -0
  60. data/ext/v8/upstream/3.1.8/src/arm/macro-assembler-arm.h +1015 -0
  61. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.cc +1280 -0
  62. data/ext/v8/upstream/3.1.8/src/arm/regexp-macro-assembler-arm.h +252 -0
  63. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm-inl.h +0 -0
  64. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.cc +0 -0
  65. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/register-allocator-arm.h +0 -0
  66. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.cc +3165 -0
  67. data/ext/v8/upstream/3.1.8/src/arm/simulator-arm.h +402 -0
  68. data/ext/v8/upstream/3.1.8/src/arm/stub-cache-arm.cc +4077 -0
  69. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/arm/virtual-frame-arm-inl.h +0 -0
  70. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.cc +843 -0
  71. data/ext/v8/upstream/3.1.8/src/arm/virtual-frame-arm.h +520 -0
  72. data/ext/v8/upstream/3.1.8/src/array.js +1231 -0
  73. data/ext/v8/upstream/3.1.8/src/assembler.cc +973 -0
  74. data/ext/v8/upstream/3.1.8/src/assembler.h +787 -0
  75. data/ext/v8/upstream/3.1.8/src/ast-inl.h +107 -0
  76. data/ext/v8/upstream/3.1.8/src/ast.cc +1067 -0
  77. data/ext/v8/upstream/3.1.8/src/ast.h +2177 -0
  78. data/ext/v8/upstream/3.1.8/src/atomicops.h +165 -0
  79. data/ext/v8/upstream/3.1.8/src/atomicops_internals_arm_gcc.h +145 -0
  80. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.cc +126 -0
  81. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_gcc.h +287 -0
  82. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_macosx.h +301 -0
  83. data/ext/v8/upstream/3.1.8/src/atomicops_internals_x86_msvc.h +203 -0
  84. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.cc +655 -0
  85. data/ext/v8/upstream/3.1.8/src/bignum-dtoa.h +81 -0
  86. data/ext/v8/upstream/3.1.8/src/bignum.cc +768 -0
  87. data/ext/v8/upstream/3.1.8/src/bignum.h +140 -0
  88. data/ext/v8/upstream/3.1.8/src/bootstrapper.cc +1888 -0
  89. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/bootstrapper.h +0 -0
  90. data/ext/v8/upstream/3.1.8/src/builtins.cc +1586 -0
  91. data/ext/v8/upstream/3.1.8/src/builtins.h +339 -0
  92. data/ext/v8/upstream/3.1.8/src/bytecodes-irregexp.h +105 -0
  93. data/ext/v8/upstream/3.1.8/src/cached-powers.cc +177 -0
  94. data/ext/v8/upstream/3.1.8/src/cached-powers.h +65 -0
  95. data/ext/v8/upstream/3.1.8/src/char-predicates-inl.h +94 -0
  96. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/char-predicates.h +0 -0
  97. data/ext/v8/upstream/3.1.8/src/checks.cc +110 -0
  98. data/ext/v8/upstream/3.1.8/src/checks.h +292 -0
  99. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue-inl.h +0 -0
  100. data/ext/v8/upstream/3.1.8/src/circular-queue.cc +122 -0
  101. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/circular-queue.h +0 -0
  102. data/ext/v8/upstream/3.1.8/src/code-stubs.cc +230 -0
  103. data/ext/v8/upstream/3.1.8/src/code-stubs.h +950 -0
  104. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/code.h +0 -0
  105. data/ext/v8/upstream/3.1.8/src/codegen-inl.h +64 -0
  106. data/ext/v8/upstream/3.1.8/src/codegen.cc +495 -0
  107. data/ext/v8/upstream/3.1.8/src/codegen.h +245 -0
  108. data/ext/v8/upstream/3.1.8/src/compilation-cache.cc +654 -0
  109. data/ext/v8/upstream/3.1.8/src/compilation-cache.h +112 -0
  110. data/ext/v8/upstream/3.1.8/src/compiler.cc +806 -0
  111. data/ext/v8/upstream/3.1.8/src/compiler.h +290 -0
  112. data/ext/v8/upstream/3.1.8/src/contexts.cc +320 -0
  113. data/ext/v8/upstream/3.1.8/src/contexts.h +376 -0
  114. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/conversions-inl.h +0 -0
  115. data/ext/v8/upstream/3.1.8/src/conversions.cc +1069 -0
  116. data/ext/v8/upstream/3.1.8/src/conversions.h +122 -0
  117. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/counters.cc +0 -0
  118. data/ext/v8/upstream/3.1.8/src/counters.h +242 -0
  119. data/ext/v8/upstream/3.1.8/src/cpu-profiler-inl.h +100 -0
  120. data/ext/v8/upstream/3.1.8/src/cpu-profiler.cc +554 -0
  121. data/ext/v8/upstream/3.1.8/src/cpu-profiler.h +291 -0
  122. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/cpu.h +0 -0
  123. data/ext/v8/upstream/3.1.8/src/d8-debug.cc +367 -0
  124. data/ext/v8/upstream/3.1.8/src/d8-debug.h +157 -0
  125. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-posix.cc +0 -0
  126. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-readline.cc +0 -0
  127. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/d8-windows.cc +0 -0
  128. data/ext/v8/upstream/3.1.8/src/d8.cc +792 -0
  129. data/ext/v8/upstream/3.1.8/src/d8.gyp +85 -0
  130. data/ext/v8/upstream/3.1.8/src/d8.h +231 -0
  131. data/ext/v8/upstream/3.1.8/src/d8.js +2798 -0
  132. data/ext/v8/upstream/3.1.8/src/data-flow.cc +545 -0
  133. data/ext/v8/upstream/3.1.8/src/data-flow.h +379 -0
  134. data/ext/v8/upstream/3.1.8/src/date.js +1103 -0
  135. data/ext/v8/upstream/3.1.8/src/dateparser-inl.h +125 -0
  136. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/dateparser.cc +0 -0
  137. data/ext/v8/upstream/3.1.8/src/dateparser.h +263 -0
  138. data/ext/v8/upstream/3.1.8/src/debug-agent.cc +446 -0
  139. data/ext/v8/upstream/3.1.8/src/debug-agent.h +131 -0
  140. data/ext/v8/upstream/3.1.8/src/debug-debugger.js +2569 -0
  141. data/ext/v8/upstream/3.1.8/src/debug.cc +3085 -0
  142. data/ext/v8/upstream/3.1.8/src/debug.h +1025 -0
  143. data/ext/v8/upstream/3.1.8/src/deoptimizer.cc +1185 -0
  144. data/ext/v8/upstream/3.1.8/src/deoptimizer.h +529 -0
  145. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disasm.h +0 -0
  146. data/ext/v8/upstream/3.1.8/src/disassembler.cc +338 -0
  147. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/disassembler.h +0 -0
  148. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.cc +0 -0
  149. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/diy-fp.h +0 -0
  150. data/ext/v8/upstream/3.1.8/src/double.h +238 -0
  151. data/ext/v8/upstream/3.1.8/src/dtoa.cc +103 -0
  152. data/ext/v8/upstream/3.1.8/src/dtoa.h +85 -0
  153. data/ext/v8/upstream/3.1.8/src/execution.cc +735 -0
  154. data/ext/v8/upstream/3.1.8/src/execution.h +322 -0
  155. data/ext/v8/upstream/3.1.8/src/extensions/experimental/experimental.gyp +53 -0
  156. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.cc +264 -0
  157. data/ext/v8/upstream/3.1.8/src/extensions/experimental/i18n-extension.h +64 -0
  158. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.cc +141 -0
  159. data/ext/v8/upstream/3.1.8/src/extensions/externalize-string-extension.h +50 -0
  160. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.cc +58 -0
  161. data/ext/v8/upstream/3.1.8/src/extensions/gc-extension.h +49 -0
  162. data/ext/v8/upstream/3.1.8/src/factory.cc +1087 -0
  163. data/ext/v8/upstream/3.1.8/src/factory.h +432 -0
  164. data/ext/v8/upstream/3.1.8/src/fast-dtoa.cc +736 -0
  165. data/ext/v8/upstream/3.1.8/src/fast-dtoa.h +83 -0
  166. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.cc +0 -0
  167. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/fixed-dtoa.h +0 -0
  168. data/ext/v8/upstream/3.1.8/src/flag-definitions.h +552 -0
  169. data/ext/v8/upstream/3.1.8/src/flags.cc +551 -0
  170. data/ext/v8/upstream/3.1.8/src/flags.h +79 -0
  171. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/frame-element.cc +0 -0
  172. data/ext/v8/upstream/3.1.8/src/frame-element.h +277 -0
  173. data/ext/v8/upstream/3.1.8/src/frames-inl.h +210 -0
  174. data/ext/v8/upstream/3.1.8/src/frames.cc +1232 -0
  175. data/ext/v8/upstream/3.1.8/src/frames.h +826 -0
  176. data/ext/v8/upstream/3.1.8/src/full-codegen.cc +1382 -0
  177. data/ext/v8/upstream/3.1.8/src/full-codegen.h +751 -0
  178. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.cc +90 -0
  179. data/ext/v8/upstream/3.1.8/src/func-name-inferrer.h +111 -0
  180. data/ext/v8/upstream/3.1.8/src/gdb-jit.cc +1547 -0
  181. data/ext/v8/upstream/3.1.8/src/gdb-jit.h +138 -0
  182. data/ext/v8/upstream/3.1.8/src/global-handles.cc +534 -0
  183. data/ext/v8/upstream/3.1.8/src/global-handles.h +181 -0
  184. data/ext/v8/upstream/3.1.8/src/globals.h +325 -0
  185. data/ext/v8/upstream/3.1.8/src/handles-inl.h +80 -0
  186. data/ext/v8/upstream/3.1.8/src/handles.cc +910 -0
  187. data/ext/v8/upstream/3.1.8/src/handles.h +424 -0
  188. data/ext/v8/upstream/3.1.8/src/hashmap.cc +230 -0
  189. data/ext/v8/upstream/3.1.8/src/hashmap.h +121 -0
  190. data/ext/v8/upstream/3.1.8/src/heap-inl.h +587 -0
  191. data/ext/v8/upstream/3.1.8/src/heap-profiler.cc +1128 -0
  192. data/ext/v8/upstream/3.1.8/src/heap-profiler.h +381 -0
  193. data/ext/v8/upstream/3.1.8/src/heap.cc +5610 -0
  194. data/ext/v8/upstream/3.1.8/src/heap.h +2218 -0
  195. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.cc +1490 -0
  196. data/ext/v8/upstream/3.1.8/src/hydrogen-instructions.h +3493 -0
  197. data/ext/v8/upstream/3.1.8/src/hydrogen.cc +6056 -0
  198. data/ext/v8/upstream/3.1.8/src/hydrogen.h +1091 -0
  199. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32-inl.h +429 -0
  200. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.cc +2800 -0
  201. data/ext/v8/upstream/3.1.8/src/ia32/assembler-ia32.h +1093 -0
  202. data/ext/v8/upstream/3.1.8/src/ia32/builtins-ia32.cc +1590 -0
  203. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.cc +6624 -0
  204. data/ext/v8/upstream/3.1.8/src/ia32/code-stubs-ia32.h +536 -0
  205. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/codegen-ia32-inl.h +0 -0
  206. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.cc +10354 -0
  207. data/ext/v8/upstream/3.1.8/src/ia32/codegen-ia32.h +798 -0
  208. data/ext/v8/upstream/3.1.8/src/ia32/cpu-ia32.cc +87 -0
  209. data/ext/v8/upstream/3.1.8/src/ia32/debug-ia32.cc +309 -0
  210. data/ext/v8/upstream/3.1.8/src/ia32/deoptimizer-ia32.cc +664 -0
  211. data/ext/v8/upstream/3.1.8/src/ia32/disasm-ia32.cc +1597 -0
  212. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.cc +45 -0
  213. data/ext/v8/upstream/3.1.8/src/ia32/frames-ia32.h +140 -0
  214. data/ext/v8/upstream/3.1.8/src/ia32/full-codegen-ia32.cc +4278 -0
  215. data/ext/v8/upstream/3.1.8/src/ia32/ic-ia32.cc +1786 -0
  216. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/jump-target-ia32.cc +0 -0
  217. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.cc +3880 -0
  218. data/ext/v8/upstream/3.1.8/src/ia32/lithium-codegen-ia32.h +309 -0
  219. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.cc +460 -0
  220. data/ext/v8/upstream/3.1.8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  221. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.cc +2095 -0
  222. data/ext/v8/upstream/3.1.8/src/ia32/lithium-ia32.h +2127 -0
  223. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.cc +2031 -0
  224. data/ext/v8/upstream/3.1.8/src/ia32/macro-assembler-ia32.h +798 -0
  225. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.cc +1253 -0
  226. data/ext/v8/upstream/3.1.8/src/ia32/regexp-macro-assembler-ia32.h +215 -0
  227. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32-inl.h +0 -0
  228. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.cc +0 -0
  229. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/register-allocator-ia32.h +0 -0
  230. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/ia32/simulator-ia32.cc +0 -0
  231. data/ext/v8/upstream/3.1.8/src/ia32/simulator-ia32.h +72 -0
  232. data/ext/v8/upstream/3.1.8/src/ia32/stub-cache-ia32.cc +3732 -0
  233. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.cc +1360 -0
  234. data/ext/v8/upstream/3.1.8/src/ia32/virtual-frame-ia32.h +646 -0
  235. data/ext/v8/upstream/3.1.8/src/ic-inl.h +129 -0
  236. data/ext/v8/upstream/3.1.8/src/ic.cc +2333 -0
  237. data/ext/v8/upstream/3.1.8/src/ic.h +639 -0
  238. data/ext/v8/upstream/3.1.8/src/inspector.cc +63 -0
  239. data/ext/v8/upstream/3.1.8/src/inspector.h +62 -0
  240. data/ext/v8/upstream/3.1.8/src/interpreter-irregexp.cc +655 -0
  241. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/interpreter-irregexp.h +0 -0
  242. data/ext/v8/upstream/3.1.8/src/json.js +342 -0
  243. data/ext/v8/upstream/3.1.8/src/jsregexp.cc +5340 -0
  244. data/ext/v8/upstream/3.1.8/src/jsregexp.h +1484 -0
  245. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-heavy-inl.h +0 -0
  246. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.cc +430 -0
  247. data/ext/v8/upstream/3.1.8/src/jump-target-heavy.h +244 -0
  248. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-inl.h +0 -0
  249. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target-light-inl.h +0 -0
  250. data/ext/v8/upstream/3.1.8/src/jump-target-light.cc +111 -0
  251. data/ext/v8/upstream/3.1.8/src/jump-target-light.h +193 -0
  252. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.cc +0 -0
  253. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/jump-target.h +0 -0
  254. data/ext/v8/upstream/3.1.8/src/list-inl.h +206 -0
  255. data/ext/v8/upstream/3.1.8/src/list.h +164 -0
  256. data/ext/v8/upstream/3.1.8/src/lithium-allocator-inl.h +140 -0
  257. data/ext/v8/upstream/3.1.8/src/lithium-allocator.cc +2093 -0
  258. data/ext/v8/upstream/3.1.8/src/lithium-allocator.h +644 -0
  259. data/ext/v8/upstream/3.1.8/src/lithium.cc +168 -0
  260. data/ext/v8/upstream/3.1.8/src/lithium.h +592 -0
  261. data/ext/v8/upstream/3.1.8/src/liveedit-debugger.js +1082 -0
  262. data/ext/v8/upstream/3.1.8/src/liveedit.cc +1650 -0
  263. data/ext/v8/upstream/3.1.8/src/liveedit.h +174 -0
  264. data/ext/v8/upstream/3.1.8/src/liveobjectlist-inl.h +126 -0
  265. data/ext/v8/upstream/3.1.8/src/liveobjectlist.cc +2527 -0
  266. data/ext/v8/upstream/3.1.8/src/liveobjectlist.h +322 -0
  267. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/log-inl.h +0 -0
  268. data/ext/v8/upstream/3.1.8/src/log-utils.cc +336 -0
  269. data/ext/v8/upstream/3.1.8/src/log-utils.h +232 -0
  270. data/ext/v8/upstream/3.1.8/src/log.cc +1608 -0
  271. data/ext/v8/upstream/3.1.8/src/log.h +379 -0
  272. data/ext/v8/upstream/3.1.8/src/macro-assembler.h +120 -0
  273. data/ext/v8/upstream/3.1.8/src/macros.py +178 -0
  274. data/ext/v8/upstream/3.1.8/src/mark-compact.cc +2957 -0
  275. data/ext/v8/upstream/3.1.8/src/mark-compact.h +433 -0
  276. data/ext/v8/upstream/3.1.8/src/math.js +264 -0
  277. data/ext/v8/upstream/3.1.8/src/memory.h +82 -0
  278. data/ext/v8/upstream/3.1.8/src/messages.cc +164 -0
  279. data/ext/v8/upstream/3.1.8/src/messages.h +114 -0
  280. data/ext/v8/upstream/3.1.8/src/messages.js +1071 -0
  281. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips-inl.h +0 -0
  282. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/assembler-mips.cc +0 -0
  283. data/ext/v8/upstream/3.1.8/src/mips/assembler-mips.h +667 -0
  284. data/ext/v8/upstream/3.1.8/src/mips/builtins-mips.cc +205 -0
  285. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips-inl.h +0 -0
  286. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/codegen-mips.cc +0 -0
  287. data/ext/v8/upstream/3.1.8/src/mips/codegen-mips.h +431 -0
  288. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.cc +0 -0
  289. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/constants-mips.h +0 -0
  290. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/cpu-mips.cc +0 -0
  291. data/ext/v8/upstream/3.1.8/src/mips/debug-mips.cc +127 -0
  292. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/disasm-mips.cc +0 -0
  293. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/fast-codegen-mips.cc +0 -0
  294. data/ext/v8/upstream/3.1.8/src/mips/frames-mips.cc +96 -0
  295. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/frames-mips.h +0 -0
  296. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/full-codegen-mips.cc +0 -0
  297. data/ext/v8/upstream/3.1.8/src/mips/ic-mips.cc +208 -0
  298. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/jump-target-mips.cc +0 -0
  299. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.cc +0 -0
  300. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/macro-assembler-mips.h +0 -0
  301. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips-inl.h +0 -0
  302. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.cc +0 -0
  303. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/register-allocator-mips.h +0 -0
  304. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.cc +1650 -0
  305. data/ext/v8/upstream/3.1.8/src/mips/simulator-mips.h +311 -0
  306. data/ext/v8/upstream/3.1.8/src/mips/stub-cache-mips.cc +418 -0
  307. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.cc +0 -0
  308. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mips/virtual-frame-mips.h +0 -0
  309. data/ext/v8/upstream/3.1.8/src/mirror-debugger.js +2380 -0
  310. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/mksnapshot.cc +0 -0
  311. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/natives.h +0 -0
  312. data/ext/v8/upstream/3.1.8/src/objects-debug.cc +722 -0
  313. data/ext/v8/upstream/3.1.8/src/objects-inl.h +3946 -0
  314. data/ext/v8/upstream/3.1.8/src/objects-printer.cc +801 -0
  315. data/ext/v8/upstream/3.1.8/src/objects-visiting.cc +142 -0
  316. data/ext/v8/upstream/3.1.8/src/objects-visiting.h +401 -0
  317. data/ext/v8/upstream/3.1.8/src/objects.cc +10044 -0
  318. data/ext/v8/upstream/3.1.8/src/objects.h +6571 -0
  319. data/ext/v8/upstream/3.1.8/src/parser.cc +5165 -0
  320. data/ext/v8/upstream/3.1.8/src/parser.h +802 -0
  321. data/ext/v8/upstream/3.1.8/src/platform-cygwin.cc +745 -0
  322. data/ext/v8/upstream/3.1.8/src/platform-freebsd.cc +702 -0
  323. data/ext/v8/upstream/3.1.8/src/platform-linux.cc +981 -0
  324. data/ext/v8/upstream/3.1.8/src/platform-macos.cc +732 -0
  325. data/ext/v8/upstream/3.1.8/src/platform-nullos.cc +498 -0
  326. data/ext/v8/upstream/3.1.8/src/platform-openbsd.cc +657 -0
  327. data/ext/v8/upstream/3.1.8/src/platform-posix.cc +399 -0
  328. data/ext/v8/upstream/3.1.8/src/platform-solaris.cc +714 -0
  329. data/ext/v8/upstream/3.1.8/src/platform-win32.cc +1974 -0
  330. data/ext/v8/upstream/3.1.8/src/platform.h +636 -0
  331. data/ext/v8/upstream/3.1.8/src/preparse-data.cc +183 -0
  332. data/ext/v8/upstream/3.1.8/src/preparse-data.h +249 -0
  333. data/ext/v8/upstream/3.1.8/src/preparser-api.cc +213 -0
  334. data/ext/v8/upstream/3.1.8/src/preparser.cc +1205 -0
  335. data/ext/v8/upstream/3.1.8/src/preparser.h +278 -0
  336. data/ext/v8/upstream/3.1.8/src/prettyprinter.cc +1539 -0
  337. data/ext/v8/upstream/3.1.8/src/prettyprinter.h +223 -0
  338. data/ext/v8/upstream/3.1.8/src/profile-generator-inl.h +128 -0
  339. data/ext/v8/upstream/3.1.8/src/profile-generator.cc +2899 -0
  340. data/ext/v8/upstream/3.1.8/src/profile-generator.h +1151 -0
  341. data/ext/v8/upstream/3.1.8/src/property.cc +96 -0
  342. data/ext/v8/upstream/3.1.8/src/property.h +337 -0
  343. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  344. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.cc +470 -0
  345. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-irregexp.h +142 -0
  346. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.cc +373 -0
  347. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler-tracer.h +104 -0
  348. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.cc +257 -0
  349. data/ext/v8/upstream/3.1.8/src/regexp-macro-assembler.h +231 -0
  350. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.cc +0 -0
  351. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/regexp-stack.h +0 -0
  352. data/ext/v8/upstream/3.1.8/src/regexp.js +483 -0
  353. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator-inl.h +0 -0
  354. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.cc +0 -0
  355. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/register-allocator.h +0 -0
  356. data/ext/v8/upstream/3.1.8/src/rewriter.cc +1023 -0
  357. data/ext/v8/upstream/3.1.8/src/rewriter.h +59 -0
  358. data/ext/v8/upstream/3.1.8/src/runtime-profiler.cc +443 -0
  359. data/ext/v8/upstream/3.1.8/src/runtime-profiler.h +77 -0
  360. data/ext/v8/upstream/3.1.8/src/runtime.cc +11592 -0
  361. data/ext/v8/upstream/3.1.8/src/runtime.h +582 -0
  362. data/ext/v8/upstream/3.1.8/src/runtime.js +643 -0
  363. data/ext/v8/upstream/3.1.8/src/safepoint-table.cc +253 -0
  364. data/ext/v8/upstream/3.1.8/src/safepoint-table.h +263 -0
  365. data/ext/v8/upstream/3.1.8/src/scanner-base.cc +971 -0
  366. data/ext/v8/upstream/3.1.8/src/scanner-base.h +653 -0
  367. data/ext/v8/upstream/3.1.8/src/scanner.cc +586 -0
  368. data/ext/v8/upstream/3.1.8/src/scanner.h +194 -0
  369. data/ext/v8/upstream/3.1.8/src/scopeinfo.cc +636 -0
  370. data/ext/v8/upstream/3.1.8/src/scopeinfo.h +238 -0
  371. data/ext/v8/upstream/3.1.8/src/scopes.cc +1063 -0
  372. data/ext/v8/upstream/3.1.8/src/scopes.h +494 -0
  373. data/ext/v8/upstream/3.1.8/src/serialize.cc +1535 -0
  374. data/ext/v8/upstream/3.1.8/src/serialize.h +584 -0
  375. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/shell.h +0 -0
  376. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/simulator.h +0 -0
  377. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/smart-pointer.h +0 -0
  378. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-common.cc +0 -0
  379. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot-empty.cc +0 -0
  380. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/snapshot.h +0 -0
  381. data/ext/v8/upstream/3.1.8/src/spaces-inl.h +524 -0
  382. data/ext/v8/upstream/3.1.8/src/spaces.cc +3254 -0
  383. data/ext/v8/upstream/3.1.8/src/spaces.h +2362 -0
  384. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree-inl.h +0 -0
  385. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/splay-tree.h +0 -0
  386. data/ext/v8/upstream/3.1.8/src/string-search.cc +40 -0
  387. data/ext/v8/upstream/3.1.8/src/string-search.h +567 -0
  388. data/ext/v8/upstream/3.1.8/src/string-stream.cc +584 -0
  389. data/ext/v8/upstream/3.1.8/src/string-stream.h +191 -0
  390. data/ext/v8/upstream/3.1.8/src/string.js +915 -0
  391. data/ext/v8/upstream/3.1.8/src/strtod.cc +440 -0
  392. data/ext/v8/upstream/3.1.8/src/strtod.h +40 -0
  393. data/ext/v8/upstream/3.1.8/src/stub-cache.cc +1878 -0
  394. data/ext/v8/upstream/3.1.8/src/stub-cache.h +849 -0
  395. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/third_party/valgrind/valgrind.h +0 -0
  396. data/ext/v8/upstream/3.1.8/src/token.cc +63 -0
  397. data/ext/v8/upstream/3.1.8/src/token.h +288 -0
  398. data/ext/v8/upstream/3.1.8/src/top.cc +1152 -0
  399. data/ext/v8/upstream/3.1.8/src/top.h +608 -0
  400. data/ext/v8/upstream/3.1.8/src/type-info.cc +406 -0
  401. data/ext/v8/upstream/3.1.8/src/type-info.h +283 -0
  402. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue-inl.h +0 -0
  403. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/unbound-queue.h +0 -0
  404. data/ext/v8/upstream/3.1.8/src/unicode-inl.h +238 -0
  405. data/ext/v8/upstream/3.1.8/src/unicode.cc +1624 -0
  406. data/ext/v8/upstream/3.1.8/src/unicode.h +280 -0
  407. data/ext/v8/upstream/3.1.8/src/uri.js +402 -0
  408. data/ext/v8/upstream/3.1.8/src/utils.cc +371 -0
  409. data/ext/v8/upstream/3.1.8/src/utils.h +793 -0
  410. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8-counters.cc +0 -0
  411. data/ext/v8/upstream/3.1.8/src/v8-counters.h +290 -0
  412. data/ext/v8/upstream/3.1.8/src/v8.cc +270 -0
  413. data/ext/v8/upstream/3.1.8/src/v8.h +127 -0
  414. data/ext/v8/upstream/3.1.8/src/v8checks.h +64 -0
  415. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/v8dll-main.cc +0 -0
  416. data/ext/v8/upstream/3.1.8/src/v8globals.h +480 -0
  417. data/ext/v8/upstream/3.1.8/src/v8natives.js +1252 -0
  418. data/ext/v8/upstream/3.1.8/src/v8preparserdll-main.cc +39 -0
  419. data/ext/v8/upstream/3.1.8/src/v8threads.cc +440 -0
  420. data/ext/v8/upstream/3.1.8/src/v8threads.h +157 -0
  421. data/ext/v8/upstream/3.1.8/src/v8utils.h +354 -0
  422. data/ext/v8/upstream/3.1.8/src/variables.cc +132 -0
  423. data/ext/v8/upstream/3.1.8/src/variables.h +212 -0
  424. data/ext/v8/upstream/3.1.8/src/version.cc +95 -0
  425. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/version.h +0 -0
  426. data/ext/v8/upstream/3.1.8/src/virtual-frame-heavy-inl.h +190 -0
  427. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-heavy.cc +0 -0
  428. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-inl.h +0 -0
  429. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light-inl.h +0 -0
  430. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame-light.cc +0 -0
  431. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/virtual-frame.cc +0 -0
  432. data/ext/v8/upstream/3.1.8/src/virtual-frame.h +59 -0
  433. data/ext/v8/upstream/3.1.8/src/vm-state-inl.h +134 -0
  434. data/ext/v8/upstream/3.1.8/src/vm-state.h +68 -0
  435. data/ext/v8/upstream/3.1.8/src/win32-headers.h +95 -0
  436. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64-inl.h +455 -0
  437. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.cc +3162 -0
  438. data/ext/v8/upstream/3.1.8/src/x64/assembler-x64.h +1584 -0
  439. data/ext/v8/upstream/3.1.8/src/x64/builtins-x64.cc +1492 -0
  440. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.cc +5150 -0
  441. data/ext/v8/upstream/3.1.8/src/x64/code-stubs-x64.h +519 -0
  442. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64-inl.h +46 -0
  443. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.cc +8835 -0
  444. data/ext/v8/upstream/3.1.8/src/x64/codegen-x64.h +750 -0
  445. data/ext/v8/upstream/3.1.8/src/x64/cpu-x64.cc +86 -0
  446. data/ext/v8/upstream/3.1.8/src/x64/debug-x64.cc +316 -0
  447. data/ext/v8/upstream/3.1.8/src/x64/deoptimizer-x64.cc +781 -0
  448. data/ext/v8/upstream/3.1.8/src/x64/disasm-x64.cc +1737 -0
  449. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.cc +45 -0
  450. data/ext/v8/upstream/3.1.8/src/x64/frames-x64.h +130 -0
  451. data/ext/v8/upstream/3.1.8/src/x64/full-codegen-x64.cc +3984 -0
  452. data/ext/v8/upstream/3.1.8/src/x64/ic-x64.cc +1761 -0
  453. data/ext/v8/upstream/3.1.8/src/x64/jump-target-x64.cc +437 -0
  454. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.cc +3639 -0
  455. data/ext/v8/upstream/3.1.8/src/x64/lithium-codegen-x64.h +305 -0
  456. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  457. data/ext/v8/upstream/3.1.8/src/x64/lithium-gap-resolver-x64.h +74 -0
  458. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.cc +2044 -0
  459. data/ext/v8/upstream/3.1.8/src/x64/lithium-x64.h +2052 -0
  460. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.cc +2660 -0
  461. data/ext/v8/upstream/3.1.8/src/x64/macro-assembler-x64.h +1852 -0
  462. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.cc +1382 -0
  463. data/ext/v8/upstream/3.1.8/src/x64/regexp-macro-assembler-x64.h +278 -0
  464. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64-inl.h +0 -0
  465. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.cc +0 -0
  466. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/register-allocator-x64.h +0 -0
  467. data/ext/v8/upstream/{2.3.3 → 3.1.8}/src/x64/simulator-x64.cc +0 -0
  468. data/ext/v8/upstream/3.1.8/src/x64/simulator-x64.h +71 -0
  469. data/ext/v8/upstream/3.1.8/src/x64/stub-cache-x64.cc +3509 -0
  470. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.cc +1292 -0
  471. data/ext/v8/upstream/3.1.8/src/x64/virtual-frame-x64.h +593 -0
  472. data/ext/v8/upstream/3.1.8/src/zone-inl.h +83 -0
  473. data/ext/v8/upstream/3.1.8/src/zone.cc +195 -0
  474. data/ext/v8/upstream/3.1.8/src/zone.h +233 -0
  475. data/ext/v8/upstream/3.1.8/tools/codemap.js +265 -0
  476. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/consarray.js +0 -0
  477. data/ext/v8/upstream/3.1.8/tools/csvparser.js +78 -0
  478. data/ext/v8/upstream/3.1.8/tools/disasm.py +92 -0
  479. data/ext/v8/upstream/3.1.8/tools/gc-nvp-trace-processor.py +328 -0
  480. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/generate-ten-powers.scm +0 -0
  481. data/ext/v8/upstream/3.1.8/tools/grokdump.py +840 -0
  482. data/ext/v8/upstream/3.1.8/tools/gyp/v8.gyp +869 -0
  483. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/js2c.py +0 -0
  484. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/jsmin.py +0 -0
  485. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/linux-tick-processor +0 -0
  486. data/ext/v8/upstream/3.1.8/tools/ll_prof.py +919 -0
  487. data/ext/v8/upstream/3.1.8/tools/logreader.js +185 -0
  488. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-nm +0 -0
  489. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/mac-tick-processor +0 -0
  490. data/ext/v8/upstream/3.1.8/tools/oom_dump/README +31 -0
  491. data/ext/v8/upstream/3.1.8/tools/oom_dump/SConstruct +42 -0
  492. data/ext/v8/upstream/3.1.8/tools/oom_dump/oom_dump.cc +288 -0
  493. data/ext/v8/upstream/3.1.8/tools/presubmit.py +305 -0
  494. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/process-heap-prof.py +0 -0
  495. data/ext/v8/upstream/3.1.8/tools/profile.js +751 -0
  496. data/ext/v8/upstream/3.1.8/tools/profile_view.js +219 -0
  497. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/run-valgrind.py +0 -0
  498. data/ext/v8/upstream/3.1.8/tools/splaytree.js +316 -0
  499. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/stats-viewer.py +0 -0
  500. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/tickprocessor-driver.js +0 -0
  501. data/ext/v8/upstream/3.1.8/tools/tickprocessor.js +863 -0
  502. data/ext/v8/upstream/3.1.8/tools/utils.py +96 -0
  503. data/ext/v8/upstream/3.1.8/tools/visual_studio/README.txt +70 -0
  504. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/arm.vsprops +0 -0
  505. data/ext/v8/upstream/3.1.8/tools/visual_studio/common.vsprops +34 -0
  506. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8.vcproj +0 -0
  507. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_arm.vcproj +0 -0
  508. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8_x64.vcproj +0 -0
  509. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/d8js2c.cmd +0 -0
  510. data/ext/v8/upstream/3.1.8/tools/visual_studio/debug.vsprops +17 -0
  511. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/ia32.vsprops +0 -0
  512. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/js2c.cmd +0 -0
  513. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/release.vsprops +0 -0
  514. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.sln +0 -0
  515. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8.vcproj +0 -0
  516. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.sln +0 -0
  517. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_arm.vcproj +0 -0
  518. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base.vcproj +1296 -0
  519. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_arm.vcproj +1234 -0
  520. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_base_x64.vcproj +1296 -0
  521. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest.vcproj +0 -0
  522. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  523. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  524. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  525. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  526. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  527. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  528. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  529. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample.vcproj +147 -0
  530. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_arm.vcproj +147 -0
  531. data/ext/v8/upstream/3.1.8/tools/visual_studio/v8_shell_sample_x64.vcproj +163 -0
  532. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  533. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  534. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  535. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  536. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.sln +0 -0
  537. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/v8_x64.vcproj +0 -0
  538. data/ext/v8/upstream/{2.3.3 → 3.1.8}/tools/visual_studio/x64.vsprops +0 -0
  539. data/ext/v8/upstream/3.1.8/tools/windows-tick-processor.bat +30 -0
  540. data/ext/v8/upstream/Makefile +2 -1
  541. data/ext/v8/v8_template.cpp +2 -2
  542. data/lib/v8/version.rb +1 -1
  543. data/spec/redjs/jsapi_spec.rb +2 -2
  544. metadata +552 -490
  545. data/ext/v8/upstream/2.3.3/.gitignore +0 -26
  546. data/ext/v8/upstream/2.3.3/AUTHORS +0 -31
  547. data/ext/v8/upstream/2.3.3/ChangeLog +0 -1916
  548. data/ext/v8/upstream/2.3.3/LICENSE +0 -55
  549. data/ext/v8/upstream/2.3.3/SConstruct +0 -1154
  550. data/ext/v8/upstream/2.3.3/include/v8-debug.h +0 -381
  551. data/ext/v8/upstream/2.3.3/include/v8-profiler.h +0 -353
  552. data/ext/v8/upstream/2.3.3/include/v8.h +0 -3616
  553. data/ext/v8/upstream/2.3.3/src/SConscript +0 -330
  554. data/ext/v8/upstream/2.3.3/src/accessors.cc +0 -661
  555. data/ext/v8/upstream/2.3.3/src/accessors.h +0 -114
  556. data/ext/v8/upstream/2.3.3/src/allocation.cc +0 -198
  557. data/ext/v8/upstream/2.3.3/src/allocation.h +0 -169
  558. data/ext/v8/upstream/2.3.3/src/api.cc +0 -4795
  559. data/ext/v8/upstream/2.3.3/src/api.h +0 -485
  560. data/ext/v8/upstream/2.3.3/src/apiutils.h +0 -69
  561. data/ext/v8/upstream/2.3.3/src/arguments.h +0 -96
  562. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm-inl.h +0 -305
  563. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.cc +0 -2580
  564. data/ext/v8/upstream/2.3.3/src/arm/assembler-arm.h +0 -1275
  565. data/ext/v8/upstream/2.3.3/src/arm/builtins-arm.cc +0 -1320
  566. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +0 -48
  567. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.cc +0 -11398
  568. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm.h +0 -1102
  569. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.cc +0 -154
  570. data/ext/v8/upstream/2.3.3/src/arm/constants-arm.h +0 -388
  571. data/ext/v8/upstream/2.3.3/src/arm/cpu-arm.cc +0 -142
  572. data/ext/v8/upstream/2.3.3/src/arm/debug-arm.cc +0 -309
  573. data/ext/v8/upstream/2.3.3/src/arm/disasm-arm.cc +0 -1459
  574. data/ext/v8/upstream/2.3.3/src/arm/fast-codegen-arm.cc +0 -241
  575. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.cc +0 -123
  576. data/ext/v8/upstream/2.3.3/src/arm/frames-arm.h +0 -162
  577. data/ext/v8/upstream/2.3.3/src/arm/full-codegen-arm.cc +0 -3178
  578. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +0 -2258
  579. data/ext/v8/upstream/2.3.3/src/arm/jump-target-arm.cc +0 -164
  580. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.cc +0 -1892
  581. data/ext/v8/upstream/2.3.3/src/arm/macro-assembler-arm.h +0 -727
  582. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.cc +0 -1261
  583. data/ext/v8/upstream/2.3.3/src/arm/regexp-macro-assembler-arm.h +0 -266
  584. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.cc +0 -2822
  585. data/ext/v8/upstream/2.3.3/src/arm/simulator-arm.h +0 -361
  586. data/ext/v8/upstream/2.3.3/src/arm/stub-cache-arm.cc +0 -2387
  587. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.cc +0 -834
  588. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm.h +0 -519
  589. data/ext/v8/upstream/2.3.3/src/array.js +0 -1127
  590. data/ext/v8/upstream/2.3.3/src/assembler.cc +0 -801
  591. data/ext/v8/upstream/2.3.3/src/assembler.h +0 -573
  592. data/ext/v8/upstream/2.3.3/src/ast-inl.h +0 -81
  593. data/ext/v8/upstream/2.3.3/src/ast.cc +0 -1152
  594. data/ext/v8/upstream/2.3.3/src/ast.h +0 -2106
  595. data/ext/v8/upstream/2.3.3/src/bootstrapper.cc +0 -1819
  596. data/ext/v8/upstream/2.3.3/src/builtins.cc +0 -1529
  597. data/ext/v8/upstream/2.3.3/src/builtins.h +0 -263
  598. data/ext/v8/upstream/2.3.3/src/bytecodes-irregexp.h +0 -104
  599. data/ext/v8/upstream/2.3.3/src/cached-powers.h +0 -119
  600. data/ext/v8/upstream/2.3.3/src/char-predicates-inl.h +0 -86
  601. data/ext/v8/upstream/2.3.3/src/checks.cc +0 -100
  602. data/ext/v8/upstream/2.3.3/src/checks.h +0 -310
  603. data/ext/v8/upstream/2.3.3/src/circular-queue.cc +0 -121
  604. data/ext/v8/upstream/2.3.3/src/code-stubs.cc +0 -177
  605. data/ext/v8/upstream/2.3.3/src/code-stubs.h +0 -177
  606. data/ext/v8/upstream/2.3.3/src/codegen-inl.h +0 -60
  607. data/ext/v8/upstream/2.3.3/src/codegen.cc +0 -516
  608. data/ext/v8/upstream/2.3.3/src/codegen.h +0 -897
  609. data/ext/v8/upstream/2.3.3/src/compilation-cache.cc +0 -562
  610. data/ext/v8/upstream/2.3.3/src/compilation-cache.h +0 -102
  611. data/ext/v8/upstream/2.3.3/src/compiler.cc +0 -654
  612. data/ext/v8/upstream/2.3.3/src/compiler.h +0 -299
  613. data/ext/v8/upstream/2.3.3/src/contexts.cc +0 -256
  614. data/ext/v8/upstream/2.3.3/src/contexts.h +0 -342
  615. data/ext/v8/upstream/2.3.3/src/conversions.cc +0 -1119
  616. data/ext/v8/upstream/2.3.3/src/conversions.h +0 -123
  617. data/ext/v8/upstream/2.3.3/src/counters.h +0 -239
  618. data/ext/v8/upstream/2.3.3/src/cpu-profiler-inl.h +0 -100
  619. data/ext/v8/upstream/2.3.3/src/cpu-profiler.cc +0 -538
  620. data/ext/v8/upstream/2.3.3/src/cpu-profiler.h +0 -285
  621. data/ext/v8/upstream/2.3.3/src/d8-debug.cc +0 -356
  622. data/ext/v8/upstream/2.3.3/src/d8-debug.h +0 -155
  623. data/ext/v8/upstream/2.3.3/src/d8.cc +0 -783
  624. data/ext/v8/upstream/2.3.3/src/d8.h +0 -227
  625. data/ext/v8/upstream/2.3.3/src/d8.js +0 -1683
  626. data/ext/v8/upstream/2.3.3/src/data-flow.cc +0 -758
  627. data/ext/v8/upstream/2.3.3/src/data-flow.h +0 -278
  628. data/ext/v8/upstream/2.3.3/src/date.js +0 -1059
  629. data/ext/v8/upstream/2.3.3/src/dateparser-inl.h +0 -123
  630. data/ext/v8/upstream/2.3.3/src/dateparser.h +0 -244
  631. data/ext/v8/upstream/2.3.3/src/debug-agent.cc +0 -427
  632. data/ext/v8/upstream/2.3.3/src/debug-agent.h +0 -129
  633. data/ext/v8/upstream/2.3.3/src/debug-debugger.js +0 -2227
  634. data/ext/v8/upstream/2.3.3/src/debug.cc +0 -3005
  635. data/ext/v8/upstream/2.3.3/src/debug.h +0 -993
  636. data/ext/v8/upstream/2.3.3/src/disassembler.cc +0 -312
  637. data/ext/v8/upstream/2.3.3/src/double.h +0 -169
  638. data/ext/v8/upstream/2.3.3/src/dtoa-config.c +0 -92
  639. data/ext/v8/upstream/2.3.3/src/dtoa.cc +0 -77
  640. data/ext/v8/upstream/2.3.3/src/dtoa.h +0 -81
  641. data/ext/v8/upstream/2.3.3/src/execution.cc +0 -809
  642. data/ext/v8/upstream/2.3.3/src/execution.h +0 -336
  643. data/ext/v8/upstream/2.3.3/src/factory.cc +0 -1003
  644. data/ext/v8/upstream/2.3.3/src/factory.h +0 -410
  645. data/ext/v8/upstream/2.3.3/src/fast-codegen.cc +0 -746
  646. data/ext/v8/upstream/2.3.3/src/fast-codegen.h +0 -161
  647. data/ext/v8/upstream/2.3.3/src/fast-dtoa.cc +0 -505
  648. data/ext/v8/upstream/2.3.3/src/fast-dtoa.h +0 -58
  649. data/ext/v8/upstream/2.3.3/src/flag-definitions.h +0 -455
  650. data/ext/v8/upstream/2.3.3/src/flags.cc +0 -551
  651. data/ext/v8/upstream/2.3.3/src/flags.h +0 -81
  652. data/ext/v8/upstream/2.3.3/src/flow-graph.cc +0 -763
  653. data/ext/v8/upstream/2.3.3/src/flow-graph.h +0 -180
  654. data/ext/v8/upstream/2.3.3/src/frame-element.h +0 -273
  655. data/ext/v8/upstream/2.3.3/src/frames-inl.h +0 -217
  656. data/ext/v8/upstream/2.3.3/src/frames.cc +0 -826
  657. data/ext/v8/upstream/2.3.3/src/frames.h +0 -682
  658. data/ext/v8/upstream/2.3.3/src/full-codegen.cc +0 -1443
  659. data/ext/v8/upstream/2.3.3/src/full-codegen.h +0 -548
  660. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.cc +0 -76
  661. data/ext/v8/upstream/2.3.3/src/func-name-inferrer.h +0 -135
  662. data/ext/v8/upstream/2.3.3/src/global-handles.cc +0 -520
  663. data/ext/v8/upstream/2.3.3/src/global-handles.h +0 -180
  664. data/ext/v8/upstream/2.3.3/src/globals.h +0 -669
  665. data/ext/v8/upstream/2.3.3/src/handles-inl.h +0 -76
  666. data/ext/v8/upstream/2.3.3/src/handles.cc +0 -825
  667. data/ext/v8/upstream/2.3.3/src/handles.h +0 -393
  668. data/ext/v8/upstream/2.3.3/src/hashmap.cc +0 -226
  669. data/ext/v8/upstream/2.3.3/src/hashmap.h +0 -120
  670. data/ext/v8/upstream/2.3.3/src/heap-inl.h +0 -493
  671. data/ext/v8/upstream/2.3.3/src/heap-profiler.cc +0 -779
  672. data/ext/v8/upstream/2.3.3/src/heap-profiler.h +0 -323
  673. data/ext/v8/upstream/2.3.3/src/heap.cc +0 -4994
  674. data/ext/v8/upstream/2.3.3/src/heap.h +0 -1984
  675. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32-inl.h +0 -360
  676. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.cc +0 -2600
  677. data/ext/v8/upstream/2.3.3/src/ia32/assembler-ia32.h +0 -969
  678. data/ext/v8/upstream/2.3.3/src/ia32/builtins-ia32.cc +0 -1261
  679. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.cc +0 -13968
  680. data/ext/v8/upstream/2.3.3/src/ia32/codegen-ia32.h +0 -1097
  681. data/ext/v8/upstream/2.3.3/src/ia32/cpu-ia32.cc +0 -83
  682. data/ext/v8/upstream/2.3.3/src/ia32/debug-ia32.cc +0 -309
  683. data/ext/v8/upstream/2.3.3/src/ia32/disasm-ia32.cc +0 -1471
  684. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.cc +0 -954
  685. data/ext/v8/upstream/2.3.3/src/ia32/fast-codegen-ia32.h +0 -155
  686. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.cc +0 -115
  687. data/ext/v8/upstream/2.3.3/src/ia32/frames-ia32.h +0 -135
  688. data/ext/v8/upstream/2.3.3/src/ia32/full-codegen-ia32.cc +0 -3281
  689. data/ext/v8/upstream/2.3.3/src/ia32/ic-ia32.cc +0 -1966
  690. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.cc +0 -1610
  691. data/ext/v8/upstream/2.3.3/src/ia32/macro-assembler-ia32.h +0 -610
  692. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.cc +0 -1247
  693. data/ext/v8/upstream/2.3.3/src/ia32/regexp-macro-assembler-ia32.h +0 -214
  694. data/ext/v8/upstream/2.3.3/src/ia32/simulator-ia32.h +0 -62
  695. data/ext/v8/upstream/2.3.3/src/ia32/stub-cache-ia32.cc +0 -2750
  696. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.cc +0 -1334
  697. data/ext/v8/upstream/2.3.3/src/ia32/virtual-frame-ia32.h +0 -627
  698. data/ext/v8/upstream/2.3.3/src/ic-inl.h +0 -120
  699. data/ext/v8/upstream/2.3.3/src/ic.cc +0 -1827
  700. data/ext/v8/upstream/2.3.3/src/ic.h +0 -515
  701. data/ext/v8/upstream/2.3.3/src/interpreter-irregexp.cc +0 -646
  702. data/ext/v8/upstream/2.3.3/src/json.js +0 -268
  703. data/ext/v8/upstream/2.3.3/src/jsregexp.cc +0 -5283
  704. data/ext/v8/upstream/2.3.3/src/jsregexp.h +0 -1463
  705. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.cc +0 -429
  706. data/ext/v8/upstream/2.3.3/src/jump-target-heavy.h +0 -244
  707. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +0 -110
  708. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +0 -192
  709. data/ext/v8/upstream/2.3.3/src/list-inl.h +0 -166
  710. data/ext/v8/upstream/2.3.3/src/list.h +0 -159
  711. data/ext/v8/upstream/2.3.3/src/liveedit-debugger.js +0 -1057
  712. data/ext/v8/upstream/2.3.3/src/liveedit.cc +0 -1480
  713. data/ext/v8/upstream/2.3.3/src/liveedit.h +0 -170
  714. data/ext/v8/upstream/2.3.3/src/log-utils.cc +0 -497
  715. data/ext/v8/upstream/2.3.3/src/log-utils.h +0 -289
  716. data/ext/v8/upstream/2.3.3/src/log.cc +0 -1561
  717. data/ext/v8/upstream/2.3.3/src/log.h +0 -384
  718. data/ext/v8/upstream/2.3.3/src/macro-assembler.h +0 -86
  719. data/ext/v8/upstream/2.3.3/src/macros.py +0 -177
  720. data/ext/v8/upstream/2.3.3/src/mark-compact.cc +0 -2330
  721. data/ext/v8/upstream/2.3.3/src/mark-compact.h +0 -451
  722. data/ext/v8/upstream/2.3.3/src/math.js +0 -264
  723. data/ext/v8/upstream/2.3.3/src/memory.h +0 -74
  724. data/ext/v8/upstream/2.3.3/src/messages.cc +0 -183
  725. data/ext/v8/upstream/2.3.3/src/messages.h +0 -113
  726. data/ext/v8/upstream/2.3.3/src/messages.js +0 -982
  727. data/ext/v8/upstream/2.3.3/src/mips/assembler-mips.h +0 -668
  728. data/ext/v8/upstream/2.3.3/src/mips/builtins-mips.cc +0 -205
  729. data/ext/v8/upstream/2.3.3/src/mips/codegen-mips.h +0 -434
  730. data/ext/v8/upstream/2.3.3/src/mips/debug-mips.cc +0 -131
  731. data/ext/v8/upstream/2.3.3/src/mips/frames-mips.cc +0 -102
  732. data/ext/v8/upstream/2.3.3/src/mips/ic-mips.cc +0 -220
  733. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.cc +0 -1651
  734. data/ext/v8/upstream/2.3.3/src/mips/simulator-mips.h +0 -311
  735. data/ext/v8/upstream/2.3.3/src/mips/stub-cache-mips.cc +0 -403
  736. data/ext/v8/upstream/2.3.3/src/mirror-debugger.js +0 -2380
  737. data/ext/v8/upstream/2.3.3/src/objects-debug.cc +0 -1366
  738. data/ext/v8/upstream/2.3.3/src/objects-inl.h +0 -3333
  739. data/ext/v8/upstream/2.3.3/src/objects.cc +0 -8820
  740. data/ext/v8/upstream/2.3.3/src/objects.h +0 -5373
  741. data/ext/v8/upstream/2.3.3/src/oprofile-agent.cc +0 -108
  742. data/ext/v8/upstream/2.3.3/src/oprofile-agent.h +0 -77
  743. data/ext/v8/upstream/2.3.3/src/parser.cc +0 -5207
  744. data/ext/v8/upstream/2.3.3/src/parser.h +0 -197
  745. data/ext/v8/upstream/2.3.3/src/platform-freebsd.cc +0 -667
  746. data/ext/v8/upstream/2.3.3/src/platform-linux.cc +0 -862
  747. data/ext/v8/upstream/2.3.3/src/platform-macos.cc +0 -665
  748. data/ext/v8/upstream/2.3.3/src/platform-nullos.cc +0 -454
  749. data/ext/v8/upstream/2.3.3/src/platform-openbsd.cc +0 -622
  750. data/ext/v8/upstream/2.3.3/src/platform-posix.cc +0 -362
  751. data/ext/v8/upstream/2.3.3/src/platform-solaris.cc +0 -653
  752. data/ext/v8/upstream/2.3.3/src/platform-win32.cc +0 -1911
  753. data/ext/v8/upstream/2.3.3/src/platform.h +0 -577
  754. data/ext/v8/upstream/2.3.3/src/powers-ten.h +0 -2461
  755. data/ext/v8/upstream/2.3.3/src/prettyprinter.cc +0 -1531
  756. data/ext/v8/upstream/2.3.3/src/prettyprinter.h +0 -221
  757. data/ext/v8/upstream/2.3.3/src/profile-generator-inl.h +0 -148
  758. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +0 -1830
  759. data/ext/v8/upstream/2.3.3/src/profile-generator.h +0 -853
  760. data/ext/v8/upstream/2.3.3/src/property.cc +0 -96
  761. data/ext/v8/upstream/2.3.3/src/property.h +0 -315
  762. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.cc +0 -464
  763. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-irregexp.h +0 -141
  764. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.cc +0 -356
  765. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler-tracer.h +0 -103
  766. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.cc +0 -261
  767. data/ext/v8/upstream/2.3.3/src/regexp-macro-assembler.h +0 -228
  768. data/ext/v8/upstream/2.3.3/src/regexp.js +0 -549
  769. data/ext/v8/upstream/2.3.3/src/rewriter.cc +0 -1038
  770. data/ext/v8/upstream/2.3.3/src/rewriter.h +0 -54
  771. data/ext/v8/upstream/2.3.3/src/runtime.cc +0 -10599
  772. data/ext/v8/upstream/2.3.3/src/runtime.h +0 -459
  773. data/ext/v8/upstream/2.3.3/src/runtime.js +0 -629
  774. data/ext/v8/upstream/2.3.3/src/scanner.cc +0 -1346
  775. data/ext/v8/upstream/2.3.3/src/scanner.h +0 -503
  776. data/ext/v8/upstream/2.3.3/src/scopeinfo.cc +0 -637
  777. data/ext/v8/upstream/2.3.3/src/scopeinfo.h +0 -233
  778. data/ext/v8/upstream/2.3.3/src/scopes.cc +0 -962
  779. data/ext/v8/upstream/2.3.3/src/scopes.h +0 -400
  780. data/ext/v8/upstream/2.3.3/src/serialize.cc +0 -1461
  781. data/ext/v8/upstream/2.3.3/src/serialize.h +0 -581
  782. data/ext/v8/upstream/2.3.3/src/spaces-inl.h +0 -483
  783. data/ext/v8/upstream/2.3.3/src/spaces.cc +0 -2901
  784. data/ext/v8/upstream/2.3.3/src/spaces.h +0 -2197
  785. data/ext/v8/upstream/2.3.3/src/string-stream.cc +0 -584
  786. data/ext/v8/upstream/2.3.3/src/string-stream.h +0 -189
  787. data/ext/v8/upstream/2.3.3/src/string.js +0 -1006
  788. data/ext/v8/upstream/2.3.3/src/stub-cache.cc +0 -1379
  789. data/ext/v8/upstream/2.3.3/src/stub-cache.h +0 -756
  790. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/COPYING +0 -15
  791. data/ext/v8/upstream/2.3.3/src/third_party/dtoa/dtoa.c +0 -3334
  792. data/ext/v8/upstream/2.3.3/src/token.cc +0 -56
  793. data/ext/v8/upstream/2.3.3/src/token.h +0 -270
  794. data/ext/v8/upstream/2.3.3/src/top.cc +0 -1067
  795. data/ext/v8/upstream/2.3.3/src/top.h +0 -463
  796. data/ext/v8/upstream/2.3.3/src/type-info.cc +0 -53
  797. data/ext/v8/upstream/2.3.3/src/type-info.h +0 -244
  798. data/ext/v8/upstream/2.3.3/src/unicode-inl.h +0 -238
  799. data/ext/v8/upstream/2.3.3/src/unicode.cc +0 -749
  800. data/ext/v8/upstream/2.3.3/src/unicode.h +0 -279
  801. data/ext/v8/upstream/2.3.3/src/uri.js +0 -415
  802. data/ext/v8/upstream/2.3.3/src/utils.cc +0 -285
  803. data/ext/v8/upstream/2.3.3/src/utils.h +0 -745
  804. data/ext/v8/upstream/2.3.3/src/v8-counters.h +0 -250
  805. data/ext/v8/upstream/2.3.3/src/v8.cc +0 -228
  806. data/ext/v8/upstream/2.3.3/src/v8.h +0 -121
  807. data/ext/v8/upstream/2.3.3/src/v8natives.js +0 -1188
  808. data/ext/v8/upstream/2.3.3/src/v8threads.cc +0 -461
  809. data/ext/v8/upstream/2.3.3/src/v8threads.h +0 -159
  810. data/ext/v8/upstream/2.3.3/src/variables.cc +0 -119
  811. data/ext/v8/upstream/2.3.3/src/variables.h +0 -205
  812. data/ext/v8/upstream/2.3.3/src/version.cc +0 -88
  813. data/ext/v8/upstream/2.3.3/src/virtual-frame-heavy-inl.h +0 -192
  814. data/ext/v8/upstream/2.3.3/src/virtual-frame.h +0 -46
  815. data/ext/v8/upstream/2.3.3/src/vm-state-inl.h +0 -137
  816. data/ext/v8/upstream/2.3.3/src/vm-state.cc +0 -39
  817. data/ext/v8/upstream/2.3.3/src/vm-state.h +0 -77
  818. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64-inl.h +0 -400
  819. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.cc +0 -2963
  820. data/ext/v8/upstream/2.3.3/src/x64/assembler-x64.h +0 -1438
  821. data/ext/v8/upstream/2.3.3/src/x64/builtins-x64.cc +0 -1296
  822. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64-inl.h +0 -46
  823. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.cc +0 -12491
  824. data/ext/v8/upstream/2.3.3/src/x64/codegen-x64.h +0 -1090
  825. data/ext/v8/upstream/2.3.3/src/x64/cpu-x64.cc +0 -83
  826. data/ext/v8/upstream/2.3.3/src/x64/debug-x64.cc +0 -267
  827. data/ext/v8/upstream/2.3.3/src/x64/disasm-x64.cc +0 -1696
  828. data/ext/v8/upstream/2.3.3/src/x64/fast-codegen-x64.cc +0 -250
  829. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.cc +0 -113
  830. data/ext/v8/upstream/2.3.3/src/x64/frames-x64.h +0 -125
  831. data/ext/v8/upstream/2.3.3/src/x64/full-codegen-x64.cc +0 -3270
  832. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +0 -1907
  833. data/ext/v8/upstream/2.3.3/src/x64/jump-target-x64.cc +0 -437
  834. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.cc +0 -2793
  835. data/ext/v8/upstream/2.3.3/src/x64/macro-assembler-x64.h +0 -916
  836. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.cc +0 -1374
  837. data/ext/v8/upstream/2.3.3/src/x64/regexp-macro-assembler-x64.h +0 -277
  838. data/ext/v8/upstream/2.3.3/src/x64/simulator-x64.h +0 -63
  839. data/ext/v8/upstream/2.3.3/src/x64/stub-cache-x64.cc +0 -2560
  840. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.cc +0 -1264
  841. data/ext/v8/upstream/2.3.3/src/x64/virtual-frame-x64.h +0 -590
  842. data/ext/v8/upstream/2.3.3/src/zone-inl.h +0 -82
  843. data/ext/v8/upstream/2.3.3/src/zone.cc +0 -194
  844. data/ext/v8/upstream/2.3.3/src/zone.h +0 -221
  845. data/ext/v8/upstream/2.3.3/tools/codemap.js +0 -270
  846. data/ext/v8/upstream/2.3.3/tools/csvparser.js +0 -83
  847. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +0 -317
  848. data/ext/v8/upstream/2.3.3/tools/gyp/v8.gyp +0 -749
  849. data/ext/v8/upstream/2.3.3/tools/linux-tick-processor.py +0 -78
  850. data/ext/v8/upstream/2.3.3/tools/logreader.js +0 -338
  851. data/ext/v8/upstream/2.3.3/tools/oprofile/annotate +0 -7
  852. data/ext/v8/upstream/2.3.3/tools/oprofile/common +0 -19
  853. data/ext/v8/upstream/2.3.3/tools/oprofile/dump +0 -7
  854. data/ext/v8/upstream/2.3.3/tools/oprofile/report +0 -7
  855. data/ext/v8/upstream/2.3.3/tools/oprofile/reset +0 -7
  856. data/ext/v8/upstream/2.3.3/tools/oprofile/run +0 -14
  857. data/ext/v8/upstream/2.3.3/tools/oprofile/shutdown +0 -7
  858. data/ext/v8/upstream/2.3.3/tools/oprofile/start +0 -7
  859. data/ext/v8/upstream/2.3.3/tools/presubmit.py +0 -299
  860. data/ext/v8/upstream/2.3.3/tools/profile.js +0 -691
  861. data/ext/v8/upstream/2.3.3/tools/profile_view.js +0 -224
  862. data/ext/v8/upstream/2.3.3/tools/splaytree.js +0 -322
  863. data/ext/v8/upstream/2.3.3/tools/splaytree.py +0 -226
  864. data/ext/v8/upstream/2.3.3/tools/tickprocessor.js +0 -862
  865. data/ext/v8/upstream/2.3.3/tools/tickprocessor.py +0 -571
  866. data/ext/v8/upstream/2.3.3/tools/utils.py +0 -88
  867. data/ext/v8/upstream/2.3.3/tools/visual_studio/README.txt +0 -71
  868. data/ext/v8/upstream/2.3.3/tools/visual_studio/common.vsprops +0 -34
  869. data/ext/v8/upstream/2.3.3/tools/visual_studio/debug.vsprops +0 -17
  870. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base.vcproj +0 -1143
  871. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_arm.vcproj +0 -1115
  872. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_base_x64.vcproj +0 -1096
  873. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample.vcproj +0 -145
  874. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -145
  875. data/ext/v8/upstream/2.3.3/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -161
  876. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.bat +0 -29
  877. data/ext/v8/upstream/2.3.3/tools/windows-tick-processor.py +0 -137
@@ -1,2901 +0,0 @@
1
- // Copyright 2006-2008 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #include "macro-assembler.h"
31
- #include "mark-compact.h"
32
- #include "platform.h"
33
-
34
- namespace v8 {
35
- namespace internal {
36
-
37
- // For contiguous spaces, top should be in the space (or at the end) and limit
38
- // should be the end of the space.
39
- #define ASSERT_SEMISPACE_ALLOCATION_INFO(info, space) \
40
- ASSERT((space).low() <= (info).top \
41
- && (info).top <= (space).high() \
42
- && (info).limit == (space).high())
43
-
44
- intptr_t Page::watermark_invalidated_mark_ = Page::WATERMARK_INVALIDATED;
45
-
46
- // ----------------------------------------------------------------------------
47
- // HeapObjectIterator
48
-
49
- HeapObjectIterator::HeapObjectIterator(PagedSpace* space) {
50
- Initialize(space->bottom(), space->top(), NULL);
51
- }
52
-
53
-
54
- HeapObjectIterator::HeapObjectIterator(PagedSpace* space,
55
- HeapObjectCallback size_func) {
56
- Initialize(space->bottom(), space->top(), size_func);
57
- }
58
-
59
-
60
- HeapObjectIterator::HeapObjectIterator(PagedSpace* space, Address start) {
61
- Initialize(start, space->top(), NULL);
62
- }
63
-
64
-
65
- HeapObjectIterator::HeapObjectIterator(PagedSpace* space, Address start,
66
- HeapObjectCallback size_func) {
67
- Initialize(start, space->top(), size_func);
68
- }
69
-
70
-
71
- void HeapObjectIterator::Initialize(Address cur, Address end,
72
- HeapObjectCallback size_f) {
73
- cur_addr_ = cur;
74
- end_addr_ = end;
75
- end_page_ = Page::FromAllocationTop(end);
76
- size_func_ = size_f;
77
- Page* p = Page::FromAllocationTop(cur_addr_);
78
- cur_limit_ = (p == end_page_) ? end_addr_ : p->AllocationTop();
79
-
80
- #ifdef DEBUG
81
- Verify();
82
- #endif
83
- }
84
-
85
-
86
- HeapObject* HeapObjectIterator::FromNextPage() {
87
- if (cur_addr_ == end_addr_) return NULL;
88
-
89
- Page* cur_page = Page::FromAllocationTop(cur_addr_);
90
- cur_page = cur_page->next_page();
91
- ASSERT(cur_page->is_valid());
92
-
93
- cur_addr_ = cur_page->ObjectAreaStart();
94
- cur_limit_ = (cur_page == end_page_) ? end_addr_ : cur_page->AllocationTop();
95
-
96
- if (cur_addr_ == end_addr_) return NULL;
97
- ASSERT(cur_addr_ < cur_limit_);
98
- #ifdef DEBUG
99
- Verify();
100
- #endif
101
- return FromCurrentPage();
102
- }
103
-
104
-
105
- #ifdef DEBUG
106
- void HeapObjectIterator::Verify() {
107
- Page* p = Page::FromAllocationTop(cur_addr_);
108
- ASSERT(p == Page::FromAllocationTop(cur_limit_));
109
- ASSERT(p->Offset(cur_addr_) <= p->Offset(cur_limit_));
110
- }
111
- #endif
112
-
113
-
114
- // -----------------------------------------------------------------------------
115
- // PageIterator
116
-
117
- PageIterator::PageIterator(PagedSpace* space, Mode mode) : space_(space) {
118
- prev_page_ = NULL;
119
- switch (mode) {
120
- case PAGES_IN_USE:
121
- stop_page_ = space->AllocationTopPage();
122
- break;
123
- case PAGES_USED_BY_MC:
124
- stop_page_ = space->MCRelocationTopPage();
125
- break;
126
- case ALL_PAGES:
127
- #ifdef DEBUG
128
- // Verify that the cached last page in the space is actually the
129
- // last page.
130
- for (Page* p = space->first_page_; p->is_valid(); p = p->next_page()) {
131
- if (!p->next_page()->is_valid()) {
132
- ASSERT(space->last_page_ == p);
133
- }
134
- }
135
- #endif
136
- stop_page_ = space->last_page_;
137
- break;
138
- }
139
- }
140
-
141
-
142
- // -----------------------------------------------------------------------------
143
- // CodeRange
144
-
145
- List<CodeRange::FreeBlock> CodeRange::free_list_(0);
146
- List<CodeRange::FreeBlock> CodeRange::allocation_list_(0);
147
- int CodeRange::current_allocation_block_index_ = 0;
148
- VirtualMemory* CodeRange::code_range_ = NULL;
149
-
150
-
151
- bool CodeRange::Setup(const size_t requested) {
152
- ASSERT(code_range_ == NULL);
153
-
154
- code_range_ = new VirtualMemory(requested);
155
- CHECK(code_range_ != NULL);
156
- if (!code_range_->IsReserved()) {
157
- delete code_range_;
158
- code_range_ = NULL;
159
- return false;
160
- }
161
-
162
- // We are sure that we have mapped a block of requested addresses.
163
- ASSERT(code_range_->size() == requested);
164
- LOG(NewEvent("CodeRange", code_range_->address(), requested));
165
- allocation_list_.Add(FreeBlock(code_range_->address(), code_range_->size()));
166
- current_allocation_block_index_ = 0;
167
- return true;
168
- }
169
-
170
-
171
- int CodeRange::CompareFreeBlockAddress(const FreeBlock* left,
172
- const FreeBlock* right) {
173
- // The entire point of CodeRange is that the difference between two
174
- // addresses in the range can be represented as a signed 32-bit int,
175
- // so the cast is semantically correct.
176
- return static_cast<int>(left->start - right->start);
177
- }
178
-
179
-
180
- void CodeRange::GetNextAllocationBlock(size_t requested) {
181
- for (current_allocation_block_index_++;
182
- current_allocation_block_index_ < allocation_list_.length();
183
- current_allocation_block_index_++) {
184
- if (requested <= allocation_list_[current_allocation_block_index_].size) {
185
- return; // Found a large enough allocation block.
186
- }
187
- }
188
-
189
- // Sort and merge the free blocks on the free list and the allocation list.
190
- free_list_.AddAll(allocation_list_);
191
- allocation_list_.Clear();
192
- free_list_.Sort(&CompareFreeBlockAddress);
193
- for (int i = 0; i < free_list_.length();) {
194
- FreeBlock merged = free_list_[i];
195
- i++;
196
- // Add adjacent free blocks to the current merged block.
197
- while (i < free_list_.length() &&
198
- free_list_[i].start == merged.start + merged.size) {
199
- merged.size += free_list_[i].size;
200
- i++;
201
- }
202
- if (merged.size > 0) {
203
- allocation_list_.Add(merged);
204
- }
205
- }
206
- free_list_.Clear();
207
-
208
- for (current_allocation_block_index_ = 0;
209
- current_allocation_block_index_ < allocation_list_.length();
210
- current_allocation_block_index_++) {
211
- if (requested <= allocation_list_[current_allocation_block_index_].size) {
212
- return; // Found a large enough allocation block.
213
- }
214
- }
215
-
216
- // Code range is full or too fragmented.
217
- V8::FatalProcessOutOfMemory("CodeRange::GetNextAllocationBlock");
218
- }
219
-
220
-
221
-
222
- void* CodeRange::AllocateRawMemory(const size_t requested, size_t* allocated) {
223
- ASSERT(current_allocation_block_index_ < allocation_list_.length());
224
- if (requested > allocation_list_[current_allocation_block_index_].size) {
225
- // Find an allocation block large enough. This function call may
226
- // call V8::FatalProcessOutOfMemory if it cannot find a large enough block.
227
- GetNextAllocationBlock(requested);
228
- }
229
- // Commit the requested memory at the start of the current allocation block.
230
- *allocated = RoundUp(requested, Page::kPageSize);
231
- FreeBlock current = allocation_list_[current_allocation_block_index_];
232
- if (*allocated >= current.size - Page::kPageSize) {
233
- // Don't leave a small free block, useless for a large object or chunk.
234
- *allocated = current.size;
235
- }
236
- ASSERT(*allocated <= current.size);
237
- if (!code_range_->Commit(current.start, *allocated, true)) {
238
- *allocated = 0;
239
- return NULL;
240
- }
241
- allocation_list_[current_allocation_block_index_].start += *allocated;
242
- allocation_list_[current_allocation_block_index_].size -= *allocated;
243
- if (*allocated == current.size) {
244
- GetNextAllocationBlock(0); // This block is used up, get the next one.
245
- }
246
- return current.start;
247
- }
248
-
249
-
250
- void CodeRange::FreeRawMemory(void* address, size_t length) {
251
- free_list_.Add(FreeBlock(address, length));
252
- code_range_->Uncommit(address, length);
253
- }
254
-
255
-
256
- void CodeRange::TearDown() {
257
- delete code_range_; // Frees all memory in the virtual memory range.
258
- code_range_ = NULL;
259
- free_list_.Free();
260
- allocation_list_.Free();
261
- }
262
-
263
-
264
- // -----------------------------------------------------------------------------
265
- // MemoryAllocator
266
- //
267
- int MemoryAllocator::capacity_ = 0;
268
- int MemoryAllocator::size_ = 0;
269
-
270
- VirtualMemory* MemoryAllocator::initial_chunk_ = NULL;
271
-
272
- // 270 is an estimate based on the static default heap size of a pair of 256K
273
- // semispaces and a 64M old generation.
274
- const int kEstimatedNumberOfChunks = 270;
275
- List<MemoryAllocator::ChunkInfo> MemoryAllocator::chunks_(
276
- kEstimatedNumberOfChunks);
277
- List<int> MemoryAllocator::free_chunk_ids_(kEstimatedNumberOfChunks);
278
- int MemoryAllocator::max_nof_chunks_ = 0;
279
- int MemoryAllocator::top_ = 0;
280
-
281
-
282
- void MemoryAllocator::Push(int free_chunk_id) {
283
- ASSERT(max_nof_chunks_ > 0);
284
- ASSERT(top_ < max_nof_chunks_);
285
- free_chunk_ids_[top_++] = free_chunk_id;
286
- }
287
-
288
-
289
- int MemoryAllocator::Pop() {
290
- ASSERT(top_ > 0);
291
- return free_chunk_ids_[--top_];
292
- }
293
-
294
-
295
- bool MemoryAllocator::Setup(int capacity) {
296
- capacity_ = RoundUp(capacity, Page::kPageSize);
297
-
298
- // Over-estimate the size of chunks_ array. It assumes the expansion of old
299
- // space is always in the unit of a chunk (kChunkSize) except the last
300
- // expansion.
301
- //
302
- // Due to alignment, allocated space might be one page less than required
303
- // number (kPagesPerChunk) of pages for old spaces.
304
- //
305
- // Reserve two chunk ids for semispaces, one for map space, one for old
306
- // space, and one for code space.
307
- max_nof_chunks_ = (capacity_ / (kChunkSize - Page::kPageSize)) + 5;
308
- if (max_nof_chunks_ > kMaxNofChunks) return false;
309
-
310
- size_ = 0;
311
- ChunkInfo info; // uninitialized element.
312
- for (int i = max_nof_chunks_ - 1; i >= 0; i--) {
313
- chunks_.Add(info);
314
- free_chunk_ids_.Add(i);
315
- }
316
- top_ = max_nof_chunks_;
317
- return true;
318
- }
319
-
320
-
321
- void MemoryAllocator::TearDown() {
322
- for (int i = 0; i < max_nof_chunks_; i++) {
323
- if (chunks_[i].address() != NULL) DeleteChunk(i);
324
- }
325
- chunks_.Clear();
326
- free_chunk_ids_.Clear();
327
-
328
- if (initial_chunk_ != NULL) {
329
- LOG(DeleteEvent("InitialChunk", initial_chunk_->address()));
330
- delete initial_chunk_;
331
- initial_chunk_ = NULL;
332
- }
333
-
334
- ASSERT(top_ == max_nof_chunks_); // all chunks are free
335
- top_ = 0;
336
- capacity_ = 0;
337
- size_ = 0;
338
- max_nof_chunks_ = 0;
339
- }
340
-
341
-
342
- void* MemoryAllocator::AllocateRawMemory(const size_t requested,
343
- size_t* allocated,
344
- Executability executable) {
345
- if (size_ + static_cast<size_t>(requested) > static_cast<size_t>(capacity_)) {
346
- return NULL;
347
- }
348
- void* mem;
349
- if (executable == EXECUTABLE && CodeRange::exists()) {
350
- mem = CodeRange::AllocateRawMemory(requested, allocated);
351
- } else {
352
- mem = OS::Allocate(requested, allocated, (executable == EXECUTABLE));
353
- }
354
- int alloced = static_cast<int>(*allocated);
355
- size_ += alloced;
356
- #ifdef DEBUG
357
- ZapBlock(reinterpret_cast<Address>(mem), alloced);
358
- #endif
359
- Counters::memory_allocated.Increment(alloced);
360
- return mem;
361
- }
362
-
363
-
364
- void MemoryAllocator::FreeRawMemory(void* mem, size_t length) {
365
- #ifdef DEBUG
366
- ZapBlock(reinterpret_cast<Address>(mem), length);
367
- #endif
368
- if (CodeRange::contains(static_cast<Address>(mem))) {
369
- CodeRange::FreeRawMemory(mem, length);
370
- } else {
371
- OS::Free(mem, length);
372
- }
373
- Counters::memory_allocated.Decrement(static_cast<int>(length));
374
- size_ -= static_cast<int>(length);
375
- ASSERT(size_ >= 0);
376
- }
377
-
378
-
379
- void* MemoryAllocator::ReserveInitialChunk(const size_t requested) {
380
- ASSERT(initial_chunk_ == NULL);
381
-
382
- initial_chunk_ = new VirtualMemory(requested);
383
- CHECK(initial_chunk_ != NULL);
384
- if (!initial_chunk_->IsReserved()) {
385
- delete initial_chunk_;
386
- initial_chunk_ = NULL;
387
- return NULL;
388
- }
389
-
390
- // We are sure that we have mapped a block of requested addresses.
391
- ASSERT(initial_chunk_->size() == requested);
392
- LOG(NewEvent("InitialChunk", initial_chunk_->address(), requested));
393
- size_ += static_cast<int>(requested);
394
- return initial_chunk_->address();
395
- }
396
-
397
-
398
- static int PagesInChunk(Address start, size_t size) {
399
- // The first page starts on the first page-aligned address from start onward
400
- // and the last page ends on the last page-aligned address before
401
- // start+size. Page::kPageSize is a power of two so we can divide by
402
- // shifting.
403
- return static_cast<int>((RoundDown(start + size, Page::kPageSize)
404
- - RoundUp(start, Page::kPageSize)) >> kPageSizeBits);
405
- }
406
-
407
-
408
- Page* MemoryAllocator::AllocatePages(int requested_pages, int* allocated_pages,
409
- PagedSpace* owner) {
410
- if (requested_pages <= 0) return Page::FromAddress(NULL);
411
- size_t chunk_size = requested_pages * Page::kPageSize;
412
-
413
- // There is not enough space to guarantee the desired number pages can be
414
- // allocated.
415
- if (size_ + static_cast<int>(chunk_size) > capacity_) {
416
- // Request as many pages as we can.
417
- chunk_size = capacity_ - size_;
418
- requested_pages = static_cast<int>(chunk_size >> kPageSizeBits);
419
-
420
- if (requested_pages <= 0) return Page::FromAddress(NULL);
421
- }
422
- void* chunk = AllocateRawMemory(chunk_size, &chunk_size, owner->executable());
423
- if (chunk == NULL) return Page::FromAddress(NULL);
424
- LOG(NewEvent("PagedChunk", chunk, chunk_size));
425
-
426
- *allocated_pages = PagesInChunk(static_cast<Address>(chunk), chunk_size);
427
- if (*allocated_pages == 0) {
428
- FreeRawMemory(chunk, chunk_size);
429
- LOG(DeleteEvent("PagedChunk", chunk));
430
- return Page::FromAddress(NULL);
431
- }
432
-
433
- int chunk_id = Pop();
434
- chunks_[chunk_id].init(static_cast<Address>(chunk), chunk_size, owner);
435
-
436
- return InitializePagesInChunk(chunk_id, *allocated_pages, owner);
437
- }
438
-
439
-
440
- Page* MemoryAllocator::CommitPages(Address start, size_t size,
441
- PagedSpace* owner, int* num_pages) {
442
- ASSERT(start != NULL);
443
- *num_pages = PagesInChunk(start, size);
444
- ASSERT(*num_pages > 0);
445
- ASSERT(initial_chunk_ != NULL);
446
- ASSERT(InInitialChunk(start));
447
- ASSERT(InInitialChunk(start + size - 1));
448
- if (!initial_chunk_->Commit(start, size, owner->executable() == EXECUTABLE)) {
449
- return Page::FromAddress(NULL);
450
- }
451
- #ifdef DEBUG
452
- ZapBlock(start, size);
453
- #endif
454
- Counters::memory_allocated.Increment(static_cast<int>(size));
455
-
456
- // So long as we correctly overestimated the number of chunks we should not
457
- // run out of chunk ids.
458
- CHECK(!OutOfChunkIds());
459
- int chunk_id = Pop();
460
- chunks_[chunk_id].init(start, size, owner);
461
- return InitializePagesInChunk(chunk_id, *num_pages, owner);
462
- }
463
-
464
-
465
- bool MemoryAllocator::CommitBlock(Address start,
466
- size_t size,
467
- Executability executable) {
468
- ASSERT(start != NULL);
469
- ASSERT(size > 0);
470
- ASSERT(initial_chunk_ != NULL);
471
- ASSERT(InInitialChunk(start));
472
- ASSERT(InInitialChunk(start + size - 1));
473
-
474
- if (!initial_chunk_->Commit(start, size, executable)) return false;
475
- #ifdef DEBUG
476
- ZapBlock(start, size);
477
- #endif
478
- Counters::memory_allocated.Increment(static_cast<int>(size));
479
- return true;
480
- }
481
-
482
-
483
- bool MemoryAllocator::UncommitBlock(Address start, size_t size) {
484
- ASSERT(start != NULL);
485
- ASSERT(size > 0);
486
- ASSERT(initial_chunk_ != NULL);
487
- ASSERT(InInitialChunk(start));
488
- ASSERT(InInitialChunk(start + size - 1));
489
-
490
- if (!initial_chunk_->Uncommit(start, size)) return false;
491
- Counters::memory_allocated.Decrement(static_cast<int>(size));
492
- return true;
493
- }
494
-
495
-
496
- void MemoryAllocator::ZapBlock(Address start, size_t size) {
497
- for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) {
498
- Memory::Address_at(start + s) = kZapValue;
499
- }
500
- }
501
-
502
-
503
- Page* MemoryAllocator::InitializePagesInChunk(int chunk_id, int pages_in_chunk,
504
- PagedSpace* owner) {
505
- ASSERT(IsValidChunk(chunk_id));
506
- ASSERT(pages_in_chunk > 0);
507
-
508
- Address chunk_start = chunks_[chunk_id].address();
509
-
510
- Address low = RoundUp(chunk_start, Page::kPageSize);
511
-
512
- #ifdef DEBUG
513
- size_t chunk_size = chunks_[chunk_id].size();
514
- Address high = RoundDown(chunk_start + chunk_size, Page::kPageSize);
515
- ASSERT(pages_in_chunk <=
516
- ((OffsetFrom(high) - OffsetFrom(low)) / Page::kPageSize));
517
- #endif
518
-
519
- Address page_addr = low;
520
- for (int i = 0; i < pages_in_chunk; i++) {
521
- Page* p = Page::FromAddress(page_addr);
522
- p->opaque_header = OffsetFrom(page_addr + Page::kPageSize) | chunk_id;
523
- p->InvalidateWatermark(true);
524
- p->SetIsLargeObjectPage(false);
525
- p->SetAllocationWatermark(p->ObjectAreaStart());
526
- p->SetCachedAllocationWatermark(p->ObjectAreaStart());
527
- page_addr += Page::kPageSize;
528
- }
529
-
530
- // Set the next page of the last page to 0.
531
- Page* last_page = Page::FromAddress(page_addr - Page::kPageSize);
532
- last_page->opaque_header = OffsetFrom(0) | chunk_id;
533
-
534
- return Page::FromAddress(low);
535
- }
536
-
537
-
538
- Page* MemoryAllocator::FreePages(Page* p) {
539
- if (!p->is_valid()) return p;
540
-
541
- // Find the first page in the same chunk as 'p'
542
- Page* first_page = FindFirstPageInSameChunk(p);
543
- Page* page_to_return = Page::FromAddress(NULL);
544
-
545
- if (p != first_page) {
546
- // Find the last page in the same chunk as 'prev'.
547
- Page* last_page = FindLastPageInSameChunk(p);
548
- first_page = GetNextPage(last_page); // first page in next chunk
549
-
550
- // set the next_page of last_page to NULL
551
- SetNextPage(last_page, Page::FromAddress(NULL));
552
- page_to_return = p; // return 'p' when exiting
553
- }
554
-
555
- while (first_page->is_valid()) {
556
- int chunk_id = GetChunkId(first_page);
557
- ASSERT(IsValidChunk(chunk_id));
558
-
559
- // Find the first page of the next chunk before deleting this chunk.
560
- first_page = GetNextPage(FindLastPageInSameChunk(first_page));
561
-
562
- // Free the current chunk.
563
- DeleteChunk(chunk_id);
564
- }
565
-
566
- return page_to_return;
567
- }
568
-
569
-
570
- void MemoryAllocator::FreeAllPages(PagedSpace* space) {
571
- for (int i = 0, length = chunks_.length(); i < length; i++) {
572
- if (chunks_[i].owner() == space) {
573
- DeleteChunk(i);
574
- }
575
- }
576
- }
577
-
578
-
579
- void MemoryAllocator::DeleteChunk(int chunk_id) {
580
- ASSERT(IsValidChunk(chunk_id));
581
-
582
- ChunkInfo& c = chunks_[chunk_id];
583
-
584
- // We cannot free a chunk contained in the initial chunk because it was not
585
- // allocated with AllocateRawMemory. Instead we uncommit the virtual
586
- // memory.
587
- if (InInitialChunk(c.address())) {
588
- // TODO(1240712): VirtualMemory::Uncommit has a return value which
589
- // is ignored here.
590
- initial_chunk_->Uncommit(c.address(), c.size());
591
- Counters::memory_allocated.Decrement(static_cast<int>(c.size()));
592
- } else {
593
- LOG(DeleteEvent("PagedChunk", c.address()));
594
- FreeRawMemory(c.address(), c.size());
595
- }
596
- c.init(NULL, 0, NULL);
597
- Push(chunk_id);
598
- }
599
-
600
-
601
- Page* MemoryAllocator::FindFirstPageInSameChunk(Page* p) {
602
- int chunk_id = GetChunkId(p);
603
- ASSERT(IsValidChunk(chunk_id));
604
-
605
- Address low = RoundUp(chunks_[chunk_id].address(), Page::kPageSize);
606
- return Page::FromAddress(low);
607
- }
608
-
609
-
610
- Page* MemoryAllocator::FindLastPageInSameChunk(Page* p) {
611
- int chunk_id = GetChunkId(p);
612
- ASSERT(IsValidChunk(chunk_id));
613
-
614
- Address chunk_start = chunks_[chunk_id].address();
615
- size_t chunk_size = chunks_[chunk_id].size();
616
-
617
- Address high = RoundDown(chunk_start + chunk_size, Page::kPageSize);
618
- ASSERT(chunk_start <= p->address() && p->address() < high);
619
-
620
- return Page::FromAddress(high - Page::kPageSize);
621
- }
622
-
623
-
624
- #ifdef DEBUG
625
- void MemoryAllocator::ReportStatistics() {
626
- float pct = static_cast<float>(capacity_ - size_) / capacity_;
627
- PrintF(" capacity: %d, used: %d, available: %%%d\n\n",
628
- capacity_, size_, static_cast<int>(pct*100));
629
- }
630
- #endif
631
-
632
-
633
- void MemoryAllocator::RelinkPageListInChunkOrder(PagedSpace* space,
634
- Page** first_page,
635
- Page** last_page,
636
- Page** last_page_in_use) {
637
- Page* first = NULL;
638
- Page* last = NULL;
639
-
640
- for (int i = 0, length = chunks_.length(); i < length; i++) {
641
- ChunkInfo& chunk = chunks_[i];
642
-
643
- if (chunk.owner() == space) {
644
- if (first == NULL) {
645
- Address low = RoundUp(chunk.address(), Page::kPageSize);
646
- first = Page::FromAddress(low);
647
- }
648
- last = RelinkPagesInChunk(i,
649
- chunk.address(),
650
- chunk.size(),
651
- last,
652
- last_page_in_use);
653
- }
654
- }
655
-
656
- if (first_page != NULL) {
657
- *first_page = first;
658
- }
659
-
660
- if (last_page != NULL) {
661
- *last_page = last;
662
- }
663
- }
664
-
665
-
666
- Page* MemoryAllocator::RelinkPagesInChunk(int chunk_id,
667
- Address chunk_start,
668
- size_t chunk_size,
669
- Page* prev,
670
- Page** last_page_in_use) {
671
- Address page_addr = RoundUp(chunk_start, Page::kPageSize);
672
- int pages_in_chunk = PagesInChunk(chunk_start, chunk_size);
673
-
674
- if (prev->is_valid()) {
675
- SetNextPage(prev, Page::FromAddress(page_addr));
676
- }
677
-
678
- for (int i = 0; i < pages_in_chunk; i++) {
679
- Page* p = Page::FromAddress(page_addr);
680
- p->opaque_header = OffsetFrom(page_addr + Page::kPageSize) | chunk_id;
681
- page_addr += Page::kPageSize;
682
-
683
- p->InvalidateWatermark(true);
684
- if (p->WasInUseBeforeMC()) {
685
- *last_page_in_use = p;
686
- }
687
- }
688
-
689
- // Set the next page of the last page to 0.
690
- Page* last_page = Page::FromAddress(page_addr - Page::kPageSize);
691
- last_page->opaque_header = OffsetFrom(0) | chunk_id;
692
-
693
- if (last_page->WasInUseBeforeMC()) {
694
- *last_page_in_use = last_page;
695
- }
696
-
697
- return last_page;
698
- }
699
-
700
-
701
-
702
- // -----------------------------------------------------------------------------
703
- // PagedSpace implementation
704
-
705
- PagedSpace::PagedSpace(int max_capacity,
706
- AllocationSpace id,
707
- Executability executable)
708
- : Space(id, executable) {
709
- max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
710
- * Page::kObjectAreaSize;
711
- accounting_stats_.Clear();
712
-
713
- allocation_info_.top = NULL;
714
- allocation_info_.limit = NULL;
715
-
716
- mc_forwarding_info_.top = NULL;
717
- mc_forwarding_info_.limit = NULL;
718
- }
719
-
720
-
721
- bool PagedSpace::Setup(Address start, size_t size) {
722
- if (HasBeenSetup()) return false;
723
-
724
- int num_pages = 0;
725
- // Try to use the virtual memory range passed to us. If it is too small to
726
- // contain at least one page, ignore it and allocate instead.
727
- int pages_in_chunk = PagesInChunk(start, size);
728
- if (pages_in_chunk > 0) {
729
- first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
730
- Page::kPageSize * pages_in_chunk,
731
- this, &num_pages);
732
- } else {
733
- int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
734
- max_capacity_ / Page::kObjectAreaSize);
735
- first_page_ =
736
- MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
737
- if (!first_page_->is_valid()) return false;
738
- }
739
-
740
- // We are sure that the first page is valid and that we have at least one
741
- // page.
742
- ASSERT(first_page_->is_valid());
743
- ASSERT(num_pages > 0);
744
- accounting_stats_.ExpandSpace(num_pages * Page::kObjectAreaSize);
745
- ASSERT(Capacity() <= max_capacity_);
746
-
747
- // Sequentially clear region marks in the newly allocated
748
- // pages and cache the current last page in the space.
749
- for (Page* p = first_page_; p->is_valid(); p = p->next_page()) {
750
- p->SetRegionMarks(Page::kAllRegionsCleanMarks);
751
- last_page_ = p;
752
- }
753
-
754
- // Use first_page_ for allocation.
755
- SetAllocationInfo(&allocation_info_, first_page_);
756
-
757
- page_list_is_chunk_ordered_ = true;
758
-
759
- return true;
760
- }
761
-
762
-
763
- bool PagedSpace::HasBeenSetup() {
764
- return (Capacity() > 0);
765
- }
766
-
767
-
768
- void PagedSpace::TearDown() {
769
- MemoryAllocator::FreeAllPages(this);
770
- first_page_ = NULL;
771
- accounting_stats_.Clear();
772
- }
773
-
774
-
775
- #ifdef ENABLE_HEAP_PROTECTION
776
-
777
- void PagedSpace::Protect() {
778
- Page* page = first_page_;
779
- while (page->is_valid()) {
780
- MemoryAllocator::ProtectChunkFromPage(page);
781
- page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
782
- }
783
- }
784
-
785
-
786
- void PagedSpace::Unprotect() {
787
- Page* page = first_page_;
788
- while (page->is_valid()) {
789
- MemoryAllocator::UnprotectChunkFromPage(page);
790
- page = MemoryAllocator::FindLastPageInSameChunk(page)->next_page();
791
- }
792
- }
793
-
794
- #endif
795
-
796
-
797
- void PagedSpace::MarkAllPagesClean() {
798
- PageIterator it(this, PageIterator::ALL_PAGES);
799
- while (it.has_next()) {
800
- it.next()->SetRegionMarks(Page::kAllRegionsCleanMarks);
801
- }
802
- }
803
-
804
-
805
- Object* PagedSpace::FindObject(Address addr) {
806
- // Note: this function can only be called before or after mark-compact GC
807
- // because it accesses map pointers.
808
- ASSERT(!MarkCompactCollector::in_use());
809
-
810
- if (!Contains(addr)) return Failure::Exception();
811
-
812
- Page* p = Page::FromAddress(addr);
813
- ASSERT(IsUsed(p));
814
- Address cur = p->ObjectAreaStart();
815
- Address end = p->AllocationTop();
816
- while (cur < end) {
817
- HeapObject* obj = HeapObject::FromAddress(cur);
818
- Address next = cur + obj->Size();
819
- if ((cur <= addr) && (addr < next)) return obj;
820
- cur = next;
821
- }
822
-
823
- UNREACHABLE();
824
- return Failure::Exception();
825
- }
826
-
827
-
828
- bool PagedSpace::IsUsed(Page* page) {
829
- PageIterator it(this, PageIterator::PAGES_IN_USE);
830
- while (it.has_next()) {
831
- if (page == it.next()) return true;
832
- }
833
- return false;
834
- }
835
-
836
-
837
- void PagedSpace::SetAllocationInfo(AllocationInfo* alloc_info, Page* p) {
838
- alloc_info->top = p->ObjectAreaStart();
839
- alloc_info->limit = p->ObjectAreaEnd();
840
- ASSERT(alloc_info->VerifyPagedAllocation());
841
- }
842
-
843
-
844
- void PagedSpace::MCResetRelocationInfo() {
845
- // Set page indexes.
846
- int i = 0;
847
- PageIterator it(this, PageIterator::ALL_PAGES);
848
- while (it.has_next()) {
849
- Page* p = it.next();
850
- p->mc_page_index = i++;
851
- }
852
-
853
- // Set mc_forwarding_info_ to the first page in the space.
854
- SetAllocationInfo(&mc_forwarding_info_, first_page_);
855
- // All the bytes in the space are 'available'. We will rediscover
856
- // allocated and wasted bytes during GC.
857
- accounting_stats_.Reset();
858
- }
859
-
860
-
861
- int PagedSpace::MCSpaceOffsetForAddress(Address addr) {
862
- #ifdef DEBUG
863
- // The Contains function considers the address at the beginning of a
864
- // page in the page, MCSpaceOffsetForAddress considers it is in the
865
- // previous page.
866
- if (Page::IsAlignedToPageSize(addr)) {
867
- ASSERT(Contains(addr - kPointerSize));
868
- } else {
869
- ASSERT(Contains(addr));
870
- }
871
- #endif
872
-
873
- // If addr is at the end of a page, it belongs to previous page
874
- Page* p = Page::IsAlignedToPageSize(addr)
875
- ? Page::FromAllocationTop(addr)
876
- : Page::FromAddress(addr);
877
- int index = p->mc_page_index;
878
- return (index * Page::kPageSize) + p->Offset(addr);
879
- }
880
-
881
-
882
- // Slow case for reallocating and promoting objects during a compacting
883
- // collection. This function is not space-specific.
884
- HeapObject* PagedSpace::SlowMCAllocateRaw(int size_in_bytes) {
885
- Page* current_page = TopPageOf(mc_forwarding_info_);
886
- if (!current_page->next_page()->is_valid()) {
887
- if (!Expand(current_page)) {
888
- return NULL;
889
- }
890
- }
891
-
892
- // There are surely more pages in the space now.
893
- ASSERT(current_page->next_page()->is_valid());
894
- // We do not add the top of page block for current page to the space's
895
- // free list---the block may contain live objects so we cannot write
896
- // bookkeeping information to it. Instead, we will recover top of page
897
- // blocks when we move objects to their new locations.
898
- //
899
- // We do however write the allocation pointer to the page. The encoding
900
- // of forwarding addresses is as an offset in terms of live bytes, so we
901
- // need quick access to the allocation top of each page to decode
902
- // forwarding addresses.
903
- current_page->SetAllocationWatermark(mc_forwarding_info_.top);
904
- current_page->next_page()->InvalidateWatermark(true);
905
- SetAllocationInfo(&mc_forwarding_info_, current_page->next_page());
906
- return AllocateLinearly(&mc_forwarding_info_, size_in_bytes);
907
- }
908
-
909
-
910
- bool PagedSpace::Expand(Page* last_page) {
911
- ASSERT(max_capacity_ % Page::kObjectAreaSize == 0);
912
- ASSERT(Capacity() % Page::kObjectAreaSize == 0);
913
-
914
- if (Capacity() == max_capacity_) return false;
915
-
916
- ASSERT(Capacity() < max_capacity_);
917
- // Last page must be valid and its next page is invalid.
918
- ASSERT(last_page->is_valid() && !last_page->next_page()->is_valid());
919
-
920
- int available_pages = (max_capacity_ - Capacity()) / Page::kObjectAreaSize;
921
- if (available_pages <= 0) return false;
922
-
923
- int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
924
- Page* p = MemoryAllocator::AllocatePages(desired_pages, &desired_pages, this);
925
- if (!p->is_valid()) return false;
926
-
927
- accounting_stats_.ExpandSpace(desired_pages * Page::kObjectAreaSize);
928
- ASSERT(Capacity() <= max_capacity_);
929
-
930
- MemoryAllocator::SetNextPage(last_page, p);
931
-
932
- // Sequentially clear region marks of new pages and and cache the
933
- // new last page in the space.
934
- while (p->is_valid()) {
935
- p->SetRegionMarks(Page::kAllRegionsCleanMarks);
936
- last_page_ = p;
937
- p = p->next_page();
938
- }
939
-
940
- return true;
941
- }
942
-
943
-
944
- #ifdef DEBUG
945
- int PagedSpace::CountTotalPages() {
946
- int count = 0;
947
- for (Page* p = first_page_; p->is_valid(); p = p->next_page()) {
948
- count++;
949
- }
950
- return count;
951
- }
952
- #endif
953
-
954
-
955
- void PagedSpace::Shrink() {
956
- if (!page_list_is_chunk_ordered_) {
957
- // We can't shrink space if pages is not chunk-ordered
958
- // (see comment for class MemoryAllocator for definition).
959
- return;
960
- }
961
-
962
- // Release half of free pages.
963
- Page* top_page = AllocationTopPage();
964
- ASSERT(top_page->is_valid());
965
-
966
- // Count the number of pages we would like to free.
967
- int pages_to_free = 0;
968
- for (Page* p = top_page->next_page(); p->is_valid(); p = p->next_page()) {
969
- pages_to_free++;
970
- }
971
-
972
- // Free pages after top_page.
973
- Page* p = MemoryAllocator::FreePages(top_page->next_page());
974
- MemoryAllocator::SetNextPage(top_page, p);
975
-
976
- // Find out how many pages we failed to free and update last_page_.
977
- // Please note pages can only be freed in whole chunks.
978
- last_page_ = top_page;
979
- for (Page* p = top_page->next_page(); p->is_valid(); p = p->next_page()) {
980
- pages_to_free--;
981
- last_page_ = p;
982
- }
983
-
984
- accounting_stats_.ShrinkSpace(pages_to_free * Page::kObjectAreaSize);
985
- ASSERT(Capacity() == CountTotalPages() * Page::kObjectAreaSize);
986
- }
987
-
988
-
989
- bool PagedSpace::EnsureCapacity(int capacity) {
990
- if (Capacity() >= capacity) return true;
991
-
992
- // Start from the allocation top and loop to the last page in the space.
993
- Page* last_page = AllocationTopPage();
994
- Page* next_page = last_page->next_page();
995
- while (next_page->is_valid()) {
996
- last_page = MemoryAllocator::FindLastPageInSameChunk(next_page);
997
- next_page = last_page->next_page();
998
- }
999
-
1000
- // Expand the space until it has the required capacity or expansion fails.
1001
- do {
1002
- if (!Expand(last_page)) return false;
1003
- ASSERT(last_page->next_page()->is_valid());
1004
- last_page =
1005
- MemoryAllocator::FindLastPageInSameChunk(last_page->next_page());
1006
- } while (Capacity() < capacity);
1007
-
1008
- return true;
1009
- }
1010
-
1011
-
1012
- #ifdef DEBUG
1013
- void PagedSpace::Print() { }
1014
- #endif
1015
-
1016
-
1017
- #ifdef DEBUG
1018
- // We do not assume that the PageIterator works, because it depends on the
1019
- // invariants we are checking during verification.
1020
- void PagedSpace::Verify(ObjectVisitor* visitor) {
1021
- // The allocation pointer should be valid, and it should be in a page in the
1022
- // space.
1023
- ASSERT(allocation_info_.VerifyPagedAllocation());
1024
- Page* top_page = Page::FromAllocationTop(allocation_info_.top);
1025
- ASSERT(MemoryAllocator::IsPageInSpace(top_page, this));
1026
-
1027
- // Loop over all the pages.
1028
- bool above_allocation_top = false;
1029
- Page* current_page = first_page_;
1030
- while (current_page->is_valid()) {
1031
- if (above_allocation_top) {
1032
- // We don't care what's above the allocation top.
1033
- } else {
1034
- Address top = current_page->AllocationTop();
1035
- if (current_page == top_page) {
1036
- ASSERT(top == allocation_info_.top);
1037
- // The next page will be above the allocation top.
1038
- above_allocation_top = true;
1039
- }
1040
-
1041
- // It should be packed with objects from the bottom to the top.
1042
- Address current = current_page->ObjectAreaStart();
1043
- while (current < top) {
1044
- HeapObject* object = HeapObject::FromAddress(current);
1045
-
1046
- // The first word should be a map, and we expect all map pointers to
1047
- // be in map space.
1048
- Map* map = object->map();
1049
- ASSERT(map->IsMap());
1050
- ASSERT(Heap::map_space()->Contains(map));
1051
-
1052
- // Perform space-specific object verification.
1053
- VerifyObject(object);
1054
-
1055
- // The object itself should look OK.
1056
- object->Verify();
1057
-
1058
- // All the interior pointers should be contained in the heap and
1059
- // have page regions covering intergenerational references should be
1060
- // marked dirty.
1061
- int size = object->Size();
1062
- object->IterateBody(map->instance_type(), size, visitor);
1063
-
1064
- current += size;
1065
- }
1066
-
1067
- // The allocation pointer should not be in the middle of an object.
1068
- ASSERT(current == top);
1069
- }
1070
-
1071
- current_page = current_page->next_page();
1072
- }
1073
- }
1074
- #endif
1075
-
1076
-
1077
- // -----------------------------------------------------------------------------
1078
- // NewSpace implementation
1079
-
1080
-
1081
- bool NewSpace::Setup(Address start, int size) {
1082
- // Setup new space based on the preallocated memory block defined by
1083
- // start and size. The provided space is divided into two semi-spaces.
1084
- // To support fast containment testing in the new space, the size of
1085
- // this chunk must be a power of two and it must be aligned to its size.
1086
- int initial_semispace_capacity = Heap::InitialSemiSpaceSize();
1087
- int maximum_semispace_capacity = Heap::MaxSemiSpaceSize();
1088
-
1089
- ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
1090
- ASSERT(IsPowerOf2(maximum_semispace_capacity));
1091
-
1092
- // Allocate and setup the histogram arrays if necessary.
1093
- #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1094
- allocated_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
1095
- promoted_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
1096
-
1097
- #define SET_NAME(name) allocated_histogram_[name].set_name(#name); \
1098
- promoted_histogram_[name].set_name(#name);
1099
- INSTANCE_TYPE_LIST(SET_NAME)
1100
- #undef SET_NAME
1101
- #endif
1102
-
1103
- ASSERT(size == 2 * Heap::ReservedSemiSpaceSize());
1104
- ASSERT(IsAddressAligned(start, size, 0));
1105
-
1106
- if (!to_space_.Setup(start,
1107
- initial_semispace_capacity,
1108
- maximum_semispace_capacity)) {
1109
- return false;
1110
- }
1111
- if (!from_space_.Setup(start + maximum_semispace_capacity,
1112
- initial_semispace_capacity,
1113
- maximum_semispace_capacity)) {
1114
- return false;
1115
- }
1116
-
1117
- start_ = start;
1118
- address_mask_ = ~(size - 1);
1119
- object_mask_ = address_mask_ | kHeapObjectTagMask;
1120
- object_expected_ = reinterpret_cast<uintptr_t>(start) | kHeapObjectTag;
1121
-
1122
- allocation_info_.top = to_space_.low();
1123
- allocation_info_.limit = to_space_.high();
1124
- mc_forwarding_info_.top = NULL;
1125
- mc_forwarding_info_.limit = NULL;
1126
-
1127
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1128
- return true;
1129
- }
1130
-
1131
-
1132
- void NewSpace::TearDown() {
1133
- #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1134
- if (allocated_histogram_) {
1135
- DeleteArray(allocated_histogram_);
1136
- allocated_histogram_ = NULL;
1137
- }
1138
- if (promoted_histogram_) {
1139
- DeleteArray(promoted_histogram_);
1140
- promoted_histogram_ = NULL;
1141
- }
1142
- #endif
1143
-
1144
- start_ = NULL;
1145
- allocation_info_.top = NULL;
1146
- allocation_info_.limit = NULL;
1147
- mc_forwarding_info_.top = NULL;
1148
- mc_forwarding_info_.limit = NULL;
1149
-
1150
- to_space_.TearDown();
1151
- from_space_.TearDown();
1152
- }
1153
-
1154
-
1155
- #ifdef ENABLE_HEAP_PROTECTION
1156
-
1157
- void NewSpace::Protect() {
1158
- MemoryAllocator::Protect(ToSpaceLow(), Capacity());
1159
- MemoryAllocator::Protect(FromSpaceLow(), Capacity());
1160
- }
1161
-
1162
-
1163
- void NewSpace::Unprotect() {
1164
- MemoryAllocator::Unprotect(ToSpaceLow(), Capacity(),
1165
- to_space_.executable());
1166
- MemoryAllocator::Unprotect(FromSpaceLow(), Capacity(),
1167
- from_space_.executable());
1168
- }
1169
-
1170
- #endif
1171
-
1172
-
1173
- void NewSpace::Flip() {
1174
- SemiSpace tmp = from_space_;
1175
- from_space_ = to_space_;
1176
- to_space_ = tmp;
1177
- }
1178
-
1179
-
1180
- void NewSpace::Grow() {
1181
- ASSERT(Capacity() < MaximumCapacity());
1182
- if (to_space_.Grow()) {
1183
- // Only grow from space if we managed to grow to space.
1184
- if (!from_space_.Grow()) {
1185
- // If we managed to grow to space but couldn't grow from space,
1186
- // attempt to shrink to space.
1187
- if (!to_space_.ShrinkTo(from_space_.Capacity())) {
1188
- // We are in an inconsistent state because we could not
1189
- // commit/uncommit memory from new space.
1190
- V8::FatalProcessOutOfMemory("Failed to grow new space.");
1191
- }
1192
- }
1193
- }
1194
- allocation_info_.limit = to_space_.high();
1195
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1196
- }
1197
-
1198
-
1199
- void NewSpace::Shrink() {
1200
- int new_capacity = Max(InitialCapacity(), 2 * Size());
1201
- int rounded_new_capacity =
1202
- RoundUp(new_capacity, static_cast<int>(OS::AllocateAlignment()));
1203
- if (rounded_new_capacity < Capacity() &&
1204
- to_space_.ShrinkTo(rounded_new_capacity)) {
1205
- // Only shrink from space if we managed to shrink to space.
1206
- if (!from_space_.ShrinkTo(rounded_new_capacity)) {
1207
- // If we managed to shrink to space but couldn't shrink from
1208
- // space, attempt to grow to space again.
1209
- if (!to_space_.GrowTo(from_space_.Capacity())) {
1210
- // We are in an inconsistent state because we could not
1211
- // commit/uncommit memory from new space.
1212
- V8::FatalProcessOutOfMemory("Failed to shrink new space.");
1213
- }
1214
- }
1215
- }
1216
- allocation_info_.limit = to_space_.high();
1217
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1218
- }
1219
-
1220
-
1221
- void NewSpace::ResetAllocationInfo() {
1222
- allocation_info_.top = to_space_.low();
1223
- allocation_info_.limit = to_space_.high();
1224
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1225
- }
1226
-
1227
-
1228
- void NewSpace::MCResetRelocationInfo() {
1229
- mc_forwarding_info_.top = from_space_.low();
1230
- mc_forwarding_info_.limit = from_space_.high();
1231
- ASSERT_SEMISPACE_ALLOCATION_INFO(mc_forwarding_info_, from_space_);
1232
- }
1233
-
1234
-
1235
- void NewSpace::MCCommitRelocationInfo() {
1236
- // Assumes that the spaces have been flipped so that mc_forwarding_info_ is
1237
- // valid allocation info for the to space.
1238
- allocation_info_.top = mc_forwarding_info_.top;
1239
- allocation_info_.limit = to_space_.high();
1240
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1241
- }
1242
-
1243
-
1244
- #ifdef DEBUG
1245
- // We do not use the SemispaceIterator because verification doesn't assume
1246
- // that it works (it depends on the invariants we are checking).
1247
- void NewSpace::Verify() {
1248
- // The allocation pointer should be in the space or at the very end.
1249
- ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
1250
-
1251
- // There should be objects packed in from the low address up to the
1252
- // allocation pointer.
1253
- Address current = to_space_.low();
1254
- while (current < top()) {
1255
- HeapObject* object = HeapObject::FromAddress(current);
1256
-
1257
- // The first word should be a map, and we expect all map pointers to
1258
- // be in map space.
1259
- Map* map = object->map();
1260
- ASSERT(map->IsMap());
1261
- ASSERT(Heap::map_space()->Contains(map));
1262
-
1263
- // The object should not be code or a map.
1264
- ASSERT(!object->IsMap());
1265
- ASSERT(!object->IsCode());
1266
-
1267
- // The object itself should look OK.
1268
- object->Verify();
1269
-
1270
- // All the interior pointers should be contained in the heap.
1271
- VerifyPointersVisitor visitor;
1272
- int size = object->Size();
1273
- object->IterateBody(map->instance_type(), size, &visitor);
1274
-
1275
- current += size;
1276
- }
1277
-
1278
- // The allocation pointer should not be in the middle of an object.
1279
- ASSERT(current == top());
1280
- }
1281
- #endif
1282
-
1283
-
1284
- bool SemiSpace::Commit() {
1285
- ASSERT(!is_committed());
1286
- if (!MemoryAllocator::CommitBlock(start_, capacity_, executable())) {
1287
- return false;
1288
- }
1289
- committed_ = true;
1290
- return true;
1291
- }
1292
-
1293
-
1294
- bool SemiSpace::Uncommit() {
1295
- ASSERT(is_committed());
1296
- if (!MemoryAllocator::UncommitBlock(start_, capacity_)) {
1297
- return false;
1298
- }
1299
- committed_ = false;
1300
- return true;
1301
- }
1302
-
1303
-
1304
- // -----------------------------------------------------------------------------
1305
- // SemiSpace implementation
1306
-
1307
- bool SemiSpace::Setup(Address start,
1308
- int initial_capacity,
1309
- int maximum_capacity) {
1310
- // Creates a space in the young generation. The constructor does not
1311
- // allocate memory from the OS. A SemiSpace is given a contiguous chunk of
1312
- // memory of size 'capacity' when set up, and does not grow or shrink
1313
- // otherwise. In the mark-compact collector, the memory region of the from
1314
- // space is used as the marking stack. It requires contiguous memory
1315
- // addresses.
1316
- initial_capacity_ = initial_capacity;
1317
- capacity_ = initial_capacity;
1318
- maximum_capacity_ = maximum_capacity;
1319
- committed_ = false;
1320
-
1321
- start_ = start;
1322
- address_mask_ = ~(maximum_capacity - 1);
1323
- object_mask_ = address_mask_ | kHeapObjectTagMask;
1324
- object_expected_ = reinterpret_cast<uintptr_t>(start) | kHeapObjectTag;
1325
- age_mark_ = start_;
1326
-
1327
- return Commit();
1328
- }
1329
-
1330
-
1331
- void SemiSpace::TearDown() {
1332
- start_ = NULL;
1333
- capacity_ = 0;
1334
- }
1335
-
1336
-
1337
- bool SemiSpace::Grow() {
1338
- // Double the semispace size but only up to maximum capacity.
1339
- int maximum_extra = maximum_capacity_ - capacity_;
1340
- int extra = Min(RoundUp(capacity_, static_cast<int>(OS::AllocateAlignment())),
1341
- maximum_extra);
1342
- if (!MemoryAllocator::CommitBlock(high(), extra, executable())) {
1343
- return false;
1344
- }
1345
- capacity_ += extra;
1346
- return true;
1347
- }
1348
-
1349
-
1350
- bool SemiSpace::GrowTo(int new_capacity) {
1351
- ASSERT(new_capacity <= maximum_capacity_);
1352
- ASSERT(new_capacity > capacity_);
1353
- size_t delta = new_capacity - capacity_;
1354
- ASSERT(IsAligned(delta, OS::AllocateAlignment()));
1355
- if (!MemoryAllocator::CommitBlock(high(), delta, executable())) {
1356
- return false;
1357
- }
1358
- capacity_ = new_capacity;
1359
- return true;
1360
- }
1361
-
1362
-
1363
- bool SemiSpace::ShrinkTo(int new_capacity) {
1364
- ASSERT(new_capacity >= initial_capacity_);
1365
- ASSERT(new_capacity < capacity_);
1366
- size_t delta = capacity_ - new_capacity;
1367
- ASSERT(IsAligned(delta, OS::AllocateAlignment()));
1368
- if (!MemoryAllocator::UncommitBlock(high() - delta, delta)) {
1369
- return false;
1370
- }
1371
- capacity_ = new_capacity;
1372
- return true;
1373
- }
1374
-
1375
-
1376
- #ifdef DEBUG
1377
- void SemiSpace::Print() { }
1378
-
1379
-
1380
- void SemiSpace::Verify() { }
1381
- #endif
1382
-
1383
-
1384
- // -----------------------------------------------------------------------------
1385
- // SemiSpaceIterator implementation.
1386
- SemiSpaceIterator::SemiSpaceIterator(NewSpace* space) {
1387
- Initialize(space, space->bottom(), space->top(), NULL);
1388
- }
1389
-
1390
-
1391
- SemiSpaceIterator::SemiSpaceIterator(NewSpace* space,
1392
- HeapObjectCallback size_func) {
1393
- Initialize(space, space->bottom(), space->top(), size_func);
1394
- }
1395
-
1396
-
1397
- SemiSpaceIterator::SemiSpaceIterator(NewSpace* space, Address start) {
1398
- Initialize(space, start, space->top(), NULL);
1399
- }
1400
-
1401
-
1402
- void SemiSpaceIterator::Initialize(NewSpace* space, Address start,
1403
- Address end,
1404
- HeapObjectCallback size_func) {
1405
- ASSERT(space->ToSpaceContains(start));
1406
- ASSERT(space->ToSpaceLow() <= end
1407
- && end <= space->ToSpaceHigh());
1408
- space_ = &space->to_space_;
1409
- current_ = start;
1410
- limit_ = end;
1411
- size_func_ = size_func;
1412
- }
1413
-
1414
-
1415
- #ifdef DEBUG
1416
- // A static array of histogram info for each type.
1417
- static HistogramInfo heap_histograms[LAST_TYPE+1];
1418
- static JSObject::SpillInformation js_spill_information;
1419
-
1420
- // heap_histograms is shared, always clear it before using it.
1421
- static void ClearHistograms() {
1422
- // We reset the name each time, though it hasn't changed.
1423
- #define DEF_TYPE_NAME(name) heap_histograms[name].set_name(#name);
1424
- INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
1425
- #undef DEF_TYPE_NAME
1426
-
1427
- #define CLEAR_HISTOGRAM(name) heap_histograms[name].clear();
1428
- INSTANCE_TYPE_LIST(CLEAR_HISTOGRAM)
1429
- #undef CLEAR_HISTOGRAM
1430
-
1431
- js_spill_information.Clear();
1432
- }
1433
-
1434
-
1435
- static int code_kind_statistics[Code::NUMBER_OF_KINDS];
1436
-
1437
-
1438
- static void ClearCodeKindStatistics() {
1439
- for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
1440
- code_kind_statistics[i] = 0;
1441
- }
1442
- }
1443
-
1444
-
1445
- static void ReportCodeKindStatistics() {
1446
- const char* table[Code::NUMBER_OF_KINDS] = { NULL };
1447
-
1448
- #define CASE(name) \
1449
- case Code::name: table[Code::name] = #name; \
1450
- break
1451
-
1452
- for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
1453
- switch (static_cast<Code::Kind>(i)) {
1454
- CASE(FUNCTION);
1455
- CASE(STUB);
1456
- CASE(BUILTIN);
1457
- CASE(LOAD_IC);
1458
- CASE(KEYED_LOAD_IC);
1459
- CASE(STORE_IC);
1460
- CASE(KEYED_STORE_IC);
1461
- CASE(CALL_IC);
1462
- CASE(KEYED_CALL_IC);
1463
- CASE(BINARY_OP_IC);
1464
- }
1465
- }
1466
-
1467
- #undef CASE
1468
-
1469
- PrintF("\n Code kind histograms: \n");
1470
- for (int i = 0; i < Code::NUMBER_OF_KINDS; i++) {
1471
- if (code_kind_statistics[i] > 0) {
1472
- PrintF(" %-20s: %10d bytes\n", table[i], code_kind_statistics[i]);
1473
- }
1474
- }
1475
- PrintF("\n");
1476
- }
1477
-
1478
-
1479
- static int CollectHistogramInfo(HeapObject* obj) {
1480
- InstanceType type = obj->map()->instance_type();
1481
- ASSERT(0 <= type && type <= LAST_TYPE);
1482
- ASSERT(heap_histograms[type].name() != NULL);
1483
- heap_histograms[type].increment_number(1);
1484
- heap_histograms[type].increment_bytes(obj->Size());
1485
-
1486
- if (FLAG_collect_heap_spill_statistics && obj->IsJSObject()) {
1487
- JSObject::cast(obj)->IncrementSpillStatistics(&js_spill_information);
1488
- }
1489
-
1490
- return obj->Size();
1491
- }
1492
-
1493
-
1494
- static void ReportHistogram(bool print_spill) {
1495
- PrintF("\n Object Histogram:\n");
1496
- for (int i = 0; i <= LAST_TYPE; i++) {
1497
- if (heap_histograms[i].number() > 0) {
1498
- PrintF(" %-34s%10d (%10d bytes)\n",
1499
- heap_histograms[i].name(),
1500
- heap_histograms[i].number(),
1501
- heap_histograms[i].bytes());
1502
- }
1503
- }
1504
- PrintF("\n");
1505
-
1506
- // Summarize string types.
1507
- int string_number = 0;
1508
- int string_bytes = 0;
1509
- #define INCREMENT(type, size, name, camel_name) \
1510
- string_number += heap_histograms[type].number(); \
1511
- string_bytes += heap_histograms[type].bytes();
1512
- STRING_TYPE_LIST(INCREMENT)
1513
- #undef INCREMENT
1514
- if (string_number > 0) {
1515
- PrintF(" %-34s%10d (%10d bytes)\n\n", "STRING_TYPE", string_number,
1516
- string_bytes);
1517
- }
1518
-
1519
- if (FLAG_collect_heap_spill_statistics && print_spill) {
1520
- js_spill_information.Print();
1521
- }
1522
- }
1523
- #endif // DEBUG
1524
-
1525
-
1526
- // Support for statistics gathering for --heap-stats and --log-gc.
1527
- #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1528
- void NewSpace::ClearHistograms() {
1529
- for (int i = 0; i <= LAST_TYPE; i++) {
1530
- allocated_histogram_[i].clear();
1531
- promoted_histogram_[i].clear();
1532
- }
1533
- }
1534
-
1535
- // Because the copying collector does not touch garbage objects, we iterate
1536
- // the new space before a collection to get a histogram of allocated objects.
1537
- // This only happens (1) when compiled with DEBUG and the --heap-stats flag is
1538
- // set, or when compiled with ENABLE_LOGGING_AND_PROFILING and the --log-gc
1539
- // flag is set.
1540
- void NewSpace::CollectStatistics() {
1541
- ClearHistograms();
1542
- SemiSpaceIterator it(this);
1543
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
1544
- RecordAllocation(obj);
1545
- }
1546
-
1547
-
1548
- #ifdef ENABLE_LOGGING_AND_PROFILING
1549
- static void DoReportStatistics(HistogramInfo* info, const char* description) {
1550
- LOG(HeapSampleBeginEvent("NewSpace", description));
1551
- // Lump all the string types together.
1552
- int string_number = 0;
1553
- int string_bytes = 0;
1554
- #define INCREMENT(type, size, name, camel_name) \
1555
- string_number += info[type].number(); \
1556
- string_bytes += info[type].bytes();
1557
- STRING_TYPE_LIST(INCREMENT)
1558
- #undef INCREMENT
1559
- if (string_number > 0) {
1560
- LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
1561
- }
1562
-
1563
- // Then do the other types.
1564
- for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
1565
- if (info[i].number() > 0) {
1566
- LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
1567
- info[i].bytes()));
1568
- }
1569
- }
1570
- LOG(HeapSampleEndEvent("NewSpace", description));
1571
- }
1572
- #endif // ENABLE_LOGGING_AND_PROFILING
1573
-
1574
-
1575
- void NewSpace::ReportStatistics() {
1576
- #ifdef DEBUG
1577
- if (FLAG_heap_stats) {
1578
- float pct = static_cast<float>(Available()) / Capacity();
1579
- PrintF(" capacity: %d, available: %d, %%%d\n",
1580
- Capacity(), Available(), static_cast<int>(pct*100));
1581
- PrintF("\n Object Histogram:\n");
1582
- for (int i = 0; i <= LAST_TYPE; i++) {
1583
- if (allocated_histogram_[i].number() > 0) {
1584
- PrintF(" %-34s%10d (%10d bytes)\n",
1585
- allocated_histogram_[i].name(),
1586
- allocated_histogram_[i].number(),
1587
- allocated_histogram_[i].bytes());
1588
- }
1589
- }
1590
- PrintF("\n");
1591
- }
1592
- #endif // DEBUG
1593
-
1594
- #ifdef ENABLE_LOGGING_AND_PROFILING
1595
- if (FLAG_log_gc) {
1596
- DoReportStatistics(allocated_histogram_, "allocated");
1597
- DoReportStatistics(promoted_histogram_, "promoted");
1598
- }
1599
- #endif // ENABLE_LOGGING_AND_PROFILING
1600
- }
1601
-
1602
-
1603
- void NewSpace::RecordAllocation(HeapObject* obj) {
1604
- InstanceType type = obj->map()->instance_type();
1605
- ASSERT(0 <= type && type <= LAST_TYPE);
1606
- allocated_histogram_[type].increment_number(1);
1607
- allocated_histogram_[type].increment_bytes(obj->Size());
1608
- }
1609
-
1610
-
1611
- void NewSpace::RecordPromotion(HeapObject* obj) {
1612
- InstanceType type = obj->map()->instance_type();
1613
- ASSERT(0 <= type && type <= LAST_TYPE);
1614
- promoted_histogram_[type].increment_number(1);
1615
- promoted_histogram_[type].increment_bytes(obj->Size());
1616
- }
1617
- #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1618
-
1619
-
1620
- // -----------------------------------------------------------------------------
1621
- // Free lists for old object spaces implementation
1622
-
1623
- void FreeListNode::set_size(int size_in_bytes) {
1624
- ASSERT(size_in_bytes > 0);
1625
- ASSERT(IsAligned(size_in_bytes, kPointerSize));
1626
-
1627
- // We write a map and possibly size information to the block. If the block
1628
- // is big enough to be a ByteArray with at least one extra word (the next
1629
- // pointer), we set its map to be the byte array map and its size to an
1630
- // appropriate array length for the desired size from HeapObject::Size().
1631
- // If the block is too small (eg, one or two words), to hold both a size
1632
- // field and a next pointer, we give it a filler map that gives it the
1633
- // correct size.
1634
- if (size_in_bytes > ByteArray::kHeaderSize) {
1635
- set_map(Heap::raw_unchecked_byte_array_map());
1636
- // Can't use ByteArray::cast because it fails during deserialization.
1637
- ByteArray* this_as_byte_array = reinterpret_cast<ByteArray*>(this);
1638
- this_as_byte_array->set_length(ByteArray::LengthFor(size_in_bytes));
1639
- } else if (size_in_bytes == kPointerSize) {
1640
- set_map(Heap::raw_unchecked_one_pointer_filler_map());
1641
- } else if (size_in_bytes == 2 * kPointerSize) {
1642
- set_map(Heap::raw_unchecked_two_pointer_filler_map());
1643
- } else {
1644
- UNREACHABLE();
1645
- }
1646
- // We would like to ASSERT(Size() == size_in_bytes) but this would fail during
1647
- // deserialization because the byte array map is not done yet.
1648
- }
1649
-
1650
-
1651
- Address FreeListNode::next() {
1652
- ASSERT(IsFreeListNode(this));
1653
- if (map() == Heap::raw_unchecked_byte_array_map()) {
1654
- ASSERT(Size() >= kNextOffset + kPointerSize);
1655
- return Memory::Address_at(address() + kNextOffset);
1656
- } else {
1657
- return Memory::Address_at(address() + kPointerSize);
1658
- }
1659
- }
1660
-
1661
-
1662
- void FreeListNode::set_next(Address next) {
1663
- ASSERT(IsFreeListNode(this));
1664
- if (map() == Heap::raw_unchecked_byte_array_map()) {
1665
- ASSERT(Size() >= kNextOffset + kPointerSize);
1666
- Memory::Address_at(address() + kNextOffset) = next;
1667
- } else {
1668
- Memory::Address_at(address() + kPointerSize) = next;
1669
- }
1670
- }
1671
-
1672
-
1673
- OldSpaceFreeList::OldSpaceFreeList(AllocationSpace owner) : owner_(owner) {
1674
- Reset();
1675
- }
1676
-
1677
-
1678
- void OldSpaceFreeList::Reset() {
1679
- available_ = 0;
1680
- for (int i = 0; i < kFreeListsLength; i++) {
1681
- free_[i].head_node_ = NULL;
1682
- }
1683
- needs_rebuild_ = false;
1684
- finger_ = kHead;
1685
- free_[kHead].next_size_ = kEnd;
1686
- }
1687
-
1688
-
1689
- void OldSpaceFreeList::RebuildSizeList() {
1690
- ASSERT(needs_rebuild_);
1691
- int cur = kHead;
1692
- for (int i = cur + 1; i < kFreeListsLength; i++) {
1693
- if (free_[i].head_node_ != NULL) {
1694
- free_[cur].next_size_ = i;
1695
- cur = i;
1696
- }
1697
- }
1698
- free_[cur].next_size_ = kEnd;
1699
- needs_rebuild_ = false;
1700
- }
1701
-
1702
-
1703
- int OldSpaceFreeList::Free(Address start, int size_in_bytes) {
1704
- #ifdef DEBUG
1705
- MemoryAllocator::ZapBlock(start, size_in_bytes);
1706
- #endif
1707
- FreeListNode* node = FreeListNode::FromAddress(start);
1708
- node->set_size(size_in_bytes);
1709
-
1710
- // We don't use the freelists in compacting mode. This makes it more like a
1711
- // GC that only has mark-sweep-compact and doesn't have a mark-sweep
1712
- // collector.
1713
- if (FLAG_always_compact) {
1714
- return size_in_bytes;
1715
- }
1716
-
1717
- // Early return to drop too-small blocks on the floor (one or two word
1718
- // blocks cannot hold a map pointer, a size field, and a pointer to the
1719
- // next block in the free list).
1720
- if (size_in_bytes < kMinBlockSize) {
1721
- return size_in_bytes;
1722
- }
1723
-
1724
- // Insert other blocks at the head of an exact free list.
1725
- int index = size_in_bytes >> kPointerSizeLog2;
1726
- node->set_next(free_[index].head_node_);
1727
- free_[index].head_node_ = node->address();
1728
- available_ += size_in_bytes;
1729
- needs_rebuild_ = true;
1730
- return 0;
1731
- }
1732
-
1733
-
1734
- Object* OldSpaceFreeList::Allocate(int size_in_bytes, int* wasted_bytes) {
1735
- ASSERT(0 < size_in_bytes);
1736
- ASSERT(size_in_bytes <= kMaxBlockSize);
1737
- ASSERT(IsAligned(size_in_bytes, kPointerSize));
1738
-
1739
- if (needs_rebuild_) RebuildSizeList();
1740
- int index = size_in_bytes >> kPointerSizeLog2;
1741
- // Check for a perfect fit.
1742
- if (free_[index].head_node_ != NULL) {
1743
- FreeListNode* node = FreeListNode::FromAddress(free_[index].head_node_);
1744
- // If this was the last block of its size, remove the size.
1745
- if ((free_[index].head_node_ = node->next()) == NULL) RemoveSize(index);
1746
- available_ -= size_in_bytes;
1747
- *wasted_bytes = 0;
1748
- ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
1749
- return node;
1750
- }
1751
- // Search the size list for the best fit.
1752
- int prev = finger_ < index ? finger_ : kHead;
1753
- int cur = FindSize(index, &prev);
1754
- ASSERT(index < cur);
1755
- if (cur == kEnd) {
1756
- // No large enough size in list.
1757
- *wasted_bytes = 0;
1758
- return Failure::RetryAfterGC(size_in_bytes, owner_);
1759
- }
1760
- ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
1761
- int rem = cur - index;
1762
- int rem_bytes = rem << kPointerSizeLog2;
1763
- FreeListNode* cur_node = FreeListNode::FromAddress(free_[cur].head_node_);
1764
- ASSERT(cur_node->Size() == (cur << kPointerSizeLog2));
1765
- FreeListNode* rem_node = FreeListNode::FromAddress(free_[cur].head_node_ +
1766
- size_in_bytes);
1767
- // Distinguish the cases prev < rem < cur and rem <= prev < cur
1768
- // to avoid many redundant tests and calls to Insert/RemoveSize.
1769
- if (prev < rem) {
1770
- // Simple case: insert rem between prev and cur.
1771
- finger_ = prev;
1772
- free_[prev].next_size_ = rem;
1773
- // If this was the last block of size cur, remove the size.
1774
- if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
1775
- free_[rem].next_size_ = free_[cur].next_size_;
1776
- } else {
1777
- free_[rem].next_size_ = cur;
1778
- }
1779
- // Add the remainder block.
1780
- rem_node->set_size(rem_bytes);
1781
- rem_node->set_next(free_[rem].head_node_);
1782
- free_[rem].head_node_ = rem_node->address();
1783
- } else {
1784
- // If this was the last block of size cur, remove the size.
1785
- if ((free_[cur].head_node_ = cur_node->next()) == NULL) {
1786
- finger_ = prev;
1787
- free_[prev].next_size_ = free_[cur].next_size_;
1788
- }
1789
- if (rem_bytes < kMinBlockSize) {
1790
- // Too-small remainder is wasted.
1791
- rem_node->set_size(rem_bytes);
1792
- available_ -= size_in_bytes + rem_bytes;
1793
- *wasted_bytes = rem_bytes;
1794
- return cur_node;
1795
- }
1796
- // Add the remainder block and, if needed, insert its size.
1797
- rem_node->set_size(rem_bytes);
1798
- rem_node->set_next(free_[rem].head_node_);
1799
- free_[rem].head_node_ = rem_node->address();
1800
- if (rem_node->next() == NULL) InsertSize(rem);
1801
- }
1802
- available_ -= size_in_bytes;
1803
- *wasted_bytes = 0;
1804
- return cur_node;
1805
- }
1806
-
1807
-
1808
- #ifdef DEBUG
1809
- bool OldSpaceFreeList::Contains(FreeListNode* node) {
1810
- for (int i = 0; i < kFreeListsLength; i++) {
1811
- Address cur_addr = free_[i].head_node_;
1812
- while (cur_addr != NULL) {
1813
- FreeListNode* cur_node = FreeListNode::FromAddress(cur_addr);
1814
- if (cur_node == node) return true;
1815
- cur_addr = cur_node->next();
1816
- }
1817
- }
1818
- return false;
1819
- }
1820
- #endif
1821
-
1822
-
1823
- FixedSizeFreeList::FixedSizeFreeList(AllocationSpace owner, int object_size)
1824
- : owner_(owner), object_size_(object_size) {
1825
- Reset();
1826
- }
1827
-
1828
-
1829
- void FixedSizeFreeList::Reset() {
1830
- available_ = 0;
1831
- head_ = tail_ = NULL;
1832
- }
1833
-
1834
-
1835
- void FixedSizeFreeList::Free(Address start) {
1836
- #ifdef DEBUG
1837
- MemoryAllocator::ZapBlock(start, object_size_);
1838
- #endif
1839
- // We only use the freelists with mark-sweep.
1840
- ASSERT(!MarkCompactCollector::IsCompacting());
1841
- FreeListNode* node = FreeListNode::FromAddress(start);
1842
- node->set_size(object_size_);
1843
- node->set_next(NULL);
1844
- if (head_ == NULL) {
1845
- tail_ = head_ = node->address();
1846
- } else {
1847
- FreeListNode::FromAddress(tail_)->set_next(node->address());
1848
- tail_ = node->address();
1849
- }
1850
- available_ += object_size_;
1851
- }
1852
-
1853
-
1854
- Object* FixedSizeFreeList::Allocate() {
1855
- if (head_ == NULL) {
1856
- return Failure::RetryAfterGC(object_size_, owner_);
1857
- }
1858
-
1859
- ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep.
1860
- FreeListNode* node = FreeListNode::FromAddress(head_);
1861
- head_ = node->next();
1862
- available_ -= object_size_;
1863
- return node;
1864
- }
1865
-
1866
-
1867
- // -----------------------------------------------------------------------------
1868
- // OldSpace implementation
1869
-
1870
- void OldSpace::PrepareForMarkCompact(bool will_compact) {
1871
- // Call prepare of the super class.
1872
- PagedSpace::PrepareForMarkCompact(will_compact);
1873
-
1874
- if (will_compact) {
1875
- // Reset relocation info. During a compacting collection, everything in
1876
- // the space is considered 'available' and we will rediscover live data
1877
- // and waste during the collection.
1878
- MCResetRelocationInfo();
1879
- ASSERT(Available() == Capacity());
1880
- } else {
1881
- // During a non-compacting collection, everything below the linear
1882
- // allocation pointer is considered allocated (everything above is
1883
- // available) and we will rediscover available and wasted bytes during
1884
- // the collection.
1885
- accounting_stats_.AllocateBytes(free_list_.available());
1886
- accounting_stats_.FillWastedBytes(Waste());
1887
- }
1888
-
1889
- // Clear the free list before a full GC---it will be rebuilt afterward.
1890
- free_list_.Reset();
1891
- }
1892
-
1893
-
1894
- void OldSpace::MCCommitRelocationInfo() {
1895
- // Update fast allocation info.
1896
- allocation_info_.top = mc_forwarding_info_.top;
1897
- allocation_info_.limit = mc_forwarding_info_.limit;
1898
- ASSERT(allocation_info_.VerifyPagedAllocation());
1899
-
1900
- // The space is compacted and we haven't yet built free lists or
1901
- // wasted any space.
1902
- ASSERT(Waste() == 0);
1903
- ASSERT(AvailableFree() == 0);
1904
-
1905
- // Build the free list for the space.
1906
- int computed_size = 0;
1907
- PageIterator it(this, PageIterator::PAGES_USED_BY_MC);
1908
- while (it.has_next()) {
1909
- Page* p = it.next();
1910
- // Space below the relocation pointer is allocated.
1911
- computed_size +=
1912
- static_cast<int>(p->AllocationWatermark() - p->ObjectAreaStart());
1913
- if (it.has_next()) {
1914
- // Free the space at the top of the page.
1915
- int extra_size =
1916
- static_cast<int>(p->ObjectAreaEnd() - p->AllocationWatermark());
1917
- if (extra_size > 0) {
1918
- int wasted_bytes = free_list_.Free(p->AllocationWatermark(),
1919
- extra_size);
1920
- // The bytes we have just "freed" to add to the free list were
1921
- // already accounted as available.
1922
- accounting_stats_.WasteBytes(wasted_bytes);
1923
- }
1924
- }
1925
- }
1926
-
1927
- // Make sure the computed size - based on the used portion of the pages in
1928
- // use - matches the size obtained while computing forwarding addresses.
1929
- ASSERT(computed_size == Size());
1930
- }
1931
-
1932
-
1933
- bool NewSpace::ReserveSpace(int bytes) {
1934
- // We can't reliably unpack a partial snapshot that needs more new space
1935
- // space than the minimum NewSpace size.
1936
- ASSERT(bytes <= InitialCapacity());
1937
- Address limit = allocation_info_.limit;
1938
- Address top = allocation_info_.top;
1939
- return limit - top >= bytes;
1940
- }
1941
-
1942
-
1943
- void PagedSpace::FreePages(Page* prev, Page* last) {
1944
- if (last == AllocationTopPage()) {
1945
- // Pages are already at the end of used pages.
1946
- return;
1947
- }
1948
-
1949
- Page* first = NULL;
1950
-
1951
- // Remove pages from the list.
1952
- if (prev == NULL) {
1953
- first = first_page_;
1954
- first_page_ = last->next_page();
1955
- } else {
1956
- first = prev->next_page();
1957
- MemoryAllocator::SetNextPage(prev, last->next_page());
1958
- }
1959
-
1960
- // Attach it after the last page.
1961
- MemoryAllocator::SetNextPage(last_page_, first);
1962
- last_page_ = last;
1963
- MemoryAllocator::SetNextPage(last, NULL);
1964
-
1965
- // Clean them up.
1966
- do {
1967
- first->InvalidateWatermark(true);
1968
- first->SetAllocationWatermark(first->ObjectAreaStart());
1969
- first->SetCachedAllocationWatermark(first->ObjectAreaStart());
1970
- first->SetRegionMarks(Page::kAllRegionsCleanMarks);
1971
- first = first->next_page();
1972
- } while (first != NULL);
1973
-
1974
- // Order of pages in this space might no longer be consistent with
1975
- // order of pages in chunks.
1976
- page_list_is_chunk_ordered_ = false;
1977
- }
1978
-
1979
-
1980
- void PagedSpace::PrepareForMarkCompact(bool will_compact) {
1981
- if (will_compact) {
1982
- // MarkCompact collector relies on WAS_IN_USE_BEFORE_MC page flag
1983
- // to skip unused pages. Update flag value for all pages in space.
1984
- PageIterator all_pages_iterator(this, PageIterator::ALL_PAGES);
1985
- Page* last_in_use = AllocationTopPage();
1986
- bool in_use = true;
1987
-
1988
- while (all_pages_iterator.has_next()) {
1989
- Page* p = all_pages_iterator.next();
1990
- p->SetWasInUseBeforeMC(in_use);
1991
- if (p == last_in_use) {
1992
- // We passed a page containing allocation top. All consequent
1993
- // pages are not used.
1994
- in_use = false;
1995
- }
1996
- }
1997
-
1998
- if (!page_list_is_chunk_ordered_) {
1999
- Page* new_last_in_use = Page::FromAddress(NULL);
2000
- MemoryAllocator::RelinkPageListInChunkOrder(this,
2001
- &first_page_,
2002
- &last_page_,
2003
- &new_last_in_use);
2004
- ASSERT(new_last_in_use->is_valid());
2005
-
2006
- if (new_last_in_use != last_in_use) {
2007
- // Current allocation top points to a page which is now in the middle
2008
- // of page list. We should move allocation top forward to the new last
2009
- // used page so various object iterators will continue to work properly.
2010
- last_in_use->SetAllocationWatermark(last_in_use->AllocationTop());
2011
-
2012
- int size_in_bytes = static_cast<int>(PageAllocationLimit(last_in_use) -
2013
- last_in_use->AllocationTop());
2014
-
2015
- if (size_in_bytes > 0) {
2016
- // There is still some space left on this page. Create a fake
2017
- // object which will occupy all free space on this page.
2018
- // Otherwise iterators would not be able to scan this page
2019
- // correctly.
2020
-
2021
- Heap::CreateFillerObjectAt(last_in_use->AllocationTop(),
2022
- size_in_bytes);
2023
- }
2024
-
2025
- // New last in use page was in the middle of the list before
2026
- // sorting so it full.
2027
- SetTop(new_last_in_use->AllocationTop());
2028
-
2029
- ASSERT(AllocationTopPage() == new_last_in_use);
2030
- ASSERT(AllocationTopPage()->WasInUseBeforeMC());
2031
- }
2032
-
2033
- PageIterator pages_in_use_iterator(this, PageIterator::PAGES_IN_USE);
2034
- while (pages_in_use_iterator.has_next()) {
2035
- Page* p = pages_in_use_iterator.next();
2036
- if (!p->WasInUseBeforeMC()) {
2037
- // Empty page is in the middle of a sequence of used pages.
2038
- // Create a fake object which will occupy all free space on this page.
2039
- // Otherwise iterators would not be able to scan this page correctly.
2040
- int size_in_bytes = static_cast<int>(PageAllocationLimit(p) -
2041
- p->ObjectAreaStart());
2042
-
2043
- p->SetAllocationWatermark(p->ObjectAreaStart());
2044
- Heap::CreateFillerObjectAt(p->ObjectAreaStart(), size_in_bytes);
2045
- }
2046
- }
2047
-
2048
- page_list_is_chunk_ordered_ = true;
2049
- }
2050
- }
2051
- }
2052
-
2053
-
2054
- bool PagedSpace::ReserveSpace(int bytes) {
2055
- Address limit = allocation_info_.limit;
2056
- Address top = allocation_info_.top;
2057
- if (limit - top >= bytes) return true;
2058
-
2059
- // There wasn't enough space in the current page. Lets put the rest
2060
- // of the page on the free list and start a fresh page.
2061
- PutRestOfCurrentPageOnFreeList(TopPageOf(allocation_info_));
2062
-
2063
- Page* reserved_page = TopPageOf(allocation_info_);
2064
- int bytes_left_to_reserve = bytes;
2065
- while (bytes_left_to_reserve > 0) {
2066
- if (!reserved_page->next_page()->is_valid()) {
2067
- if (Heap::OldGenerationAllocationLimitReached()) return false;
2068
- Expand(reserved_page);
2069
- }
2070
- bytes_left_to_reserve -= Page::kPageSize;
2071
- reserved_page = reserved_page->next_page();
2072
- if (!reserved_page->is_valid()) return false;
2073
- }
2074
- ASSERT(TopPageOf(allocation_info_)->next_page()->is_valid());
2075
- TopPageOf(allocation_info_)->next_page()->InvalidateWatermark(true);
2076
- SetAllocationInfo(&allocation_info_,
2077
- TopPageOf(allocation_info_)->next_page());
2078
- return true;
2079
- }
2080
-
2081
-
2082
- // You have to call this last, since the implementation from PagedSpace
2083
- // doesn't know that memory was 'promised' to large object space.
2084
- bool LargeObjectSpace::ReserveSpace(int bytes) {
2085
- return Heap::OldGenerationSpaceAvailable() >= bytes;
2086
- }
2087
-
2088
-
2089
- // Slow case for normal allocation. Try in order: (1) allocate in the next
2090
- // page in the space, (2) allocate off the space's free list, (3) expand the
2091
- // space, (4) fail.
2092
- HeapObject* OldSpace::SlowAllocateRaw(int size_in_bytes) {
2093
- // Linear allocation in this space has failed. If there is another page
2094
- // in the space, move to that page and allocate there. This allocation
2095
- // should succeed (size_in_bytes should not be greater than a page's
2096
- // object area size).
2097
- Page* current_page = TopPageOf(allocation_info_);
2098
- if (current_page->next_page()->is_valid()) {
2099
- return AllocateInNextPage(current_page, size_in_bytes);
2100
- }
2101
-
2102
- // There is no next page in this space. Try free list allocation unless that
2103
- // is currently forbidden.
2104
- if (!Heap::linear_allocation()) {
2105
- int wasted_bytes;
2106
- Object* result = free_list_.Allocate(size_in_bytes, &wasted_bytes);
2107
- accounting_stats_.WasteBytes(wasted_bytes);
2108
- if (!result->IsFailure()) {
2109
- accounting_stats_.AllocateBytes(size_in_bytes);
2110
-
2111
- HeapObject* obj = HeapObject::cast(result);
2112
- Page* p = Page::FromAddress(obj->address());
2113
-
2114
- if (obj->address() >= p->AllocationWatermark()) {
2115
- // There should be no hole between the allocation watermark
2116
- // and allocated object address.
2117
- // Memory above the allocation watermark was not swept and
2118
- // might contain garbage pointers to new space.
2119
- ASSERT(obj->address() == p->AllocationWatermark());
2120
- p->SetAllocationWatermark(obj->address() + size_in_bytes);
2121
- }
2122
-
2123
- return obj;
2124
- }
2125
- }
2126
-
2127
- // Free list allocation failed and there is no next page. Fail if we have
2128
- // hit the old generation size limit that should cause a garbage
2129
- // collection.
2130
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
2131
- return NULL;
2132
- }
2133
-
2134
- // Try to expand the space and allocate in the new next page.
2135
- ASSERT(!current_page->next_page()->is_valid());
2136
- if (Expand(current_page)) {
2137
- return AllocateInNextPage(current_page, size_in_bytes);
2138
- }
2139
-
2140
- // Finally, fail.
2141
- return NULL;
2142
- }
2143
-
2144
-
2145
- void OldSpace::PutRestOfCurrentPageOnFreeList(Page* current_page) {
2146
- current_page->SetAllocationWatermark(allocation_info_.top);
2147
- int free_size =
2148
- static_cast<int>(current_page->ObjectAreaEnd() - allocation_info_.top);
2149
- if (free_size > 0) {
2150
- int wasted_bytes = free_list_.Free(allocation_info_.top, free_size);
2151
- accounting_stats_.WasteBytes(wasted_bytes);
2152
- }
2153
- }
2154
-
2155
-
2156
- void FixedSpace::PutRestOfCurrentPageOnFreeList(Page* current_page) {
2157
- current_page->SetAllocationWatermark(allocation_info_.top);
2158
- int free_size =
2159
- static_cast<int>(current_page->ObjectAreaEnd() - allocation_info_.top);
2160
- // In the fixed space free list all the free list items have the right size.
2161
- // We use up the rest of the page while preserving this invariant.
2162
- while (free_size >= object_size_in_bytes_) {
2163
- free_list_.Free(allocation_info_.top);
2164
- allocation_info_.top += object_size_in_bytes_;
2165
- free_size -= object_size_in_bytes_;
2166
- accounting_stats_.WasteBytes(object_size_in_bytes_);
2167
- }
2168
- }
2169
-
2170
-
2171
- // Add the block at the top of the page to the space's free list, set the
2172
- // allocation info to the next page (assumed to be one), and allocate
2173
- // linearly there.
2174
- HeapObject* OldSpace::AllocateInNextPage(Page* current_page,
2175
- int size_in_bytes) {
2176
- ASSERT(current_page->next_page()->is_valid());
2177
- Page* next_page = current_page->next_page();
2178
- next_page->ClearGCFields();
2179
- PutRestOfCurrentPageOnFreeList(current_page);
2180
- SetAllocationInfo(&allocation_info_, next_page);
2181
- return AllocateLinearly(&allocation_info_, size_in_bytes);
2182
- }
2183
-
2184
-
2185
- #ifdef DEBUG
2186
- struct CommentStatistic {
2187
- const char* comment;
2188
- int size;
2189
- int count;
2190
- void Clear() {
2191
- comment = NULL;
2192
- size = 0;
2193
- count = 0;
2194
- }
2195
- };
2196
-
2197
-
2198
- // must be small, since an iteration is used for lookup
2199
- const int kMaxComments = 64;
2200
- static CommentStatistic comments_statistics[kMaxComments+1];
2201
-
2202
-
2203
- void PagedSpace::ReportCodeStatistics() {
2204
- ReportCodeKindStatistics();
2205
- PrintF("Code comment statistics (\" [ comment-txt : size/ "
2206
- "count (average)\"):\n");
2207
- for (int i = 0; i <= kMaxComments; i++) {
2208
- const CommentStatistic& cs = comments_statistics[i];
2209
- if (cs.size > 0) {
2210
- PrintF(" %-30s: %10d/%6d (%d)\n", cs.comment, cs.size, cs.count,
2211
- cs.size/cs.count);
2212
- }
2213
- }
2214
- PrintF("\n");
2215
- }
2216
-
2217
-
2218
- void PagedSpace::ResetCodeStatistics() {
2219
- ClearCodeKindStatistics();
2220
- for (int i = 0; i < kMaxComments; i++) comments_statistics[i].Clear();
2221
- comments_statistics[kMaxComments].comment = "Unknown";
2222
- comments_statistics[kMaxComments].size = 0;
2223
- comments_statistics[kMaxComments].count = 0;
2224
- }
2225
-
2226
-
2227
- // Adds comment to 'comment_statistics' table. Performance OK sa long as
2228
- // 'kMaxComments' is small
2229
- static void EnterComment(const char* comment, int delta) {
2230
- // Do not count empty comments
2231
- if (delta <= 0) return;
2232
- CommentStatistic* cs = &comments_statistics[kMaxComments];
2233
- // Search for a free or matching entry in 'comments_statistics': 'cs'
2234
- // points to result.
2235
- for (int i = 0; i < kMaxComments; i++) {
2236
- if (comments_statistics[i].comment == NULL) {
2237
- cs = &comments_statistics[i];
2238
- cs->comment = comment;
2239
- break;
2240
- } else if (strcmp(comments_statistics[i].comment, comment) == 0) {
2241
- cs = &comments_statistics[i];
2242
- break;
2243
- }
2244
- }
2245
- // Update entry for 'comment'
2246
- cs->size += delta;
2247
- cs->count += 1;
2248
- }
2249
-
2250
-
2251
- // Call for each nested comment start (start marked with '[ xxx', end marked
2252
- // with ']'. RelocIterator 'it' must point to a comment reloc info.
2253
- static void CollectCommentStatistics(RelocIterator* it) {
2254
- ASSERT(!it->done());
2255
- ASSERT(it->rinfo()->rmode() == RelocInfo::COMMENT);
2256
- const char* tmp = reinterpret_cast<const char*>(it->rinfo()->data());
2257
- if (tmp[0] != '[') {
2258
- // Not a nested comment; skip
2259
- return;
2260
- }
2261
-
2262
- // Search for end of nested comment or a new nested comment
2263
- const char* const comment_txt =
2264
- reinterpret_cast<const char*>(it->rinfo()->data());
2265
- const byte* prev_pc = it->rinfo()->pc();
2266
- int flat_delta = 0;
2267
- it->next();
2268
- while (true) {
2269
- // All nested comments must be terminated properly, and therefore exit
2270
- // from loop.
2271
- ASSERT(!it->done());
2272
- if (it->rinfo()->rmode() == RelocInfo::COMMENT) {
2273
- const char* const txt =
2274
- reinterpret_cast<const char*>(it->rinfo()->data());
2275
- flat_delta += static_cast<int>(it->rinfo()->pc() - prev_pc);
2276
- if (txt[0] == ']') break; // End of nested comment
2277
- // A new comment
2278
- CollectCommentStatistics(it);
2279
- // Skip code that was covered with previous comment
2280
- prev_pc = it->rinfo()->pc();
2281
- }
2282
- it->next();
2283
- }
2284
- EnterComment(comment_txt, flat_delta);
2285
- }
2286
-
2287
-
2288
- // Collects code size statistics:
2289
- // - by code kind
2290
- // - by code comment
2291
- void PagedSpace::CollectCodeStatistics() {
2292
- HeapObjectIterator obj_it(this);
2293
- for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
2294
- if (obj->IsCode()) {
2295
- Code* code = Code::cast(obj);
2296
- code_kind_statistics[code->kind()] += code->Size();
2297
- RelocIterator it(code);
2298
- int delta = 0;
2299
- const byte* prev_pc = code->instruction_start();
2300
- while (!it.done()) {
2301
- if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
2302
- delta += static_cast<int>(it.rinfo()->pc() - prev_pc);
2303
- CollectCommentStatistics(&it);
2304
- prev_pc = it.rinfo()->pc();
2305
- }
2306
- it.next();
2307
- }
2308
-
2309
- ASSERT(code->instruction_start() <= prev_pc &&
2310
- prev_pc <= code->instruction_end());
2311
- delta += static_cast<int>(code->instruction_end() - prev_pc);
2312
- EnterComment("NoComment", delta);
2313
- }
2314
- }
2315
- }
2316
-
2317
-
2318
- void OldSpace::ReportStatistics() {
2319
- int pct = Available() * 100 / Capacity();
2320
- PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
2321
- Capacity(), Waste(), Available(), pct);
2322
-
2323
- ClearHistograms();
2324
- HeapObjectIterator obj_it(this);
2325
- for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next())
2326
- CollectHistogramInfo(obj);
2327
- ReportHistogram(true);
2328
- }
2329
- #endif
2330
-
2331
- // -----------------------------------------------------------------------------
2332
- // FixedSpace implementation
2333
-
2334
- void FixedSpace::PrepareForMarkCompact(bool will_compact) {
2335
- // Call prepare of the super class.
2336
- PagedSpace::PrepareForMarkCompact(will_compact);
2337
-
2338
- if (will_compact) {
2339
- // Reset relocation info.
2340
- MCResetRelocationInfo();
2341
-
2342
- // During a compacting collection, everything in the space is considered
2343
- // 'available' (set by the call to MCResetRelocationInfo) and we will
2344
- // rediscover live and wasted bytes during the collection.
2345
- ASSERT(Available() == Capacity());
2346
- } else {
2347
- // During a non-compacting collection, everything below the linear
2348
- // allocation pointer except wasted top-of-page blocks is considered
2349
- // allocated and we will rediscover available bytes during the
2350
- // collection.
2351
- accounting_stats_.AllocateBytes(free_list_.available());
2352
- }
2353
-
2354
- // Clear the free list before a full GC---it will be rebuilt afterward.
2355
- free_list_.Reset();
2356
- }
2357
-
2358
-
2359
- void FixedSpace::MCCommitRelocationInfo() {
2360
- // Update fast allocation info.
2361
- allocation_info_.top = mc_forwarding_info_.top;
2362
- allocation_info_.limit = mc_forwarding_info_.limit;
2363
- ASSERT(allocation_info_.VerifyPagedAllocation());
2364
-
2365
- // The space is compacted and we haven't yet wasted any space.
2366
- ASSERT(Waste() == 0);
2367
-
2368
- // Update allocation_top of each page in use and compute waste.
2369
- int computed_size = 0;
2370
- PageIterator it(this, PageIterator::PAGES_USED_BY_MC);
2371
- while (it.has_next()) {
2372
- Page* page = it.next();
2373
- Address page_top = page->AllocationTop();
2374
- computed_size += static_cast<int>(page_top - page->ObjectAreaStart());
2375
- if (it.has_next()) {
2376
- accounting_stats_.WasteBytes(
2377
- static_cast<int>(page->ObjectAreaEnd() - page_top));
2378
- page->SetAllocationWatermark(page_top);
2379
- }
2380
- }
2381
-
2382
- // Make sure the computed size - based on the used portion of the
2383
- // pages in use - matches the size we adjust during allocation.
2384
- ASSERT(computed_size == Size());
2385
- }
2386
-
2387
-
2388
- // Slow case for normal allocation. Try in order: (1) allocate in the next
2389
- // page in the space, (2) allocate off the space's free list, (3) expand the
2390
- // space, (4) fail.
2391
- HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) {
2392
- ASSERT_EQ(object_size_in_bytes_, size_in_bytes);
2393
- // Linear allocation in this space has failed. If there is another page
2394
- // in the space, move to that page and allocate there. This allocation
2395
- // should succeed.
2396
- Page* current_page = TopPageOf(allocation_info_);
2397
- if (current_page->next_page()->is_valid()) {
2398
- return AllocateInNextPage(current_page, size_in_bytes);
2399
- }
2400
-
2401
- // There is no next page in this space. Try free list allocation unless
2402
- // that is currently forbidden. The fixed space free list implicitly assumes
2403
- // that all free blocks are of the fixed size.
2404
- if (!Heap::linear_allocation()) {
2405
- Object* result = free_list_.Allocate();
2406
- if (!result->IsFailure()) {
2407
- accounting_stats_.AllocateBytes(size_in_bytes);
2408
- HeapObject* obj = HeapObject::cast(result);
2409
- Page* p = Page::FromAddress(obj->address());
2410
-
2411
- if (obj->address() >= p->AllocationWatermark()) {
2412
- // There should be no hole between the allocation watermark
2413
- // and allocated object address.
2414
- // Memory above the allocation watermark was not swept and
2415
- // might contain garbage pointers to new space.
2416
- ASSERT(obj->address() == p->AllocationWatermark());
2417
- p->SetAllocationWatermark(obj->address() + size_in_bytes);
2418
- }
2419
-
2420
- return obj;
2421
- }
2422
- }
2423
-
2424
- // Free list allocation failed and there is no next page. Fail if we have
2425
- // hit the old generation size limit that should cause a garbage
2426
- // collection.
2427
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
2428
- return NULL;
2429
- }
2430
-
2431
- // Try to expand the space and allocate in the new next page.
2432
- ASSERT(!current_page->next_page()->is_valid());
2433
- if (Expand(current_page)) {
2434
- return AllocateInNextPage(current_page, size_in_bytes);
2435
- }
2436
-
2437
- // Finally, fail.
2438
- return NULL;
2439
- }
2440
-
2441
-
2442
- // Move to the next page (there is assumed to be one) and allocate there.
2443
- // The top of page block is always wasted, because it is too small to hold a
2444
- // map.
2445
- HeapObject* FixedSpace::AllocateInNextPage(Page* current_page,
2446
- int size_in_bytes) {
2447
- ASSERT(current_page->next_page()->is_valid());
2448
- ASSERT(allocation_info_.top == PageAllocationLimit(current_page));
2449
- ASSERT_EQ(object_size_in_bytes_, size_in_bytes);
2450
- Page* next_page = current_page->next_page();
2451
- next_page->ClearGCFields();
2452
- current_page->SetAllocationWatermark(allocation_info_.top);
2453
- accounting_stats_.WasteBytes(page_extra_);
2454
- SetAllocationInfo(&allocation_info_, next_page);
2455
- return AllocateLinearly(&allocation_info_, size_in_bytes);
2456
- }
2457
-
2458
-
2459
- #ifdef DEBUG
2460
- void FixedSpace::ReportStatistics() {
2461
- int pct = Available() * 100 / Capacity();
2462
- PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
2463
- Capacity(), Waste(), Available(), pct);
2464
-
2465
- ClearHistograms();
2466
- HeapObjectIterator obj_it(this);
2467
- for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next())
2468
- CollectHistogramInfo(obj);
2469
- ReportHistogram(false);
2470
- }
2471
- #endif
2472
-
2473
-
2474
- // -----------------------------------------------------------------------------
2475
- // MapSpace implementation
2476
-
2477
- void MapSpace::PrepareForMarkCompact(bool will_compact) {
2478
- // Call prepare of the super class.
2479
- FixedSpace::PrepareForMarkCompact(will_compact);
2480
-
2481
- if (will_compact) {
2482
- // Initialize map index entry.
2483
- int page_count = 0;
2484
- PageIterator it(this, PageIterator::ALL_PAGES);
2485
- while (it.has_next()) {
2486
- ASSERT_MAP_PAGE_INDEX(page_count);
2487
-
2488
- Page* p = it.next();
2489
- ASSERT(p->mc_page_index == page_count);
2490
-
2491
- page_addresses_[page_count++] = p->address();
2492
- }
2493
- }
2494
- }
2495
-
2496
-
2497
- #ifdef DEBUG
2498
- void MapSpace::VerifyObject(HeapObject* object) {
2499
- // The object should be a map or a free-list node.
2500
- ASSERT(object->IsMap() || object->IsByteArray());
2501
- }
2502
- #endif
2503
-
2504
-
2505
- // -----------------------------------------------------------------------------
2506
- // GlobalPropertyCellSpace implementation
2507
-
2508
- #ifdef DEBUG
2509
- void CellSpace::VerifyObject(HeapObject* object) {
2510
- // The object should be a global object property cell or a free-list node.
2511
- ASSERT(object->IsJSGlobalPropertyCell() ||
2512
- object->map() == Heap::two_pointer_filler_map());
2513
- }
2514
- #endif
2515
-
2516
-
2517
- // -----------------------------------------------------------------------------
2518
- // LargeObjectIterator
2519
-
2520
- LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) {
2521
- current_ = space->first_chunk_;
2522
- size_func_ = NULL;
2523
- }
2524
-
2525
-
2526
- LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space,
2527
- HeapObjectCallback size_func) {
2528
- current_ = space->first_chunk_;
2529
- size_func_ = size_func;
2530
- }
2531
-
2532
-
2533
- HeapObject* LargeObjectIterator::next() {
2534
- if (current_ == NULL) return NULL;
2535
-
2536
- HeapObject* object = current_->GetObject();
2537
- current_ = current_->next();
2538
- return object;
2539
- }
2540
-
2541
-
2542
- // -----------------------------------------------------------------------------
2543
- // LargeObjectChunk
2544
-
2545
- LargeObjectChunk* LargeObjectChunk::New(int size_in_bytes,
2546
- size_t* chunk_size,
2547
- Executability executable) {
2548
- size_t requested = ChunkSizeFor(size_in_bytes);
2549
- void* mem = MemoryAllocator::AllocateRawMemory(requested,
2550
- chunk_size,
2551
- executable);
2552
- if (mem == NULL) return NULL;
2553
- LOG(NewEvent("LargeObjectChunk", mem, *chunk_size));
2554
- if (*chunk_size < requested) {
2555
- MemoryAllocator::FreeRawMemory(mem, *chunk_size);
2556
- LOG(DeleteEvent("LargeObjectChunk", mem));
2557
- return NULL;
2558
- }
2559
- return reinterpret_cast<LargeObjectChunk*>(mem);
2560
- }
2561
-
2562
-
2563
- int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
2564
- int os_alignment = static_cast<int>(OS::AllocateAlignment());
2565
- if (os_alignment < Page::kPageSize)
2566
- size_in_bytes += (Page::kPageSize - os_alignment);
2567
- return size_in_bytes + Page::kObjectStartOffset;
2568
- }
2569
-
2570
- // -----------------------------------------------------------------------------
2571
- // LargeObjectSpace
2572
-
2573
- LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
2574
- : Space(id, NOT_EXECUTABLE), // Managed on a per-allocation basis
2575
- first_chunk_(NULL),
2576
- size_(0),
2577
- page_count_(0) {}
2578
-
2579
-
2580
- bool LargeObjectSpace::Setup() {
2581
- first_chunk_ = NULL;
2582
- size_ = 0;
2583
- page_count_ = 0;
2584
- return true;
2585
- }
2586
-
2587
-
2588
- void LargeObjectSpace::TearDown() {
2589
- while (first_chunk_ != NULL) {
2590
- LargeObjectChunk* chunk = first_chunk_;
2591
- first_chunk_ = first_chunk_->next();
2592
- LOG(DeleteEvent("LargeObjectChunk", chunk->address()));
2593
- MemoryAllocator::FreeRawMemory(chunk->address(), chunk->size());
2594
- }
2595
-
2596
- size_ = 0;
2597
- page_count_ = 0;
2598
- }
2599
-
2600
-
2601
- #ifdef ENABLE_HEAP_PROTECTION
2602
-
2603
- void LargeObjectSpace::Protect() {
2604
- LargeObjectChunk* chunk = first_chunk_;
2605
- while (chunk != NULL) {
2606
- MemoryAllocator::Protect(chunk->address(), chunk->size());
2607
- chunk = chunk->next();
2608
- }
2609
- }
2610
-
2611
-
2612
- void LargeObjectSpace::Unprotect() {
2613
- LargeObjectChunk* chunk = first_chunk_;
2614
- while (chunk != NULL) {
2615
- bool is_code = chunk->GetObject()->IsCode();
2616
- MemoryAllocator::Unprotect(chunk->address(), chunk->size(),
2617
- is_code ? EXECUTABLE : NOT_EXECUTABLE);
2618
- chunk = chunk->next();
2619
- }
2620
- }
2621
-
2622
- #endif
2623
-
2624
-
2625
- Object* LargeObjectSpace::AllocateRawInternal(int requested_size,
2626
- int object_size,
2627
- Executability executable) {
2628
- ASSERT(0 < object_size && object_size <= requested_size);
2629
-
2630
- // Check if we want to force a GC before growing the old space further.
2631
- // If so, fail the allocation.
2632
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
2633
- return Failure::RetryAfterGC(requested_size, identity());
2634
- }
2635
-
2636
- size_t chunk_size;
2637
- LargeObjectChunk* chunk =
2638
- LargeObjectChunk::New(requested_size, &chunk_size, executable);
2639
- if (chunk == NULL) {
2640
- return Failure::RetryAfterGC(requested_size, identity());
2641
- }
2642
-
2643
- size_ += static_cast<int>(chunk_size);
2644
- page_count_++;
2645
- chunk->set_next(first_chunk_);
2646
- chunk->set_size(chunk_size);
2647
- first_chunk_ = chunk;
2648
-
2649
- // Initialize page header.
2650
- Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
2651
- Address object_address = page->ObjectAreaStart();
2652
- // Clear the low order bit of the second word in the page to flag it as a
2653
- // large object page. If the chunk_size happened to be written there, its
2654
- // low order bit should already be clear.
2655
- ASSERT((chunk_size & 0x1) == 0);
2656
- page->SetIsLargeObjectPage(true);
2657
- page->SetRegionMarks(Page::kAllRegionsCleanMarks);
2658
- return HeapObject::FromAddress(object_address);
2659
- }
2660
-
2661
-
2662
- Object* LargeObjectSpace::AllocateRawCode(int size_in_bytes) {
2663
- ASSERT(0 < size_in_bytes);
2664
- return AllocateRawInternal(size_in_bytes,
2665
- size_in_bytes,
2666
- EXECUTABLE);
2667
- }
2668
-
2669
-
2670
- Object* LargeObjectSpace::AllocateRawFixedArray(int size_in_bytes) {
2671
- ASSERT(0 < size_in_bytes);
2672
- return AllocateRawInternal(size_in_bytes,
2673
- size_in_bytes,
2674
- NOT_EXECUTABLE);
2675
- }
2676
-
2677
-
2678
- Object* LargeObjectSpace::AllocateRaw(int size_in_bytes) {
2679
- ASSERT(0 < size_in_bytes);
2680
- return AllocateRawInternal(size_in_bytes,
2681
- size_in_bytes,
2682
- NOT_EXECUTABLE);
2683
- }
2684
-
2685
-
2686
- // GC support
2687
- Object* LargeObjectSpace::FindObject(Address a) {
2688
- for (LargeObjectChunk* chunk = first_chunk_;
2689
- chunk != NULL;
2690
- chunk = chunk->next()) {
2691
- Address chunk_address = chunk->address();
2692
- if (chunk_address <= a && a < chunk_address + chunk->size()) {
2693
- return chunk->GetObject();
2694
- }
2695
- }
2696
- return Failure::Exception();
2697
- }
2698
-
2699
- void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) {
2700
- LargeObjectIterator it(this);
2701
- for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
2702
- // We only have code, sequential strings, or fixed arrays in large
2703
- // object space, and only fixed arrays can possibly contain pointers to
2704
- // the young generation.
2705
- if (object->IsFixedArray()) {
2706
- Page* page = Page::FromAddress(object->address());
2707
- uint32_t marks = page->GetRegionMarks();
2708
- uint32_t newmarks = Page::kAllRegionsCleanMarks;
2709
-
2710
- if (marks != Page::kAllRegionsCleanMarks) {
2711
- // For a large page a single dirty mark corresponds to several
2712
- // regions (modulo 32). So we treat a large page as a sequence of
2713
- // normal pages of size Page::kPageSize having same dirty marks
2714
- // and subsequently iterate dirty regions on each of these pages.
2715
- Address start = object->address();
2716
- Address end = page->ObjectAreaEnd();
2717
- Address object_end = start + object->Size();
2718
-
2719
- // Iterate regions of the first normal page covering object.
2720
- uint32_t first_region_number = page->GetRegionNumberForAddress(start);
2721
- newmarks |=
2722
- Heap::IterateDirtyRegions(marks >> first_region_number,
2723
- start,
2724
- end,
2725
- &Heap::IteratePointersInDirtyRegion,
2726
- copy_object) << first_region_number;
2727
-
2728
- start = end;
2729
- end = start + Page::kPageSize;
2730
- while (end <= object_end) {
2731
- // Iterate next 32 regions.
2732
- newmarks |=
2733
- Heap::IterateDirtyRegions(marks,
2734
- start,
2735
- end,
2736
- &Heap::IteratePointersInDirtyRegion,
2737
- copy_object);
2738
- start = end;
2739
- end = start + Page::kPageSize;
2740
- }
2741
-
2742
- if (start != object_end) {
2743
- // Iterate the last piece of an object which is less than
2744
- // Page::kPageSize.
2745
- newmarks |=
2746
- Heap::IterateDirtyRegions(marks,
2747
- start,
2748
- object_end,
2749
- &Heap::IteratePointersInDirtyRegion,
2750
- copy_object);
2751
- }
2752
-
2753
- page->SetRegionMarks(newmarks);
2754
- }
2755
- }
2756
- }
2757
- }
2758
-
2759
-
2760
- void LargeObjectSpace::FreeUnmarkedObjects() {
2761
- LargeObjectChunk* previous = NULL;
2762
- LargeObjectChunk* current = first_chunk_;
2763
- while (current != NULL) {
2764
- HeapObject* object = current->GetObject();
2765
- if (object->IsMarked()) {
2766
- object->ClearMark();
2767
- MarkCompactCollector::tracer()->decrement_marked_count();
2768
- previous = current;
2769
- current = current->next();
2770
- } else {
2771
- Address chunk_address = current->address();
2772
- size_t chunk_size = current->size();
2773
-
2774
- // Cut the chunk out from the chunk list.
2775
- current = current->next();
2776
- if (previous == NULL) {
2777
- first_chunk_ = current;
2778
- } else {
2779
- previous->set_next(current);
2780
- }
2781
-
2782
- // Free the chunk.
2783
- MarkCompactCollector::ReportDeleteIfNeeded(object);
2784
- size_ -= static_cast<int>(chunk_size);
2785
- page_count_--;
2786
- MemoryAllocator::FreeRawMemory(chunk_address, chunk_size);
2787
- LOG(DeleteEvent("LargeObjectChunk", chunk_address));
2788
- }
2789
- }
2790
- }
2791
-
2792
-
2793
- bool LargeObjectSpace::Contains(HeapObject* object) {
2794
- Address address = object->address();
2795
- if (Heap::new_space()->Contains(address)) {
2796
- return false;
2797
- }
2798
- Page* page = Page::FromAddress(address);
2799
-
2800
- SLOW_ASSERT(!page->IsLargeObjectPage()
2801
- || !FindObject(address)->IsFailure());
2802
-
2803
- return page->IsLargeObjectPage();
2804
- }
2805
-
2806
-
2807
- #ifdef DEBUG
2808
- // We do not assume that the large object iterator works, because it depends
2809
- // on the invariants we are checking during verification.
2810
- void LargeObjectSpace::Verify() {
2811
- for (LargeObjectChunk* chunk = first_chunk_;
2812
- chunk != NULL;
2813
- chunk = chunk->next()) {
2814
- // Each chunk contains an object that starts at the large object page's
2815
- // object area start.
2816
- HeapObject* object = chunk->GetObject();
2817
- Page* page = Page::FromAddress(object->address());
2818
- ASSERT(object->address() == page->ObjectAreaStart());
2819
-
2820
- // The first word should be a map, and we expect all map pointers to be
2821
- // in map space.
2822
- Map* map = object->map();
2823
- ASSERT(map->IsMap());
2824
- ASSERT(Heap::map_space()->Contains(map));
2825
-
2826
- // We have only code, sequential strings, external strings
2827
- // (sequential strings that have been morphed into external
2828
- // strings), fixed arrays, and byte arrays in large object space.
2829
- ASSERT(object->IsCode() || object->IsSeqString() ||
2830
- object->IsExternalString() || object->IsFixedArray() ||
2831
- object->IsByteArray());
2832
-
2833
- // The object itself should look OK.
2834
- object->Verify();
2835
-
2836
- // Byte arrays and strings don't have interior pointers.
2837
- if (object->IsCode()) {
2838
- VerifyPointersVisitor code_visitor;
2839
- object->IterateBody(map->instance_type(),
2840
- object->Size(),
2841
- &code_visitor);
2842
- } else if (object->IsFixedArray()) {
2843
- // We loop over fixed arrays ourselves, rather then using the visitor,
2844
- // because the visitor doesn't support the start/offset iteration
2845
- // needed for IsRegionDirty.
2846
- FixedArray* array = FixedArray::cast(object);
2847
- for (int j = 0; j < array->length(); j++) {
2848
- Object* element = array->get(j);
2849
- if (element->IsHeapObject()) {
2850
- HeapObject* element_object = HeapObject::cast(element);
2851
- ASSERT(Heap::Contains(element_object));
2852
- ASSERT(element_object->map()->IsMap());
2853
- if (Heap::InNewSpace(element_object)) {
2854
- Address array_addr = object->address();
2855
- Address element_addr = array_addr + FixedArray::kHeaderSize +
2856
- j * kPointerSize;
2857
-
2858
- ASSERT(Page::FromAddress(array_addr)->IsRegionDirty(element_addr));
2859
- }
2860
- }
2861
- }
2862
- }
2863
- }
2864
- }
2865
-
2866
-
2867
- void LargeObjectSpace::Print() {
2868
- LargeObjectIterator it(this);
2869
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2870
- obj->Print();
2871
- }
2872
- }
2873
-
2874
-
2875
- void LargeObjectSpace::ReportStatistics() {
2876
- PrintF(" size: %d\n", size_);
2877
- int num_objects = 0;
2878
- ClearHistograms();
2879
- LargeObjectIterator it(this);
2880
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2881
- num_objects++;
2882
- CollectHistogramInfo(obj);
2883
- }
2884
-
2885
- PrintF(" number of objects %d\n", num_objects);
2886
- if (num_objects > 0) ReportHistogram(false);
2887
- }
2888
-
2889
-
2890
- void LargeObjectSpace::CollectCodeStatistics() {
2891
- LargeObjectIterator obj_it(this);
2892
- for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
2893
- if (obj->IsCode()) {
2894
- Code* code = Code::cast(obj);
2895
- code_kind_statistics[code->kind()] += code->Size();
2896
- }
2897
- }
2898
- }
2899
- #endif // DEBUG
2900
-
2901
- } } // namespace v8::internal