therubyracer 0.4.6 → 0.4.7

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (1148)
  1. data/Manifest.txt +1138 -0
  2. data/README.rdoc +7 -5
  3. data/Rakefile +12 -2
  4. data/ext/v8/Makefile +169 -0
  5. data/ext/v8/extconf.rb +20 -3
  6. data/ext/v8/upstream/2.0.6/.sconsign.dblite +0 -0
  7. data/ext/v8/upstream/2.0.6/AUTHORS +23 -0
  8. data/ext/v8/upstream/2.0.6/ChangeLog +1479 -0
  9. data/ext/v8/upstream/2.0.6/LICENSE +55 -0
  10. data/ext/v8/upstream/2.0.6/SConstruct +1028 -0
  11. data/ext/v8/upstream/2.0.6/benchmarks/README.txt +63 -0
  12. data/ext/v8/upstream/2.0.6/benchmarks/base.js +264 -0
  13. data/ext/v8/upstream/2.0.6/benchmarks/crypto.js +1698 -0
  14. data/ext/v8/upstream/2.0.6/benchmarks/deltablue.js +880 -0
  15. data/ext/v8/upstream/2.0.6/benchmarks/earley-boyer.js +4684 -0
  16. data/ext/v8/upstream/2.0.6/benchmarks/raytrace.js +935 -0
  17. data/ext/v8/upstream/2.0.6/benchmarks/regexp.js +1614 -0
  18. data/ext/v8/upstream/2.0.6/benchmarks/revisions.html +86 -0
  19. data/ext/v8/upstream/2.0.6/benchmarks/richards.js +539 -0
  20. data/ext/v8/upstream/2.0.6/benchmarks/run.html +141 -0
  21. data/ext/v8/upstream/2.0.6/benchmarks/run.js +61 -0
  22. data/ext/v8/upstream/2.0.6/benchmarks/splay.js +378 -0
  23. data/ext/v8/upstream/2.0.6/benchmarks/style.css +77 -0
  24. data/ext/v8/upstream/2.0.6/benchmarks/v8-logo.png +0 -0
  25. data/ext/v8/upstream/2.0.6/include/v8-debug.h +275 -0
  26. data/ext/v8/upstream/2.0.6/include/v8.h +3236 -0
  27. data/ext/v8/upstream/2.0.6/samples/SConscript +38 -0
  28. data/ext/v8/upstream/2.0.6/samples/count-hosts.js +42 -0
  29. data/ext/v8/upstream/2.0.6/samples/process.cc +622 -0
  30. data/ext/v8/upstream/2.0.6/samples/shell.cc +303 -0
  31. data/ext/v8/upstream/2.0.6/src/SConscript +283 -0
  32. data/ext/v8/upstream/2.0.6/src/accessors.cc +695 -0
  33. data/ext/v8/upstream/2.0.6/src/accessors.h +114 -0
  34. data/ext/v8/upstream/2.0.6/src/allocation.cc +198 -0
  35. data/ext/v8/upstream/2.0.6/src/allocation.h +169 -0
  36. data/ext/v8/upstream/2.0.6/src/api.cc +3831 -0
  37. data/ext/v8/upstream/2.0.6/src/api.h +479 -0
  38. data/ext/v8/upstream/2.0.6/src/apinatives.js +110 -0
  39. data/ext/v8/upstream/2.0.6/src/apiutils.h +69 -0
  40. data/ext/v8/upstream/2.0.6/src/arguments.h +97 -0
  41. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm-inl.h +277 -0
  42. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.cc +1821 -0
  43. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.h +1027 -0
  44. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2-inl.h +267 -0
  45. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.cc +1821 -0
  46. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.h +1027 -0
  47. data/ext/v8/upstream/2.0.6/src/arm/builtins-arm.cc +1271 -0
  48. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm-inl.h +74 -0
  49. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.cc +6682 -0
  50. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.h +535 -0
  51. data/ext/v8/upstream/2.0.6/src/arm/constants-arm.cc +112 -0
  52. data/ext/v8/upstream/2.0.6/src/arm/constants-arm.h +347 -0
  53. data/ext/v8/upstream/2.0.6/src/arm/cpu-arm.cc +132 -0
  54. data/ext/v8/upstream/2.0.6/src/arm/debug-arm.cc +213 -0
  55. data/ext/v8/upstream/2.0.6/src/arm/disasm-arm.cc +1166 -0
  56. data/ext/v8/upstream/2.0.6/src/arm/fast-codegen-arm.cc +1698 -0
  57. data/ext/v8/upstream/2.0.6/src/arm/frames-arm.cc +123 -0
  58. data/ext/v8/upstream/2.0.6/src/arm/frames-arm.h +162 -0
  59. data/ext/v8/upstream/2.0.6/src/arm/ic-arm.cc +849 -0
  60. data/ext/v8/upstream/2.0.6/src/arm/jump-target-arm.cc +238 -0
  61. data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.cc +1259 -0
  62. data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.h +423 -0
  63. data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.cc +1266 -0
  64. data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.h +282 -0
  65. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm-inl.h +103 -0
  66. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.cc +59 -0
  67. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.h +43 -0
  68. data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.cc +2264 -0
  69. data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.h +306 -0
  70. data/ext/v8/upstream/2.0.6/src/arm/stub-cache-arm.cc +1516 -0
  71. data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.cc +412 -0
  72. data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.h +532 -0
  73. data/ext/v8/upstream/2.0.6/src/array.js +1154 -0
  74. data/ext/v8/upstream/2.0.6/src/assembler.cc +772 -0
  75. data/ext/v8/upstream/2.0.6/src/assembler.h +525 -0
  76. data/ext/v8/upstream/2.0.6/src/ast.cc +512 -0
  77. data/ext/v8/upstream/2.0.6/src/ast.h +1820 -0
  78. data/ext/v8/upstream/2.0.6/src/bootstrapper.cc +1680 -0
  79. data/ext/v8/upstream/2.0.6/src/bootstrapper.h +103 -0
  80. data/ext/v8/upstream/2.0.6/src/builtins.cc +851 -0
  81. data/ext/v8/upstream/2.0.6/src/builtins.h +245 -0
  82. data/ext/v8/upstream/2.0.6/src/bytecodes-irregexp.h +104 -0
  83. data/ext/v8/upstream/2.0.6/src/char-predicates-inl.h +86 -0
  84. data/ext/v8/upstream/2.0.6/src/char-predicates.h +65 -0
  85. data/ext/v8/upstream/2.0.6/src/checks.cc +100 -0
  86. data/ext/v8/upstream/2.0.6/src/checks.h +284 -0
  87. data/ext/v8/upstream/2.0.6/src/code-stubs.cc +164 -0
  88. data/ext/v8/upstream/2.0.6/src/code-stubs.h +164 -0
  89. data/ext/v8/upstream/2.0.6/src/code.h +68 -0
  90. data/ext/v8/upstream/2.0.6/src/codegen-inl.h +88 -0
  91. data/ext/v8/upstream/2.0.6/src/codegen.cc +504 -0
  92. data/ext/v8/upstream/2.0.6/src/codegen.h +522 -0
  93. data/ext/v8/upstream/2.0.6/src/compilation-cache.cc +490 -0
  94. data/ext/v8/upstream/2.0.6/src/compilation-cache.h +98 -0
  95. data/ext/v8/upstream/2.0.6/src/compiler.cc +1132 -0
  96. data/ext/v8/upstream/2.0.6/src/compiler.h +107 -0
  97. data/ext/v8/upstream/2.0.6/src/contexts.cc +256 -0
  98. data/ext/v8/upstream/2.0.6/src/contexts.h +345 -0
  99. data/ext/v8/upstream/2.0.6/src/conversions-inl.h +95 -0
  100. data/ext/v8/upstream/2.0.6/src/conversions.cc +709 -0
  101. data/ext/v8/upstream/2.0.6/src/conversions.h +118 -0
  102. data/ext/v8/upstream/2.0.6/src/counters.cc +78 -0
  103. data/ext/v8/upstream/2.0.6/src/counters.h +239 -0
  104. data/ext/v8/upstream/2.0.6/src/cpu.h +65 -0
  105. data/ext/v8/upstream/2.0.6/src/d8-debug.cc +345 -0
  106. data/ext/v8/upstream/2.0.6/src/d8-debug.h +155 -0
  107. data/ext/v8/upstream/2.0.6/src/d8-posix.cc +675 -0
  108. data/ext/v8/upstream/2.0.6/src/d8-readline.cc +128 -0
  109. data/ext/v8/upstream/2.0.6/src/d8-windows.cc +42 -0
  110. data/ext/v8/upstream/2.0.6/src/d8.cc +776 -0
  111. data/ext/v8/upstream/2.0.6/src/d8.h +225 -0
  112. data/ext/v8/upstream/2.0.6/src/d8.js +1625 -0
  113. data/ext/v8/upstream/2.0.6/src/date-delay.js +1138 -0
  114. data/ext/v8/upstream/2.0.6/src/dateparser-inl.h +114 -0
  115. data/ext/v8/upstream/2.0.6/src/dateparser.cc +186 -0
  116. data/ext/v8/upstream/2.0.6/src/dateparser.h +240 -0
  117. data/ext/v8/upstream/2.0.6/src/debug-agent.cc +425 -0
  118. data/ext/v8/upstream/2.0.6/src/debug-agent.h +129 -0
  119. data/ext/v8/upstream/2.0.6/src/debug-delay.js +2073 -0
  120. data/ext/v8/upstream/2.0.6/src/debug.cc +2751 -0
  121. data/ext/v8/upstream/2.0.6/src/debug.h +866 -0
  122. data/ext/v8/upstream/2.0.6/src/disasm.h +77 -0
  123. data/ext/v8/upstream/2.0.6/src/disassembler.cc +318 -0
  124. data/ext/v8/upstream/2.0.6/src/disassembler.h +56 -0
  125. data/ext/v8/upstream/2.0.6/src/dtoa-config.c +91 -0
  126. data/ext/v8/upstream/2.0.6/src/execution.cc +701 -0
  127. data/ext/v8/upstream/2.0.6/src/execution.h +312 -0
  128. data/ext/v8/upstream/2.0.6/src/factory.cc +957 -0
  129. data/ext/v8/upstream/2.0.6/src/factory.h +393 -0
  130. data/ext/v8/upstream/2.0.6/src/fast-codegen.cc +725 -0
  131. data/ext/v8/upstream/2.0.6/src/fast-codegen.h +371 -0
  132. data/ext/v8/upstream/2.0.6/src/flag-definitions.h +426 -0
  133. data/ext/v8/upstream/2.0.6/src/flags.cc +555 -0
  134. data/ext/v8/upstream/2.0.6/src/flags.h +81 -0
  135. data/ext/v8/upstream/2.0.6/src/frame-element.cc +45 -0
  136. data/ext/v8/upstream/2.0.6/src/frame-element.h +235 -0
  137. data/ext/v8/upstream/2.0.6/src/frames-inl.h +215 -0
  138. data/ext/v8/upstream/2.0.6/src/frames.cc +749 -0
  139. data/ext/v8/upstream/2.0.6/src/frames.h +659 -0
  140. data/ext/v8/upstream/2.0.6/src/func-name-inferrer.cc +76 -0
  141. data/ext/v8/upstream/2.0.6/src/func-name-inferrer.h +135 -0
  142. data/ext/v8/upstream/2.0.6/src/global-handles.cc +516 -0
  143. data/ext/v8/upstream/2.0.6/src/global-handles.h +180 -0
  144. data/ext/v8/upstream/2.0.6/src/globals.h +608 -0
  145. data/ext/v8/upstream/2.0.6/src/handles-inl.h +76 -0
  146. data/ext/v8/upstream/2.0.6/src/handles.cc +811 -0
  147. data/ext/v8/upstream/2.0.6/src/handles.h +367 -0
  148. data/ext/v8/upstream/2.0.6/src/hashmap.cc +226 -0
  149. data/ext/v8/upstream/2.0.6/src/hashmap.h +120 -0
  150. data/ext/v8/upstream/2.0.6/src/heap-inl.h +407 -0
  151. data/ext/v8/upstream/2.0.6/src/heap-profiler.cc +695 -0
  152. data/ext/v8/upstream/2.0.6/src/heap-profiler.h +277 -0
  153. data/ext/v8/upstream/2.0.6/src/heap.cc +4204 -0
  154. data/ext/v8/upstream/2.0.6/src/heap.h +1704 -0
  155. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32-inl.h +325 -0
  156. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.cc +2375 -0
  157. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.h +914 -0
  158. data/ext/v8/upstream/2.0.6/src/ia32/builtins-ia32.cc +1222 -0
  159. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32-inl.h +46 -0
  160. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.cc +9770 -0
  161. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.h +834 -0
  162. data/ext/v8/upstream/2.0.6/src/ia32/cpu-ia32.cc +79 -0
  163. data/ext/v8/upstream/2.0.6/src/ia32/debug-ia32.cc +208 -0
  164. data/ext/v8/upstream/2.0.6/src/ia32/disasm-ia32.cc +1357 -0
  165. data/ext/v8/upstream/2.0.6/src/ia32/fast-codegen-ia32.cc +1813 -0
  166. data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.cc +111 -0
  167. data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.h +135 -0
  168. data/ext/v8/upstream/2.0.6/src/ia32/ic-ia32.cc +1490 -0
  169. data/ext/v8/upstream/2.0.6/src/ia32/jump-target-ia32.cc +432 -0
  170. data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.cc +1517 -0
  171. data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.h +528 -0
  172. data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.cc +1219 -0
  173. data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.h +230 -0
  174. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32-inl.h +82 -0
  175. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.cc +99 -0
  176. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.h +43 -0
  177. data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.cc +30 -0
  178. data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.h +62 -0
  179. data/ext/v8/upstream/2.0.6/src/ia32/stub-cache-ia32.cc +1961 -0
  180. data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.cc +1105 -0
  181. data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.h +580 -0
  182. data/ext/v8/upstream/2.0.6/src/ic-inl.h +93 -0
  183. data/ext/v8/upstream/2.0.6/src/ic.cc +1426 -0
  184. data/ext/v8/upstream/2.0.6/src/ic.h +443 -0
  185. data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.cc +646 -0
  186. data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.h +48 -0
  187. data/ext/v8/upstream/2.0.6/src/json-delay.js +254 -0
  188. data/ext/v8/upstream/2.0.6/src/jsregexp.cc +5234 -0
  189. data/ext/v8/upstream/2.0.6/src/jsregexp.h +1439 -0
  190. data/ext/v8/upstream/2.0.6/src/jump-target-inl.h +49 -0
  191. data/ext/v8/upstream/2.0.6/src/jump-target.cc +383 -0
  192. data/ext/v8/upstream/2.0.6/src/jump-target.h +280 -0
  193. data/ext/v8/upstream/2.0.6/src/list-inl.h +166 -0
  194. data/ext/v8/upstream/2.0.6/src/list.h +158 -0
  195. data/ext/v8/upstream/2.0.6/src/log-inl.h +126 -0
  196. data/ext/v8/upstream/2.0.6/src/log-utils.cc +503 -0
  197. data/ext/v8/upstream/2.0.6/src/log-utils.h +292 -0
  198. data/ext/v8/upstream/2.0.6/src/log.cc +1457 -0
  199. data/ext/v8/upstream/2.0.6/src/log.h +371 -0
  200. data/ext/v8/upstream/2.0.6/src/macro-assembler.h +93 -0
  201. data/ext/v8/upstream/2.0.6/src/macros.py +137 -0
  202. data/ext/v8/upstream/2.0.6/src/mark-compact.cc +2007 -0
  203. data/ext/v8/upstream/2.0.6/src/mark-compact.h +442 -0
  204. data/ext/v8/upstream/2.0.6/src/math.js +263 -0
  205. data/ext/v8/upstream/2.0.6/src/memory.h +74 -0
  206. data/ext/v8/upstream/2.0.6/src/messages.cc +177 -0
  207. data/ext/v8/upstream/2.0.6/src/messages.h +112 -0
  208. data/ext/v8/upstream/2.0.6/src/messages.js +937 -0
  209. data/ext/v8/upstream/2.0.6/src/mirror-delay.js +2332 -0
  210. data/ext/v8/upstream/2.0.6/src/mksnapshot.cc +169 -0
  211. data/ext/v8/upstream/2.0.6/src/natives.h +63 -0
  212. data/ext/v8/upstream/2.0.6/src/objects-debug.cc +1317 -0
  213. data/ext/v8/upstream/2.0.6/src/objects-inl.h +3044 -0
  214. data/ext/v8/upstream/2.0.6/src/objects.cc +8306 -0
  215. data/ext/v8/upstream/2.0.6/src/objects.h +4960 -0
  216. data/ext/v8/upstream/2.0.6/src/oprofile-agent.cc +116 -0
  217. data/ext/v8/upstream/2.0.6/src/oprofile-agent.h +69 -0
  218. data/ext/v8/upstream/2.0.6/src/parser.cc +4810 -0
  219. data/ext/v8/upstream/2.0.6/src/parser.h +195 -0
  220. data/ext/v8/upstream/2.0.6/src/platform-freebsd.cc +645 -0
  221. data/ext/v8/upstream/2.0.6/src/platform-linux.cc +808 -0
  222. data/ext/v8/upstream/2.0.6/src/platform-macos.cc +643 -0
  223. data/ext/v8/upstream/2.0.6/src/platform-nullos.cc +454 -0
  224. data/ext/v8/upstream/2.0.6/src/platform-openbsd.cc +597 -0
  225. data/ext/v8/upstream/2.0.6/src/platform-posix.cc +380 -0
  226. data/ext/v8/upstream/2.0.6/src/platform-win32.cc +1908 -0
  227. data/ext/v8/upstream/2.0.6/src/platform.h +556 -0
  228. data/ext/v8/upstream/2.0.6/src/prettyprinter.cc +1511 -0
  229. data/ext/v8/upstream/2.0.6/src/prettyprinter.h +219 -0
  230. data/ext/v8/upstream/2.0.6/src/property.cc +96 -0
  231. data/ext/v8/upstream/2.0.6/src/property.h +327 -0
  232. data/ext/v8/upstream/2.0.6/src/regexp-delay.js +406 -0
  233. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  234. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.cc +464 -0
  235. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.h +141 -0
  236. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.cc +356 -0
  237. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.h +103 -0
  238. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.cc +240 -0
  239. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.h +220 -0
  240. data/ext/v8/upstream/2.0.6/src/regexp-stack.cc +103 -0
  241. data/ext/v8/upstream/2.0.6/src/regexp-stack.h +123 -0
  242. data/ext/v8/upstream/2.0.6/src/register-allocator-inl.h +74 -0
  243. data/ext/v8/upstream/2.0.6/src/register-allocator.cc +100 -0
  244. data/ext/v8/upstream/2.0.6/src/register-allocator.h +295 -0
  245. data/ext/v8/upstream/2.0.6/src/rewriter.cc +855 -0
  246. data/ext/v8/upstream/2.0.6/src/rewriter.h +54 -0
  247. data/ext/v8/upstream/2.0.6/src/runtime.cc +8163 -0
  248. data/ext/v8/upstream/2.0.6/src/runtime.h +432 -0
  249. data/ext/v8/upstream/2.0.6/src/runtime.js +626 -0
  250. data/ext/v8/upstream/2.0.6/src/scanner.cc +1098 -0
  251. data/ext/v8/upstream/2.0.6/src/scanner.h +425 -0
  252. data/ext/v8/upstream/2.0.6/src/scopeinfo.cc +649 -0
  253. data/ext/v8/upstream/2.0.6/src/scopeinfo.h +236 -0
  254. data/ext/v8/upstream/2.0.6/src/scopes.cc +963 -0
  255. data/ext/v8/upstream/2.0.6/src/scopes.h +401 -0
  256. data/ext/v8/upstream/2.0.6/src/serialize.cc +1260 -0
  257. data/ext/v8/upstream/2.0.6/src/serialize.h +404 -0
  258. data/ext/v8/upstream/2.0.6/src/shell.h +55 -0
  259. data/ext/v8/upstream/2.0.6/src/simulator.h +41 -0
  260. data/ext/v8/upstream/2.0.6/src/smart-pointer.h +109 -0
  261. data/ext/v8/upstream/2.0.6/src/snapshot-common.cc +97 -0
  262. data/ext/v8/upstream/2.0.6/src/snapshot-empty.cc +40 -0
  263. data/ext/v8/upstream/2.0.6/src/snapshot.h +59 -0
  264. data/ext/v8/upstream/2.0.6/src/spaces-inl.h +372 -0
  265. data/ext/v8/upstream/2.0.6/src/spaces.cc +2864 -0
  266. data/ext/v8/upstream/2.0.6/src/spaces.h +2072 -0
  267. data/ext/v8/upstream/2.0.6/src/string-stream.cc +584 -0
  268. data/ext/v8/upstream/2.0.6/src/string-stream.h +189 -0
  269. data/ext/v8/upstream/2.0.6/src/string.js +901 -0
  270. data/ext/v8/upstream/2.0.6/src/stub-cache.cc +1108 -0
  271. data/ext/v8/upstream/2.0.6/src/stub-cache.h +578 -0
  272. data/ext/v8/upstream/2.0.6/src/third_party/dtoa/COPYING +15 -0
  273. data/ext/v8/upstream/2.0.6/src/third_party/dtoa/dtoa.c +3330 -0
  274. data/ext/v8/upstream/2.0.6/src/third_party/valgrind/valgrind.h +3925 -0
  275. data/ext/v8/upstream/2.0.6/src/token.cc +56 -0
  276. data/ext/v8/upstream/2.0.6/src/token.h +270 -0
  277. data/ext/v8/upstream/2.0.6/src/top.cc +991 -0
  278. data/ext/v8/upstream/2.0.6/src/top.h +459 -0
  279. data/ext/v8/upstream/2.0.6/src/unicode-inl.h +238 -0
  280. data/ext/v8/upstream/2.0.6/src/unicode.cc +749 -0
  281. data/ext/v8/upstream/2.0.6/src/unicode.h +279 -0
  282. data/ext/v8/upstream/2.0.6/src/uri.js +415 -0
  283. data/ext/v8/upstream/2.0.6/src/usage-analyzer.cc +426 -0
  284. data/ext/v8/upstream/2.0.6/src/usage-analyzer.h +40 -0
  285. data/ext/v8/upstream/2.0.6/src/utils.cc +322 -0
  286. data/ext/v8/upstream/2.0.6/src/utils.h +592 -0
  287. data/ext/v8/upstream/2.0.6/src/v8-counters.cc +55 -0
  288. data/ext/v8/upstream/2.0.6/src/v8-counters.h +198 -0
  289. data/ext/v8/upstream/2.0.6/src/v8.cc +193 -0
  290. data/ext/v8/upstream/2.0.6/src/v8.h +119 -0
  291. data/ext/v8/upstream/2.0.6/src/v8natives.js +846 -0
  292. data/ext/v8/upstream/2.0.6/src/v8threads.cc +450 -0
  293. data/ext/v8/upstream/2.0.6/src/v8threads.h +144 -0
  294. data/ext/v8/upstream/2.0.6/src/variables.cc +163 -0
  295. data/ext/v8/upstream/2.0.6/src/variables.h +235 -0
  296. data/ext/v8/upstream/2.0.6/src/version.cc +88 -0
  297. data/ext/v8/upstream/2.0.6/src/version.h +64 -0
  298. data/ext/v8/upstream/2.0.6/src/virtual-frame.cc +381 -0
  299. data/ext/v8/upstream/2.0.6/src/virtual-frame.h +44 -0
  300. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64-inl.h +352 -0
  301. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.cc +2539 -0
  302. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.h +1399 -0
  303. data/ext/v8/upstream/2.0.6/src/x64/builtins-x64.cc +1255 -0
  304. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64-inl.h +46 -0
  305. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.cc +8223 -0
  306. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.h +785 -0
  307. data/ext/v8/upstream/2.0.6/src/x64/cpu-x64.cc +79 -0
  308. data/ext/v8/upstream/2.0.6/src/x64/debug-x64.cc +202 -0
  309. data/ext/v8/upstream/2.0.6/src/x64/disasm-x64.cc +1596 -0
  310. data/ext/v8/upstream/2.0.6/src/x64/fast-codegen-x64.cc +1820 -0
  311. data/ext/v8/upstream/2.0.6/src/x64/frames-x64.cc +109 -0
  312. data/ext/v8/upstream/2.0.6/src/x64/frames-x64.h +121 -0
  313. data/ext/v8/upstream/2.0.6/src/x64/ic-x64.cc +1392 -0
  314. data/ext/v8/upstream/2.0.6/src/x64/jump-target-x64.cc +432 -0
  315. data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.cc +2409 -0
  316. data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.h +765 -0
  317. data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.cc +1337 -0
  318. data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.h +295 -0
  319. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64-inl.h +86 -0
  320. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.cc +84 -0
  321. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.h +43 -0
  322. data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.cc +27 -0
  323. data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.h +63 -0
  324. data/ext/v8/upstream/2.0.6/src/x64/stub-cache-x64.cc +1884 -0
  325. data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.cc +1089 -0
  326. data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.h +560 -0
  327. data/ext/v8/upstream/2.0.6/src/zone-inl.h +297 -0
  328. data/ext/v8/upstream/2.0.6/src/zone.cc +193 -0
  329. data/ext/v8/upstream/2.0.6/src/zone.h +305 -0
  330. data/ext/v8/upstream/2.0.6/test/cctest/SConscript +95 -0
  331. data/ext/v8/upstream/2.0.6/test/cctest/cctest.cc +126 -0
  332. data/ext/v8/upstream/2.0.6/test/cctest/cctest.h +211 -0
  333. data/ext/v8/upstream/2.0.6/test/cctest/cctest.status +54 -0
  334. data/ext/v8/upstream/2.0.6/test/cctest/test-accessors.cc +450 -0
  335. data/ext/v8/upstream/2.0.6/test/cctest/test-alloc.cc +215 -0
  336. data/ext/v8/upstream/2.0.6/test/cctest/test-api.cc +8699 -0
  337. data/ext/v8/upstream/2.0.6/test/cctest/test-assembler-arm.cc +227 -0
  338. data/ext/v8/upstream/2.0.6/test/cctest/test-assembler-ia32.cc +395 -0
  339. data/ext/v8/upstream/2.0.6/test/cctest/test-assembler-x64.cc +292 -0
  340. data/ext/v8/upstream/2.0.6/test/cctest/test-ast.cc +97 -0
  341. data/ext/v8/upstream/2.0.6/test/cctest/test-compiler.cc +318 -0
  342. data/ext/v8/upstream/2.0.6/test/cctest/test-conversions.cc +130 -0
  343. data/ext/v8/upstream/2.0.6/test/cctest/test-debug.cc +5788 -0
  344. data/ext/v8/upstream/2.0.6/test/cctest/test-decls.cc +593 -0
  345. data/ext/v8/upstream/2.0.6/test/cctest/test-disasm-arm.cc +281 -0
  346. data/ext/v8/upstream/2.0.6/test/cctest/test-disasm-ia32.cc +418 -0
  347. data/ext/v8/upstream/2.0.6/test/cctest/test-flags.cc +234 -0
  348. data/ext/v8/upstream/2.0.6/test/cctest/test-func-name-inference.cc +267 -0
  349. data/ext/v8/upstream/2.0.6/test/cctest/test-hashmap.cc +176 -0
  350. data/ext/v8/upstream/2.0.6/test/cctest/test-heap-profiler.cc +396 -0
  351. data/ext/v8/upstream/2.0.6/test/cctest/test-heap.cc +796 -0
  352. data/ext/v8/upstream/2.0.6/test/cctest/test-list.cc +101 -0
  353. data/ext/v8/upstream/2.0.6/test/cctest/test-lock.cc +63 -0
  354. data/ext/v8/upstream/2.0.6/test/cctest/test-log-stack-tracer.cc +372 -0
  355. data/ext/v8/upstream/2.0.6/test/cctest/test-log-utils.cc +310 -0
  356. data/ext/v8/upstream/2.0.6/test/cctest/test-log.cc +1081 -0
  357. data/ext/v8/upstream/2.0.6/test/cctest/test-macro-assembler-x64.cc +2104 -0
  358. data/ext/v8/upstream/2.0.6/test/cctest/test-mark-compact.cc +341 -0
  359. data/ext/v8/upstream/2.0.6/test/cctest/test-parsing.cc +129 -0
  360. data/ext/v8/upstream/2.0.6/test/cctest/test-platform-linux.cc +80 -0
  361. data/ext/v8/upstream/2.0.6/test/cctest/test-platform-macos.cc +10 -0
  362. data/ext/v8/upstream/2.0.6/test/cctest/test-platform-nullos.cc +80 -0
  363. data/ext/v8/upstream/2.0.6/test/cctest/test-platform-win32.cc +26 -0
  364. data/ext/v8/upstream/2.0.6/test/cctest/test-regexp.cc +1815 -0
  365. data/ext/v8/upstream/2.0.6/test/cctest/test-serialize.cc +438 -0
  366. data/ext/v8/upstream/2.0.6/test/cctest/test-sockets.cc +162 -0
  367. data/ext/v8/upstream/2.0.6/test/cctest/test-spaces.cc +248 -0
  368. data/ext/v8/upstream/2.0.6/test/cctest/test-strings.cc +432 -0
  369. data/ext/v8/upstream/2.0.6/test/cctest/test-thread-termination.cc +290 -0
  370. data/ext/v8/upstream/2.0.6/test/cctest/test-threads.cc +52 -0
  371. data/ext/v8/upstream/2.0.6/test/cctest/test-utils.cc +186 -0
  372. data/ext/v8/upstream/2.0.6/test/cctest/test-version.cc +89 -0
  373. data/ext/v8/upstream/2.0.6/test/cctest/testcfg.py +108 -0
  374. data/ext/v8/upstream/2.0.6/test/es5conform/README +14 -0
  375. data/ext/v8/upstream/2.0.6/test/es5conform/es5conform.status +226 -0
  376. data/ext/v8/upstream/2.0.6/test/es5conform/harness-adapt.js +74 -0
  377. data/ext/v8/upstream/2.0.6/test/es5conform/testcfg.py +108 -0
  378. data/ext/v8/upstream/2.0.6/test/message/message.status +31 -0
  379. data/ext/v8/upstream/2.0.6/test/message/overwritten-builtins.js +31 -0
  380. data/ext/v8/upstream/2.0.6/test/message/overwritten-builtins.out +30 -0
  381. data/ext/v8/upstream/2.0.6/test/message/regress/regress-73.js +33 -0
  382. data/ext/v8/upstream/2.0.6/test/message/regress/regress-73.out +30 -0
  383. data/ext/v8/upstream/2.0.6/test/message/regress/regress-75.js +32 -0
  384. data/ext/v8/upstream/2.0.6/test/message/regress/regress-75.out +30 -0
  385. data/ext/v8/upstream/2.0.6/test/message/simple-throw.js +28 -0
  386. data/ext/v8/upstream/2.0.6/test/message/simple-throw.out +30 -0
  387. data/ext/v8/upstream/2.0.6/test/message/testcfg.py +135 -0
  388. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-no-message.js +51 -0
  389. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-no-message.out +26 -0
  390. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-return-in-finally.js +39 -0
  391. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-return-in-finally.out +28 -0
  392. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-catch-and-finally.js +34 -0
  393. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-catch-and-finally.out +30 -0
  394. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-catch.js +34 -0
  395. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-catch.out +30 -0
  396. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-finally.js +34 -0
  397. data/ext/v8/upstream/2.0.6/test/message/try-catch-finally-throw-in-finally.out +30 -0
  398. data/ext/v8/upstream/2.0.6/test/message/try-finally-return-in-finally.js +37 -0
  399. data/ext/v8/upstream/2.0.6/test/message/try-finally-return-in-finally.out +28 -0
  400. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-finally.js +32 -0
  401. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-finally.out +30 -0
  402. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-try-and-finally.js +32 -0
  403. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-try-and-finally.out +30 -0
  404. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-try.js +32 -0
  405. data/ext/v8/upstream/2.0.6/test/message/try-finally-throw-in-try.out +30 -0
  406. data/ext/v8/upstream/2.0.6/test/mjsunit/api-call-after-bypassed-exception.js +39 -0
  407. data/ext/v8/upstream/2.0.6/test/mjsunit/apply.js +196 -0
  408. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-apply.js +134 -0
  409. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-call-apply.js +41 -0
  410. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-enum.js +52 -0
  411. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-indirect.js +47 -0
  412. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-lazy.js +47 -0
  413. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-opt.js +130 -0
  414. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments-read-and-assignment.js +164 -0
  415. data/ext/v8/upstream/2.0.6/test/mjsunit/arguments.js +97 -0
  416. data/ext/v8/upstream/2.0.6/test/mjsunit/array-concat.js +120 -0
  417. data/ext/v8/upstream/2.0.6/test/mjsunit/array-constructor.js +119 -0
  418. data/ext/v8/upstream/2.0.6/test/mjsunit/array-functions-prototype.js +159 -0
  419. data/ext/v8/upstream/2.0.6/test/mjsunit/array-indexing.js +66 -0
  420. data/ext/v8/upstream/2.0.6/test/mjsunit/array-iteration.js +228 -0
  421. data/ext/v8/upstream/2.0.6/test/mjsunit/array-join.js +45 -0
  422. data/ext/v8/upstream/2.0.6/test/mjsunit/array-length-number-conversion.js +53 -0
  423. data/ext/v8/upstream/2.0.6/test/mjsunit/array-length.js +111 -0
  424. data/ext/v8/upstream/2.0.6/test/mjsunit/array-reduce.js +514 -0
  425. data/ext/v8/upstream/2.0.6/test/mjsunit/array-sort.js +362 -0
  426. data/ext/v8/upstream/2.0.6/test/mjsunit/array-splice.js +314 -0
  427. data/ext/v8/upstream/2.0.6/test/mjsunit/ascii-regexp-subject.js +49 -0
  428. data/ext/v8/upstream/2.0.6/test/mjsunit/big-array-literal.js +111 -0
  429. data/ext/v8/upstream/2.0.6/test/mjsunit/big-object-literal.js +114 -0
  430. data/ext/v8/upstream/2.0.6/test/mjsunit/binary-operation-overwrite.js +36 -0
  431. data/ext/v8/upstream/2.0.6/test/mjsunit/bit-not.js +75 -0
  432. data/ext/v8/upstream/2.0.6/test/mjsunit/bitwise-operations-undefined.js +49 -0
  433. data/ext/v8/upstream/2.0.6/test/mjsunit/body-not-visible.js +39 -0
  434. data/ext/v8/upstream/2.0.6/test/mjsunit/bugs/bug-1344252.js +79 -0
  435. data/ext/v8/upstream/2.0.6/test/mjsunit/bugs/bug-222.js +42 -0
  436. data/ext/v8/upstream/2.0.6/test/mjsunit/bugs/bug-223.js +39 -0
  437. data/ext/v8/upstream/2.0.6/test/mjsunit/bugs/bug-900066.js +38 -0
  438. data/ext/v8/upstream/2.0.6/test/mjsunit/bugs/bug-941049.js +100 -0
  439. data/ext/v8/upstream/2.0.6/test/mjsunit/call-non-function-call.js +38 -0
  440. data/ext/v8/upstream/2.0.6/test/mjsunit/call-non-function.js +63 -0
  441. data/ext/v8/upstream/2.0.6/test/mjsunit/call.js +87 -0
  442. data/ext/v8/upstream/2.0.6/test/mjsunit/char-escape.js +53 -0
  443. data/ext/v8/upstream/2.0.6/test/mjsunit/class-of-builtins.js +50 -0
  444. data/ext/v8/upstream/2.0.6/test/mjsunit/closure.js +37 -0
  445. data/ext/v8/upstream/2.0.6/test/mjsunit/codegen-coverage.js +91 -0
  446. data/ext/v8/upstream/2.0.6/test/mjsunit/compare-character.js +50 -0
  447. data/ext/v8/upstream/2.0.6/test/mjsunit/compare-nan.js +66 -0
  448. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/countoperation.js +111 -0
  449. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/function-call.js +52 -0
  450. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/globals.js +65 -0
  451. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/jsnatives.js +33 -0
  452. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/literals-assignment.js +104 -0
  453. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/literals.js +52 -0
  454. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/loops.js +35 -0
  455. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/objectliterals.js +57 -0
  456. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/property-simple.js +39 -0
  457. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/short-circuit.js +102 -0
  458. data/ext/v8/upstream/2.0.6/test/mjsunit/compiler/thisfunction.js +35 -0
  459. data/ext/v8/upstream/2.0.6/test/mjsunit/const-declaration.js +172 -0
  460. data/ext/v8/upstream/2.0.6/test/mjsunit/const-eval-init.js +111 -0
  461. data/ext/v8/upstream/2.0.6/test/mjsunit/const-redecl.js +220 -0
  462. data/ext/v8/upstream/2.0.6/test/mjsunit/const.js +70 -0
  463. data/ext/v8/upstream/2.0.6/test/mjsunit/constant-folding.js +232 -0
  464. data/ext/v8/upstream/2.0.6/test/mjsunit/context-variable-assignments.js +37 -0
  465. data/ext/v8/upstream/2.0.6/test/mjsunit/cyclic-array-to-string.js +65 -0
  466. data/ext/v8/upstream/2.0.6/test/mjsunit/cyrillic.js +199 -0
  467. data/ext/v8/upstream/2.0.6/test/mjsunit/d8-os.js +180 -0
  468. data/ext/v8/upstream/2.0.6/test/mjsunit/date-parse.js +268 -0
  469. data/ext/v8/upstream/2.0.6/test/mjsunit/date.js +149 -0
  470. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-backtrace-text.js +122 -0
  471. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-backtrace.js +272 -0
  472. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-breakpoints.js +120 -0
  473. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-changebreakpoint.js +108 -0
  474. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-clearbreakpoint.js +101 -0
  475. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-clearbreakpointgroup.js +117 -0
  476. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-compile-event.js +126 -0
  477. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-conditional-breakpoints.js +171 -0
  478. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-constructed-by.js +60 -0
  479. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-constructor.js +78 -0
  480. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-continue.js +114 -0
  481. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-enable-disable-breakpoints.js +90 -0
  482. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate-arguments.js +93 -0
  483. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate-bool-constructor.js +80 -0
  484. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate-locals.js +132 -0
  485. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate-recursive.js +167 -0
  486. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate-with.js +77 -0
  487. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-evaluate.js +118 -0
  488. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-event-listener.js +73 -0
  489. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-handle.js +252 -0
  490. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-ignore-breakpoints.js +89 -0
  491. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-mirror-cache.js +85 -0
  492. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-multiple-breakpoints.js +105 -0
  493. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-referenced-by.js +112 -0
  494. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-references.js +118 -0
  495. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-scopes.js +761 -0
  496. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-script-breakpoints.js +112 -0
  497. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-script.js +92 -0
  498. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-scripts-request.js +108 -0
  499. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-setbreakpoint.js +165 -0
  500. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-sourceinfo.js +352 -0
  501. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-sourceslice.js +74 -0
  502. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-step-stub-callfunction.js +87 -0
  503. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-step.js +82 -0
  504. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepin-accessor.js +248 -0
  505. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepin-builtin.js +78 -0
  506. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepin-call-function-stub.js +115 -0
  507. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepin-constructor.js +78 -0
  508. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepin-function-call.js +149 -0
  509. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepnext-do-while.js +79 -0
  510. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepout-recursive-function.js +106 -0
  511. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-stepout-to-builtin.js +84 -0
  512. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-suspend.js +96 -0
  513. data/ext/v8/upstream/2.0.6/test/mjsunit/debug-version.js +90 -0
  514. data/ext/v8/upstream/2.0.6/test/mjsunit/declare-locally.js +43 -0
  515. data/ext/v8/upstream/2.0.6/test/mjsunit/deep-recursion.js +66 -0
  516. data/ext/v8/upstream/2.0.6/test/mjsunit/delay-syntax-error.js +41 -0
  517. data/ext/v8/upstream/2.0.6/test/mjsunit/delete-global-properties.js +37 -0
  518. data/ext/v8/upstream/2.0.6/test/mjsunit/delete-in-eval.js +32 -0
  519. data/ext/v8/upstream/2.0.6/test/mjsunit/delete-in-with.js +34 -0
  520. data/ext/v8/upstream/2.0.6/test/mjsunit/delete-vars-from-eval.js +40 -0
  521. data/ext/v8/upstream/2.0.6/test/mjsunit/delete.js +163 -0
  522. data/ext/v8/upstream/2.0.6/test/mjsunit/div-mod.js +157 -0
  523. data/ext/v8/upstream/2.0.6/test/mjsunit/do-not-strip-fc.js +31 -0
  524. data/ext/v8/upstream/2.0.6/test/mjsunit/dont-enum-array-holes.js +35 -0
  525. data/ext/v8/upstream/2.0.6/test/mjsunit/dont-reinit-global-var.js +47 -0
  526. data/ext/v8/upstream/2.0.6/test/mjsunit/double-equals.js +114 -0
  527. data/ext/v8/upstream/2.0.6/test/mjsunit/dtoa.js +32 -0
  528. data/ext/v8/upstream/2.0.6/test/mjsunit/enumeration-order.js +109 -0
  529. data/ext/v8/upstream/2.0.6/test/mjsunit/error-constructors.js +32 -0
  530. data/ext/v8/upstream/2.0.6/test/mjsunit/escape.js +118 -0
  531. data/ext/v8/upstream/2.0.6/test/mjsunit/eval-enclosing-function-name.js +76 -0
  532. data/ext/v8/upstream/2.0.6/test/mjsunit/eval-typeof-non-existing.js +35 -0
  533. data/ext/v8/upstream/2.0.6/test/mjsunit/eval.js +157 -0
  534. data/ext/v8/upstream/2.0.6/test/mjsunit/execScript-case-insensitive.js +34 -0
  535. data/ext/v8/upstream/2.0.6/test/mjsunit/extra-arguments.js +54 -0
  536. data/ext/v8/upstream/2.0.6/test/mjsunit/extra-commas.js +46 -0
  537. data/ext/v8/upstream/2.0.6/test/mjsunit/for-in-null-or-undefined.js +33 -0
  538. data/ext/v8/upstream/2.0.6/test/mjsunit/for-in-special-cases.js +64 -0
  539. data/ext/v8/upstream/2.0.6/test/mjsunit/for-in.js +86 -0
  540. data/ext/v8/upstream/2.0.6/test/mjsunit/fun-as-prototype.js +36 -0
  541. data/ext/v8/upstream/2.0.6/test/mjsunit/fun-name.js +34 -0
  542. data/ext/v8/upstream/2.0.6/test/mjsunit/function-arguments-null.js +30 -0
  543. data/ext/v8/upstream/2.0.6/test/mjsunit/function-caller.js +48 -0
  544. data/ext/v8/upstream/2.0.6/test/mjsunit/function-names.js +133 -0
  545. data/ext/v8/upstream/2.0.6/test/mjsunit/function-property.js +29 -0
  546. data/ext/v8/upstream/2.0.6/test/mjsunit/function-prototype.js +98 -0
  547. data/ext/v8/upstream/2.0.6/test/mjsunit/function-source.js +49 -0
  548. data/ext/v8/upstream/2.0.6/test/mjsunit/function.js +83 -0
  549. data/ext/v8/upstream/2.0.6/test/mjsunit/fuzz-accessors.js +85 -0
  550. data/ext/v8/upstream/2.0.6/test/mjsunit/fuzz-natives.js +159 -0
  551. data/ext/v8/upstream/2.0.6/test/mjsunit/get-own-property-descriptor.js +51 -0
  552. data/ext/v8/upstream/2.0.6/test/mjsunit/get-prototype-of.js +68 -0
  553. data/ext/v8/upstream/2.0.6/test/mjsunit/getter-in-prototype.js +50 -0
  554. data/ext/v8/upstream/2.0.6/test/mjsunit/getter-in-value-prototype.js +35 -0
  555. data/ext/v8/upstream/2.0.6/test/mjsunit/global-const-var-conflicts.js +57 -0
  556. data/ext/v8/upstream/2.0.6/test/mjsunit/global-deleted-property-ic.js +45 -0
  557. data/ext/v8/upstream/2.0.6/test/mjsunit/global-deleted-property-keyed.js +38 -0
  558. data/ext/v8/upstream/2.0.6/test/mjsunit/global-ic.js +48 -0
  559. data/ext/v8/upstream/2.0.6/test/mjsunit/global-load-from-eval-in-with.js +59 -0
  560. data/ext/v8/upstream/2.0.6/test/mjsunit/global-load-from-eval.js +85 -0
  561. data/ext/v8/upstream/2.0.6/test/mjsunit/global-load-from-nested-eval.js +66 -0
  562. data/ext/v8/upstream/2.0.6/test/mjsunit/global-vars-eval.js +34 -0
  563. data/ext/v8/upstream/2.0.6/test/mjsunit/global-vars-with.js +43 -0
  564. data/ext/v8/upstream/2.0.6/test/mjsunit/greedy.js +60 -0
  565. data/ext/v8/upstream/2.0.6/test/mjsunit/has-own-property.js +38 -0
  566. data/ext/v8/upstream/2.0.6/test/mjsunit/html-comments.js +57 -0
  567. data/ext/v8/upstream/2.0.6/test/mjsunit/html-string-funcs.js +47 -0
  568. data/ext/v8/upstream/2.0.6/test/mjsunit/if-in-undefined.js +36 -0
  569. data/ext/v8/upstream/2.0.6/test/mjsunit/in.js +159 -0
  570. data/ext/v8/upstream/2.0.6/test/mjsunit/indexed-accessors.js +120 -0
  571. data/ext/v8/upstream/2.0.6/test/mjsunit/instanceof.js +93 -0
  572. data/ext/v8/upstream/2.0.6/test/mjsunit/integer-to-string.js +35 -0
  573. data/ext/v8/upstream/2.0.6/test/mjsunit/invalid-lhs.js +65 -0
  574. data/ext/v8/upstream/2.0.6/test/mjsunit/invalid-source-element.js +31 -0
  575. data/ext/v8/upstream/2.0.6/test/mjsunit/json.js +207 -0
  576. data/ext/v8/upstream/2.0.6/test/mjsunit/keyed-ic.js +236 -0
  577. data/ext/v8/upstream/2.0.6/test/mjsunit/keyed-storage-extend.js +55 -0
  578. data/ext/v8/upstream/2.0.6/test/mjsunit/large-object-allocation.js +300 -0
  579. data/ext/v8/upstream/2.0.6/test/mjsunit/large-object-literal.js +56 -0
  580. data/ext/v8/upstream/2.0.6/test/mjsunit/lazy-load.js +34 -0
  581. data/ext/v8/upstream/2.0.6/test/mjsunit/leakcheck.js +53 -0
  582. data/ext/v8/upstream/2.0.6/test/mjsunit/length.js +78 -0
  583. data/ext/v8/upstream/2.0.6/test/mjsunit/local-load-from-eval.js +39 -0
  584. data/ext/v8/upstream/2.0.6/test/mjsunit/math-min-max.js +105 -0
  585. data/ext/v8/upstream/2.0.6/test/mjsunit/megamorphic-callbacks.js +70 -0
  586. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-array.js +138 -0
  587. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-boolean.js +59 -0
  588. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-date.js +77 -0
  589. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-error.js +94 -0
  590. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-function.js +90 -0
  591. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-null.js +50 -0
  592. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-number.js +77 -0
  593. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-object.js +227 -0
  594. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-regexp.js +110 -0
  595. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-script.js +100 -0
  596. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-string.js +89 -0
  597. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-undefined.js +50 -0
  598. data/ext/v8/upstream/2.0.6/test/mjsunit/mirror-unresolved-function.js +81 -0
  599. data/ext/v8/upstream/2.0.6/test/mjsunit/mjsunit.js +203 -0
  600. data/ext/v8/upstream/2.0.6/test/mjsunit/mjsunit.status +66 -0
  601. data/ext/v8/upstream/2.0.6/test/mjsunit/mul-exhaustive.js +4511 -0
  602. data/ext/v8/upstream/2.0.6/test/mjsunit/multiple-return.js +62 -0
  603. data/ext/v8/upstream/2.0.6/test/mjsunit/negate-zero.js +42 -0
  604. data/ext/v8/upstream/2.0.6/test/mjsunit/negate.js +59 -0
  605. data/ext/v8/upstream/2.0.6/test/mjsunit/new.js +56 -0
  606. data/ext/v8/upstream/2.0.6/test/mjsunit/newline-in-string.js +46 -0
  607. data/ext/v8/upstream/2.0.6/test/mjsunit/no-branch-elimination.js +36 -0
  608. data/ext/v8/upstream/2.0.6/test/mjsunit/no-octal-constants-above-256.js +32 -0
  609. data/ext/v8/upstream/2.0.6/test/mjsunit/no-semicolon.js +45 -0
  610. data/ext/v8/upstream/2.0.6/test/mjsunit/non-ascii-replace.js +30 -0
  611. data/ext/v8/upstream/2.0.6/test/mjsunit/nul-characters.js +38 -0
  612. data/ext/v8/upstream/2.0.6/test/mjsunit/number-limits.js +47 -0
  613. data/ext/v8/upstream/2.0.6/test/mjsunit/number-string-index-call.js +32 -0
  614. data/ext/v8/upstream/2.0.6/test/mjsunit/number-tostring-small.js +395 -0
  615. data/ext/v8/upstream/2.0.6/test/mjsunit/number-tostring.js +338 -0
  616. data/ext/v8/upstream/2.0.6/test/mjsunit/obj-construct.js +46 -0
  617. data/ext/v8/upstream/2.0.6/test/mjsunit/object-create.js +250 -0
  618. data/ext/v8/upstream/2.0.6/test/mjsunit/object-literal-gc.js +66 -0
  619. data/ext/v8/upstream/2.0.6/test/mjsunit/object-literal.js +105 -0
  620. data/ext/v8/upstream/2.0.6/test/mjsunit/override-read-only-property.js +64 -0
  621. data/ext/v8/upstream/2.0.6/test/mjsunit/parse-int-float.js +85 -0
  622. data/ext/v8/upstream/2.0.6/test/mjsunit/property-load-across-eval.js +85 -0
  623. data/ext/v8/upstream/2.0.6/test/mjsunit/property-object-key.js +36 -0
  624. data/ext/v8/upstream/2.0.6/test/mjsunit/proto.js +33 -0
  625. data/ext/v8/upstream/2.0.6/test/mjsunit/prototype.js +93 -0
  626. data/ext/v8/upstream/2.0.6/test/mjsunit/receiver-in-with-calls.js +47 -0
  627. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-UC16.js +47 -0
  628. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-call-as-function.js +36 -0
  629. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-capture.js +57 -0
  630. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-captures.js +31 -0
  631. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-indexof.js +77 -0
  632. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-lookahead.js +166 -0
  633. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-loop-capture.js +29 -0
  634. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-multiline-stack-trace.js +116 -0
  635. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-multiline.js +112 -0
  636. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-standalones.js +78 -0
  637. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-static.js +167 -0
  638. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp-string-methods.js +51 -0
  639. data/ext/v8/upstream/2.0.6/test/mjsunit/regexp.js +390 -0
  640. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1030466.js +45 -0
  641. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1036894.js +38 -0
  642. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1039610.js +29 -0
  643. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1050043.js +51 -0
  644. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1062422.js +30 -0
  645. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1066899.js +37 -0
  646. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1081309.js +110 -0
  647. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1102760.js +35 -0
  648. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1110164.js +46 -0
  649. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1112051.js +33 -0
  650. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1114040.js +58 -0
  651. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1134697.js +31 -0
  652. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-114.js +43 -0
  653. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-116.js +40 -0
  654. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1170187.js +80 -0
  655. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1173979.js +48 -0
  656. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1175390.js +30 -0
  657. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1177518.js +39 -0
  658. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1177809.js +31 -0
  659. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1178598.js +90 -0
  660. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1182832.js +38 -0
  661. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1187524.js +34 -0
  662. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1199401.js +75 -0
  663. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1199637.js +78 -0
  664. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1200351.js +2032 -0
  665. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1201933.js +40 -0
  666. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1203459.js +29 -0
  667. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1207276.js +36 -0
  668. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1213516.js +40 -0
  669. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1213575.js +41 -0
  670. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1215653.js +365 -0
  671. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-124.js +57 -0
  672. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1254366.js +38 -0
  673. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1327557.js +36 -0
  674. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1341167.js +33 -0
  675. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1346700.js +29 -0
  676. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-137.js +46 -0
  677. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1439135.js +40 -0
  678. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-149.js +28 -0
  679. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1493017.js +52 -0
  680. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-155924.js +46 -0
  681. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-171.js +41 -0
  682. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-176.js +50 -0
  683. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-186.js +72 -0
  684. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-187.js +30 -0
  685. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-189.js +36 -0
  686. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-191.js +42 -0
  687. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-1919169.js +40 -0
  688. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-192.js +38 -0
  689. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-193.js +44 -0
  690. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-20070207.js +42 -0
  691. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-201.js +37 -0
  692. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-219.js +176 -0
  693. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-220.js +31 -0
  694. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-221.js +34 -0
  695. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-2249423.js +40 -0
  696. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-225.js +32 -0
  697. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-227.js +33 -0
  698. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-231.js +92 -0
  699. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-233.js +39 -0
  700. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-244.js +67 -0
  701. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-246.js +31 -0
  702. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-253.js +31 -0
  703. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-254.js +58 -0
  704. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-259.js +33 -0
  705. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-260.js +33 -0
  706. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-263.js +38 -0
  707. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-265.js +64 -0
  708. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-267.js +35 -0
  709. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-269.js +49 -0
  710. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-279.js +62 -0
  711. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-284.js +50 -0
  712. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-286.js +36 -0
  713. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-294.js +43 -0
  714. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-312.js +31 -0
  715. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-317.js +31 -0
  716. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-318.js +35 -0
  717. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-326.js +40 -0
  718. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-334.js +90 -0
  719. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-341.js +36 -0
  720. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-345.js +51 -0
  721. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-349.js +32 -0
  722. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-35.js +33 -0
  723. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-351.js +31 -0
  724. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-386.js +47 -0
  725. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-392.js +34 -0
  726. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-394.js +47 -0
  727. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-396.js +39 -0
  728. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-397.js +34 -0
  729. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-399.js +32 -0
  730. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-406.js +69 -0
  731. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-416.js +38 -0
  732. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-475.js +28 -0
  733. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-483.js +35 -0
  734. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-485.js +64 -0
  735. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-486.js +30 -0
  736. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-490.js +48 -0
  737. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-491.js +47 -0
  738. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-492.js +52 -0
  739. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-496.js +39 -0
  740. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-502.js +38 -0
  741. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-503.js +63 -0
  742. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-515.js +40 -0
  743. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-524.js +32 -0
  744. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-526.js +32 -0
  745. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-540.js +47 -0
  746. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-545.js +47 -0
  747. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-57.js +32 -0
  748. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-580.js +55 -0
  749. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-588599.js +31 -0
  750. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-6-9-regexp.js +30 -0
  751. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-662254.js +40 -0
  752. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-666721.js +53 -0
  753. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-667061.js +90 -0
  754. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-670147.js +34 -0
  755. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-674753.js +87 -0
  756. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-676025.js +31 -0
  757. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-678525.js +59 -0
  758. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-682649.js +30 -0
  759. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-69.js +43 -0
  760. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-734862.js +37 -0
  761. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-737588.js +34 -0
  762. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-74.js +41 -0
  763. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-780423.js +39 -0
  764. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-799761.js +92 -0
  765. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-806473.js +60 -0
  766. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-842017.js +60 -0
  767. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-86.js +46 -0
  768. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-87.js +58 -0
  769. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-874178.js +32 -0
  770. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-875031.js +37 -0
  771. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-877615.js +37 -0
  772. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-892742.js +50 -0
  773. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-900055.js +42 -0
  774. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-900966.js +38 -0
  775. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-91.js +38 -0
  776. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-925537.js +42 -0
  777. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-937896.js +50 -0
  778. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-990205.js +35 -0
  779. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-992733.js +35 -0
  780. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-996542.js +40 -0
  781. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-998565.js +51 -0
  782. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-crbug-18639.js +34 -0
  783. data/ext/v8/upstream/2.0.6/test/mjsunit/regress/regress-r3391.js +77 -0
  784. data/ext/v8/upstream/2.0.6/test/mjsunit/scanner.js +30 -0
  785. data/ext/v8/upstream/2.0.6/test/mjsunit/short-circuit-boolean.js +46 -0
  786. data/ext/v8/upstream/2.0.6/test/mjsunit/simple-constructor.js +140 -0
  787. data/ext/v8/upstream/2.0.6/test/mjsunit/sin-cos.js +45 -0
  788. data/ext/v8/upstream/2.0.6/test/mjsunit/smi-negative-zero.js +100 -0
  789. data/ext/v8/upstream/2.0.6/test/mjsunit/smi-ops.js +671 -0
  790. data/ext/v8/upstream/2.0.6/test/mjsunit/sparse-array-reverse.js +131 -0
  791. data/ext/v8/upstream/2.0.6/test/mjsunit/sparse-array.js +41 -0
  792. data/ext/v8/upstream/2.0.6/test/mjsunit/stack-traces.js +204 -0
  793. data/ext/v8/upstream/2.0.6/test/mjsunit/str-to-num.js +158 -0
  794. data/ext/v8/upstream/2.0.6/test/mjsunit/stress-array-push.js +34 -0
  795. data/ext/v8/upstream/2.0.6/test/mjsunit/strict-equals.js +90 -0
  796. data/ext/v8/upstream/2.0.6/test/mjsunit/string-add.js +195 -0
  797. data/ext/v8/upstream/2.0.6/test/mjsunit/string-case.js +28 -0
  798. data/ext/v8/upstream/2.0.6/test/mjsunit/string-charat.js +53 -0
  799. data/ext/v8/upstream/2.0.6/test/mjsunit/string-charcodeat.js +192 -0
  800. data/ext/v8/upstream/2.0.6/test/mjsunit/string-compare-alignment.js +47 -0
  801. data/ext/v8/upstream/2.0.6/test/mjsunit/string-flatten.js +37 -0
  802. data/ext/v8/upstream/2.0.6/test/mjsunit/string-index.js +154 -0
  803. data/ext/v8/upstream/2.0.6/test/mjsunit/string-indexof-1.js +99 -0
  804. data/ext/v8/upstream/2.0.6/test/mjsunit/string-indexof-2.js +68 -0
  805. data/ext/v8/upstream/2.0.6/test/mjsunit/string-lastindexof.js +88 -0
  806. data/ext/v8/upstream/2.0.6/test/mjsunit/string-localecompare.js +40 -0
  807. data/ext/v8/upstream/2.0.6/test/mjsunit/string-match.js +149 -0
  808. data/ext/v8/upstream/2.0.6/test/mjsunit/string-replace-gc.js +57 -0
  809. data/ext/v8/upstream/2.0.6/test/mjsunit/string-replace.js +182 -0
  810. data/ext/v8/upstream/2.0.6/test/mjsunit/string-search.js +30 -0
  811. data/ext/v8/upstream/2.0.6/test/mjsunit/string-split.js +126 -0
  812. data/ext/v8/upstream/2.0.6/test/mjsunit/substr.js +65 -0
  813. data/ext/v8/upstream/2.0.6/test/mjsunit/switch.js +289 -0
  814. data/ext/v8/upstream/2.0.6/test/mjsunit/testcfg.py +137 -0
  815. data/ext/v8/upstream/2.0.6/test/mjsunit/third_party/array-isarray.js +48 -0
  816. data/ext/v8/upstream/2.0.6/test/mjsunit/third_party/array-splice-webkit.js +62 -0
  817. data/ext/v8/upstream/2.0.6/test/mjsunit/third_party/object-keys.js +68 -0
  818. data/ext/v8/upstream/2.0.6/test/mjsunit/third_party/regexp-pcre.js +6603 -0
  819. data/ext/v8/upstream/2.0.6/test/mjsunit/third_party/string-trim.js +107 -0
  820. data/ext/v8/upstream/2.0.6/test/mjsunit/this-in-callbacks.js +47 -0
  821. data/ext/v8/upstream/2.0.6/test/mjsunit/this.js +46 -0
  822. data/ext/v8/upstream/2.0.6/test/mjsunit/throw-and-catch-function.js +50 -0
  823. data/ext/v8/upstream/2.0.6/test/mjsunit/throw-exception-for-null-access.js +37 -0
  824. data/ext/v8/upstream/2.0.6/test/mjsunit/to-precision.js +82 -0
  825. data/ext/v8/upstream/2.0.6/test/mjsunit/to_number_order.js +129 -0
  826. data/ext/v8/upstream/2.0.6/test/mjsunit/tobool.js +36 -0
  827. data/ext/v8/upstream/2.0.6/test/mjsunit/toint32.js +129 -0
  828. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/codemap.js +180 -0
  829. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/consarray.js +60 -0
  830. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/csvparser.js +79 -0
  831. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/logreader.js +98 -0
  832. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/profile.js +348 -0
  833. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/profile_view.js +95 -0
  834. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/splaytree.js +166 -0
  835. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/tickprocessor-test.default +55 -0
  836. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/tickprocessor-test.gc-state +21 -0
  837. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/tickprocessor-test.ignore-unknown +51 -0
  838. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/tickprocessor-test.separate-ic +61 -0
  839. data/ext/v8/upstream/2.0.6/test/mjsunit/tools/tickprocessor.js +409 -0
  840. data/ext/v8/upstream/2.0.6/test/mjsunit/top-level-assignments.js +107 -0
  841. data/ext/v8/upstream/2.0.6/test/mjsunit/touint32.js +72 -0
  842. data/ext/v8/upstream/2.0.6/test/mjsunit/transcendentals.js +49 -0
  843. data/ext/v8/upstream/2.0.6/test/mjsunit/try-catch-extension-object.js +58 -0
  844. data/ext/v8/upstream/2.0.6/test/mjsunit/try-catch-scopes.js +42 -0
  845. data/ext/v8/upstream/2.0.6/test/mjsunit/try-finally-nested.js +46 -0
  846. data/ext/v8/upstream/2.0.6/test/mjsunit/try.js +394 -0
  847. data/ext/v8/upstream/2.0.6/test/mjsunit/typeof.js +40 -0
  848. data/ext/v8/upstream/2.0.6/test/mjsunit/undeletable-functions.js +181 -0
  849. data/ext/v8/upstream/2.0.6/test/mjsunit/unicode-case-overoptimization.js +35 -0
  850. data/ext/v8/upstream/2.0.6/test/mjsunit/unicode-string-to-number.js +46 -0
  851. data/ext/v8/upstream/2.0.6/test/mjsunit/unicode-test.js +9169 -0
  852. data/ext/v8/upstream/2.0.6/test/mjsunit/unusual-constructor.js +38 -0
  853. data/ext/v8/upstream/2.0.6/test/mjsunit/uri.js +78 -0
  854. data/ext/v8/upstream/2.0.6/test/mjsunit/value-callic-prototype-change.js +94 -0
  855. data/ext/v8/upstream/2.0.6/test/mjsunit/var.js +37 -0
  856. data/ext/v8/upstream/2.0.6/test/mjsunit/with-function-expression.js +36 -0
  857. data/ext/v8/upstream/2.0.6/test/mjsunit/with-leave.js +61 -0
  858. data/ext/v8/upstream/2.0.6/test/mjsunit/with-parameter-access.js +47 -0
  859. data/ext/v8/upstream/2.0.6/test/mjsunit/with-prototype.js +44 -0
  860. data/ext/v8/upstream/2.0.6/test/mjsunit/with-value.js +38 -0
  861. data/ext/v8/upstream/2.0.6/test/mozilla/mozilla-shell-emulation.js +37 -0
  862. data/ext/v8/upstream/2.0.6/test/mozilla/mozilla.status +815 -0
  863. data/ext/v8/upstream/2.0.6/test/mozilla/testcfg.py +138 -0
  864. data/ext/v8/upstream/2.0.6/test/sputnik/README +6 -0
  865. data/ext/v8/upstream/2.0.6/test/sputnik/sputnik.status +318 -0
  866. data/ext/v8/upstream/2.0.6/test/sputnik/testcfg.py +112 -0
  867. data/ext/v8/upstream/2.0.6/tools/codemap.js +258 -0
  868. data/ext/v8/upstream/2.0.6/tools/consarray.js +93 -0
  869. data/ext/v8/upstream/2.0.6/tools/csvparser.js +98 -0
  870. data/ext/v8/upstream/2.0.6/tools/gyp/v8.gyp +620 -0
  871. data/ext/v8/upstream/2.0.6/tools/js2c.py +376 -0
  872. data/ext/v8/upstream/2.0.6/tools/js2c.pyc +0 -0
  873. data/ext/v8/upstream/2.0.6/tools/jsmin.py +280 -0
  874. data/ext/v8/upstream/2.0.6/tools/jsmin.pyc +0 -0
  875. data/ext/v8/upstream/2.0.6/tools/linux-tick-processor +24 -0
  876. data/ext/v8/upstream/2.0.6/tools/linux-tick-processor.py +78 -0
  877. data/ext/v8/upstream/2.0.6/tools/logreader.js +320 -0
  878. data/ext/v8/upstream/2.0.6/tools/mac-nm +18 -0
  879. data/ext/v8/upstream/2.0.6/tools/mac-tick-processor +6 -0
  880. data/ext/v8/upstream/2.0.6/tools/oprofile/annotate +7 -0
  881. data/ext/v8/upstream/2.0.6/tools/oprofile/common +19 -0
  882. data/ext/v8/upstream/2.0.6/tools/oprofile/dump +7 -0
  883. data/ext/v8/upstream/2.0.6/tools/oprofile/report +7 -0
  884. data/ext/v8/upstream/2.0.6/tools/oprofile/reset +7 -0
  885. data/ext/v8/upstream/2.0.6/tools/oprofile/run +14 -0
  886. data/ext/v8/upstream/2.0.6/tools/oprofile/shutdown +7 -0
  887. data/ext/v8/upstream/2.0.6/tools/oprofile/start +7 -0
  888. data/ext/v8/upstream/2.0.6/tools/presubmit.py +299 -0
  889. data/ext/v8/upstream/2.0.6/tools/process-heap-prof.py +120 -0
  890. data/ext/v8/upstream/2.0.6/tools/profile.js +621 -0
  891. data/ext/v8/upstream/2.0.6/tools/profile_view.js +224 -0
  892. data/ext/v8/upstream/2.0.6/tools/run-valgrind.py +77 -0
  893. data/ext/v8/upstream/2.0.6/tools/splaytree.js +322 -0
  894. data/ext/v8/upstream/2.0.6/tools/splaytree.py +226 -0
  895. data/ext/v8/upstream/2.0.6/tools/stats-viewer.py +456 -0
  896. data/ext/v8/upstream/2.0.6/tools/test.py +1370 -0
  897. data/ext/v8/upstream/2.0.6/tools/tickprocessor-driver.js +53 -0
  898. data/ext/v8/upstream/2.0.6/tools/tickprocessor.js +731 -0
  899. data/ext/v8/upstream/2.0.6/tools/tickprocessor.py +535 -0
  900. data/ext/v8/upstream/2.0.6/tools/utils.py +82 -0
  901. data/ext/v8/upstream/2.0.6/tools/utils.pyc +0 -0
  902. data/ext/v8/upstream/2.0.6/tools/visual_studio/README.txt +71 -0
  903. data/ext/v8/upstream/2.0.6/tools/visual_studio/arm.vsprops +14 -0
  904. data/ext/v8/upstream/2.0.6/tools/visual_studio/common.vsprops +35 -0
  905. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8.vcproj +199 -0
  906. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_arm.vcproj +199 -0
  907. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_x64.vcproj +201 -0
  908. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8js2c.cmd +6 -0
  909. data/ext/v8/upstream/2.0.6/tools/visual_studio/debug.vsprops +17 -0
  910. data/ext/v8/upstream/2.0.6/tools/visual_studio/ia32.vsprops +13 -0
  911. data/ext/v8/upstream/2.0.6/tools/visual_studio/js2c.cmd +6 -0
  912. data/ext/v8/upstream/2.0.6/tools/visual_studio/release.vsprops +24 -0
  913. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.sln +101 -0
  914. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.vcproj +223 -0
  915. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.sln +74 -0
  916. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.vcproj +223 -0
  917. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base.vcproj +971 -0
  918. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_arm.vcproj +983 -0
  919. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_x64.vcproj +959 -0
  920. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest.vcproj +255 -0
  921. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_arm.vcproj +243 -0
  922. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_x64.vcproj +257 -0
  923. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot.vcproj +151 -0
  924. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot_x64.vcproj +151 -0
  925. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample.vcproj +151 -0
  926. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_arm.vcproj +151 -0
  927. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_x64.vcproj +151 -0
  928. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample.vcproj +151 -0
  929. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_arm.vcproj +151 -0
  930. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_x64.vcproj +153 -0
  931. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot.vcproj +142 -0
  932. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc.vcproj +92 -0
  933. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc_x64.vcproj +92 -0
  934. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_x64.vcproj +142 -0
  935. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.sln +101 -0
  936. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.vcproj +223 -0
  937. data/ext/v8/upstream/2.0.6/tools/visual_studio/x64.vsprops +13 -0
  938. data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.bat +5 -0
  939. data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.py +137 -0
  940. data/ext/v8/upstream/scons/CHANGES.txt +5183 -0
  941. data/ext/v8/upstream/scons/LICENSE.txt +20 -0
  942. data/ext/v8/upstream/scons/MANIFEST +202 -0
  943. data/ext/v8/upstream/scons/PKG-INFO +13 -0
  944. data/ext/v8/upstream/scons/README.txt +273 -0
  945. data/ext/v8/upstream/scons/RELEASE.txt +1040 -0
  946. data/ext/v8/upstream/scons/engine/SCons/Action.py +1256 -0
  947. data/ext/v8/upstream/scons/engine/SCons/Builder.py +868 -0
  948. data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +217 -0
  949. data/ext/v8/upstream/scons/engine/SCons/Conftest.py +794 -0
  950. data/ext/v8/upstream/scons/engine/SCons/Debug.py +237 -0
  951. data/ext/v8/upstream/scons/engine/SCons/Defaults.py +485 -0
  952. data/ext/v8/upstream/scons/engine/SCons/Environment.py +2327 -0
  953. data/ext/v8/upstream/scons/engine/SCons/Errors.py +207 -0
  954. data/ext/v8/upstream/scons/engine/SCons/Executor.py +636 -0
  955. data/ext/v8/upstream/scons/engine/SCons/Job.py +435 -0
  956. data/ext/v8/upstream/scons/engine/SCons/Memoize.py +292 -0
  957. data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +153 -0
  958. data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +3220 -0
  959. data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +128 -0
  960. data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +1341 -0
  961. data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +50 -0
  962. data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +50 -0
  963. data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +50 -0
  964. data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +50 -0
  965. data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +76 -0
  966. data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +74 -0
  967. data/ext/v8/upstream/scons/engine/SCons/PathList.py +232 -0
  968. data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +236 -0
  969. data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +70 -0
  970. data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +55 -0
  971. data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +46 -0
  972. data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +46 -0
  973. data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +44 -0
  974. data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +58 -0
  975. data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +264 -0
  976. data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +50 -0
  977. data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +386 -0
  978. data/ext/v8/upstream/scons/engine/SCons/SConf.py +1038 -0
  979. data/ext/v8/upstream/scons/engine/SCons/SConsign.py +381 -0
  980. data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +132 -0
  981. data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +74 -0
  982. data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +111 -0
  983. data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +320 -0
  984. data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +48 -0
  985. data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +378 -0
  986. data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +103 -0
  987. data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +55 -0
  988. data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +415 -0
  989. data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +386 -0
  990. data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +1360 -0
  991. data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +944 -0
  992. data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +642 -0
  993. data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +414 -0
  994. data/ext/v8/upstream/scons/engine/SCons/Sig.py +63 -0
  995. data/ext/v8/upstream/scons/engine/SCons/Subst.py +911 -0
  996. data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +1030 -0
  997. data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +61 -0
  998. data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +65 -0
  999. data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +73 -0
  1000. data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +247 -0
  1001. data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +324 -0
  1002. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
  1003. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
  1004. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +210 -0
  1005. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +84 -0
  1006. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +321 -0
  1007. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +367 -0
  1008. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +497 -0
  1009. data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +104 -0
  1010. data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +138 -0
  1011. data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +64 -0
  1012. data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +64 -0
  1013. data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +71 -0
  1014. data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +675 -0
  1015. data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +82 -0
  1016. data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +74 -0
  1017. data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +80 -0
  1018. data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +76 -0
  1019. data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +71 -0
  1020. data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +63 -0
  1021. data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +78 -0
  1022. data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +82 -0
  1023. data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +99 -0
  1024. data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +114 -0
  1025. data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +58 -0
  1026. data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +50 -0
  1027. data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +224 -0
  1028. data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +64 -0
  1029. data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +125 -0
  1030. data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +94 -0
  1031. data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +62 -0
  1032. data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +62 -0
  1033. data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +63 -0
  1034. data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +98 -0
  1035. data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +63 -0
  1036. data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +90 -0
  1037. data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +73 -0
  1038. data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +53 -0
  1039. data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +80 -0
  1040. data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +64 -0
  1041. data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +63 -0
  1042. data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +81 -0
  1043. data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +85 -0
  1044. data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +53 -0
  1045. data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +77 -0
  1046. data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +59 -0
  1047. data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +52 -0
  1048. data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +72 -0
  1049. data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +90 -0
  1050. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +59 -0
  1051. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +60 -0
  1052. data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +229 -0
  1053. data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +490 -0
  1054. data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +71 -0
  1055. data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +110 -0
  1056. data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +234 -0
  1057. data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +138 -0
  1058. data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +79 -0
  1059. data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +99 -0
  1060. data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +121 -0
  1061. data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +112 -0
  1062. data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +63 -0
  1063. data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +77 -0
  1064. data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +90 -0
  1065. data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +159 -0
  1066. data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +64 -0
  1067. data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +266 -0
  1068. data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +50 -0
  1069. data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +269 -0
  1070. data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +1439 -0
  1071. data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +208 -0
  1072. data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +107 -0
  1073. data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +72 -0
  1074. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +314 -0
  1075. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
  1076. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +526 -0
  1077. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +367 -0
  1078. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
  1079. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
  1080. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
  1081. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
  1082. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +44 -0
  1083. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +44 -0
  1084. data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +78 -0
  1085. data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +83 -0
  1086. data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +108 -0
  1087. data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +336 -0
  1088. data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +121 -0
  1089. data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +70 -0
  1090. data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +132 -0
  1091. data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +68 -0
  1092. data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +58 -0
  1093. data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +53 -0
  1094. data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +63 -0
  1095. data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +67 -0
  1096. data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +142 -0
  1097. data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +58 -0
  1098. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +63 -0
  1099. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +64 -0
  1100. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +64 -0
  1101. data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +77 -0
  1102. data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +186 -0
  1103. data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +73 -0
  1104. data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +805 -0
  1105. data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +175 -0
  1106. data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +53 -0
  1107. data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +100 -0
  1108. data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +131 -0
  1109. data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +100 -0
  1110. data/ext/v8/upstream/scons/engine/SCons/Util.py +1645 -0
  1111. data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +91 -0
  1112. data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +107 -0
  1113. data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +139 -0
  1114. data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +109 -0
  1115. data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +147 -0
  1116. data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +317 -0
  1117. data/ext/v8/upstream/scons/engine/SCons/Warnings.py +228 -0
  1118. data/ext/v8/upstream/scons/engine/SCons/__init__.py +49 -0
  1119. data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +302 -0
  1120. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +98 -0
  1121. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +91 -0
  1122. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +124 -0
  1123. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +1725 -0
  1124. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +583 -0
  1125. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +176 -0
  1126. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +325 -0
  1127. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +1296 -0
  1128. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +382 -0
  1129. data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +187 -0
  1130. data/ext/v8/upstream/scons/engine/SCons/cpp.py +598 -0
  1131. data/ext/v8/upstream/scons/engine/SCons/dblite.py +248 -0
  1132. data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +77 -0
  1133. data/ext/v8/upstream/scons/os_spawnv_fix.diff +83 -0
  1134. data/ext/v8/upstream/scons/scons-time.1 +1017 -0
  1135. data/ext/v8/upstream/scons/scons.1 +15179 -0
  1136. data/ext/v8/upstream/scons/sconsign.1 +208 -0
  1137. data/ext/v8/upstream/scons/script/scons +184 -0
  1138. data/ext/v8/upstream/scons/script/scons-time +1529 -0
  1139. data/ext/v8/upstream/scons/script/scons.bat +31 -0
  1140. data/ext/v8/upstream/scons/script/sconsign +508 -0
  1141. data/ext/v8/upstream/scons/setup.cfg +6 -0
  1142. data/ext/v8/upstream/scons/setup.py +427 -0
  1143. data/ext/v8/v8_cxt.cpp +3 -0
  1144. data/ext/v8/v8_cxt.h +9 -0
  1145. data/lib/v8.rb +1 -1
  1146. data/spec/redjs/jsapi_spec.rb +6 -0
  1147. data/therubyracer.gemspec +4 -4
  1148. metadata +1146 -2
@@ -0,0 +1,277 @@
1
+ // Copyright 2009 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #ifndef V8_HEAP_PROFILER_H_
29
+ #define V8_HEAP_PROFILER_H_
30
+
31
+ namespace v8 {
32
+ namespace internal {
33
+
34
+ #ifdef ENABLE_LOGGING_AND_PROFILING
35
+
36
+ // The HeapProfiler writes data to the log files, which can be postprocessed
37
+ // to generate .hp files for use by the GHC/Valgrind tool hp2ps.
38
+ class HeapProfiler {
39
+ public:
40
+ // Write a single heap sample to the log file.
41
+ static void WriteSample();
42
+
43
+ private:
44
+ // Update the array info with stats from obj.
45
+ static void CollectStats(HeapObject* obj, HistogramInfo* info);
46
+ };
47
+
48
+
49
+ // JSObjectsCluster describes a group of JS objects that are
50
+ // considered equivalent in terms of a particular profile.
51
+ class JSObjectsCluster BASE_EMBEDDED {
52
+ public:
53
+ // These special cases are used in retainer profile.
54
+ enum SpecialCase {
55
+ ROOTS = 1,
56
+ GLOBAL_PROPERTY = 2,
57
+ CODE = 3,
58
+ SELF = 100 // This case is used in ClustersCoarser only.
59
+ };
60
+
61
+ JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
62
+ explicit JSObjectsCluster(String* constructor)
63
+ : constructor_(constructor), instance_(NULL) {}
64
+ explicit JSObjectsCluster(SpecialCase special)
65
+ : constructor_(FromSpecialCase(special)), instance_(NULL) {}
66
+ JSObjectsCluster(String* constructor, Object* instance)
67
+ : constructor_(constructor), instance_(instance) {}
68
+
69
+ static int CompareConstructors(const JSObjectsCluster& a,
70
+ const JSObjectsCluster& b) {
71
+ // Strings are unique, so it is sufficient to compare their pointers.
72
+ return a.constructor_ == b.constructor_ ? 0
73
+ : (a.constructor_ < b.constructor_ ? -1 : 1);
74
+ }
75
+ static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
76
+ // Strings are unique, so it is sufficient to compare their pointers.
77
+ const int cons_cmp = CompareConstructors(a, b);
78
+ return cons_cmp == 0 ?
79
+ (a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
80
+ : cons_cmp;
81
+ }
82
+ static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
83
+ return Compare(*a, *b);
84
+ }
85
+
86
+ bool is_null() const { return constructor_ == NULL; }
87
+ bool can_be_coarsed() const { return instance_ != NULL; }
88
+ String* constructor() const { return constructor_; }
89
+
90
+ void Print(StringStream* accumulator) const;
91
+ // Allows null clusters to be printed.
92
+ void DebugPrint(StringStream* accumulator) const;
93
+
94
+ private:
95
+ static String* FromSpecialCase(SpecialCase special) {
96
+ // We use symbols that are illegal JS identifiers to identify special cases.
97
+ // Their actual value is irrelevant for us.
98
+ switch (special) {
99
+ case ROOTS: return Heap::result_symbol();
100
+ case GLOBAL_PROPERTY: return Heap::code_symbol();
101
+ case CODE: return Heap::arguments_shadow_symbol();
102
+ case SELF: return Heap::catch_var_symbol();
103
+ default:
104
+ UNREACHABLE();
105
+ return NULL;
106
+ }
107
+ }
108
+
109
+ String* constructor_;
110
+ Object* instance_;
111
+ };
112
+
113
+
114
+ struct JSObjectsClusterTreeConfig {
115
+ typedef JSObjectsCluster Key;
116
+ typedef NumberAndSizeInfo Value;
117
+ static const Key kNoKey;
118
+ static const Value kNoValue;
119
+ static int Compare(const Key& a, const Key& b) {
120
+ return Key::Compare(a, b);
121
+ }
122
+ };
123
+ typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
124
+
125
+
126
+ // ConstructorHeapProfile is responsible for gathering and logging
127
+ // "constructor profile" of JS objects allocated on heap.
128
+ // It is run during garbage collection cycle, thus it doesn't need
129
+ // to use handles.
130
+ class ConstructorHeapProfile BASE_EMBEDDED {
131
+ public:
132
+ ConstructorHeapProfile();
133
+ virtual ~ConstructorHeapProfile() {}
134
+ void CollectStats(HeapObject* obj);
135
+ void PrintStats();
136
+ // Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
137
+ virtual void Call(const JSObjectsCluster& cluster,
138
+ const NumberAndSizeInfo& number_and_size);
139
+
140
+ private:
141
+ ZoneScope zscope_;
142
+ JSObjectsClusterTree js_objects_info_tree_;
143
+ };
144
+
145
+
146
+ // JSObjectsRetainerTree is used to represent retainer graphs using
147
+ // adjacency list form:
148
+ //
149
+ // Cluster -> (Cluster -> NumberAndSizeInfo)
150
+ //
151
+ // Subordinate splay trees are stored by pointer. They are zone-allocated,
152
+ // so it isn't needed to manage their lifetime.
153
+ //
154
+ struct JSObjectsRetainerTreeConfig {
155
+ typedef JSObjectsCluster Key;
156
+ typedef JSObjectsClusterTree* Value;
157
+ static const Key kNoKey;
158
+ static const Value kNoValue;
159
+ static int Compare(const Key& a, const Key& b) {
160
+ return Key::Compare(a, b);
161
+ }
162
+ };
163
+ typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;
164
+
165
+
166
+ class ClustersCoarser BASE_EMBEDDED {
167
+ public:
168
+ ClustersCoarser();
169
+
170
+ // Processes a given retainer graph.
171
+ void Process(JSObjectsRetainerTree* tree);
172
+
173
+ // Returns an equivalent cluster (can be the cluster itself).
174
+ // If the given cluster doesn't have an equivalent, returns null cluster.
175
+ JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
176
+ // Returns whether a cluster can be substitued with an equivalent and thus,
177
+ // skipped in some cases.
178
+ bool HasAnEquivalent(const JSObjectsCluster& cluster);
179
+
180
+ // Used by JSObjectsRetainerTree::ForEach.
181
+ void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
182
+ void Call(const JSObjectsCluster& cluster,
183
+ const NumberAndSizeInfo& number_and_size);
184
+
185
+ private:
186
+ // Stores a list of back references for a cluster.
187
+ struct ClusterBackRefs {
188
+ explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
189
+ ClusterBackRefs(const ClusterBackRefs& src);
190
+ ClusterBackRefs& operator=(const ClusterBackRefs& src);
191
+
192
+ static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
193
+ void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
194
+ static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }
195
+
196
+ JSObjectsCluster cluster;
197
+ ZoneList<JSObjectsCluster> refs;
198
+ };
199
+ typedef ZoneList<ClusterBackRefs> SimilarityList;
200
+
201
+ // A tree for storing a list of equivalents for a cluster.
202
+ struct ClusterEqualityConfig {
203
+ typedef JSObjectsCluster Key;
204
+ typedef JSObjectsCluster Value;
205
+ static const Key kNoKey;
206
+ static const Value kNoValue;
207
+ static int Compare(const Key& a, const Key& b) {
208
+ return Key::Compare(a, b);
209
+ }
210
+ };
211
+ typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;
212
+
213
+ static int ClusterBackRefsCmp(const ClusterBackRefs* a,
214
+ const ClusterBackRefs* b) {
215
+ return ClusterBackRefs::Compare(*a, *b);
216
+ }
217
+ int DoProcess(JSObjectsRetainerTree* tree);
218
+ int FillEqualityTree();
219
+
220
+ static const int kInitialBackrefsListCapacity = 2;
221
+ static const int kInitialSimilarityListCapacity = 2000;
222
+ // Number of passes for finding equivalents. Limits the length of paths
223
+ // that can be considered equivalent.
224
+ static const int kMaxPassesCount = 10;
225
+
226
+ ZoneScope zscope_;
227
+ SimilarityList sim_list_;
228
+ EqualityTree eq_tree_;
229
+ ClusterBackRefs* current_pair_;
230
+ JSObjectsRetainerTree* current_set_;
231
+ const JSObjectsCluster* self_;
232
+ };
233
+
234
+
235
+ // RetainerHeapProfile is responsible for gathering and logging
236
+ // "retainer profile" of JS objects allocated on heap.
237
+ // It is run during garbage collection cycle, thus it doesn't need
238
+ // to use handles.
239
+ class RetainerHeapProfile BASE_EMBEDDED {
240
+ public:
241
+ class Printer {
242
+ public:
243
+ virtual ~Printer() {}
244
+ virtual void PrintRetainers(const JSObjectsCluster& cluster,
245
+ const StringStream& retainers) = 0;
246
+ };
247
+
248
+ RetainerHeapProfile();
249
+ void CollectStats(HeapObject* obj);
250
+ void PrintStats();
251
+ void DebugPrintStats(Printer* printer);
252
+ void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);
253
+
254
+ private:
255
+ ZoneScope zscope_;
256
+ JSObjectsRetainerTree retainers_tree_;
257
+ ClustersCoarser coarser_;
258
+ };
259
+
260
+
261
+ class ProducerHeapProfile : public AllStatic {
262
+ public:
263
+ static void Setup();
264
+ static void RecordJSObjectAllocation(Object* obj) {
265
+ if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
266
+ }
267
+
268
+ private:
269
+ static void DoRecordJSObjectAllocation(Object* obj);
270
+ static bool can_log_;
271
+ };
272
+
273
+ #endif // ENABLE_LOGGING_AND_PROFILING
274
+
275
+ } } // namespace v8::internal
276
+
277
+ #endif // V8_HEAP_PROFILER_H_
@@ -0,0 +1,4204 @@
1
+ // Copyright 2009 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+
30
+ #include "accessors.h"
31
+ #include "api.h"
32
+ #include "bootstrapper.h"
33
+ #include "codegen-inl.h"
34
+ #include "compilation-cache.h"
35
+ #include "debug.h"
36
+ #include "heap-profiler.h"
37
+ #include "global-handles.h"
38
+ #include "mark-compact.h"
39
+ #include "natives.h"
40
+ #include "scanner.h"
41
+ #include "scopeinfo.h"
42
+ #include "snapshot.h"
43
+ #include "v8threads.h"
44
+ #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
45
+ #include "regexp-macro-assembler.h"
46
+ #include "arm/regexp-macro-assembler-arm.h"
47
+ #endif
48
+
49
+ namespace v8 {
50
+ namespace internal {
51
+
52
+
53
+ String* Heap::hidden_symbol_;
54
+ Object* Heap::roots_[Heap::kRootListLength];
55
+
56
+
57
+ NewSpace Heap::new_space_;
58
+ OldSpace* Heap::old_pointer_space_ = NULL;
59
+ OldSpace* Heap::old_data_space_ = NULL;
60
+ OldSpace* Heap::code_space_ = NULL;
61
+ MapSpace* Heap::map_space_ = NULL;
62
+ CellSpace* Heap::cell_space_ = NULL;
63
+ LargeObjectSpace* Heap::lo_space_ = NULL;
64
+
65
+ static const int kMinimumPromotionLimit = 2*MB;
66
+ static const int kMinimumAllocationLimit = 8*MB;
67
+
68
+ int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
69
+ int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
70
+
71
+ int Heap::old_gen_exhausted_ = false;
72
+
73
+ int Heap::amount_of_external_allocated_memory_ = 0;
74
+ int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
75
+
76
+ // semispace_size_ should be a power of 2 and old_generation_size_ should be
77
+ // a multiple of Page::kPageSize.
78
+ #if defined(ANDROID)
79
+ int Heap::max_semispace_size_ = 512*KB;
80
+ int Heap::max_old_generation_size_ = 128*MB;
81
+ int Heap::initial_semispace_size_ = 128*KB;
82
+ size_t Heap::code_range_size_ = 0;
83
+ #elif defined(V8_TARGET_ARCH_X64)
84
+ int Heap::max_semispace_size_ = 16*MB;
85
+ int Heap::max_old_generation_size_ = 1*GB;
86
+ int Heap::initial_semispace_size_ = 1*MB;
87
+ size_t Heap::code_range_size_ = 512*MB;
88
+ #else
89
+ int Heap::max_semispace_size_ = 8*MB;
90
+ int Heap::max_old_generation_size_ = 512*MB;
91
+ int Heap::initial_semispace_size_ = 512*KB;
92
+ size_t Heap::code_range_size_ = 0;
93
+ #endif
94
+
95
+ // The snapshot semispace size will be the default semispace size if
96
+ // snapshotting is used and will be the requested semispace size as
97
+ // set up by ConfigureHeap otherwise.
98
+ int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
99
+
100
+ GCCallback Heap::global_gc_prologue_callback_ = NULL;
101
+ GCCallback Heap::global_gc_epilogue_callback_ = NULL;
102
+
103
+ // Variables set based on semispace_size_ and old_generation_size_ in
104
+ // ConfigureHeap.
105
+
106
+ // Will be 4 * reserved_semispace_size_ to ensure that young
107
+ // generation can be aligned to its size.
108
+ int Heap::survived_since_last_expansion_ = 0;
109
+ int Heap::external_allocation_limit_ = 0;
110
+
111
+ Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
112
+
113
+ int Heap::mc_count_ = 0;
114
+ int Heap::gc_count_ = 0;
115
+
116
+ int Heap::always_allocate_scope_depth_ = 0;
117
+ int Heap::linear_allocation_scope_depth_ = 0;
118
+ bool Heap::context_disposed_pending_ = false;
119
+
120
+ #ifdef DEBUG
121
+ bool Heap::allocation_allowed_ = true;
122
+
123
+ int Heap::allocation_timeout_ = 0;
124
+ bool Heap::disallow_allocation_failure_ = false;
125
+ #endif // DEBUG
126
+
127
+
128
+ int Heap::Capacity() {
129
+ if (!HasBeenSetup()) return 0;
130
+
131
+ return new_space_.Capacity() +
132
+ old_pointer_space_->Capacity() +
133
+ old_data_space_->Capacity() +
134
+ code_space_->Capacity() +
135
+ map_space_->Capacity() +
136
+ cell_space_->Capacity();
137
+ }
138
+
139
+
140
+ int Heap::CommittedMemory() {
141
+ if (!HasBeenSetup()) return 0;
142
+
143
+ return new_space_.CommittedMemory() +
144
+ old_pointer_space_->CommittedMemory() +
145
+ old_data_space_->CommittedMemory() +
146
+ code_space_->CommittedMemory() +
147
+ map_space_->CommittedMemory() +
148
+ cell_space_->CommittedMemory() +
149
+ lo_space_->Size();
150
+ }
151
+
152
+
153
+ int Heap::Available() {
154
+ if (!HasBeenSetup()) return 0;
155
+
156
+ return new_space_.Available() +
157
+ old_pointer_space_->Available() +
158
+ old_data_space_->Available() +
159
+ code_space_->Available() +
160
+ map_space_->Available() +
161
+ cell_space_->Available();
162
+ }
163
+
164
+
165
+ bool Heap::HasBeenSetup() {
166
+ return old_pointer_space_ != NULL &&
167
+ old_data_space_ != NULL &&
168
+ code_space_ != NULL &&
169
+ map_space_ != NULL &&
170
+ cell_space_ != NULL &&
171
+ lo_space_ != NULL;
172
+ }
173
+
174
+
175
+ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
176
+ // Is global GC requested?
177
+ if (space != NEW_SPACE || FLAG_gc_global) {
178
+ Counters::gc_compactor_caused_by_request.Increment();
179
+ return MARK_COMPACTOR;
180
+ }
181
+
182
+ // Is enough data promoted to justify a global GC?
183
+ if (OldGenerationPromotionLimitReached()) {
184
+ Counters::gc_compactor_caused_by_promoted_data.Increment();
185
+ return MARK_COMPACTOR;
186
+ }
187
+
188
+ // Have allocation in OLD and LO failed?
189
+ if (old_gen_exhausted_) {
190
+ Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
191
+ return MARK_COMPACTOR;
192
+ }
193
+
194
+ // Is there enough space left in OLD to guarantee that a scavenge can
195
+ // succeed?
196
+ //
197
+ // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
198
+ // for object promotion. It counts only the bytes that the memory
199
+ // allocator has not yet allocated from the OS and assigned to any space,
200
+ // and does not count available bytes already in the old space or code
201
+ // space. Undercounting is safe---we may get an unrequested full GC when
202
+ // a scavenge would have succeeded.
203
+ if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
204
+ Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
205
+ return MARK_COMPACTOR;
206
+ }
207
+
208
+ // Default
209
+ return SCAVENGER;
210
+ }
211
+
212
+
213
+ // TODO(1238405): Combine the infrastructure for --heap-stats and
214
+ // --log-gc to avoid the complicated preprocessor and flag testing.
215
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
216
+ void Heap::ReportStatisticsBeforeGC() {
217
+ // Heap::ReportHeapStatistics will also log NewSpace statistics when
218
+ // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set. The
219
+ // following logic is used to avoid double logging.
220
+ #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
221
+ if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
222
+ if (FLAG_heap_stats) {
223
+ ReportHeapStatistics("Before GC");
224
+ } else if (FLAG_log_gc) {
225
+ new_space_.ReportStatistics();
226
+ }
227
+ if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
228
+ #elif defined(DEBUG)
229
+ if (FLAG_heap_stats) {
230
+ new_space_.CollectStatistics();
231
+ ReportHeapStatistics("Before GC");
232
+ new_space_.ClearHistograms();
233
+ }
234
+ #elif defined(ENABLE_LOGGING_AND_PROFILING)
235
+ if (FLAG_log_gc) {
236
+ new_space_.CollectStatistics();
237
+ new_space_.ReportStatistics();
238
+ new_space_.ClearHistograms();
239
+ }
240
+ #endif
241
+ }
242
+
243
+
244
+ #if defined(ENABLE_LOGGING_AND_PROFILING)
245
+ void Heap::PrintShortHeapStatistics() {
246
+ if (!FLAG_trace_gc_verbose) return;
247
+ PrintF("Memory allocator, used: %8d, available: %8d\n",
248
+ MemoryAllocator::Size(),
249
+ MemoryAllocator::Available());
250
+ PrintF("New space, used: %8d, available: %8d\n",
251
+ Heap::new_space_.Size(),
252
+ new_space_.Available());
253
+ PrintF("Old pointers, used: %8d, available: %8d, waste: %8d\n",
254
+ old_pointer_space_->Size(),
255
+ old_pointer_space_->Available(),
256
+ old_pointer_space_->Waste());
257
+ PrintF("Old data space, used: %8d, available: %8d, waste: %8d\n",
258
+ old_data_space_->Size(),
259
+ old_data_space_->Available(),
260
+ old_data_space_->Waste());
261
+ PrintF("Code space, used: %8d, available: %8d, waste: %8d\n",
262
+ code_space_->Size(),
263
+ code_space_->Available(),
264
+ code_space_->Waste());
265
+ PrintF("Map space, used: %8d, available: %8d, waste: %8d\n",
266
+ map_space_->Size(),
267
+ map_space_->Available(),
268
+ map_space_->Waste());
269
+ PrintF("Cell space, used: %8d, available: %8d, waste: %8d\n",
270
+ cell_space_->Size(),
271
+ cell_space_->Available(),
272
+ cell_space_->Waste());
273
+ PrintF("Large object space, used: %8d, avaialble: %8d\n",
274
+ lo_space_->Size(),
275
+ lo_space_->Available());
276
+ }
277
+ #endif
278
+
279
+
280
+ // TODO(1238405): Combine the infrastructure for --heap-stats and
281
+ // --log-gc to avoid the complicated preprocessor and flag testing.
282
+ void Heap::ReportStatisticsAfterGC() {
283
+ // Similar to the before GC, we use some complicated logic to ensure that
284
+ // NewSpace statistics are logged exactly once when --log-gc is turned on.
285
+ #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
286
+ if (FLAG_heap_stats) {
287
+ new_space_.CollectStatistics();
288
+ ReportHeapStatistics("After GC");
289
+ } else if (FLAG_log_gc) {
290
+ new_space_.ReportStatistics();
291
+ }
292
+ #elif defined(DEBUG)
293
+ if (FLAG_heap_stats) ReportHeapStatistics("After GC");
294
+ #elif defined(ENABLE_LOGGING_AND_PROFILING)
295
+ if (FLAG_log_gc) new_space_.ReportStatistics();
296
+ #endif
297
+ }
298
+ #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
299
+
300
+
301
+ void Heap::GarbageCollectionPrologue() {
302
+ TranscendentalCache::Clear();
303
+ gc_count_++;
304
+ #ifdef DEBUG
305
+ ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
306
+ allow_allocation(false);
307
+
308
+ if (FLAG_verify_heap) {
309
+ Verify();
310
+ }
311
+
312
+ if (FLAG_gc_verbose) Print();
313
+
314
+ if (FLAG_print_rset) {
315
+ // Not all spaces have remembered set bits that we care about.
316
+ old_pointer_space_->PrintRSet();
317
+ map_space_->PrintRSet();
318
+ lo_space_->PrintRSet();
319
+ }
320
+ #endif
321
+
322
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
323
+ ReportStatisticsBeforeGC();
324
+ #endif
325
+ }
326
+
327
+ int Heap::SizeOfObjects() {
328
+ int total = 0;
329
+ AllSpaces spaces;
330
+ while (Space* space = spaces.next()) {
331
+ total += space->Size();
332
+ }
333
+ return total;
334
+ }
335
+
336
+ void Heap::GarbageCollectionEpilogue() {
337
+ #ifdef DEBUG
338
+ allow_allocation(true);
339
+ ZapFromSpace();
340
+
341
+ if (FLAG_verify_heap) {
342
+ Verify();
343
+ }
344
+
345
+ if (FLAG_print_global_handles) GlobalHandles::Print();
346
+ if (FLAG_print_handles) PrintHandles();
347
+ if (FLAG_gc_verbose) Print();
348
+ if (FLAG_code_stats) ReportCodeStatistics("After GC");
349
+ #endif
350
+
351
+ Counters::alive_after_last_gc.Set(SizeOfObjects());
352
+
353
+ Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
354
+ Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
355
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
356
+ ReportStatisticsAfterGC();
357
+ #endif
358
+ #ifdef ENABLE_DEBUGGER_SUPPORT
359
+ Debug::AfterGarbageCollection();
360
+ #endif
361
+ }
362
+
363
+
364
+ void Heap::CollectAllGarbage(bool force_compaction) {
365
+ // Since we are ignoring the return value, the exact choice of space does
366
+ // not matter, so long as we do not specify NEW_SPACE, which would not
367
+ // cause a full GC.
368
+ MarkCompactCollector::SetForceCompaction(force_compaction);
369
+ CollectGarbage(0, OLD_POINTER_SPACE);
370
+ MarkCompactCollector::SetForceCompaction(false);
371
+ }
372
+
373
+
374
+ void Heap::CollectAllGarbageIfContextDisposed() {
375
+ // If the garbage collector interface is exposed through the global
376
+ // gc() function, we avoid being clever about forcing GCs when
377
+ // contexts are disposed and leave it to the embedder to make
378
+ // informed decisions about when to force a collection.
379
+ if (!FLAG_expose_gc && context_disposed_pending_) {
380
+ HistogramTimerScope scope(&Counters::gc_context);
381
+ CollectAllGarbage(false);
382
+ }
383
+ context_disposed_pending_ = false;
384
+ }
385
+
386
+
387
+ void Heap::NotifyContextDisposed() {
388
+ context_disposed_pending_ = true;
389
+ }
390
+
391
+
392
+ bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
393
+ // The VM is in the GC state until exiting this function.
394
+ VMState state(GC);
395
+
396
+ #ifdef DEBUG
397
+ // Reset the allocation timeout to the GC interval, but make sure to
398
+ // allow at least a few allocations after a collection. The reason
399
+ // for this is that we have a lot of allocation sequences and we
400
+ // assume that a garbage collection will allow the subsequent
401
+ // allocation attempts to go through.
402
+ allocation_timeout_ = Max(6, FLAG_gc_interval);
403
+ #endif
404
+
405
+ { GCTracer tracer;
406
+ GarbageCollectionPrologue();
407
+ // The GC count was incremented in the prologue. Tell the tracer about
408
+ // it.
409
+ tracer.set_gc_count(gc_count_);
410
+
411
+ GarbageCollector collector = SelectGarbageCollector(space);
412
+ // Tell the tracer which collector we've selected.
413
+ tracer.set_collector(collector);
414
+
415
+ HistogramTimer* rate = (collector == SCAVENGER)
416
+ ? &Counters::gc_scavenger
417
+ : &Counters::gc_compactor;
418
+ rate->Start();
419
+ PerformGarbageCollection(space, collector, &tracer);
420
+ rate->Stop();
421
+
422
+ GarbageCollectionEpilogue();
423
+ }
424
+
425
+
426
+ #ifdef ENABLE_LOGGING_AND_PROFILING
427
+ if (FLAG_log_gc) HeapProfiler::WriteSample();
428
+ #endif
429
+
430
+ switch (space) {
431
+ case NEW_SPACE:
432
+ return new_space_.Available() >= requested_size;
433
+ case OLD_POINTER_SPACE:
434
+ return old_pointer_space_->Available() >= requested_size;
435
+ case OLD_DATA_SPACE:
436
+ return old_data_space_->Available() >= requested_size;
437
+ case CODE_SPACE:
438
+ return code_space_->Available() >= requested_size;
439
+ case MAP_SPACE:
440
+ return map_space_->Available() >= requested_size;
441
+ case CELL_SPACE:
442
+ return cell_space_->Available() >= requested_size;
443
+ case LO_SPACE:
444
+ return lo_space_->Available() >= requested_size;
445
+ }
446
+ return false;
447
+ }
448
+
449
+
450
+ void Heap::PerformScavenge() {
451
+ GCTracer tracer;
452
+ PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
453
+ }
454
+
455
+
456
+ #ifdef DEBUG
457
+ // Helper class for verifying the symbol table.
458
+ class SymbolTableVerifier : public ObjectVisitor {
459
+ public:
460
+ SymbolTableVerifier() { }
461
+ void VisitPointers(Object** start, Object** end) {
462
+ // Visit all HeapObject pointers in [start, end).
463
+ for (Object** p = start; p < end; p++) {
464
+ if ((*p)->IsHeapObject()) {
465
+ // Check that the symbol is actually a symbol.
466
+ ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
467
+ }
468
+ }
469
+ }
470
+ };
471
+ #endif // DEBUG
472
+
473
+
474
+ static void VerifySymbolTable() {
475
+ #ifdef DEBUG
476
+ SymbolTableVerifier verifier;
477
+ Heap::symbol_table()->IterateElements(&verifier);
478
+ #endif // DEBUG
479
+ }
480
+
481
+
482
+ void Heap::ReserveSpace(
483
+ int new_space_size,
484
+ int pointer_space_size,
485
+ int data_space_size,
486
+ int code_space_size,
487
+ int map_space_size,
488
+ int cell_space_size,
489
+ int large_object_size) {
490
+ NewSpace* new_space = Heap::new_space();
491
+ PagedSpace* old_pointer_space = Heap::old_pointer_space();
492
+ PagedSpace* old_data_space = Heap::old_data_space();
493
+ PagedSpace* code_space = Heap::code_space();
494
+ PagedSpace* map_space = Heap::map_space();
495
+ PagedSpace* cell_space = Heap::cell_space();
496
+ LargeObjectSpace* lo_space = Heap::lo_space();
497
+ bool gc_performed = true;
498
+ while (gc_performed) {
499
+ gc_performed = false;
500
+ if (!new_space->ReserveSpace(new_space_size)) {
501
+ Heap::CollectGarbage(new_space_size, NEW_SPACE);
502
+ gc_performed = true;
503
+ }
504
+ if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
505
+ Heap::CollectGarbage(pointer_space_size, OLD_POINTER_SPACE);
506
+ gc_performed = true;
507
+ }
508
+ if (!(old_data_space->ReserveSpace(data_space_size))) {
509
+ Heap::CollectGarbage(data_space_size, OLD_DATA_SPACE);
510
+ gc_performed = true;
511
+ }
512
+ if (!(code_space->ReserveSpace(code_space_size))) {
513
+ Heap::CollectGarbage(code_space_size, CODE_SPACE);
514
+ gc_performed = true;
515
+ }
516
+ if (!(map_space->ReserveSpace(map_space_size))) {
517
+ Heap::CollectGarbage(map_space_size, MAP_SPACE);
518
+ gc_performed = true;
519
+ }
520
+ if (!(cell_space->ReserveSpace(cell_space_size))) {
521
+ Heap::CollectGarbage(cell_space_size, CELL_SPACE);
522
+ gc_performed = true;
523
+ }
524
+ // We add a slack-factor of 2 in order to have space for the remembered
525
+ // set and a series of large-object allocations that are only just larger
526
+ // than the page size.
527
+ large_object_size *= 2;
528
+ // The ReserveSpace method on the large object space checks how much
529
+ // we can expand the old generation. This includes expansion caused by
530
+ // allocation in the other spaces.
531
+ large_object_size += cell_space_size + map_space_size + code_space_size +
532
+ data_space_size + pointer_space_size;
533
+ if (!(lo_space->ReserveSpace(large_object_size))) {
534
+ Heap::CollectGarbage(large_object_size, LO_SPACE);
535
+ gc_performed = true;
536
+ }
537
+ }
538
+ }
539
+
540
+
541
+ void Heap::EnsureFromSpaceIsCommitted() {
542
+ if (new_space_.CommitFromSpaceIfNeeded()) return;
543
+
544
+ // Committing memory to from space failed.
545
+ // Try shrinking and try again.
546
+ Shrink();
547
+ if (new_space_.CommitFromSpaceIfNeeded()) return;
548
+
549
+ // Committing memory to from space failed again.
550
+ // Memory is exhausted and we will die.
551
+ V8::FatalProcessOutOfMemory("Committing semi space failed.");
552
+ }
553
+
554
+
555
+ void Heap::PerformGarbageCollection(AllocationSpace space,
556
+ GarbageCollector collector,
557
+ GCTracer* tracer) {
558
+ VerifySymbolTable();
559
+ if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
560
+ ASSERT(!allocation_allowed_);
561
+ global_gc_prologue_callback_();
562
+ }
563
+ EnsureFromSpaceIsCommitted();
564
+ if (collector == MARK_COMPACTOR) {
565
+ MarkCompact(tracer);
566
+
567
+ int old_gen_size = PromotedSpaceSize();
568
+ old_gen_promotion_limit_ =
569
+ old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
570
+ old_gen_allocation_limit_ =
571
+ old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
572
+ old_gen_exhausted_ = false;
573
+ }
574
+ Scavenge();
575
+
576
+ Counters::objs_since_last_young.Set(0);
577
+
578
+ if (collector == MARK_COMPACTOR) {
579
+ DisableAssertNoAllocation allow_allocation;
580
+ GlobalHandles::PostGarbageCollectionProcessing();
581
+ }
582
+
583
+ // Update relocatables.
584
+ Relocatable::PostGarbageCollectionProcessing();
585
+
586
+ if (collector == MARK_COMPACTOR) {
587
+ // Register the amount of external allocated memory.
588
+ amount_of_external_allocated_memory_at_last_global_gc_ =
589
+ amount_of_external_allocated_memory_;
590
+ }
591
+
592
+ if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
593
+ ASSERT(!allocation_allowed_);
594
+ global_gc_epilogue_callback_();
595
+ }
596
+ VerifySymbolTable();
597
+ }
598
+
599
+
600
+ void Heap::MarkCompact(GCTracer* tracer) {
601
+ gc_state_ = MARK_COMPACT;
602
+ mc_count_++;
603
+ tracer->set_full_gc_count(mc_count_);
604
+ LOG(ResourceEvent("markcompact", "begin"));
605
+
606
+ MarkCompactCollector::Prepare(tracer);
607
+
608
+ bool is_compacting = MarkCompactCollector::IsCompacting();
609
+
610
+ MarkCompactPrologue(is_compacting);
611
+
612
+ MarkCompactCollector::CollectGarbage();
613
+
614
+ MarkCompactEpilogue(is_compacting);
615
+
616
+ LOG(ResourceEvent("markcompact", "end"));
617
+
618
+ gc_state_ = NOT_IN_GC;
619
+
620
+ Shrink();
621
+
622
+ Counters::objs_since_last_full.Set(0);
623
+ context_disposed_pending_ = false;
624
+ }
625
+
626
+
627
+ void Heap::MarkCompactPrologue(bool is_compacting) {
628
+ // At any old GC clear the keyed lookup cache to enable collection of unused
629
+ // maps.
630
+ KeyedLookupCache::Clear();
631
+ ContextSlotCache::Clear();
632
+ DescriptorLookupCache::Clear();
633
+
634
+ CompilationCache::MarkCompactPrologue();
635
+
636
+ Top::MarkCompactPrologue(is_compacting);
637
+ ThreadManager::MarkCompactPrologue(is_compacting);
638
+
639
+ if (is_compacting) FlushNumberStringCache();
640
+ }
641
+
642
+
643
+ void Heap::MarkCompactEpilogue(bool is_compacting) {
644
+ Top::MarkCompactEpilogue(is_compacting);
645
+ ThreadManager::MarkCompactEpilogue(is_compacting);
646
+ }
647
+
648
+
649
+ Object* Heap::FindCodeObject(Address a) {
650
+ Object* obj = code_space_->FindObject(a);
651
+ if (obj->IsFailure()) {
652
+ obj = lo_space_->FindObject(a);
653
+ }
654
+ ASSERT(!obj->IsFailure());
655
+ return obj;
656
+ }
657
+
658
+
659
+ // Helper class for copying HeapObjects
660
+ class ScavengeVisitor: public ObjectVisitor {
661
+ public:
662
+
663
+ void VisitPointer(Object** p) { ScavengePointer(p); }
664
+
665
+ void VisitPointers(Object** start, Object** end) {
666
+ // Copy all HeapObject pointers in [start, end)
667
+ for (Object** p = start; p < end; p++) ScavengePointer(p);
668
+ }
669
+
670
+ private:
671
+ void ScavengePointer(Object** p) {
672
+ Object* object = *p;
673
+ if (!Heap::InNewSpace(object)) return;
674
+ Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
675
+ reinterpret_cast<HeapObject*>(object));
676
+ }
677
+ };
678
+
679
+
680
+ // A queue of pointers and maps of to-be-promoted objects during a
681
+ // scavenge collection.
682
+ class PromotionQueue {
683
+ public:
684
+ void Initialize(Address start_address) {
685
+ front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
686
+ }
687
+
688
+ bool is_empty() { return front_ <= rear_; }
689
+
690
+ void insert(HeapObject* object, Map* map) {
691
+ *(--rear_) = object;
692
+ *(--rear_) = map;
693
+ // Assert no overflow into live objects.
694
+ ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
695
+ }
696
+
697
+ void remove(HeapObject** object, Map** map) {
698
+ *object = *(--front_);
699
+ *map = Map::cast(*(--front_));
700
+ // Assert no underflow.
701
+ ASSERT(front_ >= rear_);
702
+ }
703
+
704
+ private:
705
+ // The front of the queue is higher in memory than the rear.
706
+ HeapObject** front_;
707
+ HeapObject** rear_;
708
+ };
709
+
710
+
711
+ // Shared state read by the scavenge collector and set by ScavengeObject.
712
+ static PromotionQueue promotion_queue;
713
+
714
+
715
+ #ifdef DEBUG
716
+ // Visitor class to verify pointers in code or data space do not point into
717
+ // new space.
718
+ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
719
+ public:
720
+ void VisitPointers(Object** start, Object**end) {
721
+ for (Object** current = start; current < end; current++) {
722
+ if ((*current)->IsHeapObject()) {
723
+ ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
724
+ }
725
+ }
726
+ }
727
+ };
728
+
729
+
730
+ static void VerifyNonPointerSpacePointers() {
731
+ // Verify that there are no pointers to new space in spaces where we
732
+ // do not expect them.
733
+ VerifyNonPointerSpacePointersVisitor v;
734
+ HeapObjectIterator code_it(Heap::code_space());
735
+ while (code_it.has_next()) {
736
+ HeapObject* object = code_it.next();
737
+ object->Iterate(&v);
738
+ }
739
+
740
+ HeapObjectIterator data_it(Heap::old_data_space());
741
+ while (data_it.has_next()) data_it.next()->Iterate(&v);
742
+ }
743
+ #endif
744
+
745
+
746
+ void Heap::Scavenge() {
747
+ #ifdef DEBUG
748
+ if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
749
+ #endif
750
+
751
+ gc_state_ = SCAVENGE;
752
+
753
+ // Implements Cheney's copying algorithm
754
+ LOG(ResourceEvent("scavenge", "begin"));
755
+
756
+ // Clear descriptor cache.
757
+ DescriptorLookupCache::Clear();
758
+
759
+ // Used for updating survived_since_last_expansion_ at function end.
760
+ int survived_watermark = PromotedSpaceSize();
761
+
762
+ if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
763
+ survived_since_last_expansion_ > new_space_.Capacity()) {
764
+ // Grow the size of new space if there is room to grow and enough
765
+ // data has survived scavenge since the last expansion.
766
+ new_space_.Grow();
767
+ survived_since_last_expansion_ = 0;
768
+ }
769
+
770
+ // Flip the semispaces. After flipping, to space is empty, from space has
771
+ // live objects.
772
+ new_space_.Flip();
773
+ new_space_.ResetAllocationInfo();
774
+
775
+ // We need to sweep newly copied objects which can be either in the
776
+ // to space or promoted to the old generation. For to-space
777
+ // objects, we treat the bottom of the to space as a queue. Newly
778
+ // copied and unswept objects lie between a 'front' mark and the
779
+ // allocation pointer.
780
+ //
781
+ // Promoted objects can go into various old-generation spaces, and
782
+ // can be allocated internally in the spaces (from the free list).
783
+ // We treat the top of the to space as a queue of addresses of
784
+ // promoted objects. The addresses of newly promoted and unswept
785
+ // objects lie between a 'front' mark and a 'rear' mark that is
786
+ // updated as a side effect of promoting an object.
787
+ //
788
+ // There is guaranteed to be enough room at the top of the to space
789
+ // for the addresses of promoted objects: every object promoted
790
+ // frees up its size in bytes from the top of the new space, and
791
+ // objects are at least one pointer in size.
792
+ Address new_space_front = new_space_.ToSpaceLow();
793
+ promotion_queue.Initialize(new_space_.ToSpaceHigh());
794
+
795
+ ScavengeVisitor scavenge_visitor;
796
+ // Copy roots.
797
+ IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
798
+
799
+ // Copy objects reachable from the old generation. By definition,
800
+ // there are no intergenerational pointers in code or data spaces.
801
+ IterateRSet(old_pointer_space_, &ScavengePointer);
802
+ IterateRSet(map_space_, &ScavengePointer);
803
+ lo_space_->IterateRSet(&ScavengePointer);
804
+
805
+ // Copy objects reachable from cells by scavenging cell values directly.
806
+ HeapObjectIterator cell_iterator(cell_space_);
807
+ while (cell_iterator.has_next()) {
808
+ HeapObject* cell = cell_iterator.next();
809
+ if (cell->IsJSGlobalPropertyCell()) {
810
+ Address value_address =
811
+ reinterpret_cast<Address>(cell) +
812
+ (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
813
+ scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
814
+ }
815
+ }
816
+
817
+ new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
818
+
819
+ ScavengeExternalStringTable();
820
+ ASSERT(new_space_front == new_space_.top());
821
+
822
+ // Set age mark.
823
+ new_space_.set_age_mark(new_space_.top());
824
+
825
+ // Update how much has survived scavenge.
826
+ survived_since_last_expansion_ +=
827
+ (PromotedSpaceSize() - survived_watermark) + new_space_.Size();
828
+
829
+ LOG(ResourceEvent("scavenge", "end"));
830
+
831
+ gc_state_ = NOT_IN_GC;
832
+ }
833
+
834
+
835
+ void Heap::ScavengeExternalStringTable() {
836
+ ExternalStringTable::Verify();
837
+
838
+ if (ExternalStringTable::new_space_strings_.is_empty()) return;
839
+
840
+ Object** start = &ExternalStringTable::new_space_strings_[0];
841
+ Object** end = start + ExternalStringTable::new_space_strings_.length();
842
+ Object** last = start;
843
+
844
+ for (Object** p = start; p < end; ++p) {
845
+ ASSERT(Heap::InFromSpace(*p));
846
+ MapWord first_word = HeapObject::cast(*p)->map_word();
847
+
848
+ if (!first_word.IsForwardingAddress()) {
849
+ // Unreachable external string can be finalized.
850
+ FinalizeExternalString(String::cast(*p));
851
+ continue;
852
+ }
853
+
854
+ // String is still reachable.
855
+ String* target = String::cast(first_word.ToForwardingAddress());
856
+ ASSERT(target->IsExternalString());
857
+
858
+ if (Heap::InNewSpace(target)) {
859
+ // String is still in new space. Update the table entry.
860
+ *last = target;
861
+ ++last;
862
+ } else {
863
+ // String got promoted. Move it to the old string list.
864
+ ExternalStringTable::AddOldString(target);
865
+ }
866
+ }
867
+
868
+ ASSERT(last <= end);
869
+ ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
870
+ }
871
+
872
+
873
+ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
874
+ Address new_space_front) {
875
+ do {
876
+ ASSERT(new_space_front <= new_space_.top());
877
+
878
+ // The addresses new_space_front and new_space_.top() define a
879
+ // queue of unprocessed copied objects. Process them until the
880
+ // queue is empty.
881
+ while (new_space_front < new_space_.top()) {
882
+ HeapObject* object = HeapObject::FromAddress(new_space_front);
883
+ object->Iterate(scavenge_visitor);
884
+ new_space_front += object->Size();
885
+ }
886
+
887
+ // Promote and process all the to-be-promoted objects.
888
+ while (!promotion_queue.is_empty()) {
889
+ HeapObject* source;
890
+ Map* map;
891
+ promotion_queue.remove(&source, &map);
892
+ // Copy the from-space object to its new location (given by the
893
+ // forwarding address) and fix its map.
894
+ HeapObject* target = source->map_word().ToForwardingAddress();
895
+ CopyBlock(reinterpret_cast<Object**>(target->address()),
896
+ reinterpret_cast<Object**>(source->address()),
897
+ source->SizeFromMap(map));
898
+ target->set_map(map);
899
+
900
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
901
+ // Update NewSpace stats if necessary.
902
+ RecordCopiedObject(target);
903
+ #endif
904
+ // Visit the newly copied object for pointers to new space.
905
+ target->Iterate(scavenge_visitor);
906
+ UpdateRSet(target);
907
+ }
908
+
909
+ // Take another spin if there are now unswept objects in new space
910
+ // (there are currently no more unswept promoted objects).
911
+ } while (new_space_front < new_space_.top());
912
+
913
+ return new_space_front;
914
+ }
915
+
916
+
917
+ void Heap::ClearRSetRange(Address start, int size_in_bytes) {
918
+ uint32_t start_bit;
919
+ Address start_word_address =
920
+ Page::ComputeRSetBitPosition(start, 0, &start_bit);
921
+ uint32_t end_bit;
922
+ Address end_word_address =
923
+ Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize,
924
+ 0,
925
+ &end_bit);
926
+
927
+ // We want to clear the bits in the starting word starting with the
928
+ // first bit, and in the ending word up to and including the last
929
+ // bit. Build a pair of bitmasks to do that.
930
+ uint32_t start_bitmask = start_bit - 1;
931
+ uint32_t end_bitmask = ~((end_bit << 1) - 1);
932
+
933
+ // If the start address and end address are the same, we mask that
934
+ // word once, otherwise mask the starting and ending word
935
+ // separately and all the ones in between.
936
+ if (start_word_address == end_word_address) {
937
+ Memory::uint32_at(start_word_address) &= (start_bitmask | end_bitmask);
938
+ } else {
939
+ Memory::uint32_at(start_word_address) &= start_bitmask;
940
+ Memory::uint32_at(end_word_address) &= end_bitmask;
941
+ start_word_address += kIntSize;
942
+ memset(start_word_address, 0, end_word_address - start_word_address);
943
+ }
944
+ }
945
+
946
+
947
+ class UpdateRSetVisitor: public ObjectVisitor {
948
+ public:
949
+
950
+ void VisitPointer(Object** p) {
951
+ UpdateRSet(p);
952
+ }
953
+
954
+ void VisitPointers(Object** start, Object** end) {
955
+ // Update a store into slots [start, end), used (a) to update remembered
956
+ // set when promoting a young object to old space or (b) to rebuild
957
+ // remembered sets after a mark-compact collection.
958
+ for (Object** p = start; p < end; p++) UpdateRSet(p);
959
+ }
960
+ private:
961
+
962
+ void UpdateRSet(Object** p) {
963
+ // The remembered set should not be set. It should be clear for objects
964
+ // newly copied to old space, and it is cleared before rebuilding in the
965
+ // mark-compact collector.
966
+ ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
967
+ if (Heap::InNewSpace(*p)) {
968
+ Page::SetRSet(reinterpret_cast<Address>(p), 0);
969
+ }
970
+ }
971
+ };
972
+
973
+
974
+ int Heap::UpdateRSet(HeapObject* obj) {
975
+ ASSERT(!InNewSpace(obj));
976
+ // Special handling of fixed arrays to iterate the body based on the start
977
+ // address and offset. Just iterating the pointers as in UpdateRSetVisitor
978
+ // will not work because Page::SetRSet needs to have the start of the
979
+ // object for large object pages.
980
+ if (obj->IsFixedArray()) {
981
+ FixedArray* array = FixedArray::cast(obj);
982
+ int length = array->length();
983
+ for (int i = 0; i < length; i++) {
984
+ int offset = FixedArray::kHeaderSize + i * kPointerSize;
985
+ ASSERT(!Page::IsRSetSet(obj->address(), offset));
986
+ if (Heap::InNewSpace(array->get(i))) {
987
+ Page::SetRSet(obj->address(), offset);
988
+ }
989
+ }
990
+ } else if (!obj->IsCode()) {
991
+ // Skip code object, we know it does not contain inter-generational
992
+ // pointers.
993
+ UpdateRSetVisitor v;
994
+ obj->Iterate(&v);
995
+ }
996
+ return obj->Size();
997
+ }
998
+
999
+
1000
+ void Heap::RebuildRSets() {
1001
+ // By definition, we do not care about remembered set bits in code,
1002
+ // data, or cell spaces.
1003
+ map_space_->ClearRSet();
1004
+ RebuildRSets(map_space_);
1005
+
1006
+ old_pointer_space_->ClearRSet();
1007
+ RebuildRSets(old_pointer_space_);
1008
+
1009
+ Heap::lo_space_->ClearRSet();
1010
+ RebuildRSets(lo_space_);
1011
+ }
1012
+
1013
+
1014
+ void Heap::RebuildRSets(PagedSpace* space) {
1015
+ HeapObjectIterator it(space);
1016
+ while (it.has_next()) Heap::UpdateRSet(it.next());
1017
+ }
1018
+
1019
+
1020
+ void Heap::RebuildRSets(LargeObjectSpace* space) {
1021
+ LargeObjectIterator it(space);
1022
+ while (it.has_next()) Heap::UpdateRSet(it.next());
1023
+ }
1024
+
1025
+
1026
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1027
+ void Heap::RecordCopiedObject(HeapObject* obj) {
1028
+ bool should_record = false;
1029
+ #ifdef DEBUG
1030
+ should_record = FLAG_heap_stats;
1031
+ #endif
1032
+ #ifdef ENABLE_LOGGING_AND_PROFILING
1033
+ should_record = should_record || FLAG_log_gc;
1034
+ #endif
1035
+ if (should_record) {
1036
+ if (new_space_.Contains(obj)) {
1037
+ new_space_.RecordAllocation(obj);
1038
+ } else {
1039
+ new_space_.RecordPromotion(obj);
1040
+ }
1041
+ }
1042
+ }
1043
+ #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1044
+
1045
+
1046
+
1047
+ HeapObject* Heap::MigrateObject(HeapObject* source,
1048
+ HeapObject* target,
1049
+ int size) {
1050
+ // Copy the content of source to target.
1051
+ CopyBlock(reinterpret_cast<Object**>(target->address()),
1052
+ reinterpret_cast<Object**>(source->address()),
1053
+ size);
1054
+
1055
+ // Set the forwarding address.
1056
+ source->set_map_word(MapWord::FromForwardingAddress(target));
1057
+
1058
+ #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1059
+ // Update NewSpace stats if necessary.
1060
+ RecordCopiedObject(target);
1061
+ #endif
1062
+
1063
+ return target;
1064
+ }
1065
+
1066
+
1067
+ static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
1068
+ STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
1069
+ ASSERT(object->map() == map);
1070
+ InstanceType type = map->instance_type();
1071
+ if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
1072
+ ASSERT(object->IsString() && !object->IsSymbol());
1073
+ return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
1074
+ }
1075
+
1076
+
1077
+ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1078
+ ASSERT(InFromSpace(object));
1079
+ MapWord first_word = object->map_word();
1080
+ ASSERT(!first_word.IsForwardingAddress());
1081
+
1082
+ // Optimization: Bypass flattened ConsString objects.
1083
+ if (IsShortcutCandidate(object, first_word.ToMap())) {
1084
+ object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
1085
+ *p = object;
1086
+ // After patching *p we have to repeat the checks that object is in the
1087
+ // active semispace of the young generation and not already copied.
1088
+ if (!InNewSpace(object)) return;
1089
+ first_word = object->map_word();
1090
+ if (first_word.IsForwardingAddress()) {
1091
+ *p = first_word.ToForwardingAddress();
1092
+ return;
1093
+ }
1094
+ }
1095
+
1096
+ int object_size = object->SizeFromMap(first_word.ToMap());
1097
+ // We rely on live objects in new space to be at least two pointers,
1098
+ // so we can store the from-space address and map pointer of promoted
1099
+ // objects in the to space.
1100
+ ASSERT(object_size >= 2 * kPointerSize);
1101
+
1102
+ // If the object should be promoted, we try to copy it to old space.
1103
+ if (ShouldBePromoted(object->address(), object_size)) {
1104
+ Object* result;
1105
+ if (object_size > MaxObjectSizeInPagedSpace()) {
1106
+ result = lo_space_->AllocateRawFixedArray(object_size);
1107
+ if (!result->IsFailure()) {
1108
+ // Save the from-space object pointer and its map pointer at the
1109
+ // top of the to space to be swept and copied later. Write the
1110
+ // forwarding address over the map word of the from-space
1111
+ // object.
1112
+ HeapObject* target = HeapObject::cast(result);
1113
+ promotion_queue.insert(object, first_word.ToMap());
1114
+ object->set_map_word(MapWord::FromForwardingAddress(target));
1115
+
1116
+ // Give the space allocated for the result a proper map by
1117
+ // treating it as a free list node (not linked into the free
1118
+ // list).
1119
+ FreeListNode* node = FreeListNode::FromAddress(target->address());
1120
+ node->set_size(object_size);
1121
+
1122
+ *p = target;
1123
+ return;
1124
+ }
1125
+ } else {
1126
+ OldSpace* target_space = Heap::TargetSpace(object);
1127
+ ASSERT(target_space == Heap::old_pointer_space_ ||
1128
+ target_space == Heap::old_data_space_);
1129
+ result = target_space->AllocateRaw(object_size);
1130
+ if (!result->IsFailure()) {
1131
+ HeapObject* target = HeapObject::cast(result);
1132
+ if (target_space == Heap::old_pointer_space_) {
1133
+ // Save the from-space object pointer and its map pointer at the
1134
+ // top of the to space to be swept and copied later. Write the
1135
+ // forwarding address over the map word of the from-space
1136
+ // object.
1137
+ promotion_queue.insert(object, first_word.ToMap());
1138
+ object->set_map_word(MapWord::FromForwardingAddress(target));
1139
+
1140
+ // Give the space allocated for the result a proper map by
1141
+ // treating it as a free list node (not linked into the free
1142
+ // list).
1143
+ FreeListNode* node = FreeListNode::FromAddress(target->address());
1144
+ node->set_size(object_size);
1145
+
1146
+ *p = target;
1147
+ } else {
1148
+ // Objects promoted to the data space can be copied immediately
1149
+ // and not revisited---we will never sweep that space for
1150
+ // pointers and the copied objects do not contain pointers to
1151
+ // new space objects.
1152
+ *p = MigrateObject(object, target, object_size);
1153
+ #ifdef DEBUG
1154
+ VerifyNonPointerSpacePointersVisitor v;
1155
+ (*p)->Iterate(&v);
1156
+ #endif
1157
+ }
1158
+ return;
1159
+ }
1160
+ }
1161
+ }
1162
+ // The object should remain in new space or the old space allocation failed.
1163
+ Object* result = new_space_.AllocateRaw(object_size);
1164
+ // Failed allocation at this point is utterly unexpected.
1165
+ ASSERT(!result->IsFailure());
1166
+ *p = MigrateObject(object, HeapObject::cast(result), object_size);
1167
+ }
1168
+
1169
+
1170
+ void Heap::ScavengePointer(HeapObject** p) {
1171
+ ScavengeObject(p, *p);
1172
+ }
1173
+
1174
+
1175
+ Object* Heap::AllocatePartialMap(InstanceType instance_type,
1176
+ int instance_size) {
1177
+ Object* result = AllocateRawMap();
1178
+ if (result->IsFailure()) return result;
1179
+
1180
+ // Map::cast cannot be used due to uninitialized map field.
1181
+ reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1182
+ reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1183
+ reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1184
+ reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1185
+ reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1186
+ return result;
1187
+ }
1188
+
1189
+
1190
+ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1191
+ Object* result = AllocateRawMap();
1192
+ if (result->IsFailure()) return result;
1193
+
1194
+ Map* map = reinterpret_cast<Map*>(result);
1195
+ map->set_map(meta_map());
1196
+ map->set_instance_type(instance_type);
1197
+ map->set_prototype(null_value());
1198
+ map->set_constructor(null_value());
1199
+ map->set_instance_size(instance_size);
1200
+ map->set_inobject_properties(0);
1201
+ map->set_pre_allocated_property_fields(0);
1202
+ map->set_instance_descriptors(empty_descriptor_array());
1203
+ map->set_code_cache(empty_fixed_array());
1204
+ map->set_unused_property_fields(0);
1205
+ map->set_bit_field(0);
1206
+ map->set_bit_field2(0);
1207
+
1208
+ // If the map object is aligned fill the padding area with Smi 0 objects.
1209
+ if (Map::kPadStart < Map::kSize) {
1210
+ memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
1211
+ 0,
1212
+ Map::kSize - Map::kPadStart);
1213
+ }
1214
+ return map;
1215
+ }
1216
+
1217
+
1218
+ const Heap::StringTypeTable Heap::string_type_table[] = {
1219
+ #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
1220
+ {type, size, k##camel_name##MapRootIndex},
1221
+ STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
1222
+ #undef STRING_TYPE_ELEMENT
1223
+ };
1224
+
1225
+
1226
+ const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
1227
+ #define CONSTANT_SYMBOL_ELEMENT(name, contents) \
1228
+ {contents, k##name##RootIndex},
1229
+ SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
1230
+ #undef CONSTANT_SYMBOL_ELEMENT
1231
+ };
1232
+
1233
+
1234
+ const Heap::StructTable Heap::struct_table[] = {
1235
+ #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
1236
+ { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
1237
+ STRUCT_LIST(STRUCT_TABLE_ELEMENT)
1238
+ #undef STRUCT_TABLE_ELEMENT
1239
+ };
1240
+
1241
+
1242
+ bool Heap::CreateInitialMaps() {
1243
+ Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
1244
+ if (obj->IsFailure()) return false;
1245
+ // Map::cast cannot be used due to uninitialized map field.
1246
+ Map* new_meta_map = reinterpret_cast<Map*>(obj);
1247
+ set_meta_map(new_meta_map);
1248
+ new_meta_map->set_map(new_meta_map);
1249
+
1250
+ obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
1251
+ if (obj->IsFailure()) return false;
1252
+ set_fixed_array_map(Map::cast(obj));
1253
+
1254
+ obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
1255
+ if (obj->IsFailure()) return false;
1256
+ set_oddball_map(Map::cast(obj));
1257
+
1258
+ // Allocate the empty array
1259
+ obj = AllocateEmptyFixedArray();
1260
+ if (obj->IsFailure()) return false;
1261
+ set_empty_fixed_array(FixedArray::cast(obj));
1262
+
1263
+ obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1264
+ if (obj->IsFailure()) return false;
1265
+ set_null_value(obj);
1266
+
1267
+ // Allocate the empty descriptor array.
1268
+ obj = AllocateEmptyFixedArray();
1269
+ if (obj->IsFailure()) return false;
1270
+ set_empty_descriptor_array(DescriptorArray::cast(obj));
1271
+
1272
+ // Fix the instance_descriptors for the existing maps.
1273
+ meta_map()->set_instance_descriptors(empty_descriptor_array());
1274
+ meta_map()->set_code_cache(empty_fixed_array());
1275
+
1276
+ fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
1277
+ fixed_array_map()->set_code_cache(empty_fixed_array());
1278
+
1279
+ oddball_map()->set_instance_descriptors(empty_descriptor_array());
1280
+ oddball_map()->set_code_cache(empty_fixed_array());
1281
+
1282
+ // Fix prototype object for existing maps.
1283
+ meta_map()->set_prototype(null_value());
1284
+ meta_map()->set_constructor(null_value());
1285
+
1286
+ fixed_array_map()->set_prototype(null_value());
1287
+ fixed_array_map()->set_constructor(null_value());
1288
+
1289
+ oddball_map()->set_prototype(null_value());
1290
+ oddball_map()->set_constructor(null_value());
1291
+
1292
+ obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
1293
+ if (obj->IsFailure()) return false;
1294
+ set_heap_number_map(Map::cast(obj));
1295
+
1296
+ obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
1297
+ if (obj->IsFailure()) return false;
1298
+ set_proxy_map(Map::cast(obj));
1299
+
1300
+ for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
1301
+ const StringTypeTable& entry = string_type_table[i];
1302
+ obj = AllocateMap(entry.type, entry.size);
1303
+ if (obj->IsFailure()) return false;
1304
+ roots_[entry.index] = Map::cast(obj);
1305
+ }
1306
+
1307
+ obj = AllocateMap(STRING_TYPE, SeqTwoByteString::kAlignedSize);
1308
+ if (obj->IsFailure()) return false;
1309
+ set_undetectable_string_map(Map::cast(obj));
1310
+ Map::cast(obj)->set_is_undetectable();
1311
+
1312
+ obj = AllocateMap(ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
1313
+ if (obj->IsFailure()) return false;
1314
+ set_undetectable_ascii_string_map(Map::cast(obj));
1315
+ Map::cast(obj)->set_is_undetectable();
1316
+
1317
+ obj = AllocateMap(BYTE_ARRAY_TYPE, ByteArray::kAlignedSize);
1318
+ if (obj->IsFailure()) return false;
1319
+ set_byte_array_map(Map::cast(obj));
1320
+
1321
+ obj = AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
1322
+ if (obj->IsFailure()) return false;
1323
+ set_pixel_array_map(Map::cast(obj));
1324
+
1325
+ obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
1326
+ ExternalArray::kAlignedSize);
1327
+ if (obj->IsFailure()) return false;
1328
+ set_external_byte_array_map(Map::cast(obj));
1329
+
1330
+ obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE,
1331
+ ExternalArray::kAlignedSize);
1332
+ if (obj->IsFailure()) return false;
1333
+ set_external_unsigned_byte_array_map(Map::cast(obj));
1334
+
1335
+ obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE,
1336
+ ExternalArray::kAlignedSize);
1337
+ if (obj->IsFailure()) return false;
1338
+ set_external_short_array_map(Map::cast(obj));
1339
+
1340
+ obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE,
1341
+ ExternalArray::kAlignedSize);
1342
+ if (obj->IsFailure()) return false;
1343
+ set_external_unsigned_short_array_map(Map::cast(obj));
1344
+
1345
+ obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE,
1346
+ ExternalArray::kAlignedSize);
1347
+ if (obj->IsFailure()) return false;
1348
+ set_external_int_array_map(Map::cast(obj));
1349
+
1350
+ obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE,
1351
+ ExternalArray::kAlignedSize);
1352
+ if (obj->IsFailure()) return false;
1353
+ set_external_unsigned_int_array_map(Map::cast(obj));
1354
+
1355
+ obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE,
1356
+ ExternalArray::kAlignedSize);
1357
+ if (obj->IsFailure()) return false;
1358
+ set_external_float_array_map(Map::cast(obj));
1359
+
1360
+ obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
1361
+ if (obj->IsFailure()) return false;
1362
+ set_code_map(Map::cast(obj));
1363
+
1364
+ obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1365
+ JSGlobalPropertyCell::kSize);
1366
+ if (obj->IsFailure()) return false;
1367
+ set_global_property_cell_map(Map::cast(obj));
1368
+
1369
+ obj = AllocateMap(FILLER_TYPE, kPointerSize);
1370
+ if (obj->IsFailure()) return false;
1371
+ set_one_pointer_filler_map(Map::cast(obj));
1372
+
1373
+ obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
1374
+ if (obj->IsFailure()) return false;
1375
+ set_two_pointer_filler_map(Map::cast(obj));
1376
+
1377
+ for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
1378
+ const StructTable& entry = struct_table[i];
1379
+ obj = AllocateMap(entry.type, entry.size);
1380
+ if (obj->IsFailure()) return false;
1381
+ roots_[entry.index] = Map::cast(obj);
1382
+ }
1383
+
1384
+ obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1385
+ if (obj->IsFailure()) return false;
1386
+ set_hash_table_map(Map::cast(obj));
1387
+
1388
+ obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1389
+ if (obj->IsFailure()) return false;
1390
+ set_context_map(Map::cast(obj));
1391
+
1392
+ obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1393
+ if (obj->IsFailure()) return false;
1394
+ set_catch_context_map(Map::cast(obj));
1395
+
1396
+ obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1397
+ if (obj->IsFailure()) return false;
1398
+ set_global_context_map(Map::cast(obj));
1399
+
1400
+ obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
1401
+ if (obj->IsFailure()) return false;
1402
+ set_boilerplate_function_map(Map::cast(obj));
1403
+
1404
+ obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
1405
+ if (obj->IsFailure()) return false;
1406
+ set_shared_function_info_map(Map::cast(obj));
1407
+
1408
+ ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
1409
+ return true;
1410
+ }
1411
+
1412
+
1413
+ Object* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1414
+ // Statically ensure that it is safe to allocate heap numbers in paged
1415
+ // spaces.
1416
+ STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1417
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1418
+
1419
+ Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
1420
+ if (result->IsFailure()) return result;
1421
+
1422
+ HeapObject::cast(result)->set_map(heap_number_map());
1423
+ HeapNumber::cast(result)->set_value(value);
1424
+ return result;
1425
+ }
1426
+
1427
+
1428
+ Object* Heap::AllocateHeapNumber(double value) {
1429
+ // Use general version, if we're forced to always allocate.
1430
+ if (always_allocate()) return AllocateHeapNumber(value, TENURED);
1431
+
1432
+ // This version of AllocateHeapNumber is optimized for
1433
+ // allocation in new space.
1434
+ STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1435
+ ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1436
+ Object* result = new_space_.AllocateRaw(HeapNumber::kSize);
1437
+ if (result->IsFailure()) return result;
1438
+ HeapObject::cast(result)->set_map(heap_number_map());
1439
+ HeapNumber::cast(result)->set_value(value);
1440
+ return result;
1441
+ }
1442
+
1443
+
1444
+ Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
1445
+ Object* result = AllocateRawCell();
1446
+ if (result->IsFailure()) return result;
1447
+ HeapObject::cast(result)->set_map(global_property_cell_map());
1448
+ JSGlobalPropertyCell::cast(result)->set_value(value);
1449
+ return result;
1450
+ }
1451
+
1452
+
1453
+ Object* Heap::CreateOddball(Map* map,
1454
+ const char* to_string,
1455
+ Object* to_number) {
1456
+ Object* result = Allocate(map, OLD_DATA_SPACE);
1457
+ if (result->IsFailure()) return result;
1458
+ return Oddball::cast(result)->Initialize(to_string, to_number);
1459
+ }
1460
+
1461
+
1462
+ bool Heap::CreateApiObjects() {
1463
+ Object* obj;
1464
+
1465
+ obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1466
+ if (obj->IsFailure()) return false;
1467
+ set_neander_map(Map::cast(obj));
1468
+
1469
+ obj = Heap::AllocateJSObjectFromMap(neander_map());
1470
+ if (obj->IsFailure()) return false;
1471
+ Object* elements = AllocateFixedArray(2);
1472
+ if (elements->IsFailure()) return false;
1473
+ FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1474
+ JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1475
+ set_message_listeners(JSObject::cast(obj));
1476
+
1477
+ return true;
1478
+ }
1479
+
1480
+
1481
+ void Heap::CreateCEntryStub() {
1482
+ CEntryStub stub(1);
1483
+ set_c_entry_code(*stub.GetCode());
1484
+ }
1485
+
1486
+
1487
+ #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
1488
+ void Heap::CreateRegExpCEntryStub() {
1489
+ RegExpCEntryStub stub;
1490
+ set_re_c_entry_code(*stub.GetCode());
1491
+ }
1492
+ #endif
1493
+
1494
+
1495
+ void Heap::CreateCEntryDebugBreakStub() {
1496
+ CEntryDebugBreakStub stub;
1497
+ set_c_entry_debug_break_code(*stub.GetCode());
1498
+ }
1499
+
1500
+
1501
+ void Heap::CreateJSEntryStub() {
1502
+ JSEntryStub stub;
1503
+ set_js_entry_code(*stub.GetCode());
1504
+ }
1505
+
1506
+
1507
+ void Heap::CreateJSConstructEntryStub() {
1508
+ JSConstructEntryStub stub;
1509
+ set_js_construct_entry_code(*stub.GetCode());
1510
+ }
1511
+
1512
+
1513
+ void Heap::CreateFixedStubs() {
1514
+ // Here we create roots for fixed stubs. They are needed at GC
1515
+ // for cooking and uncooking (check out frames.cc).
1516
+ // This eliminates the need for doing dictionary lookup in the
1517
+ // stub cache for these stubs.
1518
+ HandleScope scope;
1519
+ // gcc-4.4 has problem generating correct code of following snippet:
1520
+ // { CEntryStub stub;
1521
+ // c_entry_code_ = *stub.GetCode();
1522
+ // }
1523
+ // { CEntryDebugBreakStub stub;
1524
+ // c_entry_debug_break_code_ = *stub.GetCode();
1525
+ // }
1526
+ // To workaround the problem, make separate functions without inlining.
1527
+ Heap::CreateCEntryStub();
1528
+ Heap::CreateCEntryDebugBreakStub();
1529
+ Heap::CreateJSEntryStub();
1530
+ Heap::CreateJSConstructEntryStub();
1531
+ #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
1532
+ Heap::CreateRegExpCEntryStub();
1533
+ #endif
1534
+ }
1535
+
1536
+
1537
+ bool Heap::CreateInitialObjects() {
1538
+ Object* obj;
1539
+
1540
+ // The -0 value must be set before NumberFromDouble works.
1541
+ obj = AllocateHeapNumber(-0.0, TENURED);
1542
+ if (obj->IsFailure()) return false;
1543
+ set_minus_zero_value(obj);
1544
+ ASSERT(signbit(minus_zero_value()->Number()) != 0);
1545
+
1546
+ obj = AllocateHeapNumber(OS::nan_value(), TENURED);
1547
+ if (obj->IsFailure()) return false;
1548
+ set_nan_value(obj);
1549
+
1550
+ obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1551
+ if (obj->IsFailure()) return false;
1552
+ set_undefined_value(obj);
1553
+ ASSERT(!InNewSpace(undefined_value()));
1554
+
1555
+ // Allocate initial symbol table.
1556
+ obj = SymbolTable::Allocate(kInitialSymbolTableSize);
1557
+ if (obj->IsFailure()) return false;
1558
+ // Don't use set_symbol_table() due to asserts.
1559
+ roots_[kSymbolTableRootIndex] = obj;
1560
+
1561
+ // Assign the print strings for oddballs after creating symbol table.
1562
+ Object* symbol = LookupAsciiSymbol("undefined");
1563
+ if (symbol->IsFailure()) return false;
1564
+ Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
1565
+ Oddball::cast(undefined_value())->set_to_number(nan_value());
1566
+
1567
+ // Assign the print strings for oddballs after creating symbol table.
1568
+ symbol = LookupAsciiSymbol("null");
1569
+ if (symbol->IsFailure()) return false;
1570
+ Oddball::cast(null_value())->set_to_string(String::cast(symbol));
1571
+ Oddball::cast(null_value())->set_to_number(Smi::FromInt(0));
1572
+
1573
+ // Allocate the null_value
1574
+ obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
1575
+ if (obj->IsFailure()) return false;
1576
+
1577
+ obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
1578
+ if (obj->IsFailure()) return false;
1579
+ set_true_value(obj);
1580
+
1581
+ obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
1582
+ if (obj->IsFailure()) return false;
1583
+ set_false_value(obj);
1584
+
1585
+ obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
1586
+ if (obj->IsFailure()) return false;
1587
+ set_the_hole_value(obj);
1588
+
1589
+ obj = CreateOddball(
1590
+ oddball_map(), "no_interceptor_result_sentinel", Smi::FromInt(-2));
1591
+ if (obj->IsFailure()) return false;
1592
+ set_no_interceptor_result_sentinel(obj);
1593
+
1594
+ obj = CreateOddball(oddball_map(), "termination_exception", Smi::FromInt(-3));
1595
+ if (obj->IsFailure()) return false;
1596
+ set_termination_exception(obj);
1597
+
1598
+ // Allocate the empty string.
1599
+ obj = AllocateRawAsciiString(0, TENURED);
1600
+ if (obj->IsFailure()) return false;
1601
+ set_empty_string(String::cast(obj));
1602
+
1603
+ for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
1604
+ obj = LookupAsciiSymbol(constant_symbol_table[i].contents);
1605
+ if (obj->IsFailure()) return false;
1606
+ roots_[constant_symbol_table[i].index] = String::cast(obj);
1607
+ }
1608
+
1609
+ // Allocate the hidden symbol which is used to identify the hidden properties
1610
+ // in JSObjects. The hash code has a special value so that it will not match
1611
+ // the empty string when searching for the property. It cannot be part of the
1612
+ // loop above because it needs to be allocated manually with the special
1613
+ // hash code in place. The hash code for the hidden_symbol is zero to ensure
1614
+ // that it will always be at the first entry in property descriptors.
1615
+ obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
1616
+ if (obj->IsFailure()) return false;
1617
+ hidden_symbol_ = String::cast(obj);
1618
+
1619
+ // Allocate the proxy for __proto__.
1620
+ obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
1621
+ if (obj->IsFailure()) return false;
1622
+ set_prototype_accessors(Proxy::cast(obj));
1623
+
1624
+ // Allocate the code_stubs dictionary. The initial size is set to avoid
1625
+ // expanding the dictionary during bootstrapping.
1626
+ obj = NumberDictionary::Allocate(128);
1627
+ if (obj->IsFailure()) return false;
1628
+ set_code_stubs(NumberDictionary::cast(obj));
1629
+
1630
+ // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
1631
+ // is set to avoid expanding the dictionary during bootstrapping.
1632
+ obj = NumberDictionary::Allocate(64);
1633
+ if (obj->IsFailure()) return false;
1634
+ set_non_monomorphic_cache(NumberDictionary::cast(obj));
1635
+
1636
+ CreateFixedStubs();
1637
+
1638
+ if (InitializeNumberStringCache()->IsFailure()) return false;
1639
+
1640
+ // Allocate cache for single character strings.
1641
+ obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
1642
+ if (obj->IsFailure()) return false;
1643
+ set_single_character_string_cache(FixedArray::cast(obj));
1644
+
1645
+ // Allocate cache for external strings pointing to native source code.
1646
+ obj = AllocateFixedArray(Natives::GetBuiltinsCount());
1647
+ if (obj->IsFailure()) return false;
1648
+ set_natives_source_cache(FixedArray::cast(obj));
1649
+
1650
+ // Handling of script id generation is in Factory::NewScript.
1651
+ set_last_script_id(undefined_value());
1652
+
1653
+ // Initialize keyed lookup cache.
1654
+ KeyedLookupCache::Clear();
1655
+
1656
+ // Initialize context slot cache.
1657
+ ContextSlotCache::Clear();
1658
+
1659
+ // Initialize descriptor cache.
1660
+ DescriptorLookupCache::Clear();
1661
+
1662
+ // Initialize compilation cache.
1663
+ CompilationCache::Clear();
1664
+
1665
+ return true;
1666
+ }
1667
+
1668
+
1669
+ Object* Heap::InitializeNumberStringCache() {
1670
+ // Compute the size of the number string cache based on the max heap size.
1671
+ // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
1672
+ // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
1673
+ int number_string_cache_size = max_semispace_size_ / 512;
1674
+ number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
1675
+ Object* obj = AllocateFixedArray(number_string_cache_size * 2);
1676
+ if (!obj->IsFailure()) set_number_string_cache(FixedArray::cast(obj));
1677
+ return obj;
1678
+ }
1679
+
1680
+
1681
+ void Heap::FlushNumberStringCache() {
1682
+ // Flush the number to string cache.
1683
+ int len = number_string_cache()->length();
1684
+ for (int i = 0; i < len; i++) {
1685
+ number_string_cache()->set_undefined(i);
1686
+ }
1687
+ }
1688
+
1689
+
1690
+ static inline int double_get_hash(double d) {
1691
+ DoubleRepresentation rep(d);
1692
+ return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
1693
+ }
1694
+
1695
+
1696
+ static inline int smi_get_hash(Smi* smi) {
1697
+ return smi->value();
1698
+ }
1699
+
1700
+
1701
+ Object* Heap::GetNumberStringCache(Object* number) {
1702
+ int hash;
1703
+ int mask = (number_string_cache()->length() >> 1) - 1;
1704
+ if (number->IsSmi()) {
1705
+ hash = smi_get_hash(Smi::cast(number)) & mask;
1706
+ } else {
1707
+ hash = double_get_hash(number->Number()) & mask;
1708
+ }
1709
+ Object* key = number_string_cache()->get(hash * 2);
1710
+ if (key == number) {
1711
+ return String::cast(number_string_cache()->get(hash * 2 + 1));
1712
+ } else if (key->IsHeapNumber() &&
1713
+ number->IsHeapNumber() &&
1714
+ key->Number() == number->Number()) {
1715
+ return String::cast(number_string_cache()->get(hash * 2 + 1));
1716
+ }
1717
+ return undefined_value();
1718
+ }
1719
+
1720
+
1721
+ void Heap::SetNumberStringCache(Object* number, String* string) {
1722
+ int hash;
1723
+ int mask = (number_string_cache()->length() >> 1) - 1;
1724
+ if (number->IsSmi()) {
1725
+ hash = smi_get_hash(Smi::cast(number)) & mask;
1726
+ number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER);
1727
+ } else {
1728
+ hash = double_get_hash(number->Number()) & mask;
1729
+ number_string_cache()->set(hash * 2, number);
1730
+ }
1731
+ number_string_cache()->set(hash * 2 + 1, string);
1732
+ }
1733
+
1734
+
1735
// Converts |value| to its most compact heap representation: a Smi when the
// value is exactly representable as a tagged small integer, otherwise a
// HeapNumber. When |new_object| is false, the canonical preallocated -0.0
// and NaN objects are reused instead of allocating fresh ones.
Object* Heap::SmiOrNumberFromDouble(double value,
                                    bool new_object,
                                    PretenureFlag pretenure) {
  // We need to distinguish the minus zero value and this cannot be
  // done after conversion to int. Doing this by comparing bit
  // patterns is faster than using fpclassify() et al.
  static const DoubleRepresentation plus_zero(0.0);
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation nan(OS::nan_value());
  ASSERT(minus_zero_value() != NULL);
  ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));

  DoubleRepresentation rep(value);
  if (rep.bits == plus_zero.bits) return Smi::FromInt(0);  // not uncommon
  if (rep.bits == minus_zero.bits) {
    return new_object ? AllocateHeapNumber(-0.0, pretenure)
                      : minus_zero_value();
  }
  if (rep.bits == nan.bits) {
    return new_object
        ? AllocateHeapNumber(OS::nan_value(), pretenure)
        : nan_value();
  }

  // Try to represent the value as a tagged small integer.
  int int_value = FastD2I(value);
  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
    return Smi::FromInt(int_value);
  }

  // Materialize the value in the heap.
  return AllocateHeapNumber(value, pretenure);
}
1768
+
1769
+
1770
// Returns a String with the decimal representation of |number| (a Smi or
// HeapNumber), consulting and populating the number->string cache.
// Returns a Failure if string allocation fails.
Object* Heap::NumberToString(Object* number) {
  Object* cached = GetNumberStringCache(number);
  if (cached != undefined_value()) {
    return cached;
  }

  // Scratch buffer for the textual form (assumed large enough for any
  // int or double rendering produced below).
  char arr[100];
  Vector<char> buffer(arr, ARRAY_SIZE(arr));
  const char* str;
  if (number->IsSmi()) {
    int num = Smi::cast(number)->value();
    str = IntToCString(num, buffer);
  } else {
    double num = HeapNumber::cast(number)->value();
    str = DoubleToCString(num, buffer);
  }
  Object* result = AllocateStringFromAscii(CStrVector(str));

  // Only cache on successful allocation; a Failure is returned as-is.
  if (!result->IsFailure()) {
    SetNumberStringCache(number, String::cast(result));
  }
  return result;
}
1793
+
1794
+
1795
+ Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
1796
+ return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
1797
+ }
1798
+
1799
+
1800
// Maps an ExternalArrayType onto the root-list index holding the
// corresponding external-array Map. Any other type is a programming error.
Heap::RootListIndex Heap::RootIndexForExternalArrayType(
    ExternalArrayType array_type) {
  switch (array_type) {
    case kExternalByteArray:
      return kExternalByteArrayMapRootIndex;
    case kExternalUnsignedByteArray:
      return kExternalUnsignedByteArrayMapRootIndex;
    case kExternalShortArray:
      return kExternalShortArrayMapRootIndex;
    case kExternalUnsignedShortArray:
      return kExternalUnsignedShortArrayMapRootIndex;
    case kExternalIntArray:
      return kExternalIntArrayMapRootIndex;
    case kExternalUnsignedIntArray:
      return kExternalUnsignedIntArrayMapRootIndex;
    case kExternalFloatArray:
      return kExternalFloatArrayMapRootIndex;
    default:
      UNREACHABLE();
      // Unreached in debug builds; keeps release builds well-formed.
      return kUndefinedValueRootIndex;
  }
}
1822
+
1823
+
1824
+ Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) {
1825
+ return SmiOrNumberFromDouble(value,
1826
+ true /* number object must be new */,
1827
+ pretenure);
1828
+ }
1829
+
1830
+
1831
+ Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
1832
+ return SmiOrNumberFromDouble(value,
1833
+ false /* use preallocated NaN, -0.0 */,
1834
+ pretenure);
1835
+ }
1836
+
1837
+
1838
// Allocates a Proxy object wrapping the raw address |proxy|.
// Returns a Failure on allocation failure.
Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
  // Statically ensure that it is safe to allocate proxies in paged spaces.
  STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  Object* result = Allocate(proxy_map(), space);
  if (result->IsFailure()) return result;

  Proxy::cast(result)->set_proxy(proxy);
  return result;
}
1848
+
1849
+
1850
// Allocates a SharedFunctionInfo named |name| with every field reset to a
// neutral default. The code slot points at the Illegal builtin until real
// code is attached by the compiler.
Object* Heap::AllocateSharedFunctionInfo(Object* name) {
  Object* result = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
  if (result->IsFailure()) return result;

  SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
  share->set_name(name);
  // Calling before compilation must trap, hence the Illegal builtin.
  Code* illegal = Builtins::builtin(Builtins::Illegal);
  share->set_code(illegal);
  Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
  share->set_construct_stub(construct_stub);
  share->set_expected_nof_properties(0);
  share->set_length(0);
  share->set_formal_parameter_count(0);
  share->set_instance_class_name(Object_symbol());
  share->set_function_data(undefined_value());
  share->set_script(undefined_value());
  share->set_start_position_and_type(0);
  share->set_debug_info(undefined_value());
  share->set_inferred_name(empty_string());
  share->set_compiler_hints(0);
  share->set_this_property_assignments_count(0);
  share->set_this_property_assignments(undefined_value());
  return result;
}
1874
+
1875
+
1876
// Returns true for a character in a range. Both limits are inclusive.
static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
  // A single unsigned comparison suffices: if character < from, the
  // subtraction wraps around to a huge value and the test fails.
  const uint32_t span = to - from;
  return (character - from) <= span;
}
1881
+
1882
+
1883
// Returns a two-character string for code units |c1|,|c2|: an existing
// symbol when one is already in the symbol table, otherwise a freshly
// allocated sequential (ASCII or two-byte) string.
static inline Object* MakeOrFindTwoCharacterString(uint32_t c1, uint32_t c2) {
  String* symbol;
  // Numeric strings have a different hash algorithm not known by
  // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
  if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
      Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
    return symbol;
  // Now we know the length is 2, we might as well make use of that fact
  // when building the new string.
  } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) {  // We can do this
    ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1));  // because of this.
    Object* result = Heap::AllocateRawAsciiString(2);
    if (result->IsFailure()) return result;
    char* dest = SeqAsciiString::cast(result)->GetChars();
    dest[0] = c1;
    dest[1] = c2;
    return result;
  } else {
    // At least one code unit exceeds the ASCII range: use a two-byte string.
    Object* result = Heap::AllocateRawTwoByteString(2);
    if (result->IsFailure()) return result;
    uc16* dest = SeqTwoByteString::cast(result)->GetChars();
    dest[0] = c1;
    dest[1] = c2;
    return result;
  }
}
1909
+
1910
+
1911
// Concatenates |first| and |second|. Short results are flattened eagerly
// into a sequential string; longer results become a ConsString pointing at
// both halves. Returns a Failure on allocation failure, or an out-of-memory
// failure when the combined length exceeds String::kMaxLength.
Object* Heap::AllocateConsString(String* first, String* second) {
  int first_length = first->length();
  if (first_length == 0) {
    return second;
  }

  int second_length = second->length();
  if (second_length == 0) {
    return first;
  }

  int length = first_length + second_length;

  // Optimization for 2-byte strings often used as keys in a decompression
  // dictionary. Check whether we already have the string in the symbol
  // table to prevent creation of many unnecessary strings.
  if (length == 2) {
    unsigned c1 = first->Get(0);
    unsigned c2 = second->Get(0);
    return MakeOrFindTwoCharacterString(c1, c2);
  }

  bool is_ascii = first->IsAsciiRepresentation()
      && second->IsAsciiRepresentation();

  // Make sure that an out of memory exception is thrown if the length
  // of the new cons string is too large.
  // (length < 0 catches signed overflow of first_length + second_length.)
  if (length > String::kMaxLength || length < 0) {
    Top::context()->mark_out_of_memory();
    return Failure::OutOfMemoryException();
  }

  // If the resulting string is small make a flat string.
  if (length < String::kMinNonFlatLength) {
    ASSERT(first->IsFlat());
    ASSERT(second->IsFlat());
    if (is_ascii) {
      Object* result = AllocateRawAsciiString(length);
      if (result->IsFailure()) return result;
      // Copy the characters into the new object.
      char* dest = SeqAsciiString::cast(result)->GetChars();
      // Copy first part.
      const char* src;
      if (first->IsExternalString()) {
        src = ExternalAsciiString::cast(first)->resource()->data();
      } else {
        src = SeqAsciiString::cast(first)->GetChars();
      }
      for (int i = 0; i < first_length; i++) *dest++ = src[i];
      // Copy second part.
      if (second->IsExternalString()) {
        src = ExternalAsciiString::cast(second)->resource()->data();
      } else {
        src = SeqAsciiString::cast(second)->GetChars();
      }
      for (int i = 0; i < second_length; i++) *dest++ = src[i];
      return result;
    } else {
      Object* result = AllocateRawTwoByteString(length);
      if (result->IsFailure()) return result;
      // Copy the characters into the new object.
      uc16* dest = SeqTwoByteString::cast(result)->GetChars();
      String::WriteToFlat(first, dest, 0, first_length);
      String::WriteToFlat(second, dest + first_length, 0, second_length);
      return result;
    }
  }

  Map* map = is_ascii ? cons_ascii_string_map() : cons_string_map();

  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;
  ConsString* cons_string = ConsString::cast(result);
  WriteBarrierMode mode = cons_string->GetWriteBarrierMode();
  cons_string->set_length(length);
  cons_string->set_hash_field(String::kEmptyHashField);
  cons_string->set_first(first, mode);
  cons_string->set_second(second, mode);
  return result;
}
1991
+
1992
+
1993
// Allocates a flat copy of buffer[start..end) (|start| inclusive, |end|
// exclusive). Lengths 1 and 2 are served from caches / the symbol table.
Object* Heap::AllocateSubString(String* buffer,
                                int start,
                                int end) {
  int length = end - start;

  if (length == 1) {
    return Heap::LookupSingleCharacterStringFromCode(
        buffer->Get(start));
  } else if (length == 2) {
    // Optimization for 2-byte strings often used as keys in a decompression
    // dictionary. Check whether we already have the string in the symbol
    // table to prevent creation of many unnecessary strings.
    unsigned c1 = buffer->Get(start);
    unsigned c2 = buffer->Get(start + 1);
    return MakeOrFindTwoCharacterString(c1, c2);
  }

  // Make an attempt to flatten the buffer to reduce access time.
  if (!buffer->IsFlat()) {
    buffer->TryFlatten();
  }

  Object* result = buffer->IsAsciiRepresentation()
      ? AllocateRawAsciiString(length)
      : AllocateRawTwoByteString(length);
  if (result->IsFailure()) return result;
  String* string_result = String::cast(result);

  // Copy the characters into the new object.
  if (buffer->IsAsciiRepresentation()) {
    ASSERT(string_result->IsAsciiRepresentation());
    char* dest = SeqAsciiString::cast(string_result)->GetChars();
    String::WriteToFlat(buffer, dest, start, end);
  } else {
    ASSERT(string_result->IsTwoByteRepresentation());
    uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
    String::WriteToFlat(buffer, dest, start, end);
  }

  return result;
}
2034
+
2035
+
2036
// Wraps an externally managed ASCII buffer in an ExternalAsciiString.
// The resource's bytes are not copied; the resource must outlive the string.
Object* Heap::AllocateExternalStringFromAscii(
    ExternalAsciiString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    Top::context()->mark_out_of_memory();
    return Failure::OutOfMemoryException();
  }

  Map* map = external_ascii_string_map();
  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;

  ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
  external_string->set_length(static_cast<int>(length));
  external_string->set_hash_field(String::kEmptyHashField);
  external_string->set_resource(resource);

  return result;
}
2055
+
2056
+
2057
// Wraps an externally managed two-byte (UTF-16) buffer in an
// ExternalTwoByteString. The bytes are not copied; the resource must
// outlive the string.
Object* Heap::AllocateExternalStringFromTwoByte(
    ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    Top::context()->mark_out_of_memory();
    return Failure::OutOfMemoryException();
  }

  Map* map = Heap::external_string_map();
  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;

  ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
  external_string->set_length(static_cast<int>(length));
  external_string->set_hash_field(String::kEmptyHashField);
  external_string->set_resource(resource);

  return result;
}
2076
+
2077
+
2078
// Returns a length-1 string for |code|. ASCII codes are served from (and
// populate) the single-character string cache as symbols; non-ASCII codes
// allocate a fresh two-byte string each time.
Object* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= String::kMaxAsciiCharCode) {
    Object* value = Heap::single_character_string_cache()->get(code);
    if (value != Heap::undefined_value()) return value;  // cache hit

    char buffer[1];
    buffer[0] = static_cast<char>(code);
    Object* result = LookupSymbol(Vector<const char>(buffer, 1));

    if (result->IsFailure()) return result;
    Heap::single_character_string_cache()->set(code, result);
    return result;
  }

  Object* result = Heap::AllocateRawTwoByteString(1);
  if (result->IsFailure()) return result;
  String* answer = String::cast(result);
  answer->Set(0, code);
  return answer;
}
2098
+
2099
+
2100
// Allocates an uninitialized ByteArray of |length| bytes, honoring
// |pretenure|. NOT_TENURED requests delegate to the overload below.
Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
  if (length < 0 || length > ByteArray::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  if (pretenure == NOT_TENURED) {
    return AllocateByteArray(length);
  }
  int size = ByteArray::SizeFor(length);
  // Tenured byte arrays go to old data space unless too large for a page.
  Object* result = (size <= MaxObjectSizeInPagedSpace())
      ? old_data_space_->AllocateRaw(size)
      : lo_space_->AllocateRaw(size);
  if (result->IsFailure()) return result;

  reinterpret_cast<Array*>(result)->set_map(byte_array_map());
  reinterpret_cast<Array*>(result)->set_length(length);
  return result;
}
2117
+
2118
+
2119
// Allocates an uninitialized ByteArray of |length| bytes in new space
// (or large-object space when it does not fit on a page).
Object* Heap::AllocateByteArray(int length) {
  if (length < 0 || length > ByteArray::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  int size = ByteArray::SizeFor(length);
  AllocationSpace space =
      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE;
  // Retry in old data space if new space is exhausted.
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  reinterpret_cast<Array*>(result)->set_map(byte_array_map());
  reinterpret_cast<Array*>(result)->set_length(length);
  return result;
}
2133
+
2134
+
2135
// Overwrites |size| bytes at |addr| with a filler object so that heap
// iteration stays well-formed: a one-pointer filler for single-word holes,
// otherwise a ByteArray spanning the hole.
void Heap::CreateFillerObjectAt(Address addr, int size) {
  if (size == 0) return;
  HeapObject* filler = HeapObject::FromAddress(addr);
  if (size == kPointerSize) {
    filler->set_map(Heap::one_pointer_filler_map());
  } else {
    filler->set_map(Heap::byte_array_map());
    ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
  }
}
2145
+
2146
+
2147
// Allocates a PixelArray header of |length| elements whose storage lives
// in the externally managed buffer |external_pointer| (not copied).
Object* Heap::AllocatePixelArray(int length,
                                 uint8_t* external_pointer,
                                 PretenureFlag pretenure) {
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  Object* result = AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
  reinterpret_cast<PixelArray*>(result)->set_length(length);
  reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);

  return result;
}
2160
+
2161
+
2162
// Allocates an ExternalArray header of |length| elements of |array_type|
// backed by the externally managed buffer |external_pointer| (not copied).
Object* Heap::AllocateExternalArray(int length,
                                    ExternalArrayType array_type,
                                    void* external_pointer,
                                    PretenureFlag pretenure) {
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  Object* result = AllocateRaw(ExternalArray::kAlignedSize,
                               space,
                               OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  reinterpret_cast<ExternalArray*>(result)->set_map(
      MapForExternalArrayType(array_type));
  reinterpret_cast<ExternalArray*>(result)->set_length(length);
  reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
      external_pointer);

  return result;
}
2180
+
2181
+
2182
// Materializes a Code object from an assembler's CodeDesc, appending the
// serialized scope info (if any). |self_reference|, when non-null, is a
// handle that is patched to point at the new Code object so the generated
// code can refer to itself.
Object* Heap::CreateCode(const CodeDesc& desc,
                         ZoneScopeInfo* sinfo,
                         Code::Flags flags,
                         Handle<Object> self_reference) {
  // Compute size
  int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
  int sinfo_size = 0;
  // Serialize(NULL) only measures; the real write happens below.
  if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
  int obj_size = Code::SizeFor(body_size, sinfo_size);
  ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
  Object* result;
  if (obj_size > MaxObjectSizeInPagedSpace()) {
    result = lo_space_->AllocateRawCode(obj_size);
  } else {
    result = code_space_->AllocateRaw(obj_size);
  }

  if (result->IsFailure()) return result;

  // Initialize the object
  HeapObject::cast(result)->set_map(code_map());
  Code* code = Code::cast(result);
  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
  code->set_instruction_size(desc.instr_size);
  code->set_relocation_size(desc.reloc_size);
  code->set_sinfo_size(sinfo_size);
  code->set_flags(flags);
  // Allow self references to created code object by patching the handle to
  // point to the newly allocated Code object.
  if (!self_reference.is_null()) {
    *(self_reference.location()) = code;
  }
  // Migrate generated code.
  // The generated code can contain Object** values (typically from handles)
  // that are dereferenced during the copy to point directly to the actual heap
  // objects. These pointers can include references to the code object itself,
  // through the self_reference parameter.
  code->CopyFrom(desc);
  if (sinfo != NULL) sinfo->Serialize(code);  // write scope info

#ifdef DEBUG
  code->Verify();
#endif
  return code;
}
2227
+
2228
+
2229
+ Object* Heap::CopyCode(Code* code) {
2230
+ // Allocate an object the same size as the code object.
2231
+ int obj_size = code->Size();
2232
+ Object* result;
2233
+ if (obj_size > MaxObjectSizeInPagedSpace()) {
2234
+ result = lo_space_->AllocateRawCode(obj_size);
2235
+ } else {
2236
+ result = code_space_->AllocateRaw(obj_size);
2237
+ }
2238
+
2239
+ if (result->IsFailure()) return result;
2240
+
2241
+ // Copy code object.
2242
+ Address old_addr = code->address();
2243
+ Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2244
+ CopyBlock(reinterpret_cast<Object**>(new_addr),
2245
+ reinterpret_cast<Object**>(old_addr),
2246
+ obj_size);
2247
+ // Relocate the copy.
2248
+ Code* new_code = Code::cast(result);
2249
+ ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
2250
+ new_code->Relocate(new_addr - old_addr);
2251
+ return new_code;
2252
+ }
2253
+
2254
+
2255
// Allocates an uninitialized object described by |map| in |space| and
// installs the map. Must not be called during GC or for Map objects.
Object* Heap::Allocate(Map* map, AllocationSpace space) {
  ASSERT(gc_state_ == NOT_IN_GC);
  ASSERT(map->instance_type() != MAP_TYPE);
  // If allocation failures are disallowed, we may allocate in a different
  // space when new space is full and the object is not a large object.
  AllocationSpace retry_space =
      (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
  Object* result =
      AllocateRaw(map->instance_size(), space, retry_space);
  if (result->IsFailure()) return result;
  HeapObject::cast(result)->set_map(map);
#ifdef ENABLE_LOGGING_AND_PROFILING
  ProducerHeapProfile::RecordJSObjectAllocation(result);
#endif
  return result;
}
2271
+
2272
+
2273
// Resets all fields of a freshly allocated JSFunction: empty properties and
// elements, the given shared info and prototype, no context, empty literals.
Object* Heap::InitializeFunction(JSFunction* function,
                                 SharedFunctionInfo* shared,
                                 Object* prototype) {
  ASSERT(!prototype->IsMap());
  function->initialize_properties();
  function->initialize_elements();
  function->set_shared(shared);
  function->set_prototype_or_initial_map(prototype);
  function->set_context(undefined_value());
  // empty_fixed_array() is immortal immovable, so no write barrier needed.
  function->set_literals(empty_fixed_array(), SKIP_WRITE_BARRIER);
  return function;
}
2285
+
2286
+
2287
// Allocates the default prototype object for |function| and wires its
// "constructor" property back to the function.
Object* Heap::AllocateFunctionPrototype(JSFunction* function) {
  // Allocate the prototype. Make sure to use the object function
  // from the function's context, since the function can be from a
  // different context.
  JSFunction* object_function =
      function->context()->global_context()->object_function();
  Object* prototype = AllocateJSObject(object_function);
  if (prototype->IsFailure()) return prototype;
  // When creating the prototype for the function we must set its
  // constructor to the function.
  Object* result =
      JSObject::cast(prototype)->SetProperty(constructor_symbol(),
                                             function,
                                             DONT_ENUM);
  if (result->IsFailure()) return result;
  return prototype;
}
2304
+
2305
+
2306
// Allocates a JSFunction with the given map, shared info and prototype,
// fully initialized via InitializeFunction().
Object* Heap::AllocateFunction(Map* function_map,
                               SharedFunctionInfo* shared,
                               Object* prototype,
                               PretenureFlag pretenure) {
  AllocationSpace space =
      (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
  Object* result = Allocate(function_map, space);
  if (result->IsFailure()) return result;
  return InitializeFunction(JSFunction::cast(result), shared, prototype);
}
2316
+
2317
+
2318
// Allocates an arguments object for |callee| with |length| by cloning the
// context's arguments boilerplate and patching the callee/length slots.
Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
  // To get fast allocation and map sharing for arguments objects we
  // allocate them based on an arguments boilerplate.

  // This calls Copy directly rather than using Heap::AllocateRaw so we
  // duplicate the check here.
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);

  JSObject* boilerplate =
      Top::context()->global_context()->arguments_boilerplate();

  // Check that the size of the boilerplate matches our
  // expectations. The ArgumentsAccessStub::GenerateNewObject relies
  // on the size being a known constant.
  ASSERT(kArgumentsObjectSize == boilerplate->map()->instance_size());

  // Do the allocation.
  Object* result =
      AllocateRaw(kArgumentsObjectSize, NEW_SPACE, OLD_POINTER_SPACE);
  if (result->IsFailure()) return result;

  // Copy the content. The arguments boilerplate doesn't have any
  // fields that point to new space so it's safe to skip the write
  // barrier here.
  CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(result)->address()),
            reinterpret_cast<Object**>(boilerplate->address()),
            kArgumentsObjectSize);

  // Set the two properties.
  JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
                                                callee);
  // length is a Smi, never a heap pointer, so no write barrier needed.
  JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
                                                Smi::FromInt(length),
                                                SKIP_WRITE_BARRIER);

  // Check the state of the object
  ASSERT(JSObject::cast(result)->HasFastProperties());
  ASSERT(JSObject::cast(result)->HasFastElements());

  return result;
}
2359
+
2360
+
2361
// Creates the initial map used when |fun| is invoked as a constructor,
// sizing it from the function's "this.x = ..." assignment analysis and
// pre-declaring field descriptors for those simple assignments.
Object* Heap::AllocateInitialMap(JSFunction* fun) {
  ASSERT(!fun->has_initial_map());

  // First create a new map with the size and number of in-object properties
  // suggested by the function.
  int instance_size = fun->shared()->CalculateInstanceSize();
  int in_object_properties = fun->shared()->CalculateInObjectProperties();
  Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
  if (map_obj->IsFailure()) return map_obj;

  // Fetch or allocate prototype.
  Object* prototype;
  if (fun->has_instance_prototype()) {
    prototype = fun->instance_prototype();
  } else {
    prototype = AllocateFunctionPrototype(fun);
    if (prototype->IsFailure()) return prototype;
  }
  Map* map = Map::cast(map_obj);
  map->set_inobject_properties(in_object_properties);
  map->set_unused_property_fields(in_object_properties);
  map->set_prototype(prototype);

  // If the function has only simple this property assignments add field
  // descriptors for these to the initial map as the object cannot be
  // constructed without having these properties.
  ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
  if (fun->shared()->has_only_simple_this_property_assignments() &&
      fun->shared()->this_property_assignments_count() > 0) {
    int count = fun->shared()->this_property_assignments_count();
    // Clamp to the number of in-object slots actually available.
    if (count > in_object_properties) {
      count = in_object_properties;
    }
    Object* descriptors_obj = DescriptorArray::Allocate(count);
    if (descriptors_obj->IsFailure()) return descriptors_obj;
    DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj);
    for (int i = 0; i < count; i++) {
      String* name = fun->shared()->GetThisPropertyAssignmentName(i);
      ASSERT(name->IsSymbol());
      FieldDescriptor field(name, i, NONE);
      descriptors->Set(i, &field);
    }
    descriptors->Sort();
    map->set_instance_descriptors(descriptors);
    map->set_pre_allocated_property_fields(count);
    map->set_unused_property_fields(in_object_properties - count);
  }
  return map;
}
2410
+
2411
+
2412
// Initializes a freshly allocated JSObject: installs the property backing
// store, empty elements, and fills the in-object body.
void Heap::InitializeJSObjectFromMap(JSObject* obj,
                                     FixedArray* properties,
                                     Map* map) {
  obj->set_properties(properties);
  obj->initialize_elements();
  // TODO(1240798): Initialize the object's body using valid initial values
  // according to the object's initial map. For example, if the map's
  // instance type is JS_ARRAY_TYPE, the length field should be initialized
  // to a number (eg, Smi::FromInt(0)) and the elements initialized to a
  // fixed array (eg, Heap::empty_fixed_array()). Currently, the object
  // verification code has to cope with (temporarily) invalid objects. See
  // for example, JSArray::JSArrayVerify).
  obj->InitializeBody(map->instance_size());
}
2426
+
2427
+
2428
// Allocates and initializes a JSObject described by |map|, including the
// out-of-object property backing store. Not for functions or global
// objects, which have dedicated allocators.
Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
  // JSFunctions should be allocated using AllocateFunction to be
  // properly initialized.
  ASSERT(map->instance_type() != JS_FUNCTION_TYPE);

  // Both types of global objects should be allocated using
  // AllocateGlobalObject to be properly initialized.
  ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
  ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);

  // Allocate the backing storage for the properties.
  int prop_size =
      map->pre_allocated_property_fields() +
      map->unused_property_fields() -
      map->inobject_properties();
  ASSERT(prop_size >= 0);
  Object* properties = AllocateFixedArray(prop_size, pretenure);
  if (properties->IsFailure()) return properties;

  // Allocate the JSObject.
  AllocationSpace space =
      (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
  if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
  Object* obj = Allocate(map, space);
  if (obj->IsFailure()) return obj;

  // Initialize the JSObject.
  InitializeJSObjectFromMap(JSObject::cast(obj),
                            FixedArray::cast(properties),
                            map);
  return obj;
}
2460
+
2461
+
2462
// Allocates a JSObject as if created by `new constructor()`, lazily
// creating the constructor's initial map on first use.
Object* Heap::AllocateJSObject(JSFunction* constructor,
                               PretenureFlag pretenure) {
  // Allocate the initial map if absent.
  if (!constructor->has_initial_map()) {
    Object* initial_map = AllocateInitialMap(constructor);
    if (initial_map->IsFailure()) return initial_map;
    constructor->set_initial_map(Map::cast(initial_map));
    Map::cast(initial_map)->set_constructor(constructor);
  }
  // Allocate the object based on the constructors initial map.
  Object* result =
      AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
  // Make sure result is NOT a global object if valid.
  ASSERT(result->IsFailure() || !result->IsGlobalObject());
  return result;
}
2478
+
2479
+
2480
// Allocates a global object for |constructor| in normalized (dictionary)
// property mode: accessor descriptors from the initial map are moved into
// a StringDictionary of JSGlobalPropertyCells, and the object gets its own
// descriptor-free map copy.
Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
  ASSERT(constructor->has_initial_map());
  Map* map = constructor->initial_map();

  // Make sure no field properties are described in the initial map.
  // This guarantees us that normalizing the properties does not
  // require us to change property values to JSGlobalPropertyCells.
  ASSERT(map->NextFreePropertyIndex() == 0);

  // Make sure we don't have a ton of pre-allocated slots in the
  // global objects. They will be unused once we normalize the object.
  ASSERT(map->unused_property_fields() == 0);
  ASSERT(map->inobject_properties() == 0);

  // Initial size of the backing store to avoid resize of the storage during
  // bootstrapping. The size differs between the JS global object and the
  // builtins object.
  int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;

  // Allocate a dictionary object for backing storage.
  Object* obj =
      StringDictionary::Allocate(
          map->NumberOfDescribedProperties() * 2 + initial_size);
  if (obj->IsFailure()) return obj;
  StringDictionary* dictionary = StringDictionary::cast(obj);

  // The global object might be created from an object template with accessors.
  // Fill these accessors into the dictionary.
  DescriptorArray* descs = map->instance_descriptors();
  for (int i = 0; i < descs->number_of_descriptors(); i++) {
    PropertyDetails details = descs->GetDetails(i);
    ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
    PropertyDetails d =
        PropertyDetails(details.attributes(), CALLBACKS, details.index());
    Object* value = descs->GetCallbacksObject(i);
    value = Heap::AllocateJSGlobalPropertyCell(value);
    if (value->IsFailure()) return value;

    Object* result = dictionary->Add(descs->GetKey(i), value, d);
    if (result->IsFailure()) return result;
    // Add may have reallocated the dictionary; keep the latest version.
    dictionary = StringDictionary::cast(result);
  }

  // Allocate the global object and initialize it with the backing store.
  obj = Allocate(map, OLD_POINTER_SPACE);
  if (obj->IsFailure()) return obj;
  JSObject* global = JSObject::cast(obj);
  InitializeJSObjectFromMap(global, dictionary, map);

  // Create a new map for the global object.
  obj = map->CopyDropDescriptors();
  if (obj->IsFailure()) return obj;
  Map* new_map = Map::cast(obj);

  // Setup the global object as a normalized object.
  global->set_map(new_map);
  global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
  global->set_properties(dictionary);

  // Make sure result is a global object with properties in dictionary.
  ASSERT(global->IsGlobalObject());
  ASSERT(!global->HasFastProperties());
  return global;
}
2544
+
2545
+
2546
// Makes a shallow clone of |source|: the object body is bit-copied and the
// elements/properties fixed arrays are copied so the clone owns them.
// Must not be used on functions (their literals array would be shared).
Object* Heap::CopyJSObject(JSObject* source) {
  // Never used to copy functions. If functions need to be copied we
  // have to be careful to clear the literals array.
  ASSERT(!source->IsJSFunction());

  // Make the clone.
  Map* map = source->map();
  int object_size = map->instance_size();
  Object* clone;

  // If we're forced to always allocate, we use the general allocation
  // functions which may leave us with an object in old space.
  if (always_allocate()) {
    clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
    if (clone->IsFailure()) return clone;
    Address clone_address = HeapObject::cast(clone)->address();
    CopyBlock(reinterpret_cast<Object**>(clone_address),
              reinterpret_cast<Object**>(source->address()),
              object_size);
    // Update write barrier for all fields that lie beyond the header.
    for (int offset = JSObject::kHeaderSize;
         offset < object_size;
         offset += kPointerSize) {
      RecordWrite(clone_address, offset);
    }
  } else {
    clone = new_space_.AllocateRaw(object_size);
    if (clone->IsFailure()) return clone;
    ASSERT(Heap::InNewSpace(clone));
    // Since we know the clone is allocated in new space, we can copy
    // the contents without worrying about updating the write barrier.
    CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
              reinterpret_cast<Object**>(source->address()),
              object_size);
  }

  FixedArray* elements = FixedArray::cast(source->elements());
  FixedArray* properties = FixedArray::cast(source->properties());
  // Update elements if necessary.
  if (elements->length() > 0) {
    Object* elem = CopyFixedArray(elements);
    if (elem->IsFailure()) return elem;
    JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
  }
  // Update properties if necessary.
  if (properties->length() > 0) {
    Object* prop = CopyFixedArray(properties);
    if (prop->IsFailure()) return prop;
    JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
  }
  // Return the new clone.
#ifdef ENABLE_LOGGING_AND_PROFILING
  ProducerHeapProfile::RecordJSObjectAllocation(clone);
#endif
  return clone;
}
2602
+
2603
+
2604
+ Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
2605
+ JSGlobalProxy* object) {
2606
+ // Allocate initial map if absent.
2607
+ if (!constructor->has_initial_map()) {
2608
+ Object* initial_map = AllocateInitialMap(constructor);
2609
+ if (initial_map->IsFailure()) return initial_map;
2610
+ constructor->set_initial_map(Map::cast(initial_map));
2611
+ Map::cast(initial_map)->set_constructor(constructor);
2612
+ }
2613
+
2614
+ Map* map = constructor->initial_map();
2615
+
2616
+ // Check that the already allocated object has the same size as
2617
+ // objects allocated using the constructor.
2618
+ ASSERT(map->instance_size() == object->map()->instance_size());
2619
+
2620
+ // Allocate the backing storage for the properties.
2621
+ int prop_size = map->unused_property_fields() - map->inobject_properties();
2622
+ Object* properties = AllocateFixedArray(prop_size, TENURED);
2623
+ if (properties->IsFailure()) return properties;
2624
+
2625
+ // Reset the map for the object.
2626
+ object->set_map(constructor->initial_map());
2627
+
2628
+ // Reinitialize the object from the constructor map.
2629
+ InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
2630
+ return object;
2631
+ }
2632
+
2633
+
2634
+ Object* Heap::AllocateStringFromAscii(Vector<const char> string,
2635
+ PretenureFlag pretenure) {
2636
+ Object* result = AllocateRawAsciiString(string.length(), pretenure);
2637
+ if (result->IsFailure()) return result;
2638
+
2639
+ // Copy the characters into the new object.
2640
+ SeqAsciiString* string_result = SeqAsciiString::cast(result);
2641
+ for (int i = 0; i < string.length(); i++) {
2642
+ string_result->SeqAsciiStringSet(i, string[i]);
2643
+ }
2644
+ return result;
2645
+ }
2646
+
2647
+
2648
+ Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
2649
+ PretenureFlag pretenure) {
2650
+ // Count the number of characters in the UTF-8 string and check if
2651
+ // it is an ASCII string.
2652
+ Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
2653
+ decoder->Reset(string.start(), string.length());
2654
+ int chars = 0;
2655
+ bool is_ascii = true;
2656
+ while (decoder->has_more()) {
2657
+ uc32 r = decoder->GetNext();
2658
+ if (r > String::kMaxAsciiCharCode) is_ascii = false;
2659
+ chars++;
2660
+ }
2661
+
2662
+ // If the string is ascii, we do not need to convert the characters
2663
+ // since UTF8 is backwards compatible with ascii.
2664
+ if (is_ascii) return AllocateStringFromAscii(string, pretenure);
2665
+
2666
+ Object* result = AllocateRawTwoByteString(chars, pretenure);
2667
+ if (result->IsFailure()) return result;
2668
+
2669
+ // Convert and copy the characters into the new object.
2670
+ String* string_result = String::cast(result);
2671
+ decoder->Reset(string.start(), string.length());
2672
+ for (int i = 0; i < chars; i++) {
2673
+ uc32 r = decoder->GetNext();
2674
+ string_result->Set(i, r);
2675
+ }
2676
+ return result;
2677
+ }
2678
+
2679
+
2680
+ Object* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
2681
+ PretenureFlag pretenure) {
2682
+ // Check if the string is an ASCII string.
2683
+ int i = 0;
2684
+ while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++;
2685
+
2686
+ Object* result;
2687
+ if (i == string.length()) { // It's an ASCII string.
2688
+ result = AllocateRawAsciiString(string.length(), pretenure);
2689
+ } else { // It's not an ASCII string.
2690
+ result = AllocateRawTwoByteString(string.length(), pretenure);
2691
+ }
2692
+ if (result->IsFailure()) return result;
2693
+
2694
+ // Copy the characters into the new object, which may be either ASCII or
2695
+ // UTF-16.
2696
+ String* string_result = String::cast(result);
2697
+ for (int i = 0; i < string.length(); i++) {
2698
+ string_result->Set(i, string[i]);
2699
+ }
2700
+ return result;
2701
+ }
2702
+
2703
+
2704
+ Map* Heap::SymbolMapForString(String* string) {
2705
+ // If the string is in new space it cannot be used as a symbol.
2706
+ if (InNewSpace(string)) return NULL;
2707
+
2708
+ // Find the corresponding symbol map for strings.
2709
+ Map* map = string->map();
2710
+ if (map == ascii_string_map()) return ascii_symbol_map();
2711
+ if (map == string_map()) return symbol_map();
2712
+ if (map == cons_string_map()) return cons_symbol_map();
2713
+ if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
2714
+ if (map == external_string_map()) return external_symbol_map();
2715
+ if (map == external_ascii_string_map()) return external_ascii_symbol_map();
2716
+
2717
+ // No match found.
2718
+ return NULL;
2719
+ }
2720
+
2721
+
2722
+ Object* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
2723
+ int chars,
2724
+ uint32_t hash_field) {
2725
+ ASSERT(chars >= 0);
2726
+ // Ensure the chars matches the number of characters in the buffer.
2727
+ ASSERT(static_cast<unsigned>(chars) == buffer->Length());
2728
+ // Determine whether the string is ascii.
2729
+ bool is_ascii = true;
2730
+ while (buffer->has_more()) {
2731
+ if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
2732
+ is_ascii = false;
2733
+ break;
2734
+ }
2735
+ }
2736
+ buffer->Rewind();
2737
+
2738
+ // Compute map and object size.
2739
+ int size;
2740
+ Map* map;
2741
+
2742
+ if (is_ascii) {
2743
+ if (chars > SeqAsciiString::kMaxLength) {
2744
+ return Failure::OutOfMemoryException();
2745
+ }
2746
+ map = ascii_symbol_map();
2747
+ size = SeqAsciiString::SizeFor(chars);
2748
+ } else {
2749
+ if (chars > SeqTwoByteString::kMaxLength) {
2750
+ return Failure::OutOfMemoryException();
2751
+ }
2752
+ map = symbol_map();
2753
+ size = SeqTwoByteString::SizeFor(chars);
2754
+ }
2755
+
2756
+ // Allocate string.
2757
+ Object* result = (size > MaxObjectSizeInPagedSpace())
2758
+ ? lo_space_->AllocateRaw(size)
2759
+ : old_data_space_->AllocateRaw(size);
2760
+ if (result->IsFailure()) return result;
2761
+
2762
+ reinterpret_cast<HeapObject*>(result)->set_map(map);
2763
+ // Set length and hash fields of the allocated string.
2764
+ String* answer = String::cast(result);
2765
+ answer->set_length(chars);
2766
+ answer->set_hash_field(hash_field);
2767
+
2768
+ ASSERT_EQ(size, answer->Size());
2769
+
2770
+ // Fill in the characters.
2771
+ for (int i = 0; i < chars; i++) {
2772
+ answer->Set(i, buffer->GetNext());
2773
+ }
2774
+ return answer;
2775
+ }
2776
+
2777
+
2778
+ Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
2779
+ if (length < 0 || length > SeqAsciiString::kMaxLength) {
2780
+ return Failure::OutOfMemoryException();
2781
+ }
2782
+
2783
+ int size = SeqAsciiString::SizeFor(length);
2784
+ ASSERT(size <= SeqAsciiString::kMaxSize);
2785
+
2786
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2787
+ AllocationSpace retry_space = OLD_DATA_SPACE;
2788
+
2789
+ if (space == NEW_SPACE) {
2790
+ if (size > kMaxObjectSizeInNewSpace) {
2791
+ // Allocate in large object space, retry space will be ignored.
2792
+ space = LO_SPACE;
2793
+ } else if (size > MaxObjectSizeInPagedSpace()) {
2794
+ // Allocate in new space, retry in large object space.
2795
+ retry_space = LO_SPACE;
2796
+ }
2797
+ } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
2798
+ space = LO_SPACE;
2799
+ }
2800
+ Object* result = AllocateRaw(size, space, retry_space);
2801
+ if (result->IsFailure()) return result;
2802
+
2803
+ // Partially initialize the object.
2804
+ HeapObject::cast(result)->set_map(ascii_string_map());
2805
+ String::cast(result)->set_length(length);
2806
+ String::cast(result)->set_hash_field(String::kEmptyHashField);
2807
+ ASSERT_EQ(size, HeapObject::cast(result)->Size());
2808
+ return result;
2809
+ }
2810
+
2811
+
2812
+ Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
2813
+ if (length < 0 || length > SeqTwoByteString::kMaxLength) {
2814
+ return Failure::OutOfMemoryException();
2815
+ }
2816
+ int size = SeqTwoByteString::SizeFor(length);
2817
+ ASSERT(size <= SeqTwoByteString::kMaxSize);
2818
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2819
+ AllocationSpace retry_space = OLD_DATA_SPACE;
2820
+
2821
+ if (space == NEW_SPACE) {
2822
+ if (size > kMaxObjectSizeInNewSpace) {
2823
+ // Allocate in large object space, retry space will be ignored.
2824
+ space = LO_SPACE;
2825
+ } else if (size > MaxObjectSizeInPagedSpace()) {
2826
+ // Allocate in new space, retry in large object space.
2827
+ retry_space = LO_SPACE;
2828
+ }
2829
+ } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
2830
+ space = LO_SPACE;
2831
+ }
2832
+ Object* result = AllocateRaw(size, space, retry_space);
2833
+ if (result->IsFailure()) return result;
2834
+
2835
+ // Partially initialize the object.
2836
+ HeapObject::cast(result)->set_map(string_map());
2837
+ String::cast(result)->set_length(length);
2838
+ String::cast(result)->set_hash_field(String::kEmptyHashField);
2839
+ ASSERT_EQ(size, HeapObject::cast(result)->Size());
2840
+ return result;
2841
+ }
2842
+
2843
+
2844
+ Object* Heap::AllocateEmptyFixedArray() {
2845
+ int size = FixedArray::SizeFor(0);
2846
+ Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
2847
+ if (result->IsFailure()) return result;
2848
+ // Initialize the object.
2849
+ reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2850
+ reinterpret_cast<Array*>(result)->set_length(0);
2851
+ return result;
2852
+ }
2853
+
2854
+
2855
+ Object* Heap::AllocateRawFixedArray(int length) {
2856
+ if (length < 0 || length > FixedArray::kMaxLength) {
2857
+ return Failure::OutOfMemoryException();
2858
+ }
2859
+ // Use the general function if we're forced to always allocate.
2860
+ if (always_allocate()) return AllocateFixedArray(length, TENURED);
2861
+ // Allocate the raw data for a fixed array.
2862
+ int size = FixedArray::SizeFor(length);
2863
+ return size <= kMaxObjectSizeInNewSpace
2864
+ ? new_space_.AllocateRaw(size)
2865
+ : lo_space_->AllocateRawFixedArray(size);
2866
+ }
2867
+
2868
+
2869
+ Object* Heap::CopyFixedArray(FixedArray* src) {
2870
+ int len = src->length();
2871
+ Object* obj = AllocateRawFixedArray(len);
2872
+ if (obj->IsFailure()) return obj;
2873
+ if (Heap::InNewSpace(obj)) {
2874
+ HeapObject* dst = HeapObject::cast(obj);
2875
+ CopyBlock(reinterpret_cast<Object**>(dst->address()),
2876
+ reinterpret_cast<Object**>(src->address()),
2877
+ FixedArray::SizeFor(len));
2878
+ return obj;
2879
+ }
2880
+ HeapObject::cast(obj)->set_map(src->map());
2881
+ FixedArray* result = FixedArray::cast(obj);
2882
+ result->set_length(len);
2883
+ // Copy the content
2884
+ WriteBarrierMode mode = result->GetWriteBarrierMode();
2885
+ for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
2886
+ return result;
2887
+ }
2888
+
2889
+
2890
+ Object* Heap::AllocateFixedArray(int length) {
2891
+ ASSERT(length >= 0);
2892
+ if (length == 0) return empty_fixed_array();
2893
+ Object* result = AllocateRawFixedArray(length);
2894
+ if (!result->IsFailure()) {
2895
+ // Initialize header.
2896
+ reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2897
+ FixedArray* array = FixedArray::cast(result);
2898
+ array->set_length(length);
2899
+ Object* value = undefined_value();
2900
+ // Initialize body.
2901
+ for (int index = 0; index < length; index++) {
2902
+ array->set(index, value, SKIP_WRITE_BARRIER);
2903
+ }
2904
+ }
2905
+ return result;
2906
+ }
2907
+
2908
+
2909
+ Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
2910
+ ASSERT(length >= 0);
2911
+ ASSERT(empty_fixed_array()->IsFixedArray());
2912
+ if (length < 0 || length > FixedArray::kMaxLength) {
2913
+ return Failure::OutOfMemoryException();
2914
+ }
2915
+ if (length == 0) return empty_fixed_array();
2916
+
2917
+ AllocationSpace space =
2918
+ (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
2919
+ int size = FixedArray::SizeFor(length);
2920
+ if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
2921
+ // Too big for new space.
2922
+ space = LO_SPACE;
2923
+ } else if (space == OLD_POINTER_SPACE &&
2924
+ size > MaxObjectSizeInPagedSpace()) {
2925
+ // Too big for old pointer space.
2926
+ space = LO_SPACE;
2927
+ }
2928
+
2929
+ // Specialize allocation for the space.
2930
+ Object* result = Failure::OutOfMemoryException();
2931
+ if (space == NEW_SPACE) {
2932
+ // We cannot use Heap::AllocateRaw() because it will not properly
2933
+ // allocate extra remembered set bits if always_allocate() is true and
2934
+ // new space allocation fails.
2935
+ result = new_space_.AllocateRaw(size);
2936
+ if (result->IsFailure() && always_allocate()) {
2937
+ if (size <= MaxObjectSizeInPagedSpace()) {
2938
+ result = old_pointer_space_->AllocateRaw(size);
2939
+ } else {
2940
+ result = lo_space_->AllocateRawFixedArray(size);
2941
+ }
2942
+ }
2943
+ } else if (space == OLD_POINTER_SPACE) {
2944
+ result = old_pointer_space_->AllocateRaw(size);
2945
+ } else {
2946
+ ASSERT(space == LO_SPACE);
2947
+ result = lo_space_->AllocateRawFixedArray(size);
2948
+ }
2949
+ if (result->IsFailure()) return result;
2950
+
2951
+ // Initialize the object.
2952
+ reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2953
+ FixedArray* array = FixedArray::cast(result);
2954
+ array->set_length(length);
2955
+ Object* value = undefined_value();
2956
+ for (int index = 0; index < length; index++) {
2957
+ array->set(index, value, SKIP_WRITE_BARRIER);
2958
+ }
2959
+ return array;
2960
+ }
2961
+
2962
+
2963
+ Object* Heap::AllocateFixedArrayWithHoles(int length) {
2964
+ if (length == 0) return empty_fixed_array();
2965
+ Object* result = AllocateRawFixedArray(length);
2966
+ if (!result->IsFailure()) {
2967
+ // Initialize header.
2968
+ reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2969
+ FixedArray* array = FixedArray::cast(result);
2970
+ array->set_length(length);
2971
+ // Initialize body.
2972
+ Object* value = the_hole_value();
2973
+ for (int index = 0; index < length; index++) {
2974
+ array->set(index, value, SKIP_WRITE_BARRIER);
2975
+ }
2976
+ }
2977
+ return result;
2978
+ }
2979
+
2980
+
2981
+ Object* Heap::AllocateHashTable(int length) {
2982
+ Object* result = Heap::AllocateFixedArray(length);
2983
+ if (result->IsFailure()) return result;
2984
+ reinterpret_cast<Array*>(result)->set_map(hash_table_map());
2985
+ ASSERT(result->IsHashTable());
2986
+ return result;
2987
+ }
2988
+
2989
+
2990
+ Object* Heap::AllocateGlobalContext() {
2991
+ Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
2992
+ if (result->IsFailure()) return result;
2993
+ Context* context = reinterpret_cast<Context*>(result);
2994
+ context->set_map(global_context_map());
2995
+ ASSERT(context->IsGlobalContext());
2996
+ ASSERT(result->IsContext());
2997
+ return result;
2998
+ }
2999
+
3000
+
3001
+ Object* Heap::AllocateFunctionContext(int length, JSFunction* function) {
3002
+ ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
3003
+ Object* result = Heap::AllocateFixedArray(length);
3004
+ if (result->IsFailure()) return result;
3005
+ Context* context = reinterpret_cast<Context*>(result);
3006
+ context->set_map(context_map());
3007
+ context->set_closure(function);
3008
+ context->set_fcontext(context);
3009
+ context->set_previous(NULL);
3010
+ context->set_extension(NULL);
3011
+ context->set_global(function->context()->global());
3012
+ ASSERT(!context->IsGlobalContext());
3013
+ ASSERT(context->is_function_context());
3014
+ ASSERT(result->IsContext());
3015
+ return result;
3016
+ }
3017
+
3018
+
3019
+ Object* Heap::AllocateWithContext(Context* previous,
3020
+ JSObject* extension,
3021
+ bool is_catch_context) {
3022
+ Object* result = Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3023
+ if (result->IsFailure()) return result;
3024
+ Context* context = reinterpret_cast<Context*>(result);
3025
+ context->set_map(is_catch_context ? catch_context_map() : context_map());
3026
+ context->set_closure(previous->closure());
3027
+ context->set_fcontext(previous->fcontext());
3028
+ context->set_previous(previous);
3029
+ context->set_extension(extension);
3030
+ context->set_global(previous->global());
3031
+ ASSERT(!context->IsGlobalContext());
3032
+ ASSERT(!context->is_function_context());
3033
+ ASSERT(result->IsContext());
3034
+ return result;
3035
+ }
3036
+
3037
+
3038
+ Object* Heap::AllocateStruct(InstanceType type) {
3039
+ Map* map;
3040
+ switch (type) {
3041
+ #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
3042
+ STRUCT_LIST(MAKE_CASE)
3043
+ #undef MAKE_CASE
3044
+ default:
3045
+ UNREACHABLE();
3046
+ return Failure::InternalError();
3047
+ }
3048
+ int size = map->instance_size();
3049
+ AllocationSpace space =
3050
+ (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
3051
+ Object* result = Heap::Allocate(map, space);
3052
+ if (result->IsFailure()) return result;
3053
+ Struct::cast(result)->InitializeBody(size);
3054
+ return result;
3055
+ }
3056
+
3057
+
3058
+ bool Heap::IdleNotification() {
3059
+ static const int kIdlesBeforeScavenge = 4;
3060
+ static const int kIdlesBeforeMarkSweep = 7;
3061
+ static const int kIdlesBeforeMarkCompact = 8;
3062
+ static int number_idle_notifications = 0;
3063
+ static int last_gc_count = gc_count_;
3064
+
3065
+ bool finished = false;
3066
+
3067
+ if (last_gc_count == gc_count_) {
3068
+ number_idle_notifications++;
3069
+ } else {
3070
+ number_idle_notifications = 0;
3071
+ last_gc_count = gc_count_;
3072
+ }
3073
+
3074
+ if (number_idle_notifications == kIdlesBeforeScavenge) {
3075
+ CollectGarbage(0, NEW_SPACE);
3076
+ new_space_.Shrink();
3077
+ last_gc_count = gc_count_;
3078
+
3079
+ } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
3080
+ // Before doing the mark-sweep collections we clear the
3081
+ // compilation cache to avoid hanging on to source code and
3082
+ // generated code for cached functions.
3083
+ CompilationCache::Clear();
3084
+
3085
+ CollectAllGarbage(false);
3086
+ new_space_.Shrink();
3087
+ last_gc_count = gc_count_;
3088
+
3089
+ } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
3090
+ CollectAllGarbage(true);
3091
+ new_space_.Shrink();
3092
+ last_gc_count = gc_count_;
3093
+ number_idle_notifications = 0;
3094
+ finished = true;
3095
+ }
3096
+
3097
+ // Uncommit unused memory in new space.
3098
+ Heap::UncommitFromSpace();
3099
+ return finished;
3100
+ }
3101
+
3102
+
3103
+ #ifdef DEBUG
3104
+
3105
+ void Heap::Print() {
3106
+ if (!HasBeenSetup()) return;
3107
+ Top::PrintStack();
3108
+ AllSpaces spaces;
3109
+ while (Space* space = spaces.next()) space->Print();
3110
+ }
3111
+
3112
+
3113
+ void Heap::ReportCodeStatistics(const char* title) {
3114
+ PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
3115
+ PagedSpace::ResetCodeStatistics();
3116
+ // We do not look for code in new space, map space, or old space. If code
3117
+ // somehow ends up in those spaces, we would miss it here.
3118
+ code_space_->CollectCodeStatistics();
3119
+ lo_space_->CollectCodeStatistics();
3120
+ PagedSpace::ReportCodeStatistics();
3121
+ }
3122
+
3123
+
3124
+ // This function expects that NewSpace's allocated objects histogram is
3125
+ // populated (via a call to CollectStatistics or else as a side effect of a
3126
+ // just-completed scavenge collection).
3127
+ void Heap::ReportHeapStatistics(const char* title) {
3128
+ USE(title);
3129
+ PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
3130
+ title, gc_count_);
3131
+ PrintF("mark-compact GC : %d\n", mc_count_);
3132
+ PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
3133
+ PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
3134
+
3135
+ PrintF("\n");
3136
+ PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
3137
+ GlobalHandles::PrintStats();
3138
+ PrintF("\n");
3139
+
3140
+ PrintF("Heap statistics : ");
3141
+ MemoryAllocator::ReportStatistics();
3142
+ PrintF("To space : ");
3143
+ new_space_.ReportStatistics();
3144
+ PrintF("Old pointer space : ");
3145
+ old_pointer_space_->ReportStatistics();
3146
+ PrintF("Old data space : ");
3147
+ old_data_space_->ReportStatistics();
3148
+ PrintF("Code space : ");
3149
+ code_space_->ReportStatistics();
3150
+ PrintF("Map space : ");
3151
+ map_space_->ReportStatistics();
3152
+ PrintF("Cell space : ");
3153
+ cell_space_->ReportStatistics();
3154
+ PrintF("Large object space : ");
3155
+ lo_space_->ReportStatistics();
3156
+ PrintF(">>>>>> ========================================= >>>>>>\n");
3157
+ }
3158
+
3159
+ #endif // DEBUG
3160
+
3161
+ bool Heap::Contains(HeapObject* value) {
3162
+ return Contains(value->address());
3163
+ }
3164
+
3165
+
3166
+ bool Heap::Contains(Address addr) {
3167
+ if (OS::IsOutsideAllocatedSpace(addr)) return false;
3168
+ return HasBeenSetup() &&
3169
+ (new_space_.ToSpaceContains(addr) ||
3170
+ old_pointer_space_->Contains(addr) ||
3171
+ old_data_space_->Contains(addr) ||
3172
+ code_space_->Contains(addr) ||
3173
+ map_space_->Contains(addr) ||
3174
+ cell_space_->Contains(addr) ||
3175
+ lo_space_->SlowContains(addr));
3176
+ }
3177
+
3178
+
3179
+ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
3180
+ return InSpace(value->address(), space);
3181
+ }
3182
+
3183
+
3184
+ bool Heap::InSpace(Address addr, AllocationSpace space) {
3185
+ if (OS::IsOutsideAllocatedSpace(addr)) return false;
3186
+ if (!HasBeenSetup()) return false;
3187
+
3188
+ switch (space) {
3189
+ case NEW_SPACE:
3190
+ return new_space_.ToSpaceContains(addr);
3191
+ case OLD_POINTER_SPACE:
3192
+ return old_pointer_space_->Contains(addr);
3193
+ case OLD_DATA_SPACE:
3194
+ return old_data_space_->Contains(addr);
3195
+ case CODE_SPACE:
3196
+ return code_space_->Contains(addr);
3197
+ case MAP_SPACE:
3198
+ return map_space_->Contains(addr);
3199
+ case CELL_SPACE:
3200
+ return cell_space_->Contains(addr);
3201
+ case LO_SPACE:
3202
+ return lo_space_->SlowContains(addr);
3203
+ }
3204
+
3205
+ return false;
3206
+ }
3207
+
3208
+
3209
+ #ifdef DEBUG
3210
+ void Heap::Verify() {
3211
+ ASSERT(HasBeenSetup());
3212
+
3213
+ VerifyPointersVisitor visitor;
3214
+ IterateRoots(&visitor, VISIT_ONLY_STRONG);
3215
+
3216
+ new_space_.Verify();
3217
+
3218
+ VerifyPointersAndRSetVisitor rset_visitor;
3219
+ old_pointer_space_->Verify(&rset_visitor);
3220
+ map_space_->Verify(&rset_visitor);
3221
+
3222
+ VerifyPointersVisitor no_rset_visitor;
3223
+ old_data_space_->Verify(&no_rset_visitor);
3224
+ code_space_->Verify(&no_rset_visitor);
3225
+ cell_space_->Verify(&no_rset_visitor);
3226
+
3227
+ lo_space_->Verify();
3228
+ }
3229
+ #endif // DEBUG
3230
+
3231
+
3232
+ Object* Heap::LookupSymbol(Vector<const char> string) {
3233
+ Object* symbol = NULL;
3234
+ Object* new_table = symbol_table()->LookupSymbol(string, &symbol);
3235
+ if (new_table->IsFailure()) return new_table;
3236
+ // Can't use set_symbol_table because SymbolTable::cast knows that
3237
+ // SymbolTable is a singleton and checks for identity.
3238
+ roots_[kSymbolTableRootIndex] = new_table;
3239
+ ASSERT(symbol != NULL);
3240
+ return symbol;
3241
+ }
3242
+
3243
+
3244
+ Object* Heap::LookupSymbol(String* string) {
3245
+ if (string->IsSymbol()) return string;
3246
+ Object* symbol = NULL;
3247
+ Object* new_table = symbol_table()->LookupString(string, &symbol);
3248
+ if (new_table->IsFailure()) return new_table;
3249
+ // Can't use set_symbol_table because SymbolTable::cast knows that
3250
+ // SymbolTable is a singleton and checks for identity.
3251
+ roots_[kSymbolTableRootIndex] = new_table;
3252
+ ASSERT(symbol != NULL);
3253
+ return symbol;
3254
+ }
3255
+
3256
+
3257
+ bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
3258
+ if (string->IsSymbol()) {
3259
+ *symbol = string;
3260
+ return true;
3261
+ }
3262
+ return symbol_table()->LookupSymbolIfExists(string, symbol);
3263
+ }
3264
+
3265
+
3266
+ #ifdef DEBUG
3267
+ void Heap::ZapFromSpace() {
3268
+ ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject());
3269
+ for (Address a = new_space_.FromSpaceLow();
3270
+ a < new_space_.FromSpaceHigh();
3271
+ a += kPointerSize) {
3272
+ Memory::Address_at(a) = kFromSpaceZapValue;
3273
+ }
3274
+ }
3275
+ #endif // DEBUG
3276
+
3277
+
3278
+ int Heap::IterateRSetRange(Address object_start,
3279
+ Address object_end,
3280
+ Address rset_start,
3281
+ ObjectSlotCallback copy_object_func) {
3282
+ Address object_address = object_start;
3283
+ Address rset_address = rset_start;
3284
+ int set_bits_count = 0;
3285
+
3286
+ // Loop over all the pointers in [object_start, object_end).
3287
+ while (object_address < object_end) {
3288
+ uint32_t rset_word = Memory::uint32_at(rset_address);
3289
+ if (rset_word != 0) {
3290
+ uint32_t result_rset = rset_word;
3291
+ for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) {
3292
+ // Do not dereference pointers at or past object_end.
3293
+ if ((rset_word & bitmask) != 0 && object_address < object_end) {
3294
+ Object** object_p = reinterpret_cast<Object**>(object_address);
3295
+ if (Heap::InNewSpace(*object_p)) {
3296
+ copy_object_func(reinterpret_cast<HeapObject**>(object_p));
3297
+ }
3298
+ // If this pointer does not need to be remembered anymore, clear
3299
+ // the remembered set bit.
3300
+ if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
3301
+ set_bits_count++;
3302
+ }
3303
+ object_address += kPointerSize;
3304
+ }
3305
+ // Update the remembered set if it has changed.
3306
+ if (result_rset != rset_word) {
3307
+ Memory::uint32_at(rset_address) = result_rset;
3308
+ }
3309
+ } else {
3310
+ // No bits in the word were set. This is the common case.
3311
+ object_address += kPointerSize * kBitsPerInt;
3312
+ }
3313
+ rset_address += kIntSize;
3314
+ }
3315
+ return set_bits_count;
3316
+ }
3317
+
3318
+
3319
+ void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
3320
+ ASSERT(Page::is_rset_in_use());
3321
+ ASSERT(space == old_pointer_space_ || space == map_space_);
3322
+
3323
+ static void* paged_rset_histogram = StatsTable::CreateHistogram(
3324
+ "V8.RSetPaged",
3325
+ 0,
3326
+ Page::kObjectAreaSize / kPointerSize,
3327
+ 30);
3328
+
3329
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
3330
+ while (it.has_next()) {
3331
+ Page* page = it.next();
3332
+ int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
3333
+ page->RSetStart(), copy_object_func);
3334
+ if (paged_rset_histogram != NULL) {
3335
+ StatsTable::AddHistogramSample(paged_rset_histogram, count);
3336
+ }
3337
+ }
3338
+ }
3339
+
3340
+
3341
+ void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
3342
+ IterateStrongRoots(v, mode);
3343
+ v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
3344
+ v->Synchronize("symbol_table");
3345
+ if (mode != VISIT_ALL_IN_SCAVENGE) {
3346
+ // Scavenge collections have special processing for this.
3347
+ ExternalStringTable::Iterate(v);
3348
+ }
3349
+ v->Synchronize("external_string_table");
3350
+ }
3351
+
3352
+
3353
+ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
3354
+ v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
3355
+ v->Synchronize("strong_root_list");
3356
+
3357
+ v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
3358
+ v->Synchronize("symbol");
3359
+
3360
+ Bootstrapper::Iterate(v);
3361
+ v->Synchronize("bootstrapper");
3362
+ Top::Iterate(v);
3363
+ v->Synchronize("top");
3364
+ Relocatable::Iterate(v);
3365
+ v->Synchronize("relocatable");
3366
+
3367
+ #ifdef ENABLE_DEBUGGER_SUPPORT
3368
+ Debug::Iterate(v);
3369
+ #endif
3370
+ v->Synchronize("debug");
3371
+ CompilationCache::Iterate(v);
3372
+ v->Synchronize("compilationcache");
3373
+
3374
+ // Iterate over local handles in handle scopes.
3375
+ HandleScopeImplementer::Iterate(v);
3376
+ v->Synchronize("handlescope");
3377
+
3378
+ // Iterate over the builtin code objects and code stubs in the
3379
+ // heap. Note that it is not necessary to iterate over code objects
3380
+ // on scavenge collections.
3381
+ if (mode != VISIT_ALL_IN_SCAVENGE) {
3382
+ Builtins::IterateBuiltins(v);
3383
+ }
3384
+ v->Synchronize("builtins");
3385
+
3386
+ // Iterate over global handles.
3387
+ if (mode == VISIT_ONLY_STRONG) {
3388
+ GlobalHandles::IterateStrongRoots(v);
3389
+ } else {
3390
+ GlobalHandles::IterateAllRoots(v);
3391
+ }
3392
+ v->Synchronize("globalhandles");
3393
+
3394
+ // Iterate over pointers being held by inactive threads.
3395
+ ThreadManager::Iterate(v);
3396
+ v->Synchronize("threadmanager");
3397
+ }
3398
+
3399
+
3400
// Flag is set when the heap has been configured. The heap can be repeatedly
// configured through the API until it is setup.
static bool heap_configured = false;

// TODO(1236194): Since the heap size is configurable on the command line
// and through the API, we should gracefully handle the case that the heap
// size is not big enough to fit all the initial objects.
//
// Record the requested semispace and old-generation limits, derive the
// dependent sizes, and mark the heap as configured.  Returns false if the
// spaces have already been set up (configuration is then immutable).
// A non-positive argument means "keep the current value".
bool Heap::ConfigureHeap(int max_semispace_size, int max_old_gen_size) {
  if (HasBeenSetup()) return false;

  if (max_semispace_size > 0) max_semispace_size_ = max_semispace_size;

  if (Snapshot::IsEnabled()) {
    // If we are using a snapshot we always reserve the default amount
    // of memory for each semispace because code in the snapshot has
    // write-barrier code that relies on the size and alignment of new
    // space.  We therefore cannot use a larger max semispace size
    // than the default reserved semispace size.
    if (max_semispace_size_ > reserved_semispace_size_) {
      max_semispace_size_ = reserved_semispace_size_;
    }
  } else {
    // If we are not using snapshots we reserve space for the actual
    // max semispace size.
    reserved_semispace_size_ = max_semispace_size_;
  }

  if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;

  // The new space size must be a power of two to support single-bit testing
  // for containment.
  max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
  reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
  initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
  // External allocations may grow to ten times the semispace size before
  // they contribute pressure towards a global GC.
  external_allocation_limit_ = 10 * max_semispace_size_;

  // The old generation is paged.
  max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);

  heap_configured = true;
  return true;
}
3442
+
3443
+
3444
+ bool Heap::ConfigureHeapDefault() {
3445
+ return ConfigureHeap(FLAG_max_new_space_size / 2, FLAG_max_old_space_size);
3446
+ }
3447
+
3448
+
3449
// Fill in *stats with the current size and capacity of every heap space.
// The start/end markers bracket the record so it can be located and
// validated if it ends up in a crash dump.
void Heap::RecordStats(HeapStats* stats) {
  *stats->start_marker = 0xDECADE00;
  *stats->end_marker = 0xDECADE01;
  *stats->new_space_size = new_space_.Size();
  *stats->new_space_capacity = new_space_.Capacity();
  *stats->old_pointer_space_size = old_pointer_space_->Size();
  *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
  *stats->old_data_space_size = old_data_space_->Size();
  *stats->old_data_space_capacity = old_data_space_->Capacity();
  *stats->code_space_size = code_space_->Size();
  *stats->code_space_capacity = code_space_->Capacity();
  *stats->map_space_size = map_space_->Size();
  *stats->map_space_capacity = map_space_->Capacity();
  *stats->cell_space_size = cell_space_->Size();
  *stats->cell_space_capacity = cell_space_->Capacity();
  *stats->lo_space_size = lo_space_->Size();
  // Global handle counts are appended by the handle subsystem itself.
  GlobalHandles::RecordStats(stats);
}
3467
+
3468
+
3469
+ int Heap::PromotedSpaceSize() {
3470
+ return old_pointer_space_->Size()
3471
+ + old_data_space_->Size()
3472
+ + code_space_->Size()
3473
+ + map_space_->Size()
3474
+ + cell_space_->Size()
3475
+ + lo_space_->Size();
3476
+ }
3477
+
3478
+
3479
+ int Heap::PromotedExternalMemorySize() {
3480
+ if (amount_of_external_allocated_memory_
3481
+ <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
3482
+ return amount_of_external_allocated_memory_
3483
+ - amount_of_external_allocated_memory_at_last_global_gc_;
3484
+ }
3485
+
3486
+
3487
// Set up the memory allocator and all heap spaces and, when
// create_heap_objects is true, the initial maps and objects as well.
// Returns false on any failure; the caller should then call
// Heap::TearDown() to release whatever was allocated.
bool Heap::Setup(bool create_heap_objects) {
  // Initialize heap spaces and initial maps and objects. Whenever something
  // goes wrong, just return false. The caller should check the results and
  // call Heap::TearDown() to release allocated memory.
  //
  // If the heap is not yet configured (eg, through the API), configure it.
  // Configuration is based on the flags new-space-size (really the semispace
  // size) and old-space-size if set or the initial values of semispace_size_
  // and old_generation_size_ otherwise.
  if (!heap_configured) {
    if (!ConfigureHeapDefault()) return false;
  }

  // Setup memory allocator and reserve a chunk of memory for new
  // space.  The chunk is double the size of the requested reserved
  // new space size to ensure that we can find a pair of semispaces that
  // are contiguous and aligned to their size.
  if (!MemoryAllocator::Setup(MaxReserved())) return false;
  void* chunk =
      MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
  if (chunk == NULL) return false;

  // Align the pair of semispaces to their size, which must be a power
  // of 2.
  Address new_space_start =
      RoundUp(reinterpret_cast<byte*>(chunk), 2 * reserved_semispace_size_);
  if (!new_space_.Setup(new_space_start, 2 * reserved_semispace_size_)) {
    return false;
  }

  // Initialize old pointer space.
  old_pointer_space_ =
      new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
  if (old_pointer_space_ == NULL) return false;
  if (!old_pointer_space_->Setup(NULL, 0)) return false;

  // Initialize old data space.
  old_data_space_ =
      new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
  if (old_data_space_ == NULL) return false;
  if (!old_data_space_->Setup(NULL, 0)) return false;

  // Initialize the code space, set its maximum capacity to the old
  // generation size.  It needs executable memory.
  // On 64-bit platform(s), we put all code objects in a 2 GB range of
  // virtual address space, so that they can call each other with near calls.
  if (code_range_size_ > 0) {
    if (!CodeRange::Setup(code_range_size_)) {
      return false;
    }
  }

  code_space_ =
      new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
  if (code_space_ == NULL) return false;
  if (!code_space_->Setup(NULL, 0)) return false;

  // Initialize map space.  Its size depends on whether the big-map-space
  // flag is on; otherwise it is capped at a fixed page count.
  map_space_ = new MapSpace(FLAG_use_big_map_space
      ? max_old_generation_size_
      : MapSpace::kMaxMapPageIndex * Page::kPageSize,
      FLAG_max_map_space_pages,
      MAP_SPACE);
  if (map_space_ == NULL) return false;
  if (!map_space_->Setup(NULL, 0)) return false;

  // Initialize global property cell space.
  cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
  if (cell_space_ == NULL) return false;
  if (!cell_space_->Setup(NULL, 0)) return false;

  // The large object code space may contain code or data.  We set the memory
  // to be non-executable here for safety, but this means we need to enable it
  // explicitly when allocating large code objects.
  lo_space_ = new LargeObjectSpace(LO_SPACE);
  if (lo_space_ == NULL) return false;
  if (!lo_space_->Setup()) return false;

  if (create_heap_objects) {
    // Create initial maps.
    if (!CreateInitialMaps()) return false;
    if (!CreateApiObjects()) return false;

    // Create initial objects
    if (!CreateInitialObjects()) return false;
  }

  LOG(IntEvent("heap-capacity", Capacity()));
  LOG(IntEvent("heap-available", Available()));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // This should be called only after initial objects have been created.
  ProducerHeapProfile::Setup();
#endif

  return true;
}
3584
+
3585
+
3586
// Publish the current stack limits into the root array so generated code
// can load them like any other root.
void Heap::SetStackLimits() {
  // On 64 bit machines, pointers are generally out of range of Smis.  We write
  // something that looks like an out of range Smi to the GC.

  // Set up the special root array entries containing the stack limits.
  // These are actually addresses, but the Smi tag makes the GC ignore them.
  roots_[kStackLimitRootIndex] =
      reinterpret_cast<Object*>(
          (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
  roots_[kRealStackLimitRootIndex] =
      reinterpret_cast<Object*>(
          (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
}
3599
+
3600
+
3601
// Release all heap memory: global handles, the external string table, and
// every space, finishing with the memory allocator itself.  Safe to call
// after a partially failed Setup(): each paged space pointer is checked
// for NULL before being torn down.
void Heap::TearDown() {
  GlobalHandles::TearDown();

  ExternalStringTable::TearDown();

  // New space is embedded by value, so it is torn down but not deleted.
  new_space_.TearDown();

  if (old_pointer_space_ != NULL) {
    old_pointer_space_->TearDown();
    delete old_pointer_space_;
    old_pointer_space_ = NULL;
  }

  if (old_data_space_ != NULL) {
    old_data_space_->TearDown();
    delete old_data_space_;
    old_data_space_ = NULL;
  }

  if (code_space_ != NULL) {
    code_space_->TearDown();
    delete code_space_;
    code_space_ = NULL;
  }

  if (map_space_ != NULL) {
    map_space_->TearDown();
    delete map_space_;
    map_space_ = NULL;
  }

  if (cell_space_ != NULL) {
    cell_space_->TearDown();
    delete cell_space_;
    cell_space_ = NULL;
  }

  if (lo_space_ != NULL) {
    lo_space_->TearDown();
    delete lo_space_;
    lo_space_ = NULL;
  }

  // Must come last: the spaces above return their pages through it.
  MemoryAllocator::TearDown();
}
3646
+
3647
+
3648
+ void Heap::Shrink() {
3649
+ // Try to shrink all paged spaces.
3650
+ PagedSpaces spaces;
3651
+ while (PagedSpace* space = spaces.next()) space->Shrink();
3652
+ }
3653
+
3654
+
3655
#ifdef ENABLE_HEAP_PROTECTION

// Revoke access to the memory of every space (no-op before Setup()).
void Heap::Protect() {
  if (!HasBeenSetup()) return;
  AllSpaces iterator;
  for (Space* space = iterator.next();
       space != NULL;
       space = iterator.next()) {
    space->Protect();
  }
}


// Restore access to the memory of every space (no-op before Setup()).
void Heap::Unprotect() {
  if (!HasBeenSetup()) return;
  AllSpaces iterator;
  for (Space* space = iterator.next();
       space != NULL;
       space = iterator.next()) {
    space->Unprotect();
  }
}

#endif
3673
+
3674
+
3675
#ifdef DEBUG

// Visitor that prints the location and referent of each handle it is fed.
class PrintHandleVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      PrintF(" handle %p to %p\n", current, *current);
    }
  }
};

// Dump all live local handles via PrintF (debug builds only).
void Heap::PrintHandles() {
  PrintF("Handles:\n");
  PrintHandleVisitor v;
  HandleScopeImplementer::Iterate(&v);
}

#endif
3692
+
3693
+
3694
+ Space* AllSpaces::next() {
3695
+ switch (counter_++) {
3696
+ case NEW_SPACE:
3697
+ return Heap::new_space();
3698
+ case OLD_POINTER_SPACE:
3699
+ return Heap::old_pointer_space();
3700
+ case OLD_DATA_SPACE:
3701
+ return Heap::old_data_space();
3702
+ case CODE_SPACE:
3703
+ return Heap::code_space();
3704
+ case MAP_SPACE:
3705
+ return Heap::map_space();
3706
+ case CELL_SPACE:
3707
+ return Heap::cell_space();
3708
+ case LO_SPACE:
3709
+ return Heap::lo_space();
3710
+ default:
3711
+ return NULL;
3712
+ }
3713
+ }
3714
+
3715
+
3716
+ PagedSpace* PagedSpaces::next() {
3717
+ switch (counter_++) {
3718
+ case OLD_POINTER_SPACE:
3719
+ return Heap::old_pointer_space();
3720
+ case OLD_DATA_SPACE:
3721
+ return Heap::old_data_space();
3722
+ case CODE_SPACE:
3723
+ return Heap::code_space();
3724
+ case MAP_SPACE:
3725
+ return Heap::map_space();
3726
+ case CELL_SPACE:
3727
+ return Heap::cell_space();
3728
+ default:
3729
+ return NULL;
3730
+ }
3731
+ }
3732
+
3733
+
3734
+
3735
+ OldSpace* OldSpaces::next() {
3736
+ switch (counter_++) {
3737
+ case OLD_POINTER_SPACE:
3738
+ return Heap::old_pointer_space();
3739
+ case OLD_DATA_SPACE:
3740
+ return Heap::old_data_space();
3741
+ case CODE_SPACE:
3742
+ return Heap::code_space();
3743
+ default:
3744
+ return NULL;
3745
+ }
3746
+ }
3747
+
3748
+
3749
// Start at the first space; no object iterator is created until next()
// is called.
SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
}


SpaceIterator::~SpaceIterator() {
  // Delete active iterator if any.
  delete iterator_;
}
3757
+
3758
+
3759
// More spaces remain while the cursor has not yet reached the last space.
bool SpaceIterator::has_next() {
  // Iterate until no more spaces.
  return current_space_ != LAST_SPACE;
}
3763
+
3764
+
3765
// Return an object iterator for the next space, or NULL when every space
// has been visited.  The iterator handed out by the previous call is
// deleted here, so callers must not hold on to it across calls.
ObjectIterator* SpaceIterator::next() {
  if (iterator_ != NULL) {
    delete iterator_;
    iterator_ = NULL;
    // Move to the next space
    current_space_++;
    if (current_space_ > LAST_SPACE) {
      return NULL;
    }
  }

  // Return iterator for the new current space.
  return CreateIterator();
}
3779
+
3780
+
3781
// Create an iterator for the space to iterate.  Each space kind has its
// own iterator class; the switch covers every member of the space enum,
// which is why the trailing ASSERT can insist an iterator was created.
ObjectIterator* SpaceIterator::CreateIterator() {
  ASSERT(iterator_ == NULL);

  switch (current_space_) {
    case NEW_SPACE:
      iterator_ = new SemiSpaceIterator(Heap::new_space());
      break;
    case OLD_POINTER_SPACE:
      iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
      break;
    case OLD_DATA_SPACE:
      iterator_ = new HeapObjectIterator(Heap::old_data_space());
      break;
    case CODE_SPACE:
      iterator_ = new HeapObjectIterator(Heap::code_space());
      break;
    case MAP_SPACE:
      iterator_ = new HeapObjectIterator(Heap::map_space());
      break;
    case CELL_SPACE:
      iterator_ = new HeapObjectIterator(Heap::cell_space());
      break;
    case LO_SPACE:
      iterator_ = new LargeObjectIterator(Heap::lo_space());
      break;
  }

  // Return the newly allocated iterator.
  ASSERT(iterator_ != NULL);
  return iterator_;
}
3813
+
3814
+
3815
HeapIterator::HeapIterator() {
  Init();
}


HeapIterator::~HeapIterator() {
  Shutdown();
}


// Begin a fresh heap traversal: create the space iterator and position the
// object iterator at the first space.
void HeapIterator::Init() {
  // Start the iteration.
  space_iterator_ = new SpaceIterator();
  object_iterator_ = space_iterator_->next();
}


// Release the traversal state.  The space iterator's destructor deletes
// the active object iterator, so object_iterator_ is merely cleared here.
void HeapIterator::Shutdown() {
  // Make sure the last iterator is deallocated.
  delete space_iterator_;
  space_iterator_ = NULL;
  object_iterator_ = NULL;
}
3838
+
3839
+
3840
// Returns true if at least one more heap object remains, advancing the
// object iterator to later spaces as each one is exhausted.
bool HeapIterator::has_next() {
  // No iterator means we are done.
  if (object_iterator_ == NULL) return false;

  if (object_iterator_->has_next_object()) {
    // If the current iterator has more objects we are fine.
    return true;
  } else {
    // Go through the spaces looking for one that has objects.
    while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next();
      if (object_iterator_->has_next_object()) {
        return true;
      }
    }
  }
  // Done with the last space.
  object_iterator_ = NULL;
  return false;
}
3860
+
3861
+
3862
+ HeapObject* HeapIterator::next() {
3863
+ if (has_next()) {
3864
+ return object_iterator_->next_object();
3865
+ } else {
3866
+ return NULL;
3867
+ }
3868
+ }
3869
+
3870
+
3871
// Restart the traversal from the beginning of the heap.
void HeapIterator::reset() {
  // Restart the iterator.
  Shutdown();
  Init();
}
3876
+
3877
+
3878
#ifdef DEBUG

// Shared state for the debug-only path tracer below
// (Heap::TracePathToObject / Heap::TracePathToGlobal).
static bool search_for_any_global;      // true: stop at any JS global object
static Object* search_target;           // specific object to find (or NULL)
static bool found_target;               // set once the target is reached
static List<Object*> object_stack(20);  // current DFS path, for printing


// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
static const int kMarkTag = 2;
3888
+
3889
+ static void MarkObjectRecursively(Object** p);
3890
+ class MarkObjectVisitor : public ObjectVisitor {
3891
+ public:
3892
+ void VisitPointers(Object** start, Object** end) {
3893
+ // Copy all HeapObject pointers in [start, end)
3894
+ for (Object** p = start; p < end; p++) {
3895
+ if ((*p)->IsHeapObject())
3896
+ MarkObjectRecursively(p);
3897
+ }
3898
+ }
3899
+ };
3900
+
3901
+ static MarkObjectVisitor mark_visitor;
3902
+
3903
// Depth-first marking used by the path tracer.  An object is marked as
// visited by adding kMarkTag to its map pointer, making the map word look
// like a non-heap-object; UnmarkObjectRecursively later undoes this.
// Objects on the current DFS path are kept in object_stack so the path
// can be printed once the target is found.
static void MarkObjectRecursively(Object** p) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (!map->IsHeapObject()) return;  // visited before

  if (found_target) return;  // stop if target found
  object_stack.Add(obj);
  // Check whether this object is the one we are searching for.
  if ((search_for_any_global && obj->IsJSGlobalObject()) ||
      (!search_for_any_global && (obj == search_target))) {
    found_target = true;
    return;
  }

  // not visited yet
  Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));

  Address map_addr = map_p->address();

  // Mark the object by tagging its map word.
  obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));

  // The map itself is a heap object and must be traversed too.
  MarkObjectRecursively(&map);

  obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
                   &mark_visitor);

  if (!found_target)  // don't pop if found the target
    object_stack.RemoveLast();
}
3935
+
3936
+
3937
+ static void UnmarkObjectRecursively(Object** p);
3938
+ class UnmarkObjectVisitor : public ObjectVisitor {
3939
+ public:
3940
+ void VisitPointers(Object** start, Object** end) {
3941
+ // Copy all HeapObject pointers in [start, end)
3942
+ for (Object** p = start; p < end; p++) {
3943
+ if ((*p)->IsHeapObject())
3944
+ UnmarkObjectRecursively(p);
3945
+ }
3946
+ }
3947
+ };
3948
+
3949
+ static UnmarkObjectVisitor unmark_visitor;
3950
+
3951
// Undo the marking performed by MarkObjectRecursively: strip kMarkTag off
// the map word, restore the real map pointer, and recurse into the map
// and the object's body.
static void UnmarkObjectRecursively(Object** p) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (map->IsHeapObject()) return;  // unmarked already

  Address map_addr = reinterpret_cast<Address>(map);

  map_addr -= kMarkTag;

  ASSERT_TAG_ALIGNED(map_addr);

  HeapObject* map_p = HeapObject::FromAddress(map_addr);

  // Restore the untagged map pointer.
  obj->set_map(reinterpret_cast<Map*>(map_p));

  UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));

  obj->IterateBody(Map::cast(map_p)->instance_type(),
                   obj->SizeFromMap(Map::cast(map_p)),
                   &unmark_visitor);
}
3976
+
3977
+
3978
// Run one mark/unmark pass from a single root and, if the search target
// was reached, print the chain of objects leading to it.
static void MarkRootObjectRecursively(Object** root) {
  if (search_for_any_global) {
    ASSERT(search_target == NULL);
  } else {
    ASSERT(search_target->IsHeapObject());
  }
  found_target = false;
  object_stack.Clear();

  // Mark to find the path, then unmark to leave the heap untouched.
  MarkObjectRecursively(root);
  UnmarkObjectRecursively(root);

  if (found_target) {
    PrintF("=====================================\n");
    PrintF("====        Path to object       ====\n");
    PrintF("=====================================\n\n");

    ASSERT(!object_stack.is_empty());
    for (int i = 0; i < object_stack.length(); i++) {
      if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
      Object* obj = object_stack[i];
      obj->Print();
    }
    PrintF("=====================================\n");
  }
}
4004
+
4005
+
4006
+ // Helper class for visiting HeapObjects recursively.
4007
+ class MarkRootVisitor: public ObjectVisitor {
4008
+ public:
4009
+ void VisitPointers(Object** start, Object** end) {
4010
+ // Visit all HeapObject pointers in [start, end)
4011
+ for (Object** p = start; p < end; p++) {
4012
+ if ((*p)->IsHeapObject())
4013
+ MarkRootObjectRecursively(p);
4014
+ }
4015
+ }
4016
+ };
4017
+
4018
+
4019
+ // Triggers a depth-first traversal of reachable objects from roots
4020
+ // and finds a path to a specific heap object and prints it.
4021
+ void Heap::TracePathToObject(Object* target) {
4022
+ search_target = target;
4023
+ search_for_any_global = false;
4024
+
4025
+ MarkRootVisitor root_visitor;
4026
+ IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
4027
+ }
4028
+
4029
+
4030
+ // Triggers a depth-first traversal of reachable objects from roots
4031
+ // and finds a path to any global object and prints it. Useful for
4032
+ // determining the source for leaks of global objects.
4033
+ void Heap::TracePathToGlobal() {
4034
+ search_target = NULL;
4035
+ search_for_any_global = true;
4036
+
4037
+ MarkRootVisitor root_visitor;
4038
+ IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
4039
+ }
4040
+ #endif
4041
+
4042
+
4043
// Capture the collection state at the start of a GC cycle.  Start time
// and heap size are only sampled when --trace-gc is on, since they are
// used solely by the destructor's trace line.
GCTracer::GCTracer()
    : start_time_(0.0),
      start_size_(0.0),
      gc_count_(0),
      full_gc_count_(0),
      is_compacting_(false),
      marked_count_(0) {
  // These two fields reflect the state of the previous full collection.
  // Set them before they are changed by the collector.
  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
  if (!FLAG_trace_gc) return;
  start_time_ = OS::TimeCurrentMillis();
  start_size_ = SizeOfHeapObjects();
}
4058
+
4059
+
4060
// Emit the trace line for the collection that just finished (only when
// --trace-gc is set): collector name, heap size before -> after, and the
// wall-clock duration.
GCTracer::~GCTracer() {
  if (!FLAG_trace_gc) return;
  // Printf ONE line iff flag is set.
  PrintF("%s %.1f -> %.1f MB, %d ms.\n",
         CollectorString(),
         start_size_, SizeOfHeapObjects(),
         static_cast<int>(OS::TimeCurrentMillis() - start_time_));

#if defined(ENABLE_LOGGING_AND_PROFILING)
  Heap::PrintShortHeapStatistics();
#endif
}
4072
+
4073
+
4074
+ const char* GCTracer::CollectorString() {
4075
+ switch (collector_) {
4076
+ case SCAVENGER:
4077
+ return "Scavenge";
4078
+ case MARK_COMPACTOR:
4079
+ return MarkCompactCollector::HasCompacted() ? "Mark-compact"
4080
+ : "Mark-sweep";
4081
+ }
4082
+ return "Unknown GC";
4083
+ }
4084
+
4085
+
4086
+ int KeyedLookupCache::Hash(Map* map, String* name) {
4087
+ // Uses only lower 32 bits if pointers are larger.
4088
+ uintptr_t addr_hash =
4089
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
4090
+ return (addr_hash ^ name->Hash()) & kCapacityMask;
4091
+ }
4092
+
4093
+
4094
+ int KeyedLookupCache::Lookup(Map* map, String* name) {
4095
+ int index = Hash(map, name);
4096
+ Key& key = keys_[index];
4097
+ if ((key.map == map) && key.name->Equals(name)) {
4098
+ return field_offsets_[index];
4099
+ }
4100
+ return -1;
4101
+ }
4102
+
4103
+
4104
+ void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
4105
+ String* symbol;
4106
+ if (Heap::LookupSymbolIfExists(name, &symbol)) {
4107
+ int index = Hash(map, symbol);
4108
+ Key& key = keys_[index];
4109
+ key.map = map;
4110
+ key.name = symbol;
4111
+ field_offsets_[index] = field_offset;
4112
+ }
4113
+ }
4114
+
4115
+
4116
+ void KeyedLookupCache::Clear() {
4117
+ for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
4118
+ }
4119
+
4120
+
4121
+ KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
4122
+
4123
+
4124
+ int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
4125
+
4126
+
4127
+ void DescriptorLookupCache::Clear() {
4128
+ for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
4129
+ }
4130
+
4131
+
4132
+ DescriptorLookupCache::Key
4133
+ DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
4134
+
4135
+ int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
4136
+
4137
+
4138
#ifdef DEBUG
// Under --gc-greedy, force a new-space collection before the allocation
// proceeds, unless we are bootstrapping or allocation failure is
// currently disallowed.
bool Heap::GarbageCollectionGreedyCheck() {
  ASSERT(FLAG_gc_greedy);
  if (Bootstrapper::IsActive() || disallow_allocation_failure()) {
    return true;
  }
  return CollectGarbage(0, NEW_SPACE);
}
#endif
4146
+
4147
+
4148
+ TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
4149
+ : type_(t) {
4150
+ uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
4151
+ uint32_t in1 = 0xffffffffu; // generated by the FPU.
4152
+ for (int i = 0; i < kCacheSize; i++) {
4153
+ elements_[i].in[0] = in0;
4154
+ elements_[i].in[1] = in1;
4155
+ elements_[i].output = NULL;
4156
+ }
4157
+ }
4158
+
4159
+
4160
+ TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
4161
+
4162
+
4163
+ void TranscendentalCache::Clear() {
4164
+ for (int i = 0; i < kNumberOfCaches; i++) {
4165
+ if (caches_[i] != NULL) {
4166
+ delete caches_[i];
4167
+ caches_[i] = NULL;
4168
+ }
4169
+ }
4170
+ }
4171
+
4172
+
4173
// Compact both string lists after a GC: entries cleared to the null value
// are dropped, and strings that have been promoted out of new space are
// moved from the new-space list to the old-space list.
void ExternalStringTable::CleanUp() {
  int last = 0;
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
    if (Heap::InNewSpace(new_space_strings_[i])) {
      // Still in new space: keep it here, compacting the list in place.
      new_space_strings_[last++] = new_space_strings_[i];
    } else {
      // Promoted by the collector: move it to the old-space list.
      old_space_strings_.Add(new_space_strings_[i]);
    }
  }
  new_space_strings_.Rewind(last);
  last = 0;
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
    old_space_strings_[last++] = old_space_strings_[i];
  }
  old_space_strings_.Rewind(last);
  Verify();
}
4193
+
4194
+
4195
// Release the backing storage of both string lists on heap teardown.
void ExternalStringTable::TearDown() {
  new_space_strings_.Free();
  old_space_strings_.Free();
}


// Static storage for the two lists of external strings.
List<Object*> ExternalStringTable::new_space_strings_;
List<Object*> ExternalStringTable::old_space_strings_;
4203
+
4204
+ } } // namespace v8::internal