therubyracer 0.7.4 → 0.7.5


Potentially problematic release: this version of therubyracer might be problematic.

Files changed (482)
  1. data/History.txt +11 -0
  2. data/Rakefile +1 -1
  3. data/ext/v8/extconf.rb +0 -18
  4. data/ext/v8/rr.cpp +2 -2
  5. data/ext/v8/upstream/{2.1.10 → 2.3.3}/AUTHORS +1 -0
  6. data/ext/v8/upstream/{2.1.10 → 2.3.3}/ChangeLog +239 -0
  7. data/ext/v8/upstream/{2.1.10 → 2.3.3}/LICENSE +0 -0
  8. data/ext/v8/upstream/{2.1.10 → 2.3.3}/SConstruct +29 -17
  9. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-debug.h +61 -3
  10. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8-profiler.h +182 -5
  11. data/ext/v8/upstream/{2.1.10 → 2.3.3}/include/v8.h +458 -257
  12. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/SConscript +2 -5
  13. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.cc +2 -2
  14. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/accessors.h +0 -0
  15. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.cc +0 -0
  16. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/allocation.h +0 -0
  17. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.cc +574 -30
  18. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/api.h +12 -10
  19. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apinatives.js +0 -0
  20. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/apiutils.h +0 -0
  21. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arguments.h +0 -0
  22. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm-inl.h +38 -15
  23. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.cc +646 -101
  24. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/assembler-arm.h +174 -15
  25. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/builtins-arm.cc +56 -47
  26. data/ext/v8/upstream/2.3.3/src/arm/codegen-arm-inl.h +48 -0
  27. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.cc +2957 -1448
  28. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/codegen-arm.h +230 -74
  29. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.cc +25 -1
  30. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/constants-arm.h +16 -1
  31. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/cpu-arm.cc +4 -0
  32. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/debug-arm.cc +76 -6
  33. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/disasm-arm.cc +168 -20
  34. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/fast-codegen-arm.cc +5 -2
  35. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.cc +4 -4
  36. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/frames-arm.h +0 -0
  37. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/full-codegen-arm.cc +1558 -248
  38. data/ext/v8/upstream/2.3.3/src/arm/ic-arm.cc +2258 -0
  39. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/jump-target-arm.cc +55 -103
  40. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.cc +358 -185
  41. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/macro-assembler-arm.h +136 -41
  42. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.cc +26 -5
  43. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/regexp-macro-assembler-arm.h +0 -0
  44. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm-inl.h +0 -0
  45. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.cc +4 -0
  46. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/register-allocator-arm.h +0 -0
  47. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.cc +203 -22
  48. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/simulator-arm.h +7 -0
  49. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/stub-cache-arm.cc +531 -324
  50. data/ext/v8/upstream/2.3.3/src/arm/virtual-frame-arm-inl.h +59 -0
  51. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.cc +247 -81
  52. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/arm/virtual-frame-arm.h +99 -83
  53. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/array.js +2 -2
  54. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.cc +6 -13
  55. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/assembler.h +36 -10
  56. data/ext/v8/upstream/2.3.3/src/ast-inl.h +81 -0
  57. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.cc +14 -0
  58. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ast.h +20 -35
  59. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.cc +32 -1
  60. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bootstrapper.h +0 -4
  61. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.cc +50 -33
  62. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/builtins.h +2 -0
  63. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/bytecodes-irregexp.h +0 -0
  64. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cached-powers.h +0 -0
  65. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates-inl.h +0 -0
  66. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/char-predicates.h +0 -0
  67. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.cc +0 -0
  68. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/checks.h +8 -6
  69. data/ext/v8/upstream/2.3.3/src/circular-queue-inl.h +53 -0
  70. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.cc +0 -0
  71. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/circular-queue.h +0 -26
  72. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.cc +2 -4
  73. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code-stubs.h +1 -0
  74. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/code.h +0 -0
  75. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen-inl.h +0 -0
  76. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.cc +44 -13
  77. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/codegen.h +310 -31
  78. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.cc +28 -0
  79. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compilation-cache.h +3 -0
  80. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.cc +45 -14
  81. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/compiler.h +0 -0
  82. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.cc +11 -11
  83. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/contexts.h +0 -0
  84. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions-inl.h +0 -0
  85. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.cc +25 -11
  86. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/conversions.h +0 -0
  87. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.cc +0 -0
  88. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/counters.h +0 -0
  89. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler-inl.h +2 -1
  90. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.cc +68 -24
  91. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu-profiler.h +19 -11
  92. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/cpu.h +0 -0
  93. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.cc +0 -0
  94. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-debug.h +0 -0
  95. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-posix.cc +0 -0
  96. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-readline.cc +0 -0
  97. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8-windows.cc +0 -0
  98. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.cc +3 -0
  99. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.h +0 -0
  100. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/d8.js +55 -2
  101. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.cc +3 -0
  102. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/data-flow.h +0 -0
  103. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/date.js +68 -137
  104. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser-inl.h +0 -0
  105. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.cc +2 -8
  106. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dateparser.h +0 -0
  107. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.cc +3 -3
  108. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-agent.h +0 -0
  109. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug-debugger.js +81 -23
  110. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.cc +275 -81
  111. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/debug.h +85 -6
  112. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disasm.h +0 -0
  113. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.cc +1 -1
  114. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/disassembler.h +0 -0
  115. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.cc +0 -0
  116. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/diy-fp.h +0 -0
  117. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/double.h +0 -0
  118. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/dtoa-config.c +0 -0
  119. data/ext/v8/upstream/2.3.3/src/dtoa.cc +77 -0
  120. data/ext/v8/upstream/2.3.3/src/dtoa.h +81 -0
  121. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.cc +111 -3
  122. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/execution.h +12 -1
  123. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.cc +25 -3
  124. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/factory.h +16 -9
  125. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.cc +0 -0
  126. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-codegen.h +0 -0
  127. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.cc +2 -9
  128. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/fast-dtoa.h +1 -2
  129. data/ext/v8/upstream/2.3.3/src/fixed-dtoa.cc +405 -0
  130. data/ext/v8/upstream/{2.1.10/src/jump-target-light.cc → 2.3.3/src/fixed-dtoa.h} +22 -53
  131. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flag-definitions.h +14 -6
  132. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.cc +5 -9
  133. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flags.h +0 -0
  134. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.cc +0 -0
  135. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/flow-graph.h +0 -0
  136. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.cc +0 -0
  137. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frame-element.h +0 -0
  138. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames-inl.h +0 -0
  139. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.cc +5 -2
  140. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/frames.h +1 -0
  141. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.cc +387 -20
  142. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/full-codegen.h +102 -5
  143. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.cc +0 -0
  144. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/func-name-inferrer.h +0 -0
  145. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.cc +8 -4
  146. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/global-handles.h +0 -0
  147. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/globals.h +44 -7
  148. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles-inl.h +0 -0
  149. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.cc +19 -0
  150. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/handles.h +8 -0
  151. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.cc +0 -0
  152. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/hashmap.h +0 -0
  153. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-inl.h +56 -14
  154. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.cc +85 -1
  155. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.h +45 -1
  156. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.cc +994 -396
  157. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.h +220 -65
  158. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32-inl.h +41 -12
  159. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.cc +94 -24
  160. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/assembler-ia32.h +32 -4
  161. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/builtins-ia32.cc +42 -30
  162. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32-inl.h +0 -0
  163. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.cc +1758 -916
  164. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/codegen-ia32.h +67 -74
  165. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/cpu-ia32.cc +4 -0
  166. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/debug-ia32.cc +46 -0
  167. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/disasm-ia32.cc +37 -6
  168. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.cc +4 -0
  169. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/fast-codegen-ia32.h +0 -0
  170. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.cc +4 -0
  171. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/frames-ia32.h +0 -0
  172. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/full-codegen-ia32.cc +1465 -198
  173. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/ic-ia32.cc +688 -367
  174. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/jump-target-ia32.cc +4 -0
  175. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.cc +82 -180
  176. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/macro-assembler-ia32.h +41 -25
  177. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.cc +68 -24
  178. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/regexp-macro-assembler-ia32.h +1 -2
  179. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32-inl.h +0 -0
  180. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.cc +4 -0
  181. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/register-allocator-ia32.h +0 -0
  182. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.cc +0 -0
  183. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/simulator-ia32.h +0 -0
  184. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/stub-cache-ia32.cc +649 -302
  185. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.cc +23 -1
  186. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ia32/virtual-frame-ia32.h +18 -27
  187. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic-inl.h +30 -3
  188. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.cc +384 -66
  189. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/ic.h +65 -24
  190. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.cc +0 -0
  191. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/interpreter-irregexp.h +0 -0
  192. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/json.js +3 -3
  193. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.cc +20 -4
  194. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jsregexp.h +0 -0
  195. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy-inl.h +0 -0
  196. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-heavy.cc +79 -13
  197. data/ext/v8/upstream/{2.1.10/src/jump-target.h → 2.3.3/src/jump-target-heavy.h} +5 -47
  198. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-inl.h +0 -0
  199. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target-light-inl.h +16 -2
  200. data/ext/v8/upstream/2.3.3/src/jump-target-light.cc +110 -0
  201. data/ext/v8/upstream/2.3.3/src/jump-target-light.h +192 -0
  202. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/jump-target.cc +0 -64
  203. data/ext/v8/upstream/2.3.3/src/jump-target.h +90 -0
  204. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list-inl.h +0 -0
  205. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/list.h +0 -0
  206. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit-debugger.js +141 -28
  207. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.cc +19 -7
  208. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/liveedit.h +0 -0
  209. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-inl.h +0 -0
  210. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.cc +0 -0
  211. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log-utils.h +0 -0
  212. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.cc +12 -11
  213. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/log.h +12 -0
  214. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macro-assembler.h +0 -16
  215. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/macros.py +21 -0
  216. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.cc +120 -109
  217. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mark-compact.h +25 -37
  218. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/math.js +0 -0
  219. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/memory.h +0 -0
  220. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.cc +8 -3
  221. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.h +2 -1
  222. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/messages.js +15 -7
  223. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips-inl.h +0 -0
  224. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.cc +12 -1
  225. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/assembler-mips.h +4 -1
  226. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/builtins-mips.cc +3 -0
  227. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips-inl.h +0 -0
  228. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.cc +9 -0
  229. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/codegen-mips.h +1 -0
  230. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.cc +5 -0
  231. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/constants-mips.h +0 -0
  232. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/cpu-mips.cc +4 -0
  233. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/debug-mips.cc +3 -0
  234. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/disasm-mips.cc +3 -0
  235. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/fast-codegen-mips.cc +3 -0
  236. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.cc +3 -0
  237. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/frames-mips.h +0 -0
  238. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/full-codegen-mips.cc +5 -1
  239. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/ic-mips.cc +3 -0
  240. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/jump-target-mips.cc +3 -0
  241. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.cc +3 -0
  242. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/macro-assembler-mips.h +0 -0
  243. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips-inl.h +0 -0
  244. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.cc +3 -0
  245. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/register-allocator-mips.h +0 -0
  246. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.cc +3 -0
  247. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/simulator-mips.h +0 -0
  248. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/stub-cache-mips.cc +3 -0
  249. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.cc +3 -0
  250. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mips/virtual-frame-mips.h +0 -0
  251. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mirror-debugger.js +46 -4
  252. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/mksnapshot.cc +0 -0
  253. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/natives.h +0 -0
  254. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-debug.cc +8 -1
  255. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects-inl.h +235 -62
  256. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.cc +497 -231
  257. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/objects.h +355 -149
  258. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.cc +0 -0
  259. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/oprofile-agent.h +0 -0
  260. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.cc +31 -6
  261. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/parser.h +1 -1
  262. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-freebsd.cc +9 -6
  263. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-linux.cc +26 -6
  264. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-macos.cc +11 -6
  265. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-nullos.cc +0 -0
  266. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-openbsd.cc +6 -0
  267. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-posix.cc +0 -0
  268. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-solaris.cc +69 -23
  269. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform-win32.cc +15 -11
  270. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/platform.h +10 -6
  271. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/powers-ten.h +0 -0
  272. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.cc +0 -0
  273. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/prettyprinter.h +0 -0
  274. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/profile-generator-inl.h +26 -2
  275. data/ext/v8/upstream/2.3.3/src/profile-generator.cc +1830 -0
  276. data/ext/v8/upstream/2.3.3/src/profile-generator.h +853 -0
  277. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.cc +0 -0
  278. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/property.h +0 -0
  279. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp-inl.h +0 -0
  280. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.cc +0 -0
  281. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-irregexp.h +0 -0
  282. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.cc +0 -0
  283. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler-tracer.h +0 -0
  284. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.cc +1 -3
  285. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-macro-assembler.h +0 -0
  286. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.cc +0 -0
  287. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp-stack.h +0 -0
  288. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/regexp.js +25 -4
  289. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator-inl.h +0 -0
  290. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.cc +4 -3
  291. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/register-allocator.h +0 -0
  292. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.cc +85 -8
  293. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/rewriter.h +0 -0
  294. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.cc +547 -221
  295. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.h +5 -1
  296. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/runtime.js +23 -31
  297. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.cc +12 -6
  298. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scanner.h +60 -53
  299. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.cc +156 -168
  300. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopeinfo.h +58 -62
  301. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.cc +0 -0
  302. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/scopes.h +0 -0
  303. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.cc +320 -242
  304. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/serialize.h +81 -48
  305. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/shell.h +0 -0
  306. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/simulator.h +0 -0
  307. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/smart-pointer.h +0 -0
  308. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-common.cc +0 -0
  309. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot-empty.cc +0 -0
  310. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/snapshot.h +0 -0
  311. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces-inl.h +177 -74
  312. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.cc +138 -315
  313. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/spaces.h +155 -124
  314. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree-inl.h +0 -0
  315. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/splay-tree.h +0 -0
  316. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.cc +0 -0
  317. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string-stream.h +0 -0
  318. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/string.js +113 -119
  319. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.cc +242 -97
  320. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/stub-cache.h +118 -55
  321. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/COPYING +0 -0
  322. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/dtoa/dtoa.c +4 -0
  323. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/third_party/valgrind/valgrind.h +0 -0
  324. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.cc +0 -0
  325. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/token.h +0 -0
  326. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.cc +107 -26
  327. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/top.h +9 -4
  328. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.cc +0 -0
  329. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/type-info.h +2 -2
  330. data/ext/v8/upstream/2.3.3/src/unbound-queue-inl.h +95 -0
  331. data/ext/v8/upstream/2.3.3/src/unbound-queue.h +67 -0
  332. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode-inl.h +0 -0
  333. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.cc +0 -0
  334. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/unicode.h +0 -0
  335. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/uri.js +0 -0
  336. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.cc +0 -0
  337. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/utils.h +83 -1
  338. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.cc +0 -0
  339. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8-counters.h +20 -0
  340. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.cc +5 -1
  341. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8.h +0 -0
  342. data/ext/v8/upstream/2.3.3/src/v8dll-main.cc +39 -0
  343. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8natives.js +210 -33
  344. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.cc +1 -1
  345. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/v8threads.h +1 -1
  346. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.cc +0 -0
  347. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/variables.h +0 -0
  348. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.cc +3 -3
  349. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/version.h +0 -0
  350. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy-inl.h +40 -0
  351. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-heavy.cc +0 -0
  352. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-inl.h +0 -0
  353. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light-inl.h +106 -5
  354. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame-light.cc +4 -1
  355. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.cc +0 -0
  356. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/virtual-frame.h +0 -0
  357. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state-inl.h +6 -3
  358. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.cc +1 -1
  359. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/vm-state.h +6 -4
  360. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64-inl.h +42 -5
  361. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.cc +285 -53
  362. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/assembler-x64.h +54 -18
  363. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/builtins-x64.cc +31 -33
  364. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64-inl.h +0 -0
  365. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.cc +9787 -8722
  366. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/codegen-x64.h +82 -47
  367. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/cpu-x64.cc +4 -0
  368. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/debug-x64.cc +55 -6
  369. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/disasm-x64.cc +42 -19
  370. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/fast-codegen-x64.cc +4 -0
  371. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.cc +4 -0
  372. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/frames-x64.h +4 -0
  373. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/full-codegen-x64.cc +1487 -210
  374. data/ext/v8/upstream/2.3.3/src/x64/ic-x64.cc +1907 -0
  375. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/jump-target-x64.cc +4 -0
  376. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.cc +366 -338
  377. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/macro-assembler-x64.h +83 -38
  378. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.cc +82 -23
  379. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/regexp-macro-assembler-x64.h +1 -2
  380. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64-inl.h +6 -5
  381. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.cc +4 -0
  382. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/register-allocator-x64.h +1 -1
  383. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.cc +0 -0
  384. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/simulator-x64.h +0 -0
  385. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/stub-cache-x64.cc +556 -377
  386. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.cc +197 -98
  387. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/x64/virtual-frame-x64.h +37 -28
  388. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone-inl.h +0 -0
  389. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.cc +0 -0
  390. data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/zone.h +0 -0
  391. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/codemap.js +0 -0
  392. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/consarray.js +0 -0
  393. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/csvparser.js +0 -0
  394. data/ext/v8/upstream/2.3.3/tools/gc-nvp-trace-processor.py +317 -0
  395. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/generate-ten-powers.scm +0 -0
  396. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/gyp/v8.gyp +87 -20
  397. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/js2c.py +19 -15
  398. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/jsmin.py +0 -0
  399. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor +0 -0
  400. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/linux-tick-processor.py +0 -0
  401. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/logreader.js +0 -0
  402. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-nm +0 -0
  403. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/mac-tick-processor +0 -0
  404. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/annotate +0 -0
  405. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/common +0 -0
  406. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/dump +0 -0
  407. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/report +0 -0
  408. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/reset +0 -0
  409. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/run +0 -0
  410. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/shutdown +0 -0
  411. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/oprofile/start +0 -0
  412. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/presubmit.py +0 -0
  413. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/process-heap-prof.py +0 -0
  414. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile.js +0 -0
  415. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/profile_view.js +0 -0
  416. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/run-valgrind.py +0 -0
  417. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.js +0 -0
  418. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/splaytree.py +0 -0
  419. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/stats-viewer.py +25 -13
  420. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/test.py +0 -0
  421. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor-driver.js +0 -0
  422. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.js +0 -0
  423. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/tickprocessor.py +0 -0
  424. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/utils.py +0 -0
  425. data/ext/v8/upstream/2.3.3/tools/v8.xcodeproj/project.pbxproj +1855 -0
  426. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/README.txt +0 -0
  427. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/arm.vsprops +0 -0
  428. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/common.vsprops +0 -0
  429. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8.vcproj +0 -0
  430. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_arm.vcproj +0 -0
  431. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8_x64.vcproj +0 -0
  432. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/d8js2c.cmd +0 -0
  433. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/debug.vsprops +0 -0
  434. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/ia32.vsprops +0 -0
  435. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/js2c.cmd +0 -0
  436. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/release.vsprops +0 -0
  437. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.sln +0 -0
  438. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8.vcproj +0 -0
  439. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.sln +0 -0
  440. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_arm.vcproj +0 -0
  441. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base.vcproj +40 -0
  442. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_arm.vcproj +20 -0
  443. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_base_x64.vcproj +16 -0
  444. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest.vcproj +4 -0
  445. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_arm.vcproj +0 -0
  446. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_cctest_x64.vcproj +0 -0
  447. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot.vcproj +0 -0
  448. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -0
  449. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample.vcproj +0 -0
  450. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_arm.vcproj +0 -0
  451. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_process_sample_x64.vcproj +0 -0
  452. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample.vcproj +0 -0
  453. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -0
  454. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -0
  455. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot.vcproj +0 -0
  456. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc.vcproj +0 -0
  457. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -0
  458. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_snapshot_x64.vcproj +0 -0
  459. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.sln +0 -0
  460. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/v8_x64.vcproj +0 -0
  461. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/visual_studio/x64.vsprops +0 -0
  462. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.bat +0 -0
  463. data/ext/v8/upstream/{2.1.10 → 2.3.3}/tools/windows-tick-processor.py +0 -0
  464. data/ext/v8/upstream/Makefile +1 -1
  465. data/ext/v8/v8_template.cpp +94 -2
  466. data/ext/v8/v8_try_catch.cpp +2 -2
  467. data/lib/v8.rb +1 -1
  468. data/lib/v8/access.rb +93 -40
  469. data/lib/v8/cli.rb +1 -1
  470. data/lib/v8/function.rb +14 -2
  471. data/spec/redjs/jsapi_spec.rb +231 -42
  472. data/therubyracer.gemspec +3 -3
  473. metadata +463 -453
  474. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2-inl.h +0 -263
  475. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.cc +0 -1878
  476. data/ext/v8/upstream/2.1.10/src/arm/assembler-thumb2.h +0 -1036
  477. data/ext/v8/upstream/2.1.10/src/arm/codegen-arm-inl.h +0 -72
  478. data/ext/v8/upstream/2.1.10/src/arm/ic-arm.cc +0 -1833
  479. data/ext/v8/upstream/2.1.10/src/circular-queue-inl.h +0 -101
  480. data/ext/v8/upstream/2.1.10/src/profile-generator.cc +0 -583
  481. data/ext/v8/upstream/2.1.10/src/profile-generator.h +0 -364
  482. data/ext/v8/upstream/2.1.10/src/x64/ic-x64.cc +0 -1621
data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.cc
@@ -30,8 +30,8 @@
 #include "heap-profiler.h"
 #include "frames-inl.h"
 #include "global-handles.h"
+#include "profile-generator.h"
 #include "string-stream.h"
-#include "zone-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -314,6 +314,90 @@ void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
 } // namespace
 
 
+HeapProfiler* HeapProfiler::singleton_ = NULL;
+
+HeapProfiler::HeapProfiler()
+    : snapshots_(new HeapSnapshotsCollection()),
+      next_snapshot_uid_(1) {
+}
+
+
+HeapProfiler::~HeapProfiler() {
+  delete snapshots_;
+}
+
+#endif // ENABLE_LOGGING_AND_PROFILING
+
+void HeapProfiler::Setup() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  if (singleton_ == NULL) {
+    singleton_ = new HeapProfiler();
+  }
+#endif
+}
+
+
+void HeapProfiler::TearDown() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  delete singleton_;
+  singleton_ = NULL;
+#endif
+}
+
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+
+HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name) {
+  ASSERT(singleton_ != NULL);
+  return singleton_->TakeSnapshotImpl(name);
+}
+
+
+HeapSnapshot* HeapProfiler::TakeSnapshot(String* name) {
+  ASSERT(singleton_ != NULL);
+  return singleton_->TakeSnapshotImpl(name);
+}
+
+
+HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name) {
+  Heap::CollectAllGarbage(false);
+  HeapSnapshot* result = snapshots_->NewSnapshot(name, next_snapshot_uid_++);
+  HeapSnapshotGenerator generator(result);
+  generator.GenerateSnapshot();
+  snapshots_->SnapshotGenerationFinished();
+  return result;
+}
+
+
+HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name) {
+  return TakeSnapshotImpl(snapshots_->GetName(name));
+}
+
+
+int HeapProfiler::GetSnapshotsCount() {
+  ASSERT(singleton_ != NULL);
+  return singleton_->snapshots_->snapshots()->length();
+}
+
+
+HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
+  ASSERT(singleton_ != NULL);
+  return singleton_->snapshots_->snapshots()->at(index);
+}
+
+
+HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
+  ASSERT(singleton_ != NULL);
+  return singleton_->snapshots_->GetSnapshot(uid);
+}
+
+
+void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
+  ASSERT(singleton_ != NULL);
+  singleton_->snapshots_->ObjectMoveEvent(from, to);
+}
+
+
 const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
 const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
 
data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap-profiler.h
@@ -28,26 +28,70 @@
 #ifndef V8_HEAP_PROFILER_H_
 #define V8_HEAP_PROFILER_H_
 
-#include "zone.h"
+#include "zone-inl.h"
 
 namespace v8 {
 namespace internal {
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
 
+class HeapSnapshot;
+class HeapSnapshotsCollection;
+
+#define HEAP_PROFILE(Call)                            \
+  do {                                                \
+    if (v8::internal::HeapProfiler::is_profiling()) { \
+      v8::internal::HeapProfiler::Call;               \
+    }                                                 \
+  } while (false)
+#else
+#define HEAP_PROFILE(Call) ((void) 0)
+#endif // ENABLE_LOGGING_AND_PROFILING
+
 // The HeapProfiler writes data to the log files, which can be postprocessed
 // to generate .hp files for use by the GHC/Valgrind tool hp2ps.
 class HeapProfiler {
  public:
+  static void Setup();
+  static void TearDown();
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  static HeapSnapshot* TakeSnapshot(const char* name);
+  static HeapSnapshot* TakeSnapshot(String* name);
+  static int GetSnapshotsCount();
+  static HeapSnapshot* GetSnapshot(int index);
+  static HeapSnapshot* FindSnapshot(unsigned uid);
+
+  static void ObjectMoveEvent(Address from, Address to);
+
+  static INLINE(bool is_profiling()) {
+    return singleton_ != NULL && singleton_->snapshots_->is_tracking_objects();
+  }
+
+  // Obsolete interface.
   // Write a single heap sample to the log file.
   static void WriteSample();
 
 private:
+  HeapProfiler();
+  ~HeapProfiler();
+  HeapSnapshot* TakeSnapshotImpl(const char* name);
+  HeapSnapshot* TakeSnapshotImpl(String* name);
+
+  // Obsolete interface.
   // Update the array info with stats from obj.
   static void CollectStats(HeapObject* obj, HistogramInfo* info);
+
+  HeapSnapshotsCollection* snapshots_;
+  unsigned next_snapshot_uid_;
+
+  static HeapProfiler* singleton_;
+#endif // ENABLE_LOGGING_AND_PROFILING
 };
 
 
+#ifdef ENABLE_LOGGING_AND_PROFILING
+
 // JSObjectsCluster describes a group of JS objects that are
 // considered equivalent in terms of a particular profile.
 class JSObjectsCluster BASE_EMBEDDED {
data/ext/v8/upstream/{2.1.10 → 2.3.3}/src/heap.cc
@@ -115,14 +115,23 @@ int Heap::external_allocation_limit_ = 0;
 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
 
 int Heap::mc_count_ = 0;
+int Heap::ms_count_ = 0;
 int Heap::gc_count_ = 0;
 
+GCTracer* Heap::tracer_ = NULL;
+
 int Heap::unflattened_strings_length_ = 0;
 
 int Heap::always_allocate_scope_depth_ = 0;
 int Heap::linear_allocation_scope_depth_ = 0;
 int Heap::contexts_disposed_ = 0;
 
+int Heap::young_survivors_after_last_gc_ = 0;
+int Heap::high_survival_rate_period_length_ = 0;
+double Heap::survival_rate_ = 0;
+Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
+Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
+
 #ifdef DEBUG
 bool Heap::allocation_allowed_ = true;
 
@@ -130,6 +139,11 @@ int Heap::allocation_timeout_ = 0;
 bool Heap::disallow_allocation_failure_ = false;
 #endif // DEBUG
 
+int GCTracer::alive_after_last_gc_ = 0;
+double GCTracer::last_gc_end_timestamp_ = 0.0;
+int GCTracer::max_gc_pause_ = 0;
+int GCTracer::max_alive_after_gc_ = 0;
+int GCTracer::min_in_mutator_ = kMaxInt;
 
 int Heap::Capacity() {
   if (!HasBeenSetup()) return 0;
@@ -318,13 +332,6 @@ void Heap::GarbageCollectionPrologue() {
   }
 
   if (FLAG_gc_verbose) Print();
-
-  if (FLAG_print_rset) {
-    // Not all spaces have remembered set bits that we care about.
-    old_pointer_space_->PrintRSet();
-    map_space_->PrintRSet();
-    lo_space_->PrintRSet();
-  }
 #endif
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
@@ -511,9 +518,8 @@ void Heap::ReserveSpace(
       Heap::CollectGarbage(cell_space_size, CELL_SPACE);
       gc_performed = true;
     }
-    // We add a slack-factor of 2 in order to have space for the remembered
-    // set and a series of large-object allocations that are only just larger
-    // than the page size.
+    // We add a slack-factor of 2 in order to have space for a series of
+    // large-object allocations that are only just larger than the page size.
     large_object_size *= 2;
    // The ReserveSpace method on the large object space checks how much
    // we can expand the old generation. This includes expansion caused by
@@ -560,17 +566,59 @@ class ClearThreadJSFunctionResultCachesVisitor: public ThreadVisitor {
 void Heap::ClearJSFunctionResultCaches() {
   if (Bootstrapper::IsActive()) return;
   ClearThreadJSFunctionResultCachesVisitor visitor;
-  ThreadManager::IterateThreads(&visitor);
+  ThreadManager::IterateArchivedThreads(&visitor);
 }
 
 
+#ifdef DEBUG
+
+enum PageWatermarkValidity {
+  ALL_VALID,
+  ALL_INVALID
+};
+
+static void VerifyPageWatermarkValidity(PagedSpace* space,
+                                        PageWatermarkValidity validity) {
+  PageIterator it(space, PageIterator::PAGES_IN_USE);
+  bool expected_value = (validity == ALL_VALID);
+  while (it.has_next()) {
+    Page* page = it.next();
+    ASSERT(page->IsWatermarkValid() == expected_value);
+  }
+}
+#endif
+
+void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+  double survival_rate =
+      (static_cast<double>(young_survivors_after_last_gc_) * 100) /
+      start_new_space_size;
+
+  if (survival_rate > kYoungSurvivalRateThreshold) {
+    high_survival_rate_period_length_++;
+  } else {
+    high_survival_rate_period_length_ = 0;
+  }
+
+  double survival_rate_diff = survival_rate_ - survival_rate;
+
+  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
+    set_survival_rate_trend(DECREASING);
+  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
+    set_survival_rate_trend(INCREASING);
+  } else {
+    set_survival_rate_trend(STABLE);
+  }
+
+  survival_rate_ = survival_rate;
+}
+
 void Heap::PerformGarbageCollection(AllocationSpace space,
                                     GarbageCollector collector,
                                     GCTracer* tracer) {
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }
 
@@ -585,25 +633,55 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
 
   EnsureFromSpaceIsCommitted();
 
+  int start_new_space_size = Heap::new_space()->Size();
+
   if (collector == MARK_COMPACTOR) {
+    if (FLAG_flush_code) {
+      // Flush all potentially unused code.
+      GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_FLUSH_CODE);
+      FlushCode();
+    }
+
     // Perform mark-sweep with optional compaction.
     MarkCompact(tracer);
 
+    bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
+        IsStableOrIncreasingSurvivalTrend();
+
+    UpdateSurvivalRateTrend(start_new_space_size);
+
     int old_gen_size = PromotedSpaceSize();
     old_gen_promotion_limit_ =
         old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
     old_gen_allocation_limit_ =
         old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+
+    if (high_survival_rate_during_scavenges &&
+        IsStableOrIncreasingSurvivalTrend()) {
+      // Stable high survival rates of young objects both during partial and
+      // full collection indicate that mutator is either building or modifying
+      // a structure with a long lifetime.
+      // In this case we aggressively raise old generation memory limits to
+      // postpone subsequent mark-sweep collection and thus trade memory
+      // space for the mutation speed.
+      old_gen_promotion_limit_ *= 2;
+      old_gen_allocation_limit_ *= 2;
+    }
+
     old_gen_exhausted_ = false;
   } else {
+    tracer_ = tracer;
     Scavenge();
+    tracer_ = NULL;
+
+    UpdateSurvivalRateTrend(start_new_space_size);
   }
 
   Counters::objs_since_last_young.Set(0);
 
   if (collector == MARK_COMPACTOR) {
     DisableAssertNoAllocation allow_allocation;
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     GlobalHandles::PostGarbageCollectionProcessing();
   }
 
@@ -627,7 +705,7 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
 
   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
     ASSERT(!allocation_allowed_);
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_epilogue_callback_();
   }
   VerifySymbolTable();
@@ -636,14 +714,19 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
 
 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
-  mc_count_++;
-  tracer->set_full_gc_count(mc_count_);
   LOG(ResourceEvent("markcompact", "begin"));
 
   MarkCompactCollector::Prepare(tracer);
 
   bool is_compacting = MarkCompactCollector::IsCompacting();
 
+  if (is_compacting) {
+    mc_count_++;
+  } else {
+    ms_count_++;
+  }
+  tracer->set_full_gc_count(mc_count_ + ms_count_);
+
   MarkCompactPrologue(is_compacting);
 
   MarkCompactCollector::CollectGarbage();
@@ -674,6 +757,8 @@ void Heap::MarkCompactPrologue(bool is_compacting) {
   Top::MarkCompactPrologue(is_compacting);
   ThreadManager::MarkCompactPrologue(is_compacting);
 
+  CompletelyClearInstanceofCache();
+
   if (is_compacting) FlushNumberStringCache();
 }
 
@@ -715,34 +800,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };
 
 
-// A queue of pointers and maps of to-be-promoted objects during a
-// scavenge collection.
+// A queue of objects promoted during scavenge. Each object is accompanied
+// by it's size to avoid dereferencing a map pointer for scanning.
 class PromotionQueue {
  public:
  void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
+    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
  }
 
  bool is_empty() { return front_ <= rear_; }
 
-  void insert(HeapObject* object, Map* map) {
-    *(--rear_) = object;
-    *(--rear_) = map;
+  void insert(HeapObject* target, int size) {
+    *(--rear_) = reinterpret_cast<intptr_t>(target);
+    *(--rear_) = size;
    // Assert no overflow into live objects.
    ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
  }
 
-  void remove(HeapObject** object, Map** map) {
-    *object = *(--front_);
-    *map = Map::cast(*(--front_));
+  void remove(HeapObject** target, int* size) {
+    *target = reinterpret_cast<HeapObject*>(*(--front_));
+    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    ASSERT(front_ >= rear_);
  }
 
 private:
  // The front of the queue is higher in memory than the rear.
-  HeapObject** front_;
-  HeapObject** rear_;
+  intptr_t* front_;
+  intptr_t* rear_;
 };
 
 
@@ -800,6 +885,20 @@ void Heap::Scavenge() {
 
   gc_state_ = SCAVENGE;
 
+  Page::FlipMeaningOfInvalidatedWatermarkFlag();
+#ifdef DEBUG
+  VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
+  VerifyPageWatermarkValidity(map_space_, ALL_VALID);
+#endif
+
+  // We do not update an allocation watermark of the top page during linear
+  // allocation to avoid overhead. So to maintain the watermark invariant
+  // we have to manually cache the watermark and mark the top page as having an
+  // invalid watermark. This guarantees that dirty regions iteration will use a
+  // correct watermark even if a linear allocation happens.
+  old_pointer_space_->FlushTopPageWatermark();
+  map_space_->FlushTopPageWatermark();
+
   // Implements Cheney's copying algorithm
   LOG(ResourceEvent("scavenge", "begin"));
 
@@ -842,9 +941,17 @@ void Heap::Scavenge() {
 
   // Copy objects reachable from the old generation. By definition,
   // there are no intergenerational pointers in code or data spaces.
-  IterateRSet(old_pointer_space_, &ScavengePointer);
-  IterateRSet(map_space_, &ScavengePointer);
-  lo_space_->IterateRSet(&ScavengePointer);
+  IterateDirtyRegions(old_pointer_space_,
+                      &IteratePointersInDirtyRegion,
+                      &ScavengePointer,
+                      WATERMARK_CAN_BE_INVALID);
+
+  IterateDirtyRegions(map_space_,
+                      &IteratePointersInDirtyMapsRegion,
+                      &ScavengePointer,
+                      WATERMARK_CAN_BE_INVALID);
+
+  lo_space_->IterateDirtyRegions(&ScavengePointer);
 
   // Copy objects reachable from cells by scavenging cell values directly.
   HeapObjectIterator cell_iterator(cell_space_);
@@ -935,30 +1042,26 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
   // queue is empty.
   while (new_space_front < new_space_.top()) {
     HeapObject* object = HeapObject::FromAddress(new_space_front);
-      object->Iterate(scavenge_visitor);
-      new_space_front += object->Size();
+      Map* map = object->map();
+      int size = object->SizeFromMap(map);
+      object->IterateBody(map->instance_type(), size, scavenge_visitor);
+      new_space_front += size;
   }
 
   // Promote and process all the to-be-promoted objects.
   while (!promotion_queue.is_empty()) {
-      HeapObject* source;
-      Map* map;
-      promotion_queue.remove(&source, &map);
-      // Copy the from-space object to its new location (given by the
-      // forwarding address) and fix its map.
-      HeapObject* target = source->map_word().ToForwardingAddress();
-      CopyBlock(reinterpret_cast<Object**>(target->address()),
-                reinterpret_cast<Object**>(source->address()),
-                source->SizeFromMap(map));
-      target->set_map(map);
-
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-      // Update NewSpace stats if necessary.
-      RecordCopiedObject(target);
-#endif
-      // Visit the newly copied object for pointers to new space.
-      target->Iterate(scavenge_visitor);
-      UpdateRSet(target);
+      HeapObject* target;
+      int size;
+      promotion_queue.remove(&target, &size);
+
+      // Promoted object might be already partially visited
+      // during dirty regions iteration. Thus we search specificly
+      // for pointers to from semispace instead of looking for pointers
+      // to new space.
+      ASSERT(!target->IsMap());
+      IterateAndMarkPointersToFromSpace(target->address(),
+                                        target->address() + size,
+                                        &ScavengePointer);
   }
 
   // Take another spin if there are now unswept objects in new space
@@ -969,258 +1072,323 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 }
 
 
-void Heap::ClearRSetRange(Address start, int size_in_bytes) {
-  uint32_t start_bit;
-  Address start_word_address =
-      Page::ComputeRSetBitPosition(start, 0, &start_bit);
-  uint32_t end_bit;
-  Address end_word_address =
-      Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize,
-                                   0,
-                                   &end_bit);
-
-  // We want to clear the bits in the starting word starting with the
-  // first bit, and in the ending word up to and including the last
-  // bit. Build a pair of bitmasks to do that.
-  uint32_t start_bitmask = start_bit - 1;
-  uint32_t end_bitmask = ~((end_bit << 1) - 1);
-
-  // If the start address and end address are the same, we mask that
-  // word once, otherwise mask the starting and ending word
-  // separately and all the ones in between.
-  if (start_word_address == end_word_address) {
-    Memory::uint32_at(start_word_address) &= (start_bitmask | end_bitmask);
-  } else {
-    Memory::uint32_at(start_word_address) &= start_bitmask;
-    Memory::uint32_at(end_word_address) &= end_bitmask;
-    start_word_address += kIntSize;
-    memset(start_word_address, 0, end_word_address - start_word_address);
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+static void RecordCopiedObject(HeapObject* obj) {
+  bool should_record = false;
+#ifdef DEBUG
+  should_record = FLAG_heap_stats;
+#endif
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  should_record = should_record || FLAG_log_gc;
+#endif
+  if (should_record) {
+    if (Heap::new_space()->Contains(obj)) {
+      Heap::new_space()->RecordAllocation(obj);
+    } else {
+      Heap::new_space()->RecordPromotion(obj);
+    }
   }
 }
+#endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 
 
-class UpdateRSetVisitor: public ObjectVisitor {
- public:
+// Helper function used by CopyObject to copy a source object to an
+// allocated target object and update the forwarding pointer in the source
+// object. Returns the target object.
+inline static HeapObject* MigrateObject(HeapObject* source,
+                                        HeapObject* target,
+                                        int size) {
+  // Copy the content of source to target.
+  Heap::CopyBlock(target->address(), source->address(), size);
 
-  void VisitPointer(Object** p) {
-    UpdateRSet(p);
-  }
+  // Set the forwarding address.
+  source->set_map_word(MapWord::FromForwardingAddress(target));
+
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+  // Update NewSpace stats if necessary.
+  RecordCopiedObject(target);
+#endif
+  HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+
+  return target;
+}
+
+
+enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
+enum SizeRestriction { SMALL, UNKNOWN_SIZE };
 
-  void VisitPointers(Object** start, Object** end) {
-    // Update a store into slots [start, end), used (a) to update remembered
-    // set when promoting a young object to old space or (b) to rebuild
-    // remembered sets after a mark-compact collection.
-    for (Object** p = start; p < end; p++) UpdateRSet(p);
-  }
- private:
 
-  void UpdateRSet(Object** p) {
-    // The remembered set should not be set. It should be clear for objects
-    // newly copied to old space, and it is cleared before rebuilding in the
-    // mark-compact collector.
-    ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
-    if (Heap::InNewSpace(*p)) {
-      Page::SetRSet(reinterpret_cast<Address>(p), 0);
+template<ObjectContents object_contents, SizeRestriction size_restriction>
+static inline void EvacuateObject(Map* map,
+                                  HeapObject** slot,
+                                  HeapObject* object,
+                                  int object_size) {
+  ASSERT((size_restriction != SMALL) ||
+         (object_size <= Page::kMaxHeapObjectSize));
+  ASSERT(object->Size() == object_size);
+
+  if (Heap::ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+
+    if ((size_restriction != SMALL) &&
+        (object_size > Page::kMaxHeapObjectSize)) {
+      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+    } else {
+      if (object_contents == DATA_OBJECT) {
+        result = Heap::old_data_space()->AllocateRaw(object_size);
+      } else {
+        result = Heap::old_pointer_space()->AllocateRaw(object_size);
+      }
     }
-  }
-};
 
+    if (!result->IsFailure()) {
+      HeapObject* target = HeapObject::cast(result);
+      *slot = MigrateObject(object, target, object_size);
 
-int Heap::UpdateRSet(HeapObject* obj) {
-  ASSERT(!InNewSpace(obj));
-  // Special handling of fixed arrays to iterate the body based on the start
-  // address and offset. Just iterating the pointers as in UpdateRSetVisitor
-  // will not work because Page::SetRSet needs to have the start of the
-  // object for large object pages.
-  if (obj->IsFixedArray()) {
-    FixedArray* array = FixedArray::cast(obj);
-    int length = array->length();
-    for (int i = 0; i < length; i++) {
-      int offset = FixedArray::kHeaderSize + i * kPointerSize;
-      ASSERT(!Page::IsRSetSet(obj->address(), offset));
-      if (Heap::InNewSpace(array->get(i))) {
-        Page::SetRSet(obj->address(), offset);
+      if (object_contents == POINTER_OBJECT) {
+        promotion_queue.insert(target, object_size);
       }
+
+      Heap::tracer()->increment_promoted_objects_size(object_size);
+      return;
     }
-  } else if (!obj->IsCode()) {
-    // Skip code object, we know it does not contain inter-generational
-    // pointers.
-    UpdateRSetVisitor v;
-    obj->Iterate(&v);
   }
-  return obj->Size();
+  Object* result = Heap::new_space()->AllocateRaw(object_size);
+  ASSERT(!result->IsFailure());
+  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+  return;
 }
 
 
-void Heap::RebuildRSets() {
-  // By definition, we do not care about remembered set bits in code,
-  // data, or cell spaces.
-  map_space_->ClearRSet();
-  RebuildRSets(map_space_);
+template<int object_size_in_words, ObjectContents object_contents>
+static inline void EvacuateObjectOfFixedSize(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  const int object_size = object_size_in_words << kPointerSizeLog2;
1168
+ EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
1169
+ }
1060
1170
 
1061
- old_pointer_space_->ClearRSet();
1062
- RebuildRSets(old_pointer_space_);
1063
1171
 
1064
- Heap::lo_space_->ClearRSet();
1065
- RebuildRSets(lo_space_);
1172
+ template<ObjectContents object_contents>
1173
+ static inline void EvacuateObjectOfFixedSize(Map* map,
1174
+ HeapObject** slot,
1175
+ HeapObject* object) {
1176
+ int object_size = map->instance_size();
1177
+ EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
1066
1178
  }
1067
1179
 
1068
1180
 
1069
- void Heap::RebuildRSets(PagedSpace* space) {
1070
- HeapObjectIterator it(space);
1071
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
1072
- Heap::UpdateRSet(obj);
1181
+ static inline void EvacuateFixedArray(Map* map,
1182
+ HeapObject** slot,
1183
+ HeapObject* object) {
1184
+ int object_size = FixedArray::cast(object)->FixedArraySize();
1185
+ EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1073
1186
  }
1074
1187
 
1075
1188
 
1076
- void Heap::RebuildRSets(LargeObjectSpace* space) {
1077
- LargeObjectIterator it(space);
1078
- for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
1079
- Heap::UpdateRSet(obj);
1189
+ static inline void EvacuateByteArray(Map* map,
1190
+ HeapObject** slot,
1191
+ HeapObject* object) {
1192
+ int object_size = ByteArray::cast(object)->ByteArraySize();
1193
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1080
1194
  }
1081
1195
 
1082
1196
 
1083
- #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1084
- void Heap::RecordCopiedObject(HeapObject* obj) {
1085
- bool should_record = false;
1086
- #ifdef DEBUG
1087
- should_record = FLAG_heap_stats;
1088
- #endif
1089
- #ifdef ENABLE_LOGGING_AND_PROFILING
1090
- should_record = should_record || FLAG_log_gc;
1091
- #endif
1092
- if (should_record) {
1093
- if (new_space_.Contains(obj)) {
1094
- new_space_.RecordAllocation(obj);
1095
- } else {
1096
- new_space_.RecordPromotion(obj);
1097
- }
1197
+ static Scavenger GetScavengerForSize(int object_size,
1198
+ ObjectContents object_contents) {
1199
+ ASSERT(IsAligned(object_size, kPointerSize));
1200
+ ASSERT(object_size < Page::kMaxHeapObjectSize);
1201
+
1202
+ switch (object_size >> kPointerSizeLog2) {
1203
+ #define CASE(n) \
1204
+ case n: \
1205
+ if (object_contents == DATA_OBJECT) { \
1206
+ return static_cast<Scavenger>( \
1207
+ &EvacuateObjectOfFixedSize<n, DATA_OBJECT>); \
1208
+ } else { \
1209
+ return static_cast<Scavenger>( \
1210
+ &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
1211
+ }
1212
+
1213
+ CASE(1);
1214
+ CASE(2);
1215
+ CASE(3);
1216
+ CASE(4);
1217
+ CASE(5);
1218
+ CASE(6);
1219
+ CASE(7);
1220
+ CASE(8);
1221
+ CASE(9);
1222
+ CASE(10);
1223
+ CASE(11);
1224
+ CASE(12);
1225
+ CASE(13);
1226
+ CASE(14);
1227
+ CASE(15);
1228
+ CASE(16);
1229
+ default:
1230
+ if (object_contents == DATA_OBJECT) {
1231
+ return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
1232
+ } else {
1233
+ return static_cast<Scavenger>(
1234
+ &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
1235
+ }
1236
+
1237
+ #undef CASE
1098
1238
  }
1099
1239
  }
1100
- #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1101
1240
 
1102
1241
 
1242
+ static inline void EvacuateSeqAsciiString(Map* map,
1243
+ HeapObject** slot,
1244
+ HeapObject* object) {
1245
+ int object_size = SeqAsciiString::cast(object)->
1246
+ SeqAsciiStringSize(map->instance_type());
1247
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1248
+ }
1103
1249
 
1104
- HeapObject* Heap::MigrateObject(HeapObject* source,
1105
- HeapObject* target,
1106
- int size) {
1107
- // Copy the content of source to target.
1108
- CopyBlock(reinterpret_cast<Object**>(target->address()),
1109
- reinterpret_cast<Object**>(source->address()),
1110
- size);
1111
1250
 
1112
- // Set the forwarding address.
1113
- source->set_map_word(MapWord::FromForwardingAddress(target));
1251
+ static inline void EvacuateSeqTwoByteString(Map* map,
1252
+ HeapObject** slot,
1253
+ HeapObject* object) {
1254
+ int object_size = SeqTwoByteString::cast(object)->
1255
+ SeqTwoByteStringSize(map->instance_type());
1256
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1257
+ }
1114
1258
 
1115
- #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1116
- // Update NewSpace stats if necessary.
1117
- RecordCopiedObject(target);
1118
- #endif
1119
1259
 
1120
- return target;
1260
+ static inline bool IsShortcutCandidate(int type) {
1261
+ return ((type & kShortcutTypeMask) == kShortcutTypeTag);
1121
1262
  }
1122
1263
 
1123
1264
 
1124
- static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
1125
- STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
1126
- ASSERT(object->map() == map);
1127
- InstanceType type = map->instance_type();
1128
- if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
1129
- ASSERT(object->IsString() && !object->IsSymbol());
1130
- return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
1131
- }
1265
+ static inline void EvacuateShortcutCandidate(Map* map,
1266
+ HeapObject** slot,
1267
+ HeapObject* object) {
1268
+ ASSERT(IsShortcutCandidate(map->instance_type()));
1132
1269
 
1270
+ if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
1271
+ HeapObject* first =
1272
+ HeapObject::cast(ConsString::cast(object)->unchecked_first());
1133
1273
 
1134
- void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1135
- ASSERT(InFromSpace(object));
1136
- MapWord first_word = object->map_word();
1137
- ASSERT(!first_word.IsForwardingAddress());
1274
+ *slot = first;
1138
1275
 
1139
- // Optimization: Bypass flattened ConsString objects.
1140
- if (IsShortcutCandidate(object, first_word.ToMap())) {
1141
- object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
1142
- *p = object;
1143
- // After patching *p we have to repeat the checks that object is in the
1144
- // active semispace of the young generation and not already copied.
1145
- if (!InNewSpace(object)) return;
1146
- first_word = object->map_word();
1276
+ if (!Heap::InNewSpace(first)) {
1277
+ object->set_map_word(MapWord::FromForwardingAddress(first));
1278
+ return;
1279
+ }
1280
+
1281
+ MapWord first_word = first->map_word();
1147
1282
  if (first_word.IsForwardingAddress()) {
1148
- *p = first_word.ToForwardingAddress();
1283
+ HeapObject* target = first_word.ToForwardingAddress();
1284
+
1285
+ *slot = target;
1286
+ object->set_map_word(MapWord::FromForwardingAddress(target));
1149
1287
  return;
1150
1288
  }
1289
+
1290
+ first->map()->Scavenge(slot, first);
1291
+ object->set_map_word(MapWord::FromForwardingAddress(*slot));
1292
+ return;
1151
1293
  }
1152
1294
 
1153
- int object_size = object->SizeFromMap(first_word.ToMap());
1154
- // We rely on live objects in new space to be at least two pointers,
1155
- // so we can store the from-space address and map pointer of promoted
1156
- // objects in the to space.
1157
- ASSERT(object_size >= 2 * kPointerSize);
1295
+ int object_size = ConsString::kSize;
1296
+ EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
1297
+ }
1158
1298
 
1159
- // If the object should be promoted, we try to copy it to old space.
1160
- if (ShouldBePromoted(object->address(), object_size)) {
1161
- Object* result;
1162
- if (object_size > MaxObjectSizeInPagedSpace()) {
1163
- result = lo_space_->AllocateRawFixedArray(object_size);
1164
- if (!result->IsFailure()) {
1165
- // Save the from-space object pointer and its map pointer at the
1166
- // top of the to space to be swept and copied later. Write the
1167
- // forwarding address over the map word of the from-space
1168
- // object.
1169
- HeapObject* target = HeapObject::cast(result);
1170
- promotion_queue.insert(object, first_word.ToMap());
1171
- object->set_map_word(MapWord::FromForwardingAddress(target));
1172
-
1173
- // Give the space allocated for the result a proper map by
1174
- // treating it as a free list node (not linked into the free
1175
- // list).
1176
- FreeListNode* node = FreeListNode::FromAddress(target->address());
1177
- node->set_size(object_size);
1178
-
1179
- *p = target;
1180
- return;
1181
- }
1182
- } else {
1183
- OldSpace* target_space = Heap::TargetSpace(object);
1184
- ASSERT(target_space == Heap::old_pointer_space_ ||
1185
- target_space == Heap::old_data_space_);
1186
- result = target_space->AllocateRaw(object_size);
1187
- if (!result->IsFailure()) {
1188
- HeapObject* target = HeapObject::cast(result);
1189
- if (target_space == Heap::old_pointer_space_) {
1190
- // Save the from-space object pointer and its map pointer at the
1191
- // top of the to space to be swept and copied later. Write the
1192
- // forwarding address over the map word of the from-space
1193
- // object.
1194
- promotion_queue.insert(object, first_word.ToMap());
1195
- object->set_map_word(MapWord::FromForwardingAddress(target));
1196
-
1197
- // Give the space allocated for the result a proper map by
1198
- // treating it as a free list node (not linked into the free
1199
- // list).
1200
- FreeListNode* node = FreeListNode::FromAddress(target->address());
1201
- node->set_size(object_size);
1202
-
1203
- *p = target;
1299
+
1300
+ Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
1301
+ if (instance_type < FIRST_NONSTRING_TYPE) {
1302
+ switch (instance_type & kStringRepresentationMask) {
1303
+ case kSeqStringTag:
1304
+ if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
1305
+ return &EvacuateSeqAsciiString;
1204
1306
  } else {
1205
- // Objects promoted to the data space can be copied immediately
1206
- // and not revisited---we will never sweep that space for
1207
- // pointers and the copied objects do not contain pointers to
1208
- // new space objects.
1209
- *p = MigrateObject(object, target, object_size);
1210
- #ifdef DEBUG
1211
- VerifyNonPointerSpacePointersVisitor v;
1212
- (*p)->Iterate(&v);
1213
- #endif
1307
+ return &EvacuateSeqTwoByteString;
1214
1308
  }
1215
- return;
1216
- }
1309
+
1310
+ case kConsStringTag:
1311
+ if (IsShortcutCandidate(instance_type)) {
1312
+ return &EvacuateShortcutCandidate;
1313
+ } else {
1314
+ ASSERT(instance_size == ConsString::kSize);
1315
+ return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
1316
+ }
1317
+
1318
+ case kExternalStringTag:
1319
+ ASSERT(instance_size == ExternalString::kSize);
1320
+ return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
1217
1321
  }
1322
+ UNREACHABLE();
1218
1323
  }
1219
- // The object should remain in new space or the old space allocation failed.
1220
- Object* result = new_space_.AllocateRaw(object_size);
1221
- // Failed allocation at this point is utterly unexpected.
1222
- ASSERT(!result->IsFailure());
1223
- *p = MigrateObject(object, HeapObject::cast(result), object_size);
1324
+
1325
+ switch (instance_type) {
1326
+ case BYTE_ARRAY_TYPE:
1327
+ return reinterpret_cast<Scavenger>(&EvacuateByteArray);
1328
+
1329
+ case FIXED_ARRAY_TYPE:
1330
+ return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
1331
+
1332
+ case JS_OBJECT_TYPE:
1333
+ case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1334
+ case JS_VALUE_TYPE:
1335
+ case JS_ARRAY_TYPE:
1336
+ case JS_REGEXP_TYPE:
1337
+ case JS_FUNCTION_TYPE:
1338
+ case JS_GLOBAL_PROXY_TYPE:
1339
+ case JS_GLOBAL_OBJECT_TYPE:
1340
+ case JS_BUILTINS_OBJECT_TYPE:
1341
+ return GetScavengerForSize(instance_size, POINTER_OBJECT);
1342
+
1343
+ case ODDBALL_TYPE:
1344
+ return NULL;
1345
+
1346
+ case PROXY_TYPE:
1347
+ return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
1348
+
1349
+ case MAP_TYPE:
1350
+ return NULL;
1351
+
1352
+ case CODE_TYPE:
1353
+ return NULL;
1354
+
1355
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
1356
+ return NULL;
1357
+
1358
+ case HEAP_NUMBER_TYPE:
1359
+ case FILLER_TYPE:
1360
+ case PIXEL_ARRAY_TYPE:
1361
+ case EXTERNAL_BYTE_ARRAY_TYPE:
1362
+ case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
1363
+ case EXTERNAL_SHORT_ARRAY_TYPE:
1364
+ case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
1365
+ case EXTERNAL_INT_ARRAY_TYPE:
1366
+ case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
1367
+ case EXTERNAL_FLOAT_ARRAY_TYPE:
1368
+ return GetScavengerForSize(instance_size, DATA_OBJECT);
1369
+
1370
+ case SHARED_FUNCTION_INFO_TYPE:
1371
+ return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
1372
+ POINTER_OBJECT);
1373
+
1374
+ #define MAKE_STRUCT_CASE(NAME, Name, name) \
1375
+ case NAME##_TYPE:
1376
+ STRUCT_LIST(MAKE_STRUCT_CASE)
1377
+ #undef MAKE_STRUCT_CASE
1378
+ return GetScavengerForSize(instance_size, POINTER_OBJECT);
1379
+ default:
1380
+ UNREACHABLE();
1381
+ return NULL;
1382
+ }
1383
+ }
1384
+
+
+ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
+   ASSERT(InFromSpace(object));
+   MapWord first_word = object->map_word();
+   ASSERT(!first_word.IsForwardingAddress());
+   Map* map = first_word.ToMap();
+   map->Scavenge(p, object);
  }
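The rewritten slow path above no longer contains the copy/promotion logic itself: each Map now stores a scavenger function pointer, chosen once by Heap::GetScavenger from the instance type and size, and ScavengeObjectSlow simply dispatches through the map. The following self-contained sketch illustrates only that dispatch pattern; FakeMap, FakeObject and the two Evacuate* routines are simplified stand-ins for illustration, not V8 types.

    #include <cstdio>

    // Simplified stand-ins for HeapObject and Map; only the per-map scavenger
    // dispatch mechanism is illustrated here.
    struct FakeObject;
    typedef void (*Scavenger)(FakeObject** slot, FakeObject* object);

    struct FakeMap {
      Scavenger scavenger;  // chosen once, when the map is created

      void Scavenge(FakeObject** slot, FakeObject* object) {
        scavenger(slot, object);  // single indirect call on the hot path
      }
    };

    struct FakeObject {
      FakeMap* map;
      int size_in_words;
    };

    // Two illustrative evacuation routines, mirroring the DATA_OBJECT /
    // POINTER_OBJECT split in the patch (the bodies are placeholders).
    static void EvacuateDataObject(FakeObject** slot, FakeObject* object) {
      std::printf("copy %d words, no pointer fixup needed\n", object->size_in_words);
      *slot = object;  // a real scavenger would store the new location here
    }

    static void EvacuatePointerObject(FakeObject** slot, FakeObject* object) {
      std::printf("copy %d words, queue body for pointer iteration\n",
                  object->size_in_words);
      *slot = object;
    }

    // Analogue of Heap::GetScavenger: pick a routine up front so the per-object
    // scavenge needs no type switch.
    static Scavenger GetScavenger(bool contains_pointers) {
      return contains_pointers ? &EvacuatePointerObject : &EvacuateDataObject;
    }

    int main() {
      FakeMap data_map = { GetScavenger(false) };
      FakeMap pointer_map = { GetScavenger(true) };

      FakeObject a = { &data_map, 2 };
      FakeObject b = { &pointer_map, 5 };

      FakeObject* slot_a = &a;
      FakeObject* slot_b = &b;

      // Equivalent of the new ScavengeObjectSlow: dispatch through the map.
      a.map->Scavenge(&slot_a, &a);
      b.map->Scavenge(&slot_b, &b);
      return 0;
    }

The point of the indirection is that the expensive type dispatch (the large switch in GetScavenger) runs once per map rather than once per scavenged object.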
 
 
@@ -1238,6 +1406,8 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
1238
1406
  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1239
1407
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1240
1408
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1409
+ reinterpret_cast<Map*>(result)->
1410
+ set_scavenger(GetScavenger(instance_type, instance_size));
1241
1411
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1242
1412
  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
1243
1413
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1254,6 +1424,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1254
1424
  Map* map = reinterpret_cast<Map*>(result);
1255
1425
  map->set_map(meta_map());
1256
1426
  map->set_instance_type(instance_type);
1427
+ map->set_scavenger(GetScavenger(instance_type, instance_size));
1257
1428
  map->set_prototype(null_value());
1258
1429
  map->set_constructor(null_value());
1259
1430
  map->set_instance_size(instance_size);
@@ -1263,7 +1434,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1263
1434
  map->set_code_cache(empty_fixed_array());
1264
1435
  map->set_unused_property_fields(0);
1265
1436
  map->set_bit_field(0);
1266
- map->set_bit_field2(1 << Map::kIsExtensible);
1437
+ map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
1267
1438
 
1268
1439
  // If the map object is aligned fill the padding area with Smi 0 objects.
1269
1440
  if (Map::kPadStart < Map::kSize) {
@@ -1664,7 +1835,7 @@ bool Heap::CreateInitialObjects() {
1664
1835
  // loop above because it needs to be allocated manually with the special
1665
1836
  // hash code in place. The hash code for the hidden_symbol is zero to ensure
1666
1837
  // that it will always be at the first entry in property descriptors.
1667
- obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
1838
+ obj = AllocateSymbol(CStrVector(""), 0, String::kZeroHash);
1668
1839
  if (obj->IsFailure()) return false;
1669
1840
  hidden_symbol_ = String::cast(obj);
1670
1841
 
@@ -1685,6 +1856,10 @@ bool Heap::CreateInitialObjects() {
1685
1856
  if (obj->IsFailure()) return false;
1686
1857
  set_non_monomorphic_cache(NumberDictionary::cast(obj));
1687
1858
 
1859
+ set_instanceof_cache_function(Smi::FromInt(0));
1860
+ set_instanceof_cache_map(Smi::FromInt(0));
1861
+ set_instanceof_cache_answer(Smi::FromInt(0));
1862
+
1688
1863
  CreateFixedStubs();
1689
1864
 
1690
1865
  if (InitializeNumberStringCache()->IsFailure()) return false;
@@ -1882,6 +2057,7 @@ Object* Heap::AllocateSharedFunctionInfo(Object* name) {
1882
2057
  share->set_name(name);
1883
2058
  Code* illegal = Builtins::builtin(Builtins::Illegal);
1884
2059
  share->set_code(illegal);
2060
+ share->set_scope_info(SerializedScopeInfo::Empty());
1885
2061
  Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
1886
2062
  share->set_construct_stub(construct_stub);
1887
2063
  share->set_expected_nof_properties(0);
@@ -1896,6 +2072,9 @@ Object* Heap::AllocateSharedFunctionInfo(Object* name) {
1896
2072
  share->set_compiler_hints(0);
1897
2073
  share->set_this_property_assignments_count(0);
1898
2074
  share->set_this_property_assignments(undefined_value());
2075
+ share->set_num_literals(0);
2076
+ share->set_end_position(0);
2077
+ share->set_function_token_position(0);
1899
2078
  return result;
1900
2079
  }
1901
2080
 
@@ -1968,6 +2147,18 @@ Object* Heap::AllocateConsString(String* first, String* second) {
1968
2147
  return Failure::OutOfMemoryException();
1969
2148
  }
1970
2149
 
2150
+ bool is_ascii_data_in_two_byte_string = false;
2151
+ if (!is_ascii) {
2152
+ // At least one of the strings uses two-byte representation so we
2153
+ // can't use the fast case code for short ascii strings below, but
2154
+ // we can try to save memory if all chars actually fit in ascii.
2155
+ is_ascii_data_in_two_byte_string =
2156
+ first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
2157
+ if (is_ascii_data_in_two_byte_string) {
2158
+ Counters::string_add_runtime_ext_to_ascii.Increment();
2159
+ }
2160
+ }
2161
+
1971
2162
  // If the resulting string is small make a flat string.
1972
2163
  if (length < String::kMinNonFlatLength) {
1973
2164
  ASSERT(first->IsFlat());
@@ -1994,22 +2185,13 @@ Object* Heap::AllocateConsString(String* first, String* second) {
1994
2185
  for (int i = 0; i < second_length; i++) *dest++ = src[i];
1995
2186
  return result;
1996
2187
  } else {
1997
- // For short external two-byte strings we check whether they can
1998
- // be represented using ascii.
1999
- if (!first_is_ascii) {
2000
- first_is_ascii = first->IsExternalTwoByteStringWithAsciiChars();
2001
- }
2002
- if (first_is_ascii && !second_is_ascii) {
2003
- second_is_ascii = second->IsExternalTwoByteStringWithAsciiChars();
2004
- }
2005
- if (first_is_ascii && second_is_ascii) {
2188
+ if (is_ascii_data_in_two_byte_string) {
2006
2189
  Object* result = AllocateRawAsciiString(length);
2007
2190
  if (result->IsFailure()) return result;
2008
2191
  // Copy the characters into the new object.
2009
2192
  char* dest = SeqAsciiString::cast(result)->GetChars();
2010
2193
  String::WriteToFlat(first, dest, 0, first_length);
2011
2194
  String::WriteToFlat(second, dest + first_length, 0, second_length);
2012
- Counters::string_add_runtime_ext_to_ascii.Increment();
2013
2195
  return result;
2014
2196
  }
2015
2197
 
@@ -2023,7 +2205,8 @@ Object* Heap::AllocateConsString(String* first, String* second) {
2023
2205
  }
2024
2206
  }
2025
2207
 
2026
- Map* map = is_ascii ? cons_ascii_string_map() : cons_string_map();
2208
+ Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
2209
+ cons_ascii_string_map() : cons_string_map();
2027
2210
 
2028
2211
  Object* result = Allocate(map, NEW_SPACE);
2029
2212
  if (result->IsFailure()) return result;
@@ -2058,7 +2241,7 @@ Object* Heap::AllocateSubString(String* buffer,
2058
2241
  }
2059
2242
 
2060
2243
  // Make an attempt to flatten the buffer to reduce access time.
2061
- buffer->TryFlatten();
2244
+ buffer = buffer->TryFlattenGetString();
2062
2245
 
2063
2246
  Object* result = buffer->IsAsciiRepresentation()
2064
2247
  ? AllocateRawAsciiString(length, pretenure )
@@ -2109,7 +2292,23 @@ Object* Heap::AllocateExternalStringFromTwoByte(
2109
2292
  return Failure::OutOfMemoryException();
2110
2293
  }
2111
2294
 
2112
- Map* map = Heap::external_string_map();
2295
+ // For small strings we check whether the resource contains only
2296
+ // ascii characters. If yes, we use a different string map.
2297
+ bool is_ascii = true;
2298
+ if (length >= static_cast<size_t>(String::kMinNonFlatLength)) {
2299
+ is_ascii = false;
2300
+ } else {
2301
+ const uc16* data = resource->data();
2302
+ for (size_t i = 0; i < length; i++) {
2303
+ if (data[i] > String::kMaxAsciiCharCode) {
2304
+ is_ascii = false;
2305
+ break;
2306
+ }
2307
+ }
2308
+ }
2309
+
2310
+ Map* map = is_ascii ?
2311
+ Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
2113
2312
  Object* result = Allocate(map, NEW_SPACE);
2114
2313
  if (result->IsFailure()) return result;
2115
2314
 
@@ -2157,8 +2356,8 @@ Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
2157
2356
  : lo_space_->AllocateRaw(size);
2158
2357
  if (result->IsFailure()) return result;
2159
2358
 
2160
- reinterpret_cast<Array*>(result)->set_map(byte_array_map());
2161
- reinterpret_cast<Array*>(result)->set_length(length);
2359
+ reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
2360
+ reinterpret_cast<ByteArray*>(result)->set_length(length);
2162
2361
  return result;
2163
2362
  }
2164
2363
 
@@ -2173,8 +2372,8 @@ Object* Heap::AllocateByteArray(int length) {
2173
2372
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
2174
2373
  if (result->IsFailure()) return result;
2175
2374
 
2176
- reinterpret_cast<Array*>(result)->set_map(byte_array_map());
2177
- reinterpret_cast<Array*>(result)->set_length(length);
2375
+ reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
2376
+ reinterpret_cast<ByteArray*>(result)->set_length(length);
2178
2377
  return result;
2179
2378
  }
2180
2379
 
@@ -2228,15 +2427,98 @@ Object* Heap::AllocateExternalArray(int length,
2228
2427
  }
2229
2428
 
2230
2429
 
2430
+ // The StackVisitor is used to traverse all the archived threads to see if
2431
+ // there are activations on any of the stacks corresponding to the code.
2432
+ class FlushingStackVisitor : public ThreadVisitor {
2433
+ public:
2434
+ explicit FlushingStackVisitor(Code* code) : found_(false), code_(code) {}
2435
+
2436
+ void VisitThread(ThreadLocalTop* top) {
2437
+ // If we already found the code in a previous traversed thread we return.
2438
+ if (found_) return;
2439
+
2440
+ for (StackFrameIterator it(top); !it.done(); it.Advance()) {
2441
+ if (code_->contains(it.frame()->pc())) {
2442
+ found_ = true;
2443
+ return;
2444
+ }
2445
+ }
2446
+ }
2447
+ bool FoundCode() {return found_;}
2448
+
2449
+ private:
2450
+ bool found_;
2451
+ Code* code_;
2452
+ };
2453
+
2454
+
2455
+ static void FlushCodeForFunction(SharedFunctionInfo* function_info) {
2456
+ // The function must be compiled and have the source code available,
2457
+ // to be able to recompile it in case we need the function again.
2458
+ if (!(function_info->is_compiled() && function_info->HasSourceCode())) return;
2459
+
2460
+ // We never flush code for Api functions.
2461
+ if (function_info->IsApiFunction()) return;
2462
+
2463
+ // Only flush code for functions.
2464
+ if (!function_info->code()->kind() == Code::FUNCTION) return;
2465
+
2466
+ // Function must be lazy compilable.
2467
+ if (!function_info->allows_lazy_compilation()) return;
2468
+
2469
+ // If this is a full script wrapped in a function we do no flush the code.
2470
+ if (function_info->is_toplevel()) return;
2471
+
2472
+ // If this function is in the compilation cache we do not flush the code.
2473
+ if (CompilationCache::HasFunction(function_info)) return;
2474
+
2475
+ // Make sure we are not referencing the code from the stack.
2476
+ for (StackFrameIterator it; !it.done(); it.Advance()) {
2477
+ if (function_info->code()->contains(it.frame()->pc())) return;
2478
+ }
2479
+ // Iterate the archived stacks in all threads to check if
2480
+ // the code is referenced.
2481
+ FlushingStackVisitor threadvisitor(function_info->code());
2482
+ ThreadManager::IterateArchivedThreads(&threadvisitor);
2483
+ if (threadvisitor.FoundCode()) return;
2484
+
2485
+ // Compute the lazy compilable version of the code.
2486
+ HandleScope scope;
2487
+ function_info->set_code(*ComputeLazyCompile(function_info->length()));
2488
+ }
2489
+
2490
+
2491
+ void Heap::FlushCode() {
2492
+ #ifdef ENABLE_DEBUGGER_SUPPORT
2493
+ // Do not flush code if the debugger is loaded or there are breakpoints.
2494
+ if (Debug::IsLoaded() || Debug::has_break_points()) return;
2495
+ #endif
2496
+ HeapObjectIterator it(old_pointer_space());
2497
+ for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
2498
+ if (obj->IsJSFunction()) {
2499
+ JSFunction* jsfunction = JSFunction::cast(obj);
2500
+
2501
+ // The function must have a valid context and not be a builtin.
2502
+ if (jsfunction->unchecked_context()->IsContext() &&
2503
+ !jsfunction->IsBuiltin()) {
2504
+ FlushCodeForFunction(jsfunction->shared());
2505
+ }
2506
+ }
2507
+ }
2508
+ }
2509
+
2510
+
2231
2511
  Object* Heap::CreateCode(const CodeDesc& desc,
2232
- ZoneScopeInfo* sinfo,
2233
2512
  Code::Flags flags,
2234
2513
  Handle<Object> self_reference) {
2514
+ // Allocate ByteArray before the Code object, so that we do not risk
2515
+ // leaving uninitialized Code object (and breaking the heap).
2516
+ Object* reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
2517
+ if (reloc_info->IsFailure()) return reloc_info;
2518
+
2235
2519
  // Compute size
2236
- int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
2237
- int sinfo_size = 0;
2238
- if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
2239
- int obj_size = Code::SizeFor(body_size, sinfo_size);
2520
+ int body_size = RoundUp(desc.instr_size, kObjectAlignment);
2521
+ int obj_size = Code::SizeFor(body_size);
2240
2522
  ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
2241
2523
  Object* result;
2242
2524
  if (obj_size > MaxObjectSizeInPagedSpace()) {
@@ -2252,8 +2534,7 @@ Object* Heap::CreateCode(const CodeDesc& desc,
2252
2534
  Code* code = Code::cast(result);
2253
2535
  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
2254
2536
  code->set_instruction_size(desc.instr_size);
2255
- code->set_relocation_size(desc.reloc_size);
2256
- code->set_sinfo_size(sinfo_size);
2537
+ code->set_relocation_info(ByteArray::cast(reloc_info));
2257
2538
  code->set_flags(flags);
2258
2539
  // Allow self references to created code object by patching the handle to
2259
2540
  // point to the newly allocated Code object.
@@ -2266,7 +2547,6 @@ Object* Heap::CreateCode(const CodeDesc& desc,
2266
2547
  // objects. These pointers can include references to the code object itself,
2267
2548
  // through the self_reference parameter.
2268
2549
  code->CopyFrom(desc);
2269
- if (sinfo != NULL) sinfo->Serialize(code); // write scope info
2270
2550
 
2271
2551
  #ifdef DEBUG
2272
2552
  code->Verify();
@@ -2290,9 +2570,7 @@ Object* Heap::CopyCode(Code* code) {
2290
2570
  // Copy code object.
2291
2571
  Address old_addr = code->address();
2292
2572
  Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2293
- CopyBlock(reinterpret_cast<Object**>(new_addr),
2294
- reinterpret_cast<Object**>(old_addr),
2295
- obj_size);
2573
+ CopyBlock(new_addr, old_addr, obj_size);
2296
2574
  // Relocate the copy.
2297
2575
  Code* new_code = Code::cast(result);
2298
2576
  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
@@ -2302,17 +2580,19 @@ Object* Heap::CopyCode(Code* code) {
2302
2580
 
2303
2581
 
2304
2582
  Object* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
2305
- int new_body_size = RoundUp(code->instruction_size() + reloc_info.length(),
2306
- kObjectAlignment);
2583
+ // Allocate ByteArray before the Code object, so that we do not risk
2584
+ // leaving uninitialized Code object (and breaking the heap).
2585
+ Object* reloc_info_array = AllocateByteArray(reloc_info.length(), TENURED);
2586
+ if (reloc_info_array->IsFailure()) return reloc_info_array;
2307
2587
 
2308
- int sinfo_size = code->sinfo_size();
2588
+ int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
2309
2589
 
2310
- int new_obj_size = Code::SizeFor(new_body_size, sinfo_size);
2590
+ int new_obj_size = Code::SizeFor(new_body_size);
2311
2591
 
2312
2592
  Address old_addr = code->address();
2313
2593
 
2314
2594
  size_t relocation_offset =
2315
- static_cast<size_t>(code->relocation_start() - old_addr);
2595
+ static_cast<size_t>(code->instruction_end() - old_addr);
2316
2596
 
2317
2597
  Object* result;
2318
2598
  if (new_obj_size > MaxObjectSizeInPagedSpace()) {
@@ -2329,16 +2609,11 @@ Object* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
2329
2609
  // Copy header and instructions.
2330
2610
  memcpy(new_addr, old_addr, relocation_offset);
2331
2611
 
2332
- // Copy patched rinfo.
2333
- memcpy(new_addr + relocation_offset,
2334
- reloc_info.start(),
2335
- reloc_info.length());
2336
-
2337
2612
  Code* new_code = Code::cast(result);
2338
- new_code->set_relocation_size(reloc_info.length());
2613
+ new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
2339
2614
 
2340
- // Copy sinfo.
2341
- memcpy(new_code->sinfo_start(), code->sinfo_start(), code->sinfo_size());
2615
+ // Copy patched rinfo.
2616
+ memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
2342
2617
 
2343
2618
  // Relocate the copy.
2344
2619
  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
@@ -2438,8 +2713,8 @@ Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
2438
2713
  // Copy the content. The arguments boilerplate doesn't have any
2439
2714
  // fields that point to new space so it's safe to skip the write
2440
2715
  // barrier here.
2441
- CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(result)->address()),
2442
- reinterpret_cast<Object**>(boilerplate->address()),
2716
+ CopyBlock(HeapObject::cast(result)->address(),
2717
+ boilerplate->address(),
2443
2718
  kArgumentsObjectSize);
2444
2719
 
2445
2720
  // Set the two properties.
@@ -2479,6 +2754,7 @@ Object* Heap::AllocateInitialMap(JSFunction* fun) {
2479
2754
  map->set_inobject_properties(in_object_properties);
2480
2755
  map->set_unused_property_fields(in_object_properties);
2481
2756
  map->set_prototype(prototype);
2757
+ ASSERT(map->has_fast_elements());
2482
2758
 
2483
2759
  // If the function has only simple this property assignments add
2484
2760
  // field descriptors for these to the initial map as the object
@@ -2532,8 +2808,8 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
2532
2808
  // properly initialized.
2533
2809
  ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
2534
2810
 
2535
- // Both types of globla objects should be allocated using
2536
- // AllocateGloblaObject to be properly initialized.
2811
+ // Both types of global objects should be allocated using
2812
+ // AllocateGlobalObject to be properly initialized.
2537
2813
  ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
2538
2814
  ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
2539
2815
 
@@ -2557,6 +2833,7 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
2557
2833
  InitializeJSObjectFromMap(JSObject::cast(obj),
2558
2834
  FixedArray::cast(properties),
2559
2835
  map);
2836
+ ASSERT(JSObject::cast(obj)->HasFastElements());
2560
2837
  return obj;
2561
2838
  }
2562
2839
 
@@ -2661,8 +2938,8 @@ Object* Heap::CopyJSObject(JSObject* source) {
2661
2938
  clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
2662
2939
  if (clone->IsFailure()) return clone;
2663
2940
  Address clone_address = HeapObject::cast(clone)->address();
2664
- CopyBlock(reinterpret_cast<Object**>(clone_address),
2665
- reinterpret_cast<Object**>(source->address()),
2941
+ CopyBlock(clone_address,
2942
+ source->address(),
2666
2943
  object_size);
2667
2944
  // Update write barrier for all fields that lie beyond the header.
2668
2945
  RecordWrites(clone_address,
@@ -2674,8 +2951,8 @@ Object* Heap::CopyJSObject(JSObject* source) {
2674
2951
  ASSERT(Heap::InNewSpace(clone));
2675
2952
  // Since we know the clone is allocated in new space, we can copy
2676
2953
  // the contents without worrying about updating the write barrier.
2677
- CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
2678
- reinterpret_cast<Object**>(source->address()),
2954
+ CopyBlock(HeapObject::cast(clone)->address(),
2955
+ source->address(),
2679
2956
  object_size);
2680
2957
  }
2681
2958
 
@@ -2747,6 +3024,8 @@ Object* Heap::AllocateStringFromAscii(Vector<const char> string,
2747
3024
 
2748
3025
  Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
2749
3026
  PretenureFlag pretenure) {
3027
+ // V8 only supports characters in the Basic Multilingual Plane.
3028
+ const uc32 kMaxSupportedChar = 0xFFFF;
2750
3029
  // Count the number of characters in the UTF-8 string and check if
2751
3030
  // it is an ASCII string.
2752
3031
  Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
@@ -2771,6 +3050,7 @@ Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
2771
3050
  decoder->Reset(string.start(), string.length());
2772
3051
  for (int i = 0; i < chars; i++) {
2773
3052
  uc32 r = decoder->GetNext();
3053
+ if (r > kMaxSupportedChar) { r = unibrow::Utf8::kBadChar; }
2774
3054
  string_result->Set(i, r);
2775
3055
  }
2776
3056
  return result;
@@ -2813,6 +3093,9 @@ Map* Heap::SymbolMapForString(String* string) {
2813
3093
  if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
2814
3094
  if (map == external_string_map()) return external_symbol_map();
2815
3095
  if (map == external_ascii_string_map()) return external_ascii_symbol_map();
3096
+ if (map == external_string_with_ascii_data_map()) {
3097
+ return external_symbol_with_ascii_data_map();
3098
+ }
2816
3099
 
2817
3100
  // No match found.
2818
3101
  return NULL;
@@ -2946,8 +3229,8 @@ Object* Heap::AllocateEmptyFixedArray() {
2946
3229
  Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
2947
3230
  if (result->IsFailure()) return result;
2948
3231
  // Initialize the object.
2949
- reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2950
- reinterpret_cast<Array*>(result)->set_length(0);
3232
+ reinterpret_cast<FixedArray*>(result)->set_map(fixed_array_map());
3233
+ reinterpret_cast<FixedArray*>(result)->set_length(0);
2951
3234
  return result;
2952
3235
  }
2953
3236
 
@@ -2972,9 +3255,7 @@ Object* Heap::CopyFixedArray(FixedArray* src) {
2972
3255
  if (obj->IsFailure()) return obj;
2973
3256
  if (Heap::InNewSpace(obj)) {
2974
3257
  HeapObject* dst = HeapObject::cast(obj);
2975
- CopyBlock(reinterpret_cast<Object**>(dst->address()),
2976
- reinterpret_cast<Object**>(src->address()),
2977
- FixedArray::SizeFor(len));
3258
+ CopyBlock(dst->address(), src->address(), FixedArray::SizeFor(len));
2978
3259
  return obj;
2979
3260
  }
2980
3261
  HeapObject::cast(obj)->set_map(src->map());
@@ -2995,8 +3276,8 @@ Object* Heap::AllocateFixedArray(int length) {
2995
3276
  Object* result = AllocateRawFixedArray(length);
2996
3277
  if (!result->IsFailure()) {
2997
3278
  // Initialize header.
2998
- reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2999
- FixedArray* array = FixedArray::cast(result);
3279
+ FixedArray* array = reinterpret_cast<FixedArray*>(result);
3280
+ array->set_map(fixed_array_map());
3000
3281
  array->set_length(length);
3001
3282
  // Initialize body.
3002
3283
  ASSERT(!Heap::InNewSpace(undefined_value()));
@@ -3023,27 +3304,10 @@ Object* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
3023
3304
  space = LO_SPACE;
3024
3305
  }
3025
3306
 
3026
- // Specialize allocation for the space.
3027
- Object* result = Failure::OutOfMemoryException();
3028
- if (space == NEW_SPACE) {
3029
- // We cannot use Heap::AllocateRaw() because it will not properly
3030
- // allocate extra remembered set bits if always_allocate() is true and
3031
- // new space allocation fails.
3032
- result = new_space_.AllocateRaw(size);
3033
- if (result->IsFailure() && always_allocate()) {
3034
- if (size <= MaxObjectSizeInPagedSpace()) {
3035
- result = old_pointer_space_->AllocateRaw(size);
3036
- } else {
3037
- result = lo_space_->AllocateRawFixedArray(size);
3038
- }
3039
- }
3040
- } else if (space == OLD_POINTER_SPACE) {
3041
- result = old_pointer_space_->AllocateRaw(size);
3042
- } else {
3043
- ASSERT(space == LO_SPACE);
3044
- result = lo_space_->AllocateRawFixedArray(size);
3045
- }
3046
- return result;
3307
+ AllocationSpace retry_space =
3308
+ (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
3309
+
3310
+ return AllocateRaw(size, space, retry_space);
3047
3311
  }
3048
3312
 
3049
3313
 
@@ -3091,7 +3355,7 @@ Object* Heap::AllocateUninitializedFixedArray(int length) {
3091
3355
  Object* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
3092
3356
  Object* result = Heap::AllocateFixedArray(length, pretenure);
3093
3357
  if (result->IsFailure()) return result;
3094
- reinterpret_cast<Array*>(result)->set_map(hash_table_map());
3358
+ reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
3095
3359
  ASSERT(result->IsHashTable());
3096
3360
  return result;
3097
3361
  }
@@ -3343,6 +3607,49 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
3343
3607
 
3344
3608
 
3345
3609
  #ifdef DEBUG
3610
+ static void DummyScavengePointer(HeapObject** p) {
3611
+ }
3612
+
3613
+
3614
+ static void VerifyPointersUnderWatermark(
3615
+ PagedSpace* space,
3616
+ DirtyRegionCallback visit_dirty_region) {
3617
+ PageIterator it(space, PageIterator::PAGES_IN_USE);
3618
+
3619
+ while (it.has_next()) {
3620
+ Page* page = it.next();
3621
+ Address start = page->ObjectAreaStart();
3622
+ Address end = page->AllocationWatermark();
3623
+
3624
+ Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
3625
+ start,
3626
+ end,
3627
+ visit_dirty_region,
3628
+ &DummyScavengePointer);
3629
+ }
3630
+ }
3631
+
3632
+
3633
+ static void VerifyPointersUnderWatermark(LargeObjectSpace* space) {
3634
+ LargeObjectIterator it(space);
3635
+ for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
3636
+ if (object->IsFixedArray()) {
3637
+ Address slot_address = object->address();
3638
+ Address end = object->address() + object->Size();
3639
+
3640
+ while (slot_address < end) {
3641
+ HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
3642
+ // Outside of a GC the Heap::InNewSpace() predicate additionally
+ // checks that any pointer it reports as being in new space points
+ // into the active semispace.
3645
+ Heap::InNewSpace(*slot);
3646
+ slot_address += kPointerSize;
3647
+ }
3648
+ }
3649
+ }
3650
+ }
3651
+
3652
+
3346
3653
  void Heap::Verify() {
3347
3654
  ASSERT(HasBeenSetup());
3348
3655
 
@@ -3351,14 +3658,23 @@ void Heap::Verify() {
3351
3658
 
3352
3659
  new_space_.Verify();
3353
3660
 
3354
- VerifyPointersAndRSetVisitor rset_visitor;
3355
- old_pointer_space_->Verify(&rset_visitor);
3356
- map_space_->Verify(&rset_visitor);
3661
+ VerifyPointersAndDirtyRegionsVisitor dirty_regions_visitor;
3662
+ old_pointer_space_->Verify(&dirty_regions_visitor);
3663
+ map_space_->Verify(&dirty_regions_visitor);
3664
+
3665
+ VerifyPointersUnderWatermark(old_pointer_space_,
3666
+ &IteratePointersInDirtyRegion);
3667
+ VerifyPointersUnderWatermark(map_space_,
3668
+ &IteratePointersInDirtyMapsRegion);
3669
+ VerifyPointersUnderWatermark(lo_space_);
3357
3670
 
3358
- VerifyPointersVisitor no_rset_visitor;
3359
- old_data_space_->Verify(&no_rset_visitor);
3360
- code_space_->Verify(&no_rset_visitor);
3361
- cell_space_->Verify(&no_rset_visitor);
3671
+ VerifyPageWatermarkValidity(old_pointer_space_, ALL_INVALID);
3672
+ VerifyPageWatermarkValidity(map_space_, ALL_INVALID);
3673
+
3674
+ VerifyPointersVisitor no_dirty_regions_visitor;
3675
+ old_data_space_->Verify(&no_dirty_regions_visitor);
3676
+ code_space_->Verify(&no_dirty_regions_visitor);
3677
+ cell_space_->Verify(&no_dirty_regions_visitor);
3362
3678
 
3363
3679
  lo_space_->Verify();
3364
3680
  }
@@ -3411,65 +3727,254 @@ void Heap::ZapFromSpace() {
3411
3727
  #endif // DEBUG
3412
3728
 
3413
3729
 
3414
- int Heap::IterateRSetRange(Address object_start,
3415
- Address object_end,
3416
- Address rset_start,
3417
- ObjectSlotCallback copy_object_func) {
3418
- Address object_address = object_start;
3419
- Address rset_address = rset_start;
3420
- int set_bits_count = 0;
3421
-
3422
- // Loop over all the pointers in [object_start, object_end).
3423
- while (object_address < object_end) {
3424
- uint32_t rset_word = Memory::uint32_at(rset_address);
3425
- if (rset_word != 0) {
3426
- uint32_t result_rset = rset_word;
3427
- for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) {
3428
- // Do not dereference pointers at or past object_end.
3429
- if ((rset_word & bitmask) != 0 && object_address < object_end) {
3430
- Object** object_p = reinterpret_cast<Object**>(object_address);
3431
- if (Heap::InNewSpace(*object_p)) {
3432
- copy_object_func(reinterpret_cast<HeapObject**>(object_p));
3433
- }
3434
- // If this pointer does not need to be remembered anymore, clear
3435
- // the remembered set bit.
3436
- if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
3437
- set_bits_count++;
3438
- }
3439
- object_address += kPointerSize;
3730
+ bool Heap::IteratePointersInDirtyRegion(Address start,
3731
+ Address end,
3732
+ ObjectSlotCallback copy_object_func) {
3733
+ Address slot_address = start;
3734
+ bool pointers_to_new_space_found = false;
3735
+
3736
+ while (slot_address < end) {
3737
+ Object** slot = reinterpret_cast<Object**>(slot_address);
3738
+ if (Heap::InNewSpace(*slot)) {
3739
+ ASSERT((*slot)->IsHeapObject());
3740
+ copy_object_func(reinterpret_cast<HeapObject**>(slot));
3741
+ if (Heap::InNewSpace(*slot)) {
3742
+ ASSERT((*slot)->IsHeapObject());
3743
+ pointers_to_new_space_found = true;
3440
3744
  }
3441
- // Update the remembered set if it has changed.
3442
- if (result_rset != rset_word) {
3443
- Memory::uint32_at(rset_address) = result_rset;
3745
+ }
3746
+ slot_address += kPointerSize;
3747
+ }
3748
+ return pointers_to_new_space_found;
3749
+ }
3750
+
3751
+
3752
+ // Compute start address of the first map following given addr.
3753
+ static inline Address MapStartAlign(Address addr) {
3754
+ Address page = Page::FromAddress(addr)->ObjectAreaStart();
3755
+ return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
3756
+ }
3757
+
3758
+
3759
+ // Compute end address of the first map preceding given addr.
3760
+ static inline Address MapEndAlign(Address addr) {
3761
+ Address page = Page::FromAllocationTop(addr)->ObjectAreaStart();
3762
+ return page + ((addr - page) / Map::kSize * Map::kSize);
3763
+ }
3764
+
3765
+
3766
+ static bool IteratePointersInDirtyMaps(Address start,
3767
+ Address end,
3768
+ ObjectSlotCallback copy_object_func) {
3769
+ ASSERT(MapStartAlign(start) == start);
3770
+ ASSERT(MapEndAlign(end) == end);
3771
+
3772
+ Address map_address = start;
3773
+ bool pointers_to_new_space_found = false;
3774
+
3775
+ while (map_address < end) {
3776
+ ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address)));
3777
+ ASSERT(Memory::Object_at(map_address)->IsMap());
3778
+
3779
+ Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
3780
+ Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
3781
+
3782
+ if (Heap::IteratePointersInDirtyRegion(pointer_fields_start,
3783
+ pointer_fields_end,
3784
+ copy_object_func)) {
3785
+ pointers_to_new_space_found = true;
3786
+ }
3787
+
3788
+ map_address += Map::kSize;
3789
+ }
3790
+
3791
+ return pointers_to_new_space_found;
3792
+ }
3793
+
3794
+
3795
+ bool Heap::IteratePointersInDirtyMapsRegion(
3796
+ Address start,
3797
+ Address end,
3798
+ ObjectSlotCallback copy_object_func) {
3799
+ Address map_aligned_start = MapStartAlign(start);
3800
+ Address map_aligned_end = MapEndAlign(end);
3801
+
3802
+ bool contains_pointers_to_new_space = false;
3803
+
3804
+ if (map_aligned_start != start) {
3805
+ Address prev_map = map_aligned_start - Map::kSize;
3806
+ ASSERT(Memory::Object_at(prev_map)->IsMap());
3807
+
3808
+ Address pointer_fields_start =
3809
+ Max(start, prev_map + Map::kPointerFieldsBeginOffset);
3810
+
3811
+ Address pointer_fields_end =
3812
+ Min(prev_map + Map::kPointerFieldsEndOffset, end);
3813
+
3814
+ contains_pointers_to_new_space =
3815
+ IteratePointersInDirtyRegion(pointer_fields_start,
3816
+ pointer_fields_end,
3817
+ copy_object_func)
3818
+ || contains_pointers_to_new_space;
3819
+ }
3820
+
3821
+ contains_pointers_to_new_space =
3822
+ IteratePointersInDirtyMaps(map_aligned_start,
3823
+ map_aligned_end,
3824
+ copy_object_func)
3825
+ || contains_pointers_to_new_space;
3826
+
3827
+ if (map_aligned_end != end) {
3828
+ ASSERT(Memory::Object_at(map_aligned_end)->IsMap());
3829
+
3830
+ Address pointer_fields_start =
3831
+ map_aligned_end + Map::kPointerFieldsBeginOffset;
3832
+
3833
+ Address pointer_fields_end =
3834
+ Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
3835
+
3836
+ contains_pointers_to_new_space =
3837
+ IteratePointersInDirtyRegion(pointer_fields_start,
3838
+ pointer_fields_end,
3839
+ copy_object_func)
3840
+ || contains_pointers_to_new_space;
3841
+ }
3842
+
3843
+ return contains_pointers_to_new_space;
3844
+ }
3845
+
3846
+
3847
+ void Heap::IterateAndMarkPointersToFromSpace(Address start,
3848
+ Address end,
3849
+ ObjectSlotCallback callback) {
3850
+ Address slot_address = start;
3851
+ Page* page = Page::FromAddress(start);
3852
+
3853
+ uint32_t marks = page->GetRegionMarks();
3854
+
3855
+ while (slot_address < end) {
3856
+ Object** slot = reinterpret_cast<Object**>(slot_address);
3857
+ if (Heap::InFromSpace(*slot)) {
3858
+ ASSERT((*slot)->IsHeapObject());
3859
+ callback(reinterpret_cast<HeapObject**>(slot));
3860
+ if (Heap::InNewSpace(*slot)) {
3861
+ ASSERT((*slot)->IsHeapObject());
3862
+ marks |= page->GetRegionMaskForAddress(slot_address);
3863
+ }
3864
+ }
3865
+ slot_address += kPointerSize;
3866
+ }
3867
+
3868
+ page->SetRegionMarks(marks);
3869
+ }
3870
+
3871
+
3872
+ uint32_t Heap::IterateDirtyRegions(
3873
+ uint32_t marks,
3874
+ Address area_start,
3875
+ Address area_end,
3876
+ DirtyRegionCallback visit_dirty_region,
3877
+ ObjectSlotCallback copy_object_func) {
3878
+ uint32_t newmarks = 0;
3879
+ uint32_t mask = 1;
3880
+
3881
+ if (area_start >= area_end) {
3882
+ return newmarks;
3883
+ }
3884
+
3885
+ Address region_start = area_start;
3886
+
3887
+ // area_start does not necessarily coincide with start of the first region.
3888
+ // Thus to calculate the beginning of the next region we have to align
3889
+ // area_start by Page::kRegionSize.
3890
+ Address second_region =
3891
+ reinterpret_cast<Address>(
3892
+ reinterpret_cast<intptr_t>(area_start + Page::kRegionSize) &
3893
+ ~Page::kRegionAlignmentMask);
3894
+
3895
+ // Next region might be beyond area_end.
3896
+ Address region_end = Min(second_region, area_end);
3897
+
3898
+ if (marks & mask) {
3899
+ if (visit_dirty_region(region_start, region_end, copy_object_func)) {
3900
+ newmarks |= mask;
3901
+ }
3902
+ }
3903
+ mask <<= 1;
3904
+
3905
+ // Iterate subsequent regions which lie fully inside [area_start, area_end[.
3906
+ region_start = region_end;
3907
+ region_end = region_start + Page::kRegionSize;
3908
+
3909
+ while (region_end <= area_end) {
3910
+ if (marks & mask) {
3911
+ if (visit_dirty_region(region_start, region_end, copy_object_func)) {
3912
+ newmarks |= mask;
3913
+ }
3914
+ }
3915
+
3916
+ region_start = region_end;
3917
+ region_end = region_start + Page::kRegionSize;
3918
+
3919
+ mask <<= 1;
3920
+ }
3921
+
3922
+ if (region_start != area_end) {
3923
+ // A small piece of the area is left unvisited because area_end does not
+ // coincide with a region end. Check whether the region covering the last
+ // part of the area is dirty.
3926
+ if (marks & mask) {
3927
+ if (visit_dirty_region(region_start, area_end, copy_object_func)) {
3928
+ newmarks |= mask;
3444
3929
  }
3445
- } else {
3446
- // No bits in the word were set. This is the common case.
3447
- object_address += kPointerSize * kBitsPerInt;
3448
3930
  }
3449
- rset_address += kIntSize;
3450
3931
  }
3451
- return set_bits_count;
3932
+
3933
+ return newmarks;
3452
3934
  }
3453
3935
 
3454
3936
 
3455
- void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
3456
- ASSERT(Page::is_rset_in_use());
3457
- ASSERT(space == old_pointer_space_ || space == map_space_);
3458
3937
 
3459
- static void* paged_rset_histogram = StatsTable::CreateHistogram(
3460
- "V8.RSetPaged",
3461
- 0,
3462
- Page::kObjectAreaSize / kPointerSize,
3463
- 30);
3938
+ void Heap::IterateDirtyRegions(
3939
+ PagedSpace* space,
3940
+ DirtyRegionCallback visit_dirty_region,
3941
+ ObjectSlotCallback copy_object_func,
3942
+ ExpectedPageWatermarkState expected_page_watermark_state) {
3464
3943
 
3465
3944
  PageIterator it(space, PageIterator::PAGES_IN_USE);
3945
+
3466
3946
  while (it.has_next()) {
3467
3947
  Page* page = it.next();
3468
- int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
3469
- page->RSetStart(), copy_object_func);
3470
- if (paged_rset_histogram != NULL) {
3471
- StatsTable::AddHistogramSample(paged_rset_histogram, count);
3948
+ uint32_t marks = page->GetRegionMarks();
3949
+
3950
+ if (marks != Page::kAllRegionsCleanMarks) {
3951
+ Address start = page->ObjectAreaStart();
3952
+
3953
+ // Do not try to visit pointers beyond page allocation watermark.
3954
+ // Page can contain garbage pointers there.
3955
+ Address end;
3956
+
3957
+ if ((expected_page_watermark_state == WATERMARK_SHOULD_BE_VALID) ||
3958
+ page->IsWatermarkValid()) {
3959
+ end = page->AllocationWatermark();
3960
+ } else {
3961
+ end = page->CachedAllocationWatermark();
3962
+ }
3963
+
3964
+ ASSERT(space == old_pointer_space_ ||
3965
+ (space == map_space_ &&
3966
+ ((page->ObjectAreaStart() - end) % Map::kSize == 0)));
3967
+
3968
+ page->SetRegionMarks(IterateDirtyRegions(marks,
3969
+ start,
3970
+ end,
3971
+ visit_dirty_region,
3972
+ copy_object_func));
3472
3973
  }
3974
+
3975
+ // Mark page watermark as invalid to maintain watermark validity invariant.
3976
+ // See Page::FlipMeaningOfInvalidatedWatermarkFlag() for details.
3977
+ page->InvalidateWatermark(true);
3473
3978
  }
3474
3979
  }
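In the new write-barrier scheme used above, each page keeps a 32-bit mark word with one dirty bit per fixed-size region, and IterateDirtyRegions visits only the regions whose bit is set, returning the bits that should remain set. A minimal standalone sketch of that bit walk, under assumed values: kRegionSize, the addresses and PrintRegion below are illustrative, not the real Page constants or V8 callbacks.

    #include <cstdint>
    #include <cstdio>

    // Illustrative region size; in V8 the real constant lives in Page and there
    // are at most 32 regions per page, one bit each in a uint32_t mark word.
    const uintptr_t kRegionSize = 1024;

    typedef bool (*DirtyRegionCallback)(uintptr_t start, uintptr_t end);

    // Visit every dirty region inside [area_start, area_end) and return the new
    // mark word: a bit stays set only if the callback reports that the region
    // still holds pointers of interest (mirrors the protocol in the patch).
    uint32_t IterateDirtyRegions(uint32_t marks,
                                 uintptr_t area_start,
                                 uintptr_t area_end,
                                 DirtyRegionCallback visit) {
      uint32_t new_marks = 0;
      uint32_t mask = 1;
      uintptr_t region_start = area_start;
      // The first region may be partial if area_start is not region-aligned.
      uintptr_t region_end = (area_start + kRegionSize) & ~(kRegionSize - 1);
      if (region_end > area_end) region_end = area_end;

      while (region_start < area_end) {
        if ((marks & mask) != 0 && visit(region_start, region_end)) {
          new_marks |= mask;
        }
        region_start = region_end;
        region_end = region_start + kRegionSize;
        if (region_end > area_end) region_end = area_end;
        mask <<= 1;
      }
      return new_marks;
    }

    static bool PrintRegion(uintptr_t start, uintptr_t end) {
      std::printf("visit dirty region [%#lx, %#lx)\n",
                  static_cast<unsigned long>(start),
                  static_cast<unsigned long>(end));
      return false;  // pretend no interesting pointers remain, so the bit clears
    }

    int main() {
      // Regions 0 and 2 of a hypothetical 8 KB dirty area.
      uint32_t marks = (1u << 0) | (1u << 2);
      uint32_t remaining = IterateDirtyRegions(marks, 0x10000, 0x12000, &PrintRegion);
      std::printf("marks after sweep: %#x\n", static_cast<unsigned>(remaining));
      return 0;
    }

In the real code the callback is one of IteratePointersInDirtyRegion or IteratePointersInDirtyMapsRegion, and a cleared bit means the next scavenge does not have to revisit that region.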
 
@@ -3754,6 +4259,17 @@ void Heap::SetStackLimits() {
3754
4259
 
3755
4260
 
3756
4261
  void Heap::TearDown() {
4262
+ if (FLAG_print_cumulative_gc_stat) {
4263
+ PrintF("\n\n");
4264
+ PrintF("gc_count=%d ", gc_count_);
4265
+ PrintF("mark_sweep_count=%d ", ms_count_);
4266
+ PrintF("mark_compact_count=%d ", mc_count_);
4267
+ PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
4268
+ PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
4269
+ PrintF("max_alive_after_gc=%d ", GCTracer::get_max_alive_after_gc());
4270
+ PrintF("\n\n");
4271
+ }
4272
+
3757
4273
  GlobalHandles::TearDown();
3758
4274
 
3759
4275
  ExternalStringTable::TearDown();
@@ -4229,33 +4745,115 @@ void Heap::TracePathToGlobal() {
4229
4745
  #endif
4230
4746
 
4231
4747
 
4748
+ static int CountTotalHolesSize() {
4749
+ int holes_size = 0;
4750
+ OldSpaces spaces;
4751
+ for (OldSpace* space = spaces.next();
4752
+ space != NULL;
4753
+ space = spaces.next()) {
4754
+ holes_size += space->Waste() + space->AvailableFree();
4755
+ }
4756
+ return holes_size;
4757
+ }
4758
+
4759
+
4232
4760
  GCTracer::GCTracer()
4233
4761
  : start_time_(0.0),
4234
- start_size_(0.0),
4235
- external_time_(0.0),
4762
+ start_size_(0),
4236
4763
  gc_count_(0),
4237
4764
  full_gc_count_(0),
4238
4765
  is_compacting_(false),
4239
- marked_count_(0) {
4766
+ marked_count_(0),
4767
+ allocated_since_last_gc_(0),
4768
+ spent_in_mutator_(0),
4769
+ promoted_objects_size_(0) {
4240
4770
  // These two fields reflect the state of the previous full collection.
4241
4771
  // Set them before they are changed by the collector.
4242
4772
  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
4243
4773
  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
4244
- if (!FLAG_trace_gc) return;
4774
+ if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
4245
4775
  start_time_ = OS::TimeCurrentMillis();
4246
- start_size_ = SizeOfHeapObjects();
4776
+ start_size_ = Heap::SizeOfObjects();
4777
+
4778
+ for (int i = 0; i < Scope::kNumberOfScopes; i++) {
4779
+ scopes_[i] = 0;
4780
+ }
4781
+
4782
+ in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
4783
+
4784
+ allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
4785
+
4786
+ if (last_gc_end_timestamp_ > 0) {
4787
+ spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
4788
+ }
4247
4789
  }
4248
4790
 
4249
4791
 
4250
4792
  GCTracer::~GCTracer() {
4251
- if (!FLAG_trace_gc) return;
4252
4793
  // Printf ONE line iff flag is set.
4253
- int time = static_cast<int>(OS::TimeCurrentMillis() - start_time_);
4254
- int external_time = static_cast<int>(external_time_);
4255
- PrintF("%s %.1f -> %.1f MB, ",
4256
- CollectorString(), start_size_, SizeOfHeapObjects());
4257
- if (external_time > 0) PrintF("%d / ", external_time);
4258
- PrintF("%d ms.\n", time);
4794
+ if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
4795
+
4796
+ bool first_gc = (last_gc_end_timestamp_ == 0);
4797
+
4798
+ alive_after_last_gc_ = Heap::SizeOfObjects();
4799
+ last_gc_end_timestamp_ = OS::TimeCurrentMillis();
4800
+
4801
+ int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
4802
+
4803
+ // Update cumulative GC statistics if required.
4804
+ if (FLAG_print_cumulative_gc_stat) {
4805
+ max_gc_pause_ = Max(max_gc_pause_, time);
4806
+ max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
4807
+ if (!first_gc) {
4808
+ min_in_mutator_ = Min(min_in_mutator_,
4809
+ static_cast<int>(spent_in_mutator_));
4810
+ }
4811
+ }
4812
+
4813
+ if (!FLAG_trace_gc_nvp) {
4814
+ int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
4815
+
4816
+ PrintF("%s %.1f -> %.1f MB, ",
4817
+ CollectorString(),
4818
+ static_cast<double>(start_size_) / MB,
4819
+ SizeOfHeapObjects());
4820
+
4821
+ if (external_time > 0) PrintF("%d / ", external_time);
4822
+ PrintF("%d ms.\n", time);
4823
+ } else {
4824
+ PrintF("pause=%d ", time);
4825
+ PrintF("mutator=%d ",
4826
+ static_cast<int>(spent_in_mutator_));
4827
+
4828
+ PrintF("gc=");
4829
+ switch (collector_) {
4830
+ case SCAVENGER:
4831
+ PrintF("s");
4832
+ break;
4833
+ case MARK_COMPACTOR:
4834
+ PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
4835
+ break;
4836
+ default:
4837
+ UNREACHABLE();
4838
+ }
4839
+ PrintF(" ");
4840
+
4841
+ PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
4842
+ PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
4843
+ PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
4844
+ PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
4845
+ PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));
4846
+
4847
+ PrintF("total_size_before=%d ", start_size_);
4848
+ PrintF("total_size_after=%d ", Heap::SizeOfObjects());
4849
+ PrintF("holes_size_before=%d ", in_free_list_or_wasted_before_gc_);
4850
+ PrintF("holes_size_after=%d ", CountTotalHolesSize());
4851
+
4852
+ PrintF("allocated=%d ", allocated_since_last_gc_);
4853
+ PrintF("promoted=%d ", promoted_objects_size_);
4854
+
4855
+ PrintF("\n");
4856
+ }
4259
4857
 
4260
4858
  #if defined(ENABLE_LOGGING_AND_PROFILING)
4261
4859
  Heap::PrintShortHeapStatistics();