libv8 3.10.8.0 → 3.11.8.0

Files changed (215)
  1. data/Rakefile +10 -3
  2. data/ext/libv8/compiler.rb +46 -0
  3. data/ext/libv8/extconf.rb +5 -1
  4. data/ext/libv8/make.rb +13 -0
  5. data/lib/libv8/version.rb +1 -1
  6. data/patches/add-freebsd9-and-freebsd10-to-gyp-GetFlavor.patch +11 -0
  7. data/patches/src_platform-freebsd.cc.patch +10 -0
  8. data/vendor/v8/ChangeLog +124 -0
  9. data/vendor/v8/DEPS +27 -0
  10. data/vendor/v8/Makefile +7 -0
  11. data/vendor/v8/SConstruct +15 -2
  12. data/vendor/v8/build/common.gypi +129 -157
  13. data/vendor/v8/build/gyp_v8 +11 -25
  14. data/vendor/v8/build/standalone.gypi +9 -3
  15. data/vendor/v8/include/v8.h +5 -3
  16. data/vendor/v8/src/SConscript +1 -0
  17. data/vendor/v8/src/api.cc +4 -33
  18. data/vendor/v8/src/api.h +2 -2
  19. data/vendor/v8/src/arm/builtins-arm.cc +5 -4
  20. data/vendor/v8/src/arm/code-stubs-arm.cc +21 -14
  21. data/vendor/v8/src/arm/codegen-arm.cc +2 -2
  22. data/vendor/v8/src/arm/debug-arm.cc +3 -1
  23. data/vendor/v8/src/arm/full-codegen-arm.cc +3 -102
  24. data/vendor/v8/src/arm/ic-arm.cc +30 -33
  25. data/vendor/v8/src/arm/lithium-arm.cc +20 -7
  26. data/vendor/v8/src/arm/lithium-arm.h +10 -4
  27. data/vendor/v8/src/arm/lithium-codegen-arm.cc +106 -60
  28. data/vendor/v8/src/arm/macro-assembler-arm.cc +49 -39
  29. data/vendor/v8/src/arm/macro-assembler-arm.h +5 -4
  30. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +115 -55
  31. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +7 -6
  32. data/vendor/v8/src/arm/simulator-arm.h +6 -6
  33. data/vendor/v8/src/arm/stub-cache-arm.cc +64 -19
  34. data/vendor/v8/src/array.js +7 -3
  35. data/vendor/v8/src/ast.cc +11 -6
  36. data/vendor/v8/src/bootstrapper.cc +9 -11
  37. data/vendor/v8/src/builtins.cc +61 -31
  38. data/vendor/v8/src/code-stubs.cc +23 -9
  39. data/vendor/v8/src/code-stubs.h +1 -0
  40. data/vendor/v8/src/codegen.h +3 -3
  41. data/vendor/v8/src/compiler.cc +1 -1
  42. data/vendor/v8/src/contexts.h +2 -18
  43. data/vendor/v8/src/d8.cc +94 -93
  44. data/vendor/v8/src/d8.h +1 -1
  45. data/vendor/v8/src/debug-agent.cc +3 -3
  46. data/vendor/v8/src/debug.cc +41 -1
  47. data/vendor/v8/src/debug.h +50 -0
  48. data/vendor/v8/src/elements-kind.cc +134 -0
  49. data/vendor/v8/src/elements-kind.h +210 -0
  50. data/vendor/v8/src/elements.cc +356 -190
  51. data/vendor/v8/src/elements.h +36 -28
  52. data/vendor/v8/src/factory.cc +44 -4
  53. data/vendor/v8/src/factory.h +11 -7
  54. data/vendor/v8/src/flag-definitions.h +3 -0
  55. data/vendor/v8/src/frames.h +3 -0
  56. data/vendor/v8/src/full-codegen.cc +2 -1
  57. data/vendor/v8/src/func-name-inferrer.h +2 -0
  58. data/vendor/v8/src/globals.h +3 -0
  59. data/vendor/v8/src/heap-inl.h +16 -4
  60. data/vendor/v8/src/heap.cc +38 -32
  61. data/vendor/v8/src/heap.h +3 -17
  62. data/vendor/v8/src/hydrogen-instructions.cc +28 -5
  63. data/vendor/v8/src/hydrogen-instructions.h +142 -44
  64. data/vendor/v8/src/hydrogen.cc +160 -55
  65. data/vendor/v8/src/hydrogen.h +2 -0
  66. data/vendor/v8/src/ia32/assembler-ia32.h +3 -0
  67. data/vendor/v8/src/ia32/builtins-ia32.cc +5 -4
  68. data/vendor/v8/src/ia32/code-stubs-ia32.cc +22 -16
  69. data/vendor/v8/src/ia32/codegen-ia32.cc +2 -2
  70. data/vendor/v8/src/ia32/debug-ia32.cc +29 -2
  71. data/vendor/v8/src/ia32/full-codegen-ia32.cc +8 -101
  72. data/vendor/v8/src/ia32/ic-ia32.cc +23 -19
  73. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +126 -80
  74. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +2 -1
  75. data/vendor/v8/src/ia32/lithium-ia32.cc +15 -9
  76. data/vendor/v8/src/ia32/lithium-ia32.h +14 -6
  77. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +50 -40
  78. data/vendor/v8/src/ia32/macro-assembler-ia32.h +5 -4
  79. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +113 -43
  80. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +9 -4
  81. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  82. data/vendor/v8/src/ia32/stub-cache-ia32.cc +52 -14
  83. data/vendor/v8/src/ic.cc +77 -20
  84. data/vendor/v8/src/ic.h +18 -2
  85. data/vendor/v8/src/incremental-marking-inl.h +21 -5
  86. data/vendor/v8/src/incremental-marking.cc +35 -8
  87. data/vendor/v8/src/incremental-marking.h +12 -3
  88. data/vendor/v8/src/isolate.cc +12 -2
  89. data/vendor/v8/src/isolate.h +1 -1
  90. data/vendor/v8/src/jsregexp.cc +66 -26
  91. data/vendor/v8/src/jsregexp.h +60 -31
  92. data/vendor/v8/src/list-inl.h +8 -0
  93. data/vendor/v8/src/list.h +3 -0
  94. data/vendor/v8/src/lithium.cc +5 -2
  95. data/vendor/v8/src/liveedit.cc +57 -5
  96. data/vendor/v8/src/mark-compact-inl.h +17 -11
  97. data/vendor/v8/src/mark-compact.cc +100 -143
  98. data/vendor/v8/src/mark-compact.h +44 -20
  99. data/vendor/v8/src/messages.js +131 -99
  100. data/vendor/v8/src/mips/builtins-mips.cc +5 -4
  101. data/vendor/v8/src/mips/code-stubs-mips.cc +23 -15
  102. data/vendor/v8/src/mips/codegen-mips.cc +2 -2
  103. data/vendor/v8/src/mips/debug-mips.cc +3 -1
  104. data/vendor/v8/src/mips/full-codegen-mips.cc +4 -102
  105. data/vendor/v8/src/mips/ic-mips.cc +34 -36
  106. data/vendor/v8/src/mips/lithium-codegen-mips.cc +116 -68
  107. data/vendor/v8/src/mips/lithium-mips.cc +20 -7
  108. data/vendor/v8/src/mips/lithium-mips.h +11 -4
  109. data/vendor/v8/src/mips/macro-assembler-mips.cc +50 -39
  110. data/vendor/v8/src/mips/macro-assembler-mips.h +5 -4
  111. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +110 -50
  112. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +6 -5
  113. data/vendor/v8/src/mips/simulator-mips.h +5 -5
  114. data/vendor/v8/src/mips/stub-cache-mips.cc +66 -20
  115. data/vendor/v8/src/mksnapshot.cc +5 -1
  116. data/vendor/v8/src/objects-debug.cc +103 -6
  117. data/vendor/v8/src/objects-inl.h +215 -116
  118. data/vendor/v8/src/objects-printer.cc +13 -8
  119. data/vendor/v8/src/objects.cc +608 -331
  120. data/vendor/v8/src/objects.h +129 -94
  121. data/vendor/v8/src/parser.cc +16 -4
  122. data/vendor/v8/src/platform-freebsd.cc +1 -0
  123. data/vendor/v8/src/platform-linux.cc +9 -30
  124. data/vendor/v8/src/platform-posix.cc +28 -7
  125. data/vendor/v8/src/platform-win32.cc +15 -3
  126. data/vendor/v8/src/platform.h +2 -1
  127. data/vendor/v8/src/profile-generator-inl.h +25 -2
  128. data/vendor/v8/src/profile-generator.cc +300 -822
  129. data/vendor/v8/src/profile-generator.h +97 -214
  130. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +2 -1
  131. data/vendor/v8/src/regexp-macro-assembler-irregexp.h +2 -2
  132. data/vendor/v8/src/regexp-macro-assembler-tracer.cc +6 -5
  133. data/vendor/v8/src/regexp-macro-assembler-tracer.h +1 -1
  134. data/vendor/v8/src/regexp-macro-assembler.cc +7 -3
  135. data/vendor/v8/src/regexp-macro-assembler.h +10 -2
  136. data/vendor/v8/src/regexp.js +6 -0
  137. data/vendor/v8/src/runtime.cc +265 -212
  138. data/vendor/v8/src/runtime.h +6 -5
  139. data/vendor/v8/src/scopes.cc +20 -0
  140. data/vendor/v8/src/scopes.h +6 -3
  141. data/vendor/v8/src/spaces.cc +0 -2
  142. data/vendor/v8/src/string-stream.cc +2 -2
  143. data/vendor/v8/src/v8-counters.h +0 -2
  144. data/vendor/v8/src/v8natives.js +2 -2
  145. data/vendor/v8/src/v8utils.h +6 -3
  146. data/vendor/v8/src/version.cc +1 -1
  147. data/vendor/v8/src/x64/assembler-x64.h +2 -1
  148. data/vendor/v8/src/x64/builtins-x64.cc +5 -4
  149. data/vendor/v8/src/x64/code-stubs-x64.cc +25 -16
  150. data/vendor/v8/src/x64/codegen-x64.cc +2 -2
  151. data/vendor/v8/src/x64/debug-x64.cc +14 -1
  152. data/vendor/v8/src/x64/disasm-x64.cc +1 -1
  153. data/vendor/v8/src/x64/full-codegen-x64.cc +10 -106
  154. data/vendor/v8/src/x64/ic-x64.cc +20 -16
  155. data/vendor/v8/src/x64/lithium-codegen-x64.cc +156 -79
  156. data/vendor/v8/src/x64/lithium-codegen-x64.h +2 -1
  157. data/vendor/v8/src/x64/lithium-x64.cc +18 -8
  158. data/vendor/v8/src/x64/lithium-x64.h +7 -2
  159. data/vendor/v8/src/x64/macro-assembler-x64.cc +50 -40
  160. data/vendor/v8/src/x64/macro-assembler-x64.h +5 -4
  161. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +122 -51
  162. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +17 -8
  163. data/vendor/v8/src/x64/simulator-x64.h +4 -4
  164. data/vendor/v8/src/x64/stub-cache-x64.cc +55 -17
  165. data/vendor/v8/test/cctest/cctest.status +1 -0
  166. data/vendor/v8/test/cctest/test-api.cc +24 -0
  167. data/vendor/v8/test/cctest/test-func-name-inference.cc +38 -0
  168. data/vendor/v8/test/cctest/test-heap-profiler.cc +21 -77
  169. data/vendor/v8/test/cctest/test-heap.cc +164 -3
  170. data/vendor/v8/test/cctest/test-list.cc +12 -0
  171. data/vendor/v8/test/cctest/test-mark-compact.cc +5 -5
  172. data/vendor/v8/test/cctest/test-regexp.cc +14 -8
  173. data/vendor/v8/test/cctest/testcfg.py +2 -0
  174. data/vendor/v8/test/mjsunit/accessor-map-sharing.js +176 -0
  175. data/vendor/v8/test/mjsunit/array-construct-transition.js +3 -3
  176. data/vendor/v8/test/mjsunit/array-literal-transitions.js +10 -10
  177. data/vendor/v8/test/mjsunit/big-array-literal.js +3 -0
  178. data/vendor/v8/test/mjsunit/compiler/inline-construct.js +4 -2
  179. data/vendor/v8/test/mjsunit/debug-liveedit-stack-padding.js +88 -0
  180. data/vendor/v8/test/mjsunit/elements-kind.js +4 -4
  181. data/vendor/v8/test/mjsunit/elements-transition-hoisting.js +2 -2
  182. data/vendor/v8/test/mjsunit/elements-transition.js +5 -5
  183. data/vendor/v8/test/mjsunit/error-constructors.js +68 -33
  184. data/vendor/v8/test/mjsunit/harmony/proxies.js +14 -6
  185. data/vendor/v8/test/mjsunit/mjsunit.status +1 -0
  186. data/vendor/v8/test/mjsunit/packed-elements.js +112 -0
  187. data/vendor/v8/test/mjsunit/regexp-capture-3.js +6 -0
  188. data/vendor/v8/test/mjsunit/regexp-global.js +132 -0
  189. data/vendor/v8/test/mjsunit/regexp.js +11 -0
  190. data/vendor/v8/test/mjsunit/regress/regress-117409.js +52 -0
  191. data/vendor/v8/test/mjsunit/regress/regress-126412.js +33 -0
  192. data/vendor/v8/test/mjsunit/regress/regress-128018.js +35 -0
  193. data/vendor/v8/test/mjsunit/regress/regress-128146.js +33 -0
  194. data/vendor/v8/test/mjsunit/regress/regress-1639-2.js +4 -1
  195. data/vendor/v8/test/mjsunit/regress/regress-1639.js +14 -8
  196. data/vendor/v8/test/mjsunit/regress/regress-1849.js +3 -3
  197. data/vendor/v8/test/mjsunit/regress/regress-1878.js +2 -2
  198. data/vendor/v8/test/mjsunit/regress/regress-2071.js +79 -0
  199. data/vendor/v8/test/mjsunit/regress/regress-2153.js +32 -0
  200. data/vendor/v8/test/mjsunit/regress/regress-crbug-122271.js +4 -4
  201. data/vendor/v8/test/mjsunit/regress/regress-crbug-126414.js +32 -0
  202. data/vendor/v8/test/mjsunit/regress/regress-smi-only-concat.js +2 -2
  203. data/vendor/v8/test/mjsunit/regress/regress-transcendental.js +49 -0
  204. data/vendor/v8/test/mjsunit/stack-traces.js +14 -0
  205. data/vendor/v8/test/mjsunit/unbox-double-arrays.js +4 -3
  206. data/vendor/v8/test/test262/testcfg.py +6 -1
  207. data/vendor/v8/tools/check-static-initializers.sh +11 -3
  208. data/vendor/v8/tools/fuzz-harness.sh +92 -0
  209. data/vendor/v8/tools/grokdump.py +658 -67
  210. data/vendor/v8/tools/gyp/v8.gyp +21 -39
  211. data/vendor/v8/tools/js2c.py +3 -3
  212. data/vendor/v8/tools/jsmin.py +2 -2
  213. data/vendor/v8/tools/presubmit.py +2 -1
  214. data/vendor/v8/tools/test-wrapper-gypbuild.py +25 -11
  215. metadata +624 -612
@@ -136,6 +136,14 @@ bool List<T, P>::RemoveElement(const T& elm) {
 }


+template<typename T, class P>
+void List<T, P>::Allocate(int length) {
+  DeleteData(data_);
+  Initialize(length);
+  length_ = length;
+}
+
+
 template<typename T, class P>
 void List<T, P>::Clear() {
   DeleteData(data_);
@@ -117,6 +117,9 @@ class List {
   // pointer type. Returns the removed element.
   INLINE(T RemoveLast()) { return Remove(length_ - 1); }

+  // Deletes current list contents and allocates space for 'length' elements.
+  INLINE(void Allocate(int length));
+
   // Clears the list by setting the length to zero. Even if T is a
   // pointer type, clearing the list doesn't delete the entries.
   INLINE(void Clear());
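
Side note (illustrative, not part of the diff): unlike Clear(), the new Allocate() frees the old backing store and immediately reserves 'length' slots, leaving length() equal to 'length'. A minimal usage sketch, assuming V8's internal List<T, P> from src/list.h (it only compiles inside the V8 tree):

    // Illustrative use of the new List<T, P>::Allocate() (V8 internal API).
    List<int> buffer;
    buffer.Add(1);
    buffer.Add(2);
    buffer.Allocate(8);                   // old contents are freed, length() == 8
    for (int i = 0; i < buffer.length(); i++) {
      buffer[i] = 0;                      // slots already exist, unlike after Clear()
    }
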
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -225,9 +225,12 @@ int ElementsKindToShiftSize(ElementsKind elements_kind) {
       return 2;
     case EXTERNAL_DOUBLE_ELEMENTS:
     case FAST_DOUBLE_ELEMENTS:
+    case FAST_HOLEY_DOUBLE_ELEMENTS:
       return 3;
-    case FAST_SMI_ONLY_ELEMENTS:
+    case FAST_SMI_ELEMENTS:
     case FAST_ELEMENTS:
+    case FAST_HOLEY_SMI_ELEMENTS:
+    case FAST_HOLEY_ELEMENTS:
     case DICTIONARY_ELEMENTS:
     case NON_STRICT_ARGUMENTS_ELEMENTS:
       return kPointerSizeLog2;
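
For orientation (not from the diff): the shift size returned here is log2 of the per-element width in bytes, so the new FAST_HOLEY_* kinds keep the same widths as their packed counterparts. A hedged sketch of the arithmetic this function supports:

    // Illustrative arithmetic only.
    int shift = ElementsKindToShiftSize(FAST_HOLEY_DOUBLE_ELEMENTS);  // 3
    int element_size = 1 << shift;   // 8-byte doubles, same as FAST_DOUBLE_ELEMENTS
    int index = 5;                   // hypothetical element index
    int byte_offset = index << shift;  // 40: where element 5 lives in the backing store
    // FAST_HOLEY_SMI_ELEMENTS and FAST_HOLEY_ELEMENTS use kPointerSizeLog2 instead,
    // i.e. 4 bytes on 32-bit and 8 bytes on 64-bit targets.
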
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -30,6 +30,7 @@

 #include "liveedit.h"

+#include "code-stubs.h"
 #include "compilation-cache.h"
 #include "compiler.h"
 #include "debug.h"
@@ -1475,26 +1476,36 @@ static const char* DropFrames(Vector<StackFrame*> frames,
   // Check the nature of the top frame.
   Isolate* isolate = Isolate::Current();
   Code* pre_top_frame_code = pre_top_frame->LookupCode();
+  bool frame_has_padding;
   if (pre_top_frame_code->is_inline_cache_stub() &&
       pre_top_frame_code->ic_state() == DEBUG_BREAK) {
     // OK, we can drop inline cache calls.
     *mode = Debug::FRAME_DROPPED_IN_IC_CALL;
+    frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
   } else if (pre_top_frame_code ==
              isolate->debug()->debug_break_slot()) {
     // OK, we can drop debug break slot.
     *mode = Debug::FRAME_DROPPED_IN_DEBUG_SLOT_CALL;
+    frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
   } else if (pre_top_frame_code ==
              isolate->builtins()->builtin(
                  Builtins::kFrameDropper_LiveEdit)) {
     // OK, we can drop our own code.
     *mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
+    frame_has_padding = false;
   } else if (pre_top_frame_code ==
              isolate->builtins()->builtin(Builtins::kReturn_DebugBreak)) {
     *mode = Debug::FRAME_DROPPED_IN_RETURN_CALL;
+    frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
   } else if (pre_top_frame_code->kind() == Code::STUB &&
-             pre_top_frame_code->major_key()) {
-    // Entry from our unit tests, it's fine, we support this case.
+             pre_top_frame_code->major_key() == CodeStub::CEntry) {
+    // Entry from our unit tests on 'debugger' statement.
+    // It's fine, we support this case.
     *mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
+    // We don't have a padding from 'debugger' statement call.
+    // Here the stub is CEntry, it's not debug-only and can't be padded.
+    // If anyone would complain, a proxy padded stub could be added.
+    frame_has_padding = false;
   } else {
     return "Unknown structure of stack above changing function";
   }
@@ -1504,8 +1515,49 @@ static const char* DropFrames(Vector<StackFrame*> frames,
       - Debug::kFrameDropperFrameSize * kPointerSize  // Size of the new frame.
       + kPointerSize;  // Bigger address end is exclusive.

+  Address* top_frame_pc_address = top_frame->pc_address();
+
+  // top_frame may be damaged below this point. Do not used it.
+  ASSERT(!(top_frame = NULL));
+
   if (unused_stack_top > unused_stack_bottom) {
-    return "Not enough space for frame dropper frame";
+    if (frame_has_padding) {
+      int shortage_bytes =
+          static_cast<int>(unused_stack_top - unused_stack_bottom);
+
+      Address padding_start = pre_top_frame->fp() -
+          Debug::FramePaddingLayout::kFrameBaseSize * kPointerSize;
+
+      Address padding_pointer = padding_start;
+      Smi* padding_object =
+          Smi::FromInt(Debug::FramePaddingLayout::kPaddingValue);
+      while (Memory::Object_at(padding_pointer) == padding_object) {
+        padding_pointer -= kPointerSize;
+      }
+      int padding_counter =
+          Smi::cast(Memory::Object_at(padding_pointer))->value();
+      if (padding_counter * kPointerSize < shortage_bytes) {
+        return "Not enough space for frame dropper frame "
+            "(even with padding frame)";
+      }
+      Memory::Object_at(padding_pointer) =
+          Smi::FromInt(padding_counter - shortage_bytes / kPointerSize);
+
+      StackFrame* pre_pre_frame = frames[top_frame_index - 2];
+
+      memmove(padding_start + kPointerSize - shortage_bytes,
+              padding_start + kPointerSize,
+              Debug::FramePaddingLayout::kFrameBaseSize * kPointerSize);
+
+      pre_top_frame->UpdateFp(pre_top_frame->fp() - shortage_bytes);
+      pre_pre_frame->SetCallerFp(pre_top_frame->fp());
+      unused_stack_top -= shortage_bytes;
+
+      STATIC_ASSERT(sizeof(Address) == kPointerSize);
+      top_frame_pc_address -= shortage_bytes / kPointerSize;
+    } else {
+      return "Not enough space for frame dropper frame";
+    }
   }

   // Committing now. After this point we should return only NULL value.
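
A hedged numeric sketch of the shortfall bookkeeping above (values invented, assuming a 32-bit target where kPointerSize == 4; not part of the diff):

    // Suppose the frame-dropper frame needs 16 bytes more than are free:
    int shortage_bytes = 16;                  // unused_stack_top - unused_stack_bottom
    int shortage_words = shortage_bytes / 4;  // 4 pointer-sized padding slots needed
    // The padding counter Smi stored below pre_top_frame->fp() must be >= 4;
    // otherwise DropFrames() bails out with
    // "Not enough space for frame dropper frame (even with padding frame)".
    // When it is large enough, the frame base is slid down by shortage_bytes
    // (memmove + UpdateFp) and the counter is decremented by shortage_words.
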
@@ -1515,7 +1567,7 @@ static const char* DropFrames(Vector<StackFrame*> frames,
   ASSERT(!FixTryCatchHandler(pre_top_frame, bottom_js_frame));

   Handle<Code> code = Isolate::Current()->builtins()->FrameDropper_LiveEdit();
-  top_frame->set_pc(code->entry());
+  *top_frame_pc_address = code->entry();
   pre_top_frame->SetCallerFp(bottom_js_frame->fp());

   *restarter_frame_function_pointer =
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -52,6 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
 }


+bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
+  if (MarkObjectWithoutPush(obj)) {
+    marking_deque_.PushBlack(obj);
+    return true;
+  }
+  return false;
+}
+
+
 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
   ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
   if (!mark_bit.Get()) {
@@ -62,16 +71,13 @@ void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
 }


-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) {
-  MarkBit mark = Marking::MarkBitFrom(object);
-  bool old_mark = mark.Get();
-  if (!old_mark) SetMark(object, mark);
-  return old_mark;
-}
-
-
-void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) {
-  if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object);
+bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
+  MarkBit mark_bit = Marking::MarkBitFrom(obj);
+  if (!mark_bit.Get()) {
+    SetMark(obj, mark_bit);
+    return true;
+  }
+  return false;
 }


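
Worth spelling out (my reading of the hunk above, not text from the diff): both helpers now return true only when the object was freshly marked, whereas the old MarkObjectWithoutPush() returned the previous mark bit. A minimal sketch of the new calling convention:

    // New contract (sketch): "true" means "this call marked the object".
    if (collector->MarkObjectAndPush(obj)) {
      // obj was unmarked before; it is now marked and queued on the marking deque.
    } else {
      // obj had already been marked earlier; nothing was pushed.
    }
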
@@ -64,13 +64,13 @@ MarkCompactCollector::MarkCompactCollector() :  // NOLINT
       abort_incremental_marking_(false),
       compacting_(false),
       was_marked_incrementally_(false),
-      collect_maps_(FLAG_collect_maps),
       flush_monomorphic_ics_(false),
       tracer_(NULL),
       migration_slots_buffer_(NULL),
       heap_(NULL),
       code_flusher_(NULL),
-      encountered_weak_maps_(NULL) { }
+      encountered_weak_maps_(NULL),
+      marker_(this, this) { }


 #ifdef DEBUG
@@ -282,7 +282,7 @@ void MarkCompactCollector::CollectGarbage() {
   MarkLiveObjects();
   ASSERT(heap_->incremental_marking()->IsStopped());

-  if (collect_maps_) ClearNonLiveTransitions();
+  if (FLAG_collect_maps) ClearNonLiveTransitions();

   ClearWeakMaps();

@@ -294,7 +294,7 @@ void MarkCompactCollector::CollectGarbage() {

   SweepSpaces();

-  if (!collect_maps_) ReattachInitialMaps();
+  if (!FLAG_collect_maps) ReattachInitialMaps();

   Finish();

@@ -658,11 +658,6 @@ void MarkCompactCollector::AbortCompaction() {
 void MarkCompactCollector::Prepare(GCTracer* tracer) {
   was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();

-  // Disable collection of maps if incremental marking is enabled.
-  // Map collection algorithm relies on a special map transition tree traversal
-  // order which is not implemented for incremental marking.
-  collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_;
-
   // Monomorphic ICs are preserved when possible, but need to be flushed
   // when they might be keeping a Context alive, or when the heap is about
   // to be serialized.
@@ -680,7 +675,6 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {

   ASSERT(!FLAG_never_compact || !FLAG_always_compact);

-  if (collect_maps_) CreateBackPointers();
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (FLAG_gdbjit) {
     // If GDBJIT interface is active disable compaction.
@@ -1186,16 +1180,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     Heap* heap = map->GetHeap();
     Code* code = reinterpret_cast<Code*>(object);
     if (FLAG_cleanup_code_caches_at_gc) {
-      Object* raw_info = code->type_feedback_info();
-      if (raw_info->IsTypeFeedbackInfo()) {
-        TypeFeedbackCells* type_feedback_cells =
-            TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
-        for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
-          ASSERT(type_feedback_cells->AstId(i)->IsSmi());
-          JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
-          cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
-        }
-      }
+      code->ClearTypeFeedbackCells(heap);
     }
     code->CodeIterateBody<StaticMarkingVisitor>(heap);
   }
@@ -1808,11 +1793,11 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
     heap_->ClearCacheOnMap(map);

     // When map collection is enabled we have to mark through map's transitions
-    // in a special way to make transition links weak.
-    // Only maps for subclasses of JSReceiver can have transitions.
+    // in a special way to make transition links weak. Only maps for subclasses
+    // of JSReceiver can have transitions.
     STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-    if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-      MarkMapContents(map);
+    if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+      marker_.MarkMapContents(map);
     } else {
       marking_deque_.PushBlack(map);
     }
@@ -1822,79 +1807,85 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
 }


-void MarkCompactCollector::MarkMapContents(Map* map) {
+// Force instantiation of template instances.
+template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
+template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
+
+
+template <class T>
+void Marker<T>::MarkMapContents(Map* map) {
   // Mark prototype transitions array but don't push it into marking stack.
   // This will make references from it weak. We will clean dead prototype
   // transitions in ClearNonLiveTransitions.
-  FixedArray* prototype_transitions = map->prototype_transitions();
-  MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
-  if (!mark.Get()) {
-    mark.Set();
-    MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
-                                          prototype_transitions->Size());
+  Object** proto_trans_slot =
+      HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
+  HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
+  if (prototype_transitions->IsFixedArray()) {
+    mark_compact_collector()->RecordSlot(proto_trans_slot,
+                                         proto_trans_slot,
+                                         prototype_transitions);
+    MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
+    if (!mark.Get()) {
+      mark.Set();
+      MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
+                                            prototype_transitions->Size());
+    }
   }

-  Object** raw_descriptor_array_slot =
+  // Make sure that the back pointer stored either in the map itself or inside
+  // its prototype transitions array is marked. Treat pointers in the descriptor
+  // array as weak and also mark that array to prevent visiting it later.
+  base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
+
+  Object** descriptor_array_slot =
       HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
-  Object* raw_descriptor_array = *raw_descriptor_array_slot;
-  if (!raw_descriptor_array->IsSmi()) {
-    MarkDescriptorArray(
-        reinterpret_cast<DescriptorArray*>(raw_descriptor_array));
+  Object* descriptor_array = *descriptor_array_slot;
+  if (!descriptor_array->IsSmi()) {
+    MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
+  }
+
+  // Mark the Object* fields of the Map. Since the descriptor array has been
+  // marked already, it is fine that one of these fields contains a pointer
+  // to it. But make sure to skip back pointer and prototype transitions.
+  STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
+      Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
+  Object** start_slot = HeapObject::RawField(
+      map, Map::kPointerFieldsBeginOffset);
+  Object** end_slot = HeapObject::RawField(
+      map, Map::kPrototypeTransitionsOrBackPointerOffset);
+  for (Object** slot = start_slot; slot < end_slot; slot++) {
+    Object* obj = *slot;
+    if (!obj->NonFailureIsHeapObject()) continue;
+    mark_compact_collector()->RecordSlot(start_slot, slot, obj);
+    base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
   }
-
-  // Mark the Object* fields of the Map.
-  // Since the descriptor array has been marked already, it is fine
-  // that one of these fields contains a pointer to it.
-  Object** start_slot = HeapObject::RawField(map,
-                                             Map::kPointerFieldsBeginOffset);
-
-  Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
-
-  StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
 }


-void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors,
-                                                int offset) {
-  Object** slot = HeapObject::RawField(accessors, offset);
-  HeapObject* accessor = HeapObject::cast(*slot);
-  if (accessor->IsMap()) return;
-  RecordSlot(slot, slot, accessor);
-  MarkObjectAndPush(accessor);
-}
-
-
-void MarkCompactCollector::MarkDescriptorArray(
-    DescriptorArray* descriptors) {
-  MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors);
-  if (descriptors_mark.Get()) return;
+template <class T>
+void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
   // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors != heap()->empty_descriptor_array());
-  SetMark(descriptors, descriptors_mark);
+  ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());

-  FixedArray* contents = reinterpret_cast<FixedArray*>(
+  // The DescriptorArray contains a pointer to its contents array, but the
+  // contents array will be marked black and hence not be visited again.
+  if (!base_marker()->MarkObjectAndPush(descriptors)) return;
+  FixedArray* contents = FixedArray::cast(
      descriptors->get(DescriptorArray::kContentArrayIndex));
-  ASSERT(contents->IsHeapObject());
-  ASSERT(!IsMarked(contents));
-  ASSERT(contents->IsFixedArray());
-  ASSERT(contents->length() >= 2);
-  MarkBit contents_mark = Marking::MarkBitFrom(contents);
-  SetMark(contents, contents_mark);
-  // Contents contains (value, details) pairs. If the details say that the type
-  // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
-  // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
-  // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
-  // CONSTANT_TRANSITION is the value an Object* (a Map*).
-  for (int i = 0; i < contents->length(); i += 2) {
-    // If the pair (value, details) at index i, i+1 is not
-    // a transition or null descriptor, mark the value.
-    PropertyDetails details(Smi::cast(contents->get(i + 1)));
-
-    Object** slot = contents->data_start() + i;
+  ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents)));
+  base_marker()->MarkObjectWithoutPush(contents);
+
+  // If the descriptor contains a transition (value is a Map), we don't mark the
+  // value as live. It might be set to the NULL_DESCRIPTOR in
+  // ClearNonLiveTransitions later.
+  for (int i = 0; i < descriptors->number_of_descriptors(); ++i) {
+    PropertyDetails details(descriptors->GetDetails(i));
+    Object** slot = descriptors->GetValueSlot(i);
+
     if (!(*slot)->IsHeapObject()) continue;
     HeapObject* value = HeapObject::cast(*slot);

-    RecordSlot(slot, slot, *slot);
+    mark_compact_collector()->RecordSlot(slot, slot, *slot);

     switch (details.type()) {
       case NORMAL:
@@ -1902,21 +1893,22 @@ void MarkCompactCollector::MarkDescriptorArray(
       case CONSTANT_FUNCTION:
       case HANDLER:
       case INTERCEPTOR:
-        MarkObjectAndPush(value);
+        base_marker()->MarkObjectAndPush(value);
         break;
       case CALLBACKS:
         if (!value->IsAccessorPair()) {
-          MarkObjectAndPush(value);
-        } else if (!MarkObjectWithoutPush(value)) {
-          MarkAccessorPairSlot(value, AccessorPair::kGetterOffset);
-          MarkAccessorPairSlot(value, AccessorPair::kSetterOffset);
+          base_marker()->MarkObjectAndPush(value);
+        } else if (base_marker()->MarkObjectWithoutPush(value)) {
+          AccessorPair* accessors = AccessorPair::cast(value);
+          MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
+          MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
         }
         break;
       case ELEMENTS_TRANSITION:
         // For maps with multiple elements transitions, the transition maps are
         // stored in a FixedArray. Keep the fixed array alive but not the maps
         // that it refers to.
-        if (value->IsFixedArray()) MarkObjectWithoutPush(value);
+        if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value);
         break;
       case MAP_TRANSITION:
       case CONSTANT_TRANSITION:
@@ -1924,26 +1916,16 @@ void MarkCompactCollector::MarkDescriptorArray(
         break;
     }
   }
-  // The DescriptorArray descriptors contains a pointer to its contents array,
-  // but the contents array is already marked.
-  marking_deque_.PushBlack(descriptors);
 }


-void MarkCompactCollector::CreateBackPointers() {
-  HeapObjectIterator iterator(heap()->map_space());
-  for (HeapObject* next_object = iterator.Next();
-       next_object != NULL; next_object = iterator.Next()) {
-    if (next_object->IsMap()) {  // Could also be FreeSpace object on free list.
-      Map* map = Map::cast(next_object);
-      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-      if (map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-        map->CreateBackPointers();
-      } else {
-        ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
-      }
-    }
-  }
+template <class T>
+void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
+  Object** slot = HeapObject::RawField(accessors, offset);
+  HeapObject* accessor = HeapObject::cast(*slot);
+  if (accessor->IsMap()) return;
+  mark_compact_collector()->RecordSlot(slot, slot, accessor);
+  base_marker()->MarkObjectAndPush(accessor);
 }


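
The hunks above move map and descriptor marking into a Marker template shared between incremental marking and the full collector (hence the two explicit instantiations). The outline below is inferred from the calls in this diff rather than copied from mark-compact.h, so any member beyond those visible above is a guess:

    // Rough shape of the new helper (inferred sketch, not the actual declaration).
    template <class BaseMarker>
    class Marker {
     public:
      Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
          : base_marker_(base_marker),
            mark_compact_collector_(mark_compact_collector) {}

      // Marks everything reachable from a map, treating descriptor and
      // transition links as weak.
      void MarkMapContents(Map* map);
      void MarkDescriptorArray(DescriptorArray* descriptors);
      void MarkAccessorPairSlot(AccessorPair* accessors, int offset);

     private:
      BaseMarker* base_marker() { return base_marker_; }
      MarkCompactCollector* mark_compact_collector() { return mark_compact_collector_; }

      BaseMarker* base_marker_;
      MarkCompactCollector* mark_compact_collector_;
    };
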
@@ -2470,15 +2452,8 @@ void MarkCompactCollector::ReattachInitialMaps() {
 void MarkCompactCollector::ClearNonLiveTransitions() {
   HeapObjectIterator map_iterator(heap()->map_space());
   // Iterate over the map space, setting map transitions that go from
-  // a marked map to an unmarked map to null transitions. At the same time,
-  // set all the prototype fields of maps back to their original value,
-  // dropping the back pointers temporarily stored in the prototype field.
-  // Setting the prototype field requires following the linked list of
-  // back pointers, reversing them all at once. This allows us to find
-  // those maps with map transitions that need to be nulled, and only
-  // scan the descriptor arrays of those maps, not all maps.
-  // All of these actions are carried out only on maps of JSObjects
-  // and related subtypes.
+  // a marked map to an unmarked map to null transitions. This action
+  // is carried out only on maps of JSObjects and related subtypes.
   for (HeapObject* obj = map_iterator.Next();
        obj != NULL; obj = map_iterator.Next()) {
     Map* map = reinterpret_cast<Map*>(obj);
@@ -2554,36 +2529,16 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {

 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
                                                       MarkBit map_mark) {
-  // Follow the chain of back pointers to find the prototype.
-  Object* real_prototype = map;
-  while (real_prototype->IsMap()) {
-    real_prototype = Map::cast(real_prototype)->prototype();
-    ASSERT(real_prototype->IsHeapObject());
-  }
+  Object* potential_parent = map->GetBackPointer();
+  if (!potential_parent->IsMap()) return;
+  Map* parent = Map::cast(potential_parent);

-  // Follow back pointers, setting them to prototype, clearing map transitions
-  // when necessary.
-  Map* current = map;
+  // Follow back pointer, check whether we are dealing with a map transition
+  // from a live map to a dead path and in case clear transitions of parent.
   bool current_is_alive = map_mark.Get();
-  bool on_dead_path = !current_is_alive;
-  while (current->IsMap()) {
-    Object* next = current->prototype();
-    // There should never be a dead map above a live map.
-    ASSERT(on_dead_path || current_is_alive);
-
-    // A live map above a dead map indicates a dead transition. This test will
-    // always be false on the first iteration.
-    if (on_dead_path && current_is_alive) {
-      on_dead_path = false;
-      current->ClearNonLiveTransitions(heap(), real_prototype);
-    }
-
-    Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset);
-    *slot = real_prototype;
-    if (current_is_alive) RecordSlot(slot, slot, real_prototype);
-
-    current = reinterpret_cast<Map*>(next);
-    current_is_alive = Marking::MarkBitFrom(current).Get();
+  bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
+  if (!current_is_alive && parent_is_alive) {
+    parent->ClearNonLiveTransitions(heap());
   }
 }

@@ -2782,7 +2737,9 @@ static void UpdatePointer(HeapObject** p, HeapObject* object) {
     // We have to zap this pointer, because the store buffer may overflow later,
    // and then we have to scan the entire heap and we don't want to find
    // spurious newspace pointers in the old space.
-    *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0));
+    // TODO(mstarzinger): This was changed to a sentinel value to track down
+    // rare crashes, change it back to Smi::FromInt(0) later.
+    *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1));  // flood
   }
 }

@@ -3838,7 +3795,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
   bool lazy_sweeping_active = false;
   bool unused_page_present = false;

-  intptr_t old_space_size = heap()->PromotedSpaceSize();
+  intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
   intptr_t space_left =
       Min(heap()->OldGenPromotionLimit(old_space_size),
           heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;