libv8-node 18.13.0.1-aarch64-linux-musl → 19.9.0.0-aarch64-linux-musl

Files changed (62)
  1. checksums.yaml +4 -4
  2. data/lib/libv8/node/version.rb +3 -3
  3. data/vendor/v8/aarch64-linux-musl/libv8/obj/libv8_monolith.a +0 -0
  4. data/vendor/v8/include/cppgc/common.h +0 -1
  5. data/vendor/v8/include/cppgc/cross-thread-persistent.h +7 -8
  6. data/vendor/v8/include/cppgc/heap-consistency.h +46 -3
  7. data/vendor/v8/include/cppgc/heap-handle.h +43 -0
  8. data/vendor/v8/include/cppgc/heap-statistics.h +2 -2
  9. data/vendor/v8/include/cppgc/heap.h +3 -7
  10. data/vendor/v8/include/cppgc/internal/api-constants.h +11 -1
  11. data/vendor/v8/include/cppgc/internal/base-page-handle.h +45 -0
  12. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +40 -8
  13. data/vendor/v8/include/cppgc/internal/caged-heap.h +61 -0
  14. data/vendor/v8/include/cppgc/internal/gc-info.h +0 -1
  15. data/vendor/v8/include/cppgc/internal/member-storage.h +236 -0
  16. data/vendor/v8/include/cppgc/internal/name-trait.h +21 -6
  17. data/vendor/v8/include/cppgc/internal/persistent-node.h +11 -13
  18. data/vendor/v8/include/cppgc/internal/pointer-policies.h +28 -7
  19. data/vendor/v8/include/cppgc/internal/write-barrier.h +143 -101
  20. data/vendor/v8/include/cppgc/liveness-broker.h +8 -7
  21. data/vendor/v8/include/cppgc/member.h +364 -89
  22. data/vendor/v8/include/cppgc/name-provider.h +4 -4
  23. data/vendor/v8/include/cppgc/persistent.h +5 -9
  24. data/vendor/v8/include/cppgc/platform.h +2 -2
  25. data/vendor/v8/include/cppgc/sentinel-pointer.h +1 -1
  26. data/vendor/v8/include/cppgc/trace-trait.h +4 -0
  27. data/vendor/v8/include/cppgc/type-traits.h +9 -0
  28. data/vendor/v8/include/cppgc/visitor.h +89 -57
  29. data/vendor/v8/include/v8-callbacks.h +19 -5
  30. data/vendor/v8/include/v8-context.h +13 -8
  31. data/vendor/v8/include/v8-cppgc.h +12 -0
  32. data/vendor/v8/include/v8-date.h +5 -0
  33. data/vendor/v8/include/v8-embedder-heap.h +8 -3
  34. data/vendor/v8/include/v8-exception.h +1 -1
  35. data/vendor/v8/include/v8-fast-api-calls.h +46 -32
  36. data/vendor/v8/include/v8-function.h +8 -0
  37. data/vendor/v8/include/v8-initialization.h +23 -49
  38. data/vendor/v8/include/v8-inspector.h +13 -7
  39. data/vendor/v8/include/v8-internal.h +328 -123
  40. data/vendor/v8/include/v8-isolate.h +27 -42
  41. data/vendor/v8/include/v8-local-handle.h +5 -5
  42. data/vendor/v8/include/v8-locker.h +0 -11
  43. data/vendor/v8/include/v8-maybe.h +24 -1
  44. data/vendor/v8/include/v8-message.h +2 -4
  45. data/vendor/v8/include/v8-metrics.h +20 -38
  46. data/vendor/v8/include/v8-microtask-queue.h +1 -1
  47. data/vendor/v8/include/v8-object.h +8 -15
  48. data/vendor/v8/include/v8-persistent-handle.h +0 -2
  49. data/vendor/v8/include/v8-platform.h +54 -25
  50. data/vendor/v8/include/v8-primitive.h +8 -8
  51. data/vendor/v8/include/v8-profiler.h +84 -22
  52. data/vendor/v8/include/v8-regexp.h +2 -1
  53. data/vendor/v8/include/v8-script.h +62 -6
  54. data/vendor/v8/include/v8-template.h +13 -76
  55. data/vendor/v8/include/v8-unwinder-state.h +4 -4
  56. data/vendor/v8/include/v8-util.h +2 -4
  57. data/vendor/v8/include/v8-value-serializer.h +46 -23
  58. data/vendor/v8/include/v8-version.h +3 -3
  59. data/vendor/v8/include/v8-wasm.h +5 -62
  60. data/vendor/v8/include/v8-weak-callback-info.h +0 -7
  61. data/vendor/v8/include/v8config.h +280 -13
  62. metadata +5 -1
data/vendor/v8/include/cppgc/internal/write-barrier.h
@@ -8,9 +8,12 @@
 #include <cstddef>
 #include <cstdint>
 
+#include "cppgc/heap-handle.h"
 #include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/atomic-entry-flag.h"
+#include "cppgc/internal/base-page-handle.h"
+#include "cppgc/internal/member-storage.h"
 #include "cppgc/platform.h"
 #include "cppgc/sentinel-pointer.h"
 #include "cppgc/trace-trait.h"
@@ -18,6 +21,7 @@
 
 #if defined(CPPGC_CAGED_HEAP)
 #include "cppgc/internal/caged-heap-local-data.h"
+#include "cppgc/internal/caged-heap.h"
 #endif
 
 namespace cppgc {
@@ -40,16 +44,18 @@ class V8_EXPORT WriteBarrier final {
     kGenerational,
   };
 
+  enum class GenerationalBarrierType : uint8_t {
+    kPreciseSlot,
+    kPreciseUncompressedSlot,
+    kImpreciseSlot,
+  };
+
   struct Params {
     HeapHandle* heap = nullptr;
 #if V8_ENABLE_CHECKS
     Type type = Type::kNone;
 #endif  // !V8_ENABLE_CHECKS
 #if defined(CPPGC_CAGED_HEAP)
-    uintptr_t start = 0;
-    CagedHeapLocalData& caged_heap() const {
-      return *reinterpret_cast<CagedHeapLocalData*>(start);
-    }
     uintptr_t slot_offset = 0;
     uintptr_t value_offset = 0;
 #endif  // CPPGC_CAGED_HEAP
@@ -63,6 +69,9 @@ class V8_EXPORT WriteBarrier final {
   // Returns the required write barrier for a given `slot` and `value`.
   static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                             Params& params);
+  // Returns the required write barrier for a given `slot` and `value`.
+  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
+                                            Params& params);
   // Returns the required write barrier for a given `slot`.
   template <typename HeapHandleCallback>
   static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
@@ -78,15 +87,13 @@ class V8_EXPORT WriteBarrier final {
   static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                              const void* object);
 #if defined(CPPGC_YOUNG_GENERATION)
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
                                             const void* slot);
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer);
 #else  // !CPPGC_YOUNG_GENERATION
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
-                                            const void* slot) {}
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer) {}
+                                            const void* slot){}
 #endif  // CPPGC_YOUNG_GENERATION
 
 #if V8_ENABLE_CHECKS
@@ -95,12 +102,10 @@ class V8_EXPORT WriteBarrier final {
   static void CheckParams(Type expected_type, const Params& params) {}
 #endif  // !V8_ENABLE_CHECKS
 
-  // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
-  // |incremental_or_concurrent_marking_flag_|.
-  class IncrementalOrConcurrentMarkingFlagUpdater;
-  static bool IsAnyIncrementalOrConcurrentMarking() {
-    return incremental_or_concurrent_marking_flag_.MightBeEntered();
-  }
+  // The FlagUpdater class allows cppgc internal to update
+  // |write_barrier_enabled_|.
+  class FlagUpdater;
+  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }
 
  private:
   WriteBarrier() = delete;
@@ -125,17 +130,23 @@ class V8_EXPORT WriteBarrier final {
   static CagedHeapLocalData& GetLocalData(HeapHandle&);
   static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                       const AgeTable& age_table,
-                                      const void* slot, uintptr_t value_offset);
+                                      const void* slot, uintptr_t value_offset,
+                                      HeapHandle* heap_handle);
+  static void GenerationalBarrierForUncompressedSlotSlow(
+      const CagedHeapLocalData& local_data, const AgeTable& age_table,
+      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
   static void GenerationalBarrierForSourceObjectSlow(
-      const CagedHeapLocalData& local_data, const void* object);
+      const CagedHeapLocalData& local_data, const void* object,
+      HeapHandle* heap_handle);
 #endif  // CPPGC_YOUNG_GENERATION
 
-  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
+  static AtomicEntryFlag write_barrier_enabled_;
 };
 
 template <WriteBarrier::Type type>
 V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
-  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
+  if constexpr (type == WriteBarrier::Type::kNone)
+    return WriteBarrier::Type::kNone;
 #if V8_ENABLE_CHECKS
   params.type = type;
 #endif  // !V8_ENABLE_CHECKS
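
Note on the `if constexpr` change in `SetAndReturnType`: because `type` is a non-type template parameter, the comparison is now resolved at compile time, so the `kNone` instantiation carries no runtime branch at all. A minimal self-contained sketch of the same pattern (simplified `Type` enum and `Params` struct, not the real cppgc types):

    #include <cstdint>

    enum class Type : uint8_t { kNone, kMarking, kGenerational };

    struct Params {
      Type type = Type::kNone;
    };

    // Mirrors the shape of SetAndReturnType: the condition is evaluated at
    // compile time, so the kNone instantiation emits no runtime check before
    // the early return.
    template <Type type>
    Type SetAndReturn(Params& params) {
      if constexpr (type == Type::kNone) return Type::kNone;
      params.type = type;  // reached only by kMarking / kGenerational callers
      return type;
    }

    int main() {
      Params p;
      Type a = SetAndReturn<Type::kNone>(p);     // no store to p.type
      Type b = SetAndReturn<Type::kMarking>(p);  // records the barrier type
      return (a == Type::kNone && b == Type::kMarking) ? 0 : 1;
    }
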
@@ -152,6 +163,13 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+  }
+
   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* value,
                                           WriteBarrier::Params& params,
@@ -166,69 +184,77 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
   static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                 WriteBarrier::Params& params,
                                                 HeapHandleCallback) {
-    if (!TryGetCagedHeap(value, value, params)) {
-      return WriteBarrier::Type::kNone;
-    }
-    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
+    const bool within_cage = CagedHeapBase::IsWithinCage(value);
+    if (!within_cage) return WriteBarrier::Type::kNone;
+
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
+
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
   }
 
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;
-
-  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
-                                        WriteBarrier::Params& params) {
-    // TODO(chromium:1056170): Check if the null check can be folded in with
-    // the rest of the write barrier.
-    if (!value) return false;
-    params.start = reinterpret_cast<uintptr_t>(value) &
-                   ~(api_constants::kCagedHeapReservationAlignment - 1);
-    const uintptr_t slot_offset =
-        reinterpret_cast<uintptr_t>(slot) - params.start;
-    if (slot_offset > api_constants::kCagedHeapReservationSize) {
-      // Check if slot is on stack or value is sentinel or nullptr. This relies
-      // on the fact that kSentinelPointer is encoded as 0x1.
-      return false;
-    }
-    return true;
-  }
-
-  // Returns whether marking is in progress. If marking is not in progress
-  // sets the start of the cage accordingly.
-  //
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
 };
 
 template <>
 struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot,
+                                          MemberStorage storage,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+
+    return BarrierEnabledGet(slot, storage.Load(), params);
+  }
+
   template <typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback) {
-#if !defined(CPPGC_YOUNG_GENERATION)
-    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
-    }
-#endif  // !CPPGC_YOUNG_GENERATION
-    bool within_cage = TryGetCagedHeap(slot, value, params);
-    if (!within_cage) {
-      return WriteBarrier::Type::kNone;
-    }
-    if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
+
+    return BarrierEnabledGet(slot, value, params);
+  }
+
+ private:
+  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
+      const void* slot, const void* value, WriteBarrier::Params& params) {
+    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
+    if (!within_cage) return WriteBarrier::Type::kNone;
+
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
 #if defined(CPPGC_YOUNG_GENERATION)
-      params.heap = reinterpret_cast<HeapHandle*>(params.start);
-      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+      if (!heap_handle.is_young_generation_enabled())
+        return WriteBarrier::Type::kNone;
+      params.heap = &heap_handle;
+      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
+      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
 #else  // !CPPGC_YOUNG_GENERATION
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 #endif  // !CPPGC_YOUNG_GENERATION
     }
-    params.heap = reinterpret_cast<HeapHandle*>(params.start);
+
+    // Use marking barrier.
+    params.heap = &heap_handle;
     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
   }
 };
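
Worth noting in the new `kValuePresent` dispatch: the `MemberStorage` overload only calls `storage.Load()` after the cheap `WriteBarrier::IsEnabled()` check, so a member slot (which may hold a compressed pointer in caged builds) is not decompressed on the common no-barrier path. A rough, self-contained model of that ordering; `CompressedStorage`, `kBase`, and the decompression scheme here are made up for illustration, the real types live in cppgc/internal/member-storage.h:

    #include <atomic>
    #include <cstdint>

    // Hypothetical stand-in for cppgc's compressed member storage.
    struct CompressedStorage {
      uint32_t compressed = 0;
      static inline uintptr_t kBase = 0x100000000;  // fake cage base
      void* Load() const {  // "decompression" stand-in
        return compressed ? reinterpret_cast<void*>(kBase + compressed) : nullptr;
      }
    };

    // Stand-in for the AtomicEntryFlag-backed WriteBarrier::IsEnabled().
    std::atomic<bool> g_write_barrier_enabled{false};

    enum class BarrierType { kNone, kMarking };

    BarrierType SlowPath(const void* /*slot*/, const void* /*value*/) {
      return BarrierType::kMarking;  // placeholder for the real page/heap checks
    }

    // Mirrors the ordering in ValueModeDispatch<kValuePresent>::Get: check the
    // global flag first, decompress only if a barrier might actually be needed.
    BarrierType GetWriteBarrierType(const void* slot, CompressedStorage storage) {
      if (!g_write_barrier_enabled.load(std::memory_order_relaxed))
        return BarrierType::kNone;           // fast path: no Load()
      return SlowPath(slot, storage.Load()); // slow path: decompress once
    }

    int main() {
      CompressedStorage s{0x40};
      int slot = 0;
      return GetWriteBarrierType(&slot, s) == BarrierType::kNone ? 0 : 1;
    }
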
@@ -240,28 +266,28 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
-#if defined(CPPGC_YOUNG_GENERATION)
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+
     HeapHandle& handle = callback();
-    if (V8_LIKELY(!IsMarking(handle, params))) {
-      // params.start is populated by IsMarking().
+#if defined(CPPGC_YOUNG_GENERATION)
+    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
+      if (!handle.is_young_generation_enabled()) {
+        return WriteBarrier::Type::kNone;
+      }
       params.heap = &handle;
-      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-      // params.value_offset stays 0.
-      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
-        // Check if slot is on stack.
+      // Check if slot is on stack.
+      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
         return SetAndReturnType<WriteBarrier::Type::kNone>(params);
       }
+      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
-#else  // !CPPGC_YOUNG_GENERATION
-    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
-      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
-    }
-    HeapHandle& handle = callback();
-    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+#else  // !defined(CPPGC_YOUNG_GENERATION)
+    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
-#endif  // !CPPGC_YOUNG_GENERATION
+#endif  // !defined(CPPGC_YOUNG_GENERATION)
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
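
One detail of the rewritten `kNoValuePresent` path: the `HeapHandleCallback` that looks up the heap is now only invoked after the global `WriteBarrier::IsEnabled()` check has passed, so slot-only barriers stay cheap while no marking or generational barrier is active. A short self-contained sketch of that call shape, with placeholder types in place of the real cppgc handles:

    #include <atomic>

    struct HeapHandle {
      bool marking_in_progress = false;
      bool is_incremental_marking_in_progress() const {
        return marking_in_progress;
      }
    };

    std::atomic<bool> g_write_barrier_enabled{false};

    enum class BarrierType { kNone, kMarking };

    // Mirrors ValueModeDispatch<kNoValuePresent>::Get: the heap handle is
    // obtained lazily via the callback, and only when the barrier is enabled.
    template <typename HeapHandleCallback>
    BarrierType GetBarrierTypeNoValue(HeapHandleCallback callback) {
      if (!g_write_barrier_enabled.load(std::memory_order_relaxed))
        return BarrierType::kNone;  // common case: callback never runs
      HeapHandle& handle = callback();
      return handle.is_incremental_marking_in_progress() ? BarrierType::kMarking
                                                         : BarrierType::kNone;
    }

    int main() {
      HeapHandle heap;
      // The lambda stands in for Member's "find my heap" callback.
      BarrierType t =
          GetBarrierTypeNoValue([&]() -> HeapHandle& { return heap; });
      return t == BarrierType::kNone ? 0 : 1;
    }
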
@@ -278,6 +304,16 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    // `MemberStorage` will always be `RawPointer` for non-caged heap builds.
+    // Just convert to `void*` in this case.
+    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
+                                              callback);
+  }
+
   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* value,
                                           WriteBarrier::Params& params,
@@ -291,11 +327,6 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;
 
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const void*, HeapHandle**);
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(HeapHandle&);
-
   WriteBarrierTypeForNonCagedHeapPolicy() = delete;
 };
 
@@ -310,10 +341,16 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
     if (object <= static_cast<void*>(kSentinelPointer)) {
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
-    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
-    if (IsMarking(object, &params.heap)) {
+    // We know that |object| is within the normal page or in the beginning of a
+    // large page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(object));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
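
The "extract the page header by bitmasking" comment refers to `BasePageHandle::FromPayload`, added in cppgc/internal/base-page-handle.h in this release: page headers sit at a power-of-two-aligned base, so the header can be recovered from any payload pointer with a single mask. A self-contained sketch of the idea, using an illustrative 4 KiB page size rather than cppgc's real constant:

    #include <cstdint>
    #include <cstdio>

    constexpr uintptr_t kPageSize = 1u << 12;  // illustrative page size

    struct PageHeader {
      void* heap;  // stand-in for BasePageHandle::heap_handle()
    };

    // Same trick as BasePageHandle::FromPayload: clear the low bits of the
    // payload address to land on the page-aligned header.
    PageHeader* PageFromPayload(void* payload) {
      return reinterpret_cast<PageHeader*>(reinterpret_cast<uintptr_t>(payload) &
                                           ~(kPageSize - 1));
    }

    int main() {
      // Simulate a page: header at the aligned base, object payload further in.
      alignas(kPageSize) static unsigned char page[kPageSize];
      auto* header = reinterpret_cast<PageHeader*>(page);
      void* payload = page + 128;  // some object inside the page
      std::printf("recovered header: %p, expected: %p\n",
                  static_cast<void*>(PageFromPayload(payload)),
                  static_cast<void*>(header));
      return PageFromPayload(payload) == header ? 0 : 1;
    }
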
@@ -327,9 +364,9 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
   static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
-    if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
       HeapHandle& handle = callback();
-      if (IsMarking(handle)) {
+      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
         params.heap = &handle;
         return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
       }
@@ -345,6 +382,13 @@ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
                                                   params, []() {});
 }
 
+// static
+WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+    const void* slot, MemberStorage value, WriteBarrier::Params& params) {
+  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
+                                                               params, []() {});
+}
+
 // static
 template <typename HeapHandleCallback>
 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
@@ -397,34 +441,32 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
 }
 
 #if defined(CPPGC_YOUNG_GENERATION)
-// static
-void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
-  CheckParams(Type::kGenerational, params);
-
-  const CagedHeapLocalData& local_data = params.caged_heap();
-  const AgeTable& age_table = local_data.age_table;
-
-  // Bail out if the slot is in young generation.
-  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
-    return;
-
-  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
-}
 
 // static
-void WriteBarrier::GenerationalBarrierForSourceObject(
-    const Params& params, const void* inner_pointer) {
+template <WriteBarrier::GenerationalBarrierType type>
+void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
   CheckParams(Type::kGenerational, params);
 
-  const CagedHeapLocalData& local_data = params.caged_heap();
+  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
   const AgeTable& age_table = local_data.age_table;
 
-  // Assume that if the first element is in young generation, the whole range is
-  // in young generation.
+  // Bail out if the slot (precise or imprecise) is in young generation.
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
-  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
+  // Dispatch between different types of barriers.
+  // TODO(chromium:1029379): Consider reload local_data in the slow path to
+  // reduce register pressure.
+  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
+    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
+                            params.heap);
+  } else if constexpr (type ==
+                       GenerationalBarrierType::kPreciseUncompressedSlot) {
+    GenerationalBarrierForUncompressedSlotSlow(
+        local_data, age_table, slot, params.value_offset, params.heap);
+  } else {
+    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
+  }
 }
 
 #endif  // !CPPGC_YOUNG_GENERATION
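
With this change, the generational barrier is a template over `GenerationalBarrierType`, so the caller chooses between the compressed-slot, uncompressed-slot, and source-object slow paths at compile time and each instantiation keeps exactly one of the calls from the `if constexpr` chain above. A compilable model of that dispatch shape, with stub slow paths standing in for the real `*Slow` entry points:

    #include <cstdint>
    #include <cstdio>

    enum class GenerationalBarrierType : uint8_t {
      kPreciseSlot,
      kPreciseUncompressedSlot,
      kImpreciseSlot,
    };

    // Stubs standing in for the three *Slow entry points in write-barrier.h.
    void PreciseSlotSlow() { std::puts("compressed slot barrier"); }
    void PreciseUncompressedSlotSlow() { std::puts("uncompressed slot barrier"); }
    void SourceObjectSlow() { std::puts("source-object (imprecise) barrier"); }

    // Mirrors the if constexpr dispatch in WriteBarrier::GenerationalBarrier:
    // the barrier flavor is a template parameter, so each instantiation
    // contains only the one slow call it needs.
    template <GenerationalBarrierType type>
    void GenerationalBarrier() {
      if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
        PreciseSlotSlow();
      } else if constexpr (type ==
                           GenerationalBarrierType::kPreciseUncompressedSlot) {
        PreciseUncompressedSlotSlow();
      } else {
        SourceObjectSlow();
      }
    }

    int main() {
      GenerationalBarrier<GenerationalBarrierType::kPreciseSlot>();
      GenerationalBarrier<GenerationalBarrierType::kImpreciseSlot>();
      return 0;
    }
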
data/vendor/v8/include/cppgc/liveness-broker.h
@@ -7,6 +7,7 @@
 
 #include "cppgc/heap.h"
 #include "cppgc/member.h"
+#include "cppgc/sentinel-pointer.h"
 #include "cppgc/trace-trait.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
@@ -44,24 +45,24 @@ class V8_EXPORT LivenessBroker final {
  public:
   template <typename T>
   bool IsHeapObjectAlive(const T* object) const {
-    // nullptr objects are considered alive to allow weakness to be used from
+    // - nullptr objects are considered alive to allow weakness to be used from
     // stack while running into a conservative GC. Treating nullptr as dead
-    // would mean that e.g. custom collectins could not be strongified on stack.
-    return !object ||
+    // would mean that e.g. custom collections could not be strongified on
+    // stack.
+    // - Sentinel pointers are also preserved in weakness and not cleared.
+    return !object || object == kSentinelPointer ||
            IsHeapObjectAliveImpl(
                TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
   }
 
   template <typename T>
   bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
-    return (weak_member != kSentinelPointer) &&
-           IsHeapObjectAlive<T>(weak_member.Get());
+    return IsHeapObjectAlive<T>(weak_member.Get());
   }
 
   template <typename T>
   bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
-    return (untraced_member != kSentinelPointer) &&
-           IsHeapObjectAlive<T>(untraced_member.Get());
+    return IsHeapObjectAlive<T>(untraced_member.Get());
   }
 
  private:
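
The LivenessBroker change moves the sentinel check out of the WeakMember/UntracedMember overloads and into the pointer overload itself: both nullptr and kSentinelPointer now report as alive, so weak fields holding the sentinel are no longer cleared during weak processing. A self-contained model of the new predicate; the sentinel value (encoded as address 0x1 upstream) and the liveness set are simplified stand-ins, not the real cppgc internals:

    #include <cstdint>
    #include <set>

    // Stand-in for cppgc::kSentinelPointer.
    static void* const kSentinel = reinterpret_cast<void*>(uintptr_t{1});

    struct LivenessModel {
      std::set<const void*> live;  // objects the GC found reachable

      // Mirrors the new IsHeapObjectAlive(const T*): nullptr and the sentinel
      // are both treated as alive, so weak fields holding them stay untouched.
      bool IsAlive(const void* object) const {
        return object == nullptr || object == kSentinel || live.count(object) > 0;
      }
    };

    int main() {
      int some_object = 0;
      LivenessModel broker;
      broker.live.insert(&some_object);

      bool ok = broker.IsAlive(nullptr) &&       // always alive
                broker.IsAlive(kSentinel) &&     // now also preserved
                broker.IsAlive(&some_object) &&  // actually marked
                !broker.IsAlive(&broker);        // unmarked object gets cleared
      return ok ? 0 : 1;
    }
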