libv8-node 15.12.0.0.beta1-aarch64-linux → 16.17.0.0-aarch64-linux

Sign up to get free protection for your applications and to get access to all the features.
Files changed (57) hide show
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/location.rb +1 -1
  3. data/ext/libv8-node/paths.rb +5 -1
  4. data/lib/libv8/node/version.rb +3 -3
  5. data/vendor/v8/{out.gn → aarch64-linux}/libv8/obj/libv8_monolith.a +0 -0
  6. data/vendor/v8/include/cppgc/allocation.h +105 -45
  7. data/vendor/v8/include/cppgc/common.h +9 -6
  8. data/vendor/v8/include/cppgc/cross-thread-persistent.h +413 -0
  9. data/vendor/v8/include/cppgc/custom-space.h +37 -2
  10. data/vendor/v8/include/cppgc/default-platform.h +47 -48
  11. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  12. data/vendor/v8/include/cppgc/explicit-management.h +82 -0
  13. data/vendor/v8/include/cppgc/garbage-collected.h +4 -3
  14. data/vendor/v8/include/cppgc/heap-consistency.h +253 -0
  15. data/vendor/v8/include/cppgc/heap-state.h +70 -0
  16. data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
  17. data/vendor/v8/include/cppgc/heap.h +68 -6
  18. data/vendor/v8/include/cppgc/internal/api-constants.h +3 -3
  19. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +2 -1
  20. data/vendor/v8/include/cppgc/internal/compiler-specific.h +2 -2
  21. data/vendor/v8/include/cppgc/internal/gc-info.h +44 -13
  22. data/vendor/v8/include/cppgc/internal/name-trait.h +111 -0
  23. data/vendor/v8/include/cppgc/internal/persistent-node.h +58 -2
  24. data/vendor/v8/include/cppgc/internal/pointer-policies.h +69 -28
  25. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +1 -1
  26. data/vendor/v8/include/cppgc/internal/write-barrier.h +390 -35
  27. data/vendor/v8/include/cppgc/liveness-broker.h +11 -2
  28. data/vendor/v8/include/cppgc/macros.h +2 -0
  29. data/vendor/v8/include/cppgc/member.h +87 -25
  30. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  31. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  32. data/vendor/v8/include/cppgc/persistent.h +40 -10
  33. data/vendor/v8/include/cppgc/platform.h +49 -25
  34. data/vendor/v8/include/cppgc/prefinalizer.h +1 -1
  35. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  36. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  37. data/vendor/v8/include/cppgc/source-location.h +2 -1
  38. data/vendor/v8/include/cppgc/testing.h +99 -0
  39. data/vendor/v8/include/cppgc/trace-trait.h +8 -3
  40. data/vendor/v8/include/cppgc/type-traits.h +157 -19
  41. data/vendor/v8/include/cppgc/visitor.h +194 -28
  42. data/vendor/v8/include/libplatform/libplatform.h +11 -0
  43. data/vendor/v8/include/libplatform/v8-tracing.h +2 -0
  44. data/vendor/v8/include/v8-cppgc.h +258 -159
  45. data/vendor/v8/include/v8-fast-api-calls.h +603 -155
  46. data/vendor/v8/include/v8-inspector.h +22 -4
  47. data/vendor/v8/include/v8-internal.h +111 -27
  48. data/vendor/v8/include/v8-metrics.h +77 -8
  49. data/vendor/v8/include/v8-platform.h +47 -22
  50. data/vendor/v8/include/v8-profiler.h +75 -11
  51. data/vendor/v8/include/v8-unwinder-state.h +30 -0
  52. data/vendor/v8/include/v8-util.h +1 -1
  53. data/vendor/v8/include/v8-version.h +4 -4
  54. data/vendor/v8/include/v8.h +1196 -642
  55. data/vendor/v8/include/v8config.h +87 -11
  56. metadata +19 -7
  57. data/vendor/v8/include/cppgc/internal/process-heap.h +0 -34
@@ -5,8 +5,15 @@
5
5
  #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6
6
  #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7
7
 
8
+ #include <cstddef>
9
+ #include <cstdint>
10
+
11
+ #include "cppgc/heap-state.h"
8
12
  #include "cppgc/internal/api-constants.h"
9
- #include "cppgc/internal/process-heap.h"
13
+ #include "cppgc/internal/atomic-entry-flag.h"
14
+ #include "cppgc/platform.h"
15
+ #include "cppgc/sentinel-pointer.h"
16
+ #include "cppgc/trace-trait.h"
10
17
  #include "v8config.h" // NOLINT(build/include_directory)
11
18
 
12
19
  #if defined(CPPGC_CAGED_HEAP)
@@ -14,64 +21,412 @@
14
21
  #endif
15
22
 
16
23
  namespace cppgc {
24
+
25
+ class HeapHandle;
26
+
17
27
  namespace internal {
18
28
 
29
+ #if defined(CPPGC_CAGED_HEAP)
30
+ class WriteBarrierTypeForCagedHeapPolicy;
31
+ #else // !CPPGC_CAGED_HEAP
32
+ class WriteBarrierTypeForNonCagedHeapPolicy;
33
+ #endif // !CPPGC_CAGED_HEAP
34
+
19
35
  class V8_EXPORT WriteBarrier final {
20
36
  public:
21
- static V8_INLINE void MarkingBarrier(const void* slot, const void* value) {
37
+ enum class Type : uint8_t {
38
+ kNone,
39
+ kMarking,
40
+ kGenerational,
41
+ };
42
+
43
+ struct Params {
44
+ HeapHandle* heap = nullptr;
45
+ #if V8_ENABLE_CHECKS
46
+ Type type = Type::kNone;
47
+ #endif // !V8_ENABLE_CHECKS
48
+ #if defined(CPPGC_CAGED_HEAP)
49
+ uintptr_t start = 0;
50
+ CagedHeapLocalData& caged_heap() const {
51
+ return *reinterpret_cast<CagedHeapLocalData*>(start);
52
+ }
53
+ uintptr_t slot_offset = 0;
54
+ uintptr_t value_offset = 0;
55
+ #endif // CPPGC_CAGED_HEAP
56
+ };
57
+
58
+ enum class ValueMode {
59
+ kValuePresent,
60
+ kNoValuePresent,
61
+ };
62
+
63
+ // Returns the required write barrier for a given `slot` and `value`.
64
+ static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
65
+ Params& params);
66
+ // Returns the required write barrier for a given `slot`.
67
+ template <typename HeapHandleCallback>
68
+ static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
69
+ HeapHandleCallback callback);
70
+ // Returns the required write barrier for a given `value`.
71
+ static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
72
+
73
+ template <typename HeapHandleCallback>
74
+ static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
75
+ const void* value, Params& params, HeapHandleCallback callback);
76
+
77
+ static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
78
+ const void* object);
79
+ static V8_INLINE void DijkstraMarkingBarrierRange(
80
+ const Params& params, const void* first_element, size_t element_size,
81
+ size_t number_of_elements, TraceCallback trace_callback);
82
+ static V8_INLINE void SteeleMarkingBarrier(const Params& params,
83
+ const void* object);
84
+ #if defined(CPPGC_YOUNG_GENERATION)
85
+ static V8_INLINE void GenerationalBarrier(const Params& params,
86
+ const void* slot);
87
+ #else // !CPPGC_YOUNG_GENERATION
88
+ static V8_INLINE void GenerationalBarrier(const Params& params,
89
+ const void* slot) {}
90
+ #endif // CPPGC_YOUNG_GENERATION
91
+
92
+ #if V8_ENABLE_CHECKS
93
+ static void CheckParams(Type expected_type, const Params& params);
94
+ #else // !V8_ENABLE_CHECKS
95
+ static void CheckParams(Type expected_type, const Params& params) {}
96
+ #endif // !V8_ENABLE_CHECKS
97
+
98
+ // The IncrementalOrConcurrentMarkingFlagUpdater class allows cppgc internals to update
99
+ // |incremental_or_concurrent_marking_flag_|.
100
+ class IncrementalOrConcurrentMarkingFlagUpdater;
101
+ static bool IsAnyIncrementalOrConcurrentMarking() {
102
+ return incremental_or_concurrent_marking_flag_.MightBeEntered();
103
+ }
104
+
105
+ private:
106
+ WriteBarrier() = delete;
107
+
108
+ #if defined(CPPGC_CAGED_HEAP)
109
+ using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
110
+ #else // !CPPGC_CAGED_HEAP
111
+ using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
112
+ #endif // !CPPGC_CAGED_HEAP
113
+
114
+ static void DijkstraMarkingBarrierSlow(const void* value);
115
+ static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
116
+ static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
117
+ const void* first_element,
118
+ size_t element_size,
119
+ size_t number_of_elements,
120
+ TraceCallback trace_callback);
121
+ static void SteeleMarkingBarrierSlow(const void* value);
122
+ static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
123
+
124
+ #if defined(CPPGC_YOUNG_GENERATION)
125
+ static CagedHeapLocalData& GetLocalData(HeapHandle&);
126
+ static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
127
+ const AgeTable& ageTable,
128
+ const void* slot, uintptr_t value_offset);
129
+ #endif // CPPGC_YOUNG_GENERATION
130
+
131
+ static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
132
+ };
133
+
134
+ template <WriteBarrier::Type type>
135
+ V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
136
+ if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
137
+ #if V8_ENABLE_CHECKS
138
+ params.type = type;
139
+ #endif // !V8_ENABLE_CHECKS
140
+ return type;
141
+ }
142
+
22
143
  #if defined(CPPGC_CAGED_HEAP)
23
- const uintptr_t start =
24
- reinterpret_cast<uintptr_t>(value) &
25
- ~(api_constants::kCagedHeapReservationAlignment - 1);
26
- const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) - start;
144
+ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
145
+ public:
146
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
147
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
148
+ WriteBarrier::Params& params,
149
+ HeapHandleCallback callback) {
150
+ return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
151
+ }
152
+
153
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
154
+ static V8_INLINE WriteBarrier::Type Get(const void* value,
155
+ WriteBarrier::Params& params,
156
+ HeapHandleCallback callback) {
157
+ return GetNoSlot(value, params, callback);
158
+ }
159
+
160
+ template <typename HeapHandleCallback>
161
+ static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
162
+ const void* value, WriteBarrier::Params& params,
163
+ HeapHandleCallback callback) {
164
+ return GetNoSlot(value, params, callback);
165
+ }
166
+
167
+ private:
168
+ WriteBarrierTypeForCagedHeapPolicy() = delete;
169
+
170
+ template <typename HeapHandleCallback>
171
+ static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
172
+ WriteBarrier::Params& params,
173
+ HeapHandleCallback) {
174
+ if (!TryGetCagedHeap(value, value, params)) {
175
+ return WriteBarrier::Type::kNone;
176
+ }
177
+ if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
178
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
179
+ }
180
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
181
+ }
182
+
183
+ template <WriteBarrier::ValueMode value_mode>
184
+ struct ValueModeDispatch;
185
+
186
+ static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
187
+ WriteBarrier::Params& params) {
188
+ // TODO(chromium:1056170): Check if the null check can be folded in with
189
+ // the rest of the write barrier.
190
+ if (!value) return false;
191
+ params.start = reinterpret_cast<uintptr_t>(value) &
192
+ ~(api_constants::kCagedHeapReservationAlignment - 1);
193
+ const uintptr_t slot_offset =
194
+ reinterpret_cast<uintptr_t>(slot) - params.start;
27
195
  if (slot_offset > api_constants::kCagedHeapReservationSize) {
28
196
  // Check if slot is on stack or value is sentinel or nullptr. This relies
29
197
  // on the fact that kSentinelPointer is encoded as 0x1.
30
- return;
198
+ return false;
31
199
  }
200
+ return true;
201
+ }
32
202
 
33
- CagedHeapLocalData* local_data =
34
- reinterpret_cast<CagedHeapLocalData*>(start);
35
- if (V8_UNLIKELY(local_data->is_marking_in_progress)) {
36
- MarkingBarrierSlow(value);
37
- return;
203
+ // Returns whether marking is in progress. If marking is not in progress
204
+ // sets the start of the cage accordingly.
205
+ //
206
+ // TODO(chromium:1056170): Create fast path on API.
207
+ static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
208
+ };
209
+
210
+ template <>
211
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
212
+ WriteBarrier::ValueMode::kValuePresent> {
213
+ template <typename HeapHandleCallback>
214
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
215
+ WriteBarrier::Params& params,
216
+ HeapHandleCallback) {
217
+ bool within_cage = TryGetCagedHeap(slot, value, params);
218
+ if (!within_cage) {
219
+ return WriteBarrier::Type::kNone;
38
220
  }
221
+ if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
39
222
  #if defined(CPPGC_YOUNG_GENERATION)
40
- GenerationalBarrier(local_data, slot, slot_offset,
41
- reinterpret_cast<uintptr_t>(value) - start);
42
- #endif
43
- #else
44
- if (V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) return;
223
+ params.heap = reinterpret_cast<HeapHandle*>(params.start);
224
+ params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
225
+ params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
226
+ return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
227
+ #else // !CPPGC_YOUNG_GENERATION
228
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
229
+ #endif // !CPPGC_YOUNG_GENERATION
230
+ }
231
+ params.heap = reinterpret_cast<HeapHandle*>(params.start);
232
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
233
+ }
234
+ };
235
+
236
+ template <>
237
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
238
+ WriteBarrier::ValueMode::kNoValuePresent> {
239
+ template <typename HeapHandleCallback>
240
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
241
+ WriteBarrier::Params& params,
242
+ HeapHandleCallback callback) {
243
+ #if defined(CPPGC_YOUNG_GENERATION)
244
+ HeapHandle& handle = callback();
245
+ if (V8_LIKELY(!IsMarking(handle, params))) {
246
+ // params.start is populated by IsMarking().
247
+ params.heap = &handle;
248
+ params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
249
+ // params.value_offset stays 0.
250
+ if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
251
+ // Check if slot is on stack.
252
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
253
+ }
254
+ return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
255
+ }
256
+ #else // !CPPGC_YOUNG_GENERATION
257
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
258
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
259
+ }
260
+ HeapHandle& handle = callback();
261
+ if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
262
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
263
+ }
264
+ #endif // !CPPGC_YOUNG_GENERATION
265
+ params.heap = &handle;
266
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
267
+ }
268
+ };
45
269
 
46
- MarkingBarrierSlowWithSentinelCheck(value);
47
270
  #endif // CPPGC_CAGED_HEAP
271
+
272
+ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
273
+ public:
274
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
275
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
276
+ WriteBarrier::Params& params,
277
+ HeapHandleCallback callback) {
278
+ return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
48
279
  }
49
280
 
50
- private:
51
- WriteBarrier() = delete;
281
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
282
+ static V8_INLINE WriteBarrier::Type Get(const void* value,
283
+ WriteBarrier::Params& params,
284
+ HeapHandleCallback callback) {
285
+ // The slot will never be used in `Get()` below.
286
+ return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
287
+ callback);
288
+ }
52
289
 
53
- static void MarkingBarrierSlow(const void* value);
54
- static void MarkingBarrierSlowWithSentinelCheck(const void* value);
290
+ template <typename HeapHandleCallback>
291
+ static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
292
+ const void* value, WriteBarrier::Params& params,
293
+ HeapHandleCallback callback) {
294
+ // The slot will never be used in `Get()` below.
295
+ return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
296
+ callback);
297
+ }
55
298
 
56
- #if defined(CPPGC_YOUNG_GENERATION)
57
- static V8_INLINE void GenerationalBarrier(CagedHeapLocalData* local_data,
58
- const void* slot,
59
- uintptr_t slot_offset,
60
- uintptr_t value_offset) {
61
- const AgeTable& age_table = local_data->age_table;
299
+ private:
300
+ template <WriteBarrier::ValueMode value_mode>
301
+ struct ValueModeDispatch;
62
302
 
63
- // Bail out if the slot is in young generation.
64
- if (V8_LIKELY(age_table[slot_offset] == AgeTable::Age::kYoung)) return;
303
+ // TODO(chromium:1056170): Create fast path on API.
304
+ static bool IsMarking(const void*, HeapHandle**);
305
+ // TODO(chromium:1056170): Create fast path on API.
306
+ static bool IsMarking(HeapHandle&);
307
+
308
+ WriteBarrierTypeForNonCagedHeapPolicy() = delete;
309
+ };
65
310
 
66
- GenerationalBarrierSlow(local_data, age_table, slot, value_offset);
311
+ template <>
312
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
313
+ WriteBarrier::ValueMode::kValuePresent> {
314
+ template <typename HeapHandleCallback>
315
+ static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
316
+ WriteBarrier::Params& params,
317
+ HeapHandleCallback callback) {
318
+ // The following check covers nullptr as well as sentinel pointer.
319
+ if (object <= static_cast<void*>(kSentinelPointer)) {
320
+ return WriteBarrier::Type::kNone;
321
+ }
322
+ if (IsMarking(object, &params.heap)) {
323
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
324
+ }
325
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
67
326
  }
327
+ };
68
328
 
69
- static void GenerationalBarrierSlow(CagedHeapLocalData* local_data,
70
- const AgeTable& ageTable,
71
- const void* slot, uintptr_t value_offset);
72
- #endif
329
+ template <>
330
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
331
+ WriteBarrier::ValueMode::kNoValuePresent> {
332
+ template <typename HeapHandleCallback>
333
+ static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
334
+ WriteBarrier::Params& params,
335
+ HeapHandleCallback callback) {
336
+ if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
337
+ HeapHandle& handle = callback();
338
+ if (IsMarking(handle)) {
339
+ params.heap = &handle;
340
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
341
+ }
342
+ }
343
+ return WriteBarrier::Type::kNone;
344
+ }
73
345
  };
74
346
 
347
+ // static
348
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
349
+ const void* slot, const void* value, WriteBarrier::Params& params) {
350
+ return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
351
+ params, []() {});
352
+ }
353
+
354
+ // static
355
+ template <typename HeapHandleCallback>
356
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
357
+ const void* slot, WriteBarrier::Params& params,
358
+ HeapHandleCallback callback) {
359
+ return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
360
+ slot, nullptr, params, callback);
361
+ }
362
+
363
+ // static
364
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
365
+ const void* value, WriteBarrier::Params& params) {
366
+ return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
367
+ []() {});
368
+ }
369
+
370
+ // static
371
+ template <typename HeapHandleCallback>
372
+ WriteBarrier::Type
373
+ WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
374
+ const void* value, Params& params, HeapHandleCallback callback) {
375
+ return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
376
+ callback);
377
+ }
378
+
379
+ // static
380
+ void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
381
+ const void* object) {
382
+ CheckParams(Type::kMarking, params);
383
+ #if defined(CPPGC_CAGED_HEAP)
384
+ // Caged heap already filters out sentinels.
385
+ DijkstraMarkingBarrierSlow(object);
386
+ #else // !CPPGC_CAGED_HEAP
387
+ DijkstraMarkingBarrierSlowWithSentinelCheck(object);
388
+ #endif // !CPPGC_CAGED_HEAP
389
+ }
390
+
391
+ // static
392
+ void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
393
+ const void* first_element,
394
+ size_t element_size,
395
+ size_t number_of_elements,
396
+ TraceCallback trace_callback) {
397
+ CheckParams(Type::kMarking, params);
398
+ DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
399
+ number_of_elements, trace_callback);
400
+ }
401
+
402
+ // static
403
+ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
404
+ const void* object) {
405
+ CheckParams(Type::kMarking, params);
406
+ #if defined(CPPGC_CAGED_HEAP)
407
+ // Caged heap already filters out sentinels.
408
+ SteeleMarkingBarrierSlow(object);
409
+ #else // !CPPGC_CAGED_HEAP
410
+ SteeleMarkingBarrierSlowWithSentinelCheck(object);
411
+ #endif // !CPPGC_CAGED_HEAP
412
+ }
413
+
414
+ #if defined(CPPGC_YOUNG_GENERATION)
415
+ // static
416
+ void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
417
+ CheckParams(Type::kGenerational, params);
418
+
419
+ const CagedHeapLocalData& local_data = params.caged_heap();
420
+ const AgeTable& age_table = local_data.age_table;
421
+
422
+ // Bail out if the slot is in young generation.
423
+ if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;
424
+
425
+ GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
426
+ }
427
+
428
+ #endif // !CPPGC_YOUNG_GENERATION
429
+
75
430
  } // namespace internal
76
431
  } // namespace cppgc
77
432
 
@@ -19,7 +19,7 @@ class LivenessBrokerFactory;
19
19
  /**
20
20
  * The broker is passed to weak callbacks to allow (temporarily) querying
21
21
  * the liveness state of an object. References to non-live objects must be
22
- * cleared when IsHeapObjectAlive() returns false.
22
+ * cleared when `IsHeapObjectAlive()` returns false.
23
23
  *
24
24
  * \code
25
25
  * class GCedWithCustomWeakCallback final
@@ -44,11 +44,20 @@ class V8_EXPORT LivenessBroker final {
44
44
  public:
45
45
  template <typename T>
46
46
  bool IsHeapObjectAlive(const T* object) const {
47
- return object &&
47
+ // nullptr objects are considered alive to allow weakness to be used from
48
+ // stack while running into a conservative GC. Treating nullptr as dead
49
+ // would mean that e.g. custom collections could not be strongified on stack.
50
+ return !object ||
48
51
  IsHeapObjectAliveImpl(
49
52
  TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
50
53
  }
51
54
 
55
+ template <typename T>
56
+ bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
57
+ return (weak_member != kSentinelPointer) &&
58
+ IsHeapObjectAlive<T>(weak_member.Get());
59
+ }
60
+
52
61
  template <typename T>
53
62
  bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
54
63
  return (untraced_member != kSentinelPointer) &&
@@ -5,6 +5,8 @@
5
5
  #ifndef INCLUDE_CPPGC_MACROS_H_
6
6
  #define INCLUDE_CPPGC_MACROS_H_
7
7
 
8
+ #include <cstddef>
9
+
8
10
  #include "cppgc/internal/compiler-specific.h"
9
11
 
10
12
  namespace cppgc {