libv8-node 15.12.0.0.beta1-aarch64-linux-musl → 17.9.1.0-aarch64-linux-musl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100) hide show
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/location.rb +1 -1
  3. data/ext/libv8-node/paths.rb +5 -1
  4. data/lib/libv8/node/version.rb +3 -3
  5. data/vendor/v8/{out.gn → aarch64-linux-musl}/libv8/obj/libv8_monolith.a +0 -0
  6. data/vendor/v8/include/cppgc/allocation.h +110 -44
  7. data/vendor/v8/include/cppgc/common.h +9 -6
  8. data/vendor/v8/include/cppgc/cross-thread-persistent.h +465 -0
  9. data/vendor/v8/include/cppgc/custom-space.h +37 -2
  10. data/vendor/v8/include/cppgc/default-platform.h +47 -48
  11. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  12. data/vendor/v8/include/cppgc/explicit-management.h +82 -0
  13. data/vendor/v8/include/cppgc/garbage-collected.h +4 -3
  14. data/vendor/v8/include/cppgc/heap-consistency.h +253 -0
  15. data/vendor/v8/include/cppgc/heap-state.h +70 -0
  16. data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
  17. data/vendor/v8/include/cppgc/heap.h +68 -6
  18. data/vendor/v8/include/cppgc/internal/api-constants.h +3 -3
  19. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +4 -3
  20. data/vendor/v8/include/cppgc/internal/compiler-specific.h +2 -2
  21. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +2 -0
  22. data/vendor/v8/include/cppgc/internal/gc-info.h +124 -13
  23. data/vendor/v8/include/cppgc/internal/name-trait.h +122 -0
  24. data/vendor/v8/include/cppgc/internal/persistent-node.h +94 -6
  25. data/vendor/v8/include/cppgc/internal/pointer-policies.h +81 -29
  26. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +1 -1
  27. data/vendor/v8/include/cppgc/internal/write-barrier.h +398 -35
  28. data/vendor/v8/include/cppgc/liveness-broker.h +11 -2
  29. data/vendor/v8/include/cppgc/macros.h +2 -0
  30. data/vendor/v8/include/cppgc/member.h +87 -25
  31. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  32. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  33. data/vendor/v8/include/cppgc/persistent.h +41 -11
  34. data/vendor/v8/include/cppgc/platform.h +49 -25
  35. data/vendor/v8/include/cppgc/prefinalizer.h +2 -2
  36. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  37. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  38. data/vendor/v8/include/cppgc/source-location.h +2 -1
  39. data/vendor/v8/include/cppgc/testing.h +99 -0
  40. data/vendor/v8/include/cppgc/trace-trait.h +8 -3
  41. data/vendor/v8/include/cppgc/type-traits.h +157 -19
  42. data/vendor/v8/include/cppgc/visitor.h +194 -28
  43. data/vendor/v8/include/libplatform/libplatform.h +11 -0
  44. data/vendor/v8/include/libplatform/v8-tracing.h +2 -0
  45. data/vendor/v8/include/v8-array-buffer.h +433 -0
  46. data/vendor/v8/include/v8-callbacks.h +377 -0
  47. data/vendor/v8/include/v8-container.h +129 -0
  48. data/vendor/v8/include/v8-context.h +418 -0
  49. data/vendor/v8/include/v8-cppgc.h +261 -159
  50. data/vendor/v8/include/v8-data.h +65 -0
  51. data/vendor/v8/include/v8-date.h +43 -0
  52. data/vendor/v8/include/v8-debug.h +151 -0
  53. data/vendor/v8/include/v8-embedder-heap.h +238 -0
  54. data/vendor/v8/include/v8-exception.h +224 -0
  55. data/vendor/v8/include/v8-extension.h +62 -0
  56. data/vendor/v8/include/v8-external.h +37 -0
  57. data/vendor/v8/include/v8-fast-api-calls.h +652 -152
  58. data/vendor/v8/include/v8-forward.h +81 -0
  59. data/vendor/v8/include/v8-function-callback.h +475 -0
  60. data/vendor/v8/include/v8-function.h +122 -0
  61. data/vendor/v8/include/v8-initialization.h +282 -0
  62. data/vendor/v8/include/v8-inspector.h +33 -25
  63. data/vendor/v8/include/v8-internal.h +178 -31
  64. data/vendor/v8/include/v8-isolate.h +1662 -0
  65. data/vendor/v8/include/v8-json.h +47 -0
  66. data/vendor/v8/include/v8-local-handle.h +459 -0
  67. data/vendor/v8/include/v8-locker.h +148 -0
  68. data/vendor/v8/include/v8-maybe.h +137 -0
  69. data/vendor/v8/include/v8-memory-span.h +43 -0
  70. data/vendor/v8/include/v8-message.h +241 -0
  71. data/vendor/v8/include/v8-metrics.h +114 -9
  72. data/vendor/v8/include/v8-microtask-queue.h +152 -0
  73. data/vendor/v8/include/v8-microtask.h +28 -0
  74. data/vendor/v8/include/v8-object.h +770 -0
  75. data/vendor/v8/include/v8-persistent-handle.h +590 -0
  76. data/vendor/v8/include/v8-platform.h +74 -25
  77. data/vendor/v8/include/v8-primitive-object.h +118 -0
  78. data/vendor/v8/include/v8-primitive.h +858 -0
  79. data/vendor/v8/include/v8-profiler.h +72 -9
  80. data/vendor/v8/include/v8-promise.h +174 -0
  81. data/vendor/v8/include/v8-proxy.h +50 -0
  82. data/vendor/v8/include/v8-regexp.h +105 -0
  83. data/vendor/v8/include/v8-script.h +771 -0
  84. data/vendor/v8/include/v8-snapshot.h +198 -0
  85. data/vendor/v8/include/v8-statistics.h +215 -0
  86. data/vendor/v8/include/v8-template.h +1052 -0
  87. data/vendor/v8/include/v8-traced-handle.h +605 -0
  88. data/vendor/v8/include/v8-typed-array.h +282 -0
  89. data/vendor/v8/include/v8-unwinder-state.h +31 -0
  90. data/vendor/v8/include/v8-unwinder.h +129 -0
  91. data/vendor/v8/include/v8-util.h +8 -2
  92. data/vendor/v8/include/v8-value-serializer.h +249 -0
  93. data/vendor/v8/include/v8-value.h +526 -0
  94. data/vendor/v8/include/v8-version.h +3 -3
  95. data/vendor/v8/include/v8-wasm.h +245 -0
  96. data/vendor/v8/include/v8-weak-callback-info.h +73 -0
  97. data/vendor/v8/include/v8.h +41 -12050
  98. data/vendor/v8/include/v8config.h +87 -11
  99. metadata +65 -11
  100. data/vendor/v8/include/cppgc/internal/process-heap.h +0 -34
@@ -5,8 +5,15 @@
5
5
  #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6
6
  #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7
7
 
8
+ #include <cstddef>
9
+ #include <cstdint>
10
+
11
+ #include "cppgc/heap-state.h"
8
12
  #include "cppgc/internal/api-constants.h"
9
- #include "cppgc/internal/process-heap.h"
13
+ #include "cppgc/internal/atomic-entry-flag.h"
14
+ #include "cppgc/platform.h"
15
+ #include "cppgc/sentinel-pointer.h"
16
+ #include "cppgc/trace-trait.h"
10
17
  #include "v8config.h" // NOLINT(build/include_directory)
11
18
 
12
19
  #if defined(CPPGC_CAGED_HEAP)
@@ -14,64 +21,420 @@
14
21
  #endif
15
22
 
16
23
  namespace cppgc {
24
+
25
+ class HeapHandle;
26
+
17
27
  namespace internal {
18
28
 
29
+ #if defined(CPPGC_CAGED_HEAP)
30
+ class WriteBarrierTypeForCagedHeapPolicy;
31
+ #else // !CPPGC_CAGED_HEAP
32
+ class WriteBarrierTypeForNonCagedHeapPolicy;
33
+ #endif // !CPPGC_CAGED_HEAP
34
+
19
35
  class V8_EXPORT WriteBarrier final {
20
36
  public:
21
- static V8_INLINE void MarkingBarrier(const void* slot, const void* value) {
37
+ enum class Type : uint8_t {
38
+ kNone,
39
+ kMarking,
40
+ kGenerational,
41
+ };
42
+
43
+ struct Params {
44
+ HeapHandle* heap = nullptr;
45
+ #if V8_ENABLE_CHECKS
46
+ Type type = Type::kNone;
47
+ #endif // !V8_ENABLE_CHECKS
48
+ #if defined(CPPGC_CAGED_HEAP)
49
+ uintptr_t start = 0;
50
+ CagedHeapLocalData& caged_heap() const {
51
+ return *reinterpret_cast<CagedHeapLocalData*>(start);
52
+ }
53
+ uintptr_t slot_offset = 0;
54
+ uintptr_t value_offset = 0;
55
+ #endif // CPPGC_CAGED_HEAP
56
+ };
57
+
58
+ enum class ValueMode {
59
+ kValuePresent,
60
+ kNoValuePresent,
61
+ };
62
+
63
+ // Returns the required write barrier for a given `slot` and `value`.
64
+ static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
65
+ Params& params);
66
+ // Returns the required write barrier for a given `slot`.
67
+ template <typename HeapHandleCallback>
68
+ static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
69
+ HeapHandleCallback callback);
70
+ // Returns the required write barrier for a given `value`.
71
+ static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
72
+
73
+ template <typename HeapHandleCallback>
74
+ static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
75
+ const void* value, Params& params, HeapHandleCallback callback);
76
+
77
+ static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
78
+ const void* object);
79
+ static V8_INLINE void DijkstraMarkingBarrierRange(
80
+ const Params& params, const void* first_element, size_t element_size,
81
+ size_t number_of_elements, TraceCallback trace_callback);
82
+ static V8_INLINE void SteeleMarkingBarrier(const Params& params,
83
+ const void* object);
84
+ #if defined(CPPGC_YOUNG_GENERATION)
85
+ static V8_INLINE void GenerationalBarrier(const Params& params,
86
+ const void* slot);
87
+ #else // !CPPGC_YOUNG_GENERATION
88
+ static V8_INLINE void GenerationalBarrier(const Params& params,
89
+ const void* slot) {}
90
+ #endif // CPPGC_YOUNG_GENERATION
91
+
92
+ #if V8_ENABLE_CHECKS
93
+ static void CheckParams(Type expected_type, const Params& params);
94
+ #else // !V8_ENABLE_CHECKS
95
+ static void CheckParams(Type expected_type, const Params& params) {}
96
+ #endif // !V8_ENABLE_CHECKS
97
+
98
+ // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
99
+ // |incremental_or_concurrent_marking_flag_|.
100
+ class IncrementalOrConcurrentMarkingFlagUpdater;
101
+ static bool IsAnyIncrementalOrConcurrentMarking() {
102
+ return incremental_or_concurrent_marking_flag_.MightBeEntered();
103
+ }
104
+
105
+ private:
106
+ WriteBarrier() = delete;
107
+
22
108
  #if defined(CPPGC_CAGED_HEAP)
23
- const uintptr_t start =
24
- reinterpret_cast<uintptr_t>(value) &
25
- ~(api_constants::kCagedHeapReservationAlignment - 1);
26
- const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) - start;
109
+ using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
110
+ #else // !CPPGC_CAGED_HEAP
111
+ using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
112
+ #endif // !CPPGC_CAGED_HEAP
113
+
114
+ static void DijkstraMarkingBarrierSlow(const void* value);
115
+ static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
116
+ static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
117
+ const void* first_element,
118
+ size_t element_size,
119
+ size_t number_of_elements,
120
+ TraceCallback trace_callback);
121
+ static void SteeleMarkingBarrierSlow(const void* value);
122
+ static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
123
+
124
+ #if defined(CPPGC_YOUNG_GENERATION)
125
+ static CagedHeapLocalData& GetLocalData(HeapHandle&);
126
+ static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
127
+ const AgeTable& ageTable,
128
+ const void* slot, uintptr_t value_offset);
129
+ #endif // CPPGC_YOUNG_GENERATION
130
+
131
+ static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
132
+ };
133
+
134
+ template <WriteBarrier::Type type>
135
+ V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
136
+ if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
137
+ #if V8_ENABLE_CHECKS
138
+ params.type = type;
139
+ #endif // !V8_ENABLE_CHECKS
140
+ return type;
141
+ }
142
+
143
+ #if defined(CPPGC_CAGED_HEAP)
144
+ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
145
+ public:
146
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
147
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
148
+ WriteBarrier::Params& params,
149
+ HeapHandleCallback callback) {
150
+ return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
151
+ }
152
+
153
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
154
+ static V8_INLINE WriteBarrier::Type Get(const void* value,
155
+ WriteBarrier::Params& params,
156
+ HeapHandleCallback callback) {
157
+ return GetNoSlot(value, params, callback);
158
+ }
159
+
160
+ template <typename HeapHandleCallback>
161
+ static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
162
+ const void* value, WriteBarrier::Params& params,
163
+ HeapHandleCallback callback) {
164
+ return GetNoSlot(value, params, callback);
165
+ }
166
+
167
+ private:
168
+ WriteBarrierTypeForCagedHeapPolicy() = delete;
169
+
170
+ template <typename HeapHandleCallback>
171
+ static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
172
+ WriteBarrier::Params& params,
173
+ HeapHandleCallback) {
174
+ if (!TryGetCagedHeap(value, value, params)) {
175
+ return WriteBarrier::Type::kNone;
176
+ }
177
+ if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
178
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
179
+ }
180
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
181
+ }
182
+
183
+ template <WriteBarrier::ValueMode value_mode>
184
+ struct ValueModeDispatch;
185
+
186
+ static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
187
+ WriteBarrier::Params& params) {
188
+ // TODO(chromium:1056170): Check if the null check can be folded in with
189
+ // the rest of the write barrier.
190
+ if (!value) return false;
191
+ params.start = reinterpret_cast<uintptr_t>(value) &
192
+ ~(api_constants::kCagedHeapReservationAlignment - 1);
193
+ const uintptr_t slot_offset =
194
+ reinterpret_cast<uintptr_t>(slot) - params.start;
27
195
  if (slot_offset > api_constants::kCagedHeapReservationSize) {
28
196
  // Check if slot is on stack or value is sentinel or nullptr. This relies
29
197
  // on the fact that kSentinelPointer is encoded as 0x1.
30
- return;
198
+ return false;
31
199
  }
200
+ return true;
201
+ }
202
+
203
+ // Returns whether marking is in progress. If marking is not in progress
204
+ // sets the start of the cage accordingly.
205
+ //
206
+ // TODO(chromium:1056170): Create fast path on API.
207
+ static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
208
+ };
32
209
 
33
- CagedHeapLocalData* local_data =
34
- reinterpret_cast<CagedHeapLocalData*>(start);
35
- if (V8_UNLIKELY(local_data->is_marking_in_progress)) {
36
- MarkingBarrierSlow(value);
37
- return;
210
+ template <>
211
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
212
+ WriteBarrier::ValueMode::kValuePresent> {
213
+ template <typename HeapHandleCallback>
214
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
215
+ WriteBarrier::Params& params,
216
+ HeapHandleCallback) {
217
+ #if !defined(CPPGC_YOUNG_GENERATION)
218
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
219
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
220
+ }
221
+ #endif // !CPPGC_YOUNG_GENERATION
222
+ bool within_cage = TryGetCagedHeap(slot, value, params);
223
+ if (!within_cage) {
224
+ return WriteBarrier::Type::kNone;
38
225
  }
226
+ if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
39
227
  #if defined(CPPGC_YOUNG_GENERATION)
40
- GenerationalBarrier(local_data, slot, slot_offset,
41
- reinterpret_cast<uintptr_t>(value) - start);
42
- #endif
43
- #else
44
- if (V8_LIKELY(!ProcessHeap::IsAnyIncrementalOrConcurrentMarking())) return;
228
+ params.heap = reinterpret_cast<HeapHandle*>(params.start);
229
+ params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
230
+ params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
231
+ return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
232
+ #else // !CPPGC_YOUNG_GENERATION
233
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
234
+ #endif // !CPPGC_YOUNG_GENERATION
235
+ }
236
+ params.heap = reinterpret_cast<HeapHandle*>(params.start);
237
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
238
+ }
239
+ };
240
+
241
+ template <>
242
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
243
+ WriteBarrier::ValueMode::kNoValuePresent> {
244
+ template <typename HeapHandleCallback>
245
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
246
+ WriteBarrier::Params& params,
247
+ HeapHandleCallback callback) {
248
+ #if defined(CPPGC_YOUNG_GENERATION)
249
+ HeapHandle& handle = callback();
250
+ if (V8_LIKELY(!IsMarking(handle, params))) {
251
+ // params.start is populated by IsMarking().
252
+ params.heap = &handle;
253
+ params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
254
+ // params.value_offset stays 0.
255
+ if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
256
+ // Check if slot is on stack.
257
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
258
+ }
259
+ return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
260
+ }
261
+ #else // !CPPGC_YOUNG_GENERATION
262
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
263
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
264
+ }
265
+ HeapHandle& handle = callback();
266
+ if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
267
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
268
+ }
269
+ #endif // !CPPGC_YOUNG_GENERATION
270
+ params.heap = &handle;
271
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
272
+ }
273
+ };
45
274
 
46
- MarkingBarrierSlowWithSentinelCheck(value);
47
275
  #endif // CPPGC_CAGED_HEAP
276
+
277
+ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
278
+ public:
279
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
280
+ static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
281
+ WriteBarrier::Params& params,
282
+ HeapHandleCallback callback) {
283
+ return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
48
284
  }
49
285
 
50
- private:
51
- WriteBarrier() = delete;
286
+ template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
287
+ static V8_INLINE WriteBarrier::Type Get(const void* value,
288
+ WriteBarrier::Params& params,
289
+ HeapHandleCallback callback) {
290
+ // The slot will never be used in `Get()` below.
291
+ return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
292
+ callback);
293
+ }
52
294
 
53
- static void MarkingBarrierSlow(const void* value);
54
- static void MarkingBarrierSlowWithSentinelCheck(const void* value);
295
+ template <typename HeapHandleCallback>
296
+ static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
297
+ const void* value, WriteBarrier::Params& params,
298
+ HeapHandleCallback callback) {
299
+ // The slot will never be used in `Get()` below.
300
+ return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
301
+ callback);
302
+ }
55
303
 
56
- #if defined(CPPGC_YOUNG_GENERATION)
57
- static V8_INLINE void GenerationalBarrier(CagedHeapLocalData* local_data,
58
- const void* slot,
59
- uintptr_t slot_offset,
60
- uintptr_t value_offset) {
61
- const AgeTable& age_table = local_data->age_table;
304
+ private:
305
+ template <WriteBarrier::ValueMode value_mode>
306
+ struct ValueModeDispatch;
62
307
 
63
- // Bail out if the slot is in young generation.
64
- if (V8_LIKELY(age_table[slot_offset] == AgeTable::Age::kYoung)) return;
308
+ // TODO(chromium:1056170): Create fast path on API.
309
+ static bool IsMarking(const void*, HeapHandle**);
310
+ // TODO(chromium:1056170): Create fast path on API.
311
+ static bool IsMarking(HeapHandle&);
65
312
 
66
- GenerationalBarrierSlow(local_data, age_table, slot, value_offset);
313
+ WriteBarrierTypeForNonCagedHeapPolicy() = delete;
314
+ };
315
+
316
+ template <>
317
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
318
+ WriteBarrier::ValueMode::kValuePresent> {
319
+ template <typename HeapHandleCallback>
320
+ static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
321
+ WriteBarrier::Params& params,
322
+ HeapHandleCallback callback) {
323
+ // The following check covers nullptr as well as sentinel pointer.
324
+ if (object <= static_cast<void*>(kSentinelPointer)) {
325
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
326
+ }
327
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
328
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
329
+ }
330
+ if (IsMarking(object, &params.heap)) {
331
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
332
+ }
333
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
67
334
  }
335
+ };
68
336
 
69
- static void GenerationalBarrierSlow(CagedHeapLocalData* local_data,
70
- const AgeTable& ageTable,
71
- const void* slot, uintptr_t value_offset);
72
- #endif
337
+ template <>
338
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
339
+ WriteBarrier::ValueMode::kNoValuePresent> {
340
+ template <typename HeapHandleCallback>
341
+ static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
342
+ WriteBarrier::Params& params,
343
+ HeapHandleCallback callback) {
344
+ if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
345
+ HeapHandle& handle = callback();
346
+ if (IsMarking(handle)) {
347
+ params.heap = &handle;
348
+ return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
349
+ }
350
+ }
351
+ return WriteBarrier::Type::kNone;
352
+ }
73
353
  };
74
354
 
355
+ // static
356
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
357
+ const void* slot, const void* value, WriteBarrier::Params& params) {
358
+ return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
359
+ params, []() {});
360
+ }
361
+
362
+ // static
363
+ template <typename HeapHandleCallback>
364
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
365
+ const void* slot, WriteBarrier::Params& params,
366
+ HeapHandleCallback callback) {
367
+ return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
368
+ slot, nullptr, params, callback);
369
+ }
370
+
371
+ // static
372
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
373
+ const void* value, WriteBarrier::Params& params) {
374
+ return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
375
+ []() {});
376
+ }
377
+
378
+ // static
379
+ template <typename HeapHandleCallback>
380
+ WriteBarrier::Type
381
+ WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
382
+ const void* value, Params& params, HeapHandleCallback callback) {
383
+ return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
384
+ callback);
385
+ }
386
+
387
+ // static
388
+ void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
389
+ const void* object) {
390
+ CheckParams(Type::kMarking, params);
391
+ #if defined(CPPGC_CAGED_HEAP)
392
+ // Caged heap already filters out sentinels.
393
+ DijkstraMarkingBarrierSlow(object);
394
+ #else // !CPPGC_CAGED_HEAP
395
+ DijkstraMarkingBarrierSlowWithSentinelCheck(object);
396
+ #endif // !CPPGC_CAGED_HEAP
397
+ }
398
+
399
+ // static
400
+ void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
401
+ const void* first_element,
402
+ size_t element_size,
403
+ size_t number_of_elements,
404
+ TraceCallback trace_callback) {
405
+ CheckParams(Type::kMarking, params);
406
+ DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
407
+ number_of_elements, trace_callback);
408
+ }
409
+
410
+ // static
411
+ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
412
+ const void* object) {
413
+ CheckParams(Type::kMarking, params);
414
+ #if defined(CPPGC_CAGED_HEAP)
415
+ // Caged heap already filters out sentinels.
416
+ SteeleMarkingBarrierSlow(object);
417
+ #else // !CPPGC_CAGED_HEAP
418
+ SteeleMarkingBarrierSlowWithSentinelCheck(object);
419
+ #endif // !CPPGC_CAGED_HEAP
420
+ }
421
+
422
+ #if defined(CPPGC_YOUNG_GENERATION)
423
+ // static
424
+ void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
425
+ CheckParams(Type::kGenerational, params);
426
+
427
+ const CagedHeapLocalData& local_data = params.caged_heap();
428
+ const AgeTable& age_table = local_data.age_table;
429
+
430
+ // Bail out if the slot is in young generation.
431
+ if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;
432
+
433
+ GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
434
+ }
435
+
436
+ #endif // !CPPGC_YOUNG_GENERATION
437
+
75
438
  } // namespace internal
76
439
  } // namespace cppgc
77
440
 
@@ -19,7 +19,7 @@ class LivenessBrokerFactory;
19
19
  /**
20
20
  * The broker is passed to weak callbacks to allow (temporarily) querying
21
21
  * the liveness state of an object. References to non-live objects must be
22
- * cleared when IsHeapObjectAlive() returns false.
22
+ * cleared when `IsHeapObjectAlive()` returns false.
23
23
  *
24
24
  * \code
25
25
  * class GCedWithCustomWeakCallback final
@@ -44,11 +44,20 @@ class V8_EXPORT LivenessBroker final {
44
44
  public:
45
45
  template <typename T>
46
46
  bool IsHeapObjectAlive(const T* object) const {
47
- return object &&
47
+ // nullptr objects are considered alive to allow weakness to be used from
48
+ // stack while running into a conservative GC. Treating nullptr as dead
49
+ // would mean that e.g. custom collections could not be strongified on stack.
50
+ return !object ||
48
51
  IsHeapObjectAliveImpl(
49
52
  TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
50
53
  }
51
54
 
55
+ template <typename T>
56
+ bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
57
+ return (weak_member != kSentinelPointer) &&
58
+ IsHeapObjectAlive<T>(weak_member.Get());
59
+ }
60
+
52
61
  template <typename T>
53
62
  bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
54
63
  return (untraced_member != kSentinelPointer) &&
@@ -5,6 +5,8 @@
5
5
  #ifndef INCLUDE_CPPGC_MACROS_H_
6
6
  #define INCLUDE_CPPGC_MACROS_H_
7
7
 
8
+ #include <cstddef>
9
+
8
10
  #include "cppgc/internal/compiler-specific.h"
9
11
 
10
12
  namespace cppgc {