libv8-node 16.10.0.0-arm64-darwin

Files changed (68)
  1. checksums.yaml +7 -0
  2. data/ext/libv8-node/.location.yml +1 -0
  3. data/ext/libv8-node/location.rb +76 -0
  4. data/ext/libv8-node/paths.rb +34 -0
  5. data/lib/libv8/node/version.rb +7 -0
  6. data/lib/libv8/node.rb +11 -0
  7. data/lib/libv8-node.rb +1 -0
  8. data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a +0 -0
  9. data/vendor/v8/include/cppgc/allocation.h +232 -0
  10. data/vendor/v8/include/cppgc/common.h +29 -0
  11. data/vendor/v8/include/cppgc/cross-thread-persistent.h +384 -0
  12. data/vendor/v8/include/cppgc/custom-space.h +97 -0
  13. data/vendor/v8/include/cppgc/default-platform.h +75 -0
  14. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  15. data/vendor/v8/include/cppgc/explicit-management.h +82 -0
  16. data/vendor/v8/include/cppgc/garbage-collected.h +117 -0
  17. data/vendor/v8/include/cppgc/heap-consistency.h +236 -0
  18. data/vendor/v8/include/cppgc/heap-state.h +70 -0
  19. data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
  20. data/vendor/v8/include/cppgc/heap.h +201 -0
  21. data/vendor/v8/include/cppgc/internal/api-constants.h +47 -0
  22. data/vendor/v8/include/cppgc/internal/atomic-entry-flag.h +48 -0
  23. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +68 -0
  24. data/vendor/v8/include/cppgc/internal/compiler-specific.h +38 -0
  25. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +90 -0
  26. data/vendor/v8/include/cppgc/internal/gc-info.h +76 -0
  27. data/vendor/v8/include/cppgc/internal/logging.h +50 -0
  28. data/vendor/v8/include/cppgc/internal/name-trait.h +111 -0
  29. data/vendor/v8/include/cppgc/internal/persistent-node.h +172 -0
  30. data/vendor/v8/include/cppgc/internal/pointer-policies.h +175 -0
  31. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +30 -0
  32. data/vendor/v8/include/cppgc/internal/write-barrier.h +396 -0
  33. data/vendor/v8/include/cppgc/liveness-broker.h +74 -0
  34. data/vendor/v8/include/cppgc/macros.h +26 -0
  35. data/vendor/v8/include/cppgc/member.h +286 -0
  36. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  37. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  38. data/vendor/v8/include/cppgc/persistent.h +365 -0
  39. data/vendor/v8/include/cppgc/platform.h +153 -0
  40. data/vendor/v8/include/cppgc/prefinalizer.h +52 -0
  41. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  42. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  43. data/vendor/v8/include/cppgc/source-location.h +92 -0
  44. data/vendor/v8/include/cppgc/testing.h +99 -0
  45. data/vendor/v8/include/cppgc/trace-trait.h +116 -0
  46. data/vendor/v8/include/cppgc/type-traits.h +247 -0
  47. data/vendor/v8/include/cppgc/visitor.h +377 -0
  48. data/vendor/v8/include/libplatform/libplatform-export.h +29 -0
  49. data/vendor/v8/include/libplatform/libplatform.h +117 -0
  50. data/vendor/v8/include/libplatform/v8-tracing.h +334 -0
  51. data/vendor/v8/include/v8-cppgc.h +325 -0
  52. data/vendor/v8/include/v8-fast-api-calls.h +791 -0
  53. data/vendor/v8/include/v8-inspector-protocol.h +13 -0
  54. data/vendor/v8/include/v8-inspector.h +348 -0
  55. data/vendor/v8/include/v8-internal.h +499 -0
  56. data/vendor/v8/include/v8-metrics.h +202 -0
  57. data/vendor/v8/include/v8-platform.h +709 -0
  58. data/vendor/v8/include/v8-profiler.h +1123 -0
  59. data/vendor/v8/include/v8-unwinder-state.h +30 -0
  60. data/vendor/v8/include/v8-util.h +652 -0
  61. data/vendor/v8/include/v8-value-serializer-version.h +24 -0
  62. data/vendor/v8/include/v8-version-string.h +38 -0
  63. data/vendor/v8/include/v8-version.h +20 -0
  64. data/vendor/v8/include/v8-wasm-trap-handler-posix.h +31 -0
  65. data/vendor/v8/include/v8-wasm-trap-handler-win.h +28 -0
  66. data/vendor/v8/include/v8.h +12648 -0
  67. data/vendor/v8/include/v8config.h +515 -0
  68. metadata +138 -0
data/vendor/v8/include/cppgc/internal/write-barrier.h
@@ -0,0 +1,396 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
+#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
+
+#include <cstddef>
+#include <cstdint>
+
+#include "cppgc/heap-state.h"
+#include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/atomic-entry-flag.h"
+#include "cppgc/sentinel-pointer.h"
+#include "cppgc/trace-trait.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+#if defined(CPPGC_CAGED_HEAP)
+#include "cppgc/internal/caged-heap-local-data.h"
+#endif
+
+namespace cppgc {
+
+class HeapHandle;
+
+namespace internal {
+
+#if defined(CPPGC_CAGED_HEAP)
+class WriteBarrierTypeForCagedHeapPolicy;
+#else   // !CPPGC_CAGED_HEAP
+class WriteBarrierTypeForNonCagedHeapPolicy;
+#endif  // !CPPGC_CAGED_HEAP
+
+class V8_EXPORT WriteBarrier final {
+ public:
+  enum class Type : uint8_t {
+    kNone,
+    kMarking,
+    kGenerational,
+  };
+
+  struct Params {
+    HeapHandle* heap = nullptr;
+#if V8_ENABLE_CHECKS
+    Type type = Type::kNone;
+#endif  // !V8_ENABLE_CHECKS
+#if defined(CPPGC_CAGED_HEAP)
+    uintptr_t start = 0;
+    CagedHeapLocalData& caged_heap() const {
+      return *reinterpret_cast<CagedHeapLocalData*>(start);
+    }
+    uintptr_t slot_offset = 0;
+    uintptr_t value_offset = 0;
+#endif  // CPPGC_CAGED_HEAP
+  };
+
+  enum class ValueMode {
+    kValuePresent,
+    kNoValuePresent,
+  };
+
+  // Returns the required write barrier for a given `slot` and `value`.
+  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
+      Params& params);
+  // Returns the required write barrier for a given `slot`.
+  template <typename HeapHandleCallback>
+  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
+      HeapHandleCallback callback);
+
+  template <typename HeapHandleCallback>
+  static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
+      const void* value, Params& params, HeapHandleCallback callback);
+
+  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
+      const void* object);
+  static V8_INLINE void DijkstraMarkingBarrierRange(
+      const Params& params, const void* first_element, size_t element_size,
+      size_t number_of_elements, TraceCallback trace_callback);
+  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
+      const void* object);
+#if defined(CPPGC_YOUNG_GENERATION)
+  static V8_INLINE void GenerationalBarrier(const Params& params,
+      const void* slot);
+#else   // !CPPGC_YOUNG_GENERATION
+  static V8_INLINE void GenerationalBarrier(const Params& params,
+      const void* slot) {}
+#endif  // CPPGC_YOUNG_GENERATION
+
+#if V8_ENABLE_CHECKS
+  static void CheckParams(Type expected_type, const Params& params);
+#else   // !V8_ENABLE_CHECKS
+  static void CheckParams(Type expected_type, const Params& params) {}
+#endif  // !V8_ENABLE_CHECKS
+
+  // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
+  // |incremental_or_concurrent_marking_flag_|.
+  class IncrementalOrConcurrentMarkingFlagUpdater;
+  static bool IsAnyIncrementalOrConcurrentMarking() {
+    return incremental_or_concurrent_marking_flag_.MightBeEntered();
+  }
+
+ private:
+  WriteBarrier() = delete;
+
+#if defined(CPPGC_CAGED_HEAP)
+  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
+#else   // !CPPGC_CAGED_HEAP
+  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
+#endif  // !CPPGC_CAGED_HEAP
+
+  static void DijkstraMarkingBarrierSlow(const void* value);
+  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
+  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
+      const void* first_element,
+      size_t element_size,
+      size_t number_of_elements,
+      TraceCallback trace_callback);
+  static void SteeleMarkingBarrierSlow(const void* value);
+  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
+
+#if defined(CPPGC_YOUNG_GENERATION)
+  static CagedHeapLocalData& GetLocalData(HeapHandle&);
+  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
+      const AgeTable& ageTable,
+      const void* slot, uintptr_t value_offset);
+#endif  // CPPGC_YOUNG_GENERATION
+
+  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
+};
+
+template <WriteBarrier::Type type>
+V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
+  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
+#if V8_ENABLE_CHECKS
+  params.type = type;
+#endif  // !V8_ENABLE_CHECKS
+  return type;
+}
+
+#if defined(CPPGC_CAGED_HEAP)
+class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
+ public:
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+      WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+  }
+
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
+      const void* value, WriteBarrier::Params& params, HeapHandleCallback) {
+    if (!TryGetCagedHeap(value, value, params)) {
+      return WriteBarrier::Type::kNone;
+    }
+    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
+      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+    }
+    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+  }
+
+ private:
+  WriteBarrierTypeForCagedHeapPolicy() = delete;
+
+  template <WriteBarrier::ValueMode value_mode>
+  struct ValueModeDispatch;
+
+  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
+      WriteBarrier::Params& params) {
+    params.start = reinterpret_cast<uintptr_t>(value) &
+        ~(api_constants::kCagedHeapReservationAlignment - 1);
+    const uintptr_t slot_offset =
+        reinterpret_cast<uintptr_t>(slot) - params.start;
+    if (slot_offset > api_constants::kCagedHeapReservationSize) {
+      // Check if slot is on stack or value is sentinel or nullptr. This relies
+      // on the fact that kSentinelPointer is encoded as 0x1.
+      return false;
+    }
+    return true;
+  }
+
+  // Returns whether marking is in progress. If marking is not in progress
+  // sets the start of the cage accordingly.
+  //
+  // TODO(chromium:1056170): Create fast path on API.
+  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
+};
+
+template <>
+struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+      WriteBarrier::Params& params,
+      HeapHandleCallback) {
+    bool within_cage = TryGetCagedHeap(slot, value, params);
+    if (!within_cage) {
+      return WriteBarrier::Type::kNone;
+    }
+    if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
+#if defined(CPPGC_YOUNG_GENERATION)
+      params.heap = reinterpret_cast<HeapHandle*>(params.start);
+      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+#else   // !CPPGC_YOUNG_GENERATION
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+#endif  // !CPPGC_YOUNG_GENERATION
+    }
+    params.heap = reinterpret_cast<HeapHandle*>(params.start);
+    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+  }
+};
+
+template <>
+struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kNoValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
+      WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+#if defined(CPPGC_YOUNG_GENERATION)
+    HeapHandle& handle = callback();
+    if (V8_LIKELY(!IsMarking(handle, params))) {
+      // params.start is populated by IsMarking().
+      params.heap = &handle;
+      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+      // params.value_offset stays 0.
+      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
+        // Check if slot is on stack.
+        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+      }
+      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+    }
+#else   // !CPPGC_YOUNG_GENERATION
+    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+    }
+    HeapHandle& handle = callback();
+    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+    }
+#endif  // !CPPGC_YOUNG_GENERATION
+    params.heap = &handle;
+    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+  }
+};
+
+#endif  // CPPGC_CAGED_HEAP
+
+class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
+ public:
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+      WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+  }
+
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
+      const void* value, WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    // The slot will never be used in `Get()` below.
+    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
+        callback);
+  }
+
+ private:
+  template <WriteBarrier::ValueMode value_mode>
+  struct ValueModeDispatch;
+
+  // TODO(chromium:1056170): Create fast path on API.
+  static bool IsMarking(const void*, HeapHandle**);
+  // TODO(chromium:1056170): Create fast path on API.
+  static bool IsMarking(HeapHandle&);
+
+  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
+};
+
+template <>
+struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
+      WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    // The following check covers nullptr as well as sentinel pointer.
+    if (object <= static_cast<void*>(kSentinelPointer)) {
+      return WriteBarrier::Type::kNone;
+    }
+    if (IsMarking(object, &params.heap)) {
+      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+    }
+    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+  }
+};
+
+template <>
+struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+    WriteBarrier::ValueMode::kNoValuePresent> {
+  template <typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
+      WriteBarrier::Params& params,
+      HeapHandleCallback callback) {
+    if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+      HeapHandle& handle = callback();
+      if (IsMarking(handle)) {
+        params.heap = &handle;
+        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+      }
+    }
+    return WriteBarrier::Type::kNone;
+  }
+};
+
+// static
+WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+    const void* slot, const void* value, WriteBarrier::Params& params) {
+  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
+      params, []() {});
+}
+
+// static
+template <typename HeapHandleCallback>
+WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+    const void* slot, WriteBarrier::Params& params,
+    HeapHandleCallback callback) {
+  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
+      slot, nullptr, params, callback);
+}
+
+// static
+template <typename HeapHandleCallback>
+WriteBarrier::Type
+WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
+    const void* value, Params& params, HeapHandleCallback callback) {
+  return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
+      callback);
+}
+
+// static
+void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
+    const void* object) {
+  CheckParams(Type::kMarking, params);
+#if defined(CPPGC_CAGED_HEAP)
+  // Caged heap already filters out sentinels.
+  DijkstraMarkingBarrierSlow(object);
+#else   // !CPPGC_CAGED_HEAP
+  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
+#endif  // !CPPGC_CAGED_HEAP
+}
+
+// static
+void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
+    const void* first_element,
+    size_t element_size,
+    size_t number_of_elements,
+    TraceCallback trace_callback) {
+  CheckParams(Type::kMarking, params);
+  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
+      number_of_elements, trace_callback);
+}
+
+// static
+void WriteBarrier::SteeleMarkingBarrier(const Params& params,
+    const void* object) {
+  CheckParams(Type::kMarking, params);
+#if defined(CPPGC_CAGED_HEAP)
+  // Caged heap already filters out sentinels.
+  SteeleMarkingBarrierSlow(object);
+#else   // !CPPGC_CAGED_HEAP
+  SteeleMarkingBarrierSlowWithSentinelCheck(object);
+#endif  // !CPPGC_CAGED_HEAP
+}
+
+#if defined(CPPGC_YOUNG_GENERATION)
+// static
+void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
+  CheckParams(Type::kGenerational, params);
+
+  const CagedHeapLocalData& local_data = params.caged_heap();
+  const AgeTable& age_table = local_data.age_table;
+
+  // Bail out if the slot is in young generation.
+  if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;
+
+  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
+}
+
+#endif  // !CPPGC_YOUNG_GENERATION
+
+}  // namespace internal
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
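
For orientation, the header above only declares the barrier dispatch; a minimal sketch of the call shape follows. It assumes a hypothetical embedder-side helper (WriteToSlot and the surrounding namespace are invented for illustration), and in practice this work is done by cppgc's own Member/Persistent internals rather than by user code; only cppgc::internal::WriteBarrier and its members come from write-barrier.h.

// Hedged sketch: drive the write-barrier API by hand for one raw slot store.
#include "cppgc/internal/write-barrier.h"

namespace embedder_example {  // hypothetical

using cppgc::internal::WriteBarrier;

inline void WriteToSlot(void** slot, void* value) {
  *slot = value;  // perform the raw store first

  WriteBarrier::Params params;
  switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
    case WriteBarrier::Type::kNone:
      break;  // no barrier work required
    case WriteBarrier::Type::kMarking:
      // Incremental/concurrent marking is active: keep `value` alive.
      WriteBarrier::DijkstraMarkingBarrier(params, value);
      break;
    case WriteBarrier::Type::kGenerational:
      // CPPGC_YOUNG_GENERATION builds: record the old-to-young reference.
      WriteBarrier::GenerationalBarrier(params, slot);
      break;
  }
}

}  // namespace embedder_example

The non-inline slow paths referenced here (DijkstraMarkingBarrierSlow and friends) live in the library itself, so a sketch like this would link against the libv8_monolith.a shipped in this gem.
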
data/vendor/v8/include/cppgc/liveness-broker.h
@@ -0,0 +1,74 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_LIVENESS_BROKER_H_
+#define INCLUDE_CPPGC_LIVENESS_BROKER_H_
+
+#include "cppgc/heap.h"
+#include "cppgc/member.h"
+#include "cppgc/trace-trait.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+
+namespace internal {
+class LivenessBrokerFactory;
+}  // namespace internal
+
+/**
+ * The broker is passed to weak callbacks to allow (temporarily) querying
+ * the liveness state of an object. References to non-live objects must be
+ * cleared when `IsHeapObjectAlive()` returns false.
+ *
+ * \code
+ * class GCedWithCustomWeakCallback final
+ *     : public GarbageCollected<GCedWithCustomWeakCallback> {
+ *  public:
+ *   UntracedMember<Bar> bar;
+ *
+ *   void CustomWeakCallbackMethod(const LivenessBroker& broker) {
+ *     if (!broker.IsHeapObjectAlive(bar))
+ *       bar = nullptr;
+ *   }
+ *
+ *   void Trace(cppgc::Visitor* visitor) const {
+ *     visitor->RegisterWeakCallbackMethod<
+ *         GCedWithCustomWeakCallback,
+ *         &GCedWithCustomWeakCallback::CustomWeakCallbackMethod>(this);
+ *   }
+ * };
+ * \endcode
+ */
+class V8_EXPORT LivenessBroker final {
+ public:
+  template <typename T>
+  bool IsHeapObjectAlive(const T* object) const {
+    return object &&
+        IsHeapObjectAliveImpl(
+            TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
+  }
+
+  template <typename T>
+  bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
+    return (weak_member != kSentinelPointer) &&
+        IsHeapObjectAlive<T>(weak_member.Get());
+  }
+
+  template <typename T>
+  bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
+    return (untraced_member != kSentinelPointer) &&
+        IsHeapObjectAlive<T>(untraced_member.Get());
+  }
+
+ private:
+  LivenessBroker() = default;
+
+  bool IsHeapObjectAliveImpl(const void*) const;
+
+  friend class internal::LivenessBrokerFactory;
+};
+
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_LIVENESS_BROKER_H_
data/vendor/v8/include/cppgc/macros.h
@@ -0,0 +1,26 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_MACROS_H_
+#define INCLUDE_CPPGC_MACROS_H_
+
+#include <cstddef>
+
+#include "cppgc/internal/compiler-specific.h"
+
+namespace cppgc {
+
+// Use if the object is only stack allocated.
+#define CPPGC_STACK_ALLOCATED() \
+ public: \
+  using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
+  \
+ private: \
+  void* operator new(size_t) = delete; \
+  void* operator new(size_t, void*) = delete; \
+  static_assert(true, "Force semicolon.")
+
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_MACROS_H_
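
The one-line comment above CPPGC_STACK_ALLOCATED() is the whole contract: expanding the macro deletes operator new for the class, so instances can only have automatic (stack) storage, and it introduces the IsStackAllocatedTypeMarker alias that cppgc's type traits can detect. A minimal sketch of the intended use, with a hypothetical ScopedVisitor class standing in for real embedder code:

#include "cppgc/macros.h"

// Hypothetical example type; only the macro itself comes from cppgc.
class ScopedVisitor final {
  CPPGC_STACK_ALLOCATED();  // expands to the public/private sections in macros.h

 public:
  explicit ScopedVisitor(int depth) : depth_(depth) {}
  int depth() const { return depth_; }

 private:
  int depth_;
};

void Example() {
  ScopedVisitor v(1);        // fine: automatic (stack) storage
  // new ScopedVisitor(2);   // would not compile: operator new is deleted
  static_cast<void>(v.depth());
}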