aha-libv8-node 16.0.0.0-linux

Files changed (67)
  1. checksums.yaml +7 -0
  2. data/ext/libv8-node/.location.yml +1 -0
  3. data/ext/libv8-node/location.rb +76 -0
  4. data/ext/libv8-node/paths.rb +30 -0
  5. data/lib/libv8-node.rb +1 -0
  6. data/lib/libv8/node.rb +11 -0
  7. data/lib/libv8/node/version.rb +7 -0
  8. data/vendor/v8/include/cppgc/allocation.h +229 -0
  9. data/vendor/v8/include/cppgc/common.h +29 -0
  10. data/vendor/v8/include/cppgc/cross-thread-persistent.h +345 -0
  11. data/vendor/v8/include/cppgc/custom-space.h +97 -0
  12. data/vendor/v8/include/cppgc/default-platform.h +75 -0
  13. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  14. data/vendor/v8/include/cppgc/garbage-collected.h +116 -0
  15. data/vendor/v8/include/cppgc/heap-consistency.h +236 -0
  16. data/vendor/v8/include/cppgc/heap-state.h +59 -0
  17. data/vendor/v8/include/cppgc/heap-statistics.h +110 -0
  18. data/vendor/v8/include/cppgc/heap.h +199 -0
  19. data/vendor/v8/include/cppgc/internal/api-constants.h +47 -0
  20. data/vendor/v8/include/cppgc/internal/atomic-entry-flag.h +48 -0
  21. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +68 -0
  22. data/vendor/v8/include/cppgc/internal/compiler-specific.h +38 -0
  23. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +90 -0
  24. data/vendor/v8/include/cppgc/internal/gc-info.h +47 -0
  25. data/vendor/v8/include/cppgc/internal/logging.h +50 -0
  26. data/vendor/v8/include/cppgc/internal/name-trait.h +111 -0
  27. data/vendor/v8/include/cppgc/internal/persistent-node.h +132 -0
  28. data/vendor/v8/include/cppgc/internal/pointer-policies.h +143 -0
  29. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +30 -0
  30. data/vendor/v8/include/cppgc/internal/write-barrier.h +390 -0
  31. data/vendor/v8/include/cppgc/liveness-broker.h +74 -0
  32. data/vendor/v8/include/cppgc/macros.h +26 -0
  33. data/vendor/v8/include/cppgc/member.h +271 -0
  34. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  35. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  36. data/vendor/v8/include/cppgc/persistent.h +365 -0
  37. data/vendor/v8/include/cppgc/platform.h +151 -0
  38. data/vendor/v8/include/cppgc/prefinalizer.h +52 -0
  39. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  40. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  41. data/vendor/v8/include/cppgc/source-location.h +91 -0
  42. data/vendor/v8/include/cppgc/testing.h +50 -0
  43. data/vendor/v8/include/cppgc/trace-trait.h +116 -0
  44. data/vendor/v8/include/cppgc/type-traits.h +228 -0
  45. data/vendor/v8/include/cppgc/visitor.h +340 -0
  46. data/vendor/v8/include/libplatform/libplatform-export.h +29 -0
  47. data/vendor/v8/include/libplatform/libplatform.h +117 -0
  48. data/vendor/v8/include/libplatform/v8-tracing.h +334 -0
  49. data/vendor/v8/include/v8-cppgc.h +278 -0
  50. data/vendor/v8/include/v8-fast-api-calls.h +419 -0
  51. data/vendor/v8/include/v8-inspector-protocol.h +13 -0
  52. data/vendor/v8/include/v8-inspector.h +336 -0
  53. data/vendor/v8/include/v8-internal.h +462 -0
  54. data/vendor/v8/include/v8-metrics.h +189 -0
  55. data/vendor/v8/include/v8-platform.h +710 -0
  56. data/vendor/v8/include/v8-profiler.h +1116 -0
  57. data/vendor/v8/include/v8-unwinder-state.h +30 -0
  58. data/vendor/v8/include/v8-util.h +652 -0
  59. data/vendor/v8/include/v8-value-serializer-version.h +24 -0
  60. data/vendor/v8/include/v8-version-string.h +38 -0
  61. data/vendor/v8/include/v8-version.h +20 -0
  62. data/vendor/v8/include/v8-wasm-trap-handler-posix.h +31 -0
  63. data/vendor/v8/include/v8-wasm-trap-handler-win.h +28 -0
  64. data/vendor/v8/include/v8.h +12479 -0
  65. data/vendor/v8/include/v8config.h +521 -0
  66. data/vendor/v8/out.gn/libv8/obj/libv8_monolith.a +0 -0
  67. metadata +137 -0
data/vendor/v8/include/cppgc/internal/pointer-policies.h
@@ -0,0 +1,143 @@
+ // Copyright 2020 the V8 project authors. All rights reserved.
+ // Use of this source code is governed by a BSD-style license that can be
+ // found in the LICENSE file.
+
+ #ifndef INCLUDE_CPPGC_INTERNAL_POINTER_POLICIES_H_
+ #define INCLUDE_CPPGC_INTERNAL_POINTER_POLICIES_H_
+
+ #include <cstdint>
+ #include <type_traits>
+
+ #include "cppgc/internal/write-barrier.h"
+ #include "cppgc/source-location.h"
+ #include "v8config.h"  // NOLINT(build/include_directory)
+
+ namespace cppgc {
+ namespace internal {
+
+ class PersistentRegion;
+
+ // Tags to distinguish between strong and weak member types.
+ class StrongMemberTag;
+ class WeakMemberTag;
+ class UntracedMemberTag;
+
+ struct DijkstraWriteBarrierPolicy {
+   static void InitializingBarrier(const void*, const void*) {
+     // Since in initializing writes the source object is always white, having no
+     // barrier doesn't break the tri-color invariant.
+   }
+   static void AssigningBarrier(const void* slot, const void* value) {
+     WriteBarrier::Params params;
+     switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
+       case WriteBarrier::Type::kGenerational:
+         WriteBarrier::GenerationalBarrier(params, slot);
+         break;
+       case WriteBarrier::Type::kMarking:
+         WriteBarrier::DijkstraMarkingBarrier(params, value);
+         break;
+       case WriteBarrier::Type::kNone:
+         break;
+     }
+   }
+ };
+
+ struct NoWriteBarrierPolicy {
+   static void InitializingBarrier(const void*, const void*) {}
+   static void AssigningBarrier(const void*, const void*) {}
+ };
+
+ class V8_EXPORT EnabledCheckingPolicy {
+  protected:
+   EnabledCheckingPolicy();
+   void CheckPointer(const void* ptr);
+
+  private:
+   void* impl_;
+ };
+
+ class DisabledCheckingPolicy {
+  protected:
+   void CheckPointer(const void* raw) {}
+ };
+
+ #if V8_ENABLE_CHECKS
+ using DefaultCheckingPolicy = EnabledCheckingPolicy;
+ #else
+ using DefaultCheckingPolicy = DisabledCheckingPolicy;
+ #endif
+
+ class KeepLocationPolicy {
+  public:
+   constexpr const SourceLocation& Location() const { return location_; }
+
+  protected:
+   constexpr KeepLocationPolicy() = default;
+   constexpr explicit KeepLocationPolicy(const SourceLocation& location)
+       : location_(location) {}
+
+   // KeepLocationPolicy must not copy underlying source locations.
+   KeepLocationPolicy(const KeepLocationPolicy&) = delete;
+   KeepLocationPolicy& operator=(const KeepLocationPolicy&) = delete;
+
+   // Location of the original moved from object should be preserved.
+   KeepLocationPolicy(KeepLocationPolicy&&) = default;
+   KeepLocationPolicy& operator=(KeepLocationPolicy&&) = default;
+
+  private:
+   SourceLocation location_;
+ };
+
+ class IgnoreLocationPolicy {
+  public:
+   constexpr SourceLocation Location() const { return {}; }
+
+  protected:
+   constexpr IgnoreLocationPolicy() = default;
+   constexpr explicit IgnoreLocationPolicy(const SourceLocation&) {}
+ };
+
+ #if CPPGC_SUPPORTS_OBJECT_NAMES
+ using DefaultLocationPolicy = KeepLocationPolicy;
+ #else
+ using DefaultLocationPolicy = IgnoreLocationPolicy;
+ #endif
+
+ struct StrongPersistentPolicy {
+   using IsStrongPersistent = std::true_type;
+   static V8_EXPORT PersistentRegion& GetPersistentRegion(const void* object);
+ };
+
+ struct WeakPersistentPolicy {
+   using IsStrongPersistent = std::false_type;
+   static V8_EXPORT PersistentRegion& GetPersistentRegion(const void* object);
+ };
+
+ struct StrongCrossThreadPersistentPolicy {
+   using IsStrongPersistent = std::true_type;
+   static V8_EXPORT PersistentRegion& GetPersistentRegion(const void* object);
+ };
+
+ struct WeakCrossThreadPersistentPolicy {
+   using IsStrongPersistent = std::false_type;
+   static V8_EXPORT PersistentRegion& GetPersistentRegion(const void* object);
+ };
+
+ // Forward declarations setting up the default policies.
+ template <typename T, typename WeaknessPolicy,
+           typename LocationPolicy = DefaultLocationPolicy,
+           typename CheckingPolicy = DisabledCheckingPolicy>
+ class BasicCrossThreadPersistent;
+ template <typename T, typename WeaknessPolicy,
+           typename LocationPolicy = DefaultLocationPolicy,
+           typename CheckingPolicy = DefaultCheckingPolicy>
+ class BasicPersistent;
+ template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
+           typename CheckingPolicy = DefaultCheckingPolicy>
+ class BasicMember;
+
+ }  // namespace internal
+
+ }  // namespace cppgc
+
+ #endif  // INCLUDE_CPPGC_INTERNAL_POINTER_POLICIES_H_
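The policy classes in this header never appear on their own; they are mixed into the BasicMember and BasicPersistent templates forward-declared at the end of the file, which take a write-barrier policy, a checking policy, and (for persistents) a location policy as template parameters. The following minimal sketch illustrates that composition with stand-in policies; ExampleMember, LoggingWriteBarrierPolicy, and NoCheckingPolicy are invented for this illustration and are not part of the gem or of cppgc (in the vendored headers, cppgc::Member is a BasicMember instantiated with DijkstraWriteBarrierPolicy and the default checking policy).

#include <iostream>

// Stand-in policies for this sketch only; the real DijkstraWriteBarrierPolicy
// and checking policies are the ones defined in pointer-policies.h above.
struct LoggingWriteBarrierPolicy {
  static void InitializingBarrier(const void*, const void*) {}
  static void AssigningBarrier(const void* slot, const void* value) {
    std::cout << "write barrier: slot=" << slot << " value=" << value << "\n";
  }
};

struct NoCheckingPolicy {
  void CheckPointer(const void*) {}
};

// Minimal Member-like wrapper showing how a write-barrier policy and a
// checking policy are mixed in via template parameters. This mirrors the
// shape of cppgc::internal::BasicMember, not its actual implementation.
template <typename T, typename WriteBarrierPolicy, typename CheckingPolicy>
class ExampleMember : private CheckingPolicy {
 public:
  explicit ExampleMember(T* raw) : raw_(raw) {
    this->CheckPointer(raw_);
    WriteBarrierPolicy::InitializingBarrier(&raw_, raw_);
  }
  ExampleMember& operator=(T* other) {
    raw_ = other;
    this->CheckPointer(raw_);
    WriteBarrierPolicy::AssigningBarrier(&raw_, raw_);
    return *this;
  }
  T* Get() const { return raw_; }

 private:
  T* raw_ = nullptr;
};

int main() {
  int a = 1, b = 2;
  ExampleMember<int, LoggingWriteBarrierPolicy, NoCheckingPolicy> member(&a);
  member = &b;  // assignment goes through the assigning barrier
  std::cout << *member.Get() << "\n";
  return 0;
}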
data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h
@@ -0,0 +1,30 @@
+ // Copyright 2020 the V8 project authors. All rights reserved.
+ // Use of this source code is governed by a BSD-style license that can be
+ // found in the LICENSE file.
+
+ #ifndef INCLUDE_CPPGC_INTERNAL_PREFINALIZER_HANDLER_H_
+ #define INCLUDE_CPPGC_INTERNAL_PREFINALIZER_HANDLER_H_
+
+ #include "cppgc/heap.h"
+ #include "cppgc/liveness-broker.h"
+
+ namespace cppgc {
+ namespace internal {
+
+ class V8_EXPORT PreFinalizerRegistrationDispatcher final {
+  public:
+   using PreFinalizerCallback = bool (*)(const LivenessBroker&, void*);
+   struct PreFinalizer {
+     void* object;
+     PreFinalizerCallback callback;
+
+     bool operator==(const PreFinalizer& other) const;
+   };
+
+   static void RegisterPrefinalizer(PreFinalizer pre_finalizer);
+ };
+
+ }  // namespace internal
+ }  // namespace cppgc
+
+ #endif  // INCLUDE_CPPGC_INTERNAL_PREFINALIZER_HANDLER_H_
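PreFinalizerRegistrationDispatcher only defines the registration interface: a PreFinalizer is a raw object pointer paired with a callback of type bool (*)(const LivenessBroker&, void*). The sketch below shows the shape of such a callback and a toy registry that invokes it during a simulated sweep; LivenessBrokerStub, Registry, and Resource are stand-ins invented for this illustration, not the cppgc implementation (the vendored cppgc/prefinalizer.h generates the real callbacks via its CPPGC_USING_PRE_FINALIZER macro).

#include <iostream>
#include <vector>

// Stand-in for cppgc::LivenessBroker; the real type lives in the vendored
// headers and requires the cppgc runtime.
struct LivenessBrokerStub {
  bool IsHeapObjectAlive(const void*) const { return false; }
};

using PreFinalizerCallback = bool (*)(const LivenessBrokerStub&, void*);

struct PreFinalizer {
  void* object;
  PreFinalizerCallback callback;
};

// Toy registry mirroring the shape of
// PreFinalizerRegistrationDispatcher::RegisterPrefinalizer().
std::vector<PreFinalizer>& Registry() {
  static std::vector<PreFinalizer> registry;
  return registry;
}

struct Resource {
  // Matches the PreFinalizerCallback signature: returns false to stay
  // registered while the object is alive, true once the pre-finalizer ran.
  static bool InvokePreFinalizer(const LivenessBrokerStub& broker, void* self) {
    if (broker.IsHeapObjectAlive(self)) return false;
    static_cast<Resource*>(self)->ReleaseExternal();
    return true;
  }
  void ReleaseExternal() { std::cout << "released external resource\n"; }
};

int main() {
  Resource r;
  Registry().push_back({&r, &Resource::InvokePreFinalizer});

  // Simulate the sweep phase: run each registered pre-finalizer.
  LivenessBrokerStub broker;
  for (const PreFinalizer& pf : Registry()) pf.callback(broker, pf.object);
  return 0;
}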
data/vendor/v8/include/cppgc/internal/write-barrier.h
@@ -0,0 +1,390 @@
+ // Copyright 2020 the V8 project authors. All rights reserved.
+ // Use of this source code is governed by a BSD-style license that can be
+ // found in the LICENSE file.
+
+ #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
+ #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
+
+ #include "cppgc/heap-state.h"
+ #include "cppgc/internal/api-constants.h"
+ #include "cppgc/internal/atomic-entry-flag.h"
+ #include "cppgc/sentinel-pointer.h"
+ #include "cppgc/trace-trait.h"
+ #include "v8config.h"  // NOLINT(build/include_directory)
+
+ #if defined(CPPGC_CAGED_HEAP)
+ #include "cppgc/internal/caged-heap-local-data.h"
+ #endif
+
+ namespace cppgc {
+
+ class HeapHandle;
+
+ namespace internal {
+
+ class WriteBarrierTypeForCagedHeapPolicy;
+ class WriteBarrierTypeForNonCagedHeapPolicy;
+
+ class V8_EXPORT WriteBarrier final {
+  public:
+   enum class Type : uint8_t {
+     kNone,
+     kMarking,
+     kGenerational,
+   };
+
+   struct Params {
+     HeapHandle* heap = nullptr;
+ #if V8_ENABLE_CHECKS
+     Type type = Type::kNone;
+ #endif  // !V8_ENABLE_CHECKS
+ #if defined(CPPGC_CAGED_HEAP)
+     uintptr_t start = 0;
+     CagedHeapLocalData& caged_heap() const {
+       return *reinterpret_cast<CagedHeapLocalData*>(start);
+     }
+     uintptr_t slot_offset = 0;
+     uintptr_t value_offset = 0;
+ #endif  // CPPGC_CAGED_HEAP
+   };
+
+   enum class ValueMode {
+     kValuePresent,
+     kNoValuePresent,
+   };
+
+   // Returns the required write barrier for a given `slot` and `value`.
+   static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
+                                             Params& params);
+   // Returns the required write barrier for a given `slot`.
+   template <typename HeapHandleCallback>
+   static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
+                                             HeapHandleCallback callback);
+
+   template <typename HeapHandleCallback>
+   static V8_INLINE Type GetWriteBarrierTypeForExternallyReferencedObject(
+       const void* value, Params& params, HeapHandleCallback callback);
+
+   static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
+                                                const void* object);
+   static V8_INLINE void DijkstraMarkingBarrierRange(
+       const Params& params, const void* first_element, size_t element_size,
+       size_t number_of_elements, TraceCallback trace_callback);
+   static V8_INLINE void SteeleMarkingBarrier(const Params& params,
+                                              const void* object);
+ #if defined(CPPGC_YOUNG_GENERATION)
+   static V8_INLINE void GenerationalBarrier(const Params& params,
+                                             const void* slot);
+ #else  // !CPPGC_YOUNG_GENERATION
+   static V8_INLINE void GenerationalBarrier(const Params& params,
+                                             const void* slot) {}
+ #endif  // CPPGC_YOUNG_GENERATION
+
+ #if V8_ENABLE_CHECKS
+   static void CheckParams(Type expected_type, const Params& params);
+ #else  // !V8_ENABLE_CHECKS
+   static void CheckParams(Type expected_type, const Params& params) {}
+ #endif  // !V8_ENABLE_CHECKS
+
+   // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
+   // |incremental_or_concurrent_marking_flag_|.
+   class IncrementalOrConcurrentMarkingFlagUpdater;
+   static bool IsAnyIncrementalOrConcurrentMarking() {
+     return incremental_or_concurrent_marking_flag_.MightBeEntered();
+   }
+
+  private:
+   WriteBarrier() = delete;
+
+ #if defined(CPPGC_CAGED_HEAP)
+   using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
+ #else  // !CPPGC_CAGED_HEAP
+   using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
+ #endif  // !CPPGC_CAGED_HEAP
+
+   static void DijkstraMarkingBarrierSlow(const void* value);
+   static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
+   static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
+                                               const void* first_element,
+                                               size_t element_size,
+                                               size_t number_of_elements,
+                                               TraceCallback trace_callback);
+   static void SteeleMarkingBarrierSlow(const void* value);
+   static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
+
+ #if defined(CPPGC_YOUNG_GENERATION)
+   static CagedHeapLocalData& GetLocalData(HeapHandle&);
+   static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
+                                       const AgeTable& ageTable,
+                                       const void* slot, uintptr_t value_offset);
+ #endif  // CPPGC_YOUNG_GENERATION
+
+   static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
+ };
+
+ template <WriteBarrier::Type type>
+ V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
+   if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
+ #if V8_ENABLE_CHECKS
+   params.type = type;
+ #endif  // !V8_ENABLE_CHECKS
+   return type;
+ }
+
+ #if defined(CPPGC_CAGED_HEAP)
+ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
+  public:
+   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback callback) {
+     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+   }
+
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
+       const void* value, WriteBarrier::Params& params, HeapHandleCallback) {
+     if (!TryGetCagedHeap(value, value, params)) {
+       return WriteBarrier::Type::kNone;
+     }
+     if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
+       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+     }
+     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+   }
+
+  private:
+   WriteBarrierTypeForCagedHeapPolicy() = delete;
+
+   template <WriteBarrier::ValueMode value_mode>
+   struct ValueModeDispatch;
+
+   static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
+                                         WriteBarrier::Params& params) {
+     params.start = reinterpret_cast<uintptr_t>(value) &
+                    ~(api_constants::kCagedHeapReservationAlignment - 1);
+     const uintptr_t slot_offset =
+         reinterpret_cast<uintptr_t>(slot) - params.start;
+     if (slot_offset > api_constants::kCagedHeapReservationSize) {
+       // Check if slot is on stack or value is sentinel or nullptr. This relies
+       // on the fact that kSentinelPointer is encoded as 0x1.
+       return false;
+     }
+     return true;
+   }
+
+   // Returns whether marking is in progress. If marking is not in progress
+   // sets the start of the cage accordingly.
+   //
+   // TODO(chromium:1056170): Create fast path on API.
+   static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
+ };
+
+ template <>
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+     WriteBarrier::ValueMode::kValuePresent> {
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback) {
+     bool within_cage = TryGetCagedHeap(slot, value, params);
+     if (!within_cage) {
+       return WriteBarrier::Type::kNone;
+     }
+     if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
+ #if defined(CPPGC_YOUNG_GENERATION)
+       params.heap = reinterpret_cast<HeapHandle*>(params.start);
+       params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+       params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+ #else  // !CPPGC_YOUNG_GENERATION
+       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+ #endif  // !CPPGC_YOUNG_GENERATION
+     }
+     params.heap = reinterpret_cast<HeapHandle*>(params.start);
+     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+   }
+ };
+
+ template <>
+ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
+     WriteBarrier::ValueMode::kNoValuePresent> {
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback callback) {
+ #if defined(CPPGC_YOUNG_GENERATION)
+     HeapHandle& handle = callback();
+     if (V8_LIKELY(!IsMarking(handle, params))) {
+       // params.start is populated by IsMarking().
+       params.heap = &handle;
+       params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
+       // params.value_offset stays 0.
+       if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
+         // Check if slot is on stack.
+         return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+       }
+       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
+     }
+ #else  // !CPPGC_YOUNG_GENERATION
+     if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+     }
+     HeapHandle& handle = callback();
+     if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+     }
+ #endif  // !CPPGC_YOUNG_GENERATION
+     params.heap = &handle;
+     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+   }
+ };
+
+ #endif  // CPPGC_CAGED_HEAP
+
+ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
+  public:
+   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback callback) {
+     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+   }
+
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type GetForExternallyReferenced(
+       const void* value, WriteBarrier::Params& params,
+       HeapHandleCallback callback) {
+     // The slot will never be used in `Get()` below.
+     return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
+                                                        callback);
+   }
+
+  private:
+   template <WriteBarrier::ValueMode value_mode>
+   struct ValueModeDispatch;
+
+   // TODO(chromium:1056170): Create fast path on API.
+   static bool IsMarking(const void*, HeapHandle**);
+   // TODO(chromium:1056170): Create fast path on API.
+   static bool IsMarking(HeapHandle&);
+
+   WriteBarrierTypeForNonCagedHeapPolicy() = delete;
+ };
+
+ template <>
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+     WriteBarrier::ValueMode::kValuePresent> {
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback callback) {
+     // The following check covers nullptr as well as sentinel pointer.
+     if (object <= static_cast<void*>(kSentinelPointer)) {
+       return WriteBarrier::Type::kNone;
+     }
+     if (IsMarking(object, &params.heap)) {
+       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+     }
+     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+   }
+ };
+
+ template <>
+ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
+     WriteBarrier::ValueMode::kNoValuePresent> {
+   template <typename HeapHandleCallback>
+   static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
+                                           WriteBarrier::Params& params,
+                                           HeapHandleCallback callback) {
+     if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+       HeapHandle& handle = callback();
+       if (IsMarking(handle)) {
+         params.heap = &handle;
+         return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
+       }
+     }
+     return WriteBarrier::Type::kNone;
+   }
+ };
+
+ // static
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+     const void* slot, const void* value, WriteBarrier::Params& params) {
+   return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
+                                                                params, []() {});
+ }
+
+ // static
+ template <typename HeapHandleCallback>
+ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+     const void* slot, WriteBarrier::Params& params,
+     HeapHandleCallback callback) {
+   return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
+       slot, nullptr, params, callback);
+ }
+
+ // static
+ template <typename HeapHandleCallback>
+ WriteBarrier::Type
+ WriteBarrier::GetWriteBarrierTypeForExternallyReferencedObject(
+     const void* value, Params& params, HeapHandleCallback callback) {
+   return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
+                                                             callback);
+ }
+
+ // static
+ void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
+                                           const void* object) {
+   CheckParams(Type::kMarking, params);
+ #if defined(CPPGC_CAGED_HEAP)
+   // Caged heap already filters out sentinels.
+   DijkstraMarkingBarrierSlow(object);
+ #else  // !CPPGC_CAGED_HEAP
+   DijkstraMarkingBarrierSlowWithSentinelCheck(object);
+ #endif  // !CPPGC_CAGED_HEAP
+ }
+
+ // static
+ void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
+                                                const void* first_element,
+                                                size_t element_size,
+                                                size_t number_of_elements,
+                                                TraceCallback trace_callback) {
+   CheckParams(Type::kMarking, params);
+   DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
+                                   number_of_elements, trace_callback);
+ }
+
+ // static
+ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
+                                         const void* object) {
+   CheckParams(Type::kMarking, params);
+ #if defined(CPPGC_CAGED_HEAP)
+   // Caged heap already filters out sentinels.
+   SteeleMarkingBarrierSlow(object);
+ #else  // !CPPGC_CAGED_HEAP
+   SteeleMarkingBarrierSlowWithSentinelCheck(object);
+ #endif  // !CPPGC_CAGED_HEAP
+ }
+
+ #if defined(CPPGC_YOUNG_GENERATION)
+ // static
+ void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
+   CheckParams(Type::kGenerational, params);
+
+   const CagedHeapLocalData& local_data = params.caged_heap();
+   const AgeTable& age_table = local_data.age_table;
+
+   // Bail out if the slot is in young generation.
+   if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;
+
+   GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
+ }
+
+ #endif  // !CPPGC_YOUNG_GENERATION
+
+ }  // namespace internal
+ }  // namespace cppgc
+
+ #endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
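All of the barrier machinery above is consumed through one pattern: call WriteBarrier::GetWriteBarrierType() once to classify the write, then dispatch to the matching barrier, exactly as DijkstraWriteBarrierPolicy::AssigningBarrier does in pointer-policies.h. The self-contained sketch below mirrors that dispatch; WriteBarrierStub is an invented stand-in for the real WriteBarrier class (which requires the full cppgc runtime) and simply pretends marking is in progress so the kMarking path is exercised.

#include <cstdint>
#include <iostream>

// Stand-in for cppgc::internal::WriteBarrier from the header above; the real
// class needs the cppgc runtime, so this sketch hard-codes its answer.
struct WriteBarrierStub {
  enum class Type : uint8_t { kNone, kMarking, kGenerational };
  struct Params {
    void* heap = nullptr;
  };

  static Type GetWriteBarrierType(const void*, const void*, Params&) {
    return Type::kMarking;  // pretend incremental marking is active
  }
  static void DijkstraMarkingBarrier(const Params&, const void* value) {
    std::cout << "re-mark value " << value << "\n";
  }
  static void GenerationalBarrier(const Params&, const void* slot) {
    std::cout << "record old-to-young slot " << slot << "\n";
  }
};

// Same query-then-dispatch pattern as
// DijkstraWriteBarrierPolicy::AssigningBarrier in pointer-policies.h.
void AssigningBarrier(const void* slot, const void* value) {
  WriteBarrierStub::Params params;
  switch (WriteBarrierStub::GetWriteBarrierType(slot, value, params)) {
    case WriteBarrierStub::Type::kGenerational:
      WriteBarrierStub::GenerationalBarrier(params, slot);
      break;
    case WriteBarrierStub::Type::kMarking:
      WriteBarrierStub::DijkstraMarkingBarrier(params, value);
      break;
    case WriteBarrierStub::Type::kNone:
      break;
  }
}

int main() {
  int object = 42;
  int* slot = &object;            // the field being written
  AssigningBarrier(&slot, slot);  // barrier runs after the store
  return 0;
}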