libv8-node 18.13.0.1-x86_64-linux-musl → 20.2.0.0-x86_64-linux-musl
- checksums.yaml +4 -4
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/include/cppgc/common.h +0 -1
- data/vendor/v8/include/cppgc/cross-thread-persistent.h +11 -10
- data/vendor/v8/include/cppgc/heap-consistency.h +46 -3
- data/vendor/v8/include/cppgc/heap-handle.h +48 -0
- data/vendor/v8/include/cppgc/heap-statistics.h +2 -2
- data/vendor/v8/include/cppgc/heap.h +3 -7
- data/vendor/v8/include/cppgc/internal/api-constants.h +14 -1
- data/vendor/v8/include/cppgc/internal/base-page-handle.h +45 -0
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +40 -8
- data/vendor/v8/include/cppgc/internal/caged-heap.h +61 -0
- data/vendor/v8/include/cppgc/internal/gc-info.h +35 -34
- data/vendor/v8/include/cppgc/internal/member-storage.h +248 -0
- data/vendor/v8/include/cppgc/internal/name-trait.h +21 -6
- data/vendor/v8/include/cppgc/internal/persistent-node.h +11 -13
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +65 -8
- data/vendor/v8/include/cppgc/internal/write-barrier.h +153 -101
- data/vendor/v8/include/cppgc/liveness-broker.h +8 -7
- data/vendor/v8/include/cppgc/macros.h +10 -1
- data/vendor/v8/include/cppgc/member.h +424 -111
- data/vendor/v8/include/cppgc/name-provider.h +4 -4
- data/vendor/v8/include/cppgc/persistent.h +27 -24
- data/vendor/v8/include/cppgc/platform.h +7 -5
- data/vendor/v8/include/cppgc/sentinel-pointer.h +1 -1
- data/vendor/v8/include/cppgc/trace-trait.h +4 -0
- data/vendor/v8/include/cppgc/type-traits.h +13 -3
- data/vendor/v8/include/cppgc/visitor.h +104 -57
- data/vendor/v8/include/libplatform/v8-tracing.h +2 -2
- data/vendor/v8/include/v8-array-buffer.h +59 -0
- data/vendor/v8/include/v8-callbacks.h +32 -5
- data/vendor/v8/include/v8-context.h +63 -11
- data/vendor/v8/include/v8-cppgc.h +22 -0
- data/vendor/v8/include/v8-data.h +1 -1
- data/vendor/v8/include/v8-date.h +5 -0
- data/vendor/v8/include/v8-embedder-heap.h +0 -164
- data/vendor/v8/include/v8-exception.h +1 -1
- data/vendor/v8/include/v8-fast-api-calls.h +49 -31
- data/vendor/v8/include/v8-function-callback.h +69 -42
- data/vendor/v8/include/v8-function.h +9 -0
- data/vendor/v8/include/v8-initialization.h +23 -49
- data/vendor/v8/include/v8-inspector.h +32 -11
- data/vendor/v8/include/v8-internal.h +480 -183
- data/vendor/v8/include/v8-isolate.h +52 -77
- data/vendor/v8/include/v8-local-handle.h +86 -53
- data/vendor/v8/include/v8-locker.h +0 -11
- data/vendor/v8/include/v8-maybe.h +24 -1
- data/vendor/v8/include/v8-message.h +2 -4
- data/vendor/v8/include/v8-metrics.h +48 -40
- data/vendor/v8/include/v8-microtask-queue.h +6 -1
- data/vendor/v8/include/v8-object.h +29 -18
- data/vendor/v8/include/v8-persistent-handle.h +25 -18
- data/vendor/v8/include/v8-platform.h +133 -35
- data/vendor/v8/include/v8-primitive.h +27 -20
- data/vendor/v8/include/v8-profiler.h +133 -53
- data/vendor/v8/include/v8-regexp.h +2 -1
- data/vendor/v8/include/v8-script.h +91 -7
- data/vendor/v8/include/v8-snapshot.h +4 -8
- data/vendor/v8/include/v8-template.h +16 -77
- data/vendor/v8/include/v8-traced-handle.h +22 -28
- data/vendor/v8/include/v8-unwinder-state.h +4 -4
- data/vendor/v8/include/v8-util.h +11 -7
- data/vendor/v8/include/v8-value-serializer.h +46 -23
- data/vendor/v8/include/v8-value.h +31 -4
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8-wasm.h +7 -63
- data/vendor/v8/include/v8-weak-callback-info.h +0 -7
- data/vendor/v8/include/v8config.h +353 -15
- data/vendor/v8/x86_64-linux-musl/libv8/obj/libv8_monolith.a +0 -0
- metadata +5 -1
data/vendor/v8/include/cppgc/internal/write-barrier.h
@@ -8,9 +8,12 @@
 #include <cstddef>
 #include <cstdint>
 
+#include "cppgc/heap-handle.h"
 #include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/atomic-entry-flag.h"
+#include "cppgc/internal/base-page-handle.h"
+#include "cppgc/internal/member-storage.h"
 #include "cppgc/platform.h"
 #include "cppgc/sentinel-pointer.h"
 #include "cppgc/trace-trait.h"
@@ -18,6 +21,7 @@
 
 #if defined(CPPGC_CAGED_HEAP)
 #include "cppgc/internal/caged-heap-local-data.h"
+#include "cppgc/internal/caged-heap.h"
 #endif
 
 namespace cppgc {
@@ -40,16 +44,18 @@ class V8_EXPORT WriteBarrier final {
     kGenerational,
   };
 
+  enum class GenerationalBarrierType : uint8_t {
+    kPreciseSlot,
+    kPreciseUncompressedSlot,
+    kImpreciseSlot,
+  };
+
   struct Params {
     HeapHandle* heap = nullptr;
 #if V8_ENABLE_CHECKS
     Type type = Type::kNone;
 #endif  // !V8_ENABLE_CHECKS
 #if defined(CPPGC_CAGED_HEAP)
-    uintptr_t start = 0;
-    CagedHeapLocalData& caged_heap() const {
-      return *reinterpret_cast<CagedHeapLocalData*>(start);
-    }
     uintptr_t slot_offset = 0;
     uintptr_t value_offset = 0;
 #endif  // CPPGC_CAGED_HEAP
@@ -63,6 +69,10 @@ class V8_EXPORT WriteBarrier final {
   // Returns the required write barrier for a given `slot` and `value`.
   static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                             Params& params);
+  // Returns the required write barrier for a given `slot` and `value`.
+  template <typename MemberStorage>
+  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
+                                            Params& params);
   // Returns the required write barrier for a given `slot`.
   template <typename HeapHandleCallback>
   static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
@@ -70,6 +80,15 @@ class V8_EXPORT WriteBarrier final {
   // Returns the required write barrier for a given `value`.
   static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
 
+#ifdef CPPGC_SLIM_WRITE_BARRIER
+  // A write barrier that combines `GenerationalBarrier()` and
+  // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
+  // as few registers as possible.
+  template <WriteBarrierSlotType>
+  static V8_NOINLINE void V8_PRESERVE_MOST
+  CombinedWriteBarrierSlow(const void* slot);
+#endif  // CPPGC_SLIM_WRITE_BARRIER
+
   static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                                const void* object);
   static V8_INLINE void DijkstraMarkingBarrierRange(
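For orientation: `DijkstraMarkingBarrier()`, which the new combined slow path above folds together with the generational barrier, implements the classic Dijkstra-style insertion barrier. A minimal conceptual sketch of that idea follows; all names are hypothetical and this is not cppgc's implementation, which works on page metadata and a marking worklist:

```cpp
// Conceptual sketch of a Dijkstra insertion barrier: when a reference to
// `value` is written into the heap while marking runs concurrently, shade
// the referent so the marker cannot miss it.
struct SketchObject {
  bool marked = false;
};

void PushToMarkingWorklist(SketchObject*);  // assumed marker interface

inline void DijkstraInsertionBarrier(SketchObject* value) {
  if (value && !value->marked) {
    value->marked = true;          // grey the object exactly once
    PushToMarkingWorklist(value);  // the marker traces it later
  }
}
```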
@@ -78,15 +97,13 @@ class V8_EXPORT WriteBarrier final {
   static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                              const void* object);
 #if defined(CPPGC_YOUNG_GENERATION)
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
                                             const void* slot);
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer);
 #else  // !CPPGC_YOUNG_GENERATION
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
-                                            const void* slot) {}
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer) {}
+                                            const void* slot) {}
 #endif  // CPPGC_YOUNG_GENERATION
 
 #if V8_ENABLE_CHECKS
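With `GenerationalBarrierForSourceObject()` folded into the templated `GenerationalBarrier()`, callers now select the slot flavor at compile time. A hedged sketch of what a call site looks like after this change — it mirrors, but does not quote, the dispatch in `pointer-policies.h`; `WriteToSlot` is hypothetical:

```cpp
#include "cppgc/internal/write-barrier.h"

// Hypothetical caller: compute the barrier type for a slot/value pair and
// dispatch. kGenerational is only reachable in CPPGC_YOUNG_GENERATION builds.
void WriteToSlot(const void* slot, const void* value) {
  cppgc::internal::WriteBarrier::Params params;
  const auto type =
      cppgc::internal::WriteBarrier::GetWriteBarrierType(slot, value, params);
  switch (type) {
    case cppgc::internal::WriteBarrier::Type::kGenerational:
      cppgc::internal::WriteBarrier::GenerationalBarrier<
          cppgc::internal::WriteBarrier::GenerationalBarrierType::
              kPreciseSlot>(params, slot);
      break;
    case cppgc::internal::WriteBarrier::Type::kMarking:
      cppgc::internal::WriteBarrier::DijkstraMarkingBarrier(params, value);
      break;
    case cppgc::internal::WriteBarrier::Type::kNone:
      break;
  }
}
```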
@@ -95,12 +112,10 @@ class V8_EXPORT WriteBarrier final {
   static void CheckParams(Type expected_type, const Params& params) {}
 #endif  // !V8_ENABLE_CHECKS
 
-  // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
-  // |incremental_or_concurrent_marking_flag_|.
-  class IncrementalOrConcurrentMarkingFlagUpdater;
-  static bool IsAnyIncrementalOrConcurrentMarking() {
-    return incremental_or_concurrent_marking_flag_.MightBeEntered();
-  }
+  // The FlagUpdater class allows cppgc internal to update
+  // |write_barrier_enabled_|.
+  class FlagUpdater;
+  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }
 
  private:
   WriteBarrier() = delete;
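`IsEnabled()` replaces `IsAnyIncrementalOrConcurrentMarking()` but still bottoms out in `AtomicEntryFlag::MightBeEntered()`. A simplified model of that flag's idea follows; the real class lives in `cppgc/internal/atomic-entry-flag.h` and may differ in detail:

```cpp
#include <atomic>

// Simplified model of AtomicEntryFlag: scopes call Enter()/Exit(), and the
// fast-path query is a single relaxed load, which is why IsEnabled() is
// cheap enough for the barrier fast path. A stale `true` is harmless because
// callers fall through to a precise per-heap check afterwards.
class AtomicEntryFlagModel {
 public:
  void Enter() { entries_.fetch_add(1, std::memory_order_relaxed); }
  void Exit() { entries_.fetch_sub(1, std::memory_order_relaxed); }
  bool MightBeEntered() const {
    return entries_.load(std::memory_order_relaxed) != 0;
  }

 private:
  std::atomic<int> entries_{0};
};
```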
@@ -125,17 +140,23 @@ class V8_EXPORT WriteBarrier final {
   static CagedHeapLocalData& GetLocalData(HeapHandle&);
   static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                       const AgeTable& age_table,
-                                      const void* slot, uintptr_t value_offset);
+                                      const void* slot, uintptr_t value_offset,
+                                      HeapHandle* heap_handle);
+  static void GenerationalBarrierForUncompressedSlotSlow(
+      const CagedHeapLocalData& local_data, const AgeTable& age_table,
+      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
   static void GenerationalBarrierForSourceObjectSlow(
-      const CagedHeapLocalData& local_data, const void* object);
+      const CagedHeapLocalData& local_data, const void* object,
+      HeapHandle* heap_handle);
 #endif  // CPPGC_YOUNG_GENERATION
 
-  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
+  static AtomicEntryFlag write_barrier_enabled_;
 };
 
 template <WriteBarrier::Type type>
 V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
-  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
+  if constexpr (type == WriteBarrier::Type::kNone)
+    return WriteBarrier::Type::kNone;
 #if V8_ENABLE_CHECKS
   params.type = type;
 #endif  // !V8_ENABLE_CHECKS
@@ -152,6 +173,14 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
+            typename MemberStorage>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
+  }
+
   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* value,
                                           WriteBarrier::Params& params,
@@ -166,69 +195,77 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
   static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                 WriteBarrier::Params& params,
                                                 HeapHandleCallback) {
-    if (!TryGetCagedHeap(value, value, params)) {
-      return WriteBarrier::Type::kNone;
-    }
-    if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
+    const bool within_cage = CagedHeapBase::IsWithinCage(value);
+    if (!within_cage) return WriteBarrier::Type::kNone;
+
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
+
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
   }
 
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;
-
-  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
-                                        WriteBarrier::Params& params) {
-    // TODO(chromium:1056170): Check if the null check can be folded in with
-    // the rest of the write barrier.
-    if (!value) return false;
-    params.start = reinterpret_cast<uintptr_t>(value) &
-                   ~(api_constants::kCagedHeapReservationAlignment - 1);
-    const uintptr_t slot_offset =
-        reinterpret_cast<uintptr_t>(slot) - params.start;
-    if (slot_offset > api_constants::kCagedHeapReservationSize) {
-      // Check if slot is on stack or value is sentinel or nullptr. This relies
-      // on the fact that kSentinelPointer is encoded as 0x1.
-      return false;
-    }
-    return true;
-  }
-
-  // Returns whether marking is in progress. If marking is not in progress
-  // sets the start of the cage accordingly.
-  //
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
 };
 
 template <>
 struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     WriteBarrier::ValueMode::kValuePresent> {
+  template <typename HeapHandleCallback, typename MemberStorage>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot,
+                                          MemberStorage storage,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+
+    return BarrierEnabledGet(slot, storage.Load(), params);
+  }
+
   template <typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback) {
-#if !defined(CPPGC_YOUNG_GENERATION)
-    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
-    }
-#endif  // !CPPGC_YOUNG_GENERATION
-    bool within_cage = TryGetCagedHeap(slot, value, params);
-    if (!within_cage) {
-      return WriteBarrier::Type::kNone;
-    }
-    if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
+
+    return BarrierEnabledGet(slot, value, params);
+  }
+
+ private:
+  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
+      const void* slot, const void* value, WriteBarrier::Params& params) {
+    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
+    if (!within_cage) return WriteBarrier::Type::kNone;
+
+    // We know that |value| points either within the normal page or to the
+    // beginning of large-page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(value));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
 #if defined(CPPGC_YOUNG_GENERATION)
-      params.heap = reinterpret_cast<HeapHandle*>(params.start);
-      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-      params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
+      if (!heap_handle.is_young_generation_enabled())
+        return WriteBarrier::Type::kNone;
+      params.heap = &heap_handle;
+      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
+      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
 #else   // !CPPGC_YOUNG_GENERATION
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 #endif  // !CPPGC_YOUNG_GENERATION
     }
-    params.heap = reinterpret_cast<HeapHandle*>(params.start);
+
+    // Use marking barrier.
+    params.heap = &heap_handle;
     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
   }
 };
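The recurring comment about extracting "the page header by bitmasking" is the trick that lets the new fast path drop the old `TryGetCagedHeap()` arithmetic entirely. A sketch of the idea behind `BasePageHandle::FromPayload()`; the alignment constant below is an illustrative assumption, the real one is defined in `cppgc/internal/api-constants.h`:

```cpp
#include <cstdint>

// Pages are allocated at a fixed power-of-two alignment, so masking off the
// low bits of any payload address yields the page start, where the page
// header (and through it the owning HeapHandle) is stored.
constexpr uintptr_t kAssumedPageAlignment = uintptr_t{1} << 17;  // 128 KiB

inline uintptr_t PageStartFromPayload(const void* payload) {
  return reinterpret_cast<uintptr_t>(payload) & ~(kAssumedPageAlignment - 1);
}
```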
@@ -240,28 +277,28 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
   static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
-#if defined(CPPGC_YOUNG_GENERATION)
+    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
+      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+
     HeapHandle& handle = callback();
-    if (V8_LIKELY(!IsMarking(handle, params))) {
-      // params.start is populated by IsMarking().
+#if defined(CPPGC_YOUNG_GENERATION)
+    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
+      if (!handle.is_young_generation_enabled()) {
+        return WriteBarrier::Type::kNone;
+      }
       params.heap = &handle;
-      params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
-      // params.value_offset stays 0.
-      if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
-        // Check if slot is on stack.
+      // Check if slot is on stack.
+      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
         return SetAndReturnType<WriteBarrier::Type::kNone>(params);
       }
+      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
       return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
     }
-#else   // !CPPGC_YOUNG_GENERATION
-    if (V8_UNLIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
-      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
-    }
-    HeapHandle& handle = callback();
-    if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+#else   // !defined(CPPGC_YOUNG_GENERATION)
+    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
-#endif  // !CPPGC_YOUNG_GENERATION
+#endif  // !defined(CPPGC_YOUNG_GENERATION)
     params.heap = &handle;
     return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
   }
@@ -278,6 +315,14 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
+                                          WriteBarrier::Params& params,
+                                          HeapHandleCallback callback) {
+    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
+                                              callback);
+  }
+
   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
   static V8_INLINE WriteBarrier::Type Get(const void* value,
                                           WriteBarrier::Params& params,
@@ -291,11 +336,6 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
   template <WriteBarrier::ValueMode value_mode>
   struct ValueModeDispatch;
 
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(const void*, HeapHandle**);
-  // TODO(chromium:1056170): Create fast path on API.
-  static bool IsMarking(HeapHandle&);
-
   WriteBarrierTypeForNonCagedHeapPolicy() = delete;
 };
 
@@ -310,10 +350,16 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
     if (object <= static_cast<void*>(kSentinelPointer)) {
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
-    if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
       return SetAndReturnType<WriteBarrier::Type::kNone>(params);
     }
-    if (IsMarking(object, &params.heap)) {
+    // We know that |object| is within the normal page or in the beginning of a
+    // large page, so extract the page header by bitmasking.
+    BasePageHandle* page =
+        BasePageHandle::FromPayload(const_cast<void*>(object));
+
+    HeapHandle& heap_handle = page->heap_handle();
+    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
       return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
     }
     return SetAndReturnType<WriteBarrier::Type::kNone>(params);
@@ -327,9 +373,9 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
   static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
-    if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
       HeapHandle& handle = callback();
-      if (IsMarking(handle)) {
+      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
         params.heap = &handle;
         return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
       }
@@ -345,6 +391,14 @@ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
       params, []() {});
 }
 
+// static
+template <typename MemberStorage>
+WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
+    const void* slot, MemberStorage value, WriteBarrier::Params& params) {
+  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
+                                                               params, []() {});
+}
+
 // static
 template <typename HeapHandleCallback>
 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
@@ -397,34 +451,32 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
 }
 
 #if defined(CPPGC_YOUNG_GENERATION)
-// static
-void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
-  CheckParams(Type::kGenerational, params);
-
-  const CagedHeapLocalData& local_data = params.caged_heap();
-  const AgeTable& age_table = local_data.age_table;
-
-  // Bail out if the slot is in young generation.
-  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
-    return;
-
-  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
-}
 
 // static
-void WriteBarrier::GenerationalBarrierForSourceObject(
-    const Params& params, const void* inner_pointer) {
+template <WriteBarrier::GenerationalBarrierType type>
+void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
   CheckParams(Type::kGenerational, params);
 
-  const CagedHeapLocalData& local_data = params.caged_heap();
+  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
   const AgeTable& age_table = local_data.age_table;
 
-  // Assume that if the first element is in young generation, the whole range
-  // is in young generation.
+  // Bail out if the slot (precise or imprecise) is in young generation.
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
-  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
+  // Dispatch between different types of barriers.
+  // TODO(chromium:1029379): Consider reload local_data in the slow path to
+  // reduce register pressure.
+  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
+    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
+                            params.heap);
+  } else if constexpr (type ==
+                       GenerationalBarrierType::kPreciseUncompressedSlot) {
+    GenerationalBarrierForUncompressedSlotSlow(
+        local_data, age_table, slot, params.value_offset, params.heap);
+  } else {
+    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
+  }
 }
 
 #endif  // !CPPGC_YOUNG_GENERATION
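The rewritten `GenerationalBarrier()` above uses C++17 `if constexpr` on a non-type template parameter, so each instantiation compiles down to exactly one slow-path call with no runtime branch. A stand-alone illustration of the pattern, with hypothetical names:

```cpp
enum class BarrierFlavor { kPrecise, kImprecise };

void PreciseSlowPath();    // assumed slow paths, declared for the sketch
void ImpreciseSlowPath();

template <BarrierFlavor flavor>
void Barrier() {
  // The discarded branch is dropped at instantiation time:
  // Barrier<BarrierFlavor::kPrecise>() contains only PreciseSlowPath().
  if constexpr (flavor == BarrierFlavor::kPrecise) {
    PreciseSlowPath();
  } else {
    ImpreciseSlowPath();
  }
}
```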
data/vendor/v8/include/cppgc/liveness-broker.h
@@ -7,6 +7,7 @@
 
 #include "cppgc/heap.h"
 #include "cppgc/member.h"
+#include "cppgc/sentinel-pointer.h"
 #include "cppgc/trace-trait.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
 
@@ -44,24 +45,24 @@ class V8_EXPORT LivenessBroker final {
  public:
   template <typename T>
   bool IsHeapObjectAlive(const T* object) const {
-    // nullptr objects are considered alive to allow weakness to be used from
+    // - nullptr objects are considered alive to allow weakness to be used from
     // stack while running into a conservative GC. Treating nullptr as dead
-    // would mean that e.g. custom collections could not be strongified on stack.
-    return !object ||
+    // would mean that e.g. custom collections could not be strongified on
+    // stack.
+    // - Sentinel pointers are also preserved in weakness and not cleared.
+    return !object || object == kSentinelPointer ||
            IsHeapObjectAliveImpl(
                TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
   }
 
   template <typename T>
   bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
-    return (weak_member != kSentinelPointer) &&
-           IsHeapObjectAlive<T>(weak_member.Get());
+    return IsHeapObjectAlive<T>(weak_member.Get());
   }
 
   template <typename T>
   bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
-    return (untraced_member != kSentinelPointer) &&
-           IsHeapObjectAlive<T>(untraced_member.Get());
+    return IsHeapObjectAlive<T>(untraced_member.Get());
   }
 
  private:
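The updated `IsHeapObjectAlive()` semantics (null and sentinel both report alive) matter mainly inside custom weak callbacks. A hedged usage sketch against cppgc's public API; `Node` and its cached field are hypothetical:

```cpp
#include "cppgc/garbage-collected.h"
#include "cppgc/liveness-broker.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

class Node final : public cppgc::GarbageCollected<Node> {
 public:
  void Trace(cppgc::Visitor* visitor) const {
    visitor->RegisterWeakCallback(&Node::ClearDeadNeighbor, this);
  }

 private:
  static void ClearDeadNeighbor(const cppgc::LivenessBroker& broker,
                                const void* self) {
    auto* node = static_cast<Node*>(const_cast<void*>(self));
    // Null or sentinel values report alive per the comment above, so no
    // special-casing is needed before clearing a genuinely dead referent.
    if (!broker.IsHeapObjectAlive(node->cached_neighbor_)) {
      node->cached_neighbor_ = nullptr;
    }
  }

  cppgc::UntracedMember<Node> cached_neighbor_;
};
```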
data/vendor/v8/include/cppgc/macros.h
@@ -11,7 +11,10 @@
 
 namespace cppgc {
 
-// Use if the object is only stack allocated.
+// Use CPPGC_STACK_ALLOCATED if the object is only stack allocated.
+// Add the CPPGC_STACK_ALLOCATED_IGNORE annotation on a case-by-case basis when
+// enforcement of CPPGC_STACK_ALLOCATED should be suppressed.
+#if defined(__clang__)
 #define CPPGC_STACK_ALLOCATED()                        \
  public:                                               \
   using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
@@ -20,6 +23,12 @@ namespace cppgc {
   void* operator new(size_t) = delete;                 \
   void* operator new(size_t, void*) = delete;          \
   static_assert(true, "Force semicolon.")
+#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason) \
+  __attribute__((annotate("stack_allocated_ignore")))
+#else  // !defined(__clang__)
+#define CPPGC_STACK_ALLOCATED() static_assert(true, "Force semicolon.")
+#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason)
+#endif  // !defined(__clang__)
 
 }  // namespace cppgc
 
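A usage sketch for the macro pair above: `CPPGC_STACK_ALLOCATED()` makes heap allocation of the annotated type a compile error (its `operator new`s are deleted, as shown in the hunk), while the new `CPPGC_STACK_ALLOCATED_IGNORE("bug-or-reason")` escape hatch suppresses Clang-plugin enforcement case by case. `GCObject` and `Scope` are hypothetical:

```cpp
#include "cppgc/garbage-collected.h"
#include "cppgc/macros.h"
#include "cppgc/visitor.h"

class GCObject final : public cppgc::GarbageCollected<GCObject> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

// A stack-only helper: `new Scope(...)` no longer compiles.
class Scope final {
  CPPGC_STACK_ALLOCATED();

 public:
  explicit Scope(GCObject* object) : object_(object) {}

 private:
  // Raw pointers to GC objects are conventional in stack-allocated classes:
  // the stack is scanned conservatively, which keeps *object_ alive.
  GCObject* object_;
};
```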