libv8-node 18.13.0.0-arm64-darwin → 19.9.0.0-arm64-darwin
- checksums.yaml +4 -4
- data/ext/libv8-node/location.rb +1 -1
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/common.h +0 -1
- data/vendor/v8/include/cppgc/cross-thread-persistent.h +7 -8
- data/vendor/v8/include/cppgc/heap-consistency.h +46 -3
- data/vendor/v8/include/cppgc/heap-handle.h +43 -0
- data/vendor/v8/include/cppgc/heap-statistics.h +2 -2
- data/vendor/v8/include/cppgc/heap.h +3 -7
- data/vendor/v8/include/cppgc/internal/api-constants.h +11 -1
- data/vendor/v8/include/cppgc/internal/base-page-handle.h +45 -0
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +40 -8
- data/vendor/v8/include/cppgc/internal/caged-heap.h +61 -0
- data/vendor/v8/include/cppgc/internal/gc-info.h +0 -1
- data/vendor/v8/include/cppgc/internal/member-storage.h +236 -0
- data/vendor/v8/include/cppgc/internal/name-trait.h +21 -6
- data/vendor/v8/include/cppgc/internal/persistent-node.h +11 -13
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +28 -7
- data/vendor/v8/include/cppgc/internal/write-barrier.h +143 -101
- data/vendor/v8/include/cppgc/liveness-broker.h +8 -7
- data/vendor/v8/include/cppgc/member.h +364 -89
- data/vendor/v8/include/cppgc/name-provider.h +4 -4
- data/vendor/v8/include/cppgc/persistent.h +5 -9
- data/vendor/v8/include/cppgc/platform.h +2 -2
- data/vendor/v8/include/cppgc/sentinel-pointer.h +1 -1
- data/vendor/v8/include/cppgc/trace-trait.h +4 -0
- data/vendor/v8/include/cppgc/type-traits.h +9 -0
- data/vendor/v8/include/cppgc/visitor.h +89 -57
- data/vendor/v8/include/v8-callbacks.h +19 -5
- data/vendor/v8/include/v8-context.h +13 -8
- data/vendor/v8/include/v8-cppgc.h +12 -0
- data/vendor/v8/include/v8-date.h +5 -0
- data/vendor/v8/include/v8-embedder-heap.h +8 -3
- data/vendor/v8/include/v8-exception.h +1 -1
- data/vendor/v8/include/v8-fast-api-calls.h +46 -32
- data/vendor/v8/include/v8-function.h +8 -0
- data/vendor/v8/include/v8-initialization.h +23 -49
- data/vendor/v8/include/v8-inspector.h +13 -7
- data/vendor/v8/include/v8-internal.h +328 -123
- data/vendor/v8/include/v8-isolate.h +27 -42
- data/vendor/v8/include/v8-local-handle.h +5 -5
- data/vendor/v8/include/v8-locker.h +0 -11
- data/vendor/v8/include/v8-maybe.h +24 -1
- data/vendor/v8/include/v8-message.h +2 -4
- data/vendor/v8/include/v8-metrics.h +20 -38
- data/vendor/v8/include/v8-microtask-queue.h +1 -1
- data/vendor/v8/include/v8-object.h +8 -15
- data/vendor/v8/include/v8-persistent-handle.h +0 -2
- data/vendor/v8/include/v8-platform.h +54 -25
- data/vendor/v8/include/v8-primitive.h +8 -8
- data/vendor/v8/include/v8-profiler.h +84 -22
- data/vendor/v8/include/v8-regexp.h +2 -1
- data/vendor/v8/include/v8-script.h +62 -6
- data/vendor/v8/include/v8-template.h +13 -76
- data/vendor/v8/include/v8-unwinder-state.h +4 -4
- data/vendor/v8/include/v8-util.h +2 -4
- data/vendor/v8/include/v8-value-serializer.h +46 -23
- data/vendor/v8/include/v8-version.h +3 -3
- data/vendor/v8/include/v8-wasm.h +5 -62
- data/vendor/v8/include/v8-weak-callback-info.h +0 -7
- data/vendor/v8/include/v8config.h +280 -13
- metadata +8 -4
data/vendor/v8/include/cppgc/internal/member-storage.h
@@ -0,0 +1,236 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
+#define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
+
+#include <atomic>
+#include <cstddef>
+#include <type_traits>
+
+#include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/logging.h"
+#include "cppgc/sentinel-pointer.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+namespace internal {
+
+#if defined(CPPGC_POINTER_COMPRESSION)
+
+#if defined(__clang__)
+// Attribute const allows the compiler to assume that CageBaseGlobal::g_base_
+// doesn't change (e.g. across calls) and thereby avoid redundant loads.
+#define CPPGC_CONST __attribute__((const))
+#define CPPGC_REQUIRE_CONSTANT_INIT \
+  __attribute__((require_constant_initialization))
+#else  // defined(__clang__)
+#define CPPGC_CONST
+#define CPPGC_REQUIRE_CONSTANT_INIT
+#endif  // defined(__clang__)
+
+class CageBaseGlobal final {
+ public:
+  V8_INLINE CPPGC_CONST static uintptr_t Get() {
+    CPPGC_DCHECK(IsBaseConsistent());
+    return g_base_;
+  }
+
+  V8_INLINE CPPGC_CONST static bool IsSet() {
+    CPPGC_DCHECK(IsBaseConsistent());
+    return (g_base_ & ~kLowerHalfWordMask) != 0;
+  }
+
+ private:
+  // We keep the lower halfword as ones to speed up decompression.
+  static constexpr uintptr_t kLowerHalfWordMask =
+      (api_constants::kCagedHeapReservationAlignment - 1);
+
+  static V8_EXPORT uintptr_t g_base_ CPPGC_REQUIRE_CONSTANT_INIT;
+
+  CageBaseGlobal() = delete;
+
+  V8_INLINE static bool IsBaseConsistent() {
+    return kLowerHalfWordMask == (g_base_ & kLowerHalfWordMask);
+  }
+
+  friend class CageBaseGlobalUpdater;
+};
+
+#undef CPPGC_REQUIRE_CONSTANT_INIT
+#undef CPPGC_CONST
+
+class V8_TRIVIAL_ABI CompressedPointer final {
+ public:
+  using IntegralType = uint32_t;
+
+  V8_INLINE CompressedPointer() : value_(0u) {}
+  V8_INLINE explicit CompressedPointer(const void* ptr)
+      : value_(Compress(ptr)) {}
+  V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
+  V8_INLINE explicit CompressedPointer(SentinelPointer)
+      : value_(kCompressedSentinel) {}
+
+  V8_INLINE const void* Load() const { return Decompress(value_); }
+  V8_INLINE const void* LoadAtomic() const {
+    return Decompress(
+        reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
+            std::memory_order_relaxed));
+  }
+
+  V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
+  V8_INLINE void StoreAtomic(const void* value) {
+    reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
+        Compress(value), std::memory_order_relaxed);
+  }
+
+  V8_INLINE void Clear() { value_ = 0u; }
+  V8_INLINE bool IsCleared() const { return !value_; }
+
+  V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }
+
+  V8_INLINE uint32_t GetAsInteger() const { return value_; }
+
+  V8_INLINE friend bool operator==(CompressedPointer a, CompressedPointer b) {
+    return a.value_ == b.value_;
+  }
+  V8_INLINE friend bool operator!=(CompressedPointer a, CompressedPointer b) {
+    return a.value_ != b.value_;
+  }
+  V8_INLINE friend bool operator<(CompressedPointer a, CompressedPointer b) {
+    return a.value_ < b.value_;
+  }
+  V8_INLINE friend bool operator<=(CompressedPointer a, CompressedPointer b) {
+    return a.value_ <= b.value_;
+  }
+  V8_INLINE friend bool operator>(CompressedPointer a, CompressedPointer b) {
+    return a.value_ > b.value_;
+  }
+  V8_INLINE friend bool operator>=(CompressedPointer a, CompressedPointer b) {
+    return a.value_ >= b.value_;
+  }
+
+  static V8_INLINE IntegralType Compress(const void* ptr) {
+    static_assert(
+        SentinelPointer::kSentinelValue == 0b10,
+        "The compression scheme relies on the sentinel encoded as 0b10");
+    static constexpr size_t kGigaCageMask =
+        ~(api_constants::kCagedHeapReservationAlignment - 1);
+
+    CPPGC_DCHECK(CageBaseGlobal::IsSet());
+    const uintptr_t base = CageBaseGlobal::Get();
+    CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
+                 (base & kGigaCageMask) ==
+                     (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
+
+#if defined(CPPGC_2GB_CAGE)
+    // Truncate the pointer.
+    auto compressed =
+        static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
+#else  // !defined(CPPGC_2GB_CAGE)
+    const auto uptr = reinterpret_cast<uintptr_t>(ptr);
+    // Shift the pointer by one and truncate.
+    auto compressed = static_cast<IntegralType>(uptr >> 1);
+#endif  // !defined(CPPGC_2GB_CAGE)
+    // Normal compressed pointers must have the MSB set.
+    CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
+                 (compressed & (1 << 31)));
+    return compressed;
+  }
+
+  static V8_INLINE void* Decompress(IntegralType ptr) {
+    CPPGC_DCHECK(CageBaseGlobal::IsSet());
+    const uintptr_t base = CageBaseGlobal::Get();
+    // Treat compressed pointer as signed and cast it to uint64_t, which will
+    // sign-extend it.
+#if defined(CPPGC_2GB_CAGE)
+    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
+#else  // !defined(CPPGC_2GB_CAGE)
+    // Then, shift the result by one. It's important to shift the unsigned
+    // value, as otherwise it would result in undefined behavior.
+    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr)) << 1;
+#endif  // !defined(CPPGC_2GB_CAGE)
+    return reinterpret_cast<void*>(mask & base);
+  }
+
+ private:
+#if defined(CPPGC_2GB_CAGE)
+  static constexpr IntegralType kCompressedSentinel =
+      SentinelPointer::kSentinelValue;
+#else  // !defined(CPPGC_2GB_CAGE)
+  static constexpr IntegralType kCompressedSentinel =
+      SentinelPointer::kSentinelValue >> 1;
+#endif  // !defined(CPPGC_2GB_CAGE)
+  // All constructors initialize `value_`. Do not add a default value here as it
+  // results in a non-atomic write on some builds, even when the atomic version
+  // of the constructor is used.
+  IntegralType value_;
+};
+
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
+
+class V8_TRIVIAL_ABI RawPointer final {
+ public:
+  using IntegralType = uintptr_t;
+
+  V8_INLINE RawPointer() : ptr_(nullptr) {}
+  V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
+
+  V8_INLINE const void* Load() const { return ptr_; }
+  V8_INLINE const void* LoadAtomic() const {
+    return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
+        std::memory_order_relaxed);
+  }
+
+  V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
+  V8_INLINE void StoreAtomic(const void* ptr) {
+    reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
+        ptr, std::memory_order_relaxed);
+  }
+
+  V8_INLINE void Clear() { ptr_ = nullptr; }
+  V8_INLINE bool IsCleared() const { return !ptr_; }
+
+  V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }
+
+  V8_INLINE uintptr_t GetAsInteger() const {
+    return reinterpret_cast<uintptr_t>(ptr_);
+  }
+
+  V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
+    return a.ptr_ == b.ptr_;
+  }
+  V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
+    return a.ptr_ != b.ptr_;
+  }
+  V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
+    return a.ptr_ < b.ptr_;
+  }
+  V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
+    return a.ptr_ <= b.ptr_;
+  }
+  V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
+    return a.ptr_ > b.ptr_;
+  }
+  V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
+    return a.ptr_ >= b.ptr_;
+  }
+
+ private:
+  // All constructors initialize `ptr_`. Do not add a default value here as it
+  // results in a non-atomic write on some builds, even when the atomic version
+  // of the constructor is used.
+  const void* ptr_;
+};
+
+#if defined(CPPGC_POINTER_COMPRESSION)
+using MemberStorage = CompressedPointer;
+#else  // !defined(CPPGC_POINTER_COMPRESSION)
+using MemberStorage = RawPointer;
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
+
+}  // namespace internal
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
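The new header stores a Member as a 32-bit value when CPPGC_POINTER_COMPRESSION is enabled: Compress() shifts the 64-bit address right by one and truncates it, and Decompress() sign-extends the 32-bit value, shifts it back, and ANDs it with the cage base, whose lower bits are kept as ones. The following standalone sketch shows only that arithmetic for the default (non-CPPGC_2GB_CAGE) layout; the cage address is made up and the code does not use the cppgc classes.

// Standalone sketch of the compression arithmetic; kCageStart is a
// hypothetical 4 GiB-aligned base (the real one comes from CageBaseGlobal).
#include <cassert>
#include <cstdint>

constexpr uint64_t kCageSize = uint64_t{4} << 30;              // 4 GiB reservation
constexpr uint64_t kCageStart = 5 * kCageSize;                 // made-up cage base
constexpr uint64_t kCageBaseWithOnes = kCageStart | (kCageSize - 1);

uint32_t Compress(uint64_t ptr) {
  return static_cast<uint32_t>(ptr >> 1);                      // drop bit 0, truncate
}

uint64_t Decompress(uint32_t compressed) {
  // Sign-extend, shift back, then mask in the cage base with a single AND.
  const uint64_t mask =
      static_cast<uint64_t>(static_cast<int32_t>(compressed)) << 1;
  return mask & kCageBaseWithOnes;
}

int main() {
  const uint64_t object = kCageStart + 0x1234560;              // aligned, in-cage
  assert(Decompress(Compress(object)) == object);
  assert(Decompress(Compress(0)) == 0);                        // nullptr stays null
  assert(Decompress(Compress(0b10)) == 0b10);                  // sentinel round-trips
  return 0;
}

Because object addresses are at least 2-byte aligned, dropping the low bit loses no information, and the 0b10 sentinel compresses to 0b01, which stays distinct from the null encoding 0.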
data/vendor/v8/include/cppgc/internal/name-trait.h
@@ -6,6 +6,7 @@
 #define INCLUDE_CPPGC_INTERNAL_NAME_TRAIT_H_
 
 #include <cstddef>
+#include <cstdint>
 #include <type_traits>
 
 #include "cppgc/name-provider.h"
@@ -58,6 +59,11 @@ struct HeapObjectName {
   bool name_was_hidden;
 };
 
+enum class HeapObjectNameForUnnamedObject : uint8_t {
+  kUseClassNameIfSupported,
+  kUseHiddenName,
+};
+
 class V8_EXPORT NameTraitBase {
  protected:
  static HeapObjectName GetNameFromTypeSignature(const char*);
@@ -78,16 +84,24 @@ class NameTrait final : public NameTraitBase {
 #endif  // !CPPGC_SUPPORTS_OBJECT_NAMES
   }
 
-  static HeapObjectName GetName(const void* obj) {
-    return GetNameFor(static_cast<const T*>(obj));
+  static HeapObjectName GetName(
+      const void* obj, HeapObjectNameForUnnamedObject name_retrieval_mode) {
+    return GetNameFor(static_cast<const T*>(obj), name_retrieval_mode);
   }
 
  private:
-  static HeapObjectName GetNameFor(const NameProvider* name_provider) {
+  static HeapObjectName GetNameFor(const NameProvider* name_provider,
+                                   HeapObjectNameForUnnamedObject) {
+    // Objects inheriting from `NameProvider` are not considered unnamed as
+    // users already provided a name for them.
     return {name_provider->GetHumanReadableName(), false};
   }
 
-  static HeapObjectName GetNameFor(const void*) {
+  static HeapObjectName GetNameFor(
+      const void*, HeapObjectNameForUnnamedObject name_retrieval_mode) {
+    if (name_retrieval_mode == HeapObjectNameForUnnamedObject::kUseHiddenName)
+      return {NameProvider::kHiddenName, true};
+
 #if CPPGC_SUPPORTS_COMPILE_TIME_TYPENAME
     return {GetTypename<T>(), false};
 #elif CPPGC_SUPPORTS_OBJECT_NAMES
@@ -102,7 +116,7 @@ class NameTrait final : public NameTraitBase {
 
     static const HeapObjectName leaky_name =
         GetNameFromTypeSignature(PRETTY_FUNCTION_VALUE);
-    return
+    return leaky_name;
 
 #undef PRETTY_FUNCTION_VALUE
 
@@ -112,7 +126,8 @@ class NameTrait final : public NameTraitBase {
   }
 };
 
-using NameCallback = HeapObjectName (*)(const void*);
+using NameCallback = HeapObjectName (*)(const void*,
+                                        HeapObjectNameForUnnamedObject);
 
 }  // namespace internal
 }  // namespace cppgc
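The new HeapObjectNameForUnnamedObject parameter lets callers ask for the C++ class name even when an object does not derive from NameProvider, while named objects keep their user-supplied name. A minimal standalone sketch of that dispatch pattern (simplified stand-in types and names, not the cppgc implementation):

#include <cstdio>
#include <typeinfo>

enum class NameMode { kUseClassNameIfSupported, kUseHiddenName };

struct NameProviderLike {
  virtual ~NameProviderLike() = default;
  virtual const char* GetHumanReadableName() const = 0;
};

template <typename T>
struct NameTraitLike {
  static const char* GetName(const void* obj, NameMode mode) {
    return GetNameFor(static_cast<const T*>(obj), mode);
  }

 private:
  // Preferred overload for types deriving from NameProviderLike: the explicit
  // name always wins, regardless of the requested mode.
  static const char* GetNameFor(const NameProviderLike* obj, NameMode) {
    return obj->GetHumanReadableName();
  }
  // Fallback for unnamed types: honour the requested mode.
  static const char* GetNameFor(const void*, NameMode mode) {
    return mode == NameMode::kUseHiddenName ? "InternalNode"
                                            : typeid(T).name();  // mangled name
  }
};

struct Unnamed {};
struct Named final : NameProviderLike {
  const char* GetHumanReadableName() const override { return "MyNamedObject"; }
};

int main() {
  Unnamed u;
  Named n;
  std::printf("%s\n", NameTraitLike<Unnamed>::GetName(&u, NameMode::kUseHiddenName));
  std::printf("%s\n", NameTraitLike<Unnamed>::GetName(&u, NameMode::kUseClassNameIfSupported));
  std::printf("%s\n", NameTraitLike<Named>::GetName(&n, NameMode::kUseHiddenName));
  return 0;
}

Overload resolution does the work: for a type deriving from NameProviderLike the derived-to-base conversion beats the conversion to const void*, so the named overload is chosen without any explicit check.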
data/vendor/v8/include/cppgc/internal/persistent-node.h
@@ -14,13 +14,11 @@
 #include "v8config.h"  // NOLINT(build/include_directory)
 
 namespace cppgc {
-
-class Visitor;
-
 namespace internal {
 
 class CrossThreadPersistentRegion;
 class FatalOutOfMemoryHandler;
+class RootVisitor;
 
 // PersistentNode represents a variant of two states:
 // 1) traceable node with a back pointer to the Persistent object;
@@ -32,7 +30,7 @@ class PersistentNode final {
   PersistentNode(const PersistentNode&) = delete;
   PersistentNode& operator=(const PersistentNode&) = delete;
 
-  void InitializeAsUsedNode(void* owner, TraceCallback trace) {
+  void InitializeAsUsedNode(void* owner, TraceRootCallback trace) {
     CPPGC_DCHECK(trace);
     owner_ = owner;
     trace_ = trace;
@@ -53,9 +51,9 @@ class PersistentNode final {
     return next_;
   }
 
-  void Trace(Visitor* visitor) const {
+  void Trace(RootVisitor& root_visitor) const {
     CPPGC_DCHECK(IsUsed());
-    trace_(visitor, owner_);
+    trace_(root_visitor, owner_);
   }
 
   bool IsUsed() const { return trace_; }
@@ -73,7 +71,7 @@ class PersistentNode final {
     void* owner_ = nullptr;
     PersistentNode* next_;
   };
-  TraceCallback trace_ = nullptr;
+  TraceRootCallback trace_ = nullptr;
 };
 
 class V8_EXPORT PersistentRegionBase {
@@ -86,7 +84,7 @@ class V8_EXPORT PersistentRegionBase {
   PersistentRegionBase(const PersistentRegionBase&) = delete;
   PersistentRegionBase& operator=(const PersistentRegionBase&) = delete;
 
-  void Trace(Visitor*);
+  void Iterate(RootVisitor&);
 
   size_t NodesInUse() const;
 
@@ -96,7 +94,7 @@ class V8_EXPORT PersistentRegionBase {
   explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler);
 
   PersistentNode* TryAllocateNodeFromFreeList(void* owner,
-                                              TraceCallback trace) {
+                                              TraceRootCallback trace) {
     PersistentNode* node = nullptr;
     if (V8_LIKELY(free_list_head_)) {
       node = free_list_head_;
@@ -118,7 +116,7 @@ class V8_EXPORT PersistentRegionBase {
   }
 
   PersistentNode* RefillFreeListAndAllocateNode(void* owner,
-                                                TraceCallback trace);
+                                                TraceRootCallback trace);
 
  private:
  template <typename PersistentBaseClass>
@@ -145,7 +143,7 @@ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
   PersistentRegion(const PersistentRegion&) = delete;
   PersistentRegion& operator=(const PersistentRegion&) = delete;
 
-  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
+  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceRootCallback trace) {
     CPPGC_DCHECK(IsCreationThread());
     auto* node = TryAllocateNodeFromFreeList(owner, trace);
     if (V8_LIKELY(node)) return node;
@@ -189,7 +187,7 @@ class V8_EXPORT CrossThreadPersistentRegion final
   CrossThreadPersistentRegion& operator=(const CrossThreadPersistentRegion&) =
       delete;
 
-  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
+  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceRootCallback trace) {
     PersistentRegionLock::AssertLocked();
     auto* node = TryAllocateNodeFromFreeList(owner, trace);
     if (V8_LIKELY(node)) return node;
@@ -202,7 +200,7 @@ class V8_EXPORT CrossThreadPersistentRegion final
     PersistentRegionBase::FreeNode(node);
   }
 
-  void Trace(Visitor*);
+  void Iterate(RootVisitor&);
 
   size_t NodesInUse() const;
 
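Besides the Visitor-to-RootVisitor rename, the hunks above show how persistent handles are backed by node slots recycled through free_list_head_: a free slot stores the next free node in the same union field that a used slot uses for its owner back-pointer. A simplified standalone sketch of that reuse pattern (made-up types, not the cppgc classes):

#include <array>
#include <cassert>

struct Node {
  union {
    void* owner;  // used slot: back pointer to the persistent handle
    Node* next;   // free slot: next entry in the free list
  };
  bool used = false;
};

class Region {
 public:
  Node* Allocate(void* owner) {
    if (!free_list_head_) Refill();
    assert(free_list_head_ && "slab exhausted in this simplified sketch");
    Node* node = free_list_head_;
    free_list_head_ = node->next;
    node->owner = owner;
    node->used = true;
    return node;
  }

  void Free(Node* node) {
    node->used = false;
    node->next = free_list_head_;
    free_list_head_ = node;
  }

 private:
  void Refill() {
    // The real code allocates another slab; here we thread one fixed array.
    for (Node& n : slab_) {
      if (!n.used) Free(&n);
    }
  }

  std::array<Node, 8> slab_{};
  Node* free_list_head_ = nullptr;
};

int main() {
  Region region;
  int owner = 0;
  Node* a = region.Allocate(&owner);
  region.Free(a);
  Node* b = region.Allocate(&owner);
  assert(a == b);  // the freed slot is handed out again
  return 0;
}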
data/vendor/v8/include/cppgc/internal/pointer-policies.h
@@ -8,6 +8,7 @@
 #include <cstdint>
 #include <type_traits>
 
+#include "cppgc/internal/member-storage.h"
 #include "cppgc/internal/write-barrier.h"
 #include "cppgc/sentinel-pointer.h"
 #include "cppgc/source-location.h"
@@ -27,15 +28,34 @@ class WeakMemberTag;
 class UntracedMemberTag;
 
 struct DijkstraWriteBarrierPolicy {
-  static void InitializingBarrier(const void*, const void*) {
+  V8_INLINE static void InitializingBarrier(const void*, const void*) {
     // Since in initializing writes the source object is always white, having no
     // barrier doesn't break the tri-color invariant.
   }
-  static void AssigningBarrier(const void* slot, const void* value) {
+
+  V8_INLINE static void AssigningBarrier(const void* slot, const void* value) {
+    WriteBarrier::Params params;
+    const WriteBarrier::Type type =
+        WriteBarrier::GetWriteBarrierType(slot, value, params);
+    WriteBarrier(type, params, slot, value);
+  }
+
+  V8_INLINE static void AssigningBarrier(const void* slot,
+                                         MemberStorage storage) {
     WriteBarrier::Params params;
-    switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
+    const WriteBarrier::Type type =
+        WriteBarrier::GetWriteBarrierType(slot, storage, params);
+    WriteBarrier(type, params, slot, storage.Load());
+  }
+
+ private:
+  V8_INLINE static void WriteBarrier(WriteBarrier::Type type,
+                                     const WriteBarrier::Params& params,
+                                     const void* slot, const void* value) {
+    switch (type) {
       case WriteBarrier::Type::kGenerational:
-        WriteBarrier::GenerationalBarrier(params, slot);
+        WriteBarrier::GenerationalBarrier<
+            WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params, slot);
         break;
       case WriteBarrier::Type::kMarking:
         WriteBarrier::DijkstraMarkingBarrier(params, value);
@@ -47,8 +67,9 @@ struct DijkstraWriteBarrierPolicy {
 };
 
 struct NoWriteBarrierPolicy {
-  static void InitializingBarrier(const void*, const void*) {}
-  static void AssigningBarrier(const void*, const void*) {}
+  V8_INLINE static void InitializingBarrier(const void*, const void*) {}
+  V8_INLINE static void AssigningBarrier(const void*, const void*) {}
+  V8_INLINE static void AssigningBarrier(const void*, MemberStorage) {}
 };
 
 class V8_EXPORT SameThreadEnabledCheckingPolicyBase {
@@ -89,7 +110,7 @@ class V8_EXPORT SameThreadEnabledCheckingPolicy
 
 class DisabledCheckingPolicy {
  protected:
-  void CheckPointer(const void*) {}
+  V8_INLINE void CheckPointer(const void*) {}
 };
 
 #ifdef DEBUG
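The reworked DijkstraWriteBarrierPolicy computes the barrier type once per store and routes both the raw-pointer and the new MemberStorage overloads into one private dispatcher. A trimmed standalone sketch of that dispatch shape (stub barrier functions and types, not the real WriteBarrier API):

#include <cstdio>

enum class BarrierType { kNone, kMarking, kGenerational };

// Stub standing in for WriteBarrier::GetWriteBarrierType(): the real one
// inspects heap state; here marking is pretended to be active.
BarrierType GetWriteBarrierType(const void*, const void*) {
  return BarrierType::kMarking;
}

void DijkstraMarkingBarrier(const void* value) {
  std::printf("mark %p\n", value);
}

void GenerationalBarrier(const void* slot) {
  std::printf("record old-to-new slot %p\n", slot);
}

// Shape of DijkstraWriteBarrierPolicy::AssigningBarrier(): one dispatch per store.
void AssigningBarrier(const void* slot, const void* value) {
  switch (GetWriteBarrierType(slot, value)) {
    case BarrierType::kMarking:
      DijkstraMarkingBarrier(value);  // keep the newly referenced object alive
      break;
    case BarrierType::kGenerational:
      GenerationalBarrier(slot);      // remember the old-to-young slot
      break;
    case BarrierType::kNone:
      break;
  }
}

int main() {
  static int object = 0;
  const int* field = nullptr;
  AssigningBarrier(&field, &object);
  field = &object;  // the barrier runs alongside the actual store
  (void)field;
  return 0;
}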
|