libv8-node 19.9.0.0-arm64-darwin → 20.2.0.0-arm64-darwin
- checksums.yaml +4 -4
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/cross-thread-persistent.h +4 -2
- data/vendor/v8/include/cppgc/heap-consistency.h +2 -2
- data/vendor/v8/include/cppgc/heap-handle.h +5 -0
- data/vendor/v8/include/cppgc/internal/api-constants.h +4 -1
- data/vendor/v8/include/cppgc/internal/gc-info.h +35 -33
- data/vendor/v8/include/cppgc/internal/member-storage.h +19 -7
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +38 -2
- data/vendor/v8/include/cppgc/internal/write-barrier.h +15 -5
- data/vendor/v8/include/cppgc/macros.h +10 -1
- data/vendor/v8/include/cppgc/member.h +167 -129
- data/vendor/v8/include/cppgc/persistent.h +22 -15
- data/vendor/v8/include/cppgc/platform.h +6 -4
- data/vendor/v8/include/cppgc/type-traits.h +4 -3
- data/vendor/v8/include/cppgc/visitor.h +16 -1
- data/vendor/v8/include/libplatform/v8-tracing.h +2 -2
- data/vendor/v8/include/v8-array-buffer.h +59 -0
- data/vendor/v8/include/v8-callbacks.h +14 -1
- data/vendor/v8/include/v8-context.h +50 -3
- data/vendor/v8/include/v8-cppgc.h +10 -0
- data/vendor/v8/include/v8-data.h +1 -1
- data/vendor/v8/include/v8-embedder-heap.h +0 -169
- data/vendor/v8/include/v8-fast-api-calls.h +7 -3
- data/vendor/v8/include/v8-function-callback.h +69 -42
- data/vendor/v8/include/v8-function.h +1 -0
- data/vendor/v8/include/v8-inspector.h +20 -5
- data/vendor/v8/include/v8-internal.h +242 -150
- data/vendor/v8/include/v8-isolate.h +30 -40
- data/vendor/v8/include/v8-local-handle.h +81 -48
- data/vendor/v8/include/v8-metrics.h +28 -2
- data/vendor/v8/include/v8-microtask-queue.h +5 -0
- data/vendor/v8/include/v8-object.h +21 -3
- data/vendor/v8/include/v8-persistent-handle.h +25 -16
- data/vendor/v8/include/v8-platform.h +79 -10
- data/vendor/v8/include/v8-primitive.h +19 -12
- data/vendor/v8/include/v8-profiler.h +49 -31
- data/vendor/v8/include/v8-script.h +29 -1
- data/vendor/v8/include/v8-snapshot.h +4 -8
- data/vendor/v8/include/v8-template.h +3 -1
- data/vendor/v8/include/v8-traced-handle.h +22 -28
- data/vendor/v8/include/v8-util.h +9 -3
- data/vendor/v8/include/v8-value.h +31 -4
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8-wasm.h +2 -1
- data/vendor/v8/include/v8config.h +73 -2
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a031eaa284eebcc10b618796436d2234dab8b18979f1f2bf15d211806d4a8613
+  data.tar.gz: aaaf0be20bc8682fc9e3d0a28dbcd09660c7f0da8252c10238dabd03d523ccb5
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 96efd0549cac30034108fc07f5bd7b915aed2afd1e40218a7a82ab7bdf592ab53ef7023aa18359ecd765bd9aeb72a0cc7307198321c8a314373ac3853ecd8890
+  data.tar.gz: 9453423ed1bfd60b9059781f8482132bb275183116c021be7ffc4acec21d6eb82a8f50d971e162a112469d8718813f2941292d9a634523e460b321200e05f24f
data/lib/libv8/node/version.rb
CHANGED
@@ -1,7 +1,7 @@
 module Libv8; end
 
 module Libv8::Node
-  VERSION = '
-  NODE_VERSION = '
-  LIBV8_VERSION = '
+  VERSION = '20.2.0.0'.freeze
+  NODE_VERSION = '20.2.0'.freeze
+  LIBV8_VERSION = '11.3.244.8'.freeze # from v8/include/v8-version.h
 end
data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a
CHANGED
Binary file
data/vendor/v8/include/cppgc/cross-thread-persistent.h
CHANGED
@@ -148,10 +148,11 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
 
   template <typename U, typename MemberBarrierPolicy,
             typename MemberWeaknessTag, typename MemberCheckingPolicy,
+            typename MemberStorageType,
             typename = std::enable_if_t<std::is_base_of<T, U>::value>>
   BasicCrossThreadPersistent(
       internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
-                            MemberCheckingPolicy>
+                            MemberCheckingPolicy, MemberStorageType>
           member,
       const SourceLocation& loc = SourceLocation::Current())
       : BasicCrossThreadPersistent(member.Get(), loc) {}
@@ -230,10 +231,11 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
   // Assignment from member.
   template <typename U, typename MemberBarrierPolicy,
             typename MemberWeaknessTag, typename MemberCheckingPolicy,
+            typename MemberStorageType,
             typename = std::enable_if_t<std::is_base_of<T, U>::value>>
   BasicCrossThreadPersistent& operator=(
       internal::BasicMember<U, MemberBarrierPolicy, MemberWeaknessTag,
-                            MemberCheckingPolicy>
+                            MemberCheckingPolicy, MemberStorageType>
           member) {
     return operator=(member.Get());
   }
data/vendor/v8/include/cppgc/heap-consistency.h
CHANGED
@@ -62,10 +62,10 @@ class HeapConsistency final {
    * \returns whether a write barrier is needed and which barrier to invoke.
    */
   template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
-            typename CheckingPolicy>
+            typename CheckingPolicy, typename StorageType>
   static V8_INLINE WriteBarrierType GetWriteBarrierType(
       const internal::BasicMember<T, WeaknessTag, WriteBarrierPolicy,
-                                  CheckingPolicy>& value,
+                                  CheckingPolicy, StorageType>& value,
       WriteBarrierParams& params) {
     return internal::WriteBarrier::GetWriteBarrierType(
         value.GetRawSlot(), value.GetRawStorage(), params);
data/vendor/v8/include/cppgc/heap-handle.h
CHANGED
@@ -19,6 +19,11 @@ class WriteBarrierTypeForNonCagedHeapPolicy;
  * Opaque handle used for additional heap APIs.
  */
 class HeapHandle {
+ public:
+  // Deleted copy ctor to avoid treating the type by value.
+  HeapHandle(const HeapHandle&) = delete;
+  HeapHandle& operator=(const HeapHandle&) = delete;
+
  private:
   HeapHandle() = default;
 
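With the copy operations deleted, a HeapHandle can only be handed around by reference or pointer. A minimal sketch of how an embedder signature is affected (CollectStatistics is a made-up function, not part of the diff):

#include "cppgc/heap-handle.h"

void CollectStatistics(cppgc::HeapHandle& handle);    // fine: pass by reference
// void CollectStatistics(cppgc::HeapHandle handle);  // no longer compiles: the copy ctor is deleted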
data/vendor/v8/include/cppgc/internal/api-constants.h
CHANGED
@@ -32,7 +32,7 @@ static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1};
 
 static constexpr size_t kPageSize = size_t{1} << 17;
 
-#if defined(V8_TARGET_ARCH_ARM64) && defined(
+#if defined(V8_TARGET_ARCH_ARM64) && defined(V8_OS_DARWIN)
 constexpr size_t kGuardPageSize = 0;
 #else
 constexpr size_t kGuardPageSize = 4096;
@@ -57,6 +57,9 @@ static constexpr size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;
 // Granularity of heap allocations.
 constexpr size_t kAllocationGranularity = sizeof(void*);
 
+// Default cacheline size.
+constexpr size_t kCachelineSize = 64;
+
 }  // namespace api_constants
 
 }  // namespace internal
data/vendor/v8/include/cppgc/internal/gc-info.h
CHANGED
@@ -10,6 +10,7 @@
 #include <type_traits>
 
 #include "cppgc/internal/finalizer-trait.h"
+#include "cppgc/internal/logging.h"
 #include "cppgc/internal/name-trait.h"
 #include "cppgc/trace-trait.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
@@ -20,12 +21,12 @@ namespace internal {
 using GCInfoIndex = uint16_t;
 
 struct V8_EXPORT EnsureGCInfoIndexTrait final {
-  // Acquires a new GC info object and
-  //
+  // Acquires a new GC info object and updates `registered_index` with the index
+  // that identifies that new info accordingly.
   template <typename T>
-  V8_INLINE static
+  V8_INLINE static void EnsureIndex(
       std::atomic<GCInfoIndex>& registered_index) {
-
+    EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
   }
 
  private:
@@ -34,38 +35,32 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final {
             bool = NameTrait<T>::HasNonHiddenName()>
   struct EnsureGCInfoIndexTraitDispatch;
 
-  static
-
-
-
-
-
-
-  static
-
-  static
-
-
-
-
-
-
-
-
-  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
-                                                     TraceCallback,
-                                                     NameCallback);
-  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
-                                                     TraceCallback);
+  static void V8_PRESERVE_MOST
+  EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
+                               FinalizationCallback, NameCallback);
+  static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
+      std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
+  static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
+      std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
+  static void V8_PRESERVE_MOST
+  EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
+  static void V8_PRESERVE_MOST
+  EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
+                                  FinalizationCallback, NameCallback);
+  static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
+      std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
+  static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
+      std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
+  static void V8_PRESERVE_MOST
+  EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
 };
 
 #define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \
   template <typename T>                                                        \
   struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch<               \
       T, is_polymorphic, has_finalizer, has_non_hidden_name> {                 \
-    V8_INLINE GCInfoIndex
-
-      return function; \
+    V8_INLINE void operator()(std::atomic<GCInfoIndex>& registered_index) {    \
+      function;                                                                \
     }                                                                          \
   };
 
@@ -143,9 +138,16 @@ struct GCInfoTrait final {
     static_assert(sizeof(T), "T must be fully defined");
     static std::atomic<GCInfoIndex>
         registered_index;  // Uses zero initialization.
-
-
-
+    GCInfoIndex index = registered_index.load(std::memory_order_acquire);
+    if (V8_UNLIKELY(!index)) {
+      EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
+      // Slow path call uses V8_PRESERVE_MOST which does not support return
+      // values (also preserves RAX). Avoid out parameter by just reloading the
+      // value here which at this point is guaranteed to be set.
+      index = registered_index.load(std::memory_order_acquire);
+      CPPGC_DCHECK(index != 0);
+    }
+    return index;
   }
 };
 
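The new GCInfoTrait body above encodes a pattern worth spelling out: because the V8_PRESERVE_MOST slow path cannot return a value, the result is published through the atomic and simply re-loaded on the fast path. A standalone model of that pattern (not V8 code; the index value 42 and all names are made up):

#include <atomic>
#include <cassert>
#include <cstdint>

namespace model {

std::atomic<uint16_t> g_registered_index{0};

// Stands in for the V8_PRESERVE_MOST slow path: it cannot return the index,
// so it publishes it through the atomic instead.
void EnsureIndexSlow(std::atomic<uint16_t>& slot) {
  uint16_t expected = 0;
  slot.compare_exchange_strong(expected, /*hypothetical index*/ 42,
                               std::memory_order_acq_rel);
}

uint16_t Index() {
  uint16_t index = g_registered_index.load(std::memory_order_acquire);
  if (index == 0) {  // cold path, taken once per type
    EnsureIndexSlow(g_registered_index);
    // Reload instead of using a return value or out parameter.
    index = g_registered_index.load(std::memory_order_acquire);
    assert(index != 0);
  }
  return index;
}

}  // namespace model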
data/vendor/v8/include/cppgc/internal/member-storage.h
CHANGED
@@ -17,6 +17,11 @@
 namespace cppgc {
 namespace internal {
 
+enum class WriteBarrierSlotType {
+  kCompressed,
+  kUncompressed,
+};
+
 #if defined(CPPGC_POINTER_COMPRESSION)
 
 #if defined(__clang__)
@@ -30,16 +35,16 @@ namespace internal {
 #define CPPGC_REQUIRE_CONSTANT_INIT
 #endif  // defined(__clang__)
 
-class CageBaseGlobal final {
+class V8_EXPORT CageBaseGlobal final {
  public:
   V8_INLINE CPPGC_CONST static uintptr_t Get() {
     CPPGC_DCHECK(IsBaseConsistent());
-    return g_base_;
+    return g_base_.base;
   }
 
   V8_INLINE CPPGC_CONST static bool IsSet() {
     CPPGC_DCHECK(IsBaseConsistent());
-    return (g_base_ & ~kLowerHalfWordMask) != 0;
+    return (g_base_.base & ~kLowerHalfWordMask) != 0;
   }
 
  private:
@@ -47,12 +52,15 @@ class CageBaseGlobal final {
   static constexpr uintptr_t kLowerHalfWordMask =
       (api_constants::kCagedHeapReservationAlignment - 1);
 
-  static
+  static union alignas(api_constants::kCachelineSize) Base {
+    uintptr_t base;
+    char cache_line[api_constants::kCachelineSize];
+  } g_base_ CPPGC_REQUIRE_CONSTANT_INIT;
 
   CageBaseGlobal() = delete;
 
   V8_INLINE static bool IsBaseConsistent() {
-    return kLowerHalfWordMask == (g_base_ & kLowerHalfWordMask);
+    return kLowerHalfWordMask == (g_base_.base & kLowerHalfWordMask);
   }
 
   friend class CageBaseGlobalUpdater;
@@ -64,6 +72,8 @@ class CageBaseGlobal final {
 class V8_TRIVIAL_ABI CompressedPointer final {
  public:
   using IntegralType = uint32_t;
+  static constexpr auto kWriteBarrierSlotType =
+      WriteBarrierSlotType::kCompressed;
 
   V8_INLINE CompressedPointer() : value_(0u) {}
   V8_INLINE explicit CompressedPointer(const void* ptr)
@@ -173,6 +183,8 @@ class V8_TRIVIAL_ABI CompressedPointer final {
 class V8_TRIVIAL_ABI RawPointer final {
  public:
   using IntegralType = uintptr_t;
+  static constexpr auto kWriteBarrierSlotType =
+      WriteBarrierSlotType::kUncompressed;
 
   V8_INLINE RawPointer() : ptr_(nullptr) {}
   V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
@@ -225,9 +237,9 @@ class V8_TRIVIAL_ABI RawPointer final {
 };
 
 #if defined(CPPGC_POINTER_COMPRESSION)
-using
+using DefaultMemberStorage = CompressedPointer;
 #else   // !defined(CPPGC_POINTER_COMPRESSION)
-using
+using DefaultMemberStorage = RawPointer;
 #endif  // !defined(CPPGC_POINTER_COMPRESSION)
 
 }  // namespace internal
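The g_base_ change above wraps the cage base in a union that is aligned and padded to kCachelineSize, presumably so the frequently read global sits on its own cache line. A self-contained model of the same trick (the constant and names are illustrative, not the V8 definitions):

#include <cstddef>
#include <cstdint>

namespace model {

inline constexpr std::size_t kCachelineSize = 64;

// The pointer-sized value and the padding share storage, so the object is
// exactly one aligned cache line and cannot false-share with neighbours.
union alignas(kCachelineSize) PaddedBase {
  std::uintptr_t base;
  char cache_line[kCachelineSize];
};

PaddedBase g_base{0};  // constant-initialized, like g_base_ above

}  // namespace model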
data/vendor/v8/include/cppgc/internal/pointer-policies.h
CHANGED
@@ -33,20 +33,53 @@ struct DijkstraWriteBarrierPolicy {
     // barrier doesn't break the tri-color invariant.
   }
 
+  template <WriteBarrierSlotType SlotType>
   V8_INLINE static void AssigningBarrier(const void* slot, const void* value) {
+#ifdef CPPGC_SLIM_WRITE_BARRIER
+    if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
+      WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);
+#else  // !CPPGC_SLIM_WRITE_BARRIER
     WriteBarrier::Params params;
     const WriteBarrier::Type type =
         WriteBarrier::GetWriteBarrierType(slot, value, params);
     WriteBarrier(type, params, slot, value);
+#endif  // !CPPGC_SLIM_WRITE_BARRIER
   }
 
+  template <WriteBarrierSlotType SlotType>
+  V8_INLINE static void AssigningBarrier(const void* slot, RawPointer storage) {
+    static_assert(
+        SlotType == WriteBarrierSlotType::kUncompressed,
+        "Assigning storages of Member and UncompressedMember is not supported");
+#ifdef CPPGC_SLIM_WRITE_BARRIER
+    if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
+      WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);
+#else  // !CPPGC_SLIM_WRITE_BARRIER
+    WriteBarrier::Params params;
+    const WriteBarrier::Type type =
+        WriteBarrier::GetWriteBarrierType(slot, storage, params);
+    WriteBarrier(type, params, slot, storage.Load());
+#endif  // !CPPGC_SLIM_WRITE_BARRIER
+  }
+
+#if defined(CPPGC_POINTER_COMPRESSION)
+  template <WriteBarrierSlotType SlotType>
   V8_INLINE static void AssigningBarrier(const void* slot,
-
+                                         CompressedPointer storage) {
+    static_assert(
+        SlotType == WriteBarrierSlotType::kCompressed,
+        "Assigning storages of Member and UncompressedMember is not supported");
+#ifdef CPPGC_SLIM_WRITE_BARRIER
+    if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
+      WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);
+#else  // !CPPGC_SLIM_WRITE_BARRIER
     WriteBarrier::Params params;
     const WriteBarrier::Type type =
         WriteBarrier::GetWriteBarrierType(slot, storage, params);
     WriteBarrier(type, params, slot, storage.Load());
+#endif  // !CPPGC_SLIM_WRITE_BARRIER
   }
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
 
  private:
   V8_INLINE static void WriteBarrier(WriteBarrier::Type type,
@@ -68,7 +101,9 @@ struct DijkstraWriteBarrierPolicy {
 
 struct NoWriteBarrierPolicy {
   V8_INLINE static void InitializingBarrier(const void*, const void*) {}
+  template <WriteBarrierSlotType>
   V8_INLINE static void AssigningBarrier(const void*, const void*) {}
+  template <WriteBarrierSlotType, typename MemberStorage>
   V8_INLINE static void AssigningBarrier(const void*, MemberStorage) {}
 };
 
@@ -197,7 +232,8 @@ template <typename T, typename WeaknessPolicy,
           typename CheckingPolicy = DefaultPersistentCheckingPolicy>
 class BasicPersistent;
 template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
-          typename CheckingPolicy = DefaultMemberCheckingPolicy
+          typename CheckingPolicy = DefaultMemberCheckingPolicy,
+          typename StorageType = DefaultMemberStorage>
 class BasicMember;
 
 }  // namespace internal
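Taken together, the member-storage and pointer-policies changes thread a compile-time slot type through every assigning barrier. The sketch below (standalone, with illustrative names; it is not the cppgc implementation) shows the shape of that dispatch: the storage type advertises its slot type, and the barrier overload static_asserts that the caller instantiated the matching one:

#include <cstdint>

namespace sketch {

enum class SlotType { kCompressed, kUncompressed };

struct RawStorage {
  static constexpr auto kSlotType = SlotType::kUncompressed;
  const void* ptr = nullptr;
  const void* Load() const { return ptr; }
};

template <SlotType kExpected>
void AssigningBarrier(const void* slot, RawStorage storage) {
  static_assert(kExpected == SlotType::kUncompressed,
                "uncompressed storage must be written through an uncompressed slot");
  // In the slim configuration the real code does a single enabled check and
  // then calls one combined slow path that takes only `slot`.
  (void)slot;
  (void)storage.Load();
}

}  // namespace sketch

// A Member-like caller names its slot type explicitly:
void Example(const void* slot, sketch::RawStorage storage) {
  sketch::AssigningBarrier<sketch::SlotType::kUncompressed>(slot, storage);
}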
data/vendor/v8/include/cppgc/internal/write-barrier.h
CHANGED
@@ -70,6 +70,7 @@ class V8_EXPORT WriteBarrier final {
   static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                             Params& params);
   // Returns the required write barrier for a given `slot` and `value`.
+  template <typename MemberStorage>
   static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
                                             Params& params);
   // Returns the required write barrier for a given `slot`.
@@ -79,6 +80,15 @@ class V8_EXPORT WriteBarrier final {
   // Returns the required write barrier for a given `value`.
   static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
 
+#ifdef CPPGC_SLIM_WRITE_BARRIER
+  // A write barrier that combines `GenerationalBarrier()` and
+  // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
+  // as few registers as possible.
+  template <WriteBarrierSlotType>
+  static V8_NOINLINE void V8_PRESERVE_MOST
+  CombinedWriteBarrierSlow(const void* slot);
+#endif  // CPPGC_SLIM_WRITE_BARRIER
+
   static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                                const void* object);
   static V8_INLINE void DijkstraMarkingBarrierRange(
@@ -163,7 +173,8 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
     return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
   }
 
-  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback
+  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
+            typename MemberStorage>
   static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
@@ -207,7 +218,7 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 template <>
 struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
     WriteBarrier::ValueMode::kValuePresent> {
-  template <typename HeapHandleCallback>
+  template <typename HeapHandleCallback, typename MemberStorage>
   static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                           MemberStorage storage,
                                           WriteBarrier::Params& params,
@@ -305,11 +316,9 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
   }
 
   template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
-  static V8_INLINE WriteBarrier::Type Get(const void* slot,
+  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
                                           WriteBarrier::Params& params,
                                           HeapHandleCallback callback) {
-    // `MemberStorage` will always be `RawPointer` for non-caged heap builds.
-    // Just convert to `void*` in this case.
     return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
                                               callback);
   }
@@ -383,6 +392,7 @@ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
 }
 
 // static
+template <typename MemberStorage>
 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
     const void* slot, MemberStorage value, WriteBarrier::Params& params) {
   return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
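CombinedWriteBarrierSlow is declared V8_NOINLINE plus V8_PRESERVE_MOST and takes only the slot, which keeps the inlined fast path around it as register-cheap as possible. The macro's definition lives in the v8config.h change listed at the top (not shown in this diff); on clang it presumably maps to the preserve_most calling convention, roughly:

// Illustrative declaration only; CombinedBarrierSlowModel is a made-up name,
// and the attribute spelling is an assumption about what v8config.h provides.
#if defined(__clang__)
__attribute__((preserve_most)) __attribute__((noinline))
void CombinedBarrierSlowModel(const void* slot);
#else
void CombinedBarrierSlowModel(const void* slot);
#endif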
data/vendor/v8/include/cppgc/macros.h
CHANGED
@@ -11,7 +11,10 @@
 
 namespace cppgc {
 
-// Use if the object is only stack allocated.
+// Use CPPGC_STACK_ALLOCATED if the object is only stack allocated.
+// Add the CPPGC_STACK_ALLOCATED_IGNORE annotation on a case-by-case basis when
+// enforcement of CPPGC_STACK_ALLOCATED should be suppressed.
+#if defined(__clang__)
 #define CPPGC_STACK_ALLOCATED()                        \
  public:                                               \
   using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
@@ -20,6 +23,12 @@ namespace cppgc {
   void* operator new(size_t) = delete;                 \
   void* operator new(size_t, void*) = delete;          \
   static_assert(true, "Force semicolon.")
+#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason) \
+  __attribute__((annotate("stack_allocated_ignore")))
+#else  // !defined(__clang__)
+#define CPPGC_STACK_ALLOCATED() static_assert(true, "Force semicolon.")
+#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason)
+#endif  // !defined(__clang__)
 
 }  // namespace cppgc
 
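A minimal usage sketch for the macros above (assumes cppgc/macros.h is on the include path; ScopedVisitor and the suppression reason are made up):

#include "cppgc/macros.h"

class ScopedVisitor final {
  CPPGC_STACK_ALLOCATED();  // deletes operator new; under clang it also tags
                            // the type for the GC plugin's stack-only check
 public:
  explicit ScopedVisitor(int depth) : depth_(depth) {}

 private:
  int depth_;
};

void Use() {
  ScopedVisitor v(1);       // fine: lives on the stack
  // new ScopedVisitor(1);  // does not compile: operator new is deleted
}

// Suppression is applied per declaration, e.g. (placement illustrative):
// CPPGC_STACK_ALLOCATED_IGNORE("crbug.com/000000") ScopedVisitor* cached = nullptr;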
|