libv8-node 20.2.0.0-x86_64-linux-musl → 21.7.2.0-x86_64-linux-musl
- checksums.yaml +4 -4
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/include/cppgc/internal/api-constants.h +23 -4
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +16 -6
- data/vendor/v8/include/cppgc/internal/caged-heap.h +12 -5
- data/vendor/v8/include/cppgc/internal/gc-info.h +82 -91
- data/vendor/v8/include/cppgc/internal/member-storage.h +16 -8
- data/vendor/v8/include/cppgc/member.h +25 -0
- data/vendor/v8/include/cppgc/persistent.h +4 -0
- data/vendor/v8/include/cppgc/platform.h +6 -1
- data/vendor/v8/include/cppgc/sentinel-pointer.h +7 -0
- data/vendor/v8/include/cppgc/source-location.h +2 -78
- data/vendor/v8/include/cppgc/trace-trait.h +8 -0
- data/vendor/v8/include/cppgc/visitor.h +82 -4
- data/vendor/v8/include/libplatform/libplatform.h +7 -1
- data/vendor/v8/include/v8-callbacks.h +52 -8
- data/vendor/v8/include/v8-context.h +10 -13
- data/vendor/v8/include/v8-cppgc.h +5 -0
- data/vendor/v8/include/v8-embedder-heap.h +12 -0
- data/vendor/v8/include/v8-fast-api-calls.h +23 -5
- data/vendor/v8/include/v8-function-callback.h +11 -15
- data/vendor/v8/include/v8-function.h +6 -0
- data/vendor/v8/include/v8-handle-base.h +185 -0
- data/vendor/v8/include/v8-inspector.h +31 -1
- data/vendor/v8/include/v8-internal.h +109 -77
- data/vendor/v8/include/v8-isolate.h +130 -89
- data/vendor/v8/include/v8-local-handle.h +134 -89
- data/vendor/v8/include/v8-object.h +71 -52
- data/vendor/v8/include/v8-persistent-handle.h +65 -89
- data/vendor/v8/include/v8-platform.h +140 -9
- data/vendor/v8/include/v8-primitive.h +12 -8
- data/vendor/v8/include/v8-profiler.h +26 -2
- data/vendor/v8/include/v8-script.h +30 -7
- data/vendor/v8/include/v8-snapshot.h +4 -1
- data/vendor/v8/include/v8-source-location.h +92 -0
- data/vendor/v8/include/v8-statistics.h +36 -1
- data/vendor/v8/include/v8-traced-handle.h +37 -54
- data/vendor/v8/include/v8-unwinder.h +1 -1
- data/vendor/v8/include/v8-util.h +15 -13
- data/vendor/v8/include/v8-value-serializer.h +14 -0
- data/vendor/v8/include/v8-value.h +14 -0
- data/vendor/v8/include/v8-version.h +3 -3
- data/vendor/v8/include/v8config.h +19 -10
- data/vendor/v8/x86_64-linux-musl/libv8/obj/libv8_monolith.a +0 -0
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9e25b5adfbaf2f803609fc175091030029272f492b82fed053ceed18af103d14
+  data.tar.gz: d31e6b7db1ca1cf287e076b49f69cf745027914c0abcfa0b61e5712bb9d2c491
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e24ca24e9730e714472c824b3214d3229fe3745a37c90dd1e950cfcdc3151610e555090c2d330e3668c6937f176b31224a69053dea020c25adb10a67a96d45d9
+  data.tar.gz: bcefa5bba05b171254d2f9b224e9a15c7555b59520a94effd5610ab5031f7e42ccd4a1746d165433daa38bafe7c328bbf43887317e57741fed6c284215b2157b
data/lib/libv8/node/version.rb
CHANGED
@@ -1,7 +1,7 @@
 module Libv8; end
 
 module Libv8::Node
-  VERSION = '
-  NODE_VERSION = '
-  LIBV8_VERSION = '11.
+  VERSION = '21.7.2.0'.freeze
+  NODE_VERSION = '21.7.2'.freeze
+  LIBV8_VERSION = '11.8.172.17'.freeze # from v8/include/v8-version.h
 end
data/vendor/v8/include/cppgc/internal/api-constants.h
CHANGED
@@ -40,13 +40,32 @@ constexpr size_t kGuardPageSize = 4096;
 
 static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
 
+#if defined(CPPGC_POINTER_COMPRESSION)
+#if defined(CPPGC_ENABLE_LARGER_CAGE)
+constexpr unsigned kPointerCompressionShift = 3;
+#else   // !defined(CPPGC_ENABLE_LARGER_CAGE)
+constexpr unsigned kPointerCompressionShift = 1;
+#endif  // !defined(CPPGC_ENABLE_LARGER_CAGE)
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
+
 #if defined(CPPGC_CAGED_HEAP)
 #if defined(CPPGC_2GB_CAGE)
-constexpr size_t
-
-constexpr size_t
+constexpr size_t kCagedHeapDefaultReservationSize =
+    static_cast<size_t>(2) * kGB;
+constexpr size_t kCagedHeapMaxReservationSize =
+    kCagedHeapDefaultReservationSize;
+#else   // !defined(CPPGC_2GB_CAGE)
+constexpr size_t kCagedHeapDefaultReservationSize =
+    static_cast<size_t>(4) * kGB;
+#if defined(CPPGC_POINTER_COMPRESSION)
+constexpr size_t kCagedHeapMaxReservationSize =
+    size_t{1} << (31 + kPointerCompressionShift);
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
+constexpr size_t kCagedHeapMaxReservationSize =
+    kCagedHeapDefaultReservationSize;
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
 #endif  // !defined(CPPGC_2GB_CAGE)
-constexpr size_t kCagedHeapReservationAlignment =
+constexpr size_t kCagedHeapReservationAlignment = kCagedHeapMaxReservationSize;
 #endif  // defined(CPPGC_CAGED_HEAP)
 
 static constexpr size_t kDefaultAlignment = sizeof(void*);
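Note on the arithmetic above: with pointer compression the cage may now grow beyond the old fixed reservation. A minimal standalone sketch of the relationship between the compression shift and the maximum reservation (constant names mirror the header; this snippet is illustrative only and not part of the gem):

    #include <cstddef>

    // Illustrative re-computation of the limits defined in api-constants.h.
    constexpr size_t kGB = size_t{1} << 30;
    constexpr unsigned kPointerCompressionShift = 1;  // 3 with CPPGC_ENABLE_LARGER_CAGE
    constexpr size_t kCagedHeapMaxReservationSize =
        size_t{1} << (31 + kPointerCompressionShift);

    // shift == 1 allows a 4 GB cage; shift == 3 would allow 16 GB.
    static_assert(kCagedHeapMaxReservationSize ==
                  (kPointerCompressionShift == 1 ? 4 * kGB : 16 * kGB));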
data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h
CHANGED
@@ -46,7 +46,11 @@ class V8_EXPORT AgeTable final {
   enum class AdjacentCardsPolicy : uint8_t { kConsider, kIgnore };
 
   static constexpr size_t kCardSizeInBytes =
-      api_constants::
+      api_constants::kCagedHeapDefaultReservationSize / kRequiredSize;
+
+  static constexpr size_t CalculateAgeTableSizeForHeapSize(size_t heap_size) {
+    return heap_size / kCardSizeInBytes;
+  }
 
   void SetAge(uintptr_t cage_offset, Age age) {
     table_[card(cage_offset)] = age;
@@ -81,16 +85,18 @@ class V8_EXPORT AgeTable final {
 #endif  // !V8_HAS_BUILTIN_CTZ
     static_assert((1 << kGranularityBits) == kCardSizeInBytes);
     const size_t entry = offset >> kGranularityBits;
-    CPPGC_DCHECK(
+    CPPGC_DCHECK(CagedHeapBase::GetAgeTableSize() > entry);
     return entry;
   }
 
-
+#if defined(V8_CC_GNU)
+  // gcc disallows flexible arrays in otherwise empty classes.
+  Age table_[0];
+#else   // !defined(V8_CC_GNU)
+  Age table_[];
+#endif  // !defined(V8_CC_GNU)
 };
 
-static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
-              "Size of AgeTable is 1MB");
-
 #endif  // CPPGC_YOUNG_GENERATION
 
 struct CagedHeapLocalData final {
@@ -98,6 +104,10 @@ struct CagedHeapLocalData final {
     return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
   }
 
+  static constexpr size_t CalculateLocalDataSizeForHeapSize(size_t heap_size) {
+    return AgeTable::CalculateAgeTableSizeForHeapSize(heap_size);
+  }
+
 #if defined(CPPGC_YOUNG_GENERATION)
   AgeTable age_table;
 #endif
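The age table is no longer a fixed 1 MB array: its size is now derived from the actual cage size via CalculateAgeTableSizeForHeapSize(). A rough sketch of that arithmetic (the 4096-byte card size is the value implied by a 4 GB default cage split into 1 M cards; treat the concrete numbers as illustrative):

    #include <cstddef>

    constexpr size_t kGB = size_t{1} << 30;
    constexpr size_t kCardSizeInBytes = 4096;  // assumed card size, see lead-in

    constexpr size_t CalculateAgeTableSizeForHeapSize(size_t heap_size) {
      return heap_size / kCardSizeInBytes;
    }

    // A 4 GB cage needs a 1 MB age table; a 2 GB cage needs only 512 KB.
    static_assert(CalculateAgeTableSizeForHeapSize(4 * kGB) == (size_t{1} << 20));
    static_assert(CalculateAgeTableSizeForHeapSize(2 * kGB) == (size_t{1} << 19));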
data/vendor/v8/include/cppgc/internal/caged-heap.h
CHANGED
@@ -33,24 +33,31 @@ class V8_EXPORT CagedHeapBase {
 
   V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
 #if defined(CPPGC_2GB_CAGE)
-    static constexpr size_t
+    static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT - 1;
 #else   //! defined(CPPGC_2GB_CAGE)
-
+#if defined(CPPGC_POINTER_COMPRESSION)
+    static constexpr size_t kHeapBaseShift =
+        31 + api_constants::kPointerCompressionShift;
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
+    static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT;
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
 #endif  //! defined(CPPGC_2GB_CAGE)
-    static_assert((static_cast<size_t>(1) <<
-                  api_constants::
+    static_assert((static_cast<size_t>(1) << kHeapBaseShift) ==
+                  api_constants::kCagedHeapMaxReservationSize);
     CPPGC_DCHECK(g_heap_base_);
     return !(((reinterpret_cast<uintptr_t>(addr1) ^ g_heap_base_) |
               (reinterpret_cast<uintptr_t>(addr2) ^ g_heap_base_)) >>
-
+             kHeapBaseShift);
   }
 
   V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
+  V8_INLINE static size_t GetAgeTableSize() { return g_age_table_size_; }
 
  private:
   friend class CagedHeap;
 
   static uintptr_t g_heap_base_;
+  static size_t g_age_table_size_;
 };
 
 }  // namespace internal
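AreWithinCage() is a branch-free containment check: XOR each address with the cage base and shift the OR-ed result right by kHeapBaseShift; any surviving bit means at least one address lies outside the cage. A self-contained sketch of the idea on a 64-bit build (base and shift values are made up for illustration):

    #include <cstdint>

    constexpr unsigned kHeapBaseShift = 32;                   // 4 GB cage
    constexpr uint64_t kCageBase = uint64_t{0x1234} << 32;    // hypothetical base

    constexpr bool AreWithinCage(uint64_t addr1, uint64_t addr2) {
      // Bits at or above kHeapBaseShift that differ from the base survive the
      // shift and make the result non-zero.
      return !(((addr1 ^ kCageBase) | (addr2 ^ kCageBase)) >> kHeapBaseShift);
    }

    static_assert(AreWithinCage(kCageBase + 0x10, kCageBase + 0xFFFF));
    static_assert(!AreWithinCage(kCageBase + 0x10, kCageBase + (uint64_t{5} << 32)));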
data/vendor/v8/include/cppgc/internal/gc-info.h
CHANGED
@@ -24,89 +24,90 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final {
   // Acquires a new GC info object and updates `registered_index` with the index
   // that identifies that new info accordingly.
   template <typename T>
-  V8_INLINE static
+  V8_INLINE static GCInfoIndex EnsureIndex(
       std::atomic<GCInfoIndex>& registered_index) {
-    EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
+    return EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
   }
 
  private:
-  template <typename T, bool =
-            bool = FinalizerTrait<T>::HasFinalizer(),
+  template <typename T, bool = FinalizerTrait<T>::HasFinalizer(),
             bool = NameTrait<T>::HasNonHiddenName()>
   struct EnsureGCInfoIndexTraitDispatch;
 
-  static
-
-
-  static
+  static GCInfoIndex V8_PRESERVE_MOST
+  EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback,
+                    FinalizationCallback, NameCallback);
+  static GCInfoIndex V8_PRESERVE_MOST EnsureGCInfoIndex(
       std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
-  static
-
-  static
-
-  static void V8_PRESERVE_MOST
-  EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
-                                  FinalizationCallback, NameCallback);
-  static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
-      std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
-  static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
-      std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
-  static void V8_PRESERVE_MOST
-  EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
+  static GCInfoIndex V8_PRESERVE_MOST
+  EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
+  static GCInfoIndex V8_PRESERVE_MOST
+  EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback);
 };
 
-#define DISPATCH(
-template <typename T>
-struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch<
-    T,
-  V8_INLINE
-
-
+#define DISPATCH(has_finalizer, has_non_hidden_name, function)   \
+  template <typename T>                                          \
+  struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch<  \
+      T, has_finalizer, has_non_hidden_name> {                   \
+    V8_INLINE GCInfoIndex                                        \
+    operator()(std::atomic<GCInfoIndex>& registered_index) {     \
+      return function;                                           \
+    }                                                            \
   };
 
-//
-// DISPATCH(
-//
-DISPATCH(true, true,
-
-
-
-
-DISPATCH(true,
-
-
-
-DISPATCH(
-
-
-
-DISPATCH(
-
-
-DISPATCH(false, true, true,                                      //
-         EnsureGCInfoIndexNonPolymorphic(registered_index,       //
-                                         TraceTrait<T>::Trace,   //
-                                         FinalizerTrait<T>::kCallback, //
-                                         NameTrait<T>::GetName)) //
-DISPATCH(false, true, false,                                     //
-         EnsureGCInfoIndexNonPolymorphic(registered_index,       //
-                                         TraceTrait<T>::Trace,   //
-                                         FinalizerTrait<T>::kCallback)) //
-DISPATCH(false, false, true,                                     //
-         EnsureGCInfoIndexNonPolymorphic(registered_index,       //
-                                         TraceTrait<T>::Trace,   //
-                                         NameTrait<T>::GetName)) //
-DISPATCH(false, false, false,                                    //
-         EnsureGCInfoIndexNonPolymorphic(registered_index,       //
-                                         TraceTrait<T>::Trace))  //
+// ------------------------------------------------------- //
+// DISPATCH(has_finalizer, has_non_hidden_name, function)   //
+// ------------------------------------------------------- //
+DISPATCH(true, true,                                        //
+         EnsureGCInfoIndex(registered_index,                //
+                           TraceTrait<T>::Trace,            //
+                           FinalizerTrait<T>::kCallback,    //
+                           NameTrait<T>::GetName))          //
+DISPATCH(true, false,                                       //
+         EnsureGCInfoIndex(registered_index,                //
+                           TraceTrait<T>::Trace,            //
+                           FinalizerTrait<T>::kCallback))   //
+DISPATCH(false, true,                                       //
+         EnsureGCInfoIndex(registered_index,                //
+                           TraceTrait<T>::Trace,            //
+                           NameTrait<T>::GetName))          //
+DISPATCH(false, false,                                      //
+         EnsureGCInfoIndex(registered_index,                //
+                           TraceTrait<T>::Trace))           //
 
 #undef DISPATCH
 
+// Trait determines how the garbage collector treats objects wrt. to traversing,
+// finalization, and naming.
+template <typename T>
+struct GCInfoTrait final {
+  V8_INLINE static GCInfoIndex Index() {
+    static_assert(sizeof(T), "T must be fully defined");
+    static std::atomic<GCInfoIndex>
+        registered_index;  // Uses zero initialization.
+    GCInfoIndex index = registered_index.load(std::memory_order_acquire);
+    if (V8_UNLIKELY(!index)) {
+      index = EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
+      CPPGC_DCHECK(index != 0);
+      CPPGC_DCHECK(index == registered_index.load(std::memory_order_acquire));
+    }
+    return index;
+  }
+
+  static constexpr bool CheckCallbacksAreDefined() {
+    // No USE() macro available.
+    (void)static_cast<TraceCallback>(TraceTrait<T>::Trace);
+    (void)static_cast<FinalizationCallback>(FinalizerTrait<T>::kCallback);
+    (void)static_cast<NameCallback>(NameTrait<T>::GetName);
+    return true;
+  }
+};
+
 // Fold types based on finalizer behavior. Note that finalizer characteristics
 // align with trace behavior, i.e., destructors are virtual when trace methods
 // are and vice versa.
 template <typename T, typename ParentMostGarbageCollectedType>
-struct GCInfoFolding {
+struct GCInfoFolding final {
   static constexpr bool kHasVirtualDestructorAtBase =
       std::has_virtual_destructor<ParentMostGarbageCollectedType>::value;
   static constexpr bool kBothTypesAreTriviallyDestructible =
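The new GCInfoTrait<T>::Index() is a classic acquire-load fast path with a one-time slow registration path; previously the slow path did not return a value (see the removed comment about V8_PRESERVE_MOST), so the index had to be reloaded. A self-contained sketch of the same pattern outside V8 (the registry and names are invented):

    #include <atomic>
    #include <cstdint>

    using InfoIndex = uint16_t;

    // Hypothetical slow path: allocate an index once; later callers reuse it.
    InfoIndex RegisterSlow(std::atomic<InfoIndex>& slot) {
      static std::atomic<InfoIndex> next_index{1};
      InfoIndex expected = 0;
      InfoIndex fresh = next_index.fetch_add(1, std::memory_order_relaxed);
      if (!slot.compare_exchange_strong(expected, fresh, std::memory_order_release,
                                        std::memory_order_acquire)) {
        return expected;  // somebody else registered first; reuse their index
      }
      return fresh;
    }

    template <typename T>
    InfoIndex IndexFor() {
      static std::atomic<InfoIndex> registered_index;  // zero-initialized
      InfoIndex index = registered_index.load(std::memory_order_acquire);
      if (index == 0) {  // slow path runs at most a handful of times per T
        index = RegisterSlow(registered_index);
      }
      return index;
    }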
@@ -121,34 +122,24 @@ struct GCInfoFolding {
   static constexpr bool kWantsDetailedObjectNames = false;
 #endif  // !CPPGC_SUPPORTS_OBJECT_NAMES
 
-  //
-  //
-
-
-
-
-
-
+  // Always true. Forces the compiler to resolve callbacks which ensures that
+  // both modes don't break without requiring compiling a separate
+  // configuration. Only a single GCInfo (for `ResultType` below) will actually
+  // be instantiated but existence (and well-formedness) of all callbacks is
+  // checked.
+  static constexpr bool kCheckTypeGuardAlwaysTrue =
+      GCInfoTrait<T>::CheckCallbacksAreDefined() &&
+      GCInfoTrait<ParentMostGarbageCollectedType>::CheckCallbacksAreDefined();
 
-  //
-  //
-
-
-
-
-
-
-
-    if (V8_UNLIKELY(!index)) {
-      EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
-      // Slow path call uses V8_PRESERVE_MOST which does not support return
-      // values (also preserves RAX). Avoid out parameter by just reloading the
-      // value here which at this point is guaranteed to be set.
-      index = registered_index.load(std::memory_order_acquire);
-      CPPGC_DCHECK(index != 0);
-    }
-    return index;
-  }
+  // Folding would regress name resolution when deriving names from C++
+  // class names as it would just folds a name to the base class name.
+  using ResultType =
+      std::conditional_t<kCheckTypeGuardAlwaysTrue &&
+                             (kHasVirtualDestructorAtBase ||
+                              kBothTypesAreTriviallyDestructible ||
+                              kHasCustomFinalizerDispatchAtBase) &&
+                             !kWantsDetailedObjectNames,
+                         ParentMostGarbageCollectedType, T>;
 };
 
 }  // namespace internal
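The folding decision itself is plain std::conditional_t machinery: fold to the parent-most type when it is safe to share one GCInfo entry, otherwise keep the exact type. A toy version of that selection (types and predicates are invented for illustration):

    #include <type_traits>

    template <typename T, typename Base>
    struct FoldingSketch {
      static constexpr bool kHasVirtualDestructorAtBase =
          std::has_virtual_destructor<Base>::value;
      static constexpr bool kBothTriviallyDestructible =
          std::is_trivially_destructible<Base>::value &&
          std::is_trivially_destructible<T>::value;

      using ResultType =
          std::conditional_t<kHasVirtualDestructorAtBase ||
                                 kBothTriviallyDestructible,
                             Base, T>;
    };

    struct Base { virtual ~Base() = default; };
    struct Derived : Base {};
    struct PlainBase {};
    struct PlainDerived : PlainBase { ~PlainDerived() {} };  // non-trivial dtor

    static_assert(std::is_same_v<FoldingSketch<Derived, Base>::ResultType, Base>);
    static_assert(
        std::is_same_v<FoldingSketch<PlainDerived, PlainBase>::ResultType,
                       PlainDerived>);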
data/vendor/v8/include/cppgc/internal/member-storage.h
CHANGED
@@ -122,17 +122,22 @@ class V8_TRIVIAL_ABI CompressedPointer final {
   }
 
   static V8_INLINE IntegralType Compress(const void* ptr) {
-    static_assert(
-
-
+    static_assert(SentinelPointer::kSentinelValue ==
+                      1 << api_constants::kPointerCompressionShift,
+                  "The compression scheme relies on the sentinel encoded as 1 "
+                  "<< kPointerCompressionShift");
     static constexpr size_t kGigaCageMask =
         ~(api_constants::kCagedHeapReservationAlignment - 1);
+    static constexpr size_t kPointerCompressionShiftMask =
+        (1 << api_constants::kPointerCompressionShift) - 1;
 
     CPPGC_DCHECK(CageBaseGlobal::IsSet());
     const uintptr_t base = CageBaseGlobal::Get();
     CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
                  (base & kGigaCageMask) ==
                      (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
+    CPPGC_DCHECK(
+        (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);
 
 #if defined(CPPGC_2GB_CAGE)
     // Truncate the pointer.
@@ -140,8 +145,9 @@ class V8_TRIVIAL_ABI CompressedPointer final {
         static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
 #else   // !defined(CPPGC_2GB_CAGE)
     const auto uptr = reinterpret_cast<uintptr_t>(ptr);
-    // Shift the pointer
-    auto compressed = static_cast<IntegralType>(
+    // Shift the pointer and truncate.
+    auto compressed = static_cast<IntegralType>(
+        uptr >> api_constants::kPointerCompressionShift);
 #endif  // !defined(CPPGC_2GB_CAGE)
     // Normal compressed pointers must have the MSB set.
     CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
@@ -157,9 +163,10 @@ class V8_TRIVIAL_ABI CompressedPointer final {
 #if defined(CPPGC_2GB_CAGE)
     const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
 #else   // !defined(CPPGC_2GB_CAGE)
-    // Then, shift the result
+    // Then, shift the result. It's important to shift the unsigned
     // value, as otherwise it would result in undefined behavior.
-    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
+    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
+                          << api_constants::kPointerCompressionShift;
 #endif  // !defined(CPPGC_2GB_CAGE)
     return reinterpret_cast<void*>(mask & base);
   }
@@ -170,7 +177,8 @@ class V8_TRIVIAL_ABI CompressedPointer final {
       SentinelPointer::kSentinelValue;
 #else   // !defined(CPPGC_2GB_CAGE)
   static constexpr IntegralType kCompressedSentinel =
-      SentinelPointer::kSentinelValue >>
+      SentinelPointer::kSentinelValue >>
+      api_constants::kPointerCompressionShift;
 #endif  // !defined(CPPGC_2GB_CAGE)
   // All constructors initialize `value_`. Do not add a default value here as it
   // results in a non-atomic write on some builds, even when the atomic version
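Taken together, the member-storage.h hunks switch compression from "truncate to 32 bits" to "shift right by kPointerCompressionShift, then truncate", which is what lets a larger-than-4 GB cage still fit into a 32-bit slot. A minimal round-trip model of that scheme (cage placement, shift, and the ones-padded base only mirror the idea; the real code also handles nullptr, the sentinel, and atomic stores):

    #include <cstdint>

    constexpr unsigned kShift = 1;                                // compression shift
    constexpr uint64_t kCageSize = uint64_t{1} << (31 + kShift);  // 4 GB
    // Cage start aligned to the cage size and chosen so compressed values have
    // their MSB set; the stored base keeps its low bits all ones (like
    // CageBaseGlobal) so decompression is a single AND.
    constexpr uint64_t kCageStart = 3 * kCageSize;
    constexpr uint64_t kCageBase = kCageStart | (kCageSize - 1);

    constexpr uint32_t Compress(uint64_t ptr) {
      return static_cast<uint32_t>(ptr >> kShift);  // low kShift bits must be 0
    }

    constexpr uint64_t Decompress(uint32_t compressed) {
      // Sign-extend, shift back, then AND with the ones-padded base.
      const uint64_t mask =
          static_cast<uint64_t>(static_cast<int32_t>(compressed)) << kShift;
      return mask & kCageBase;
    }

    static_assert(Decompress(Compress(kCageStart + 0x1000)) == kCageStart + 0x1000);
    static_assert(Decompress(Compress(0)) == 0);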
data/vendor/v8/include/cppgc/member.h
CHANGED
@@ -597,8 +597,33 @@ using UncompressedMember = internal::BasicMember<
     T, internal::StrongMemberTag, internal::DijkstraWriteBarrierPolicy,
     internal::DefaultMemberCheckingPolicy, internal::RawPointer>;
 
+#if defined(CPPGC_POINTER_COMPRESSION)
+/**
+ * CompressedMember. Default implementation of cppgc::Member on builds with
+ * pointer compression.
+ */
+template <typename T>
+using CompressedMember = internal::BasicMember<
+    T, internal::StrongMemberTag, internal::DijkstraWriteBarrierPolicy,
+    internal::DefaultMemberCheckingPolicy, internal::CompressedPointer>;
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
+
 }  // namespace subtle
 
+namespace internal {
+
+struct Dummy;
+
+static constexpr size_t kSizeOfMember = sizeof(Member<Dummy>);
+static constexpr size_t kSizeOfUncompressedMember =
+    sizeof(subtle::UncompressedMember<Dummy>);
+#if defined(CPPGC_POINTER_COMPRESSION)
+static constexpr size_t kSizeofCompressedMember =
+    sizeof(subtle::CompressedMember<Dummy>);
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
+
+}  // namespace internal
+
 }  // namespace cppgc
 
 #endif  // INCLUDE_CPPGC_MEMBER_H_
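For embedders, Member<T> keeps picking the default representation (compressed when the build defines CPPGC_POINTER_COMPRESSION), while the subtle:: aliases force a representation explicitly. A hedged usage sketch (tracing boilerplate abbreviated; the class and fields are invented):

    #include <cppgc/garbage-collected.h>
    #include <cppgc/member.h>
    #include <cppgc/visitor.h>

    class Node final : public cppgc::GarbageCollected<Node> {
     public:
      void Trace(cppgc::Visitor* visitor) const {
        visitor->Trace(left_);
        visitor->Trace(right_);
      }

     private:
      cppgc::Member<Node> left_;                       // default representation
      cppgc::subtle::UncompressedMember<Node> right_;  // always a raw pointer slot
    };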
data/vendor/v8/include/cppgc/persistent.h
CHANGED
@@ -92,6 +92,7 @@ class BasicPersistent final : public PersistentBase,
   template <typename U, typename OtherWeaknessPolicy,
             typename OtherLocationPolicy, typename OtherCheckingPolicy,
             typename = std::enable_if_t<std::is_base_of<T, U>::value>>
+  // NOLINTNEXTLINE
   BasicPersistent(
       const BasicPersistent<U, OtherWeaknessPolicy, OtherLocationPolicy,
                             OtherCheckingPolicy>& other,
@@ -116,6 +117,7 @@ class BasicPersistent final : public PersistentBase,
             typename MemberWeaknessTag, typename MemberCheckingPolicy,
             typename MemberStorageType,
             typename = std::enable_if_t<std::is_base_of<T, U>::value>>
+  // NOLINTNEXTLINE
   BasicPersistent(const internal::BasicMember<
                       U, MemberBarrierPolicy, MemberWeaknessTag,
                       MemberCheckingPolicy, MemberStorageType>& member,
@@ -180,6 +182,8 @@ class BasicPersistent final : public PersistentBase,
   }
 
   explicit operator bool() const { return Get(); }
+  // Historically we allow implicit conversions to T*.
+  // NOLINTNEXTLINE
   operator T*() const { return Get(); }
   T* operator->() const { return Get(); }
   T& operator*() const { return *Get(); }
data/vendor/v8/include/cppgc/platform.h
CHANGED
@@ -136,8 +136,13 @@ class V8_EXPORT Platform {
  * \param page_allocator The allocator used for maintaining meta data. Must stay
  * always alive and not change between multiple calls to InitializeProcess. If
  * no allocator is provided, a default internal version will be used.
+ * \param desired_heap_size Desired amount of virtual address space to reserve
+ * for the heap, in bytes. Actual size will be clamped to minimum and maximum
+ * values based on compile-time settings and may be rounded up. If this
+ * parameter is zero, a default value will be used.
  */
-V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr
+V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr,
+                                 size_t desired_heap_size = 0);
 
 /**
  * Must be called after destroying the last used heap. Some process-global
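The new desired_heap_size parameter lets an embedder ask for a larger (or smaller) cage reservation up front. A hedged usage sketch (the 8 GB figure is arbitrary and is only honored when the compile-time limits described above allow it):

    #include <cppgc/default-platform.h>
    #include <cppgc/platform.h>

    #include <cstddef>
    #include <memory>

    int main() {
      auto platform = std::make_shared<cppgc::DefaultPlatform>();
      // Request an 8 GB reservation; cppgc clamps the value to its
      // compile-time minimum/maximum and may round it up.
      cppgc::InitializeProcess(platform->GetPageAllocator(),
                               static_cast<size_t>(8) * 1024 * 1024 * 1024);
      // ... create heaps, allocate, collect ...
      cppgc::ShutdownProcess();
      return 0;
    }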
data/vendor/v8/include/cppgc/sentinel-pointer.h
CHANGED
@@ -7,13 +7,20 @@
 
 #include <cstdint>
 
+#include "cppgc/internal/api-constants.h"
+
 namespace cppgc {
 namespace internal {
 
 // Special tag type used to denote some sentinel member. The semantics of the
 // sentinel is defined by the embedder.
 struct SentinelPointer {
+#if defined(CPPGC_POINTER_COMPRESSION)
+  static constexpr intptr_t kSentinelValue =
+      1 << api_constants::kPointerCompressionShift;
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
   static constexpr intptr_t kSentinelValue = 0b10;
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
   template <typename T>
   operator T*() const {
     return reinterpret_cast<T*>(kSentinelValue);
data/vendor/v8/include/cppgc/source-location.h
CHANGED
@@ -5,87 +5,11 @@
 #ifndef INCLUDE_CPPGC_SOURCE_LOCATION_H_
 #define INCLUDE_CPPGC_SOURCE_LOCATION_H_
 
-#include
-#include <string>
-
-#include "v8config.h"  // NOLINT(build/include_directory)
-
-#if defined(__has_builtin)
-#define CPPGC_SUPPORTS_SOURCE_LOCATION \
-  (__has_builtin(__builtin_FUNCTION) && __has_builtin(__builtin_FILE) && \
-   __has_builtin(__builtin_LINE))  // NOLINT
-#elif defined(V8_CC_GNU) && __GNUC__ >= 7
-#define CPPGC_SUPPORTS_SOURCE_LOCATION 1
-#elif defined(V8_CC_INTEL) && __ICC >= 1800
-#define CPPGC_SUPPORTS_SOURCE_LOCATION 1
-#else
-#define CPPGC_SUPPORTS_SOURCE_LOCATION 0
-#endif
+#include "v8-source-location.h"
 
 namespace cppgc {
 
-
- * Encapsulates source location information. Mimics C++20's
- * `std::source_location`.
- */
-class V8_EXPORT SourceLocation final {
- public:
-  /**
-   * Construct source location information corresponding to the location of the
-   * call site.
-   */
-#if CPPGC_SUPPORTS_SOURCE_LOCATION
-  static constexpr SourceLocation Current(
-      const char* function = __builtin_FUNCTION(),
-      const char* file = __builtin_FILE(), size_t line = __builtin_LINE()) {
-    return SourceLocation(function, file, line);
-  }
-#else
-  static constexpr SourceLocation Current() { return SourceLocation(); }
-#endif  // CPPGC_SUPPORTS_SOURCE_LOCATION
-
-  /**
-   * Constructs unspecified source location information.
-   */
-  constexpr SourceLocation() = default;
-
-  /**
-   * Returns the name of the function associated with the position represented
-   * by this object, if any.
-   *
-   * \returns the function name as cstring.
-   */
-  constexpr const char* Function() const { return function_; }
-
-  /**
-   * Returns the name of the current source file represented by this object.
-   *
-   * \returns the file name as cstring.
-   */
-  constexpr const char* FileName() const { return file_; }
-
-  /**
-   * Returns the line number represented by this object.
-   *
-   * \returns the line number.
-   */
-  constexpr size_t Line() const { return line_; }
-
-  /**
-   * Returns a human-readable string representing this object.
-   *
-   * \returns a human-readable string representing source location information.
-   */
-  std::string ToString() const;
-
- private:
-  constexpr SourceLocation(const char* function, const char* file, size_t line)
-      : function_(function), file_(file), line_(line) {}
-
-  const char* function_ = nullptr;
-  const char* file_ = nullptr;
-  size_t line_ = 0u;
-};
+using SourceLocation = v8::SourceLocation;
 
 }  // namespace cppgc
 
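Call sites are unaffected by the move: cppgc::SourceLocation is now just an alias for v8::SourceLocation, which keeps the Current()/Function()/FileName()/Line()/ToString() interface. A hedged usage sketch (the logging helper is invented):

    #include <iostream>

    #include <cppgc/source-location.h>

    // Capture the caller's location through the defaulted argument.
    void LogCreation(const cppgc::SourceLocation& loc =
                         cppgc::SourceLocation::Current()) {
      std::cout << "created at " << loc.ToString() << "\n";  // e.g. "main@file.cc:15"
    }

    int main() {
      LogCreation();
      return 0;
    }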
data/vendor/v8/include/cppgc/trace-trait.h
CHANGED
@@ -53,6 +53,14 @@ struct TraceDescriptor {
   TraceCallback callback;
 };
 
+/**
+ * Callback for getting a TraceDescriptor for a given address.
+ *
+ * \param address Possibly inner address of an object.
+ * \returns a TraceDescriptor for the provided address.
+ */
+using TraceDescriptorCallback = TraceDescriptor (*)(const void* address);
+
 namespace internal {
 
 struct V8_EXPORT TraceTraitFromInnerAddressImpl {
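TraceDescriptorCallback simply names the signature used when resolving a possibly-inner address to the descriptor of its enclosing object. A hedged sketch of a conforming callback (the trivial lookup is invented; real implementations consult heap metadata):

    #include <cppgc/trace-trait.h>

    // Pretend the address is already the object's base payload.
    cppgc::TraceDescriptor DescribeAddress(const void* address) {
      return {address, /*callback=*/nullptr};
    }

    cppgc::TraceDescriptorCallback resolver = &DescribeAddress;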
|