libv8-node 21.7.2.0-aarch64-linux-musl → 24.12.0.0-aarch64-linux-musl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/ext/libv8-node/location.rb +3 -5
- data/ext/libv8-node/paths.rb +2 -0
- data/lib/libv8/node/version.rb +7 -4
- data/lib/libv8/node.rb +2 -0
- data/lib/libv8-node.rb +2 -0
- data/vendor/v8/aarch64-linux-musl/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/allocation.h +11 -13
- data/vendor/v8/include/cppgc/default-platform.h +3 -2
- data/vendor/v8/include/cppgc/garbage-collected.h +8 -0
- data/vendor/v8/include/cppgc/heap-consistency.h +1 -1
- data/vendor/v8/include/cppgc/heap-statistics.h +2 -0
- data/vendor/v8/include/cppgc/internal/api-constants.h +2 -14
- data/vendor/v8/include/cppgc/internal/base-page-handle.h +2 -4
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +0 -4
- data/vendor/v8/include/cppgc/internal/caged-heap.h +0 -4
- data/vendor/v8/include/cppgc/internal/compiler-specific.h +9 -1
- data/vendor/v8/include/cppgc/internal/conditional-stack-allocated.h +41 -0
- data/vendor/v8/include/cppgc/internal/gc-info.h +12 -10
- data/vendor/v8/include/cppgc/internal/logging.h +3 -3
- data/vendor/v8/include/cppgc/internal/member-storage.h +69 -20
- data/vendor/v8/include/cppgc/internal/name-trait.h +5 -1
- data/vendor/v8/include/cppgc/internal/persistent-node.h +8 -3
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +48 -11
- data/vendor/v8/include/cppgc/macros.h +21 -0
- data/vendor/v8/include/cppgc/member.h +70 -36
- data/vendor/v8/include/cppgc/name-provider.h +10 -0
- data/vendor/v8/include/cppgc/platform.h +11 -0
- data/vendor/v8/include/cppgc/type-traits.h +26 -4
- data/vendor/v8/include/cppgc/visitor.h +25 -1
- data/vendor/v8/include/libplatform/libplatform-export.h +2 -2
- data/vendor/v8/include/libplatform/v8-tracing.h +0 -1
- data/vendor/v8/include/v8-array-buffer.h +149 -46
- data/vendor/v8/include/v8-callbacks.h +100 -43
- data/vendor/v8/include/v8-container.h +54 -0
- data/vendor/v8/include/v8-context.h +92 -30
- data/vendor/v8/include/v8-cppgc.h +5 -56
- data/vendor/v8/include/v8-data.h +5 -0
- data/vendor/v8/include/v8-date.h +9 -0
- data/vendor/v8/include/v8-debug.h +11 -0
- data/vendor/v8/include/v8-embedder-heap.h +8 -20
- data/vendor/v8/include/v8-embedder-state-scope.h +2 -1
- data/vendor/v8/include/v8-exception.h +87 -9
- data/vendor/v8/include/v8-external-memory-accounter.h +60 -0
- data/vendor/v8/include/v8-fast-api-calls.h +67 -223
- data/vendor/v8/include/v8-forward.h +1 -0
- data/vendor/v8/include/v8-function-callback.h +296 -75
- data/vendor/v8/include/v8-function.h +11 -3
- data/vendor/v8/include/v8-handle-base.h +52 -82
- data/vendor/v8/include/v8-initialization.h +26 -1
- data/vendor/v8/include/v8-inspector.h +26 -27
- data/vendor/v8/include/v8-internal.h +960 -230
- data/vendor/v8/include/v8-isolate.h +347 -226
- data/vendor/v8/include/v8-local-handle.h +307 -55
- data/vendor/v8/include/v8-maybe.h +2 -1
- data/vendor/v8/include/v8-memory-span.h +284 -4
- data/vendor/v8/include/v8-message.h +11 -5
- data/vendor/v8/include/v8-metrics.h +15 -0
- data/vendor/v8/include/v8-microtask-queue.h +0 -5
- data/vendor/v8/include/v8-object.h +314 -41
- data/vendor/v8/include/v8-persistent-handle.h +29 -39
- data/vendor/v8/include/v8-platform.h +135 -77
- data/vendor/v8/include/v8-primitive.h +223 -5
- data/vendor/v8/include/v8-profiler.h +51 -2
- data/vendor/v8/include/v8-promise.h +2 -2
- data/vendor/v8/include/v8-proxy.h +0 -1
- data/vendor/v8/include/v8-regexp.h +0 -1
- data/vendor/v8/include/v8-sandbox.h +173 -0
- data/vendor/v8/include/v8-script.h +125 -27
- data/vendor/v8/include/v8-snapshot.h +130 -23
- data/vendor/v8/include/v8-source-location.h +6 -1
- data/vendor/v8/include/v8-statistics.h +10 -24
- data/vendor/v8/include/v8-template.h +320 -193
- data/vendor/v8/include/v8-trace-categories.h +23 -0
- data/vendor/v8/include/v8-traced-handle.h +99 -76
- data/vendor/v8/include/v8-typed-array.h +111 -7
- data/vendor/v8/include/v8-unwinder-state.h +2 -3
- data/vendor/v8/include/v8-unwinder.h +2 -1
- data/vendor/v8/include/v8-util.h +10 -125
- data/vendor/v8/include/v8-value-serializer-version.h +3 -3
- data/vendor/v8/include/v8-value.h +113 -6
- data/vendor/v8/include/v8-version.h +3 -3
- data/vendor/v8/include/v8-wasm.h +27 -0
- data/vendor/v8/include/v8-weak-callback-info.h +20 -12
- data/vendor/v8/include/v8.h +3 -3
- data/vendor/v8/include/v8config.h +116 -53
- metadata +55 -12
- data/vendor/v8/include/cppgc/ephemeron-pair.h +0 -30
|
@@ -10,9 +10,29 @@
|
|
|
10
10
|
#include <string.h>
|
|
11
11
|
|
|
12
12
|
#include <atomic>
|
|
13
|
+
#include <iterator>
|
|
14
|
+
#include <limits>
|
|
15
|
+
#include <memory>
|
|
16
|
+
#include <optional>
|
|
13
17
|
#include <type_traits>
|
|
14
18
|
|
|
15
|
-
#include "v8config.h"
|
|
19
|
+
#include "v8config.h" // NOLINT(build/include_directory)
|
|
20
|
+
|
|
21
|
+
// TODO(pkasting): Use <compare>/spaceship unconditionally after dropping
|
|
22
|
+
// support for old libstdc++ versions.
|
|
23
|
+
#if __has_include(<version>)
|
|
24
|
+
#include <version>
|
|
25
|
+
#endif
|
|
26
|
+
#if defined(__cpp_lib_three_way_comparison) && \
|
|
27
|
+
__cpp_lib_three_way_comparison >= 201711L && \
|
|
28
|
+
defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L
|
|
29
|
+
#include <compare>
|
|
30
|
+
#include <concepts>
|
|
31
|
+
|
|
32
|
+
#define V8_HAVE_SPACESHIP_OPERATOR 1
|
|
33
|
+
#else
|
|
34
|
+
#define V8_HAVE_SPACESHIP_OPERATOR 0
|
|
35
|
+
#endif
|
|
16
36
|
|
|
17
37
|
namespace v8 {
|
|
18
38
|
|
|
@@ -23,7 +43,11 @@ class Isolate;
|
|
|
23
43
|
|
|
24
44
|
namespace internal {
|
|
25
45
|
|
|
46
|
+
class Heap;
|
|
47
|
+
class LocalHeap;
|
|
26
48
|
class Isolate;
|
|
49
|
+
class IsolateGroup;
|
|
50
|
+
class LocalIsolate;
|
|
27
51
|
|
|
28
52
|
typedef uintptr_t Address;
|
|
29
53
|
static constexpr Address kNullAddress = 0;
|
|
@@ -84,7 +108,10 @@ struct SmiTagging<4> {
|
|
|
84
108
|
// Truncate and shift down (requires >> to be sign extending).
|
|
85
109
|
return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
|
|
86
110
|
}
|
|
87
|
-
|
|
111
|
+
|
|
112
|
+
template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
|
|
113
|
+
std::is_signed_v<T>>* = nullptr>
|
|
114
|
+
V8_INLINE static constexpr bool IsValidSmi(T value) {
|
|
88
115
|
// Is value in range [kSmiMinValue, kSmiMaxValue].
|
|
89
116
|
// Use unsigned operations in order to avoid undefined behaviour in case of
|
|
90
117
|
// signed integer overflow.
|
|
@@ -93,6 +120,28 @@ struct SmiTagging<4> {
|
|
|
93
120
|
(static_cast<uintptr_t>(kSmiMaxValue) -
|
|
94
121
|
static_cast<uintptr_t>(kSmiMinValue));
|
|
95
122
|
}
|
|
123
|
+
|
|
124
|
+
template <class T,
|
|
125
|
+
typename std::enable_if_t<std::is_integral_v<T> &&
|
|
126
|
+
std::is_unsigned_v<T>>* = nullptr>
|
|
127
|
+
V8_INLINE static constexpr bool IsValidSmi(T value) {
|
|
128
|
+
static_assert(kSmiMaxValue <= std::numeric_limits<uintptr_t>::max());
|
|
129
|
+
return value <= static_cast<uintptr_t>(kSmiMaxValue);
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
// Same as the `intptr_t` version but works with int64_t on 32-bit builds
|
|
133
|
+
// without slowing down anything else.
|
|
134
|
+
V8_INLINE static constexpr bool IsValidSmi(int64_t value) {
|
|
135
|
+
return (static_cast<uint64_t>(value) -
|
|
136
|
+
static_cast<uint64_t>(kSmiMinValue)) <=
|
|
137
|
+
(static_cast<uint64_t>(kSmiMaxValue) -
|
|
138
|
+
static_cast<uint64_t>(kSmiMinValue));
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
V8_INLINE static constexpr bool IsValidSmi(uint64_t value) {
|
|
142
|
+
static_assert(kSmiMaxValue <= std::numeric_limits<uint64_t>::max());
|
|
143
|
+
return value <= static_cast<uint64_t>(kSmiMaxValue);
|
|
144
|
+
}
|
|
96
145
|
};
|
|
97
146
|
|
|
98
147
|
// Smi constants for systems where tagged pointer is a 64-bit value.
|
|
@@ -109,9 +158,20 @@ struct SmiTagging<8> {
|
|
|
109
158
|
// Shift down and throw away top 32 bits.
|
|
110
159
|
return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
|
|
111
160
|
}
|
|
112
|
-
|
|
161
|
+
|
|
162
|
+
template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
|
|
163
|
+
std::is_signed_v<T>>* = nullptr>
|
|
164
|
+
V8_INLINE static constexpr bool IsValidSmi(T value) {
|
|
113
165
|
// To be representable as a long smi, the value must be a 32-bit integer.
|
|
114
|
-
return (
|
|
166
|
+
return std::numeric_limits<int32_t>::min() <= value &&
|
|
167
|
+
value <= std::numeric_limits<int32_t>::max();
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
template <class T,
|
|
171
|
+
typename std::enable_if_t<std::is_integral_v<T> &&
|
|
172
|
+
std::is_unsigned_v<T>>* = nullptr>
|
|
173
|
+
V8_INLINE static constexpr bool IsValidSmi(T value) {
|
|
174
|
+
return value <= std::numeric_limits<int32_t>::max();
|
|
115
175
|
}
|
|
116
176
|
};
|
|
117
177
|
|
|
@@ -172,7 +232,7 @@ using SandboxedPointer_t = Address;
|
|
|
172
232
|
#ifdef V8_ENABLE_SANDBOX
|
|
173
233
|
|
|
174
234
|
// Size of the sandbox, excluding the guard regions surrounding it.
|
|
175
|
-
#
|
|
235
|
+
#if defined(V8_TARGET_OS_ANDROID)
|
|
176
236
|
// On Android, most 64-bit devices seem to be configured with only 39 bits of
|
|
177
237
|
// virtual address space for userspace. As such, limit the sandbox to 128GB (a
|
|
178
238
|
// quarter of the total available address space).
|
|
@@ -197,9 +257,12 @@ constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
|
|
|
197
257
|
constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
|
|
198
258
|
|
|
199
259
|
// Size of the guard regions surrounding the sandbox. This assumes a worst-case
|
|
200
|
-
// scenario of a 32-bit unsigned index used to access an array of 64-bit
|
|
201
|
-
//
|
|
202
|
-
|
|
260
|
+
// scenario of a 32-bit unsigned index used to access an array of 64-bit values
|
|
261
|
+
// with an additional 4GB (compressed pointer) offset. In particular, accesses
|
|
262
|
+
// to TypedArrays are effectively computed as
|
|
263
|
+
// `entry_pointer = array->base + array->offset + index * array->element_size`.
|
|
264
|
+
// See also https://crbug.com/40070746 for more details.
|
|
265
|
+
constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;
|
|
203
266
|
|
|
204
267
|
static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
|
|
205
268
|
"The size of the guard regions around the sandbox must be a "
|
|
@@ -246,15 +309,16 @@ static_assert(1ULL << (64 - kBoundedSizeShift) ==
|
|
|
246
309
|
// size allows omitting bounds checks on table accesses if the indices are
|
|
247
310
|
// guaranteed (e.g. through shifting) to be below the maximum index. This
|
|
248
311
|
// value must be a power of two.
|
|
249
|
-
constexpr size_t kExternalPointerTableReservationSize =
|
|
312
|
+
constexpr size_t kExternalPointerTableReservationSize = 256 * MB;
|
|
250
313
|
|
|
251
314
|
// The external pointer table indices stored in HeapObjects as external
|
|
252
315
|
// pointers are shifted to the left by this amount to guarantee that they are
|
|
253
|
-
// smaller than the maximum table size
|
|
254
|
-
|
|
316
|
+
// smaller than the maximum table size even after the C++ compiler multiplies
|
|
317
|
+
// them by 8 to be used as indexes into a table of 64 bit pointers.
|
|
318
|
+
constexpr uint32_t kExternalPointerIndexShift = 7;
|
|
255
319
|
#else
|
|
256
|
-
constexpr size_t kExternalPointerTableReservationSize =
|
|
257
|
-
constexpr uint32_t kExternalPointerIndexShift =
|
|
320
|
+
constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
|
|
321
|
+
constexpr uint32_t kExternalPointerIndexShift = 6;
|
|
258
322
|
#endif // V8_TARGET_OS_ANDROID
|
|
259
323
|
|
|
260
324
|
// The maximum number of entries in an external pointer table.
|
|
@@ -273,6 +337,16 @@ constexpr size_t kMaxExternalPointers = 0;
|
|
|
273
337
|
|
|
274
338
|
#endif // V8_COMPRESS_POINTERS
|
|
275
339
|
|
|
340
|
+
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
|
|
341
|
+
constexpr uint64_t kExternalPointerTagShift = 49;
|
|
342
|
+
constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
|
|
343
|
+
constexpr uint64_t kExternalPointerShiftedTagMask =
|
|
344
|
+
kExternalPointerTagMask >> kExternalPointerTagShift;
|
|
345
|
+
static_assert(kExternalPointerShiftedTagMask << kExternalPointerTagShift ==
|
|
346
|
+
kExternalPointerTagMask);
|
|
347
|
+
constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
|
|
348
|
+
constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
|
|
349
|
+
|
|
276
350
|
// A ExternalPointerHandle represents a (opaque) reference to an external
|
|
277
351
|
// pointer that can be stored inside the sandbox. A ExternalPointerHandle has
|
|
278
352
|
// meaning only in combination with an (active) Isolate as it references an
|
|
@@ -294,6 +368,124 @@ using ExternalPointer_t = Address;
|
|
|
294
368
|
constexpr ExternalPointer_t kNullExternalPointer = 0;
|
|
295
369
|
constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
|
|
296
370
|
|
|
371
|
+
// See `ExternalPointerHandle` for the main documentation. The difference to
|
|
372
|
+
// `ExternalPointerHandle` is that the handle does not represent an arbitrary
|
|
373
|
+
// external pointer but always refers to an object managed by `CppHeap`. The
|
|
374
|
+
// handles are using in combination with a dedicated table for `CppHeap`
|
|
375
|
+
// references.
|
|
376
|
+
using CppHeapPointerHandle = uint32_t;
|
|
377
|
+
|
|
378
|
+
// The actual pointer to objects located on the `CppHeap`. When pointer
|
|
379
|
+
// compression is enabled these pointers are stored as `CppHeapPointerHandle`.
|
|
380
|
+
// In non-compressed configurations the pointers are simply stored as raw
|
|
381
|
+
// pointers.
|
|
382
|
+
#ifdef V8_COMPRESS_POINTERS
|
|
383
|
+
using CppHeapPointer_t = CppHeapPointerHandle;
|
|
384
|
+
#else
|
|
385
|
+
using CppHeapPointer_t = Address;
|
|
386
|
+
#endif
|
|
387
|
+
|
|
388
|
+
constexpr CppHeapPointer_t kNullCppHeapPointer = 0;
|
|
389
|
+
constexpr CppHeapPointerHandle kNullCppHeapPointerHandle = 0;
|
|
390
|
+
|
|
391
|
+
constexpr uint64_t kCppHeapPointerMarkBit = 1ULL;
|
|
392
|
+
constexpr uint64_t kCppHeapPointerTagShift = 1;
|
|
393
|
+
constexpr uint64_t kCppHeapPointerPayloadShift = 16;
|
|
394
|
+
|
|
395
|
+
#ifdef V8_COMPRESS_POINTERS
|
|
396
|
+
// CppHeapPointers use a dedicated pointer table. These constants control the
|
|
397
|
+
// size and layout of the table. See the corresponding constants for the
|
|
398
|
+
// external pointer table for further details.
|
|
399
|
+
constexpr size_t kCppHeapPointerTableReservationSize =
|
|
400
|
+
kExternalPointerTableReservationSize;
|
|
401
|
+
constexpr uint32_t kCppHeapPointerIndexShift = kExternalPointerIndexShift;
|
|
402
|
+
|
|
403
|
+
constexpr int kCppHeapPointerTableEntrySize = 8;
|
|
404
|
+
constexpr int kCppHeapPointerTableEntrySizeLog2 = 3;
|
|
405
|
+
constexpr size_t kMaxCppHeapPointers =
|
|
406
|
+
kCppHeapPointerTableReservationSize / kCppHeapPointerTableEntrySize;
|
|
407
|
+
static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers,
|
|
408
|
+
"kCppHeapPointerTableReservationSize and "
|
|
409
|
+
"kCppHeapPointerIndexShift don't match");
|
|
410
|
+
|
|
411
|
+
#else // !V8_COMPRESS_POINTERS
|
|
412
|
+
|
|
413
|
+
// Needed for the V8.SandboxedCppHeapPointersCount histogram.
|
|
414
|
+
constexpr size_t kMaxCppHeapPointers = 0;
|
|
415
|
+
|
|
416
|
+
#endif // V8_COMPRESS_POINTERS
|
|
417
|
+
|
|
418
|
+
// Generic tag range struct to represent ranges of type tags.
|
|
419
|
+
//
|
|
420
|
+
// When referencing external objects via pointer tables, type tags are
|
|
421
|
+
// frequently necessary to guarantee type safety for the external objects. When
|
|
422
|
+
// support for subtyping is necessary, range-based type checks are used in
|
|
423
|
+
// which all subtypes of a given supertype use contiguous tags. This struct can
|
|
424
|
+
// then be used to represent such a type range.
|
|
425
|
+
//
|
|
426
|
+
// In addition, there is an option for performance tweaks: if the size of the
|
|
427
|
+
// type range corresponding to a supertype is a power of two and starts at a
|
|
428
|
+
// power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
|
|
429
|
+
// the type check to use even fewer instructions (essentially replace a AND +
|
|
430
|
+
// SUB with a single AND).
|
|
431
|
+
//
|
|
432
|
+
template <typename Tag>
|
|
433
|
+
struct TagRange {
|
|
434
|
+
static_assert(std::is_enum_v<Tag> &&
|
|
435
|
+
std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
|
|
436
|
+
"Tag parameter must be an enum with base type uint16_t");
|
|
437
|
+
|
|
438
|
+
// Construct the inclusive tag range [first, last].
|
|
439
|
+
constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}
|
|
440
|
+
|
|
441
|
+
// Construct a tag range consisting of a single tag.
|
|
442
|
+
//
|
|
443
|
+
// A single tag is always implicitly convertible to a tag range. This greatly
|
|
444
|
+
// increases readability as most of the time, the exact tag of a field is
|
|
445
|
+
// known and so no tag range needs to explicitly be created for it.
|
|
446
|
+
constexpr TagRange(Tag tag) // NOLINT(runtime/explicit)
|
|
447
|
+
: first(tag), last(tag) {}
|
|
448
|
+
|
|
449
|
+
// Construct an empty tag range.
|
|
450
|
+
constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}
|
|
451
|
+
|
|
452
|
+
// A tag range is considered empty if it only contains the null tag.
|
|
453
|
+
constexpr bool IsEmpty() const { return first == 0 && last == 0; }
|
|
454
|
+
|
|
455
|
+
constexpr size_t Size() const {
|
|
456
|
+
if (IsEmpty()) {
|
|
457
|
+
return 0;
|
|
458
|
+
} else {
|
|
459
|
+
return last - first + 1;
|
|
460
|
+
}
|
|
461
|
+
}
|
|
462
|
+
|
|
463
|
+
constexpr bool Contains(Tag tag) const {
|
|
464
|
+
// Need to perform the math with uint32_t. Otherwise, the uint16_ts would
|
|
465
|
+
// be promoted to (signed) int, allowing the compiler to (wrongly) assume
|
|
466
|
+
// that an underflow cannot happen as that would be undefined behavior.
|
|
467
|
+
return static_cast<uint32_t>(tag) - first <=
|
|
468
|
+
static_cast<uint32_t>(last) - first;
|
|
469
|
+
}
|
|
470
|
+
|
|
471
|
+
constexpr bool Contains(TagRange tag_range) const {
|
|
472
|
+
return tag_range.first >= first && tag_range.last <= last;
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
constexpr bool operator==(const TagRange other) const {
|
|
476
|
+
return first == other.first && last == other.last;
|
|
477
|
+
}
|
|
478
|
+
|
|
479
|
+
constexpr size_t hash_value() const {
|
|
480
|
+
static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
|
|
481
|
+
return (static_cast<size_t>(first) << 16) | last;
|
|
482
|
+
}
|
|
483
|
+
|
|
484
|
+
// Internally we represent tag ranges as half-open ranges [first, last).
|
|
485
|
+
const Tag first;
|
|
486
|
+
const Tag last;
|
|
487
|
+
};
|
|
488
|
+
|
|
297
489
|
//
|
|
298
490
|
// External Pointers.
|
|
299
491
|
//
|
|
@@ -301,41 +493,12 @@ constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
|
|
|
301
493
|
// pointer table and are referenced from HeapObjects through an index (a
|
|
302
494
|
// "handle"). When stored in the table, the pointers are tagged with per-type
|
|
303
495
|
// tags to prevent type confusion attacks between different external objects.
|
|
304
|
-
// Besides type information bits, these tags also contain the GC marking bit
|
|
305
|
-
// which indicates whether the pointer table entry is currently alive. When a
|
|
306
|
-
// pointer is written into the table, the tag is ORed into the top bits. When
|
|
307
|
-
// that pointer is later loaded from the table, it is ANDed with the inverse of
|
|
308
|
-
// the expected tag. If the expected and actual type differ, this will leave
|
|
309
|
-
// some of the top bits of the pointer set, rendering the pointer inaccessible.
|
|
310
|
-
// The AND operation also removes the GC marking bit from the pointer.
|
|
311
496
|
//
|
|
312
|
-
//
|
|
313
|
-
//
|
|
314
|
-
// that all
|
|
315
|
-
//
|
|
316
|
-
//
|
|
317
|
-
// the marking bit is automatically set when a pointer is written into the
|
|
318
|
-
// external pointer table (in which case it is clearly alive) and is cleared
|
|
319
|
-
// when the pointer is loaded. The exception to this is the free entry tag,
|
|
320
|
-
// which doesn't have the mark bit set, as the entry is not alive. This
|
|
321
|
-
// construction allows performing the type check and removing GC marking bits
|
|
322
|
-
// from the pointer in one efficient operation (bitwise AND). The number of
|
|
323
|
-
// available bits is limited in the following way: on x64, bits [47, 64) are
|
|
324
|
-
// generally available for tagging (userspace has 47 address bits available).
|
|
325
|
-
// On Arm64, userspace typically has a 40 or 48 bit address space. However, due
|
|
326
|
-
// to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
|
|
327
|
-
// for type checks as type-check failures would go unnoticed or collide with
|
|
328
|
-
// MTE bits. Some bits of the top byte can, however, still be used for the GC
|
|
329
|
-
// marking bit. The bits available for the type tags are therefore limited to
|
|
330
|
-
// [48, 56), i.e. (8 choose 4) = 70 different types.
|
|
331
|
-
// The following options exist to increase the number of possible types:
|
|
332
|
-
// - Using multiple ExternalPointerTables since tags can safely be reused
|
|
333
|
-
// across different tables
|
|
334
|
-
// - Using "extended" type checks, where additional type information is stored
|
|
335
|
-
// either in an adjacent pointer table entry or at the pointed-to location
|
|
336
|
-
// - Using a different tagging scheme, for example based on XOR which would
|
|
337
|
-
// allow for 2**8 different tags but require a separate operation to remove
|
|
338
|
-
// the marking bit
|
|
497
|
+
// When loading an external pointer, a range of allowed tags can be specified.
|
|
498
|
+
// This way, type hierarchies can be supported. The main requirement for that
|
|
499
|
+
// is that all (transitive) child classes of a given parent class have type ids
|
|
500
|
+
// in the same range, and that there are no unrelated types in that range. For
|
|
501
|
+
// more details about how to assign type tags to types, see the TagRange class.
|
|
339
502
|
//
|
|
340
503
|
// The external pointer sandboxing mechanism ensures that every access to an
|
|
341
504
|
// external pointer field will result in a valid pointer of the expected type
|
|
@@ -358,196 +521,285 @@ constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
|
|
|
358
521
|
// extension (MTE) which would use bits [56, 60).
|
|
359
522
|
//
|
|
360
523
|
// External pointer tables are also available even when the sandbox is off but
|
|
361
|
-
// pointer compression is on. In that case, the mechanism can be used to
|
|
524
|
+
// pointer compression is on. In that case, the mechanism can be used to ease
|
|
362
525
|
// alignment requirements as it turns unaligned 64-bit raw pointers into
|
|
363
526
|
// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
|
|
364
527
|
// for this purpose, instead of using the ExternalPointer accessors one needs to
|
|
365
528
|
// use ExternalPointerHandles directly and use them to access the pointers in an
|
|
366
529
|
// ExternalPointerTable.
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
//
|
|
375
|
-
//
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
//
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
530
|
+
//
|
|
531
|
+
// The tag is currently in practice limited to 15 bits since it needs to fit
|
|
532
|
+
// together with a marking bit into the unused parts of a pointer.
|
|
533
|
+
enum ExternalPointerTag : uint16_t {
|
|
534
|
+
kFirstExternalPointerTag = 0,
|
|
535
|
+
kExternalPointerNullTag = 0,
|
|
536
|
+
|
|
537
|
+
// When adding new tags, please ensure that the code using these tags is
|
|
538
|
+
// "substitution-safe", i.e. still operate safely if external pointers of the
|
|
539
|
+
// same type are swapped by an attacker. See comment above for more details.
|
|
540
|
+
|
|
541
|
+
// Shared external pointers are owned by the shared Isolate and stored in the
|
|
542
|
+
// shared external pointer table associated with that Isolate, where they can
|
|
543
|
+
// be accessed from multiple threads at the same time. The objects referenced
|
|
544
|
+
// in this way must therefore always be thread-safe.
|
|
545
|
+
kFirstSharedExternalPointerTag,
|
|
546
|
+
kWaiterQueueNodeTag = kFirstSharedExternalPointerTag,
|
|
547
|
+
kExternalStringResourceTag,
|
|
548
|
+
kExternalStringResourceDataTag,
|
|
549
|
+
kLastSharedExternalPointerTag = kExternalStringResourceDataTag,
|
|
550
|
+
|
|
551
|
+
// External pointers using these tags are kept in a per-Isolate external
|
|
552
|
+
// pointer table and can only be accessed when this Isolate is active.
|
|
553
|
+
kNativeContextMicrotaskQueueTag,
|
|
554
|
+
kEmbedderDataSlotPayloadTag,
|
|
555
|
+
// This tag essentially stands for a `void*` pointer in the V8 API, and it is
|
|
556
|
+
// the Embedder's responsibility to ensure type safety (against substitution)
|
|
557
|
+
// and lifetime validity of these objects.
|
|
558
|
+
kExternalObjectValueTag,
|
|
559
|
+
kFirstMaybeReadOnlyExternalPointerTag,
|
|
560
|
+
kFunctionTemplateInfoCallbackTag = kFirstMaybeReadOnlyExternalPointerTag,
|
|
561
|
+
kAccessorInfoGetterTag,
|
|
562
|
+
kAccessorInfoSetterTag,
|
|
563
|
+
kLastMaybeReadOnlyExternalPointerTag = kAccessorInfoSetterTag,
|
|
564
|
+
kWasmInternalFunctionCallTargetTag,
|
|
565
|
+
kWasmTypeInfoNativeTypeTag,
|
|
566
|
+
kWasmExportedFunctionDataSignatureTag,
|
|
567
|
+
kWasmStackMemoryTag,
|
|
568
|
+
kWasmIndirectFunctionTargetTag,
|
|
569
|
+
|
|
570
|
+
// Foreigns
|
|
571
|
+
kFirstForeignExternalPointerTag,
|
|
572
|
+
kGenericForeignTag = kFirstForeignExternalPointerTag,
|
|
573
|
+
kApiNamedPropertyQueryCallbackTag,
|
|
574
|
+
kApiNamedPropertyGetterCallbackTag,
|
|
575
|
+
kApiNamedPropertySetterCallbackTag,
|
|
576
|
+
kApiNamedPropertyDescriptorCallbackTag,
|
|
577
|
+
kApiNamedPropertyDefinerCallbackTag,
|
|
578
|
+
kApiNamedPropertyDeleterCallbackTag,
|
|
579
|
+
kApiIndexedPropertyQueryCallbackTag,
|
|
580
|
+
kApiIndexedPropertyGetterCallbackTag,
|
|
581
|
+
kApiIndexedPropertySetterCallbackTag,
|
|
582
|
+
kApiIndexedPropertyDescriptorCallbackTag,
|
|
583
|
+
kApiIndexedPropertyDefinerCallbackTag,
|
|
584
|
+
kApiIndexedPropertyDeleterCallbackTag,
|
|
585
|
+
kApiIndexedPropertyEnumeratorCallbackTag,
|
|
586
|
+
kApiAccessCheckCallbackTag,
|
|
587
|
+
kApiAbortScriptExecutionCallbackTag,
|
|
588
|
+
kSyntheticModuleTag,
|
|
589
|
+
kMicrotaskCallbackTag,
|
|
590
|
+
kMicrotaskCallbackDataTag,
|
|
591
|
+
kCFunctionTag,
|
|
592
|
+
kCFunctionInfoTag,
|
|
593
|
+
kMessageListenerTag,
|
|
594
|
+
kWaiterQueueForeignTag,
|
|
595
|
+
|
|
596
|
+
// Managed
|
|
597
|
+
kFirstManagedResourceTag,
|
|
598
|
+
kFirstManagedExternalPointerTag = kFirstManagedResourceTag,
|
|
599
|
+
kGenericManagedTag = kFirstManagedExternalPointerTag,
|
|
600
|
+
kWasmWasmStreamingTag,
|
|
601
|
+
kWasmFuncDataTag,
|
|
602
|
+
kWasmManagedDataTag,
|
|
603
|
+
kWasmNativeModuleTag,
|
|
604
|
+
kIcuBreakIteratorTag,
|
|
605
|
+
kIcuUnicodeStringTag,
|
|
606
|
+
kIcuListFormatterTag,
|
|
607
|
+
kIcuLocaleTag,
|
|
608
|
+
kIcuSimpleDateFormatTag,
|
|
609
|
+
kIcuDateIntervalFormatTag,
|
|
610
|
+
kIcuRelativeDateTimeFormatterTag,
|
|
611
|
+
kIcuLocalizedNumberFormatterTag,
|
|
612
|
+
kIcuPluralRulesTag,
|
|
613
|
+
kIcuCollatorTag,
|
|
614
|
+
kDisplayNamesInternalTag,
|
|
615
|
+
kD8WorkerTag,
|
|
616
|
+
kD8ModuleEmbedderDataTag,
|
|
617
|
+
kLastForeignExternalPointerTag = kD8ModuleEmbedderDataTag,
|
|
618
|
+
kLastManagedExternalPointerTag = kLastForeignExternalPointerTag,
|
|
619
|
+
// External resources whose lifetime is tied to their entry in the external
|
|
620
|
+
// pointer table but which are not referenced via a Managed
|
|
621
|
+
kArrayBufferExtensionTag,
|
|
622
|
+
kLastManagedResourceTag = kArrayBufferExtensionTag,
|
|
623
|
+
|
|
624
|
+
kExternalPointerZappedEntryTag = 0x7d,
|
|
625
|
+
kExternalPointerEvacuationEntryTag = 0x7e,
|
|
626
|
+
kExternalPointerFreeEntryTag = 0x7f,
|
|
627
|
+
// The tags are limited to 7 bits, so the last tag is 0x7f.
|
|
628
|
+
kLastExternalPointerTag = 0x7f,
|
|
453
629
|
};
|
|
454
630
|
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
631
|
+
using ExternalPointerTagRange = TagRange<ExternalPointerTag>;
|
|
632
|
+
|
|
633
|
+
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(
|
|
634
|
+
kFirstExternalPointerTag, kLastExternalPointerTag);
|
|
635
|
+
constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(
|
|
636
|
+
kFirstSharedExternalPointerTag, kLastSharedExternalPointerTag);
|
|
637
|
+
constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(
|
|
638
|
+
kFirstForeignExternalPointerTag, kLastForeignExternalPointerTag);
|
|
639
|
+
constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(
|
|
640
|
+
kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag);
|
|
641
|
+
constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(
|
|
642
|
+
kFirstMaybeReadOnlyExternalPointerTag,
|
|
643
|
+
kLastMaybeReadOnlyExternalPointerTag);
|
|
644
|
+
constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(
|
|
645
|
+
kFirstManagedResourceTag, kLastManagedResourceTag);
|
|
460
646
|
|
|
461
647
|
// True if the external pointer must be accessed from the shared isolate's
|
|
462
648
|
// external pointer table.
|
|
463
649
|
V8_INLINE static constexpr bool IsSharedExternalPointerType(
|
|
464
|
-
|
|
465
|
-
return
|
|
650
|
+
ExternalPointerTagRange tag_range) {
|
|
651
|
+
return kAnySharedExternalPointerTagRange.Contains(tag_range);
|
|
466
652
|
}
|
|
467
653
|
|
|
468
|
-
//
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
|
|
654
|
+
// True if the external pointer may live in a read-only object, in which case
|
|
655
|
+
// the table entry will be in the shared read-only segment of the external
|
|
656
|
+
// pointer table.
|
|
657
|
+
V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
|
|
658
|
+
ExternalPointerTagRange tag_range) {
|
|
659
|
+
return kAnyMaybeReadOnlyExternalPointerTagRange.Contains(tag_range);
|
|
660
|
+
}
|
|
476
661
|
|
|
477
|
-
|
|
478
|
-
|
|
662
|
+
// True if the external pointer references an external object whose lifetime is
|
|
663
|
+
// tied to the entry in the external pointer table.
|
|
664
|
+
// In this case, the entry in the ExternalPointerTable always points to an
|
|
665
|
+
// object derived from ExternalPointerTable::ManagedResource.
|
|
666
|
+
V8_INLINE static constexpr bool IsManagedExternalPointerType(
|
|
667
|
+
ExternalPointerTagRange tag_range) {
|
|
668
|
+
return kAnyManagedResourceExternalPointerTag.Contains(tag_range);
|
|
669
|
+
}
|
|
479
670
|
|
|
480
|
-
|
|
481
|
-
|
|
671
|
+
// When an external poiner field can contain the null external pointer handle,
|
|
672
|
+
// the type checking mechanism needs to also check for null.
|
|
673
|
+
// TODO(saelo): this is mostly a temporary workaround to introduce range-based
|
|
674
|
+
// type checks. In the future, we should either (a) change the type tagging
|
|
675
|
+
// scheme so that null always passes or (b) (more likely) introduce dedicated
|
|
676
|
+
// null entries for those tags that need them (similar to other well-known
|
|
677
|
+
// empty value constants such as the empty fixed array).
|
|
678
|
+
V8_INLINE static constexpr bool ExternalPointerCanBeEmpty(
|
|
679
|
+
ExternalPointerTagRange tag_range) {
|
|
680
|
+
return tag_range.Contains(kArrayBufferExtensionTag) ||
|
|
681
|
+
tag_range.Contains(kEmbedderDataSlotPayloadTag);
|
|
682
|
+
}
|
|
482
683
|
|
|
483
|
-
//
|
|
484
684
|
// Indirect Pointers.
|
|
485
685
|
//
|
|
486
686
|
// When the sandbox is enabled, indirect pointers are used to reference
|
|
487
|
-
// HeapObjects that live outside of the sandbox (but are still managed
|
|
488
|
-
//
|
|
489
|
-
// object A will contain a IndirectPointerHandle, i.e. a shifted
|
|
490
|
-
// which identifies an entry in a pointer table (
|
|
491
|
-
//
|
|
492
|
-
//
|
|
493
|
-
//
|
|
494
|
-
//
|
|
495
|
-
//
|
|
496
|
-
//
|
|
497
|
-
// pointers
|
|
498
|
-
// the pointer table entry would probably also contain the type of the target
|
|
499
|
-
// object (e.g. by XORing the instance type into the top bits of the pointer).
|
|
687
|
+
// HeapObjects that live outside of the sandbox (but are still managed by V8's
|
|
688
|
+
// garbage collector). When object A references an object B through an indirect
|
|
689
|
+
// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
|
|
690
|
+
// 32-bit index, which identifies an entry in a pointer table (either the
|
|
691
|
+
// trusted pointer table for TrustedObjects, or the code pointer table if it is
|
|
692
|
+
// a Code object). This table entry then contains the actual pointer to object
|
|
693
|
+
// B. Further, object B owns this pointer table entry, and it is responsible
|
|
694
|
+
// for updating the "self-pointer" in the entry when it is relocated in memory.
|
|
695
|
+
// This way, in contrast to "normal" pointers, indirect pointers never need to
|
|
696
|
+
// be tracked by the GC (i.e. there is no remembered set for them).
|
|
697
|
+
// These pointers do not exist when the sandbox is disabled.
|
|
500
698
|
|
|
501
699
|
// An IndirectPointerHandle represents a 32-bit index into a pointer table.
|
|
502
700
|
using IndirectPointerHandle = uint32_t;
|
|
503
701
|
|
|
504
|
-
// The indirect pointer handles are stores shifted to the left by this amount
|
|
505
|
-
// to guarantee that they are smaller than the maximum table size.
|
|
506
|
-
constexpr uint32_t kIndirectPointerHandleShift = 6;
|
|
507
|
-
|
|
508
702
|
// A null handle always references an entry that contains nullptr.
|
|
509
703
|
constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;
|
|
510
704
|
|
|
511
|
-
//
|
|
512
|
-
//
|
|
513
|
-
//
|
|
514
|
-
//
|
|
515
|
-
|
|
705
|
+
// When the sandbox is enabled, indirect pointers are used to implement:
|
|
706
|
+
// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
|
|
707
|
+
// and referencing a TrustedObject in one of the trusted heap spaces.
|
|
708
|
+
// - CodePointers, an indirect pointer using the code pointer table (CPT) and
|
|
709
|
+
// referencing a Code object together with its instruction stream.
|
|
710
|
+
|
|
711
|
+
//
|
|
712
|
+
// Trusted Pointers.
|
|
713
|
+
//
|
|
714
|
+
// A pointer to a TrustedObject.
|
|
715
|
+
// When the sandbox is enabled, these are indirect pointers using the trusted
|
|
716
|
+
// pointer table (TPT). They are used to reference trusted objects (located in
|
|
717
|
+
// one of V8's trusted heap spaces, outside of the sandbox) from inside the
|
|
718
|
+
// sandbox in a memory-safe way. When the sandbox is disabled, these are
|
|
719
|
+
// regular tagged pointers.
|
|
720
|
+
using TrustedPointerHandle = IndirectPointerHandle;
|
|
721
|
+
|
|
722
|
+
// The size of the virtual memory reservation for the trusted pointer table.
|
|
723
|
+
// As with the external pointer table, a maximum table size in combination with
|
|
724
|
+
// shifted indices allows omitting bounds checks.
|
|
725
|
+
constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;
|
|
726
|
+
|
|
727
|
+
// The trusted pointer handles are stored shifted to the left by this amount
|
|
728
|
+
// to guarantee that they are smaller than the maximum table size.
|
|
729
|
+
constexpr uint32_t kTrustedPointerHandleShift = 9;
|
|
730
|
+
|
|
731
|
+
// A null handle always references an entry that contains nullptr.
|
|
732
|
+
constexpr TrustedPointerHandle kNullTrustedPointerHandle =
|
|
733
|
+
kNullIndirectPointerHandle;
|
|
734
|
+
|
|
735
|
+
// The maximum number of entries in an trusted pointer table.
|
|
736
|
+
constexpr int kTrustedPointerTableEntrySize = 8;
|
|
737
|
+
constexpr int kTrustedPointerTableEntrySizeLog2 = 3;
|
|
738
|
+
constexpr size_t kMaxTrustedPointers =
|
|
739
|
+
kTrustedPointerTableReservationSize / kTrustedPointerTableEntrySize;
|
|
740
|
+
static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
|
|
741
|
+
"kTrustedPointerTableReservationSize and "
|
|
742
|
+
"kTrustedPointerHandleShift don't match");
|
|
516
743
|
|
|
517
744
|
//
|
|
518
745
|
// Code Pointers.
|
|
519
746
|
//
|
|
520
|
-
//
|
|
521
|
-
//
|
|
522
|
-
//
|
|
523
|
-
//
|
|
524
|
-
//
|
|
525
|
-
//
|
|
526
|
-
//
|
|
527
|
-
// object and to directly load
|
|
747
|
+
// A pointer to a Code object.
|
|
748
|
+
// Essentially a specialized version of a trusted pointer that (when the
|
|
749
|
+
// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
|
|
750
|
+
// Each entry in the CPT contains both a pointer to a Code object as well as a
|
|
751
|
+
// pointer to the Code's entrypoint. This allows calling/jumping into Code with
|
|
752
|
+
// one fewer memory access (compared to the case where the entrypoint pointer
|
|
753
|
+
// first needs to be loaded from the Code object). As such, a CodePointerHandle
|
|
754
|
+
// can be used both to obtain the referenced Code object and to directly load
|
|
755
|
+
// its entrypoint.
|
|
756
|
+
//
|
|
757
|
+
// When the sandbox is disabled, these are regular tagged pointers.
|
|
528
758
|
using CodePointerHandle = IndirectPointerHandle;
|
|
529
|
-
constexpr uint32_t kCodePointerHandleShift = kIndirectPointerHandleShift;
|
|
530
|
-
constexpr CodePointerHandle kNullCodePointerHandle = 0;
|
|
531
759
|
|
|
532
|
-
// The size of the virtual memory reservation for code pointer table.
|
|
533
|
-
//
|
|
534
|
-
//
|
|
535
|
-
|
|
536
|
-
// value must be a power of two.
|
|
537
|
-
constexpr size_t kCodePointerTableReservationSize = 1 * GB;
|
|
760
|
+
// The size of the virtual memory reservation for the code pointer table.
|
|
761
|
+
// As with the other tables, a maximum table size in combination with shifted
|
|
762
|
+
// indices allows omitting bounds checks.
|
|
763
|
+
constexpr size_t kCodePointerTableReservationSize = 128 * MB;
|
|
538
764
|
|
|
539
|
-
//
|
|
765
|
+
// Code pointer handles are shifted by a different amount than indirect pointer
|
|
766
|
+
// handles as the tables have a different maximum size.
|
|
767
|
+
constexpr uint32_t kCodePointerHandleShift = 9;
|
|
768
|
+
|
|
769
|
+
// A null handle always references an entry that contains nullptr.
|
|
770
|
+
constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle;
|
|
771
|
+
|
|
772
|
+
// It can sometimes be necessary to distinguish a code pointer handle from a
|
|
773
|
+
// trusted pointer handle. A typical example would be a union trusted pointer
|
|
774
|
+
// field that can refer to both Code objects and other trusted objects. To
|
|
775
|
+
// support these use-cases, we use a simple marking scheme where some of the
|
|
776
|
+
// low bits of a code pointer handle are set, while they will be unset on a
|
|
777
|
+
// trusted pointer handle. This way, the correct table to resolve the handle
|
|
778
|
+
// can be determined even in the absence of a type tag.
|
|
779
|
+
constexpr uint32_t kCodePointerHandleMarker = 0x1;
|
|
780
|
+
static_assert(kCodePointerHandleShift > 0);
|
|
781
|
+
static_assert(kTrustedPointerHandleShift > 0);
|
|
782
|
+
|
|
783
|
+
// The maximum number of entries in a code pointer table.
|
|
540
784
|
constexpr int kCodePointerTableEntrySize = 16;
|
|
541
785
|
constexpr int kCodePointerTableEntrySizeLog2 = 4;
|
|
542
786
|
constexpr size_t kMaxCodePointers =
|
|
543
787
|
kCodePointerTableReservationSize / kCodePointerTableEntrySize;
|
|
544
788
|
static_assert(
|
|
545
|
-
(1 << (32 -
|
|
789
|
+
(1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
|
|
546
790
|
"kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
|
|
547
791
|
|
|
548
792
|
constexpr int kCodePointerTableEntryEntrypointOffset = 0;
|
|
549
793
|
constexpr int kCodePointerTableEntryCodeObjectOffset = 8;
|
|
550
794
|
|
|
795
|
+
// Constants that can be used to mark places that should be modified once
|
|
796
|
+
// certain types of objects are moved out of the sandbox and into trusted space.
|
|
797
|
+
constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace = true;
|
|
798
|
+
constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace = false;
|
|
799
|
+
constexpr bool kAllCodeObjectsLiveInTrustedSpace =
|
|
800
|
+
kRuntimeGeneratedCodeObjectsLiveInTrustedSpace &&
|
|
801
|
+
kBuiltinCodeObjectsLiveInTrustedSpace;
|
|
802
|
+
|
|
551
803
|
// {obj} must be the raw tagged pointer representation of a HeapObject
|
|
552
804
|
// that's guaranteed to never be in ReadOnlySpace.
|
|
553
805
|
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
|
|
@@ -579,6 +831,13 @@ class Internals {
|
|
|
579
831
|
|
|
580
832
|
static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
|
|
581
833
|
static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
|
|
834
|
+
#ifdef V8_COMPRESS_POINTERS
|
|
835
|
+
static const int kJSAPIObjectWithEmbedderSlotsHeaderSize =
|
|
836
|
+
kJSObjectHeaderSize + kApiInt32Size;
|
|
837
|
+
#else // !V8_COMPRESS_POINTERS
|
|
838
|
+
static const int kJSAPIObjectWithEmbedderSlotsHeaderSize =
|
|
839
|
+
kJSObjectHeaderSize + kApiTaggedSize;
|
|
840
|
+
#endif // !V8_COMPRESS_POINTERS
|
|
582
841
|
static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
|
|
583
842
|
static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
|
|
584
843
|
static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
|
|
@@ -595,6 +854,10 @@ class Internals {
|
|
|
595
854
|
|
|
596
855
|
static const uint32_t kNumIsolateDataSlots = 4;
|
|
597
856
|
static const int kStackGuardSize = 8 * kApiSystemPointerSize;
|
|
857
|
+
static const int kNumberOfBooleanFlags = 6;
|
|
858
|
+
static const int kErrorMessageParamSize = 1;
|
|
859
|
+
static const int kTablesAlignmentPaddingSize = 1;
|
|
860
|
+
static const int kRegExpStaticResultOffsetsVectorSize = kApiSystemPointerSize;
|
|
598
861
|
static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
|
|
599
862
|
static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
|
|
600
863
|
static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
|
|
@@ -602,9 +865,11 @@ class Internals {
|
|
|
602
865
|
static const int kHandleScopeDataSize =
|
|
603
866
|
2 * kApiSystemPointerSize + 2 * kApiInt32Size;
|
|
604
867
|
|
|
605
|
-
// ExternalPointerTable layout guarantees.
|
|
868
|
+
// ExternalPointerTable and TrustedPointerTable layout guarantees.
|
|
606
869
|
static const int kExternalPointerTableBasePointerOffset = 0;
|
|
607
870
|
static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize;
|
|
871
|
+
static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize;
|
|
872
|
+
static const int kTrustedPointerTableBasePointerOffset = 0;
|
|
608
873
|
|
|
609
874
|
// IsolateData layout guarantees.
|
|
610
875
|
static const int kIsolateCageBaseOffset = 0;
|
|
@@ -612,20 +877,28 @@ class Internals {
|
|
|
612
877
|
kIsolateCageBaseOffset + kApiSystemPointerSize;
|
|
613
878
|
static const int kVariousBooleanFlagsOffset =
|
|
614
879
|
kIsolateStackGuardOffset + kStackGuardSize;
|
|
880
|
+
static const int kErrorMessageParamOffset =
|
|
881
|
+
kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
|
|
615
882
|
static const int kBuiltinTier0EntryTableOffset =
|
|
616
|
-
|
|
883
|
+
kErrorMessageParamOffset + kErrorMessageParamSize +
|
|
884
|
+
kTablesAlignmentPaddingSize + kRegExpStaticResultOffsetsVectorSize;
|
|
617
885
|
static const int kBuiltinTier0TableOffset =
|
|
618
886
|
kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
|
|
619
887
|
static const int kNewAllocationInfoOffset =
|
|
620
888
|
kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
|
|
621
889
|
static const int kOldAllocationInfoOffset =
|
|
622
890
|
kNewAllocationInfoOffset + kLinearAllocationAreaSize;
|
|
623
|
-
|
|
624
|
-
|
|
891
|
+
|
|
892
|
+
static const int kFastCCallAlignmentPaddingSize =
|
|
893
|
+
kApiSystemPointerSize == 8 ? 5 * kApiSystemPointerSize
|
|
894
|
+
: 1 * kApiSystemPointerSize;
|
|
625
895
|
static const int kIsolateFastCCallCallerPcOffset =
|
|
626
|
-
|
|
627
|
-
|
|
896
|
+
kOldAllocationInfoOffset + kLinearAllocationAreaSize +
|
|
897
|
+
kFastCCallAlignmentPaddingSize;
|
|
898
|
+
static const int kIsolateFastCCallCallerFpOffset =
|
|
628
899
|
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
|
|
900
|
+
static const int kIsolateFastApiCallTargetOffset =
|
|
901
|
+
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
|
|
629
902
|
static const int kIsolateLongTaskStatsCounterOffset =
|
|
630
903
|
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
|
|
631
904
|
static const int kIsolateThreadLocalTopOffset =
|
|
@@ -639,34 +912,65 @@ class Internals {
|
|
|
639
912
|
kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
|
|
640
913
|
static const int kIsolateSharedExternalPointerTableAddressOffset =
|
|
641
914
|
kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
|
|
642
|
-
static const int
|
|
915
|
+
static const int kIsolateCppHeapPointerTableOffset =
|
|
643
916
|
kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
|
|
917
|
+
#ifdef V8_ENABLE_SANDBOX
|
|
918
|
+
static const int kIsolateTrustedCageBaseOffset =
|
|
919
|
+
kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
|
|
920
|
+
static const int kIsolateTrustedPointerTableOffset =
|
|
921
|
+
kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
|
|
922
|
+
static const int kIsolateSharedTrustedPointerTableAddressOffset =
|
|
923
|
+
kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
|
|
924
|
+
static const int kIsolateTrustedPointerPublishingScopeOffset =
|
|
925
|
+
kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize;
|
|
926
|
+
static const int kIsolateCodePointerTableBaseAddressOffset =
|
|
927
|
+
kIsolateTrustedPointerPublishingScopeOffset + kApiSystemPointerSize;
|
|
928
|
+
static const int kIsolateApiCallbackThunkArgumentOffset =
|
|
929
|
+
kIsolateCodePointerTableBaseAddressOffset + kApiSystemPointerSize;
|
|
930
|
+
#else
|
|
931
|
+
static const int kIsolateApiCallbackThunkArgumentOffset =
|
|
932
|
+
kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
|
|
933
|
+
#endif // V8_ENABLE_SANDBOX
|
|
644
934
|
#else
|
|
645
935
|
static const int kIsolateApiCallbackThunkArgumentOffset =
|
|
646
936
|
kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
|
|
647
|
-
#endif
|
|
648
|
-
static const int
|
|
937
|
+
#endif // V8_COMPRESS_POINTERS
|
|
938
|
+
static const int kIsolateRegexpExecVectorArgumentOffset =
|
|
649
939
|
kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
|
|
940
|
+
static const int kContinuationPreservedEmbedderDataOffset =
|
|
941
|
+
kIsolateRegexpExecVectorArgumentOffset + kApiSystemPointerSize;
|
|
942
|
+
static const int kIsolateRootsOffset =
|
|
943
|
+
kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize;
|
|
944
|
+
|
|
945
|
+
// Assert scopes
|
|
946
|
+
static const int kDisallowGarbageCollectionAlign = alignof(uint32_t);
|
|
947
|
+
static const int kDisallowGarbageCollectionSize = sizeof(uint32_t);
|
|
650
948
|
|
|
651
949
|
#if V8_STATIC_ROOTS_BOOL
|
|
652
950
|
|
|
653
|
-
// These constants
|
|
951
|
+
// These constants are copied from static-roots.h and guarded by static asserts.
|
|
654
952
|
#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
|
|
655
|
-
V(UndefinedValue)
|
|
656
|
-
V(NullValue)
|
|
657
|
-
V(TrueValue)
|
|
658
|
-
V(FalseValue)
|
|
659
|
-
V(EmptyString)
|
|
660
|
-
V(TheHoleValue)
|
|
953
|
+
V(UndefinedValue, 0x11) \
|
|
954
|
+
V(NullValue, 0x2d) \
|
|
955
|
+
V(TrueValue, 0x71) \
|
|
956
|
+
V(FalseValue, 0x55) \
|
|
957
|
+
V(EmptyString, 0x49) \
|
|
958
|
+
V(TheHoleValue, 0x761)
|
|
661
959
|
|
|
662
960
|
using Tagged_t = uint32_t;
|
|
663
961
|
struct StaticReadOnlyRoot {
|
|
664
|
-
#define DEF_ROOT(name)
|
|
962
|
+
#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
|
|
665
963
|
EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
|
|
666
964
|
#undef DEF_ROOT
|
|
667
965
|
|
|
668
|
-
|
|
669
|
-
|
|
966
|
+
// Use 0 for kStringMapLowerBound since string maps are the first maps.
|
|
967
|
+
static constexpr Tagged_t kStringMapLowerBound = 0;
|
|
968
|
+
static constexpr Tagged_t kStringMapUpperBound = 0x425;
|
|
969
|
+
|
|
970
|
+
#define PLUSONE(...) +1
|
|
971
|
+
static constexpr size_t kNumberOfExportedStaticRoots =
|
|
972
|
+
2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
|
|
973
|
+
#undef PLUSONE
|
|
670
974
|
};
|
|
671
975
|
|
|
672
976
|
#endif // V8_STATIC_ROOTS_BOOL
|
|
@@ -683,8 +987,6 @@ class Internals {
|
|
|
683
987
|
static const int kNodeStateMask = 0x3;
|
|
684
988
|
static const int kNodeStateIsWeakValue = 2;
|
|
685
989
|
|
|
686
|
-
static const int kTracedNodeClassIdOffset = kApiSystemPointerSize;
|
|
687
|
-
|
|
688
990
|
static const int kFirstNonstringType = 0x80;
|
|
689
991
|
static const int kOddballType = 0x83;
|
|
690
992
|
static const int kForeignType = 0xcc;
|
|
@@ -692,19 +994,24 @@ class Internals {
|
|
|
692
994
|
static const int kJSObjectType = 0x421;
|
|
693
995
|
static const int kFirstJSApiObjectType = 0x422;
|
|
694
996
|
static const int kLastJSApiObjectType = 0x80A;
|
|
997
|
+
// Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
|
|
998
|
+
// of JSApiObject instance type values that an embedder can use.
|
|
999
|
+
static const int kFirstEmbedderJSApiObjectType = 0;
|
|
1000
|
+
static const int kLastEmbedderJSApiObjectType =
|
|
1001
|
+
kLastJSApiObjectType - kFirstJSApiObjectType;
|
|
695
1002
|
|
|
696
|
-
static const int kUndefinedOddballKind =
|
|
1003
|
+
static const int kUndefinedOddballKind = 4;
|
|
697
1004
|
static const int kNullOddballKind = 3;
|
|
698
1005
|
|
|
699
1006
|
// Constants used by PropertyCallbackInfo to check if we should throw when an
|
|
700
1007
|
// error occurs.
|
|
701
|
-
static const int
|
|
702
|
-
static const int
|
|
1008
|
+
static const int kDontThrow = 0;
|
|
1009
|
+
static const int kThrowOnError = 1;
|
|
703
1010
|
static const int kInferShouldThrowMode = 2;
|
|
704
1011
|
|
|
705
1012
|
// Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
|
|
706
1013
|
// incremental GC once the external memory reaches this limit.
|
|
707
|
-
static constexpr
|
|
1014
|
+
static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;
|
|
708
1015
|
|
|
709
1016
|
#ifdef V8_MAP_PACKING
|
|
710
1017
|
static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
|
|
@@ -732,14 +1039,36 @@ class Internals {
|
|
|
732
1039
|
return PlatformSmiTagging::SmiToInt(value);
|
|
733
1040
|
}
|
|
734
1041
|
|
|
1042
|
+
V8_INLINE static constexpr Address AddressToSmi(Address value) {
|
|
1043
|
+
return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) |
|
|
1044
|
+
kSmiTag;
|
|
1045
|
+
}
|
|
1046
|
+
|
|
735
1047
|
V8_INLINE static constexpr Address IntToSmi(int value) {
|
|
736
|
-
return
|
|
1048
|
+
return AddressToSmi(static_cast<Address>(value));
|
|
737
1049
|
}
|
|
738
1050
|
|
|
739
|
-
|
|
1051
|
+
template <typename T,
|
|
1052
|
+
typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
|
|
1053
|
+
V8_INLINE static constexpr Address IntegralToSmi(T value) {
|
|
1054
|
+
return AddressToSmi(static_cast<Address>(value));
|
|
1055
|
+
}
|
|
1056
|
+
|
|
1057
|
+
template <typename T,
|
|
1058
|
+
typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
|
|
1059
|
+
V8_INLINE static constexpr bool IsValidSmi(T value) {
|
|
740
1060
|
return PlatformSmiTagging::IsValidSmi(value);
|
|
741
1061
|
}
|
|
742
1062
|
|
|
1063
|
+
template <typename T,
|
|
1064
|
+
typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
|
|
1065
|
+
static constexpr std::optional<Address> TryIntegralToSmi(T value) {
|
|
1066
|
+
if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) {
|
|
1067
|
+
return {AddressToSmi(static_cast<Address>(value))};
|
|
1068
|
+
}
|
|
1069
|
+
return {};
|
|
1070
|
+
}
|
|
1071
|
+
|
|
743
1072
|
#if V8_STATIC_ROOTS_BOOL
|
|
744
1073
|
V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
|
|
745
1074
|
return static_cast<Tagged_t>(obj) == constant;
|
|
@@ -763,6 +1092,15 @@ class Internals {
|
|
|
763
1092
|
return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
|
|
764
1093
|
}
|
|
765
1094
|
|
|
1095
|
+
V8_INLINE static Address LoadMap(Address obj) {
|
|
1096
|
+
if (!HasHeapObjectTag(obj)) return kNullAddress;
|
|
1097
|
+
Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
|
|
1098
|
+
#ifdef V8_MAP_PACKING
|
|
1099
|
+
map = UnpackMapWord(map);
|
|
1100
|
+
#endif
|
|
1101
|
+
return map;
|
|
1102
|
+
}
|
|
1103
|
+
|
|
766
1104
|
V8_INLINE static int GetOddballKind(Address obj) {
|
|
767
1105
|
return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
|
|
768
1106
|
}
|
|
@@ -836,15 +1174,15 @@ class Internals {
|
|
|
836
1174
|
Address base = *reinterpret_cast<Address*>(
|
|
837
1175
|
reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
|
|
838
1176
|
switch (index) {
|
|
839
|
-
#define DECOMPRESS_ROOT(name) \
|
|
840
|
-
case k##name##RootIndex:
|
|
1177
|
+
#define DECOMPRESS_ROOT(name, ...) \
|
|
1178
|
+
case k##name##RootIndex: \
|
|
841
1179
|
return base + StaticReadOnlyRoot::k##name;
|
|
842
1180
|
EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
|
|
843
1181
|
#undef DECOMPRESS_ROOT
|
|
1182
|
+
#undef EXPORTED_STATIC_ROOTS_PTR_LIST
|
|
844
1183
|
default:
|
|
845
1184
|
break;
|
|
846
1185
|
}
|
|
847
|
-
#undef EXPORTED_STATIC_ROOTS_PTR_LIST
|
|
848
1186
|
#endif // V8_STATIC_ROOTS_BOOL
|
|
849
1187
|
return *GetRootSlot(isolate, index);
|
|
850
1188
|
}
|
|
@@ -915,15 +1253,15 @@ class Internals {
|
|
|
915
1253
|
#endif
|
|
916
1254
|
}
|
|
917
1255
|
|
|
918
|
-
template <
|
|
1256
|
+
template <ExternalPointerTagRange tag_range>
|
|
919
1257
|
V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
|
|
920
1258
|
Address heap_object_ptr,
|
|
921
1259
|
int offset) {
|
|
922
1260
|
#ifdef V8_ENABLE_SANDBOX
|
|
923
|
-
static_assert(
|
|
924
|
-
// See src/sandbox/external-pointer-table
|
|
1261
|
+
static_assert(!tag_range.IsEmpty());
|
|
1262
|
+
// See src/sandbox/external-pointer-table.h. Logic duplicated here so
|
|
925
1263
|
// it can be inlined and doesn't require an additional call.
|
|
926
|
-
Address* table = IsSharedExternalPointerType(
|
|
1264
|
+
Address* table = IsSharedExternalPointerType(tag_range)
|
|
927
1265
|
? GetSharedExternalPointerTableBase(isolate)
|
|
928
1266
|
: GetExternalPointerTableBase(isolate);
|
|
929
1267
|
internal::ExternalPointerHandle handle =
|
|
@@ -932,7 +1270,14 @@ class Internals {
|
|
|
932
1270
|
std::atomic<Address>* ptr =
|
|
933
1271
|
reinterpret_cast<std::atomic<Address>*>(&table[index]);
|
|
934
1272
|
Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
|
|
935
|
-
|
|
1273
|
+
ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
|
|
1274
|
+
(entry & kExternalPointerTagMask) >> kExternalPointerTagShift);
|
|
1275
|
+
if (V8_LIKELY(tag_range.Contains(actual_tag))) {
|
|
1276
|
+
return entry & kExternalPointerPayloadMask;
|
|
1277
|
+
} else {
|
|
1278
|
+
return 0;
|
|
1279
|
+
}
|
|
1280
|
+
return entry;
|
|
936
1281
|
#else
|
|
937
1282
|
return ReadRawField<Address>(heap_object_ptr, offset);
|
|
938
1283
|
#endif // V8_ENABLE_SANDBOX
|
|
@@ -943,6 +1288,10 @@ class Internals {
|
|
|
943
1288
|
return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
|
|
944
1289
|
}
|
|
945
1290
|
|
|
1291
|
+
V8_INLINE static uint32_t CompressTagged(Address value) {
|
|
1292
|
+
return static_cast<uint32_t>(value);
|
|
1293
|
+
}
|
|
1294
|
+
|
|
946
1295
|
V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
|
|
947
1296
|
uint32_t value) {
|
|
948
1297
|
Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
|
|
@@ -982,7 +1331,388 @@ class BackingStoreBase {};
|
|
|
982
1331
|
|
|
983
1332
|
// The maximum value in enum GarbageCollectionReason, defined in heap.h.
|
|
984
1333
|
// This is needed for histograms sampling garbage collection reasons.
|
|
985
|
-
constexpr int kGarbageCollectionReasonMaxValue =
|
|
1334
|
+
constexpr int kGarbageCollectionReasonMaxValue = 29;
|
|
1335
|
+
|
|
1336
|
+
// Base class for the address block allocator compatible with standard
|
|
1337
|
+
// containers, which registers its allocated range as strong roots.
|
|
1338
|
+
class V8_EXPORT StrongRootAllocatorBase {
|
|
1339
|
+
public:
|
|
1340
|
+
Heap* heap() const { return heap_; }
|
|
1341
|
+
|
|
1342
|
+
friend bool operator==(const StrongRootAllocatorBase& a,
|
|
1343
|
+
const StrongRootAllocatorBase& b) {
|
|
1344
|
+
// TODO(pkasting): Replace this body with `= default` after dropping support
|
|
1345
|
+
// for old gcc versions.
|
|
1346
|
+
return a.heap_ == b.heap_;
|
|
1347
|
+
}
|
|
1348
|
+
|
|
1349
|
+
protected:
|
|
1350
|
+
explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
|
|
1351
|
+
explicit StrongRootAllocatorBase(LocalHeap* heap);
|
|
1352
|
+
explicit StrongRootAllocatorBase(Isolate* isolate);
|
|
1353
|
+
explicit StrongRootAllocatorBase(v8::Isolate* isolate);
|
|
1354
|
+
explicit StrongRootAllocatorBase(LocalIsolate* isolate);
|
|
1355
|
+
|
|
1356
|
+
// Allocate/deallocate a range of n elements of type internal::Address.
|
|
1357
|
+
Address* allocate_impl(size_t n);
|
|
1358
|
+
void deallocate_impl(Address* p, size_t n) noexcept;
|
|
1359
|
+
|
|
1360
|
+
private:
|
|
1361
|
+
Heap* heap_;
|
|
1362
|
+
};
|
|
1363
|
+
|
|
1364
|
+
// The general version of this template behaves just as std::allocator, with
|
|
1365
|
+
// the exception that the constructor takes the isolate as parameter. Only
|
|
1366
|
+
// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
|
|
1367
|
+
// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
|
|
1368
|
+
// as strong roots.
|
|
1369
|
+
template <typename T>
|
|
1370
|
+
class StrongRootAllocator : private std::allocator<T> {
|
|
1371
|
+
public:
|
|
1372
|
+
using value_type = T;
|
|
1373
|
+
|
|
1374
|
+
template <typename HeapOrIsolateT>
|
|
1375
|
+
explicit StrongRootAllocator(HeapOrIsolateT*) {}
|
|
1376
|
+
template <typename U>
|
|
1377
|
+
StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept {}
|
|
1378
|
+
|
|
1379
|
+
using std::allocator<T>::allocate;
|
|
1380
|
+
using std::allocator<T>::deallocate;
|
|
1381
|
+
};
|
|
1382
|
+
|
|
1383
|
+
// TODO(pkasting): Replace with `requires` clauses after dropping support for
|
|
1384
|
+
// old gcc versions.
|
|
1385
|
+
template <typename Iterator, typename = void>
|
|
1386
|
+
inline constexpr bool kHaveIteratorConcept = false;
|
|
1387
|
+
template <typename Iterator>
|
|
1388
|
+
inline constexpr bool kHaveIteratorConcept<
|
|
1389
|
+
Iterator, std::void_t<typename Iterator::iterator_concept>> = true;
|
|
1390
|
+
|
|
1391
|
+
template <typename Iterator, typename = void>
|
|
1392
|
+
inline constexpr bool kHaveIteratorCategory = false;
|
|
1393
|
+
template <typename Iterator>
|
|
1394
|
+
inline constexpr bool kHaveIteratorCategory<
|
|
1395
|
+
Iterator, std::void_t<typename Iterator::iterator_category>> = true;
|
|
1396
|
+
|
|
1397
|
+
// Helper struct that contains an `iterator_concept` type alias only when either
|
|
1398
|
+
// `Iterator` or `std::iterator_traits<Iterator>` do.
|
|
1399
|
+
// Default: no alias.
|
|
1400
|
+
template <typename Iterator, typename = void>
|
|
1401
|
+
struct MaybeDefineIteratorConcept {};
|
|
1402
|
+
// Use `Iterator::iterator_concept` if available.
|
|
1403
|
+
template <typename Iterator>
|
|
1404
|
+
struct MaybeDefineIteratorConcept<
|
|
1405
|
+
Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
|
|
1406
|
+
using iterator_concept = typename Iterator::iterator_concept;
|
|
1407
|
+
};
|
|
1408
|
+
// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
|
|
1409
|
+
template <typename Iterator>
|
|
1410
|
+
struct MaybeDefineIteratorConcept<
|
|
1411
|
+
Iterator, std::enable_if_t<kHaveIteratorCategory<Iterator> &&
|
|
1412
|
+
!kHaveIteratorConcept<Iterator>>> {
|
|
1413
|
+
// There seems to be no feature-test macro covering this, so use the
|
|
1414
|
+
// presence of `<ranges>` as a crude proxy, since it was added to the
|
|
1415
|
+
// standard as part of the Ranges papers.
|
|
1416
|
+
// TODO(pkasting): Add this unconditionally after dropping support for old
|
|
1417
|
+
// libstdc++ versions.
|
|
1418
|
+
#if __has_include(<ranges>)
|
|
1419
|
+
using iterator_concept =
|
|
1420
|
+
typename std::iterator_traits<Iterator>::iterator_concept;
|
|
1421
|
+
#endif
|
|
1422
|
+
};
|
|
1423
|
+
|
|
1424
|
+
// A class of iterators that wrap some different iterator type.
// If specified, ElementType is the type of element accessed by the wrapper
// iterator; in this case, the actual reference and pointer types of Iterator
// must be convertible to ElementType& and ElementType*, respectively.
template <typename Iterator, typename ElementType = void>
class WrappedIterator : public MaybeDefineIteratorConcept<Iterator> {
 public:
  // Guard the ElementType override: the wrapped iterator's pointer/reference
  // types must convert to ElementType*/ElementType&.
  static_assert(
      std::is_void_v<ElementType> ||
      (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
                             std::add_pointer_t<ElementType>> &&
       std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
                             std::add_lvalue_reference_t<ElementType>>));

  // Standard iterator-traits members, forwarded from Iterator; the
  // element-related ones are replaced when ElementType is specified.
  using difference_type =
      typename std::iterator_traits<Iterator>::difference_type;
  using value_type =
      std::conditional_t<std::is_void_v<ElementType>,
                         typename std::iterator_traits<Iterator>::value_type,
                         ElementType>;
  using pointer =
      std::conditional_t<std::is_void_v<ElementType>,
                         typename std::iterator_traits<Iterator>::pointer,
                         std::add_pointer_t<ElementType>>;
  using reference =
      std::conditional_t<std::is_void_v<ElementType>,
                         typename std::iterator_traits<Iterator>::reference,
                         std::add_lvalue_reference_t<ElementType>>;
  using iterator_category =
      typename std::iterator_traits<Iterator>::iterator_category;

  constexpr WrappedIterator() noexcept = default;
  constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}

  // Converting constructor from a wrapper over a convertible iterator type
  // (e.g. mutable iterator -> const iterator).
  // TODO(pkasting): Switch to `requires` and concepts after dropping support
  // for old gcc and libstdc++ versions.
  template <typename OtherIterator, typename OtherElementType,
            typename = std::enable_if_t<
                std::is_convertible_v<OtherIterator, Iterator>>>
  constexpr WrappedIterator(
      const WrappedIterator<OtherIterator, OtherElementType>& other) noexcept
      : it_(other.base()) {}

  [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
  [[nodiscard]] constexpr pointer operator->() const noexcept {
    // A raw pointer has no operator->(); return it directly in that case.
    if constexpr (std::is_pointer_v<Iterator>) {
      return it_;
    } else {
      return it_.operator->();
    }
  }

  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator==(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ == other.base();
  }
#if V8_HAVE_SPACESHIP_OPERATOR
  // Three-way comparison, synthesized from the strongest comparison the
  // underlying iterator pair supports: native <=>, total order via </>,
  // or a partial order built from </== as a last resort.
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr auto operator<=>(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    if constexpr (std::three_way_comparable_with<Iterator, OtherIterator>) {
      return it_ <=> other.base();
    } else if constexpr (std::totally_ordered_with<Iterator, OtherIterator>) {
      if (it_ < other.base()) {
        return std::strong_ordering::less;
      }
      return (it_ > other.base()) ? std::strong_ordering::greater
                                  : std::strong_ordering::equal;
    } else {
      if (it_ < other.base()) {
        return std::partial_ordering::less;
      }
      if (other.base() < it_) {
        return std::partial_ordering::greater;
      }
      return (it_ == other.base()) ? std::partial_ordering::equivalent
                                   : std::partial_ordering::unordered;
    }
  }
#else
  // Assume that if spaceship isn't present, operator rewriting might not be
  // either.
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator!=(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ != other.base();
  }

  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator<(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ < other.base();
  }
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator<=(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ <= other.base();
  }
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator>(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ > other.base();
  }
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr bool operator>=(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ >= other.base();
  }
#endif

  // Random-access iterator operations, all delegated to the wrapped iterator.
  constexpr WrappedIterator& operator++() noexcept {
    ++it_;
    return *this;
  }
  constexpr WrappedIterator operator++(int) noexcept {
    WrappedIterator result(*this);
    ++(*this);
    return result;
  }

  constexpr WrappedIterator& operator--() noexcept {
    --it_;
    return *this;
  }
  constexpr WrappedIterator operator--(int) noexcept {
    WrappedIterator result(*this);
    --(*this);
    return result;
  }
  [[nodiscard]] constexpr WrappedIterator operator+(
      difference_type n) const noexcept {
    WrappedIterator result(*this);
    result += n;
    return result;
  }
  [[nodiscard]] friend constexpr WrappedIterator operator+(
      difference_type n, const WrappedIterator& x) noexcept {
    return x + n;
  }
  constexpr WrappedIterator& operator+=(difference_type n) noexcept {
    it_ += n;
    return *this;
  }
  [[nodiscard]] constexpr WrappedIterator operator-(
      difference_type n) const noexcept {
    return *this + -n;
  }
  constexpr WrappedIterator& operator-=(difference_type n) noexcept {
    return *this += -n;
  }
  template <typename OtherIterator, typename OtherElementType>
  [[nodiscard]] constexpr auto operator-(
      const WrappedIterator<OtherIterator, OtherElementType>& other)
      const noexcept {
    return it_ - other.base();
  }
  [[nodiscard]] constexpr reference operator[](
      difference_type n) const noexcept {
    return it_[n];
  }

  // Read-only access to the underlying iterator.
  [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; }

 private:
  Iterator it_;
};
|
|
1598
|
+
|
|
1599
|
+
// Helper functions about values contained in handles.
// A value is either an indirect pointer or a direct pointer, depending on
// whether direct local support is enabled.
class ValueHelper final {
 public:
  // ValueHelper::InternalRepresentationType is an abstract type that
  // corresponds to the internal representation of v8::Local and essentially
  // to what T* really is (these two are always in sync). This type is used in
  // methods like GetDataFromSnapshotOnce that need access to a handle's
  // internal representation. In particular, if `x` is a `v8::Local<T>`, then
  // `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
#ifdef V8_ENABLE_DIRECT_HANDLE
  // Sentinel address used to represent an empty direct handle (distinct from
  // nullptr).
  static constexpr Address kTaggedNullAddress = 1;

  // Direct handles: the representation is the tagged address itself.
  using InternalRepresentationType = internal::Address;
  static constexpr InternalRepresentationType kEmpty = kTaggedNullAddress;
#else
  // Indirect handles: the representation is a pointer to a slot that holds
  // the tagged address.
  using InternalRepresentationType = internal::Address*;
  static constexpr InternalRepresentationType kEmpty = nullptr;
#endif  // V8_ENABLE_DIRECT_HANDLE

  // Returns whether `value` is the empty representation (see kEmpty above).
  template <typename T>
  V8_INLINE static bool IsEmpty(T* value) {
    return ValueAsRepr(value) == kEmpty;
  }

  // Returns a handle's "value" for all kinds of abstract handles. For Local,
  // it is equivalent to `*handle`. The variadic parameters support handle
  // types with extra type parameters, like `Persistent<T, M>`.
  template <template <typename T, typename... Ms> typename H, typename T,
            typename... Ms>
  V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
    return handle.template value<T>();
  }

#ifdef V8_ENABLE_DIRECT_HANDLE

  // Direct mode: the value pointer is itself the tagged address.
  template <typename T>
  V8_INLINE static Address ValueAsAddress(const T* value) {
    return reinterpret_cast<Address>(value);
  }

  // Reads a value out of `slot`. With check_null (the default), a null slot
  // maps to the tagged-null sentinel instead of being dereferenced.
  template <typename T, bool check_null = true, typename S>
  V8_INLINE static T* SlotAsValue(S* slot) {
    if (check_null && slot == nullptr) {
      return reinterpret_cast<T*>(kTaggedNullAddress);
    }
    return *reinterpret_cast<T**>(slot);
  }

  // Direct mode: value <-> representation conversions are bitwise
  // reinterpretations of the same tagged address.
  template <typename T>
  V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
    return reinterpret_cast<InternalRepresentationType>(value);
  }

  template <typename T>
  V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
    return reinterpret_cast<T*>(repr);
  }

#else  // !V8_ENABLE_DIRECT_HANDLE

  // Indirect mode: the value pointer points at the slot, so the tagged
  // address is read through it.
  template <typename T>
  V8_INLINE static Address ValueAsAddress(const T* value) {
    return *reinterpret_cast<const Address*>(value);
  }

  // Indirect mode: the slot pointer and the value pointer are the same
  // pointer, just differently typed.
  template <typename T, bool check_null = true, typename S>
  V8_INLINE static T* SlotAsValue(S* slot) {
    return reinterpret_cast<T*>(slot);
  }

  // Indirect mode: the representation is the (non-const) slot pointer, so
  // const is cast away to match InternalRepresentationType.
  template <typename T>
  V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
    return const_cast<InternalRepresentationType>(
        reinterpret_cast<const Address*>(value));
  }

  template <typename T>
  V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
    return reinterpret_cast<T*>(repr);
  }

#endif  // V8_ENABLE_DIRECT_HANDLE
};
|
|
1684
|
+
|
|
1685
|
+
/**
|
|
1686
|
+
* Helper functions about handles.
|
|
1687
|
+
*/
|
|
1688
|
+
class HandleHelper final {
|
|
1689
|
+
public:
|
|
1690
|
+
/**
|
|
1691
|
+
* Checks whether two handles are equal.
|
|
1692
|
+
* They are equal iff they are both empty or they are both non-empty and the
|
|
1693
|
+
* objects to which they refer are physically equal.
|
|
1694
|
+
*
|
|
1695
|
+
* If both handles refer to JS objects, this is the same as strict equality.
|
|
1696
|
+
* For primitives, such as numbers or strings, a `false` return value does not
|
|
1697
|
+
* indicate that the values aren't equal in the JavaScript sense.
|
|
1698
|
+
* Use `Value::StrictEquals()` to check primitives for equality.
|
|
1699
|
+
*/
|
|
1700
|
+
template <typename T1, typename T2>
|
|
1701
|
+
V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
|
|
1702
|
+
if (lhs.IsEmpty()) return rhs.IsEmpty();
|
|
1703
|
+
if (rhs.IsEmpty()) return false;
|
|
1704
|
+
return lhs.ptr() == rhs.ptr();
|
|
1705
|
+
}
|
|
1706
|
+
};
|
|
1707
|
+
|
|
1708
|
+
// NOTE(review): declaration only — the argument is a handle's IsEmpty()
// result; presumably the V8-side definition reports/aborts when it is true.
// Confirm against the implementation.
V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);

// These functions are here just to match friend declarations in
// XxxCallbackInfo classes allowing these functions to access the internals
// of the info objects. These functions are supposed to be called by debugger
// macros.
void PrintFunctionCallbackInfo(void* function_callback_info);
void PrintPropertyCallbackInfo(void* property_callback_info);
|
|
986
1716
|
|
|
987
1717
|
} // namespace internal
|
|
988
1718
|
} // namespace v8
|