libv8-node 21.7.2.0-aarch64-linux → 22.5.1.0-aarch64-linux
- checksums.yaml +4 -4
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/aarch64-linux/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/internal/api-constants.h +1 -1
- data/vendor/v8/include/cppgc/type-traits.h +25 -4
- data/vendor/v8/include/v8-array-buffer.h +6 -0
- data/vendor/v8/include/v8-callbacks.h +6 -12
- data/vendor/v8/include/v8-container.h +54 -0
- data/vendor/v8/include/v8-context.h +51 -22
- data/vendor/v8/include/v8-embedder-heap.h +19 -3
- data/vendor/v8/include/v8-embedder-state-scope.h +2 -1
- data/vendor/v8/include/v8-exception.h +15 -9
- data/vendor/v8/include/v8-fast-api-calls.h +35 -26
- data/vendor/v8/include/v8-forward.h +1 -0
- data/vendor/v8/include/v8-function-callback.h +129 -20
- data/vendor/v8/include/v8-handle-base.h +32 -80
- data/vendor/v8/include/v8-inspector.h +16 -24
- data/vendor/v8/include/v8-internal.h +472 -65
- data/vendor/v8/include/v8-isolate.h +86 -51
- data/vendor/v8/include/v8-local-handle.h +257 -31
- data/vendor/v8/include/v8-memory-span.h +157 -2
- data/vendor/v8/include/v8-message.h +22 -3
- data/vendor/v8/include/v8-metrics.h +1 -0
- data/vendor/v8/include/v8-object.h +29 -10
- data/vendor/v8/include/v8-persistent-handle.h +5 -3
- data/vendor/v8/include/v8-platform.h +81 -44
- data/vendor/v8/include/v8-script.h +61 -11
- data/vendor/v8/include/v8-snapshot.h +94 -23
- data/vendor/v8/include/v8-statistics.h +10 -24
- data/vendor/v8/include/v8-template.h +410 -131
- data/vendor/v8/include/v8-traced-handle.h +81 -46
- data/vendor/v8/include/v8-typed-array.h +115 -7
- data/vendor/v8/include/v8-util.h +13 -12
- data/vendor/v8/include/v8-value.h +92 -4
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8config.h +35 -10
- metadata +2 -2
data/vendor/v8/include/v8-internal.h:

@@ -10,9 +10,11 @@
 #include <string.h>
 
 #include <atomic>
+#include <iterator>
+#include <memory>
 #include <type_traits>
 
-#include "v8config.h"
+#include "v8config.h"  // NOLINT(build/include_directory)
 
 namespace v8 {
 
@@ -23,6 +25,7 @@ class Isolate;
 
 namespace internal {
 
+class Heap;
 class Isolate;
 
 typedef uintptr_t Address;
@@ -172,11 +175,15 @@ using SandboxedPointer_t = Address;
 #ifdef V8_ENABLE_SANDBOX
 
 // Size of the sandbox, excluding the guard regions surrounding it.
-#
+#if defined(V8_TARGET_OS_ANDROID)
 // On Android, most 64-bit devices seem to be configured with only 39 bits of
 // virtual address space for userspace. As such, limit the sandbox to 128GB (a
 // quarter of the total available address space).
 constexpr size_t kSandboxSizeLog2 = 37;  // 128 GB
+#elif defined(V8_TARGET_ARCH_LOONG64)
+// Some Linux distros on LoongArch64 configured with only 40 bits of virtual
+// address space for userspace. Limit the sandbox to 256GB here.
+constexpr size_t kSandboxSizeLog2 = 38;  // 256 GB
 #else
 // Everywhere else use a 1TB sandbox.
 constexpr size_t kSandboxSizeLog2 = 40;  // 1 TB
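
The new kSandboxSizeLog2 values are base-2 exponents of the sandbox reservation. A standalone sanity check of the sizes the comments claim (illustrative only, not part of the gem; assumes a 64-bit build):

#include <cstdint>

// Illustrative only: mirrors the constants added in the hunk above.
constexpr uint64_t GB = uint64_t{1024} * 1024 * 1024;
static_assert((uint64_t{1} << 37) == 128 * GB);   // Android sandbox
static_assert((uint64_t{1} << 38) == 256 * GB);   // LoongArch64 sandbox
static_assert((uint64_t{1} << 40) == 1024 * GB);  // default 1 TB sandbox
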
@@ -418,7 +425,7 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
   /* it is the Embedder's responsibility to ensure type safety (against */ \
   /* substitution) and lifetime validity of these objects. */ \
   V(kExternalObjectValueTag, TAG(13)) \
-  V(
+  V(kFunctionTemplateInfoCallbackTag, TAG(14)) \
   V(kAccessorInfoGetterTag, TAG(15)) \
   V(kAccessorInfoSetterTag, TAG(16)) \
   V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
@@ -465,6 +472,15 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType(
   return tag >= kFirstSharedTag && tag <= kLastSharedTag;
 }
 
+// True if the external pointer may live in a read-only object, in which case
+// the table entry will be in the shared read-only segment of the external
+// pointer table.
+V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
+    ExternalPointerTag tag) {
+  return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
+         tag == kFunctionTemplateInfoCallbackTag;
+}
+
 // Sanity checks.
 #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
   static_assert(IsSharedExternalPointerType(Tag));
@@ -484,70 +500,122 @@ PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
 // Indirect Pointers.
 //
 // When the sandbox is enabled, indirect pointers are used to reference
-// HeapObjects that live outside of the sandbox (but are still managed
-//
-// object A will contain a IndirectPointerHandle, i.e. a shifted
-// which identifies an entry in a pointer table (
-//
-//
-//
-//
-//
-//
-// pointers
-// the pointer table entry would probably also contain the type of the target
-// object (e.g. by XORing the instance type into the top bits of the pointer).
+// HeapObjects that live outside of the sandbox (but are still managed by V8's
+// garbage collector). When object A references an object B through an indirect
+// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
+// 32-bit index, which identifies an entry in a pointer table (either the
+// trusted pointer table for TrustedObjects, or the code pointer table if it is
+// a Code object). This table entry then contains the actual pointer to object
+// B. Further, object B owns this pointer table entry, and it is responsible
+// for updating the "self-pointer" in the entry when it is relocated in memory.
+// This way, in contrast to "normal" pointers, indirect pointers never need to
+// be tracked by the GC (i.e. there is no remembered set for them).
+// These pointers do not exist when the sandbox is disabled.
 
 // An IndirectPointerHandle represents a 32-bit index into a pointer table.
 using IndirectPointerHandle = uint32_t;
 
-// The indirect pointer handles are stores shifted to the left by this amount
-// to guarantee that they are smaller than the maximum table size.
-constexpr uint32_t kIndirectPointerHandleShift = 6;
-
 // A null handle always references an entry that contains nullptr.
 constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;
 
-//
-//
-//
-//
-
+// When the sandbox is enabled, indirect pointers are used to implement:
+// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
+//   and referencing a TrustedObject in one of the trusted heap spaces.
+// - CodePointers, an indirect pointer using the code pointer table (CPT) and
+//   referencing a Code object together with its instruction stream.
+
+//
+// Trusted Pointers.
+//
+// A pointer to a TrustedObject.
+// When the sandbox is enabled, these are indirect pointers using the trusted
+// pointer table (TPT). They are used to reference trusted objects (located in
+// one of V8's trusted heap spaces, outside of the sandbox) from inside the
+// sandbox in a memory-safe way. When the sandbox is disabled, these are
+// regular tagged pointers.
+using TrustedPointerHandle = IndirectPointerHandle;
+
+// The size of the virtual memory reservation for the trusted pointer table.
+// As with the external pointer table, a maximum table size in combination with
+// shifted indices allows omitting bounds checks.
+constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;
+
+// The trusted pointer handles are stores shifted to the left by this amount
+// to guarantee that they are smaller than the maximum table size.
+constexpr uint32_t kTrustedPointerHandleShift = 9;
+
+// A null handle always references an entry that contains nullptr.
+constexpr TrustedPointerHandle kNullTrustedPointerHandle =
+    kNullIndirectPointerHandle;
+
+// The maximum number of entries in an trusted pointer table.
+constexpr int kTrustedPointerTableEntrySize = 8;
+constexpr int kTrustedPointerTableEntrySizeLog2 = 3;
+constexpr size_t kMaxTrustedPointers =
+    kTrustedPointerTableReservationSize / kTrustedPointerTableEntrySize;
+static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
+              "kTrustedPointerTableReservationSize and "
+              "kTrustedPointerHandleShift don't match");
 
 //
 // Code Pointers.
 //
-//
-//
-//
-//
-//
-//
-//
-// object and to directly load
+// A pointer to a Code object.
+// Essentially a specialized version of a trusted pointer that (when the
+// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
+// Each entry in the CPT contains both a pointer to a Code object as well as a
+// pointer to the Code's entrypoint. This allows calling/jumping into Code with
+// one fewer memory access (compared to the case where the entrypoint pointer
+// first needs to be loaded from the Code object). As such, a CodePointerHandle
+// can be used both to obtain the referenced Code object and to directly load
+// its entrypoint.
+//
+// When the sandbox is disabled, these are regular tagged pointers.
 using CodePointerHandle = IndirectPointerHandle;
-constexpr uint32_t kCodePointerHandleShift = kIndirectPointerHandleShift;
-constexpr CodePointerHandle kNullCodePointerHandle = 0;
 
-// The size of the virtual memory reservation for code pointer table.
-//
-//
-
-// value must be a power of two.
-constexpr size_t kCodePointerTableReservationSize = 1 * GB;
+// The size of the virtual memory reservation for the code pointer table.
+// As with the other tables, a maximum table size in combination with shifted
+// indices allows omitting bounds checks.
+constexpr size_t kCodePointerTableReservationSize = 16 * MB;
 
-//
+// Code pointer handles are shifted by a different amount than indirect pointer
+// handles as the tables have a different maximum size.
+constexpr uint32_t kCodePointerHandleShift = 12;
+
+// A null handle always references an entry that contains nullptr.
+constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle;
+
+// It can sometimes be necessary to distinguish a code pointer handle from a
+// trusted pointer handle. A typical example would be a union trusted pointer
+// field that can refer to both Code objects and other trusted objects. To
+// support these use-cases, we use a simple marking scheme where some of the
+// low bits of a code pointer handle are set, while they will be unset on a
+// trusted pointer handle. This way, the correct table to resolve the handle
+// can be determined even in the absence of a type tag.
+constexpr uint32_t kCodePointerHandleMarker = 0x1;
+static_assert(kCodePointerHandleShift > 0);
+static_assert(kTrustedPointerHandleShift > 0);
+
+// The maximum number of entries in a code pointer table.
 constexpr int kCodePointerTableEntrySize = 16;
 constexpr int kCodePointerTableEntrySizeLog2 = 4;
 constexpr size_t kMaxCodePointers =
     kCodePointerTableReservationSize / kCodePointerTableEntrySize;
 static_assert(
-    (1 << (32 -
+    (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
     "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
 
 constexpr int kCodePointerTableEntryEntrypointOffset = 0;
 constexpr int kCodePointerTableEntryCodeObjectOffset = 8;
 
+// Constants that can be used to mark places that should be modified once
+// certain types of objects are moved out of the sandbox and into trusted space.
+constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace = true;
+constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace = false;
+constexpr bool kAllCodeObjectsLiveInTrustedSpace =
+    kRuntimeGeneratedCodeObjectsLiveInTrustedSpace &&
+    kBuiltinCodeObjectsLiveInTrustedSpace;
+
 // {obj} must be the raw tagged pointer representation of a HeapObject
 // that's guaranteed to never be in ReadOnlySpace.
 V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
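
The handle shifts introduced above follow directly from the table reservations and entry sizes, and the new kCodePointerHandleMarker relies on those shifts leaving the low bits of a real handle clear. A standalone sketch re-deriving that arithmetic (names copied from the hunk, everything redefined locally for illustration only):

#include <cstdint>

// Standalone sketch mirroring the constants added above; values are
// re-derived here purely for illustration.
constexpr uint64_t MB = 1024 * 1024;
constexpr uint64_t kTrustedPointerTableReservationSize = 64 * MB;
constexpr uint64_t kCodePointerTableReservationSize = 16 * MB;

// 64 MB / 8-byte entries = 2^23 entries, so handles are shifted by 32-23 = 9.
static_assert(kTrustedPointerTableReservationSize / 8 == (uint64_t{1} << 23));
// 16 MB / 16-byte entries = 2^20 entries, so handles are shifted by 32-20 = 12.
static_assert(kCodePointerTableReservationSize / 16 == (uint64_t{1} << 20));

// Because both shifts are > 0, a shifted handle is a multiple of (1 << shift)
// and never has its low bit set; setting kCodePointerHandleMarker (0x1) on a
// code pointer handle therefore distinguishes it from a trusted pointer
// handle even without a type tag. Hypothetical example index below.
constexpr uint32_t kCodePointerHandleMarker = 0x1;
constexpr uint32_t kCodePointerHandleShift = 12;
constexpr uint32_t kTrustedPointerHandleShift = 9;
constexpr uint32_t example_index = 42;
constexpr uint32_t code_handle =
    (example_index << kCodePointerHandleShift) | kCodePointerHandleMarker;
constexpr uint32_t trusted_handle = example_index << kTrustedPointerHandleShift;
static_assert((code_handle & kCodePointerHandleMarker) != 0);
static_assert((trusted_handle & kCodePointerHandleMarker) == 0);
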
@@ -595,6 +663,9 @@ class Internals {
 
   static const uint32_t kNumIsolateDataSlots = 4;
   static const int kStackGuardSize = 8 * kApiSystemPointerSize;
+  static const int kNumberOfBooleanFlags = 6;
+  static const int kErrorMessageParamSize = 1;
+  static const int kTablesAlignmentPaddingSize = 1;
   static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
   static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
   static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
@@ -602,9 +673,11 @@ class Internals {
   static const int kHandleScopeDataSize =
       2 * kApiSystemPointerSize + 2 * kApiInt32Size;
 
-  // ExternalPointerTable layout guarantees.
+  // ExternalPointerTable and TrustedPointerTable layout guarantees.
   static const int kExternalPointerTableBasePointerOffset = 0;
   static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize;
+  static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize;
+  static const int kTrustedPointerTableBasePointerOffset = 0;
 
   // IsolateData layout guarantees.
   static const int kIsolateCageBaseOffset = 0;
@@ -612,16 +685,23 @@ class Internals {
       kIsolateCageBaseOffset + kApiSystemPointerSize;
   static const int kVariousBooleanFlagsOffset =
       kIsolateStackGuardOffset + kStackGuardSize;
-  static const int
-      kVariousBooleanFlagsOffset +
+  static const int kErrorMessageParamOffset =
+      kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
+  static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset +
+                                                   kErrorMessageParamSize +
+                                                   kTablesAlignmentPaddingSize;
   static const int kBuiltinTier0TableOffset =
       kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
   static const int kNewAllocationInfoOffset =
       kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
   static const int kOldAllocationInfoOffset =
       kNewAllocationInfoOffset + kLinearAllocationAreaSize;
+
+  static const int kFastCCallAlignmentPaddingSize =
+      kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize;
   static const int kIsolateFastCCallCallerFpOffset =
-      kOldAllocationInfoOffset + kLinearAllocationAreaSize
+      kOldAllocationInfoOffset + kLinearAllocationAreaSize +
+      kFastCCallAlignmentPaddingSize;
   static const int kIsolateFastCCallCallerPcOffset =
       kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
   static const int kIsolateFastApiCallTargetOffset =
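
On a 64-bit target (kApiSystemPointerSize == 8), the single byte of kTablesAlignmentPaddingSize is what keeps the builtin tables pointer-aligned after the six boolean flags and the error-message parameter. A standalone re-derivation of the first few offsets, using only values visible in this and the earlier hunks (illustrative, not part of the header):

#include <cstdint>

// Standalone re-derivation: assumes a 64-bit build, i.e.
// kApiSystemPointerSize == 8, and copies the sizes from the hunks above.
constexpr int kApiSystemPointerSize = 8;
constexpr int kStackGuardSize = 8 * kApiSystemPointerSize;       // 64
constexpr int kIsolateCageBaseOffset = 0;
constexpr int kIsolateStackGuardOffset =
    kIsolateCageBaseOffset + kApiSystemPointerSize;              // 8
constexpr int kVariousBooleanFlagsOffset =
    kIsolateStackGuardOffset + kStackGuardSize;                  // 72
constexpr int kErrorMessageParamOffset =
    kVariousBooleanFlagsOffset + 6 /* kNumberOfBooleanFlags */;  // 78
constexpr int kBuiltinTier0EntryTableOffset =
    kErrorMessageParamOffset + 1 /* kErrorMessageParamSize */ +
    1 /* kTablesAlignmentPaddingSize */;                         // 80

// The one padding byte is what brings the table back to a
// pointer-aligned offset.
static_assert(kBuiltinTier0EntryTableOffset % kApiSystemPointerSize == 0);
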
@@ -639,34 +719,55 @@ class Internals {
       kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
   static const int kIsolateSharedExternalPointerTableAddressOffset =
       kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
+#ifdef V8_ENABLE_SANDBOX
+  static const int kIsolateTrustedCageBaseOffset =
+      kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+  static const int kIsolateTrustedPointerTableOffset =
+      kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
+  static const int kIsolateApiCallbackThunkArgumentOffset =
+      kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
+#else
   static const int kIsolateApiCallbackThunkArgumentOffset =
       kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+#endif  // V8_ENABLE_SANDBOX
 #else
   static const int kIsolateApiCallbackThunkArgumentOffset =
       kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
-#endif
-  static const int
+#endif  // V8_COMPRESS_POINTERS
+  static const int kContinuationPreservedEmbedderDataOffset =
       kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
 
+  static const int kWasm64OOBOffsetAlignmentPaddingSize = 0;
+  static const int kWasm64OOBOffsetOffset =
+      kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize +
+      kWasm64OOBOffsetAlignmentPaddingSize;
+  static const int kIsolateRootsOffset =
+      kWasm64OOBOffsetOffset + sizeof(int64_t);
+
 #if V8_STATIC_ROOTS_BOOL
 
-  // These constants
+  // These constants are copied from static-roots.h and guarded by static asserts.
 #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
-  V(UndefinedValue)
-  V(NullValue)
-  V(TrueValue)
-  V(FalseValue)
-  V(EmptyString)
-  V(TheHoleValue)
+  V(UndefinedValue, 0x69) \
+  V(NullValue, 0x85) \
+  V(TrueValue, 0xc9) \
+  V(FalseValue, 0xad) \
+  V(EmptyString, 0xa1) \
+  V(TheHoleValue, 0x719)
 
   using Tagged_t = uint32_t;
   struct StaticReadOnlyRoot {
-#define DEF_ROOT(name)
+#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
     EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
 #undef DEF_ROOT
 
-
-
+    static constexpr Tagged_t kFirstStringMap = 0xe5;
+    static constexpr Tagged_t kLastStringMap = 0x47d;
+
+#define PLUSONE(...) +1
+    static constexpr size_t kNumberOfExportedStaticRoots =
+        2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
+#undef PLUSONE
   };
 
 #endif  // V8_STATIC_ROOTS_BOOL
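
EXPORTED_STATIC_ROOTS_PTR_LIST together with DEF_ROOT and PLUSONE is an X-macro: the same list is expanded once to declare constants and once to count itself. A self-contained imitation of the pattern (shortened list, illustrative values, hypothetical DEMO_* names, not the real static roots):

#include <cstddef>
#include <cstdint>

// Self-contained imitation of the pattern used in the hunk above.
#define DEMO_ROOTS_LIST(V) \
  V(UndefinedValue, 0x69)  \
  V(NullValue, 0x85)       \
  V(TheHoleValue, 0x719)

struct DemoStaticRoots {
// Each list entry expands to one constant definition.
#define DEF_ROOT(name, value) static constexpr uint32_t k##name = value;
  DEMO_ROOTS_LIST(DEF_ROOT)
#undef DEF_ROOT

// Each entry expands to "+1", so the list counts its own length.
#define PLUSONE(...) +1
  static constexpr size_t kCount = 0 DEMO_ROOTS_LIST(PLUSONE);
#undef PLUSONE
};

static_assert(DemoStaticRoots::kUndefinedValue == 0x69);
static_assert(DemoStaticRoots::kCount == 3);
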
@@ -683,8 +784,6 @@ class Internals {
   static const int kNodeStateMask = 0x3;
   static const int kNodeStateIsWeakValue = 2;
 
-  static const int kTracedNodeClassIdOffset = kApiSystemPointerSize;
-
   static const int kFirstNonstringType = 0x80;
   static const int kOddballType = 0x83;
   static const int kForeignType = 0xcc;
@@ -692,8 +791,13 @@ class Internals {
   static const int kJSObjectType = 0x421;
   static const int kFirstJSApiObjectType = 0x422;
   static const int kLastJSApiObjectType = 0x80A;
+  // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
+  // of JSApiObject instance type values that an embedder can use.
+  static const int kFirstEmbedderJSApiObjectType = 0;
+  static const int kLastEmbedderJSApiObjectType =
+      kLastJSApiObjectType - kFirstJSApiObjectType;
 
-  static const int kUndefinedOddballKind =
+  static const int kUndefinedOddballKind = 4;
   static const int kNullOddballKind = 3;
 
   // Constants used by PropertyCallbackInfo to check if we should throw when an
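
For reference, the embedder-usable range above spans 0x80A - 0x422 = 1000 instance-type values; a trivial check (illustrative only):

// 0x80A - 0x422 == 1000, so roughly a thousand JSApiObject instance types
// are reserved for embedders.
static_assert(0x80A - 0x422 == 1000, "embedder JSApiObject type range");
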
@@ -763,6 +867,15 @@ class Internals {
     return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
   }
 
+  V8_INLINE static Address LoadMap(Address obj) {
+    if (!HasHeapObjectTag(obj)) return kNullAddress;
+    Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
+#ifdef V8_MAP_PACKING
+    map = UnpackMapWord(map);
+#endif
+    return map;
+  }
+
   V8_INLINE static int GetOddballKind(Address obj) {
     return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
   }
@@ -836,15 +949,15 @@ class Internals {
     Address base = *reinterpret_cast<Address*>(
         reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
     switch (index) {
-#define DECOMPRESS_ROOT(name) \
-  case k##name##RootIndex:
+#define DECOMPRESS_ROOT(name, ...) \
+  case k##name##RootIndex: \
     return base + StaticReadOnlyRoot::k##name;
       EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
 #undef DECOMPRESS_ROOT
+#undef EXPORTED_STATIC_ROOTS_PTR_LIST
       default:
         break;
     }
-#undef EXPORTED_STATIC_ROOTS_PTR_LIST
 #endif  // V8_STATIC_ROOTS_BOOL
     return *GetRootSlot(isolate, index);
   }
@@ -943,6 +1056,10 @@ class Internals {
     return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
   }
 
+  V8_INLINE static uint32_t CompressTagged(Address value) {
+    return static_cast<uint32_t>(value);
+  }
+
   V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
                                                  uint32_t value) {
     Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
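
The new CompressTagged simply truncates a tagged address to its low 32 bits, i.e. to its offset inside the pointer-compression cage. A standalone sketch of the round trip this implies, assuming (as the surrounding DecompressTaggedField code suggests) that decompression adds those 32 bits back onto the cage base; the cage-base value and 64-bit Address are illustrative assumptions:

#include <cstdint>

using Address = uintptr_t;  // assumes a 64-bit build

// Illustrative stand-in for a 4GB-aligned pointer-compression cage base.
constexpr Address kCageBase = Address{7} << 32;
constexpr Address kSomeTaggedPtr = kCageBase + 0x12345;

constexpr uint32_t Compress(Address value) {
  return static_cast<uint32_t>(value);  // keep only the offset in the cage
}
constexpr Address Decompress(Address base, uint32_t value) {
  return base + value;  // re-attach the cage base
}

static_assert(Decompress(kCageBase, Compress(kSomeTaggedPtr)) ==
              kSomeTaggedPtr);
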
@@ -984,6 +1101,296 @@ class BackingStoreBase {};
 // This is needed for histograms sampling garbage collection reasons.
 constexpr int kGarbageCollectionReasonMaxValue = 27;
 
+// Base class for the address block allocator compatible with standard
+// containers, which registers its allocated range as strong roots.
+class V8_EXPORT StrongRootAllocatorBase {
+ public:
+  Heap* heap() const { return heap_; }
+
+  bool operator==(const StrongRootAllocatorBase& other) const {
+    return heap_ == other.heap_;
+  }
+  bool operator!=(const StrongRootAllocatorBase& other) const {
+    return heap_ != other.heap_;
+  }
+
+ protected:
+  explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
+  explicit StrongRootAllocatorBase(v8::Isolate* isolate);
+
+  // Allocate/deallocate a range of n elements of type internal::Address.
+  Address* allocate_impl(size_t n);
+  void deallocate_impl(Address* p, size_t n) noexcept;
+
+ private:
+  Heap* heap_;
+};
+
+// The general version of this template behaves just as std::allocator, with
+// the exception that the constructor takes the isolate as parameter. Only
+// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
+// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
+// as strong roots.
+template <typename T>
+class StrongRootAllocator : public StrongRootAllocatorBase,
+                            private std::allocator<T> {
+ public:
+  using value_type = T;
+
+  explicit StrongRootAllocator(Heap* heap) : StrongRootAllocatorBase(heap) {}
+  explicit StrongRootAllocator(v8::Isolate* isolate)
+      : StrongRootAllocatorBase(isolate) {}
+  template <typename U>
+  StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept
+      : StrongRootAllocatorBase(other) {}
+
+  using std::allocator<T>::allocate;
+  using std::allocator<T>::deallocate;
+};
+
+// A class of iterators that wrap some different iterator type.
+// If specified, ElementType is the type of element accessed by the wrapper
+// iterator; in this case, the actual reference and pointer types of Iterator
+// must be convertible to ElementType& and ElementType*, respectively.
+template <typename Iterator, typename ElementType = void>
+class WrappedIterator {
+ public:
+  static_assert(
+      !std::is_void_v<ElementType> ||
+      (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
+                             ElementType*> &&
+       std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
+                             ElementType&>));
+
+  using iterator_category =
+      typename std::iterator_traits<Iterator>::iterator_category;
+  using difference_type =
+      typename std::iterator_traits<Iterator>::difference_type;
+  using value_type =
+      std::conditional_t<std::is_void_v<ElementType>,
+                         typename std::iterator_traits<Iterator>::value_type,
+                         ElementType>;
+  using pointer =
+      std::conditional_t<std::is_void_v<ElementType>,
+                         typename std::iterator_traits<Iterator>::pointer,
+                         ElementType*>;
+  using reference =
+      std::conditional_t<std::is_void_v<ElementType>,
+                         typename std::iterator_traits<Iterator>::reference,
+                         ElementType&>;
+
+  constexpr WrappedIterator() noexcept : it_() {}
+  constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
+
+  template <typename OtherIterator, typename OtherElementType,
+            std::enable_if_t<std::is_convertible_v<OtherIterator, Iterator>,
+                             bool> = true>
+  constexpr WrappedIterator(
+      const WrappedIterator<OtherIterator, OtherElementType>& it) noexcept
+      : it_(it.base()) {}
+
+  constexpr reference operator*() const noexcept { return *it_; }
+  constexpr pointer operator->() const noexcept { return it_.operator->(); }
+
+  constexpr WrappedIterator& operator++() noexcept {
+    ++it_;
+    return *this;
+  }
+  constexpr WrappedIterator operator++(int) noexcept {
+    WrappedIterator result(*this);
+    ++(*this);
+    return result;
+  }
+
+  constexpr WrappedIterator& operator--() noexcept {
+    --it_;
+    return *this;
+  }
+  constexpr WrappedIterator operator--(int) noexcept {
+    WrappedIterator result(*this);
+    --(*this);
+    return result;
+  }
+  constexpr WrappedIterator operator+(difference_type n) const noexcept {
+    WrappedIterator result(*this);
+    result += n;
+    return result;
+  }
+  constexpr WrappedIterator& operator+=(difference_type n) noexcept {
+    it_ += n;
+    return *this;
+  }
+  constexpr WrappedIterator operator-(difference_type n) const noexcept {
+    return *this + (-n);
+  }
+  constexpr WrappedIterator& operator-=(difference_type n) noexcept {
+    *this += -n;
+    return *this;
+  }
+  constexpr reference operator[](difference_type n) const noexcept {
+    return it_[n];
+  }
+
+  constexpr Iterator base() const noexcept { return it_; }
+
+ private:
+  template <typename OtherIterator, typename OtherElementType>
+  friend class WrappedIterator;
+
+ private:
+  Iterator it_;
+};
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator==(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return x.base() == y.base();
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator<(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return x.base() < y.base();
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator!=(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return !(x == y);
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator>(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return y < x;
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator>=(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return !(x < y);
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr bool operator<=(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+  return !(y < x);
+}
+
+template <typename Iterator, typename ElementType, typename OtherIterator,
+          typename OtherElementType>
+constexpr auto operator-(
+    const WrappedIterator<Iterator, ElementType>& x,
+    const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept
+    -> decltype(x.base() - y.base()) {
+  return x.base() - y.base();
+}
+
+template <typename Iterator, typename ElementType>
+constexpr WrappedIterator<Iterator> operator+(
+    typename WrappedIterator<Iterator, ElementType>::difference_type n,
+    const WrappedIterator<Iterator, ElementType>& x) noexcept {
+  x += n;
+  return x;
+}
+
+// Helper functions about values contained in handles.
+// A value is either an indirect pointer or a direct pointer, depending on
+// whether direct local support is enabled.
+class ValueHelper final {
+ public:
+#ifdef V8_ENABLE_DIRECT_LOCAL
+  static constexpr Address kTaggedNullAddress = 1;
+  static constexpr Address kEmpty = kTaggedNullAddress;
+#else
+  static constexpr Address kEmpty = kNullAddress;
+#endif  // V8_ENABLE_DIRECT_LOCAL
+
+  template <typename T>
+  V8_INLINE static bool IsEmpty(T* value) {
+    return reinterpret_cast<Address>(value) == kEmpty;
+  }
+
+  // Returns a handle's "value" for all kinds of abstract handles. For Local,
+  // it is equivalent to `*handle`. The variadic parameters support handle
+  // types with extra type parameters, like `Persistent<T, M>`.
+  template <template <typename T, typename... Ms> typename H, typename T,
+            typename... Ms>
+  V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
+    return handle.template value<T>();
+  }
+
+#ifdef V8_ENABLE_DIRECT_LOCAL
+
+  template <typename T>
+  V8_INLINE static Address ValueAsAddress(const T* value) {
+    return reinterpret_cast<Address>(value);
+  }
+
+  template <typename T, bool check_null = true, typename S>
+  V8_INLINE static T* SlotAsValue(S* slot) {
+    if (check_null && slot == nullptr) {
+      return reinterpret_cast<T*>(kTaggedNullAddress);
+    }
+    return *reinterpret_cast<T**>(slot);
+  }
+
+#else  // !V8_ENABLE_DIRECT_LOCAL
+
+  template <typename T>
+  V8_INLINE static Address ValueAsAddress(const T* value) {
+    return *reinterpret_cast<const Address*>(value);
+  }
+
+  template <typename T, bool check_null = true, typename S>
+  V8_INLINE static T* SlotAsValue(S* slot) {
+    return reinterpret_cast<T*>(slot);
+  }
+
+#endif  // V8_ENABLE_DIRECT_LOCAL
+};
+
+/**
+ * Helper functions about handles.
+ */
+class HandleHelper final {
+ public:
+  /**
+   * Checks whether two handles are equal.
+   * They are equal iff they are both empty or they are both non-empty and the
+   * objects to which they refer are physically equal.
+   *
+   * If both handles refer to JS objects, this is the same as strict equality.
+   * For primitives, such as numbers or strings, a `false` return value does not
+   * indicate that the values aren't equal in the JavaScript sense.
+   * Use `Value::StrictEquals()` to check primitives for equality.
+   */
+  template <typename T1, typename T2>
+  V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
+    if (lhs.IsEmpty()) return rhs.IsEmpty();
+    if (rhs.IsEmpty()) return false;
+    return lhs.ptr() == rhs.ptr();
+  }
+
+  static V8_EXPORT bool IsOnStack(const void* ptr);
+  static V8_EXPORT void VerifyOnStack(const void* ptr);
+  static V8_EXPORT void VerifyOnMainThread();
+};
+
+V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);
+
 }  // namespace internal
 }  // namespace v8
 
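
WrappedIterator above is a thin, STL-compatible wrapper around any iterator type. A hedged usage sketch (v8::internal is not public API; this only illustrates the shape of the class and assumes the gem's vendored include directory is on the include path):

#include <cassert>
#include <vector>

#include "v8-internal.h"

// Illustrative use of the WrappedIterator added above: wrap a plain
// std::vector iterator and walk it through the usual iterator operations.
int main() {
  std::vector<int> values = {10, 20, 30};
  using It = v8::internal::WrappedIterator<std::vector<int>::iterator>;

  It first(values.begin());
  It last(values.end());

  assert(*first == 10);
  assert(first[2] == 30);     // random access forwards to the wrapped iterator
  assert(last - first == 3);  // difference comes from the wrapped iterators
  ++first;
  assert(*first == 20);
  assert(first != last);
  assert(first.base() == values.begin() + 1);
  return 0;
}
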