libv8-node 20.12.1.0-x86_64-linux-musl → 22.5.1.0-x86_64-linux-musl

Files changed (55)
  1. checksums.yaml +4 -4
  2. data/lib/libv8/node/version.rb +3 -3
  3. data/vendor/v8/include/cppgc/internal/api-constants.h +24 -5
  4. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +16 -6
  5. data/vendor/v8/include/cppgc/internal/caged-heap.h +12 -5
  6. data/vendor/v8/include/cppgc/internal/gc-info.h +82 -91
  7. data/vendor/v8/include/cppgc/internal/member-storage.h +16 -8
  8. data/vendor/v8/include/cppgc/member.h +25 -0
  9. data/vendor/v8/include/cppgc/persistent.h +4 -0
  10. data/vendor/v8/include/cppgc/platform.h +6 -1
  11. data/vendor/v8/include/cppgc/sentinel-pointer.h +7 -0
  12. data/vendor/v8/include/cppgc/source-location.h +2 -78
  13. data/vendor/v8/include/cppgc/trace-trait.h +8 -0
  14. data/vendor/v8/include/cppgc/type-traits.h +25 -4
  15. data/vendor/v8/include/cppgc/visitor.h +82 -4
  16. data/vendor/v8/include/libplatform/libplatform.h +7 -1
  17. data/vendor/v8/include/v8-array-buffer.h +6 -0
  18. data/vendor/v8/include/v8-callbacks.h +57 -19
  19. data/vendor/v8/include/v8-container.h +54 -0
  20. data/vendor/v8/include/v8-context.h +58 -32
  21. data/vendor/v8/include/v8-embedder-heap.h +31 -3
  22. data/vendor/v8/include/v8-embedder-state-scope.h +2 -1
  23. data/vendor/v8/include/v8-exception.h +15 -9
  24. data/vendor/v8/include/v8-fast-api-calls.h +58 -31
  25. data/vendor/v8/include/v8-forward.h +1 -0
  26. data/vendor/v8/include/v8-function-callback.h +135 -30
  27. data/vendor/v8/include/v8-function.h +6 -0
  28. data/vendor/v8/include/v8-handle-base.h +137 -0
  29. data/vendor/v8/include/v8-inspector.h +35 -13
  30. data/vendor/v8/include/v8-internal.h +510 -71
  31. data/vendor/v8/include/v8-isolate.h +176 -100
  32. data/vendor/v8/include/v8-local-handle.h +383 -112
  33. data/vendor/v8/include/v8-memory-span.h +157 -2
  34. data/vendor/v8/include/v8-message.h +22 -3
  35. data/vendor/v8/include/v8-metrics.h +1 -0
  36. data/vendor/v8/include/v8-object.h +98 -77
  37. data/vendor/v8/include/v8-persistent-handle.h +68 -90
  38. data/vendor/v8/include/v8-platform.h +191 -23
  39. data/vendor/v8/include/v8-primitive.h +12 -8
  40. data/vendor/v8/include/v8-profiler.h +16 -2
  41. data/vendor/v8/include/v8-script.h +88 -14
  42. data/vendor/v8/include/v8-snapshot.h +96 -22
  43. data/vendor/v8/include/v8-source-location.h +92 -0
  44. data/vendor/v8/include/v8-statistics.h +31 -10
  45. data/vendor/v8/include/v8-template.h +410 -131
  46. data/vendor/v8/include/v8-traced-handle.h +108 -90
  47. data/vendor/v8/include/v8-typed-array.h +115 -7
  48. data/vendor/v8/include/v8-unwinder.h +1 -1
  49. data/vendor/v8/include/v8-util.h +23 -20
  50. data/vendor/v8/include/v8-value-serializer.h +14 -0
  51. data/vendor/v8/include/v8-value.h +105 -3
  52. data/vendor/v8/include/v8-version.h +4 -4
  53. data/vendor/v8/include/v8config.h +54 -20
  54. data/vendor/v8/x86_64-linux-musl/libv8/obj/libv8_monolith.a +0 -0
  55. metadata +4 -2
@@ -10,10 +10,11 @@
  #include <string.h>
 
  #include <atomic>
+ #include <iterator>
+ #include <memory>
  #include <type_traits>
 
- #include "v8-version.h"  // NOLINT(build/include_directory)
- #include "v8config.h"  // NOLINT(build/include_directory)
+ #include "v8config.h"  // NOLINT(build/include_directory)
 
  namespace v8 {
 
@@ -24,6 +25,7 @@ class Isolate;
 
  namespace internal {
 
+ class Heap;
  class Isolate;
 
  typedef uintptr_t Address;
@@ -80,7 +82,7 @@ struct SmiTagging<4> {
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
 
- V8_INLINE static int SmiToInt(Address value) {
+ V8_INLINE static constexpr int SmiToInt(Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
@@ -105,7 +107,7 @@ struct SmiTagging<8> {
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
 
- V8_INLINE static int SmiToInt(Address value) {
+ V8_INLINE static constexpr int SmiToInt(Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
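Illustrative aside (not part of the diff): the constexpr SmiToInt above undoes V8's Smi encoding by shifting the payload back down. Below is a minimal sketch of the round trip, assuming the 8-byte (uncompressed 64-bit) layout where kSmiTagSize + kSmiShiftSize == 32; the helper names are local to this example.

#include <cstdint>

using Address = uintptr_t;

constexpr int kShiftBits = 32;  // kSmiTagSize (1) + kSmiShiftSize (31), assumed

constexpr Address IntToSmi(int value) {
  // Payload goes into the upper 32 bits; the low bits hold the Smi tag (0).
  return static_cast<Address>(
      static_cast<uint64_t>(static_cast<uint32_t>(value)) << kShiftBits);
}

constexpr int SmiToInt(Address value) {
  // Arithmetic shift back down, discarding the tag bits (sign-extending).
  return static_cast<int>(static_cast<intptr_t>(value) >> kShiftBits);
}

static_assert(SmiToInt(IntToSmi(42)) == 42);
static_assert(SmiToInt(IntToSmi(-7)) == -7);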
@@ -173,11 +175,15 @@ using SandboxedPointer_t = Address;
  #ifdef V8_ENABLE_SANDBOX
 
  // Size of the sandbox, excluding the guard regions surrounding it.
- #ifdef V8_TARGET_OS_ANDROID
+ #if defined(V8_TARGET_OS_ANDROID)
  // On Android, most 64-bit devices seem to be configured with only 39 bits of
  // virtual address space for userspace. As such, limit the sandbox to 128GB (a
  // quarter of the total available address space).
  constexpr size_t kSandboxSizeLog2 = 37;  // 128 GB
+ #elif defined(V8_TARGET_ARCH_LOONG64)
+ // Some Linux distros on LoongArch64 configured with only 40 bits of virtual
+ // address space for userspace. Limit the sandbox to 256GB here.
+ constexpr size_t kSandboxSizeLog2 = 38;  // 256 GB
  #else
  // Everywhere else use a 1TB sandbox.
  constexpr size_t kSandboxSizeLog2 = 40;  // 1 TB
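Illustrative aside (not part of the diff): the sizes implied by the kSandboxSizeLog2 values chosen above, spelled out as compile-time checks.

#include <cstddef>

constexpr size_t GB = size_t{1} << 30;

static_assert((size_t{1} << 37) == 128 * GB);   // Android
static_assert((size_t{1} << 38) == 256 * GB);   // LoongArch64
static_assert((size_t{1} << 40) == 1024 * GB);  // everywhere else (1 TB)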
@@ -247,20 +253,22 @@ static_assert(1ULL << (64 - kBoundedSizeShift) ==
  // size allows omitting bounds checks on table accesses if the indices are
  // guaranteed (e.g. through shifting) to be below the maximum index. This
  // value must be a power of two.
- static const size_t kExternalPointerTableReservationSize = 512 * MB;
+ constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
 
  // The external pointer table indices stored in HeapObjects as external
  // pointers are shifted to the left by this amount to guarantee that they are
  // smaller than the maximum table size.
- static const uint32_t kExternalPointerIndexShift = 6;
+ constexpr uint32_t kExternalPointerIndexShift = 6;
  #else
- static const size_t kExternalPointerTableReservationSize = 1024 * MB;
- static const uint32_t kExternalPointerIndexShift = 5;
+ constexpr size_t kExternalPointerTableReservationSize = 1024 * MB;
+ constexpr uint32_t kExternalPointerIndexShift = 5;
  #endif  // V8_TARGET_OS_ANDROID
 
  // The maximum number of entries in an external pointer table.
- static const size_t kMaxExternalPointers =
-     kExternalPointerTableReservationSize / kApiSystemPointerSize;
+ constexpr int kExternalPointerTableEntrySize = 8;
+ constexpr int kExternalPointerTableEntrySizeLog2 = 3;
+ constexpr size_t kMaxExternalPointers =
+     kExternalPointerTableReservationSize / kExternalPointerTableEntrySize;
  static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
                "kExternalPointerTableReservationSize and "
                "kExternalPointerIndexShift don't match");
@@ -268,7 +276,7 @@ static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
  #else  // !V8_COMPRESS_POINTERS
 
  // Needed for the V8.SandboxedExternalPointersCount histogram.
- static const size_t kMaxExternalPointers = 0;
+ constexpr size_t kMaxExternalPointers = 0;
 
  #endif  // V8_COMPRESS_POINTERS
 
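Illustrative aside (not part of the diff): the arithmetic the kMaxExternalPointers static_assert above relies on, using the Android configuration as an example; all names are local to this sketch.

#include <cstddef>
#include <cstdint>

constexpr size_t MB = size_t{1} << 20;

constexpr size_t kReservationSize = 512 * MB;  // table reservation (Android case)
constexpr int kEntrySize = 8;                  // one 8-byte entry per pointer
constexpr uint32_t kIndexShift = 6;            // handles are index << 6

constexpr size_t kMaxEntries = kReservationSize / kEntrySize;  // 2^26 entries

// Any 32-bit handle shifted right by kIndexShift stays below kMaxEntries, so
// table accesses need no explicit bounds check.
static_assert((size_t{1} << (32 - kIndexShift)) == kMaxEntries);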
@@ -281,15 +289,21 @@ static const size_t kMaxExternalPointers = 0;
  // that it is smaller than the size of the table.
  using ExternalPointerHandle = uint32_t;
 
- // ExternalPointers point to objects located outside the sandbox. When
- // sandboxed external pointers are enabled, these are stored on heap as
- // ExternalPointerHandles, otherwise they are simply raw pointers.
+ // ExternalPointers point to objects located outside the sandbox. When the V8
+ // sandbox is enabled, these are stored on heap as ExternalPointerHandles,
+ // otherwise they are simply raw pointers.
  #ifdef V8_ENABLE_SANDBOX
  using ExternalPointer_t = ExternalPointerHandle;
  #else
  using ExternalPointer_t = Address;
  #endif
 
+ constexpr ExternalPointer_t kNullExternalPointer = 0;
+ constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
+
+ //
+ // External Pointers.
+ //
  // When the sandbox is enabled, external pointers are stored in an external
  // pointer table and are referenced from HeapObjects through an index (a
  // "handle"). When stored in the table, the pointers are tagged with per-type
@@ -359,6 +373,7 @@ using ExternalPointer_t = Address;
  // ExternalPointerTable.
  constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
  constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
+ constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000;
  constexpr uint64_t kExternalPointerTagShift = 48;
 
  // All possible 8-bit type tags.
@@ -410,14 +425,15 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
  /* it is the Embedder's responsibility to ensure type safety (against */ \
  /* substitution) and lifetime validity of these objects. */ \
  V(kExternalObjectValueTag, TAG(13)) \
- V(kCallHandlerInfoCallbackTag, TAG(14)) \
+ V(kFunctionTemplateInfoCallbackTag, TAG(14)) \
  V(kAccessorInfoGetterTag, TAG(15)) \
  V(kAccessorInfoSetterTag, TAG(16)) \
  V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
  V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
  V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
  V(kWasmContinuationJmpbufTag, TAG(20)) \
- V(kArrayBufferExtensionTag, TAG(21))
+ V(kWasmIndirectFunctionTargetTag, TAG(21)) \
+ V(kArrayBufferExtensionTag, TAG(22))
 
  // All external pointer tags.
  #define ALL_EXTERNAL_POINTER_TAGS(V) \
@@ -430,7 +446,7 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
      (HasMarkBit ? kExternalPointerMarkBit : 0))
  enum ExternalPointerTag : uint64_t {
    // Empty tag value. Mostly used as placeholder.
-   kExternalPointerNullTag = MAKE_TAG(0, 0b00000000),
+   kExternalPointerNullTag = MAKE_TAG(1, 0b00000000),
    // External pointer tag that will match any external pointer. Use with care!
    kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
    // The free entry tag has all type bits set so every type check with a
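Illustrative aside (not part of the diff): a tag built the way MAKE_TAG builds it (8 type bits at kExternalPointerTagShift, optional mark bit at bit 62) always lies inside the masks defined earlier in this header. The MakeTag helper below is local to this sketch.

#include <cstdint>

constexpr uint64_t kMarkBit = 1ULL << 62;                      // kExternalPointerMarkBit
constexpr uint64_t kTagShift = 48;                             // kExternalPointerTagShift
constexpr uint64_t kTagMask = 0x40ff000000000000;              // kExternalPointerTagMask
constexpr uint64_t kTagMaskWithoutMarkBit = 0xff000000000000;  // ...WithoutMarkBit

constexpr uint64_t MakeTag(bool has_mark_bit, uint64_t type_tag) {
  return (type_tag << kTagShift) | (has_mark_bit ? kMarkBit : 0);
}

static_assert((MakeTag(true, 0b11111111) & ~kTagMask) == 0);
static_assert((MakeTag(true, 0x42) & kTagMaskWithoutMarkBit) == (0x42ULL << kTagShift));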
@@ -456,6 +472,15 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType(
    return tag >= kFirstSharedTag && tag <= kLastSharedTag;
  }
 
+ // True if the external pointer may live in a read-only object, in which case
+ // the table entry will be in the shared read-only segment of the external
+ // pointer table.
+ V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
+     ExternalPointerTag tag) {
+   return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
+          tag == kFunctionTemplateInfoCallbackTag;
+ }
+
  // Sanity checks.
  #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
    static_assert(IsSharedExternalPointerType(Tag));
@@ -471,6 +496,126 @@ PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
  #undef SHARED_EXTERNAL_POINTER_TAGS
  #undef EXTERNAL_POINTER_TAGS
 
+ //
+ // Indirect Pointers.
+ //
+ // When the sandbox is enabled, indirect pointers are used to reference
+ // HeapObjects that live outside of the sandbox (but are still managed by V8's
+ // garbage collector). When object A references an object B through an indirect
+ // pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
+ // 32-bit index, which identifies an entry in a pointer table (either the
+ // trusted pointer table for TrustedObjects, or the code pointer table if it is
+ // a Code object). This table entry then contains the actual pointer to object
+ // B. Further, object B owns this pointer table entry, and it is responsible
+ // for updating the "self-pointer" in the entry when it is relocated in memory.
+ // This way, in contrast to "normal" pointers, indirect pointers never need to
+ // be tracked by the GC (i.e. there is no remembered set for them).
+ // These pointers do not exist when the sandbox is disabled.
+
+ // An IndirectPointerHandle represents a 32-bit index into a pointer table.
+ using IndirectPointerHandle = uint32_t;
+
+ // A null handle always references an entry that contains nullptr.
+ constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;
+
+ // When the sandbox is enabled, indirect pointers are used to implement:
+ // - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
+ //   and referencing a TrustedObject in one of the trusted heap spaces.
+ // - CodePointers, an indirect pointer using the code pointer table (CPT) and
+ //   referencing a Code object together with its instruction stream.
+
+ //
+ // Trusted Pointers.
+ //
+ // A pointer to a TrustedObject.
+ // When the sandbox is enabled, these are indirect pointers using the trusted
+ // pointer table (TPT). They are used to reference trusted objects (located in
+ // one of V8's trusted heap spaces, outside of the sandbox) from inside the
+ // sandbox in a memory-safe way. When the sandbox is disabled, these are
+ // regular tagged pointers.
+ using TrustedPointerHandle = IndirectPointerHandle;
+
+ // The size of the virtual memory reservation for the trusted pointer table.
+ // As with the external pointer table, a maximum table size in combination with
+ // shifted indices allows omitting bounds checks.
+ constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;
+
+ // The trusted pointer handles are stores shifted to the left by this amount
+ // to guarantee that they are smaller than the maximum table size.
+ constexpr uint32_t kTrustedPointerHandleShift = 9;
+
+ // A null handle always references an entry that contains nullptr.
+ constexpr TrustedPointerHandle kNullTrustedPointerHandle =
+     kNullIndirectPointerHandle;
+
+ // The maximum number of entries in an trusted pointer table.
+ constexpr int kTrustedPointerTableEntrySize = 8;
+ constexpr int kTrustedPointerTableEntrySizeLog2 = 3;
+ constexpr size_t kMaxTrustedPointers =
+     kTrustedPointerTableReservationSize / kTrustedPointerTableEntrySize;
+ static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
+               "kTrustedPointerTableReservationSize and "
+               "kTrustedPointerHandleShift don't match");
+
+ //
+ // Code Pointers.
+ //
+ // A pointer to a Code object.
+ // Essentially a specialized version of a trusted pointer that (when the
+ // sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
+ // Each entry in the CPT contains both a pointer to a Code object as well as a
+ // pointer to the Code's entrypoint. This allows calling/jumping into Code with
+ // one fewer memory access (compared to the case where the entrypoint pointer
+ // first needs to be loaded from the Code object). As such, a CodePointerHandle
+ // can be used both to obtain the referenced Code object and to directly load
+ // its entrypoint.
+ //
+ // When the sandbox is disabled, these are regular tagged pointers.
+ using CodePointerHandle = IndirectPointerHandle;
+
+ // The size of the virtual memory reservation for the code pointer table.
+ // As with the other tables, a maximum table size in combination with shifted
+ // indices allows omitting bounds checks.
+ constexpr size_t kCodePointerTableReservationSize = 16 * MB;
+
+ // Code pointer handles are shifted by a different amount than indirect pointer
+ // handles as the tables have a different maximum size.
+ constexpr uint32_t kCodePointerHandleShift = 12;
+
+ // A null handle always references an entry that contains nullptr.
+ constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle;
+
+ // It can sometimes be necessary to distinguish a code pointer handle from a
+ // trusted pointer handle. A typical example would be a union trusted pointer
+ // field that can refer to both Code objects and other trusted objects. To
+ // support these use-cases, we use a simple marking scheme where some of the
+ // low bits of a code pointer handle are set, while they will be unset on a
+ // trusted pointer handle. This way, the correct table to resolve the handle
+ // can be determined even in the absence of a type tag.
+ constexpr uint32_t kCodePointerHandleMarker = 0x1;
+ static_assert(kCodePointerHandleShift > 0);
+ static_assert(kTrustedPointerHandleShift > 0);
+
+ // The maximum number of entries in a code pointer table.
+ constexpr int kCodePointerTableEntrySize = 16;
+ constexpr int kCodePointerTableEntrySizeLog2 = 4;
+ constexpr size_t kMaxCodePointers =
+     kCodePointerTableReservationSize / kCodePointerTableEntrySize;
+ static_assert(
+     (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
+     "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
+
+ constexpr int kCodePointerTableEntryEntrypointOffset = 0;
+ constexpr int kCodePointerTableEntryCodeObjectOffset = 8;
+
+ // Constants that can be used to mark places that should be modified once
+ // certain types of objects are moved out of the sandbox and into trusted space.
+ constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace = true;
+ constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace = false;
+ constexpr bool kAllCodeObjectsLiveInTrustedSpace =
+     kRuntimeGeneratedCodeObjectsLiveInTrustedSpace &&
+     kBuiltinCodeObjectsLiveInTrustedSpace;
+
  // {obj} must be the raw tagged pointer representation of a HeapObject
  // that's guaranteed to never be in ReadOnlySpace.
  V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
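Illustrative aside (not part of the diff): a sketch of how a 32-bit handle like the ones described above maps to a table index, and how the low-bit marker tells code pointer handles apart from trusted pointer handles. All names below are local to this example.

#include <cstddef>
#include <cstdint>

constexpr uint32_t kTrustedShift = 9;  // kTrustedPointerHandleShift
constexpr uint32_t kCodeShift = 12;    // kCodePointerHandleShift
constexpr uint32_t kCodeMarker = 0x1;  // kCodePointerHandleMarker

constexpr bool IsCodePointerHandle(uint32_t handle) {
  // Code pointer handles carry the marker in their low bits; trusted pointer
  // handles leave those bits clear.
  return (handle & kCodeMarker) != 0;
}

constexpr size_t HandleToIndex(uint32_t handle) {
  // Shifting away the low bits yields the table index; a 32-bit handle can
  // therefore never index past the end of the fixed-size table.
  return IsCodePointerHandle(handle) ? handle >> kCodeShift
                                     : handle >> kTrustedShift;
}

static_assert(HandleToIndex((5u << kCodeShift) | kCodeMarker) == 5);
static_assert(HandleToIndex(7u << kTrustedShift) == 7);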
@@ -517,15 +662,22 @@ class Internals {
    static const int kExternalOneByteRepresentationTag = 0x0a;
 
    static const uint32_t kNumIsolateDataSlots = 4;
-   static const int kStackGuardSize = 7 * kApiSystemPointerSize;
+   static const int kStackGuardSize = 8 * kApiSystemPointerSize;
+   static const int kNumberOfBooleanFlags = 6;
+   static const int kErrorMessageParamSize = 1;
+   static const int kTablesAlignmentPaddingSize = 1;
    static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
    static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
    static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
-   static const int kThreadLocalTopSize = 25 * kApiSystemPointerSize;
+   static const int kThreadLocalTopSize = 30 * kApiSystemPointerSize;
+   static const int kHandleScopeDataSize =
+       2 * kApiSystemPointerSize + 2 * kApiInt32Size;
 
-   // ExternalPointerTable layout guarantees.
-   static const int kExternalPointerTableBufferOffset = 0;
-   static const int kExternalPointerTableSize = 4 * kApiSystemPointerSize;
+   // ExternalPointerTable and TrustedPointerTable layout guarantees.
+   static const int kExternalPointerTableBasePointerOffset = 0;
+   static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize;
+   static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize;
+   static const int kTrustedPointerTableBasePointerOffset = 0;
 
    // IsolateData layout guarantees.
    static const int kIsolateCageBaseOffset = 0;
@@ -533,16 +685,23 @@ class Internals {
        kIsolateCageBaseOffset + kApiSystemPointerSize;
    static const int kVariousBooleanFlagsOffset =
        kIsolateStackGuardOffset + kStackGuardSize;
-   static const int kBuiltinTier0EntryTableOffset =
-       kVariousBooleanFlagsOffset + 8;
+   static const int kErrorMessageParamOffset =
+       kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
+   static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset +
+                                                    kErrorMessageParamSize +
+                                                    kTablesAlignmentPaddingSize;
    static const int kBuiltinTier0TableOffset =
        kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
    static const int kNewAllocationInfoOffset =
        kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
    static const int kOldAllocationInfoOffset =
        kNewAllocationInfoOffset + kLinearAllocationAreaSize;
+
+   static const int kFastCCallAlignmentPaddingSize =
+       kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize;
    static const int kIsolateFastCCallCallerFpOffset =
-       kOldAllocationInfoOffset + kLinearAllocationAreaSize;
+       kOldAllocationInfoOffset + kLinearAllocationAreaSize +
+       kFastCCallAlignmentPaddingSize;
    static const int kIsolateFastCCallCallerPcOffset =
        kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
    static const int kIsolateFastApiCallTargetOffset =
@@ -551,39 +710,64 @@ class Internals {
        kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
    static const int kIsolateThreadLocalTopOffset =
        kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
-   static const int kIsolateEmbedderDataOffset =
+   static const int kIsolateHandleScopeDataOffset =
        kIsolateThreadLocalTopOffset + kThreadLocalTopSize;
+   static const int kIsolateEmbedderDataOffset =
+       kIsolateHandleScopeDataOffset + kHandleScopeDataSize;
  #ifdef V8_COMPRESS_POINTERS
    static const int kIsolateExternalPointerTableOffset =
        kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
    static const int kIsolateSharedExternalPointerTableAddressOffset =
        kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
-   static const int kIsolateRootsOffset =
+ #ifdef V8_ENABLE_SANDBOX
+   static const int kIsolateTrustedCageBaseOffset =
        kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+   static const int kIsolateTrustedPointerTableOffset =
+       kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
+   static const int kIsolateApiCallbackThunkArgumentOffset =
+       kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
  #else
-   static const int kIsolateRootsOffset =
+   static const int kIsolateApiCallbackThunkArgumentOffset =
+       kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+ #endif  // V8_ENABLE_SANDBOX
+ #else
+   static const int kIsolateApiCallbackThunkArgumentOffset =
        kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
- #endif
+ #endif  // V8_COMPRESS_POINTERS
+   static const int kContinuationPreservedEmbedderDataOffset =
+       kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
+
+   static const int kWasm64OOBOffsetAlignmentPaddingSize = 0;
+   static const int kWasm64OOBOffsetOffset =
+       kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize +
+       kWasm64OOBOffsetAlignmentPaddingSize;
+   static const int kIsolateRootsOffset =
+       kWasm64OOBOffsetOffset + sizeof(int64_t);
 
  #if V8_STATIC_ROOTS_BOOL
 
-   // These constants need to be initialized in api.cc.
+   // These constants are copied from static-roots.h and guarded by static asserts.
  #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
-   V(UndefinedValue) \
-   V(NullValue) \
-   V(TrueValue) \
-   V(FalseValue) \
-   V(EmptyString) \
-   V(TheHoleValue)
+   V(UndefinedValue, 0x69) \
+   V(NullValue, 0x85) \
+   V(TrueValue, 0xc9) \
+   V(FalseValue, 0xad) \
+   V(EmptyString, 0xa1) \
+   V(TheHoleValue, 0x719)
 
    using Tagged_t = uint32_t;
    struct StaticReadOnlyRoot {
- #define DEF_ROOT(name) V8_EXPORT static const Tagged_t k##name;
+ #define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
      EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
  #undef DEF_ROOT
 
-     V8_EXPORT static const Tagged_t kFirstStringMap;
-     V8_EXPORT static const Tagged_t kLastStringMap;
+     static constexpr Tagged_t kFirstStringMap = 0xe5;
+     static constexpr Tagged_t kLastStringMap = 0x47d;
+
+ #define PLUSONE(...) +1
+     static constexpr size_t kNumberOfExportedStaticRoots =
+         2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
+ #undef PLUSONE
    };
 
  #endif  // V8_STATIC_ROOTS_BOOL
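Illustrative aside (not part of the diff): the PLUSONE trick above counts the entries of an X-macro list at compile time; each V(...) invocation expands to "+1", and the leading constant accounts for the two roots declared outside the list (kFirstStringMap and kLastStringMap). A self-contained version with a made-up list:

#include <cstddef>

#define EXAMPLE_LIST(V) \
  V(Alpha, 0x10)        \
  V(Beta, 0x20)         \
  V(Gamma, 0x30)

#define PLUSONE(...) +1
constexpr size_t kExampleListSize = 0 EXAMPLE_LIST(PLUSONE);  // expands to 0 +1 +1 +1
#undef PLUSONE

static_assert(kExampleListSize == 3);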
@@ -600,8 +784,6 @@ class Internals {
    static const int kNodeStateMask = 0x3;
    static const int kNodeStateIsWeakValue = 2;
 
-   static const int kTracedNodeClassIdOffset = kApiSystemPointerSize;
-
    static const int kFirstNonstringType = 0x80;
    static const int kOddballType = 0x83;
    static const int kForeignType = 0xcc;
@@ -609,8 +791,13 @@ class Internals {
    static const int kJSObjectType = 0x421;
    static const int kFirstJSApiObjectType = 0x422;
    static const int kLastJSApiObjectType = 0x80A;
+   // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
+   // of JSApiObject instance type values that an embedder can use.
+   static const int kFirstEmbedderJSApiObjectType = 0;
+   static const int kLastEmbedderJSApiObjectType =
+       kLastJSApiObjectType - kFirstJSApiObjectType;
 
-   static const int kUndefinedOddballKind = 5;
+   static const int kUndefinedOddballKind = 4;
    static const int kNullOddballKind = 3;
 
    // Constants used by PropertyCallbackInfo to check if we should throw when an
@@ -641,11 +828,11 @@ class Internals {
  #endif
    }
 
-   V8_INLINE static bool HasHeapObjectTag(Address value) {
+   V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
      return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
    }
 
-   V8_INLINE static int SmiValue(Address value) {
+   V8_INLINE static constexpr int SmiValue(Address value) {
      return PlatformSmiTagging::SmiToInt(value);
    }
 
@@ -680,6 +867,15 @@ class Internals {
      return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
    }
 
+   V8_INLINE static Address LoadMap(Address obj) {
+     if (!HasHeapObjectTag(obj)) return kNullAddress;
+     Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
+ #ifdef V8_MAP_PACKING
+     map = UnpackMapWord(map);
+ #endif
+     return map;
+   }
+
    V8_INLINE static int GetOddballKind(Address obj) {
      return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
    }
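Illustrative aside (not part of the diff): HasHeapObjectTag, which guards the new LoadMap above, inspects the low bits of a tagged word. The sketch below assumes V8's usual low-bit encoding (Smi tag 0, heap object tag 1, mask 0b11); the constants are restated locally for the example.

#include <cstdint>

using Address = uintptr_t;

constexpr Address kHeapObjectTag = 1;
constexpr Address kHeapObjectTagMask = 3;

constexpr bool HasHeapObjectTag(Address value) {
  return (value & kHeapObjectTagMask) == kHeapObjectTag;
}

static_assert(HasHeapObjectTag(0x1001));   // ...01: tagged heap object pointer
static_assert(!HasHeapObjectTag(0x1000));  // ...00: a Smi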
@@ -753,15 +949,15 @@ class Internals {
      Address base = *reinterpret_cast<Address*>(
          reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
      switch (index) {
- #define DECOMPRESS_ROOT(name) \
-   case k##name##RootIndex: \
+ #define DECOMPRESS_ROOT(name, ...) \
+   case k##name##RootIndex: \
        return base + StaticReadOnlyRoot::k##name;
        EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
  #undef DECOMPRESS_ROOT
+ #undef EXPORTED_STATIC_ROOTS_PTR_LIST
        default:
          break;
      }
- #undef EXPORTED_STATIC_ROOTS_PTR_LIST
  #endif  // V8_STATIC_ROOTS_BOOL
      return *GetRootSlot(isolate, index);
    }
@@ -770,7 +966,7 @@ class Internals {
    V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
      Address addr = reinterpret_cast<Address>(isolate) +
                     kIsolateExternalPointerTableOffset +
-                    kExternalPointerTableBufferOffset;
+                    kExternalPointerTableBasePointerOffset;
      return *reinterpret_cast<Address**>(addr);
    }
 
@@ -779,7 +975,7 @@ class Internals {
      Address addr = reinterpret_cast<Address>(isolate) +
                     kIsolateSharedExternalPointerTableAddressOffset;
      addr = *reinterpret_cast<Address*>(addr);
-     addr += kExternalPointerTableBufferOffset;
+     addr += kExternalPointerTableBasePointerOffset;
      return *reinterpret_cast<Address**>(addr);
    }
  #endif
@@ -860,6 +1056,10 @@ class Internals {
      return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
    }
 
+   V8_INLINE static uint32_t CompressTagged(Address value) {
+     return static_cast<uint32_t>(value);
+   }
+
    V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
                                                   uint32_t value) {
      Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
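Illustrative aside (not part of the diff): pointer compression as used by CompressTagged and DecompressTaggedField above; compression keeps only the low 32 bits of a tagged address, decompression re-attaches the cage base recovered from an on-heap address. The cage base value below is made up for the example.

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

constexpr Address kCageBase = 0x0000200000000000;  // hypothetical, 4 GB aligned

constexpr uint32_t Compress(Address value) {
  return static_cast<uint32_t>(value);  // keep the offset within the cage
}

constexpr Address Decompress(Address cage_base, uint32_t value) {
  return cage_base + value;  // re-attach the cage base
}

int main() {
  Address tagged = kCageBase + 0x12345679;  // some tagged pointer in the cage
  assert(Decompress(kCageBase, Compress(tagged)) == tagged);
}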
@@ -901,57 +1101,296 @@ class BackingStoreBase {};
  // This is needed for histograms sampling garbage collection reasons.
  constexpr int kGarbageCollectionReasonMaxValue = 27;
 
+ // Base class for the address block allocator compatible with standard
+ // containers, which registers its allocated range as strong roots.
+ class V8_EXPORT StrongRootAllocatorBase {
+  public:
+   Heap* heap() const { return heap_; }
+
+   bool operator==(const StrongRootAllocatorBase& other) const {
+     return heap_ == other.heap_;
+   }
+   bool operator!=(const StrongRootAllocatorBase& other) const {
+     return heap_ != other.heap_;
+   }
+
+  protected:
+   explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
+   explicit StrongRootAllocatorBase(v8::Isolate* isolate);
+
+   // Allocate/deallocate a range of n elements of type internal::Address.
+   Address* allocate_impl(size_t n);
+   void deallocate_impl(Address* p, size_t n) noexcept;
+
+  private:
+   Heap* heap_;
+ };
+
+ // The general version of this template behaves just as std::allocator, with
+ // the exception that the constructor takes the isolate as parameter. Only
+ // specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
+ // and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
+ // as strong roots.
+ template <typename T>
+ class StrongRootAllocator : public StrongRootAllocatorBase,
+                             private std::allocator<T> {
+  public:
+   using value_type = T;
+
+   explicit StrongRootAllocator(Heap* heap) : StrongRootAllocatorBase(heap) {}
+   explicit StrongRootAllocator(v8::Isolate* isolate)
+       : StrongRootAllocatorBase(isolate) {}
+   template <typename U>
+   StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept
+       : StrongRootAllocatorBase(other) {}
+
+   using std::allocator<T>::allocate;
+   using std::allocator<T>::deallocate;
+ };
+
+ // A class of iterators that wrap some different iterator type.
+ // If specified, ElementType is the type of element accessed by the wrapper
+ // iterator; in this case, the actual reference and pointer types of Iterator
+ // must be convertible to ElementType& and ElementType*, respectively.
+ template <typename Iterator, typename ElementType = void>
+ class WrappedIterator {
+  public:
+   static_assert(
+       !std::is_void_v<ElementType> ||
+       (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
+                              ElementType*> &&
+        std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
+                              ElementType&>));
+
+   using iterator_category =
+       typename std::iterator_traits<Iterator>::iterator_category;
+   using difference_type =
+       typename std::iterator_traits<Iterator>::difference_type;
+   using value_type =
+       std::conditional_t<std::is_void_v<ElementType>,
+                          typename std::iterator_traits<Iterator>::value_type,
+                          ElementType>;
+   using pointer =
+       std::conditional_t<std::is_void_v<ElementType>,
+                          typename std::iterator_traits<Iterator>::pointer,
+                          ElementType*>;
+   using reference =
+       std::conditional_t<std::is_void_v<ElementType>,
+                          typename std::iterator_traits<Iterator>::reference,
+                          ElementType&>;
+
+   constexpr WrappedIterator() noexcept : it_() {}
+   constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
+
+   template <typename OtherIterator, typename OtherElementType,
+             std::enable_if_t<std::is_convertible_v<OtherIterator, Iterator>,
+                              bool> = true>
+   constexpr WrappedIterator(
+       const WrappedIterator<OtherIterator, OtherElementType>& it) noexcept
+       : it_(it.base()) {}
+
+   constexpr reference operator*() const noexcept { return *it_; }
+   constexpr pointer operator->() const noexcept { return it_.operator->(); }
+
+   constexpr WrappedIterator& operator++() noexcept {
+     ++it_;
+     return *this;
+   }
+   constexpr WrappedIterator operator++(int) noexcept {
+     WrappedIterator result(*this);
+     ++(*this);
+     return result;
+   }
+
+   constexpr WrappedIterator& operator--() noexcept {
+     --it_;
+     return *this;
+   }
+   constexpr WrappedIterator operator--(int) noexcept {
+     WrappedIterator result(*this);
+     --(*this);
+     return result;
+   }
+   constexpr WrappedIterator operator+(difference_type n) const noexcept {
+     WrappedIterator result(*this);
+     result += n;
+     return result;
+   }
+   constexpr WrappedIterator& operator+=(difference_type n) noexcept {
+     it_ += n;
+     return *this;
+   }
+   constexpr WrappedIterator operator-(difference_type n) const noexcept {
+     return *this + (-n);
+   }
+   constexpr WrappedIterator& operator-=(difference_type n) noexcept {
+     *this += -n;
+     return *this;
+   }
+   constexpr reference operator[](difference_type n) const noexcept {
+     return it_[n];
+   }
+
+   constexpr Iterator base() const noexcept { return it_; }
+
+  private:
+   template <typename OtherIterator, typename OtherElementType>
+   friend class WrappedIterator;
+
+  private:
+   Iterator it_;
+ };
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator==(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return x.base() == y.base();
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator<(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return x.base() < y.base();
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator!=(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return !(x == y);
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator>(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return y < x;
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator>=(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return !(x < y);
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr bool operator<=(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
+   return !(y < x);
+ }
+
+ template <typename Iterator, typename ElementType, typename OtherIterator,
+           typename OtherElementType>
+ constexpr auto operator-(
+     const WrappedIterator<Iterator, ElementType>& x,
+     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept
+     -> decltype(x.base() - y.base()) {
+   return x.base() - y.base();
+ }
+
+ template <typename Iterator, typename ElementType>
+ constexpr WrappedIterator<Iterator> operator+(
+     typename WrappedIterator<Iterator, ElementType>::difference_type n,
+     const WrappedIterator<Iterator, ElementType>& x) noexcept {
+   x += n;
+   return x;
+ }
+
  // Helper functions about values contained in handles.
+ // A value is either an indirect pointer or a direct pointer, depending on
+ // whether direct local support is enabled.
  class ValueHelper final {
   public:
- #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-   static constexpr Address kLocalTaggedNullAddress = 1;
+ #ifdef V8_ENABLE_DIRECT_LOCAL
+   static constexpr Address kTaggedNullAddress = 1;
+   static constexpr Address kEmpty = kTaggedNullAddress;
+ #else
+   static constexpr Address kEmpty = kNullAddress;
+ #endif  // V8_ENABLE_DIRECT_LOCAL
 
    template <typename T>
-   static constexpr T* EmptyValue() {
-     return reinterpret_cast<T*>(kLocalTaggedNullAddress);
+   V8_INLINE static bool IsEmpty(T* value) {
+     return reinterpret_cast<Address>(value) == kEmpty;
+   }
+
+   // Returns a handle's "value" for all kinds of abstract handles. For Local,
+   // it is equivalent to `*handle`. The variadic parameters support handle
+   // types with extra type parameters, like `Persistent<T, M>`.
+   template <template <typename T, typename... Ms> typename H, typename T,
+             typename... Ms>
+   V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
+     return handle.template value<T>();
    }
 
+ #ifdef V8_ENABLE_DIRECT_LOCAL
+
    template <typename T>
    V8_INLINE static Address ValueAsAddress(const T* value) {
      return reinterpret_cast<Address>(value);
    }
 
-   template <typename T, typename S>
+   template <typename T, bool check_null = true, typename S>
    V8_INLINE static T* SlotAsValue(S* slot) {
+     if (check_null && slot == nullptr) {
+       return reinterpret_cast<T*>(kTaggedNullAddress);
+     }
      return *reinterpret_cast<T**>(slot);
    }
 
-   template <typename T>
-   V8_INLINE static T* ValueAsSlot(T* const& value) {
-     return reinterpret_cast<T*>(const_cast<T**>(&value));
-   }
-
- #else  // !V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-
-   template <typename T>
-   static constexpr T* EmptyValue() {
-     return nullptr;
-   }
+ #else  // !V8_ENABLE_DIRECT_LOCAL
 
    template <typename T>
    V8_INLINE static Address ValueAsAddress(const T* value) {
      return *reinterpret_cast<const Address*>(value);
    }
 
-   template <typename T, typename S>
+   template <typename T, bool check_null = true, typename S>
    V8_INLINE static T* SlotAsValue(S* slot) {
      return reinterpret_cast<T*>(slot);
    }
 
-   template <typename T>
-   V8_INLINE static T* ValueAsSlot(T* const& value) {
-     return value;
+ #endif  // V8_ENABLE_DIRECT_LOCAL
+ };
+
+ /**
+  * Helper functions about handles.
+  */
+ class HandleHelper final {
+  public:
+   /**
+    * Checks whether two handles are equal.
+    * They are equal iff they are both empty or they are both non-empty and the
+    * objects to which they refer are physically equal.
+    *
+    * If both handles refer to JS objects, this is the same as strict equality.
+    * For primitives, such as numbers or strings, a `false` return value does not
+    * indicate that the values aren't equal in the JavaScript sense.
+    * Use `Value::StrictEquals()` to check primitives for equality.
+    */
+   template <typename T1, typename T2>
+   V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
+     if (lhs.IsEmpty()) return rhs.IsEmpty();
+     if (rhs.IsEmpty()) return false;
+     return lhs.ptr() == rhs.ptr();
    }
 
- #endif  // V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+   static V8_EXPORT bool IsOnStack(const void* ptr);
+   static V8_EXPORT void VerifyOnStack(const void* ptr);
+   static V8_EXPORT void VerifyOnMainThread();
  };
 
+ V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);
+
  }  // namespace internal
  }  // namespace v8
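Illustrative aside (not part of the diff): a minimal model of the equality rule documented on HandleHelper::EqualHandles above; two handles are equal iff both are empty, or both are non-empty and refer to the same object. The Handle struct here is a stand-in for the example, not a V8 type.

#include <cassert>
#include <cstdint>

struct Handle {
  uintptr_t location;  // 0 models an empty handle
  bool IsEmpty() const { return location == 0; }
  uintptr_t ptr() const { return location; }
};

template <typename T1, typename T2>
bool EqualHandles(const T1& lhs, const T2& rhs) {
  if (lhs.IsEmpty()) return rhs.IsEmpty();
  if (rhs.IsEmpty()) return false;
  return lhs.ptr() == rhs.ptr();
}

int main() {
  Handle empty1{0}, empty2{0}, a{0x1000}, b{0x1000}, c{0x2000};
  assert(EqualHandles(empty1, empty2));  // both empty
  assert(!EqualHandles(empty1, a));      // empty vs. non-empty
  assert(EqualHandles(a, b));            // same object
  assert(!EqualHandles(a, c));           // different objects
}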