libv8-node 19.9.0.0-x86_64-linux → 20.2.0.0-x86_64-linux

Files changed (48)
  1. checksums.yaml +4 -4
  2. data/lib/libv8/node/version.rb +3 -3
  3. data/vendor/v8/include/cppgc/cross-thread-persistent.h +4 -2
  4. data/vendor/v8/include/cppgc/heap-consistency.h +2 -2
  5. data/vendor/v8/include/cppgc/heap-handle.h +5 -0
  6. data/vendor/v8/include/cppgc/internal/api-constants.h +4 -1
  7. data/vendor/v8/include/cppgc/internal/gc-info.h +35 -33
  8. data/vendor/v8/include/cppgc/internal/member-storage.h +19 -7
  9. data/vendor/v8/include/cppgc/internal/pointer-policies.h +38 -2
  10. data/vendor/v8/include/cppgc/internal/write-barrier.h +15 -5
  11. data/vendor/v8/include/cppgc/macros.h +10 -1
  12. data/vendor/v8/include/cppgc/member.h +167 -129
  13. data/vendor/v8/include/cppgc/persistent.h +22 -15
  14. data/vendor/v8/include/cppgc/platform.h +6 -4
  15. data/vendor/v8/include/cppgc/type-traits.h +4 -3
  16. data/vendor/v8/include/cppgc/visitor.h +16 -1
  17. data/vendor/v8/include/libplatform/v8-tracing.h +2 -2
  18. data/vendor/v8/include/v8-array-buffer.h +59 -0
  19. data/vendor/v8/include/v8-callbacks.h +14 -1
  20. data/vendor/v8/include/v8-context.h +50 -3
  21. data/vendor/v8/include/v8-cppgc.h +10 -0
  22. data/vendor/v8/include/v8-data.h +1 -1
  23. data/vendor/v8/include/v8-embedder-heap.h +0 -169
  24. data/vendor/v8/include/v8-fast-api-calls.h +7 -3
  25. data/vendor/v8/include/v8-function-callback.h +69 -42
  26. data/vendor/v8/include/v8-function.h +1 -0
  27. data/vendor/v8/include/v8-inspector.h +20 -5
  28. data/vendor/v8/include/v8-internal.h +242 -150
  29. data/vendor/v8/include/v8-isolate.h +30 -40
  30. data/vendor/v8/include/v8-local-handle.h +81 -48
  31. data/vendor/v8/include/v8-metrics.h +28 -2
  32. data/vendor/v8/include/v8-microtask-queue.h +5 -0
  33. data/vendor/v8/include/v8-object.h +21 -3
  34. data/vendor/v8/include/v8-persistent-handle.h +25 -16
  35. data/vendor/v8/include/v8-platform.h +79 -10
  36. data/vendor/v8/include/v8-primitive.h +19 -12
  37. data/vendor/v8/include/v8-profiler.h +49 -31
  38. data/vendor/v8/include/v8-script.h +29 -1
  39. data/vendor/v8/include/v8-snapshot.h +4 -8
  40. data/vendor/v8/include/v8-template.h +3 -1
  41. data/vendor/v8/include/v8-traced-handle.h +22 -28
  42. data/vendor/v8/include/v8-util.h +9 -3
  43. data/vendor/v8/include/v8-value.h +31 -4
  44. data/vendor/v8/include/v8-version.h +4 -4
  45. data/vendor/v8/include/v8-wasm.h +2 -1
  46. data/vendor/v8/include/v8config.h +73 -2
  47. data/vendor/v8/x86_64-linux/libv8/obj/libv8_monolith.a +0 -0
  48. metadata +1 -1
data/vendor/v8/include/v8-internal.h

@@ -21,15 +21,13 @@ class Array;
  class Context;
  class Data;
  class Isolate;
- template <typename T>
- class Local;

  namespace internal {

  class Isolate;

  typedef uintptr_t Address;
- static const Address kNullAddress = 0;
+ static constexpr Address kNullAddress = 0;

  constexpr int KB = 1024;
  constexpr int MB = KB * 1024;
@@ -82,7 +80,7 @@ struct SmiTagging<4> {
  static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

- V8_INLINE static int SmiToInt(const internal::Address value) {
+ V8_INLINE static int SmiToInt(Address value) {
  int shift_bits = kSmiTagSize + kSmiShiftSize;
  // Truncate and shift down (requires >> to be sign extending).
  return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
@@ -107,7 +105,7 @@ struct SmiTagging<8> {
  static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

- V8_INLINE static int SmiToInt(const internal::Address value) {
+ V8_INLINE static int SmiToInt(Address value) {
  int shift_bits = kSmiTagSize + kSmiShiftSize;
  // Shift down and throw away top 32 bits.
  return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
@@ -150,8 +148,9 @@ const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
  const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
  constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
  constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
+ constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }

- V8_INLINE static constexpr internal::Address IntToSmi(int value) {
+ V8_INLINE static constexpr Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
  kSmiTag;
  }
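The SmiTagging changes in this hunk and the two above are signature cleanups (dropping the redundant `internal::`/`const` qualifiers); the encode/decode pair itself is still a plain shift into and out of the upper bits. As a quick standalone illustration (not the V8 code itself), here is the round trip with the 64-bit, non-pointer-compressed constants hard-coded as assumptions (kSmiTagSize = 1, kSmiShiftSize = 31, kSmiTag = 0):

```cpp
#include <cassert>
#include <cstdint>

// Assumed 64-bit, non-pointer-compressed Smi layout: payload in the upper
// 32 bits, 1 tag bit + 31 shift bits below it.
using Address = uintptr_t;
constexpr int kSmiTagSize = 1;
constexpr int kSmiShiftSize = 31;
constexpr Address kSmiTag = 0;

constexpr Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) | kSmiTag;
}

constexpr int SmiToInt(Address value) {
  // The SmiTagging<8> variant: shift down and throw away the top 32 bits.
  return static_cast<int>(static_cast<intptr_t>(value) >> (kSmiTagSize + kSmiShiftSize));
}

int main() {
  assert(SmiToInt(IntToSmi(42)) == 42);
  assert(SmiToInt(IntToSmi(-7)) == -7);  // sign-extending >> keeps negatives intact
}
```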
@@ -242,6 +241,7 @@ static_assert(1ULL << (64 - kBoundedSizeShift) ==

  #ifdef V8_COMPRESS_POINTERS

+ #ifdef V8_TARGET_OS_ANDROID
  // The size of the virtual memory reservation for an external pointer table.
  // This determines the maximum number of entries in a table. Using a maximum
  // size allows omitting bounds checks on table accesses if the indices are
@@ -249,14 +249,18 @@ static_assert(1ULL << (64 - kBoundedSizeShift) ==
  // value must be a power of two.
  static const size_t kExternalPointerTableReservationSize = 512 * MB;

- // The maximum number of entries in an external pointer table.
- static const size_t kMaxExternalPointers =
- kExternalPointerTableReservationSize / kApiSystemPointerSize;
-
  // The external pointer table indices stored in HeapObjects as external
  // pointers are shifted to the left by this amount to guarantee that they are
  // smaller than the maximum table size.
  static const uint32_t kExternalPointerIndexShift = 6;
+ #else
+ static const size_t kExternalPointerTableReservationSize = 1024 * MB;
+ static const uint32_t kExternalPointerIndexShift = 5;
+ #endif // V8_TARGET_OS_ANDROID
+
+ // The maximum number of entries in an external pointer table.
+ static const size_t kMaxExternalPointers =
+ kExternalPointerTableReservationSize / kApiSystemPointerSize;
  static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
  "kExternalPointerTableReservationSize and "
  "kExternalPointerIndexShift don't match");
@@ -345,6 +349,14 @@ using ExternalPointer_t = Address;
  // that the Embedder is not using this byte (really only this one bit) for any
  // other purpose. This bit also does not collide with the memory tagging
  // extension (MTE) which would use bits [56, 60).
+ //
+ // External pointer tables are also available even when the sandbox is off but
+ // pointer compression is on. In that case, the mechanism can be used to easy
+ // alignment requirements as it turns unaligned 64-bit raw pointers into
+ // aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
+ // for this purpose, instead of using the ExternalPointer accessors one needs to
+ // use ExternalPointerHandles directly and use them to access the pointers in an
+ // ExternalPointerTable.
  constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
  constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
  constexpr uint64_t kExternalPointerTagShift = 48;
@@ -367,71 +379,58 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
  0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
  0b11100010, 0b11100100, 0b11101000, 0b11110000};

+ #define TAG(i) \
+ ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
+ kExternalPointerMarkBit)
+
  // clang-format off
- // New entries should be added with state "sandboxed".
+
  // When adding new tags, please ensure that the code using these tags is
  // "substitution-safe", i.e. still operate safely if external pointers of the
  // same type are swapped by an attacker. See comment above for more details.
- #define TAG(i) (kAllExternalPointerTypeTags[i])

  // Shared external pointers are owned by the shared Isolate and stored in the
  // shared external pointer table associated with that Isolate, where they can
  // be accessed from multiple threads at the same time. The objects referenced
  // in this way must therefore always be thread-safe.
- #define SHARED_EXTERNAL_POINTER_TAGS(V) \
- V(kFirstSharedTag, sandboxed, TAG(0)) \
- V(kWaiterQueueNodeTag, sandboxed, TAG(0)) \
- V(kExternalStringResourceTag, sandboxed, TAG(1)) \
- V(kExternalStringResourceDataTag, sandboxed, TAG(2)) \
- V(kLastSharedTag, sandboxed, TAG(2))
+ #define SHARED_EXTERNAL_POINTER_TAGS(V) \
+ V(kFirstSharedTag, TAG(0)) \
+ V(kWaiterQueueNodeTag, TAG(0)) \
+ V(kExternalStringResourceTag, TAG(1)) \
+ V(kExternalStringResourceDataTag, TAG(2)) \
+ V(kLastSharedTag, TAG(2))

  // External pointers using these tags are kept in a per-Isolate external
  // pointer table and can only be accessed when this Isolate is active.
- #define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
- V(kForeignForeignAddressTag, sandboxed, TAG(10)) \
- V(kNativeContextMicrotaskQueueTag, sandboxed, TAG(11)) \
- V(kEmbedderDataSlotPayloadTag, sandboxed, TAG(12)) \
- V(kExternalObjectValueTag, sandboxed, TAG(13)) \
- V(kCallHandlerInfoCallbackTag, sandboxed, TAG(14)) \
- V(kAccessorInfoGetterTag, sandboxed, TAG(15)) \
- V(kAccessorInfoSetterTag, sandboxed, TAG(16)) \
- V(kWasmInternalFunctionCallTargetTag, sandboxed, TAG(17)) \
- V(kWasmTypeInfoNativeTypeTag, sandboxed, TAG(18)) \
- V(kWasmExportedFunctionDataSignatureTag, sandboxed, TAG(19)) \
- V(kWasmContinuationJmpbufTag, sandboxed, TAG(20)) \
- V(kArrayBufferExtensionTag, sandboxed, TAG(21))
+ #define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
+ V(kForeignForeignAddressTag, TAG(10)) \
+ V(kNativeContextMicrotaskQueueTag, TAG(11)) \
+ V(kEmbedderDataSlotPayloadTag, TAG(12)) \
+ /* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
+ /* it is the Embedder's responsibility to ensure type safety (against */ \
+ /* substitution) and lifetime validity of these objects. */ \
+ V(kExternalObjectValueTag, TAG(13)) \
+ V(kCallHandlerInfoCallbackTag, TAG(14)) \
+ V(kAccessorInfoGetterTag, TAG(15)) \
+ V(kAccessorInfoSetterTag, TAG(16)) \
+ V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
+ V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
+ V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
+ V(kWasmContinuationJmpbufTag, TAG(20)) \
+ V(kArrayBufferExtensionTag, TAG(21))

  // All external pointer tags.
  #define ALL_EXTERNAL_POINTER_TAGS(V) \
  SHARED_EXTERNAL_POINTER_TAGS(V) \
  PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)

- // When the sandbox is enabled, external pointers marked as "sandboxed" above
- // use the external pointer table (i.e. are sandboxed). This allows a gradual
- // rollout of external pointer sandboxing. If the sandbox is off, no external
- // pointers are sandboxed.
- //
- // Sandboxed external pointer tags are available when compressing pointers even
- // when the sandbox is off. Some tags (e.g. kWaiterQueueNodeTag) are used
- // manually with the external pointer table even when the sandbox is off to ease
- // alignment requirements.
- #define sandboxed(X) (X << kExternalPointerTagShift) | kExternalPointerMarkBit
- #define unsandboxed(X) kUnsandboxedExternalPointerTag
- #if defined(V8_COMPRESS_POINTERS)
- #define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = State(Bits),
- #else
- #define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = unsandboxed(Bits),
- #endif
-
+ #define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
  #define MAKE_TAG(HasMarkBit, TypeTag) \
  ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
  (HasMarkBit ? kExternalPointerMarkBit : 0))
  enum ExternalPointerTag : uint64_t {
  // Empty tag value. Mostly used as placeholder.
  kExternalPointerNullTag = MAKE_TAG(0, 0b00000000),
- // Tag to use for unsandboxed external pointers, which are still stored as
- // raw pointers on the heap.
- kUnsandboxedExternalPointerTag = MAKE_TAG(0, 0b00000000),
  // External pointer tag that will match any external pointer. Use with care!
  kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
  // The free entry tag has all type bits set so every type check with a
445
444
  };
446
445
 
447
446
  #undef MAKE_TAG
448
- #undef unsandboxed
449
- #undef sandboxed
450
447
  #undef TAG
451
448
  #undef EXTERNAL_POINTER_TAG_ENUM
452
449
 
453
450
  // clang-format on
454
451
 
455
- // True if the external pointer is sandboxed and so must be referenced through
456
- // an external pointer table.
457
- V8_INLINE static constexpr bool IsSandboxedExternalPointerType(
458
- ExternalPointerTag tag) {
459
- return tag != kUnsandboxedExternalPointerTag;
460
- }
461
-
462
452
  // True if the external pointer must be accessed from the shared isolate's
463
453
  // external pointer table.
464
454
  V8_INLINE static constexpr bool IsSharedExternalPointerType(
@@ -467,12 +457,10 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType(
467
457
  }
468
458
 
469
459
  // Sanity checks.
470
- #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
471
- static_assert(!IsSandboxedExternalPointerType(Tag) || \
472
- IsSharedExternalPointerType(Tag));
460
+ #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
461
+ static_assert(IsSharedExternalPointerType(Tag));
473
462
  #define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
474
- static_assert(!IsSandboxedExternalPointerType(Tag) || \
475
- !IsSharedExternalPointerType(Tag));
463
+ static_assert(!IsSharedExternalPointerType(Tag));
476
464
 
477
465
  SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
478
466
  PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
@@ -490,7 +478,7 @@ V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
490
478
  // Returns if we need to throw when an error occurs. This infers the language
491
479
  // mode based on the current context and the closure. This returns true if the
492
480
  // language mode is strict.
493
- V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
481
+ V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
494
482
  /**
495
483
  * This class exports constants and functionality from within v8 that
496
484
  * is necessary to implement inline functions in the v8 api. Don't
@@ -498,8 +486,7 @@ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
498
486
  */
499
487
  class Internals {
500
488
  #ifdef V8_MAP_PACKING
501
- V8_INLINE static constexpr internal::Address UnpackMapWord(
502
- internal::Address mapword) {
489
+ V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
503
490
  // TODO(wenyuzhao): Clear header metadata.
504
491
  return mapword ^ kMapWordXorMask;
505
492
  }
@@ -533,6 +520,8 @@ class Internals {
533
520
  static const int kStackGuardSize = 7 * kApiSystemPointerSize;
534
521
  static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
535
522
  static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
523
+ static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
524
+ static const int kThreadLocalTopSize = 25 * kApiSystemPointerSize;
536
525
 
537
526
  // ExternalPointerTable layout guarantees.
538
527
  static const int kExternalPointerTableBufferOffset = 0;
@@ -545,31 +534,60 @@ class Internals {
545
534
  static const int kVariousBooleanFlagsOffset =
546
535
  kIsolateStackGuardOffset + kStackGuardSize;
547
536
  static const int kBuiltinTier0EntryTableOffset =
548
- kVariousBooleanFlagsOffset + kApiSystemPointerSize;
537
+ kVariousBooleanFlagsOffset + 8;
549
538
  static const int kBuiltinTier0TableOffset =
550
539
  kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
551
- static const int kIsolateEmbedderDataOffset =
540
+ static const int kNewAllocationInfoOffset =
552
541
  kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
542
+ static const int kOldAllocationInfoOffset =
543
+ kNewAllocationInfoOffset + kLinearAllocationAreaSize;
553
544
  static const int kIsolateFastCCallCallerFpOffset =
554
- kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
545
+ kOldAllocationInfoOffset + kLinearAllocationAreaSize;
555
546
  static const int kIsolateFastCCallCallerPcOffset =
556
547
  kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
557
548
  static const int kIsolateFastApiCallTargetOffset =
558
549
  kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
559
550
  static const int kIsolateLongTaskStatsCounterOffset =
560
551
  kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
552
+ static const int kIsolateThreadLocalTopOffset =
553
+ kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
554
+ static const int kIsolateEmbedderDataOffset =
555
+ kIsolateThreadLocalTopOffset + kThreadLocalTopSize;
561
556
  #ifdef V8_COMPRESS_POINTERS
562
557
  static const int kIsolateExternalPointerTableOffset =
563
- kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
558
+ kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
564
559
  static const int kIsolateSharedExternalPointerTableAddressOffset =
565
560
  kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
566
561
  static const int kIsolateRootsOffset =
567
562
  kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
568
563
  #else
569
564
  static const int kIsolateRootsOffset =
570
- kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
565
+ kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
571
566
  #endif
572
567
 
568
+ #if V8_STATIC_ROOTS_BOOL
569
+
570
+ // These constants need to be initialized in api.cc.
571
+ #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
572
+ V(UndefinedValue) \
573
+ V(NullValue) \
574
+ V(TrueValue) \
575
+ V(FalseValue) \
576
+ V(EmptyString) \
577
+ V(TheHoleValue)
578
+
579
+ using Tagged_t = uint32_t;
580
+ struct StaticReadOnlyRoot {
581
+ #define DEF_ROOT(name) V8_EXPORT static const Tagged_t k##name;
582
+ EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
583
+ #undef DEF_ROOT
584
+
585
+ V8_EXPORT static const Tagged_t kFirstStringMap;
586
+ V8_EXPORT static const Tagged_t kLastStringMap;
587
+ };
588
+
589
+ #endif // V8_STATIC_ROOTS_BOOL
590
+
573
591
  static const int kUndefinedValueRootIndex = 4;
574
592
  static const int kTheHoleValueRootIndex = 5;
575
593
  static const int kNullValueRootIndex = 6;
@@ -582,6 +600,8 @@ class Internals {
582
600
  static const int kNodeStateMask = 0x3;
583
601
  static const int kNodeStateIsWeakValue = 2;
584
602
 
603
+ static const int kTracedNodeClassIdOffset = kApiSystemPointerSize;
604
+
585
605
  static const int kFirstNonstringType = 0x80;
586
606
  static const int kOddballType = 0x83;
587
607
  static const int kForeignType = 0xcc;
@@ -621,15 +641,15 @@ class Internals {
621
641
  #endif
622
642
  }
623
643
 
624
- V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
644
+ V8_INLINE static bool HasHeapObjectTag(Address value) {
625
645
  return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
626
646
  }
627
647
 
628
- V8_INLINE static int SmiValue(const internal::Address value) {
648
+ V8_INLINE static int SmiValue(Address value) {
629
649
  return PlatformSmiTagging::SmiToInt(value);
630
650
  }
631
651
 
632
- V8_INLINE static constexpr internal::Address IntToSmi(int value) {
652
+ V8_INLINE static constexpr Address IntToSmi(int value) {
633
653
  return internal::IntToSmi(value);
634
654
  }
635
655
 
@@ -637,16 +657,30 @@ class Internals {
637
657
  return PlatformSmiTagging::IsValidSmi(value);
638
658
  }
639
659
 
640
- V8_INLINE static int GetInstanceType(const internal::Address obj) {
641
- typedef internal::Address A;
642
- A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
660
+ #if V8_STATIC_ROOTS_BOOL
661
+ V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
662
+ return static_cast<Tagged_t>(obj) == constant;
663
+ }
664
+
665
+ V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
666
+ Tagged_t last_map) {
667
+ auto map = ReadRawField<Tagged_t>(obj, kHeapObjectMapOffset);
668
+ #ifdef V8_MAP_PACKING
669
+ map = UnpackMapWord(map);
670
+ #endif
671
+ return map >= first_map && map <= last_map;
672
+ }
673
+ #endif
674
+
675
+ V8_INLINE static int GetInstanceType(Address obj) {
676
+ Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
643
677
  #ifdef V8_MAP_PACKING
644
678
  map = UnpackMapWord(map);
645
679
  #endif
646
680
  return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
647
681
  }
648
682
 
649
- V8_INLINE static int GetOddballKind(const internal::Address obj) {
683
+ V8_INLINE static int GetOddballKind(Address obj) {
650
684
  return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
651
685
  }
652
686
 
@@ -667,80 +701,92 @@ class Internals {
667
701
  static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
668
702
  }
669
703
 
670
- V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
704
+ V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
671
705
  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
672
706
  return *addr & static_cast<uint8_t>(1U << shift);
673
707
  }
674
708
 
675
- V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
676
- int shift) {
709
+ V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
677
710
  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
678
711
  uint8_t mask = static_cast<uint8_t>(1U << shift);
679
712
  *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
680
713
  }
681
714
 
682
- V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
715
+ V8_INLINE static uint8_t GetNodeState(Address* obj) {
683
716
  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
684
717
  return *addr & kNodeStateMask;
685
718
  }
686
719
 
687
- V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
720
+ V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
688
721
  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
689
722
  *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
690
723
  }
691
724
 
692
725
  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
693
726
  void* data) {
694
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
695
- kIsolateEmbedderDataOffset +
696
- slot * kApiSystemPointerSize;
727
+ Address addr = reinterpret_cast<Address>(isolate) +
728
+ kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
697
729
  *reinterpret_cast<void**>(addr) = data;
698
730
  }
699
731
 
700
732
  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
701
733
  uint32_t slot) {
702
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
703
- kIsolateEmbedderDataOffset +
704
- slot * kApiSystemPointerSize;
734
+ Address addr = reinterpret_cast<Address>(isolate) +
735
+ kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
705
736
  return *reinterpret_cast<void* const*>(addr);
706
737
  }
707
738
 
708
739
  V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
709
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
710
- kIsolateLongTaskStatsCounterOffset;
740
+ Address addr =
741
+ reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
711
742
  ++(*reinterpret_cast<size_t*>(addr));
712
743
  }
713
744
 
714
- V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
715
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
716
- kIsolateRootsOffset +
717
- index * kApiSystemPointerSize;
718
- return reinterpret_cast<internal::Address*>(addr);
745
+ V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
746
+ Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
747
+ index * kApiSystemPointerSize;
748
+ return reinterpret_cast<Address*>(addr);
749
+ }
750
+
751
+ V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
752
+ #if V8_STATIC_ROOTS_BOOL
753
+ Address base = *reinterpret_cast<Address*>(
754
+ reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
755
+ switch (index) {
756
+ #define DECOMPRESS_ROOT(name) \
757
+ case k##name##RootIndex: \
758
+ return base + StaticReadOnlyRoot::k##name;
759
+ EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
760
+ #undef DECOMPRESS_ROOT
761
+ default:
762
+ break;
763
+ }
764
+ #undef EXPORTED_STATIC_ROOTS_PTR_LIST
765
+ #endif // V8_STATIC_ROOTS_BOOL
766
+ return *GetRootSlot(isolate, index);
719
767
  }
720
768
 
721
769
  #ifdef V8_ENABLE_SANDBOX
722
- V8_INLINE static internal::Address* GetExternalPointerTableBase(
723
- v8::Isolate* isolate) {
724
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
725
- kIsolateExternalPointerTableOffset +
726
- kExternalPointerTableBufferOffset;
727
- return *reinterpret_cast<internal::Address**>(addr);
770
+ V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
771
+ Address addr = reinterpret_cast<Address>(isolate) +
772
+ kIsolateExternalPointerTableOffset +
773
+ kExternalPointerTableBufferOffset;
774
+ return *reinterpret_cast<Address**>(addr);
728
775
  }
729
776
 
730
- V8_INLINE static internal::Address* GetSharedExternalPointerTableBase(
777
+ V8_INLINE static Address* GetSharedExternalPointerTableBase(
731
778
  v8::Isolate* isolate) {
732
- internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
733
- kIsolateSharedExternalPointerTableAddressOffset;
734
- addr = *reinterpret_cast<internal::Address*>(addr);
779
+ Address addr = reinterpret_cast<Address>(isolate) +
780
+ kIsolateSharedExternalPointerTableAddressOffset;
781
+ addr = *reinterpret_cast<Address*>(addr);
735
782
  addr += kExternalPointerTableBufferOffset;
736
- return *reinterpret_cast<internal::Address**>(addr);
783
+ return *reinterpret_cast<Address**>(addr);
737
784
  }
738
785
  #endif
739
786
 
740
787
  template <typename T>
741
- V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
742
- int offset) {
743
- internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
788
+ V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
789
+ Address addr = heap_object_ptr + offset - kHeapObjectTag;
744
790
  #ifdef V8_COMPRESS_POINTERS
745
791
  if (sizeof(T) > kApiTaggedSize) {
746
792
  // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
@@ -755,29 +801,28 @@ class Internals {
755
801
  return *reinterpret_cast<const T*>(addr);
756
802
  }
757
803
 
758
- V8_INLINE static internal::Address ReadTaggedPointerField(
759
- internal::Address heap_object_ptr, int offset) {
804
+ V8_INLINE static Address ReadTaggedPointerField(Address heap_object_ptr,
805
+ int offset) {
760
806
  #ifdef V8_COMPRESS_POINTERS
761
807
  uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
762
- internal::Address base =
763
- GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
764
- return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
808
+ Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
809
+ return base + static_cast<Address>(static_cast<uintptr_t>(value));
765
810
  #else
766
- return ReadRawField<internal::Address>(heap_object_ptr, offset);
811
+ return ReadRawField<Address>(heap_object_ptr, offset);
767
812
  #endif
768
813
  }
769
814
 
770
- V8_INLINE static internal::Address ReadTaggedSignedField(
771
- internal::Address heap_object_ptr, int offset) {
815
+ V8_INLINE static Address ReadTaggedSignedField(Address heap_object_ptr,
816
+ int offset) {
772
817
  #ifdef V8_COMPRESS_POINTERS
773
818
  uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
774
- return static_cast<internal::Address>(static_cast<uintptr_t>(value));
819
+ return static_cast<Address>(static_cast<uintptr_t>(value));
775
820
  #else
776
- return ReadRawField<internal::Address>(heap_object_ptr, offset);
821
+ return ReadRawField<Address>(heap_object_ptr, offset);
777
822
  #endif
778
823
  }
779
824
 
780
- V8_INLINE static v8::Isolate* GetIsolateForSandbox(internal::Address obj) {
825
+ V8_INLINE static v8::Isolate* GetIsolateForSandbox(Address obj) {
781
826
  #ifdef V8_ENABLE_SANDBOX
782
827
  return reinterpret_cast<v8::Isolate*>(
783
828
  internal::IsolateFromNeverReadOnlySpaceObject(obj));
@@ -788,40 +833,37 @@ class Internals {
  }

  template <ExternalPointerTag tag>
- V8_INLINE static internal::Address ReadExternalPointerField(
- v8::Isolate* isolate, internal::Address heap_object_ptr, int offset) {
+ V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
+ Address heap_object_ptr,
+ int offset) {
  #ifdef V8_ENABLE_SANDBOX
- if (IsSandboxedExternalPointerType(tag)) {
- // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
- // it can be inlined and doesn't require an additional call.
- internal::Address* table =
- IsSharedExternalPointerType(tag)
- ? GetSharedExternalPointerTableBase(isolate)
- : GetExternalPointerTableBase(isolate);
- internal::ExternalPointerHandle handle =
- ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
- uint32_t index = handle >> kExternalPointerIndexShift;
- std::atomic<internal::Address>* ptr =
- reinterpret_cast<std::atomic<internal::Address>*>(&table[index]);
- internal::Address entry =
- std::atomic_load_explicit(ptr, std::memory_order_relaxed);
- return entry & ~tag;
- }
- #endif
+ static_assert(tag != kExternalPointerNullTag);
+ // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
+ // it can be inlined and doesn't require an additional call.
+ Address* table = IsSharedExternalPointerType(tag)
+ ? GetSharedExternalPointerTableBase(isolate)
+ : GetExternalPointerTableBase(isolate);
+ internal::ExternalPointerHandle handle =
+ ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
+ uint32_t index = handle >> kExternalPointerIndexShift;
+ std::atomic<Address>* ptr =
+ reinterpret_cast<std::atomic<Address>*>(&table[index]);
+ Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
+ return entry & ~tag;
+ #else
  return ReadRawField<Address>(heap_object_ptr, offset);
+ #endif // V8_ENABLE_SANDBOX
  }

  #ifdef V8_COMPRESS_POINTERS
- V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
- internal::Address addr) {
+ V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) {
  return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
  }

- V8_INLINE static internal::Address DecompressTaggedAnyField(
- internal::Address heap_object_ptr, uint32_t value) {
- internal::Address base =
- GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
- return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+ V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
+ uint32_t value) {
+ Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+ return base + static_cast<Address>(static_cast<uintptr_t>(value));
  }

  #endif // V8_COMPRESS_POINTERS
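Under V8_ENABLE_SANDBOX the rewritten ReadExternalPointerField above now always goes through the table: read the 32-bit handle stored in the object, shift it down to an index, do a relaxed atomic load of the entry, and mask off the expected tag. A simplified standalone model of that fast path (toy table, illustrative tag and shift values, none of the real Isolate layout):

```cpp
#include <atomic>
#include <cassert>
#include <cstdint>

using Address = uint64_t;
using ExternalPointerHandle = uint32_t;

constexpr uint32_t kExternalPointerIndexShift = 5;               // non-Android value
constexpr uint64_t kTag = (0b11010001ULL << 48) | (1ULL << 62);  // illustrative tag

// handle -> table index -> relaxed atomic load -> strip the expected tag bits.
Address ReadEntry(const std::atomic<Address>* table, ExternalPointerHandle handle) {
  uint32_t index = handle >> kExternalPointerIndexShift;
  Address entry = table[index].load(std::memory_order_relaxed);
  return entry & ~kTag;
}

int main() {
  std::atomic<Address> table[2];
  const Address raw = 0x00007f0000001230;                 // pretend external pointer
  table[1].store(raw | kTag, std::memory_order_relaxed);  // tagged entry at index 1
  ExternalPointerHandle handle = 1u << kExternalPointerIndexShift;  // encodes index 1
  assert(ReadEntry(table, handle) == raw);
}
```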
@@ -859,8 +901,58 @@ class BackingStoreBase {};
  // This is needed for histograms sampling garbage collection reasons.
  constexpr int kGarbageCollectionReasonMaxValue = 27;

- } // namespace internal
+ // Helper functions about values contained in handles.
+ class ValueHelper final {
+ public:
+ #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+ static constexpr Address kLocalTaggedNullAddress = 1;
+
+ template <typename T>
+ static constexpr T* EmptyValue() {
+ return reinterpret_cast<T*>(kLocalTaggedNullAddress);
+ }

+ template <typename T>
+ V8_INLINE static Address ValueAsAddress(const T* value) {
+ return reinterpret_cast<Address>(value);
+ }
+
+ template <typename T, typename S>
+ V8_INLINE static T* SlotAsValue(S* slot) {
+ return *reinterpret_cast<T**>(slot);
+ }
+
+ template <typename T>
+ V8_INLINE static T* ValueAsSlot(T* const& value) {
+ return reinterpret_cast<T*>(const_cast<T**>(&value));
+ }
+
+ #else // !V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+
+ template <typename T>
+ static constexpr T* EmptyValue() {
+ return nullptr;
+ }
+
+ template <typename T>
+ V8_INLINE static Address ValueAsAddress(const T* value) {
+ return *reinterpret_cast<const Address*>(value);
+ }
+
+ template <typename T, typename S>
+ V8_INLINE static T* SlotAsValue(S* slot) {
+ return reinterpret_cast<T*>(slot);
+ }
+
+ template <typename T>
+ V8_INLINE static T* ValueAsSlot(T* const& value) {
+ return value;
+ }
+
+ #endif // V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+ };
+
+ } // namespace internal
  } // namespace v8

  #endif // INCLUDE_V8_INTERNAL_H_
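The new ValueHelper class centralises how the payload of a handle is interpreted: with V8_ENABLE_CONSERVATIVE_STACK_SCANNING a Local carries the tagged value directly, while in the default build it carries a pointer to a slot that holds the value's Address, which is why the two branches swap a dereference between ValueAsAddress and SlotAsValue. A rough standalone model of the default (indirect) branch, using a stand-in Object type rather than the real v8 handle types:

```cpp
#include <cassert>
#include <cstdint>

using Address = uintptr_t;

namespace indirect {  // models the #else branch of ValueHelper above
struct Object {};     // stand-in for a v8::Value subtype

// The "value" pointer is really the slot's address, so reading the Address
// means dereferencing it ...
Address ValueAsAddress(const Object* value) {
  return *reinterpret_cast<const Address*>(value);
}
// ... and turning a slot into a "value" is just a pointer reinterpretation.
Object* SlotAsValue(Address* slot) { return reinterpret_cast<Object*>(slot); }
}  // namespace indirect

int main() {
  Address slot = 0x1234;  // some tagged Address held in a handle slot
  indirect::Object* value = indirect::SlotAsValue(&slot);
  assert(indirect::ValueAsAddress(value) == 0x1234);
}
```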