libv8-node 22.7.0.4-arm64-darwin → 24.1.0.0-arm64-darwin

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. checksums.yaml +4 -4
  2. data/lib/libv8/node/version.rb +3 -3
  3. data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a +0 -0
  4. data/vendor/v8/include/cppgc/allocation.h +11 -13
  5. data/vendor/v8/include/cppgc/default-platform.h +3 -2
  6. data/vendor/v8/include/cppgc/garbage-collected.h +8 -0
  7. data/vendor/v8/include/cppgc/heap-consistency.h +1 -1
  8. data/vendor/v8/include/cppgc/heap-statistics.h +2 -0
  9. data/vendor/v8/include/cppgc/internal/api-constants.h +2 -14
  10. data/vendor/v8/include/cppgc/internal/base-page-handle.h +2 -4
  11. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +0 -4
  12. data/vendor/v8/include/cppgc/internal/caged-heap.h +0 -4
  13. data/vendor/v8/include/cppgc/internal/compiler-specific.h +9 -1
  14. data/vendor/v8/include/cppgc/internal/conditional-stack-allocated.h +41 -0
  15. data/vendor/v8/include/cppgc/internal/gc-info.h +12 -10
  16. data/vendor/v8/include/cppgc/internal/logging.h +3 -3
  17. data/vendor/v8/include/cppgc/internal/member-storage.h +69 -20
  18. data/vendor/v8/include/cppgc/internal/name-trait.h +5 -1
  19. data/vendor/v8/include/cppgc/internal/persistent-node.h +8 -3
  20. data/vendor/v8/include/cppgc/internal/pointer-policies.h +48 -11
  21. data/vendor/v8/include/cppgc/macros.h +21 -0
  22. data/vendor/v8/include/cppgc/member.h +70 -36
  23. data/vendor/v8/include/cppgc/name-provider.h +10 -0
  24. data/vendor/v8/include/cppgc/platform.h +11 -0
  25. data/vendor/v8/include/cppgc/type-traits.h +1 -0
  26. data/vendor/v8/include/cppgc/visitor.h +25 -1
  27. data/vendor/v8/include/libplatform/libplatform-export.h +2 -2
  28. data/vendor/v8/include/libplatform/v8-tracing.h +0 -1
  29. data/vendor/v8/include/v8-array-buffer.h +143 -46
  30. data/vendor/v8/include/v8-callbacks.h +94 -31
  31. data/vendor/v8/include/v8-context.h +48 -15
  32. data/vendor/v8/include/v8-cppgc.h +5 -56
  33. data/vendor/v8/include/v8-data.h +5 -0
  34. data/vendor/v8/include/v8-date.h +9 -0
  35. data/vendor/v8/include/v8-debug.h +11 -0
  36. data/vendor/v8/include/v8-embedder-heap.h +1 -29
  37. data/vendor/v8/include/v8-exception.h +72 -0
  38. data/vendor/v8/include/v8-external-memory-accounter.h +60 -0
  39. data/vendor/v8/include/v8-fast-api-calls.h +41 -206
  40. data/vendor/v8/include/v8-function-callback.h +190 -78
  41. data/vendor/v8/include/v8-function.h +11 -3
  42. data/vendor/v8/include/v8-handle-base.h +20 -2
  43. data/vendor/v8/include/v8-initialization.h +26 -1
  44. data/vendor/v8/include/v8-inspector.h +10 -3
  45. data/vendor/v8/include/v8-internal.h +638 -315
  46. data/vendor/v8/include/v8-isolate.h +275 -189
  47. data/vendor/v8/include/v8-local-handle.h +69 -42
  48. data/vendor/v8/include/v8-maybe.h +2 -1
  49. data/vendor/v8/include/v8-memory-span.h +149 -24
  50. data/vendor/v8/include/v8-message.h +9 -22
  51. data/vendor/v8/include/v8-metrics.h +14 -0
  52. data/vendor/v8/include/v8-microtask-queue.h +0 -5
  53. data/vendor/v8/include/v8-object.h +291 -37
  54. data/vendor/v8/include/v8-persistent-handle.h +17 -36
  55. data/vendor/v8/include/v8-platform.h +66 -45
  56. data/vendor/v8/include/v8-primitive.h +221 -5
  57. data/vendor/v8/include/v8-profiler.h +51 -2
  58. data/vendor/v8/include/v8-promise.h +2 -2
  59. data/vendor/v8/include/v8-proxy.h +0 -1
  60. data/vendor/v8/include/v8-regexp.h +0 -1
  61. data/vendor/v8/include/v8-sandbox.h +173 -0
  62. data/vendor/v8/include/v8-script.h +65 -17
  63. data/vendor/v8/include/v8-snapshot.h +38 -2
  64. data/vendor/v8/include/v8-source-location.h +6 -1
  65. data/vendor/v8/include/v8-template.h +111 -263
  66. data/vendor/v8/include/v8-trace-categories.h +23 -0
  67. data/vendor/v8/include/v8-traced-handle.h +20 -32
  68. data/vendor/v8/include/v8-typed-array.h +6 -10
  69. data/vendor/v8/include/v8-unwinder-state.h +2 -3
  70. data/vendor/v8/include/v8-unwinder.h +2 -1
  71. data/vendor/v8/include/v8-util.h +1 -117
  72. data/vendor/v8/include/v8-value-serializer-version.h +3 -3
  73. data/vendor/v8/include/v8-value.h +21 -2
  74. data/vendor/v8/include/v8-version.h +4 -4
  75. data/vendor/v8/include/v8-wasm.h +27 -0
  76. data/vendor/v8/include/v8-weak-callback-info.h +20 -12
  77. data/vendor/v8/include/v8.h +3 -3
  78. data/vendor/v8/include/v8config.h +83 -45
  79. metadata +7 -7
  80. data/vendor/v8/include/cppgc/ephemeron-pair.h +0 -30
data/vendor/v8/include/v8-internal.h
@@ -11,11 +11,29 @@

  #include <atomic>
  #include <iterator>
+ #include <limits>
  #include <memory>
+ #include <optional>
  #include <type_traits>

  #include "v8config.h" // NOLINT(build/include_directory)

+ // TODO(pkasting): Use <compare>/spaceship unconditionally after dropping
+ // support for old libstdc++ versions.
+ #if __has_include(<version>)
+ #include <version>
+ #endif
+ #if defined(__cpp_lib_three_way_comparison) && \
+ __cpp_lib_three_way_comparison >= 201711L && \
+ defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L
+ #include <compare>
+ #include <concepts>
+
+ #define V8_HAVE_SPACESHIP_OPERATOR 1
+ #else
+ #define V8_HAVE_SPACESHIP_OPERATOR 0
+ #endif
+
  namespace v8 {

  class Array;
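Note on the new V8_HAVE_SPACESHIP_OPERATOR guard above: it is what the WrappedIterator comparison-operator hunks later in this file key off. A minimal sketch, not taken from the header, of how such a guard is typically consumed; Version is a hypothetical type and the include path is assumed:

    // Assumes the v8-internal.h shipped in this package is on the include path,
    // so that V8_HAVE_SPACESHIP_OPERATOR is defined one way or the other.
    #include "v8-internal.h"

    struct Version {
      int major_;
      int minor_;
    #if V8_HAVE_SPACESHIP_OPERATOR
      // C++20 path: a defaulted operator<=> also implies a defaulted operator==.
      friend auto operator<=>(const Version&, const Version&) = default;
    #else
      // Fallback path: spell out the comparisons the spaceship would generate.
      friend bool operator==(const Version& a, const Version& b) {
        return a.major_ == b.major_ && a.minor_ == b.minor_;
      }
      friend bool operator<(const Version& a, const Version& b) {
        return a.major_ != b.major_ ? a.major_ < b.major_ : a.minor_ < b.minor_;
      }
    #endif
    };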
@@ -26,7 +44,10 @@ class Isolate;
  namespace internal {

  class Heap;
+ class LocalHeap;
  class Isolate;
+ class IsolateGroup;
+ class LocalIsolate;

  typedef uintptr_t Address;
  static constexpr Address kNullAddress = 0;
@@ -87,7 +108,10 @@ struct SmiTagging<4> {
  // Truncate and shift down (requires >> to be sign extending).
  return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
- V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+
+ template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
+ std::is_signed_v<T>>* = nullptr>
+ V8_INLINE static constexpr bool IsValidSmi(T value) {
  // Is value in range [kSmiMinValue, kSmiMaxValue].
  // Use unsigned operations in order to avoid undefined behaviour in case of
  // signed integer overflow.
@@ -96,6 +120,28 @@ struct SmiTagging<4> {
  (static_cast<uintptr_t>(kSmiMaxValue) -
  static_cast<uintptr_t>(kSmiMinValue));
  }
+
+ template <class T,
+ typename std::enable_if_t<std::is_integral_v<T> &&
+ std::is_unsigned_v<T>>* = nullptr>
+ V8_INLINE static constexpr bool IsValidSmi(T value) {
+ static_assert(kSmiMaxValue <= std::numeric_limits<uintptr_t>::max());
+ return value <= static_cast<uintptr_t>(kSmiMaxValue);
+ }
+
+ // Same as the `intptr_t` version but works with int64_t on 32-bit builds
+ // without slowing down anything else.
+ V8_INLINE static constexpr bool IsValidSmi(int64_t value) {
+ return (static_cast<uint64_t>(value) -
+ static_cast<uint64_t>(kSmiMinValue)) <=
+ (static_cast<uint64_t>(kSmiMaxValue) -
+ static_cast<uint64_t>(kSmiMinValue));
+ }
+
+ V8_INLINE static constexpr bool IsValidSmi(uint64_t value) {
+ static_assert(kSmiMaxValue <= std::numeric_limits<uint64_t>::max());
+ return value <= static_cast<uint64_t>(kSmiMaxValue);
+ }
  };

  // Smi constants for systems where tagged pointer is a 64-bit value.
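The signed IsValidSmi overload above keeps V8's unsigned-wraparound range check. A worked example, separate from the header, with the 31-bit smi bounds of 32-bit tagged builds assumed for illustration:

    #include <cstdint>

    constexpr int32_t kMin = -(1 << 30);     // assumed kSmiMinValue on 32-bit tagging
    constexpr int32_t kMax = (1 << 30) - 1;  // assumed kSmiMaxValue on 32-bit tagging

    constexpr bool InSmiRange(int32_t v) {
      // Subtracting kMin maps [kMin, kMax] onto [0, kMax - kMin]; out-of-range
      // values wrap around to large unsigned numbers and fail the comparison,
      // and the unsigned arithmetic avoids signed-overflow undefined behaviour.
      return static_cast<uint32_t>(v) - static_cast<uint32_t>(kMin) <=
             static_cast<uint32_t>(kMax) - static_cast<uint32_t>(kMin);
    }

    static_assert(InSmiRange(0) && InSmiRange(kMin) && InSmiRange(kMax));
    static_assert(!InSmiRange(kMax + 1) && !InSmiRange(kMin - 1));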
@@ -112,9 +158,20 @@ struct SmiTagging<8> {
  // Shift down and throw away top 32 bits.
  return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
- V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+
+ template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
+ std::is_signed_v<T>>* = nullptr>
+ V8_INLINE static constexpr bool IsValidSmi(T value) {
  // To be representable as a long smi, the value must be a 32-bit integer.
- return (value == static_cast<int32_t>(value));
+ return std::numeric_limits<int32_t>::min() <= value &&
+ value <= std::numeric_limits<int32_t>::max();
+ }
+
+ template <class T,
+ typename std::enable_if_t<std::is_integral_v<T> &&
+ std::is_unsigned_v<T>>* = nullptr>
+ V8_INLINE static constexpr bool IsValidSmi(T value) {
+ return value <= std::numeric_limits<int32_t>::max();
  }
  };

@@ -180,10 +237,6 @@ using SandboxedPointer_t = Address;
  // virtual address space for userspace. As such, limit the sandbox to 128GB (a
  // quarter of the total available address space).
  constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
- #elif defined(V8_TARGET_ARCH_LOONG64)
- // Some Linux distros on LoongArch64 configured with only 40 bits of virtual
- // address space for userspace. Limit the sandbox to 256GB here.
- constexpr size_t kSandboxSizeLog2 = 38; // 256 GB
  #else
  // Everywhere else use a 1TB sandbox.
  constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
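The retained kSandboxSizeLog2 values match the sizes given in the comments. A quick check, not from the header:

    #include <cstdint>

    constexpr uint64_t kGB = uint64_t{1} << 30;
    static_assert((uint64_t{1} << 37) == 128 * kGB);   // kSandboxSizeLog2 = 37
    static_assert((uint64_t{1} << 40) == 1024 * kGB);  // kSandboxSizeLog2 = 40, i.e. 1 TB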
@@ -204,9 +257,12 @@ constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
  constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;

  // Size of the guard regions surrounding the sandbox. This assumes a worst-case
- // scenario of a 32-bit unsigned index used to access an array of 64-bit
- // values.
- constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
+ // scenario of a 32-bit unsigned index used to access an array of 64-bit values
+ // with an additional 4GB (compressed pointer) offset. In particular, accesses
+ // to TypedArrays are effectively computed as
+ // `entry_pointer = array->base + array->offset + index * array->element_size`.
+ // See also https://crbug.com/40070746 for more details.
+ constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;

  static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
  "The size of the guard regions around the sandbox must be a "
@@ -253,15 +309,16 @@ static_assert(1ULL << (64 - kBoundedSizeShift) ==
  // size allows omitting bounds checks on table accesses if the indices are
  // guaranteed (e.g. through shifting) to be below the maximum index. This
  // value must be a power of two.
- constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
+ constexpr size_t kExternalPointerTableReservationSize = 256 * MB;

  // The external pointer table indices stored in HeapObjects as external
  // pointers are shifted to the left by this amount to guarantee that they are
- // smaller than the maximum table size.
- constexpr uint32_t kExternalPointerIndexShift = 6;
+ // smaller than the maximum table size even after the C++ compiler multiplies
+ // them by 8 to be used as indexes into a table of 64 bit pointers.
+ constexpr uint32_t kExternalPointerIndexShift = 7;
  #else
- constexpr size_t kExternalPointerTableReservationSize = 1024 * MB;
- constexpr uint32_t kExternalPointerIndexShift = 5;
+ constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
+ constexpr uint32_t kExternalPointerIndexShift = 6;
  #endif // V8_TARGET_OS_ANDROID

  // The maximum number of entries in an external pointer table.
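The reservation sizes and index shifts above are coupled: each table entry is 8 bytes (per the comment), and a handle shifted by kExternalPointerIndexShift must not be able to index past the table. A consistency sketch, not from the header:

    #include <cstdint>

    constexpr uint64_t kMB = uint64_t{1} << 20;
    constexpr uint64_t kEntrySize = 8;  // 64-bit pointers, as stated in the comment
    // Android configuration in 24.x: 256 MB reservation, shift 7.
    static_assert(256 * kMB / kEntrySize == uint64_t{1} << (32 - 7));
    // Non-Android configuration in 24.x: 512 MB reservation, shift 6.
    static_assert(512 * kMB / kEntrySize == uint64_t{1} << (32 - 6));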
@@ -280,6 +337,16 @@ constexpr size_t kMaxExternalPointers = 0;

  #endif // V8_COMPRESS_POINTERS

+ constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
+ constexpr uint64_t kExternalPointerTagShift = 49;
+ constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
+ constexpr uint64_t kExternalPointerShiftedTagMask =
+ kExternalPointerTagMask >> kExternalPointerTagShift;
+ static_assert(kExternalPointerShiftedTagMask << kExternalPointerTagShift ==
+ kExternalPointerTagMask);
+ constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
+ constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
+
  // A ExternalPointerHandle represents a (opaque) reference to an external
  // pointer that can be stored inside the sandbox. A ExternalPointerHandle has
  // meaning only in combination with an (active) Isolate as it references an
@@ -301,6 +368,124 @@ using ExternalPointer_t = Address;
  constexpr ExternalPointer_t kNullExternalPointer = 0;
  constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;

+ // See `ExternalPointerHandle` for the main documentation. The difference to
+ // `ExternalPointerHandle` is that the handle does not represent an arbitrary
+ // external pointer but always refers to an object managed by `CppHeap`. The
+ // handles are using in combination with a dedicated table for `CppHeap`
+ // references.
+ using CppHeapPointerHandle = uint32_t;
+
+ // The actual pointer to objects located on the `CppHeap`. When pointer
+ // compression is enabled these pointers are stored as `CppHeapPointerHandle`.
+ // In non-compressed configurations the pointers are simply stored as raw
+ // pointers.
+ #ifdef V8_COMPRESS_POINTERS
+ using CppHeapPointer_t = CppHeapPointerHandle;
+ #else
+ using CppHeapPointer_t = Address;
+ #endif
+
+ constexpr CppHeapPointer_t kNullCppHeapPointer = 0;
+ constexpr CppHeapPointerHandle kNullCppHeapPointerHandle = 0;
+
+ constexpr uint64_t kCppHeapPointerMarkBit = 1ULL;
+ constexpr uint64_t kCppHeapPointerTagShift = 1;
+ constexpr uint64_t kCppHeapPointerPayloadShift = 16;
+
+ #ifdef V8_COMPRESS_POINTERS
+ // CppHeapPointers use a dedicated pointer table. These constants control the
+ // size and layout of the table. See the corresponding constants for the
+ // external pointer table for further details.
+ constexpr size_t kCppHeapPointerTableReservationSize =
+ kExternalPointerTableReservationSize;
+ constexpr uint32_t kCppHeapPointerIndexShift = kExternalPointerIndexShift;
+
+ constexpr int kCppHeapPointerTableEntrySize = 8;
+ constexpr int kCppHeapPointerTableEntrySizeLog2 = 3;
+ constexpr size_t kMaxCppHeapPointers =
+ kCppHeapPointerTableReservationSize / kCppHeapPointerTableEntrySize;
+ static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers,
+ "kCppHeapPointerTableReservationSize and "
+ "kCppHeapPointerIndexShift don't match");
+
+ #else // !V8_COMPRESS_POINTERS
+
+ // Needed for the V8.SandboxedCppHeapPointersCount histogram.
+ constexpr size_t kMaxCppHeapPointers = 0;
+
+ #endif // V8_COMPRESS_POINTERS
+
+ // Generic tag range struct to represent ranges of type tags.
+ //
+ // When referencing external objects via pointer tables, type tags are
+ // frequently necessary to guarantee type safety for the external objects. When
+ // support for subtyping is necessary, range-based type checks are used in
+ // which all subtypes of a given supertype use contiguous tags. This struct can
+ // then be used to represent such a type range.
+ //
+ // In addition, there is an option for performance tweaks: if the size of the
+ // type range corresponding to a supertype is a power of two and starts at a
+ // power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
+ // the type check to use even fewer instructions (essentially replace a AND +
+ // SUB with a single AND).
+ //
+ template <typename Tag>
+ struct TagRange {
+ static_assert(std::is_enum_v<Tag> &&
+ std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
+ "Tag parameter must be an enum with base type uint16_t");
+
+ // Construct the inclusive tag range [first, last].
+ constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}
+
+ // Construct a tag range consisting of a single tag.
+ //
+ // A single tag is always implicitly convertible to a tag range. This greatly
+ // increases readability as most of the time, the exact tag of a field is
+ // known and so no tag range needs to explicitly be created for it.
+ constexpr TagRange(Tag tag) // NOLINT(runtime/explicit)
+ : first(tag), last(tag) {}
+
+ // Construct an empty tag range.
+ constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}
+
+ // A tag range is considered empty if it only contains the null tag.
+ constexpr bool IsEmpty() const { return first == 0 && last == 0; }
+
+ constexpr size_t Size() const {
+ if (IsEmpty()) {
+ return 0;
+ } else {
+ return last - first + 1;
+ }
+ }
+
+ constexpr bool Contains(Tag tag) const {
+ // Need to perform the math with uint32_t. Otherwise, the uint16_ts would
+ // be promoted to (signed) int, allowing the compiler to (wrongly) assume
+ // that an underflow cannot happen as that would be undefined behavior.
+ return static_cast<uint32_t>(tag) - first <=
+ static_cast<uint32_t>(last) - first;
+ }
+
+ constexpr bool Contains(TagRange tag_range) const {
+ return tag_range.first >= first && tag_range.last <= last;
+ }
+
+ constexpr bool operator==(const TagRange other) const {
+ return first == other.first && last == other.last;
+ }
+
+ constexpr size_t hash_value() const {
+ static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
+ return (static_cast<size_t>(first) << 16) | last;
+ }
+
+ // Internally we represent tag ranges as half-open ranges [first, last).
+ const Tag first;
+ const Tag last;
+ };
+
  //
  // External Pointers.
  //
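The TagRange helper introduced above is what the range-based type checks later in this file (IsSharedExternalPointerType and friends) are built on. A usage sketch, not from the header, with a hypothetical uint16_t-backed tag enum standing in for ExternalPointerTag; it assumes the v8-internal.h shipped in this package is on the include path:

    #include <cstdint>
    #include "v8-internal.h"

    enum MyTag : uint16_t { kNullTag = 0, kBaseTag, kSubTagA, kSubTagB, kUnrelatedTag };
    using MyTagRange = v8::internal::TagRange<MyTag>;

    // Inclusive range covering a hypothetical supertype and its subtypes.
    constexpr MyTagRange kAnyBaseTag(kBaseTag, kSubTagB);

    static_assert(kAnyBaseTag.Contains(kSubTagA));                        // single-tag check
    static_assert(kAnyBaseTag.Contains(MyTagRange(kSubTagA, kSubTagB)));  // sub-range check
    static_assert(!kAnyBaseTag.Contains(kUnrelatedTag));
    static_assert(kAnyBaseTag.Size() == 3);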
@@ -308,41 +493,12 @@ constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
  // pointer table and are referenced from HeapObjects through an index (a
  // "handle"). When stored in the table, the pointers are tagged with per-type
  // tags to prevent type confusion attacks between different external objects.
- // Besides type information bits, these tags also contain the GC marking bit
- // which indicates whether the pointer table entry is currently alive. When a
- // pointer is written into the table, the tag is ORed into the top bits. When
- // that pointer is later loaded from the table, it is ANDed with the inverse of
- // the expected tag. If the expected and actual type differ, this will leave
- // some of the top bits of the pointer set, rendering the pointer inaccessible.
- // The AND operation also removes the GC marking bit from the pointer.
  //
- // The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
- // (distinct) tags T1 and T2. In practice, this is achieved by generating tags
- // that all have the same number of zeroes and ones but different bit patterns.
- // With N type tag bits, this allows for (N choose N/2) possible type tags.
- // Besides the type tag bits, the tags also have the GC marking bit set so that
- // the marking bit is automatically set when a pointer is written into the
- // external pointer table (in which case it is clearly alive) and is cleared
- // when the pointer is loaded. The exception to this is the free entry tag,
- // which doesn't have the mark bit set, as the entry is not alive. This
- // construction allows performing the type check and removing GC marking bits
- // from the pointer in one efficient operation (bitwise AND). The number of
- // available bits is limited in the following way: on x64, bits [47, 64) are
- // generally available for tagging (userspace has 47 address bits available).
- // On Arm64, userspace typically has a 40 or 48 bit address space. However, due
- // to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
- // for type checks as type-check failures would go unnoticed or collide with
- // MTE bits. Some bits of the top byte can, however, still be used for the GC
- // marking bit. The bits available for the type tags are therefore limited to
- // [48, 56), i.e. (8 choose 4) = 70 different types.
- // The following options exist to increase the number of possible types:
- // - Using multiple ExternalPointerTables since tags can safely be reused
- // across different tables
- // - Using "extended" type checks, where additional type information is stored
- // either in an adjacent pointer table entry or at the pointed-to location
- // - Using a different tagging scheme, for example based on XOR which would
- // allow for 2**8 different tags but require a separate operation to remove
- // the marking bit
+ // When loading an external pointer, a range of allowed tags can be specified.
+ // This way, type hierarchies can be supported. The main requirement for that
+ // is that all (transitive) child classes of a given parent class have type ids
+ // in the same range, and that there are no unrelated types in that range. For
+ // more details about how to assign type tags to types, see the TagRange class.
  //
  // The external pointer sandboxing mechanism ensures that every access to an
  // external pointer field will result in a valid pointer of the expected type
@@ -365,138 +521,166 @@ constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
  // extension (MTE) which would use bits [56, 60).
  //
  // External pointer tables are also available even when the sandbox is off but
- // pointer compression is on. In that case, the mechanism can be used to easy
+ // pointer compression is on. In that case, the mechanism can be used to ease
  // alignment requirements as it turns unaligned 64-bit raw pointers into
  // aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
  // for this purpose, instead of using the ExternalPointer accessors one needs to
  // use ExternalPointerHandles directly and use them to access the pointers in an
  // ExternalPointerTable.
- constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
- constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
- constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000;
- constexpr uint64_t kExternalPointerTagShift = 48;
-
- // All possible 8-bit type tags.
- // These are sorted so that tags can be grouped together and it can efficiently
- // be checked if a tag belongs to a given group. See for example the
- // IsSharedExternalPointerType routine.
- constexpr uint64_t kAllExternalPointerTypeTags[] = {
- 0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
- 0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
- 0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
- 0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
- 0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
- 0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
- 0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
- 0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
- 0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
- 0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
- 0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
- 0b11100010, 0b11100100, 0b11101000, 0b11110000};
-
- #define TAG(i) \
- ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
- kExternalPointerMarkBit)
-
- // clang-format off
-
- // When adding new tags, please ensure that the code using these tags is
- // "substitution-safe", i.e. still operate safely if external pointers of the
- // same type are swapped by an attacker. See comment above for more details.
-
- // Shared external pointers are owned by the shared Isolate and stored in the
- // shared external pointer table associated with that Isolate, where they can
- // be accessed from multiple threads at the same time. The objects referenced
- // in this way must therefore always be thread-safe.
- #define SHARED_EXTERNAL_POINTER_TAGS(V) \
- V(kFirstSharedTag, TAG(0)) \
- V(kWaiterQueueNodeTag, TAG(0)) \
- V(kExternalStringResourceTag, TAG(1)) \
- V(kExternalStringResourceDataTag, TAG(2)) \
- V(kLastSharedTag, TAG(2))
-
- // External pointers using these tags are kept in a per-Isolate external
- // pointer table and can only be accessed when this Isolate is active.
- #define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
- V(kForeignForeignAddressTag, TAG(10)) \
- V(kNativeContextMicrotaskQueueTag, TAG(11)) \
- V(kEmbedderDataSlotPayloadTag, TAG(12)) \
- /* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
- /* it is the Embedder's responsibility to ensure type safety (against */ \
- /* substitution) and lifetime validity of these objects. */ \
- V(kExternalObjectValueTag, TAG(13)) \
- V(kFunctionTemplateInfoCallbackTag, TAG(14)) \
- V(kAccessorInfoGetterTag, TAG(15)) \
- V(kAccessorInfoSetterTag, TAG(16)) \
- V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
- V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
- V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
- V(kWasmContinuationJmpbufTag, TAG(20)) \
- V(kWasmIndirectFunctionTargetTag, TAG(21)) \
- V(kArrayBufferExtensionTag, TAG(22))
-
- // All external pointer tags.
- #define ALL_EXTERNAL_POINTER_TAGS(V) \
- SHARED_EXTERNAL_POINTER_TAGS(V) \
- PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
-
- #define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
- #define MAKE_TAG(HasMarkBit, TypeTag) \
- ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
- (HasMarkBit ? kExternalPointerMarkBit : 0))
- enum ExternalPointerTag : uint64_t {
- // Empty tag value. Mostly used as placeholder.
- kExternalPointerNullTag = MAKE_TAG(1, 0b00000000),
- // External pointer tag that will match any external pointer. Use with care!
- kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
- // The free entry tag has all type bits set so every type check with a
- // different type fails. It also doesn't have the mark bit set as free
- // entries are (by definition) not alive.
- kExternalPointerFreeEntryTag = MAKE_TAG(0, 0b11111111),
- // Evacuation entries are used during external pointer table compaction.
- kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11100111),
-
- ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM)
+ //
+ // The tag is currently in practice limited to 15 bits since it needs to fit
+ // together with a marking bit into the unused parts of a pointer.
+ enum ExternalPointerTag : uint16_t {
+ kFirstExternalPointerTag = 0,
+ kExternalPointerNullTag = 0,
+
+ // When adding new tags, please ensure that the code using these tags is
+ // "substitution-safe", i.e. still operate safely if external pointers of the
+ // same type are swapped by an attacker. See comment above for more details.
+
+ // Shared external pointers are owned by the shared Isolate and stored in the
+ // shared external pointer table associated with that Isolate, where they can
+ // be accessed from multiple threads at the same time. The objects referenced
+ // in this way must therefore always be thread-safe.
+ kFirstSharedExternalPointerTag,
+ kWaiterQueueNodeTag = kFirstSharedExternalPointerTag,
+ kExternalStringResourceTag,
+ kExternalStringResourceDataTag,
+ kLastSharedExternalPointerTag = kExternalStringResourceDataTag,
+
+ // External pointers using these tags are kept in a per-Isolate external
+ // pointer table and can only be accessed when this Isolate is active.
+ kNativeContextMicrotaskQueueTag,
+ kEmbedderDataSlotPayloadTag,
+ // This tag essentially stands for a `void*` pointer in the V8 API, and it is
+ // the Embedder's responsibility to ensure type safety (against substitution)
+ // and lifetime validity of these objects.
+ kExternalObjectValueTag,
+ kFirstMaybeReadOnlyExternalPointerTag,
+ kFunctionTemplateInfoCallbackTag = kFirstMaybeReadOnlyExternalPointerTag,
+ kAccessorInfoGetterTag,
+ kAccessorInfoSetterTag,
+ kLastMaybeReadOnlyExternalPointerTag = kAccessorInfoSetterTag,
+ kWasmInternalFunctionCallTargetTag,
+ kWasmTypeInfoNativeTypeTag,
+ kWasmExportedFunctionDataSignatureTag,
+ kWasmStackMemoryTag,
+ kWasmIndirectFunctionTargetTag,
+
+ // Foreigns
+ kFirstForeignExternalPointerTag,
+ kGenericForeignTag = kFirstForeignExternalPointerTag,
+ kApiNamedPropertyQueryCallbackTag,
+ kApiNamedPropertyGetterCallbackTag,
+ kApiNamedPropertySetterCallbackTag,
+ kApiNamedPropertyDescriptorCallbackTag,
+ kApiNamedPropertyDefinerCallbackTag,
+ kApiNamedPropertyDeleterCallbackTag,
+ kApiIndexedPropertyQueryCallbackTag,
+ kApiIndexedPropertyGetterCallbackTag,
+ kApiIndexedPropertySetterCallbackTag,
+ kApiIndexedPropertyDescriptorCallbackTag,
+ kApiIndexedPropertyDefinerCallbackTag,
+ kApiIndexedPropertyDeleterCallbackTag,
+ kApiIndexedPropertyEnumeratorCallbackTag,
+ kApiAccessCheckCallbackTag,
+ kApiAbortScriptExecutionCallbackTag,
+ kSyntheticModuleTag,
+ kMicrotaskCallbackTag,
+ kMicrotaskCallbackDataTag,
+ kCFunctionTag,
+ kCFunctionInfoTag,
+ kMessageListenerTag,
+ kWaiterQueueForeignTag,
+
+ // Managed
+ kFirstManagedResourceTag,
+ kFirstManagedExternalPointerTag = kFirstManagedResourceTag,
+ kGenericManagedTag = kFirstManagedExternalPointerTag,
+ kWasmWasmStreamingTag,
+ kWasmFuncDataTag,
+ kWasmManagedDataTag,
+ kWasmNativeModuleTag,
+ kIcuBreakIteratorTag,
+ kIcuUnicodeStringTag,
+ kIcuListFormatterTag,
+ kIcuLocaleTag,
+ kIcuSimpleDateFormatTag,
+ kIcuDateIntervalFormatTag,
+ kIcuRelativeDateTimeFormatterTag,
+ kIcuLocalizedNumberFormatterTag,
+ kIcuPluralRulesTag,
+ kIcuCollatorTag,
+ kDisplayNamesInternalTag,
+ kD8WorkerTag,
+ kD8ModuleEmbedderDataTag,
+ kLastForeignExternalPointerTag = kD8ModuleEmbedderDataTag,
+ kLastManagedExternalPointerTag = kLastForeignExternalPointerTag,
+ // External resources whose lifetime is tied to their entry in the external
+ // pointer table but which are not referenced via a Managed
+ kArrayBufferExtensionTag,
+ kLastManagedResourceTag = kArrayBufferExtensionTag,
+
+ kExternalPointerZappedEntryTag = 0x7d,
+ kExternalPointerEvacuationEntryTag = 0x7e,
+ kExternalPointerFreeEntryTag = 0x7f,
+ // The tags are limited to 7 bits, so the last tag is 0x7f.
+ kLastExternalPointerTag = 0x7f,
  };

- #undef MAKE_TAG
- #undef TAG
- #undef EXTERNAL_POINTER_TAG_ENUM
-
- // clang-format on
+ using ExternalPointerTagRange = TagRange<ExternalPointerTag>;
+
+ constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(
+ kFirstExternalPointerTag, kLastExternalPointerTag);
+ constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(
+ kFirstSharedExternalPointerTag, kLastSharedExternalPointerTag);
+ constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(
+ kFirstForeignExternalPointerTag, kLastForeignExternalPointerTag);
+ constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(
+ kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag);
+ constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(
+ kFirstMaybeReadOnlyExternalPointerTag,
+ kLastMaybeReadOnlyExternalPointerTag);
+ constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(
+ kFirstManagedResourceTag, kLastManagedResourceTag);

  // True if the external pointer must be accessed from the shared isolate's
  // external pointer table.
  V8_INLINE static constexpr bool IsSharedExternalPointerType(
- ExternalPointerTag tag) {
- return tag >= kFirstSharedTag && tag <= kLastSharedTag;
+ ExternalPointerTagRange tag_range) {
+ return kAnySharedExternalPointerTagRange.Contains(tag_range);
  }

  // True if the external pointer may live in a read-only object, in which case
  // the table entry will be in the shared read-only segment of the external
  // pointer table.
  V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
- ExternalPointerTag tag) {
- return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
- tag == kFunctionTemplateInfoCallbackTag;
+ ExternalPointerTagRange tag_range) {
+ return kAnyMaybeReadOnlyExternalPointerTagRange.Contains(tag_range);
  }

- // Sanity checks.
- #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
- static_assert(IsSharedExternalPointerType(Tag));
- #define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
- static_assert(!IsSharedExternalPointerType(Tag));
-
- SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
- PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
-
- #undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
- #undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
+ // True if the external pointer references an external object whose lifetime is
+ // tied to the entry in the external pointer table.
+ // In this case, the entry in the ExternalPointerTable always points to an
+ // object derived from ExternalPointerTable::ManagedResource.
+ V8_INLINE static constexpr bool IsManagedExternalPointerType(
+ ExternalPointerTagRange tag_range) {
+ return kAnyManagedResourceExternalPointerTag.Contains(tag_range);
+ }

- #undef SHARED_EXTERNAL_POINTER_TAGS
- #undef EXTERNAL_POINTER_TAGS
+ // When an external poiner field can contain the null external pointer handle,
+ // the type checking mechanism needs to also check for null.
+ // TODO(saelo): this is mostly a temporary workaround to introduce range-based
+ // type checks. In the future, we should either (a) change the type tagging
+ // scheme so that null always passes or (b) (more likely) introduce dedicated
+ // null entries for those tags that need them (similar to other well-known
+ // empty value constants such as the empty fixed array).
+ V8_INLINE static constexpr bool ExternalPointerCanBeEmpty(
+ ExternalPointerTagRange tag_range) {
+ return tag_range.Contains(kArrayBufferExtensionTag) ||
+ tag_range.Contains(kEmbedderDataSlotPayloadTag);
+ }

- //
  // Indirect Pointers.
  //
  // When the sandbox is enabled, indirect pointers are used to reference
@@ -540,7 +724,7 @@ using TrustedPointerHandle = IndirectPointerHandle;
  // shifted indices allows omitting bounds checks.
  constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;

- // The trusted pointer handles are stores shifted to the left by this amount
+ // The trusted pointer handles are stored shifted to the left by this amount
  // to guarantee that they are smaller than the maximum table size.
  constexpr uint32_t kTrustedPointerHandleShift = 9;

@@ -576,11 +760,11 @@ using CodePointerHandle = IndirectPointerHandle;
  // The size of the virtual memory reservation for the code pointer table.
  // As with the other tables, a maximum table size in combination with shifted
  // indices allows omitting bounds checks.
- constexpr size_t kCodePointerTableReservationSize = 16 * MB;
+ constexpr size_t kCodePointerTableReservationSize = 128 * MB;

  // Code pointer handles are shifted by a different amount than indirect pointer
  // handles as the tables have a different maximum size.
- constexpr uint32_t kCodePointerHandleShift = 12;
+ constexpr uint32_t kCodePointerHandleShift = 9;

  // A null handle always references an entry that contains nullptr.
  constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle;
@@ -647,6 +831,13 @@ class Internals {

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
+ #ifdef V8_COMPRESS_POINTERS
+ static const int kJSAPIObjectWithEmbedderSlotsHeaderSize =
+ kJSObjectHeaderSize + kApiInt32Size;
+ #else // !V8_COMPRESS_POINTERS
+ static const int kJSAPIObjectWithEmbedderSlotsHeaderSize =
+ kJSObjectHeaderSize + kApiTaggedSize;
+ #endif // !V8_COMPRESS_POINTERS
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
@@ -666,6 +857,7 @@ class Internals {
  static const int kNumberOfBooleanFlags = 6;
  static const int kErrorMessageParamSize = 1;
  static const int kTablesAlignmentPaddingSize = 1;
+ static const int kRegExpStaticResultOffsetsVectorSize = kApiSystemPointerSize;
  static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
  static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
  static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
@@ -687,9 +879,9 @@ class Internals {
  kIsolateStackGuardOffset + kStackGuardSize;
  static const int kErrorMessageParamOffset =
  kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
- static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset +
- kErrorMessageParamSize +
- kTablesAlignmentPaddingSize;
+ static const int kBuiltinTier0EntryTableOffset =
+ kErrorMessageParamOffset + kErrorMessageParamSize +
+ kTablesAlignmentPaddingSize + kRegExpStaticResultOffsetsVectorSize;
  static const int kBuiltinTier0TableOffset =
  kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
  static const int kNewAllocationInfoOffset =
@@ -698,14 +890,15 @@ class Internals {
  kNewAllocationInfoOffset + kLinearAllocationAreaSize;

  static const int kFastCCallAlignmentPaddingSize =
- kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize;
- static const int kIsolateFastCCallCallerFpOffset =
+ kApiSystemPointerSize == 8 ? 5 * kApiSystemPointerSize
+ : 1 * kApiSystemPointerSize;
+ static const int kIsolateFastCCallCallerPcOffset =
  kOldAllocationInfoOffset + kLinearAllocationAreaSize +
  kFastCCallAlignmentPaddingSize;
- static const int kIsolateFastCCallCallerPcOffset =
- kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
- static const int kIsolateFastApiCallTargetOffset =
+ static const int kIsolateFastCCallCallerFpOffset =
  kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
+ static const int kIsolateFastApiCallTargetOffset =
+ kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateLongTaskStatsCounterOffset =
  kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
  static const int kIsolateThreadLocalTopOffset =
@@ -719,41 +912,50 @@ class Internals {
  kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kIsolateSharedExternalPointerTableAddressOffset =
  kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
+ static const int kIsolateCppHeapPointerTableOffset =
+ kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
  #ifdef V8_ENABLE_SANDBOX
  static const int kIsolateTrustedCageBaseOffset =
- kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+ kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
  static const int kIsolateTrustedPointerTableOffset =
  kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
- static const int kIsolateApiCallbackThunkArgumentOffset =
+ static const int kIsolateSharedTrustedPointerTableAddressOffset =
  kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
+ static const int kIsolateTrustedPointerPublishingScopeOffset =
+ kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize;
+ static const int kIsolateCodePointerTableBaseAddressOffset =
+ kIsolateTrustedPointerPublishingScopeOffset + kApiSystemPointerSize;
+ static const int kIsolateApiCallbackThunkArgumentOffset =
+ kIsolateCodePointerTableBaseAddressOffset + kApiSystemPointerSize;
  #else
  static const int kIsolateApiCallbackThunkArgumentOffset =
- kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
+ kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
  #endif // V8_ENABLE_SANDBOX
  #else
  static const int kIsolateApiCallbackThunkArgumentOffset =
  kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
  #endif // V8_COMPRESS_POINTERS
- static const int kContinuationPreservedEmbedderDataOffset =
+ static const int kIsolateRegexpExecVectorArgumentOffset =
  kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
-
- static const int kWasm64OOBOffsetAlignmentPaddingSize = 0;
- static const int kWasm64OOBOffsetOffset =
- kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize +
- kWasm64OOBOffsetAlignmentPaddingSize;
+ static const int kContinuationPreservedEmbedderDataOffset =
+ kIsolateRegexpExecVectorArgumentOffset + kApiSystemPointerSize;
  static const int kIsolateRootsOffset =
- kWasm64OOBOffsetOffset + sizeof(int64_t);
+ kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize;
+
+ // Assert scopes
+ static const int kDisallowGarbageCollectionAlign = alignof(uint32_t);
+ static const int kDisallowGarbageCollectionSize = sizeof(uint32_t);

  #if V8_STATIC_ROOTS_BOOL

  // These constants are copied from static-roots.h and guarded by static asserts.
  #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
- V(UndefinedValue, 0x69) \
- V(NullValue, 0x85) \
- V(TrueValue, 0xc9) \
- V(FalseValue, 0xad) \
- V(EmptyString, 0xa1) \
- V(TheHoleValue, 0x719)
+ V(UndefinedValue, 0x11) \
+ V(NullValue, 0x2d) \
+ V(TrueValue, 0x71) \
+ V(FalseValue, 0x55) \
+ V(EmptyString, 0x49) \
+ V(TheHoleValue, 0x761)

  using Tagged_t = uint32_t;
  struct StaticReadOnlyRoot {
@@ -761,8 +963,9 @@ class Internals {
  EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
  #undef DEF_ROOT

- static constexpr Tagged_t kFirstStringMap = 0xe5;
- static constexpr Tagged_t kLastStringMap = 0x47d;
+ // Use 0 for kStringMapLowerBound since string maps are the first maps.
+ static constexpr Tagged_t kStringMapLowerBound = 0;
+ static constexpr Tagged_t kStringMapUpperBound = 0x425;

  #define PLUSONE(...) +1
  static constexpr size_t kNumberOfExportedStaticRoots =
@@ -802,13 +1005,13 @@ class Internals {

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
- static const int kThrowOnError = 0;
- static const int kDontThrow = 1;
+ static const int kDontThrow = 0;
+ static const int kThrowOnError = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
- static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
+ static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;

  #ifdef V8_MAP_PACKING
  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
@@ -836,14 +1039,36 @@ class Internals {
  return PlatformSmiTagging::SmiToInt(value);
  }

+ V8_INLINE static constexpr Address AddressToSmi(Address value) {
+ return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) |
+ kSmiTag;
+ }
+
  V8_INLINE static constexpr Address IntToSmi(int value) {
- return internal::IntToSmi(value);
+ return AddressToSmi(static_cast<Address>(value));
+ }
+
+ template <typename T,
+ typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
+ V8_INLINE static constexpr Address IntegralToSmi(T value) {
+ return AddressToSmi(static_cast<Address>(value));
  }

- V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+ template <typename T,
+ typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
+ V8_INLINE static constexpr bool IsValidSmi(T value) {
  return PlatformSmiTagging::IsValidSmi(value);
  }

+ template <typename T,
+ typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
+ static constexpr std::optional<Address> TryIntegralToSmi(T value) {
+ if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) {
+ return {AddressToSmi(static_cast<Address>(value))};
+ }
+ return {};
+ }
+
  #if V8_STATIC_ROOTS_BOOL
  V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
  return static_cast<Tagged_t>(obj) == constant;
@@ -1028,15 +1253,15 @@ class Internals {
  #endif
  }

- template <ExternalPointerTag tag>
+ template <ExternalPointerTagRange tag_range>
  V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
  Address heap_object_ptr,
  int offset) {
  #ifdef V8_ENABLE_SANDBOX
- static_assert(tag != kExternalPointerNullTag);
- // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
+ static_assert(!tag_range.IsEmpty());
+ // See src/sandbox/external-pointer-table.h. Logic duplicated here so
  // it can be inlined and doesn't require an additional call.
- Address* table = IsSharedExternalPointerType(tag)
+ Address* table = IsSharedExternalPointerType(tag_range)
  ? GetSharedExternalPointerTableBase(isolate)
  : GetExternalPointerTableBase(isolate);
  internal::ExternalPointerHandle handle =
@@ -1045,7 +1270,14 @@ class Internals {
  std::atomic<Address>* ptr =
  reinterpret_cast<std::atomic<Address>*>(&table[index]);
  Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
- return entry & ~tag;
+ ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
+ (entry & kExternalPointerTagMask) >> kExternalPointerTagShift);
+ if (V8_LIKELY(tag_range.Contains(actual_tag))) {
+ return entry & kExternalPointerPayloadMask;
+ } else {
+ return 0;
+ }
+ return entry;
  #else
  return ReadRawField<Address>(heap_object_ptr, offset);
  #endif // V8_ENABLE_SANDBOX
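The new ReadExternalPointerField body above recovers the tag from bits [49, 56) of the table entry and masks the entry with kExternalPointerPayloadMask to obtain the pointer. A standalone sketch of that decode using the constants added earlier in this diff; the sample pointer and tag values are made up for illustration:

    #include <cstdint>

    // Values mirror the constants added to this header.
    constexpr uint64_t kTagShift = 49;
    constexpr uint64_t kTagMask = 0x00fe000000000000ULL;
    constexpr uint64_t kPayloadMask = 0xff00ffffffffffffULL;

    constexpr uint64_t Encode(uint64_t pointer, uint16_t tag) {
      // A table entry stores the raw pointer with the 7-bit tag in bits [49, 56).
      return pointer | (static_cast<uint64_t>(tag) << kTagShift);
    }

    constexpr uint64_t kSamplePointer = 0x0000123456789ab0ULL;  // made-up payload
    constexpr uint16_t kSampleTag = 0x2a;                       // made-up 7-bit tag
    constexpr uint64_t kEntry = Encode(kSamplePointer, kSampleTag);

    static_assert(((kEntry & kTagMask) >> kTagShift) == kSampleTag);  // tag check
    static_assert((kEntry & kPayloadMask) == kSamplePointer);         // payload recovery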
@@ -1099,7 +1331,7 @@ class BackingStoreBase {};

  // The maximum value in enum GarbageCollectionReason, defined in heap.h.
  // This is needed for histograms sampling garbage collection reasons.
- constexpr int kGarbageCollectionReasonMaxValue = 27;
+ constexpr int kGarbageCollectionReasonMaxValue = 29;

  // Base class for the address block allocator compatible with standard
  // containers, which registers its allocated range as strong roots.
@@ -1107,16 +1339,19 @@ class V8_EXPORT StrongRootAllocatorBase {
  public:
  Heap* heap() const { return heap_; }

- bool operator==(const StrongRootAllocatorBase& other) const {
- return heap_ == other.heap_;
- }
- bool operator!=(const StrongRootAllocatorBase& other) const {
- return heap_ != other.heap_;
+ friend bool operator==(const StrongRootAllocatorBase& a,
+ const StrongRootAllocatorBase& b) {
+ // TODO(pkasting): Replace this body with `= default` after dropping support
+ // for old gcc versions.
+ return a.heap_ == b.heap_;
  }

  protected:
  explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
+ explicit StrongRootAllocatorBase(LocalHeap* heap);
+ explicit StrongRootAllocatorBase(Isolate* isolate);
  explicit StrongRootAllocatorBase(v8::Isolate* isolate);
+ explicit StrongRootAllocatorBase(LocalIsolate* isolate);

  // Allocate/deallocate a range of n elements of type internal::Address.
  Address* allocate_impl(size_t n);
@@ -1132,38 +1367,74 @@ class V8_EXPORT StrongRootAllocatorBase {
  // and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
  // as strong roots.
  template <typename T>
- class StrongRootAllocator : public StrongRootAllocatorBase,
- private std::allocator<T> {
+ class StrongRootAllocator : private std::allocator<T> {
  public:
  using value_type = T;

- explicit StrongRootAllocator(Heap* heap) : StrongRootAllocatorBase(heap) {}
- explicit StrongRootAllocator(v8::Isolate* isolate)
- : StrongRootAllocatorBase(isolate) {}
+ template <typename HeapOrIsolateT>
+ explicit StrongRootAllocator(HeapOrIsolateT*) {}
  template <typename U>
- StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept
- : StrongRootAllocatorBase(other) {}
+ StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept {}

  using std::allocator<T>::allocate;
  using std::allocator<T>::deallocate;
  };

+ // TODO(pkasting): Replace with `requires` clauses after dropping support for
+ // old gcc versions.
+ template <typename Iterator, typename = void>
+ inline constexpr bool kHaveIteratorConcept = false;
+ template <typename Iterator>
+ inline constexpr bool kHaveIteratorConcept<
+ Iterator, std::void_t<typename Iterator::iterator_concept>> = true;
+
+ template <typename Iterator, typename = void>
+ inline constexpr bool kHaveIteratorCategory = false;
+ template <typename Iterator>
+ inline constexpr bool kHaveIteratorCategory<
+ Iterator, std::void_t<typename Iterator::iterator_category>> = true;
+
+ // Helper struct that contains an `iterator_concept` type alias only when either
+ // `Iterator` or `std::iterator_traits<Iterator>` do.
+ // Default: no alias.
+ template <typename Iterator, typename = void>
+ struct MaybeDefineIteratorConcept {};
+ // Use `Iterator::iterator_concept` if available.
+ template <typename Iterator>
+ struct MaybeDefineIteratorConcept<
+ Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
+ using iterator_concept = typename Iterator::iterator_concept;
+ };
+ // Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
+ template <typename Iterator>
+ struct MaybeDefineIteratorConcept<
+ Iterator, std::enable_if_t<kHaveIteratorCategory<Iterator> &&
+ !kHaveIteratorConcept<Iterator>>> {
+ // There seems to be no feature-test macro covering this, so use the
+ // presence of `<ranges>` as a crude proxy, since it was added to the
+ // standard as part of the Ranges papers.
+ // TODO(pkasting): Add this unconditionally after dropping support for old
+ // libstdc++ versions.
+ #if __has_include(<ranges>)
+ using iterator_concept =
+ typename std::iterator_traits<Iterator>::iterator_concept;
+ #endif
+ };
+
  // A class of iterators that wrap some different iterator type.
  // If specified, ElementType is the type of element accessed by the wrapper
  // iterator; in this case, the actual reference and pointer types of Iterator
  // must be convertible to ElementType& and ElementType*, respectively.
  template <typename Iterator, typename ElementType = void>
- class WrappedIterator {
+ class WrappedIterator : public MaybeDefineIteratorConcept<Iterator> {
  public:
  static_assert(
- !std::is_void_v<ElementType> ||
+ std::is_void_v<ElementType> ||
  (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
- ElementType*> &&
+ std::add_pointer_t<ElementType>> &&
  std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
- ElementType&>));
+ std::add_lvalue_reference_t<ElementType>>));

- using iterator_category =
- typename std::iterator_traits<Iterator>::iterator_category;
  using difference_type =
  typename std::iterator_traits<Iterator>::difference_type;
  using value_type =
@@ -1173,24 +1444,100 @@ class WrappedIterator {
  using pointer =
  std::conditional_t<std::is_void_v<ElementType>,
  typename std::iterator_traits<Iterator>::pointer,
- ElementType*>;
+ std::add_pointer_t<ElementType>>;
  using reference =
  std::conditional_t<std::is_void_v<ElementType>,
  typename std::iterator_traits<Iterator>::reference,
- ElementType&>;
+ std::add_lvalue_reference_t<ElementType>>;
+ using iterator_category =
+ typename std::iterator_traits<Iterator>::iterator_category;

- constexpr WrappedIterator() noexcept : it_() {}
+ constexpr WrappedIterator() noexcept = default;
  constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}

+ // TODO(pkasting): Switch to `requires` and concepts after dropping support
+ // for old gcc and libstdc++ versions.
  template <typename OtherIterator, typename OtherElementType,
- std::enable_if_t<std::is_convertible_v<OtherIterator, Iterator>,
- bool> = true>
+ typename = std::enable_if_t<
+ std::is_convertible_v<OtherIterator, Iterator>>>
  constexpr WrappedIterator(
- const WrappedIterator<OtherIterator, OtherElementType>& it) noexcept
- : it_(it.base()) {}
+ const WrappedIterator<OtherIterator, OtherElementType>& other) noexcept
+ : it_(other.base()) {}
+
+ [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
+ [[nodiscard]] constexpr pointer operator->() const noexcept {
+ if constexpr (std::is_pointer_v<Iterator>) {
+ return it_;
+ } else {
+ return it_.operator->();
+ }
+ }

- constexpr reference operator*() const noexcept { return *it_; }
- constexpr pointer operator->() const noexcept { return it_.operator->(); }
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator==(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ == other.base();
+ }
+ #if V8_HAVE_SPACESHIP_OPERATOR
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr auto operator<=>(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ if constexpr (std::three_way_comparable_with<Iterator, OtherIterator>) {
+ return it_ <=> other.base();
+ } else if constexpr (std::totally_ordered_with<Iterator, OtherIterator>) {
+ if (it_ < other.base()) {
+ return std::strong_ordering::less;
+ }
+ return (it_ > other.base()) ? std::strong_ordering::greater
+ : std::strong_ordering::equal;
+ } else {
+ if (it_ < other.base()) {
+ return std::partial_ordering::less;
+ }
+ if (other.base() < it_) {
+ return std::partial_ordering::greater;
+ }
+ return (it_ == other.base()) ? std::partial_ordering::equivalent
+ : std::partial_ordering::unordered;
+ }
+ }
+ #else
+ // Assume that if spaceship isn't present, operator rewriting might not be
+ // either.
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator!=(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ != other.base();
+ }
+
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator<(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ < other.base();
+ }
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator<=(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ <= other.base();
+ }
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator>(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ > other.base();
+ }
+ template <typename OtherIterator, typename OtherElementType>
+ [[nodiscard]] constexpr bool operator>=(
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
+ const noexcept {
+ return it_ >= other.base();
+ }
+ #endif

  constexpr WrappedIterator& operator++() noexcept {
  ++it_;
@@ -1211,116 +1558,68 @@ class WrappedIterator {
1211
1558
  --(*this);
1212
1559
  return result;
1213
1560
  }
1214
- constexpr WrappedIterator operator+(difference_type n) const noexcept {
1561
+ [[nodiscard]] constexpr WrappedIterator operator+(
1562
+ difference_type n) const noexcept {
1215
1563
  WrappedIterator result(*this);
1216
1564
  result += n;
1217
1565
  return result;
1218
1566
  }
1567
+ [[nodiscard]] friend constexpr WrappedIterator operator+(
1568
+ difference_type n, const WrappedIterator& x) noexcept {
1569
+ return x + n;
1570
+ }
1219
1571
  constexpr WrappedIterator& operator+=(difference_type n) noexcept {
1220
1572
  it_ += n;
1221
1573
  return *this;
1222
1574
  }
1223
- constexpr WrappedIterator operator-(difference_type n) const noexcept {
1224
- return *this + (-n);
1575
+ [[nodiscard]] constexpr WrappedIterator operator-(
1576
+ difference_type n) const noexcept {
1577
+ return *this + -n;
1225
1578
  }
1226
1579
  constexpr WrappedIterator& operator-=(difference_type n) noexcept {
1227
- *this += -n;
1228
- return *this;
1580
+ return *this += -n;
1581
+ }
1582
+ template <typename OtherIterator, typename OtherElementType>
1583
+ [[nodiscard]] constexpr auto operator-(
1584
+ const WrappedIterator<OtherIterator, OtherElementType>& other)
1585
+ const noexcept {
1586
+ return it_ - other.base();
1229
1587
  }
1230
- constexpr reference operator[](difference_type n) const noexcept {
1588
+ [[nodiscard]] constexpr reference operator[](
1589
+ difference_type n) const noexcept {
1231
1590
  return it_[n];
1232
1591
  }
1233
1592
 
1234
- constexpr Iterator base() const noexcept { return it_; }
1235
-
1236
- private:
1237
- template <typename OtherIterator, typename OtherElementType>
1238
- friend class WrappedIterator;
1593
+ [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; }
1239
1594
 
1240
1595
  private:
1241
1596
  Iterator it_;
1242
1597
  };
1243
1598
 
1244
- template <typename Iterator, typename ElementType, typename OtherIterator,
1245
- typename OtherElementType>
1246
- constexpr bool operator==(
1247
- const WrappedIterator<Iterator, ElementType>& x,
1248
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1249
- return x.base() == y.base();
1250
- }
1251
-
1252
- template <typename Iterator, typename ElementType, typename OtherIterator,
1253
- typename OtherElementType>
1254
- constexpr bool operator<(
1255
- const WrappedIterator<Iterator, ElementType>& x,
1256
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1257
- return x.base() < y.base();
1258
- }
1259
-
1260
- template <typename Iterator, typename ElementType, typename OtherIterator,
1261
- typename OtherElementType>
1262
- constexpr bool operator!=(
1263
- const WrappedIterator<Iterator, ElementType>& x,
1264
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1265
- return !(x == y);
1266
- }
1267
-
1268
- template <typename Iterator, typename ElementType, typename OtherIterator,
1269
- typename OtherElementType>
1270
- constexpr bool operator>(
1271
- const WrappedIterator<Iterator, ElementType>& x,
1272
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1273
- return y < x;
1274
- }
1275
-
1276
- template <typename Iterator, typename ElementType, typename OtherIterator,
1277
- typename OtherElementType>
1278
- constexpr bool operator>=(
1279
- const WrappedIterator<Iterator, ElementType>& x,
1280
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1281
- return !(x < y);
1282
- }
1283
-
1284
- template <typename Iterator, typename ElementType, typename OtherIterator,
1285
- typename OtherElementType>
1286
- constexpr bool operator<=(
1287
- const WrappedIterator<Iterator, ElementType>& x,
1288
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1289
- return !(y < x);
1290
- }
1291
-
1292
- template <typename Iterator, typename ElementType, typename OtherIterator,
1293
- typename OtherElementType>
1294
- constexpr auto operator-(
1295
- const WrappedIterator<Iterator, ElementType>& x,
1296
- const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept
1297
- -> decltype(x.base() - y.base()) {
1298
- return x.base() - y.base();
1299
- }
1300
-
1301
- template <typename Iterator, typename ElementType>
1302
- constexpr WrappedIterator<Iterator> operator+(
1303
- typename WrappedIterator<Iterator, ElementType>::difference_type n,
1304
- const WrappedIterator<Iterator, ElementType>& x) noexcept {
1305
- x += n;
1306
- return x;
1307
- }
1308
-
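The hunks above fold the former free-function comparison and arithmetic operators into WrappedIterator itself, using operator<=> where V8_HAVE_SPACESHIP_OPERATOR is available and explicit !=/</<=/>/>= otherwise. The sketch below is illustrative only and is not part of this diff; it assumes v8-internal.h is on the include path and simply exercises the wrapper as a constexpr-friendly random-access iterator:

    // Illustrative usage sketch, not code from the package.
    #include <numeric>
    #include "v8-internal.h"

    int SumFirstThree(const int* data) {
      // ElementType defaults to void, so the static_assert is trivially met.
      using It = v8::internal::WrappedIterator<const int*>;
      It first(data);        // explicit constructor wrapping a raw pointer
      It last = first + 3;   // member (and friend) operator+ provide arithmetic
      // Equality and ordering now resolve to the member templates (or <=>),
      // matching what the removed free functions used to provide.
      return std::accumulate(first, last, 0);
    }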
  // Helper functions about values contained in handles.
  // A value is either an indirect pointer or a direct pointer, depending on
  // whether direct local support is enabled.
  class ValueHelper final {
   public:
- #ifdef V8_ENABLE_DIRECT_LOCAL
+   // ValueHelper::InternalRepresentationType is an abstract type that
+   // corresponds to the internal representation of v8::Local and essentially
+   // to what T* really is (these two are always in sync). This type is used in
+   // methods like GetDataFromSnapshotOnce that need access to a handle's
+   // internal representation. In particular, if `x` is a `v8::Local<T>`, then
+   // `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
+ #ifdef V8_ENABLE_DIRECT_HANDLE
    static constexpr Address kTaggedNullAddress = 1;
-   static constexpr Address kEmpty = kTaggedNullAddress;
+
+   using InternalRepresentationType = internal::Address;
+   static constexpr InternalRepresentationType kEmpty = kTaggedNullAddress;
  #else
-   static constexpr Address kEmpty = kNullAddress;
- #endif  // V8_ENABLE_DIRECT_LOCAL
+   using InternalRepresentationType = internal::Address*;
+   static constexpr InternalRepresentationType kEmpty = nullptr;
+ #endif  // V8_ENABLE_DIRECT_HANDLE

    template <typename T>
    V8_INLINE static bool IsEmpty(T* value) {
-     return reinterpret_cast<Address>(value) == kEmpty;
+     return ValueAsRepr(value) == kEmpty;
    }

    // Returns a handle's "value" for all kinds of abstract handles. For Local,
@@ -1332,7 +1631,7 @@ class ValueHelper final {
      return handle.template value<T>();
    }

- #ifdef V8_ENABLE_DIRECT_LOCAL
+ #ifdef V8_ENABLE_DIRECT_HANDLE

    template <typename T>
    V8_INLINE static Address ValueAsAddress(const T* value) {
@@ -1347,7 +1646,17 @@ class ValueHelper final {
      return *reinterpret_cast<T**>(slot);
    }

- #else  // !V8_ENABLE_DIRECT_LOCAL
+   template <typename T>
+   V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
+     return reinterpret_cast<InternalRepresentationType>(value);
+   }
+
+   template <typename T>
+   V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
+     return reinterpret_cast<T*>(repr);
+   }
+
+ #else  // !V8_ENABLE_DIRECT_HANDLE

    template <typename T>
    V8_INLINE static Address ValueAsAddress(const T* value) {
@@ -1359,7 +1668,18 @@ class ValueHelper final {
      return reinterpret_cast<T*>(slot);
    }

- #endif  // V8_ENABLE_DIRECT_LOCAL
+   template <typename T>
+   V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
+     return const_cast<InternalRepresentationType>(
+         reinterpret_cast<const Address*>(value));
+   }
+
+   template <typename T>
+   V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
+     return reinterpret_cast<T*>(repr);
+   }
+
+ #endif  // V8_ENABLE_DIRECT_HANDLE
  };
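As an aside on the new ValueAsRepr/ReprAsValue helpers: they convert between a handle's T* view and InternalRepresentationType (a tagged Address under V8_ENABLE_DIRECT_HANDLE, an Address* slot pointer otherwise), and composing them is the identity on a handle's internal value. A minimal sketch, illustrative only since ValueHelper is an internal class:

    // Illustrative sketch: round-tripping a value pointer through its repr.
    #include "v8-internal.h"

    template <typename T>
    T* ReprRoundTrip(T* value) {
      namespace vi = v8::internal;
      vi::ValueHelper::InternalRepresentationType repr =
          vi::ValueHelper::ValueAsRepr(value);       // T* -> repr
      return vi::ValueHelper::ReprAsValue<T>(repr);  // repr -> T* (same value)
    }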

  /**
@@ -1383,14 +1703,17 @@ class HandleHelper final {
      if (rhs.IsEmpty()) return false;
      return lhs.ptr() == rhs.ptr();
    }
-
-   static V8_EXPORT bool IsOnStack(const void* ptr);
-   static V8_EXPORT void VerifyOnStack(const void* ptr);
-   static V8_EXPORT void VerifyOnMainThread();
  };

  V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);

+ // These functions are here just to match friend declarations in
+ // XxxCallbackInfo classes allowing these functions to access the internals
+ // of the info objects. These functions are supposed to be called by debugger
+ // macros.
+ void PrintFunctionCallbackInfo(void* function_callback_info);
+ void PrintPropertyCallbackInfo(void* property_callback_info);
+
  }  // namespace internal
  }  // namespace v8
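The added PrintFunctionCallbackInfo/PrintPropertyCallbackInfo declarations rely on a standard C++ idiom: a namespace-scope function named in a friend declaration inside a class may be defined elsewhere and still read that class's private members, which is what lets debugger macros dump callback-info internals. A generic illustration of the idiom with hypothetical names (not V8 code):

    #include <cstdio>

    namespace demo {
    class CallbackInfo {
     public:
      explicit CallbackInfo(int length) : length_(length) {}
      // Friend declaration matching a namespace-scope function.
      friend void PrintCallbackInfo(const CallbackInfo& info);

     private:
      int length_;  // private state a debugger helper may want to show
    };

    // Out-of-line definition; friendship grants access to length_.
    void PrintCallbackInfo(const CallbackInfo& info) {
      std::printf("CallbackInfo with %d arguments\n", info.length_);
    }
    }  // namespace demo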