libv8-node 15.14.0.1-aarch64-linux-musl → 18.8.0.0-aarch64-linux-musl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/location.rb +1 -1
  3. data/ext/libv8-node/paths.rb +5 -1
  4. data/lib/libv8/node/version.rb +3 -3
  5. data/vendor/v8/{out.gn → aarch64-linux-musl}/libv8/obj/libv8_monolith.a +0 -0
  6. data/vendor/v8/include/cppgc/allocation.h +184 -47
  7. data/vendor/v8/include/cppgc/common.h +9 -6
  8. data/vendor/v8/include/cppgc/cross-thread-persistent.h +465 -0
  9. data/vendor/v8/include/cppgc/custom-space.h +37 -2
  10. data/vendor/v8/include/cppgc/default-platform.h +40 -49
  11. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  12. data/vendor/v8/include/cppgc/explicit-management.h +100 -0
  13. data/vendor/v8/include/cppgc/garbage-collected.h +19 -29
  14. data/vendor/v8/include/cppgc/heap-consistency.h +266 -0
  15. data/vendor/v8/include/cppgc/heap-state.h +82 -0
  16. data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
  17. data/vendor/v8/include/cppgc/heap.h +73 -6
  18. data/vendor/v8/include/cppgc/internal/api-constants.h +11 -3
  19. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +27 -15
  20. data/vendor/v8/include/cppgc/internal/compiler-specific.h +2 -2
  21. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +4 -1
  22. data/vendor/v8/include/cppgc/internal/gc-info.h +124 -13
  23. data/vendor/v8/include/cppgc/internal/logging.h +3 -3
  24. data/vendor/v8/include/cppgc/internal/name-trait.h +122 -0
  25. data/vendor/v8/include/cppgc/internal/persistent-node.h +116 -16
  26. data/vendor/v8/include/cppgc/internal/pointer-policies.h +84 -32
  27. data/vendor/v8/include/cppgc/internal/write-barrier.h +392 -35
  28. data/vendor/v8/include/cppgc/liveness-broker.h +11 -2
  29. data/vendor/v8/include/cppgc/macros.h +2 -0
  30. data/vendor/v8/include/cppgc/member.h +91 -26
  31. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  32. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  33. data/vendor/v8/include/cppgc/persistent.h +70 -41
  34. data/vendor/v8/include/cppgc/platform.h +52 -26
  35. data/vendor/v8/include/cppgc/prefinalizer.h +36 -13
  36. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  37. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  38. data/vendor/v8/include/cppgc/source-location.h +2 -1
  39. data/vendor/v8/include/cppgc/testing.h +106 -0
  40. data/vendor/v8/include/cppgc/trace-trait.h +8 -3
  41. data/vendor/v8/include/cppgc/type-traits.h +163 -32
  42. data/vendor/v8/include/cppgc/visitor.h +194 -28
  43. data/vendor/v8/include/libplatform/libplatform.h +11 -11
  44. data/vendor/v8/include/libplatform/v8-tracing.h +2 -1
  45. data/vendor/v8/include/v8-array-buffer.h +445 -0
  46. data/vendor/v8/include/v8-callbacks.h +397 -0
  47. data/vendor/v8/include/v8-container.h +129 -0
  48. data/vendor/v8/include/v8-context.h +407 -0
  49. data/vendor/v8/include/v8-cppgc.h +151 -159
  50. data/vendor/v8/include/v8-data.h +80 -0
  51. data/vendor/v8/include/v8-date.h +43 -0
  52. data/vendor/v8/include/v8-debug.h +168 -0
  53. data/vendor/v8/include/v8-embedder-heap.h +218 -0
  54. data/vendor/v8/include/v8-embedder-state-scope.h +51 -0
  55. data/vendor/v8/include/v8-exception.h +217 -0
  56. data/vendor/v8/include/v8-extension.h +62 -0
  57. data/vendor/v8/include/v8-external.h +37 -0
  58. data/vendor/v8/include/v8-fast-api-calls.h +703 -152
  59. data/vendor/v8/include/v8-forward.h +81 -0
  60. data/vendor/v8/include/v8-function-callback.h +475 -0
  61. data/vendor/v8/include/v8-function.h +125 -0
  62. data/vendor/v8/include/v8-initialization.h +315 -0
  63. data/vendor/v8/include/v8-inspector.h +76 -27
  64. data/vendor/v8/include/v8-internal.h +285 -51
  65. data/vendor/v8/include/v8-isolate.h +1709 -0
  66. data/vendor/v8/include/v8-json.h +47 -0
  67. data/vendor/v8/include/v8-local-handle.h +455 -0
  68. data/vendor/v8/include/v8-locker.h +149 -0
  69. data/vendor/v8/include/v8-maybe.h +137 -0
  70. data/vendor/v8/include/v8-memory-span.h +43 -0
  71. data/vendor/v8/include/v8-message.h +216 -0
  72. data/vendor/v8/include/v8-metrics.h +131 -9
  73. data/vendor/v8/include/v8-microtask-queue.h +152 -0
  74. data/vendor/v8/include/v8-microtask.h +28 -0
  75. data/vendor/v8/include/v8-object.h +775 -0
  76. data/vendor/v8/include/v8-persistent-handle.h +590 -0
  77. data/vendor/v8/include/v8-platform.h +433 -25
  78. data/vendor/v8/include/v8-primitive-object.h +118 -0
  79. data/vendor/v8/include/v8-primitive.h +866 -0
  80. data/vendor/v8/include/v8-profiler.h +149 -10
  81. data/vendor/v8/include/v8-promise.h +174 -0
  82. data/vendor/v8/include/v8-proxy.h +50 -0
  83. data/vendor/v8/include/v8-regexp.h +105 -0
  84. data/vendor/v8/include/v8-script.h +747 -0
  85. data/vendor/v8/include/v8-snapshot.h +196 -0
  86. data/vendor/v8/include/v8-statistics.h +217 -0
  87. data/vendor/v8/include/v8-template.h +1079 -0
  88. data/vendor/v8/include/v8-traced-handle.h +420 -0
  89. data/vendor/v8/include/v8-typed-array.h +282 -0
  90. data/vendor/v8/include/v8-unwinder-state.h +31 -0
  91. data/vendor/v8/include/v8-unwinder.h +132 -0
  92. data/vendor/v8/include/v8-util.h +8 -2
  93. data/vendor/v8/include/v8-value-serializer-version.h +1 -1
  94. data/vendor/v8/include/v8-value-serializer.h +279 -0
  95. data/vendor/v8/include/v8-value.h +526 -0
  96. data/vendor/v8/include/v8-version.h +4 -4
  97. data/vendor/v8/include/v8-wasm.h +257 -0
  98. data/vendor/v8/include/v8-weak-callback-info.h +87 -0
  99. data/vendor/v8/include/v8.h +41 -12051
  100. data/vendor/v8/include/v8config.h +142 -21
  101. metadata +64 -10
  102. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +0 -30
  103. data/vendor/v8/include/cppgc/internal/process-heap.h +0 -34
data/vendor/v8/include/v8-internal.h

@@ -15,9 +15,12 @@
 
 namespace v8 {
 
+class Array;
 class Context;
 class Data;
 class Isolate;
+template <typename T>
+class Local;
 
 namespace internal {
 
@@ -26,6 +29,13 @@ class Isolate;
 typedef uintptr_t Address;
 static const Address kNullAddress = 0;
 
+constexpr int KB = 1024;
+constexpr int MB = KB * 1024;
+constexpr int GB = MB * 1024;
+#ifdef V8_TARGET_ARCH_X64
+constexpr size_t TB = size_t{GB} * 1024;
+#endif
+
 /**
  * Configuration of tagging scheme.
  */
@@ -33,6 +43,7 @@ const int kApiSystemPointerSize = sizeof(void*);
 const int kApiDoubleSize = sizeof(double);
 const int kApiInt32Size = sizeof(int32_t);
 const int kApiInt64Size = sizeof(int64_t);
+const int kApiSizetSize = sizeof(size_t);
 
 // Tag information for HeapObject.
 const int kHeapObjectTag = 1;
@@ -40,6 +51,13 @@ const int kWeakHeapObjectTag = 3;
 const int kHeapObjectTagSize = 2;
 const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
 
+// Tag information for fowarding pointers stored in object headers.
+// 0b00 at the lowest 2 bits in the header indicates that the map word is a
+// forwarding pointer.
+const int kForwardingTag = 0;
+const int kForwardingTagSize = 2;
+const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
+
 // Tag information for Smi.
 const int kSmiTag = 0;
 const int kSmiTagSize = 1;
@@ -98,6 +116,11 @@ struct SmiTagging<8> {
 };
 
 #ifdef V8_COMPRESS_POINTERS
+// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
+// compression.
+constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
+constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
+
 static_assert(
     kApiSystemPointerSize == kApiInt64Size,
     "Pointer compression can be enabled only for 64-bit architectures");
@@ -110,16 +133,6 @@ constexpr bool PointerCompressionIsEnabled() {
   return kApiTaggedSize != kApiSystemPointerSize;
 }
 
-constexpr bool HeapSandboxIsEnabled() {
-#ifdef V8_HEAP_SANDBOX
-  return true;
-#else
-  return false;
-#endif
-}
-
-using ExternalPointer_t = Address;
-
 #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
 using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
 #else
@@ -140,6 +153,169 @@ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
          kSmiTag;
 }
 
+/*
+ * Sandbox related types, constants, and functions.
+ */
+constexpr bool SandboxIsEnabled() {
+#ifdef V8_SANDBOX
+  return true;
+#else
+  return false;
+#endif
+}
+
+constexpr bool SandboxedExternalPointersAreEnabled() {
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
+  return true;
+#else
+  return false;
+#endif
+}
+
+// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
+// for example by storing them as offset rather than as raw pointers.
+using SandboxedPointer_t = Address;
+
+// ExternalPointers point to objects located outside the sandbox. When sandboxed
+// external pointers are enabled, these are stored in an external pointer table
+// and referenced from HeapObjects through indices.
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
+using ExternalPointer_t = uint32_t;
+#else
+using ExternalPointer_t = Address;
+#endif
+
+#ifdef V8_SANDBOX_IS_AVAILABLE
+
+// Size of the sandbox, excluding the guard regions surrounding it.
+constexpr size_t kSandboxSizeLog2 = 40;  // 1 TB
+constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
+
+// Required alignment of the sandbox. For simplicity, we require the
+// size of the guard regions to be a multiple of this, so that this specifies
+// the alignment of the sandbox including and excluding surrounding guard
+// regions. The alignment requirement is due to the pointer compression cage
+// being located at the start of the sandbox.
+constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
+
+// Sandboxed pointers are stored inside the heap as offset from the sandbox
+// base shifted to the left. This way, it is guaranteed that the offset is
+// smaller than the sandbox size after shifting it to the right again. This
+// constant specifies the shift amount.
+constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
+
+// Size of the guard regions surrounding the sandbox. This assumes a worst-case
+// scenario of a 32-bit unsigned index used to access an array of 64-bit
+// values.
+constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
+
+static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
+              "The size of the guard regions around the sandbox must be a "
+              "multiple of its required alignment.");
+
+// Minimum size of the sandbox, excluding the guard regions surrounding it. If
+// the virtual memory reservation for the sandbox fails, its size is currently
+// halved until either the reservation succeeds or the minimum size is reached.
+// A minimum of 32GB allows the 4GB pointer compression region as well as the
+// ArrayBuffer partition and two 10GB Wasm memory cages to fit into the
+// sandbox. 32GB should also be the minimum possible size of the userspace
+// address space as there are some machine configurations with only 36 virtual
+// address bits.
+constexpr size_t kSandboxMinimumSize = 32ULL * GB;
+
+static_assert(kSandboxMinimumSize <= kSandboxSize,
+              "The minimal size of the sandbox must be smaller or equal to the "
+              "regular size.");
+
+// On OSes where reserving virtual memory is too expensive to reserve the
+// entire address space backing the sandbox, notably Windows pre 8.1, we create
+// a partially reserved sandbox that doesn't actually reserve most of the
+// memory, and so doesn't have the desired security properties as unrelated
+// memory allocations could end up inside of it, but which still ensures that
+// objects that should be located inside the sandbox are allocated within
+// kSandboxSize bytes from the start of the sandbox. The minimum size of the
+// region that is actually reserved for such a sandbox is specified by this
+// constant and should be big enough to contain the pointer compression cage as
+// well as the ArrayBuffer partition.
+constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
+
+static_assert(kSandboxMinimumSize > kPtrComprCageReservationSize,
+              "The sandbox must be larger than the pointer compression cage "
+              "contained within it.");
+static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
+              "The minimum reservation size for a sandbox must be larger than "
+              "the pointer compression cage contained within it.");
+
+// For now, even if the sandbox is enabled, we still allow backing stores to be
+// allocated outside of it as fallback. This will simplify the initial rollout.
+// However, if sandboxed pointers are also enabled, we must always place
+// backing stores inside the sandbox as they will be referenced though them.
+#ifdef V8_SANDBOXED_POINTERS
+constexpr bool kAllowBackingStoresOutsideSandbox = false;
+#else
+constexpr bool kAllowBackingStoresOutsideSandbox = true;
+#endif  // V8_SANDBOXED_POINTERS
+
+// The size of the virtual memory reservation for an external pointer table.
+// This determines the maximum number of entries in a table. Using a maximum
+// size allows omitting bounds checks on table accesses if the indices are
+// guaranteed (e.g. through shifting) to be below the maximum index. This
+// value must be a power of two.
+static const size_t kExternalPointerTableReservationSize = 128 * MB;
+
+// The maximum number of entries in an external pointer table.
+static const size_t kMaxSandboxedExternalPointers =
+    kExternalPointerTableReservationSize / kApiSystemPointerSize;
+
+// The external pointer table indices stored in HeapObjects as external
+// pointers are shifted to the left by this amount to guarantee that they are
+// smaller than the maximum table size.
+static const uint32_t kExternalPointerIndexShift = 8;
+static_assert((1 << (32 - kExternalPointerIndexShift)) ==
+                  kMaxSandboxedExternalPointers,
+              "kExternalPointerTableReservationSize and "
+              "kExternalPointerIndexShift don't match");
+
+#endif  // V8_SANDBOX_IS_AVAILABLE
+
+// If sandboxed external pointers are enabled, these tag values will be ORed
+// with the external pointers in the external pointer table to prevent use of
+// pointers of the wrong type. When a pointer is loaded, it is ANDed with the
+// inverse of the expected type's tag. The tags are constructed in a way that
+// guarantees that a failed type check will result in one or more of the top
+// bits of the pointer to be set, rendering the pointer inacessible. Besides
+// the type tag bits (48 through 62), the tags also have the GC mark bit (63)
+// set, so that the mark bit is automatically set when a pointer is written
+// into the external pointer table (in which case it is clearly alive) and is
+// cleared when the pointer is loaded. The exception to this is the free entry
+// tag, which doesn't have the mark bit set, as the entry is not alive. This
+// construction allows performing the type check and removing GC marking bits
+// (the MSB) from the pointer at the same time.
+// Note: this scheme assumes a 48-bit address space and will likely break if
+// more virtual address bits are used.
+constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;
+constexpr uint64_t kExternalPointerTagShift = 48;
+#define MAKE_TAG(v) (static_cast<uint64_t>(v) << kExternalPointerTagShift)
+// clang-format off
+enum ExternalPointerTag : uint64_t {
+  kExternalPointerNullTag =         MAKE_TAG(0b0000000000000000),
+  kExternalPointerFreeEntryTag =    MAKE_TAG(0b0111111110000000),
+  kExternalStringResourceTag =      MAKE_TAG(0b1000000011111111),
+  kExternalStringResourceDataTag =  MAKE_TAG(0b1000000101111111),
+  kForeignForeignAddressTag =       MAKE_TAG(0b1000000110111111),
+  kNativeContextMicrotaskQueueTag = MAKE_TAG(0b1000000111011111),
+  kEmbedderDataSlotPayloadTag =     MAKE_TAG(0b1000000111101111),
+  kCodeEntryPointTag =              MAKE_TAG(0b1000000111110111),
+  kExternalObjectValueTag =         MAKE_TAG(0b1000000111111011),
+};
+// clang-format on
+#undef MAKE_TAG
+
+// Converts encoded external pointer to address.
+V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate,
+                                            ExternalPointer_t pointer,
+                                            ExternalPointerTag tag);
+
 // {obj} must be the raw tagged pointer representation of a HeapObject
 // that's guaranteed to never be in ReadOnlySpace.
 V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
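
Note: the external-pointer tag scheme described in the comment block above (OR the tag into the table entry on store, AND with the inverse of the expected tag on load) can be checked with a few lines of standalone arithmetic. The sketch below is illustrative only; it re-creates the MAKE_TAG construction locally rather than including the header, and it assumes the same 48-bit address space the comment assumes.

    // Illustrative sketch, not part of v8-internal.h.
    #include <cstdint>

    constexpr uint64_t kTagShift = 48;
    constexpr uint64_t MakeTag(uint64_t v) { return v << kTagShift; }

    // Two of the tag patterns from the enum above.
    constexpr uint64_t kTagA = MakeTag(0b1000000011111111);  // kExternalStringResourceTag
    constexpr uint64_t kTagB = MakeTag(0b1000000101111111);  // kExternalStringResourceDataTag

    constexpr uint64_t Store(uint64_t raw, uint64_t tag) { return raw | tag; }
    constexpr uint64_t Load(uint64_t entry, uint64_t expected_tag) {
      return entry & ~expected_tag;  // also strips the GC mark bit (bit 63)
    }

    // A 48-bit pointer stored and loaded with the same tag decodes to itself.
    static_assert(Load(Store(0x123456789A0, kTagA), kTagA) == 0x123456789A0, "");
    // With a mismatched tag, at least one of the top 16 bits stays set, so the
    // result is no longer a canonical, dereferenceable pointer.
    static_assert((Load(Store(0x123456789A0, kTagA), kTagB) >> kTagShift) != 0, "");

    int main() { return 0; }

This is why the comment can say that a failed type check renders the pointer inaccessible without any explicit branch in the load path.
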
@@ -149,12 +325,22 @@ V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
 // language mode is strict.
 V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
 
+V8_EXPORT bool CanHaveInternalField(int instance_type);
+
 /**
  * This class exports constants and functionality from within v8 that
  * is necessary to implement inline functions in the v8 api. Don't
  * depend on functions and constants defined here.
  */
 class Internals {
+#ifdef V8_MAP_PACKING
+  V8_INLINE static constexpr internal::Address UnpackMapWord(
+      internal::Address mapword) {
+    // TODO(wenyuzhao): Clear header metadata.
+    return mapword ^ kMapWordXorMask;
+  }
+#endif
+
  public:
   // These values match non-compiler-dependent values defined within
   // the implementation of v8.
@@ -168,24 +354,46 @@ class Internals {
   static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
   static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
   static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
+  static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize;
+#endif
   static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
-  static const int kFullStringRepresentationMask = 0x0f;
+  static const int kStringRepresentationAndEncodingMask = 0x0f;
   static const int kStringEncodingMask = 0x8;
   static const int kExternalTwoByteRepresentationTag = 0x02;
   static const int kExternalOneByteRepresentationTag = 0x0a;
 
   static const uint32_t kNumIsolateDataSlots = 4;
+  static const int kStackGuardSize = 7 * kApiSystemPointerSize;
+  static const int kBuiltinTier0EntryTableSize = 10 * kApiSystemPointerSize;
+  static const int kBuiltinTier0TableSize = 10 * kApiSystemPointerSize;
 
   // IsolateData layout guarantees.
-  static const int kIsolateEmbedderDataOffset = 0;
+  static const int kIsolateCageBaseOffset = 0;
+  static const int kIsolateStackGuardOffset =
+      kIsolateCageBaseOffset + kApiSystemPointerSize;
+  static const int kBuiltinTier0EntryTableOffset =
+      kIsolateStackGuardOffset + kStackGuardSize;
+  static const int kBuiltinTier0TableOffset =
+      kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
+  static const int kIsolateEmbedderDataOffset =
+      kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
   static const int kIsolateFastCCallCallerFpOffset =
-      kNumIsolateDataSlots * kApiSystemPointerSize;
+      kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
   static const int kIsolateFastCCallCallerPcOffset =
       kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
-  static const int kIsolateStackGuardOffset =
+  static const int kIsolateFastApiCallTargetOffset =
       kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
+  static const int kIsolateLongTaskStatsCounterOffset =
+      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
   static const int kIsolateRootsOffset =
-      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
+      kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
+
+  static const int kExternalPointerTableBufferOffset = 0;
+  static const int kExternalPointerTableCapacityOffset =
+      kExternalPointerTableBufferOffset + kApiSystemPointerSize;
+  static const int kExternalPointerTableFreelistHeadOffset =
+      kExternalPointerTableCapacityOffset + kApiInt32Size;
 
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
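
Note: the IsolateData layout above is a chain of offsets, so the concrete byte positions follow mechanically once the primitive sizes are fixed. The sketch below is illustrative arithmetic only, assuming a 64-bit target such as aarch64 (kApiSystemPointerSize == kApiSizetSize == 8); it restates the constants from this hunk instead of including the header.

    // Illustrative sketch, not part of v8-internal.h.
    constexpr int kPtr = 8;    // kApiSystemPointerSize on a 64-bit target
    constexpr int kSizet = 8;  // kApiSizetSize on a 64-bit target
    constexpr int kNumIsolateDataSlots = 4;
    constexpr int kStackGuardSize = 7 * kPtr;               // 56
    constexpr int kBuiltinTier0EntryTableSize = 10 * kPtr;  // 80
    constexpr int kBuiltinTier0TableSize = 10 * kPtr;       // 80

    constexpr int kCageBase = 0;
    constexpr int kStackGuard = kCageBase + kPtr;                                   // 8
    constexpr int kTier0EntryTable = kStackGuard + kStackGuardSize;                 // 64
    constexpr int kTier0Table = kTier0EntryTable + kBuiltinTier0EntryTableSize;     // 144
    constexpr int kEmbedderData = kTier0Table + kBuiltinTier0TableSize;             // 224
    constexpr int kFastCCallCallerFp = kEmbedderData + kNumIsolateDataSlots * kPtr; // 256
    constexpr int kFastCCallCallerPc = kFastCCallCallerFp + kPtr;                   // 264
    constexpr int kFastApiCallTarget = kFastCCallCallerPc + kPtr;                   // 272
    constexpr int kLongTaskStatsCounter = kFastApiCallTarget + kPtr;                // 280
    constexpr int kRoots = kLongTaskStatsCounter + kSizet;                          // 288

    static_assert(kRoots == 288, "the roots table starts 288 bytes into IsolateData");

    int main() { return 0; }

Compared with 15.14.0.1, where kIsolateEmbedderDataOffset was 0, every offset in this chain has moved.
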
@@ -200,12 +408,13 @@
   static const int kNodeStateIsWeakValue = 2;
   static const int kNodeStateIsPendingValue = 3;
 
-  static const int kFirstNonstringType = 0x40;
-  static const int kOddballType = 0x43;
-  static const int kForeignType = 0x46;
+  static const int kFirstNonstringType = 0x80;
+  static const int kOddballType = 0x83;
+  static const int kForeignType = 0xcc;
   static const int kJSSpecialApiObjectType = 0x410;
-  static const int kJSApiObjectType = 0x420;
   static const int kJSObjectType = 0x421;
+  static const int kFirstJSApiObjectType = 0x422;
+  static const int kLastJSApiObjectType = 0x80A;
 
   static const int kUndefinedOddballKind = 5;
   static const int kNullOddballKind = 3;
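
Note: the single kJSApiObjectType constant from 15.14.0.1 is replaced here by a [kFirstJSApiObjectType, kLastJSApiObjectType] range, and an earlier hunk adds the exported CanHaveInternalField(int) predicate. The sketch below only illustrates what a range check over these constants looks like; it is not the predicate V8 actually exports.

    // Illustrative sketch, not the real CanHaveInternalField() implementation.
    constexpr int kJSSpecialApiObjectType = 0x410;
    constexpr int kJSObjectType = 0x421;
    constexpr int kFirstJSApiObjectType = 0x422;
    constexpr int kLastJSApiObjectType = 0x80A;

    constexpr bool InApiObjectRange(int instance_type) {
      return instance_type == kJSSpecialApiObjectType ||
             instance_type == kJSObjectType ||
             (instance_type >= kFirstJSApiObjectType &&
              instance_type <= kLastJSApiObjectType);
    }

    static_assert(InApiObjectRange(0x421), "plain JSObject");
    static_assert(InApiObjectRange(0x500), "a type inside the API-object range");
    static_assert(!InApiObjectRange(0x80B), "just past the end of the range");

    int main() { return 0; }
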
@@ -220,6 +429,17 @@
   // incremental GC once the external memory reaches this limit.
   static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
 
+#ifdef V8_MAP_PACKING
+  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
+  // The lowest two bits of mapwords are always `0b10`
+  static const uintptr_t kMapWordSignature = 0b10;
+  // XORing a (non-compressed) map with this mask ensures that the two
+  // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
+  // although real Smis have all lower 32 bits unset. We only rely on these
+  // values passing as Smis in very few places.
+  static const int kMapWordXorMask = 0b11;
+#endif
+
   V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
   V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
 #ifdef V8_ENABLE_CHECKS
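
Note: the map-packing constants above imply a plain XOR round trip. A tagged map pointer ends in 0b01 (kHeapObjectTag), so XOR-ing it with kMapWordXorMask (0b11) flips the low bits to the 0b10 signature, and XOR-ing again restores the original word, which is what the UnpackMapWord() helper added earlier in this diff does. A minimal sketch of just that arithmetic, not the actual V8 code:

    // Illustrative sketch, not part of v8-internal.h.
    #include <cstdint>

    constexpr uint64_t kHeapObjectTag = 1;     // tagged map pointers end in 0b01
    constexpr uint64_t kMapWordXorMask = 0b11;
    constexpr uint64_t kMapWordSignature = 0b10;

    constexpr uint64_t Pack(uint64_t tagged_map) { return tagged_map ^ kMapWordXorMask; }
    constexpr uint64_t Unpack(uint64_t map_word) { return map_word ^ kMapWordXorMask; }

    constexpr uint64_t kExampleMap = 0x2a30 | kHeapObjectTag;  // hypothetical map address

    static_assert((Pack(kExampleMap) & 0b11) == kMapWordSignature,
                  "packed map words end in 0b10");
    static_assert(Unpack(Pack(kExampleMap)) == kExampleMap,
                  "XOR packing is an involution");

    int main() { return 0; }
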
@@ -246,6 +466,9 @@
   V8_INLINE static int GetInstanceType(const internal::Address obj) {
     typedef internal::Address A;
     A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
+#ifdef V8_MAP_PACKING
+    map = UnpackMapWord(map);
+#endif
     return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
   }
 
@@ -254,7 +477,7 @@
   }
 
   V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
-    int representation = (instance_type & kFullStringRepresentationMask);
+    int representation = (instance_type & kStringRepresentationAndEncodingMask);
     return representation == kExternalTwoByteRepresentationTag;
   }
 
@@ -296,6 +519,12 @@
     return *reinterpret_cast<void* const*>(addr);
   }
 
+  V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
+    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
+                             kIsolateLongTaskStatsCounterOffset;
+    ++(*reinterpret_cast<size_t*>(addr));
+  }
+
   V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
     internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                              kIsolateRootsOffset +
@@ -325,8 +554,9 @@
       internal::Address heap_object_ptr, int offset) {
 #ifdef V8_COMPRESS_POINTERS
     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
 #else
     return ReadRawField<internal::Address>(heap_object_ptr, offset);
 #endif
@@ -342,9 +572,9 @@
 #endif
   }
 
-  V8_INLINE static internal::Isolate* GetIsolateForHeapSandbox(
+  V8_INLINE static internal::Isolate* GetIsolateForSandbox(
       internal::Address obj) {
-#ifdef V8_HEAP_SANDBOX
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
     return internal::IsolateFromNeverReadOnlySpaceObject(obj);
 #else
     // Not used in non-sandbox mode.
@@ -352,45 +582,43 @@
 #endif
   }
 
+  V8_INLINE static Address DecodeExternalPointer(
+      const Isolate* isolate, ExternalPointer_t encoded_pointer,
+      ExternalPointerTag tag) {
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
+    return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
+#else
+    return encoded_pointer;
+#endif
+  }
+
   V8_INLINE static internal::Address ReadExternalPointerField(
-      internal::Isolate* isolate, internal::Address heap_object_ptr,
-      int offset) {
-    internal::Address value = ReadRawField<Address>(heap_object_ptr, offset);
-#ifdef V8_HEAP_SANDBOX
+      internal::Isolate* isolate, internal::Address heap_object_ptr, int offset,
+      ExternalPointerTag tag) {
+#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
+    internal::ExternalPointer_t encoded_value =
+        ReadRawField<uint32_t>(heap_object_ptr, offset);
     // We currently have to treat zero as nullptr in embedder slots.
-    if (value) value = DecodeExternalPointer(isolate, value);
+    return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag)
+                         : 0;
+#else
+    return ReadRawField<Address>(heap_object_ptr, offset);
 #endif
-    return value;
   }
 
 #ifdef V8_COMPRESS_POINTERS
-  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
-  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
-  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
-
-  // See v8:10391 for details about V8 heap sandbox.
-  static constexpr uint32_t kExternalPointerSalt =
-      0x7fffffff & ~static_cast<uint32_t>(kHeapObjectTagMask);
-
-  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
+  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
      internal::Address addr) {
-    return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
+    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
   }
 
   V8_INLINE static internal::Address DecompressTaggedAnyField(
       internal::Address heap_object_ptr, uint32_t value) {
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
   }
 
-  V8_INLINE static Address DecodeExternalPointer(
-      const Isolate* isolate, ExternalPointer_t encoded_pointer) {
-#ifndef V8_HEAP_SANDBOX
-    return encoded_pointer;
-#else
-    return encoded_pointer ^ kExternalPointerSalt;
-#endif
-  }
 #endif  // V8_COMPRESS_POINTERS
 };
 
@@ -414,14 +642,20 @@ void CastCheck<false>::Perform(T* data) {}
 
 template <class T>
 V8_INLINE void PerformCastCheck(T* data) {
-  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
+  CastCheck<std::is_base_of<Data, T>::value &&
+            !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
 }
 
 // A base class for backing stores, which is needed due to vagaries of
 // how static casts work with std::shared_ptr.
 class BackingStoreBase {};
 
+// The maximum value in enum GarbageCollectionReason, defined in heap.h.
+// This is needed for histograms sampling garbage collection reasons.
+constexpr int kGarbageCollectionReasonMaxValue = 25;
+
 }  // namespace internal
+
 }  // namespace v8
 
 #endif  // INCLUDE_V8_INTERNAL_H_