libv8-node 22.7.0.4-x86_64-linux → 24.1.0.0-x86_64-linux

This diff shows the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (80)
  1. checksums.yaml +4 -4
  2. data/lib/libv8/node/version.rb +3 -3
  3. data/vendor/v8/include/cppgc/allocation.h +11 -13
  4. data/vendor/v8/include/cppgc/default-platform.h +3 -2
  5. data/vendor/v8/include/cppgc/garbage-collected.h +8 -0
  6. data/vendor/v8/include/cppgc/heap-consistency.h +1 -1
  7. data/vendor/v8/include/cppgc/heap-statistics.h +2 -0
  8. data/vendor/v8/include/cppgc/internal/api-constants.h +2 -14
  9. data/vendor/v8/include/cppgc/internal/base-page-handle.h +2 -4
  10. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +0 -4
  11. data/vendor/v8/include/cppgc/internal/caged-heap.h +0 -4
  12. data/vendor/v8/include/cppgc/internal/compiler-specific.h +9 -1
  13. data/vendor/v8/include/cppgc/internal/conditional-stack-allocated.h +41 -0
  14. data/vendor/v8/include/cppgc/internal/gc-info.h +12 -10
  15. data/vendor/v8/include/cppgc/internal/logging.h +3 -3
  16. data/vendor/v8/include/cppgc/internal/member-storage.h +69 -20
  17. data/vendor/v8/include/cppgc/internal/name-trait.h +5 -1
  18. data/vendor/v8/include/cppgc/internal/persistent-node.h +8 -3
  19. data/vendor/v8/include/cppgc/internal/pointer-policies.h +48 -11
  20. data/vendor/v8/include/cppgc/macros.h +21 -0
  21. data/vendor/v8/include/cppgc/member.h +70 -36
  22. data/vendor/v8/include/cppgc/name-provider.h +10 -0
  23. data/vendor/v8/include/cppgc/platform.h +11 -0
  24. data/vendor/v8/include/cppgc/type-traits.h +1 -0
  25. data/vendor/v8/include/cppgc/visitor.h +25 -1
  26. data/vendor/v8/include/libplatform/libplatform-export.h +2 -2
  27. data/vendor/v8/include/libplatform/v8-tracing.h +0 -1
  28. data/vendor/v8/include/v8-array-buffer.h +143 -46
  29. data/vendor/v8/include/v8-callbacks.h +94 -31
  30. data/vendor/v8/include/v8-context.h +48 -15
  31. data/vendor/v8/include/v8-cppgc.h +5 -56
  32. data/vendor/v8/include/v8-data.h +5 -0
  33. data/vendor/v8/include/v8-date.h +9 -0
  34. data/vendor/v8/include/v8-debug.h +11 -0
  35. data/vendor/v8/include/v8-embedder-heap.h +1 -29
  36. data/vendor/v8/include/v8-exception.h +72 -0
  37. data/vendor/v8/include/v8-external-memory-accounter.h +60 -0
  38. data/vendor/v8/include/v8-fast-api-calls.h +41 -206
  39. data/vendor/v8/include/v8-function-callback.h +190 -78
  40. data/vendor/v8/include/v8-function.h +11 -3
  41. data/vendor/v8/include/v8-handle-base.h +20 -2
  42. data/vendor/v8/include/v8-initialization.h +26 -1
  43. data/vendor/v8/include/v8-inspector.h +10 -3
  44. data/vendor/v8/include/v8-internal.h +638 -315
  45. data/vendor/v8/include/v8-isolate.h +275 -189
  46. data/vendor/v8/include/v8-local-handle.h +69 -42
  47. data/vendor/v8/include/v8-maybe.h +2 -1
  48. data/vendor/v8/include/v8-memory-span.h +149 -24
  49. data/vendor/v8/include/v8-message.h +9 -22
  50. data/vendor/v8/include/v8-metrics.h +14 -0
  51. data/vendor/v8/include/v8-microtask-queue.h +0 -5
  52. data/vendor/v8/include/v8-object.h +291 -37
  53. data/vendor/v8/include/v8-persistent-handle.h +17 -36
  54. data/vendor/v8/include/v8-platform.h +66 -45
  55. data/vendor/v8/include/v8-primitive.h +221 -5
  56. data/vendor/v8/include/v8-profiler.h +51 -2
  57. data/vendor/v8/include/v8-promise.h +2 -2
  58. data/vendor/v8/include/v8-proxy.h +0 -1
  59. data/vendor/v8/include/v8-regexp.h +0 -1
  60. data/vendor/v8/include/v8-sandbox.h +173 -0
  61. data/vendor/v8/include/v8-script.h +65 -17
  62. data/vendor/v8/include/v8-snapshot.h +38 -2
  63. data/vendor/v8/include/v8-source-location.h +6 -1
  64. data/vendor/v8/include/v8-template.h +111 -263
  65. data/vendor/v8/include/v8-trace-categories.h +23 -0
  66. data/vendor/v8/include/v8-traced-handle.h +20 -32
  67. data/vendor/v8/include/v8-typed-array.h +6 -10
  68. data/vendor/v8/include/v8-unwinder-state.h +2 -3
  69. data/vendor/v8/include/v8-unwinder.h +2 -1
  70. data/vendor/v8/include/v8-util.h +1 -117
  71. data/vendor/v8/include/v8-value-serializer-version.h +3 -3
  72. data/vendor/v8/include/v8-value.h +21 -2
  73. data/vendor/v8/include/v8-version.h +4 -4
  74. data/vendor/v8/include/v8-wasm.h +27 -0
  75. data/vendor/v8/include/v8-weak-callback-info.h +20 -12
  76. data/vendor/v8/include/v8.h +3 -3
  77. data/vendor/v8/include/v8config.h +83 -45
  78. data/vendor/v8/x86_64-linux/libv8/obj/libv8_monolith.a +0 -0
  79. metadata +6 -3
  80. data/vendor/v8/include/cppgc/ephemeron-pair.h +0 -30
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 336c0ae682db5dc4f6f217accd174186b010ea7e3904283a10435857730c840b
-  data.tar.gz: 1b4bfba3017cc4ae48c834e89fff4a09768ee16f028f1de30a7fd7a59a2f5ef8
+  metadata.gz: ee09c619e80828aaeb2c7e2058adcfa5336b69a3b4daa9a2010ee516957018d4
+  data.tar.gz: 05c79c8073a9c02c707c80455208a820d7f593a19b3bebe8548a20f80b54f6ed
 SHA512:
-  metadata.gz: d6a4a07c40126edd2f0cbced0b7c626d2924736a1021ddf3b4822612a9144fd0baa64514b76513847ef6b382cd785f0b2bf892897e7d85108fc6ad523f4e77c3
-  data.tar.gz: e6b7927204c8a465694a4898634c73d1e0293e62b9f97196751b097e4155ee644d235374de539f3ffcec0c3e09d6f9cd27ae42cd37a9985359303ee7aeacce41
+  metadata.gz: 96a2d77d885fdb71db406980345d2630a9ecce05b91cb4030fac3ceb074912fd9a018b654c18f8e36a8001a76ab38f19f5d503a170b08576ebcb21eae8c5d8f2
+  data.tar.gz: ffb27f160c1942977bb41e40e75841938a129b78648d828bb75937e8c8f8666d5affd9f419f28192dd8148a4f6168e335de04c7887ccf72f1e9f3e90f9d05168
data/lib/libv8/node/version.rb CHANGED
@@ -4,7 +4,7 @@ module Libv8
 end
 
 module Libv8::Node
-  VERSION = '22.7.0.4'
-  NODE_VERSION = '22.7.0'
-  LIBV8_VERSION = '12.4.254.21' # from src/node-.../deps/v8/include/v8-version.h
+  VERSION = '24.1.0.0'
+  NODE_VERSION = '24.1.0'
+  LIBV8_VERSION = '13.6.233.10' # from src/node-.../deps/v8/include/v8-version.h
 end
data/vendor/v8/include/cppgc/allocation.h CHANGED
@@ -44,10 +44,9 @@ class AllocationHandle;
 
 namespace internal {
 
-// Similar to C++17 std::align_val_t;
-enum class AlignVal : size_t {};
+using AlignVal = std::align_val_t;
 
-class V8_EXPORT MakeGarbageCollectedTraitInternal {
+class MakeGarbageCollectedTraitInternal {
  protected:
   static inline void MarkObjectAsFullyConstructed(const void* payload) {
     // See api_constants for an explanation of the constants.
@@ -121,16 +120,15 @@ class V8_EXPORT MakeGarbageCollectedTraitInternal {
   };
 
  private:
-  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
-                                              GCInfoIndex);
-  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
-                                                  size_t, AlignVal,
-                                                  GCInfoIndex);
-  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
-                                              GCInfoIndex, CustomSpaceIndex);
-  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
-                                                  size_t, AlignVal, GCInfoIndex,
-                                                  CustomSpaceIndex);
+  V8_EXPORT static void* CPPGC_DEFAULT_ALIGNED
+  Allocate(cppgc::AllocationHandle&, size_t, GCInfoIndex);
+  V8_EXPORT static void* CPPGC_DOUBLE_WORD_ALIGNED
+  Allocate(cppgc::AllocationHandle&, size_t, AlignVal, GCInfoIndex);
+  V8_EXPORT static void* CPPGC_DEFAULT_ALIGNED
+  Allocate(cppgc::AllocationHandle&, size_t, GCInfoIndex, CustomSpaceIndex);
+  V8_EXPORT static void* CPPGC_DOUBLE_WORD_ALIGNED
+  Allocate(cppgc::AllocationHandle&, size_t, AlignVal, GCInfoIndex,
+           CustomSpaceIndex);
 
   friend class HeapObjectHeader;
 };
data/vendor/v8/include/cppgc/default-platform.h CHANGED
@@ -37,11 +37,12 @@ class V8_EXPORT DefaultPlatform : public Platform {
     return v8_platform_->MonotonicallyIncreasingTime();
   }
 
-  std::shared_ptr<cppgc::TaskRunner> GetForegroundTaskRunner() override {
+  std::shared_ptr<cppgc::TaskRunner> GetForegroundTaskRunner(
+      TaskPriority priority) override {
     // V8's default platform creates a new task runner when passed the
     // `v8::Isolate` pointer the first time. For non-default platforms this will
     // require getting the appropriate task runner.
-    return v8_platform_->GetForegroundTaskRunner(kNoIsolate);
+    return v8_platform_->GetForegroundTaskRunner(kNoIsolate, priority);
   }
 
   std::unique_ptr<cppgc::JobHandle> PostJob(
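
For context: `DefaultPlatform::GetForegroundTaskRunner` now receives the requested `TaskPriority` and forwards it to the wrapped `v8::Platform`. A minimal sketch of an embedder hooking this, assuming the `cppgc::TaskPriority` alias from cppgc/platform.h; `TracingPlatform` is a hypothetical name:

#include <memory>
#include "cppgc/default-platform.h"

// Sketch only, not part of the package: a platform that can route work by
// the priority now threaded through GetForegroundTaskRunner().
class TracingPlatform final : public cppgc::DefaultPlatform {
 public:
  std::shared_ptr<cppgc::TaskRunner> GetForegroundTaskRunner(
      cppgc::TaskPriority priority) override {
    // An embedder could hand back a throttled runner for low priorities;
    // this sketch simply forwards, as DefaultPlatform itself now does.
    return DefaultPlatform::GetForegroundTaskRunner(priority);
  }
};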
data/vendor/v8/include/cppgc/garbage-collected.h CHANGED
@@ -94,6 +94,14 @@ class GarbageCollectedMixin {
  public:
   using IsGarbageCollectedMixinTypeMarker = void;
 
+  // Must use MakeGarbageCollected.
+  void* operator new(size_t) = delete;
+  void* operator new[](size_t) = delete;
+  // The garbage collector takes care of reclaiming the object.
+  // Do not override the non-array variant of `delete` so as not to conflict
+  // with the operator in GarbageCollected above.
+  void operator delete[](void*) = delete;
+
   /**
    * This Trace method must be overridden by objects inheriting from
    * GarbageCollectedMixin.
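
For context: with `operator new` deleted on the mixin too, instances of garbage-collected classes can only be created through `cppgc::MakeGarbageCollected`. A minimal sketch using the public cppgc API (`Mixin` and `Object` are hypothetical names):

#include "cppgc/allocation.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/visitor.h"

class Mixin : public cppgc::GarbageCollectedMixin {
 public:
  void Trace(cppgc::Visitor*) const override {}
};

class Object final : public cppgc::GarbageCollected<Object>, public Mixin {
 public:
  void Trace(cppgc::Visitor* v) const override { Mixin::Trace(v); }
};

Object* Create(cppgc::AllocationHandle& handle) {
  // auto* fails = new Object;  // ill-formed: operator new is deleted.
  return cppgc::MakeGarbageCollected<Object>(handle);
}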
data/vendor/v8/include/cppgc/heap-consistency.h CHANGED
@@ -114,7 +114,7 @@ class HeapConsistency final {
    * has not yet been processed.
    *
    * \param params The parameters retrieved from `GetWriteBarrierType()`.
-   * \param object The pointer to the object. May be an interior pointer to a
+   * \param object The pointer to the object. May be an interior pointer to
    * an interface of the actual object.
    */
   static V8_INLINE void DijkstraWriteBarrier(const WriteBarrierParams& params,
data/vendor/v8/include/cppgc/heap-statistics.h CHANGED
@@ -102,6 +102,8 @@ struct HeapStatistics final {
   size_t resident_size_bytes = 0;
   /** Amount of memory actually used on the heap. */
   size_t used_size_bytes = 0;
+  /** Memory retained in the page pool, not used directly by the heap. */
+  size_t pooled_memory_size_bytes = 0;
   /** Detail level of this HeapStatistics. */
   DetailLevel detail_level;
 
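For context: the new `pooled_memory_size_bytes` field exposes page-pool memory that `used_size_bytes` never covered. A sketch of reading it, assuming the `v8::CppHeap::CollectStatistics` entry point:

#include <cstddef>
#include "cppgc/heap-statistics.h"
#include "v8-cppgc.h"

size_t PooledBytes(v8::CppHeap& heap) {
  const cppgc::HeapStatistics stats =
      heap.CollectStatistics(cppgc::HeapStatistics::DetailLevel::kBrief);
  // Added in this version: memory the page pool retains for reuse.
  return stats.pooled_memory_size_bytes;
}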
data/vendor/v8/include/cppgc/internal/api-constants.h CHANGED
@@ -30,13 +30,8 @@ static constexpr size_t kFullyConstructedBitFieldOffsetFromPayload =
 // Mask for in-construction bit.
 static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1};
 
-static constexpr size_t kPageSize = size_t{1} << 17;
-
-#if defined(V8_HOST_ARCH_ARM64) && defined(V8_OS_DARWIN)
-constexpr size_t kGuardPageSize = 0;
-#else
-constexpr size_t kGuardPageSize = 4096;
-#endif
+static constexpr size_t kPageSizeBits = 17;
+static constexpr size_t kPageSize = size_t{1} << kPageSizeBits;
 
 static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
 
@@ -49,12 +44,6 @@ constexpr unsigned kPointerCompressionShift = 1;
 #endif  // !defined(CPPGC_POINTER_COMPRESSION)
 
 #if defined(CPPGC_CAGED_HEAP)
-#if defined(CPPGC_2GB_CAGE)
-constexpr size_t kCagedHeapDefaultReservationSize =
-    static_cast<size_t>(2) * kGB;
-constexpr size_t kCagedHeapMaxReservationSize =
-    kCagedHeapDefaultReservationSize;
-#else  // !defined(CPPGC_2GB_CAGE)
 constexpr size_t kCagedHeapDefaultReservationSize =
     static_cast<size_t>(4) * kGB;
 #if defined(CPPGC_POINTER_COMPRESSION)
@@ -64,7 +53,6 @@ constexpr size_t kCagedHeapMaxReservationSize =
 constexpr size_t kCagedHeapMaxReservationSize =
     kCagedHeapDefaultReservationSize;
 #endif  // !defined(CPPGC_POINTER_COMPRESSION)
-#endif  // !defined(CPPGC_2GB_CAGE)
 constexpr size_t kCagedHeapReservationAlignment = kCagedHeapMaxReservationSize;
 #endif  // defined(CPPGC_CAGED_HEAP)
 
data/vendor/v8/include/cppgc/internal/base-page-handle.h CHANGED
@@ -19,9 +19,7 @@ class BasePageHandle {
  public:
   static V8_INLINE BasePageHandle* FromPayload(void* payload) {
     return reinterpret_cast<BasePageHandle*>(
-        (reinterpret_cast<uintptr_t>(payload) &
-         ~(api_constants::kPageSize - 1)) +
-        api_constants::kGuardPageSize);
+        reinterpret_cast<uintptr_t>(payload) & ~(api_constants::kPageSize - 1));
   }
   static V8_INLINE const BasePageHandle* FromPayload(const void* payload) {
     return FromPayload(const_cast<void*>(payload));
@@ -33,7 +31,7 @@ class BasePageHandle {
  protected:
   explicit BasePageHandle(HeapHandle& heap_handle) : heap_handle_(heap_handle) {
     CPPGC_DCHECK(reinterpret_cast<uintptr_t>(this) % api_constants::kPageSize ==
-                 api_constants::kGuardPageSize);
+                 0);
   }
 
   HeapHandle& heap_handle_;
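
For context: with guard pages gone, a page handle sits exactly at the page boundary and `FromPayload` reduces to a mask. The arithmetic as a standalone sketch (not from the package):

#include <cstdint>

// kPageSize mirrors api_constants::kPageSize (1 << 17 in this version).
constexpr std::uintptr_t kPageSize = std::uintptr_t{1} << 17;

std::uintptr_t PageBase(std::uintptr_t payload) {
  // Clear the low 17 bits: e.g. 0x12345678 -> 0x12340000. The old code had
  // to add kGuardPageSize on top of this mask.
  return payload & ~(kPageSize - 1);
}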
data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h CHANGED
@@ -77,11 +77,7 @@ class V8_EXPORT AgeTable final {
       __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
 #else   //! V8_HAS_BUILTIN_CTZ
       // Hardcode and check with assert.
-#if defined(CPPGC_2GB_CAGE)
-      11;
-#else   // !defined(CPPGC_2GB_CAGE)
       12;
-#endif  // !defined(CPPGC_2GB_CAGE)
 #endif  // !V8_HAS_BUILTIN_CTZ
   static_assert((1 << kGranularityBits) == kCardSizeInBytes);
   const size_t entry = offset >> kGranularityBits;
data/vendor/v8/include/cppgc/internal/caged-heap.h CHANGED
@@ -32,16 +32,12 @@ class V8_EXPORT CagedHeapBase {
   }
 
   V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
-#if defined(CPPGC_2GB_CAGE)
-    static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT - 1;
-#else   //! defined(CPPGC_2GB_CAGE)
 #if defined(CPPGC_POINTER_COMPRESSION)
     static constexpr size_t kHeapBaseShift =
         31 + api_constants::kPointerCompressionShift;
 #else   // !defined(CPPGC_POINTER_COMPRESSION)
     static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT;
 #endif  // !defined(CPPGC_POINTER_COMPRESSION)
-#endif  //! defined(CPPGC_2GB_CAGE)
     static_assert((static_cast<size_t>(1) << kHeapBaseShift) ==
                   api_constants::kCagedHeapMaxReservationSize);
     CPPGC_DCHECK(g_heap_base_);
data/vendor/v8/include/cppgc/internal/compiler-specific.h CHANGED
@@ -5,6 +5,8 @@
 #ifndef INCLUDE_CPPGC_INTERNAL_COMPILER_SPECIFIC_H_
 #define INCLUDE_CPPGC_INTERNAL_COMPILER_SPECIFIC_H_
 
+#include "v8config.h"  // NOLINT(build/include_directory)
+
 namespace cppgc {
 
 #if defined(__has_attribute)
@@ -21,7 +23,13 @@ namespace cppgc {
 
 // [[no_unique_address]] comes in C++20 but supported in clang with -std >=
 // c++11.
-#if CPPGC_HAS_CPP_ATTRIBUTE(no_unique_address)
+#if defined(V8_CC_MSVC) && CPPGC_HAS_CPP_ATTRIBUTE(msvc::no_unique_address)
+// Unfortunately MSVC ignores [[no_unique_address]] (see
+// https://devblogs.microsoft.com/cppblog/msvc-cpp20-and-the-std-cpp20-switch/#msvc-extensions-and-abi),
+// and clang-cl matches it for ABI compatibility reasons. We need to prefer
+// [[msvc::no_unique_address]] when available if we actually want any effect.
+#define CPPGC_NO_UNIQUE_ADDRESS [[msvc::no_unique_address]]
+#elif CPPGC_HAS_CPP_ATTRIBUTE(no_unique_address)
 #define CPPGC_NO_UNIQUE_ADDRESS [[no_unique_address]]
 #else
 #define CPPGC_NO_UNIQUE_ADDRESS
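
For context: the reason for the MSVC-specific spelling, shown on a toy struct (sketch, not from the package; sizes are typical for a 64-bit build):

#include <cstdio>

struct Empty {};

struct Without {
  void* ptr;
  Empty policy;  // an empty member still occupies storage, forcing padding
};

struct With {
  void* ptr;
  [[no_unique_address]] Empty policy;  // may overlap ptr's storage
};

int main() {
  // GCC/Clang typically print "16 8"; MSVC ignores the standard attribute
  // and prints "16 16", which is why [[msvc::no_unique_address]] is
  // preferred above when it is available.
  std::printf("%zu %zu\n", sizeof(Without), sizeof(With));
}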
data/vendor/v8/include/cppgc/internal/conditional-stack-allocated.h ADDED
@@ -0,0 +1,41 @@
+// Copyright 2025 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
+#define INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
+
+#include <type_traits>
+
+#include "cppgc/macros.h"       // NOLINT(build/include_directory)
+#include "cppgc/type-traits.h"  // NOLINT(build/include_directory)
+
+namespace cppgc {
+namespace internal {
+
+// Base class that is marked as stack allocated if T is either marked as stack
+// allocated or a traceable type.
+template <typename T>
+class ConditionalStackAllocatedBase;
+
+template <typename T>
+concept RequiresStackAllocated =
+    !std::is_void_v<T> &&
+    (cppgc::IsStackAllocatedType<T> || cppgc::internal::IsTraceableV<T> ||
+     cppgc::IsGarbageCollectedOrMixinTypeV<T>);
+
+template <typename T>
+  requires(RequiresStackAllocated<T>)
+class ConditionalStackAllocatedBase<T> {
+ public:
+  CPPGC_STACK_ALLOCATED();
+};
+
+template <typename T>
+  requires(!RequiresStackAllocated<T>)
+class ConditionalStackAllocatedBase<T> {};
+
+}  // namespace internal
+}  // namespace cppgc
+
+#endif  // INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
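
For context: a base like this lets a wrapper pick up the stack-only restriction exactly when its payload demands it. A sketch (`Wrapper` is a hypothetical name):

#include "cppgc/internal/conditional-stack-allocated.h"

template <typename T>
class Wrapper : public cppgc::internal::ConditionalStackAllocatedBase<T> {
 public:
  explicit Wrapper(T* payload) : payload_(payload) {}

 private:
  T* payload_;
};

// For a traceable or garbage-collected T, CPPGC_STACK_ALLOCATED() applies and
// `new Wrapper<T>` becomes ill-formed; for a plain T such as int, the empty
// specialization is selected and heap allocation stays legal.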
data/vendor/v8/include/cppgc/internal/gc-info.h CHANGED
@@ -94,12 +94,11 @@ struct GCInfoTrait final {
     return index;
   }
 
-  static constexpr bool CheckCallbacksAreDefined() {
+  static constexpr void CheckCallbacksAreDefined() {
     // No USE() macro available.
     (void)static_cast<TraceCallback>(TraceTrait<T>::Trace);
     (void)static_cast<FinalizationCallback>(FinalizerTrait<T>::kCallback);
     (void)static_cast<NameCallback>(NameTrait<T>::GetName);
-    return true;
   }
 };
 
@@ -127,19 +126,22 @@ struct GCInfoFolding final {
   // configuration. Only a single GCInfo (for `ResultType` below) will actually
   // be instantiated but existence (and well-formedness) of all callbacks is
   // checked.
-  static constexpr bool kCheckTypeGuardAlwaysTrue =
-      GCInfoTrait<T>::CheckCallbacksAreDefined() &&
+  static constexpr bool WantToFold() {
+    if constexpr ((kHasVirtualDestructorAtBase ||
+                   kBothTypesAreTriviallyDestructible ||
+                   kHasCustomFinalizerDispatchAtBase) &&
+                  !kWantsDetailedObjectNames) {
+      GCInfoTrait<T>::CheckCallbacksAreDefined();
       GCInfoTrait<ParentMostGarbageCollectedType>::CheckCallbacksAreDefined();
+      return true;
+    }
+    return false;
+  }
 
   // Folding would regress name resolution when deriving names from C++
   // class names as it would just fold a name to the base class name.
   using ResultType =
-      std::conditional_t<kCheckTypeGuardAlwaysTrue &&
-                             (kHasVirtualDestructorAtBase ||
-                              kBothTypesAreTriviallyDestructible ||
-                              kHasCustomFinalizerDispatchAtBase) &&
-                             !kWantsDetailedObjectNames,
-                         ParentMostGarbageCollectedType, T>;
+      std::conditional_t<WantToFold(), ParentMostGarbageCollectedType, T>;
 };
 
 }  // namespace internal
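
For context: replacing the always-true guard with `WantToFold()` leans on `if constexpr`, so the callback checks are only instantiated on the folding path. The pattern, reduced to standard traits (sketch, not from the package):

#include <type_traits>

template <typename T, typename Base>
struct FoldingDemo {
  static constexpr bool WantToFold() {
    if constexpr (std::is_trivially_destructible_v<T> &&
                  std::is_trivially_destructible_v<Base>) {
      // Instantiated only when folding is chosen, mirroring the
      // CheckCallbacksAreDefined() calls above.
      static_assert(std::is_base_of_v<Base, T>);
      return true;
    }
    return false;
  }
  using ResultType = std::conditional_t<WantToFold(), Base, T>;
};

struct B {};
struct D : B {};
static_assert(std::is_same_v<FoldingDemo<D, B>::ResultType, B>);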
data/vendor/v8/include/cppgc/internal/logging.h CHANGED
@@ -20,18 +20,18 @@ FatalImpl(const char*, const SourceLocation& = SourceLocation::Current());
 template <typename>
 struct EatParams {};
 
-#if defined(DEBUG)
+#ifdef CPPGC_ENABLE_API_CHECKS
 #define CPPGC_DCHECK_MSG(condition, message)  \
   do {                                        \
     if (V8_UNLIKELY(!(condition))) {          \
       ::cppgc::internal::DCheckImpl(message); \
     }                                         \
   } while (false)
-#else  // !defined(DEBUG)
+#else  // !CPPGC_ENABLE_API_CHECKS
 #define CPPGC_DCHECK_MSG(condition, message)                \
   (static_cast<void>(::cppgc::internal::EatParams<decltype( \
       static_cast<void>(condition), message)>{}))
-#endif  // !defined(DEBUG)
+#endif  // !CPPGC_ENABLE_API_CHECKS
 
 #define CPPGC_DCHECK(condition) CPPGC_DCHECK_MSG(condition, #condition)
 
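For context: the disabled branch still type-checks the condition without evaluating it, because both operands live inside `decltype`. The trick isolated (sketch, not from the package):

#include <cstdio>

template <typename>
struct EatParams {};

#define DEMO_DCHECK_MSG(condition, message) \
  (static_cast<void>(EatParams<decltype(    \
      static_cast<void>(condition), message)>{}))

int SideEffect() { std::puts("evaluated!"); return 1; }

int main() {
  // Prints nothing: decltype is an unevaluated context, so SideEffect() is
  // type-checked but never called and no code is emitted for the check.
  DEMO_DCHECK_MSG(SideEffect() == 1, "message");
}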
data/vendor/v8/include/cppgc/internal/member-storage.h CHANGED
@@ -10,6 +10,7 @@
 #include <type_traits>
 
 #include "cppgc/internal/api-constants.h"
+#include "cppgc/internal/caged-heap.h"
 #include "cppgc/internal/logging.h"
 #include "cppgc/sentinel-pointer.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
@@ -71,11 +72,17 @@ class V8_EXPORT CageBaseGlobal final {
 
 class V8_TRIVIAL_ABI CompressedPointer final {
  public:
+  struct AtomicInitializerTag {};
+
   using IntegralType = uint32_t;
   static constexpr auto kWriteBarrierSlotType =
       WriteBarrierSlotType::kCompressed;
 
   V8_INLINE CompressedPointer() : value_(0u) {}
+  V8_INLINE explicit CompressedPointer(const void* value,
+                                       AtomicInitializerTag) {
+    StoreAtomic(value);
+  }
   V8_INLINE explicit CompressedPointer(const void* ptr)
       : value_(Compress(ptr)) {}
   V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
@@ -139,17 +146,12 @@ class V8_TRIVIAL_ABI CompressedPointer final {
     CPPGC_DCHECK(
         (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);
 
-#if defined(CPPGC_2GB_CAGE)
-    // Truncate the pointer.
-    auto compressed =
-        static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
-#else   // !defined(CPPGC_2GB_CAGE)
     const auto uptr = reinterpret_cast<uintptr_t>(ptr);
     // Shift the pointer and truncate.
     auto compressed = static_cast<IntegralType>(
         uptr >> api_constants::kPointerCompressionShift);
-#endif  // !defined(CPPGC_2GB_CAGE)
-    // Normal compressed pointers must have the MSB set.
+    // Normal compressed pointers must have the MSB set. This is guaranteed by
+    // the cage alignment.
     CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
                  (compressed & (1 << 31)));
     return compressed;
@@ -158,43 +160,83 @@ class V8_TRIVIAL_ABI CompressedPointer final {
   static V8_INLINE void* Decompress(IntegralType ptr) {
     CPPGC_DCHECK(CageBaseGlobal::IsSet());
     const uintptr_t base = CageBaseGlobal::Get();
-    // Treat compressed pointer as signed and cast it to uint64_t, which will
-    // sign-extend it.
-#if defined(CPPGC_2GB_CAGE)
-    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
-#else   // !defined(CPPGC_2GB_CAGE)
-    // Then, shift the result. It's important to shift the unsigned
-    // value, as otherwise it would result in undefined behavior.
+    return Decompress(ptr, base);
+  }
+
+  static V8_INLINE void* Decompress(IntegralType ptr, uintptr_t base) {
+    CPPGC_DCHECK(CageBaseGlobal::IsSet());
+    CPPGC_DCHECK(base == CageBaseGlobal::Get());
+    // Sign-extend the compressed pointer to full width. This ensures that
+    // normal pointers have only 1s in the base part of the address. It's also
+    // important to shift the unsigned value, as otherwise it would result in
+    // undefined behavior.
     const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
                           << api_constants::kPointerCompressionShift;
-#endif  // !defined(CPPGC_2GB_CAGE)
+    // Set the base part of the address for normal compressed pointers. Note
+    // that nullptr and the sentinel value do not have 1s in the base part and
+    // remain as-is in this operation.
     return reinterpret_cast<void*>(mask & base);
   }
 
+  // For a given memory `address`, this method iterates all possible pointers
+  // that can be reasonably recovered with the current compression scheme and
+  // passes them to `callback`.
+  template <typename Callback>
+  static V8_INLINE void VisitPossiblePointers(const void* address,
+                                              Callback callback);
+
  private:
-#if defined(CPPGC_2GB_CAGE)
-  static constexpr IntegralType kCompressedSentinel =
-      SentinelPointer::kSentinelValue;
-#else   // !defined(CPPGC_2GB_CAGE)
   static constexpr IntegralType kCompressedSentinel =
       SentinelPointer::kSentinelValue >>
       api_constants::kPointerCompressionShift;
-#endif  // !defined(CPPGC_2GB_CAGE)
   // All constructors initialize `value_`. Do not add a default value here as it
   // results in a non-atomic write on some builds, even when the atomic version
   // of the constructor is used.
   IntegralType value_;
 };
 
+template <typename Callback>
+// static
+void CompressedPointer::VisitPossiblePointers(const void* address,
+                                              Callback callback) {
+  const uintptr_t base = CageBaseGlobal::Get();
+  CPPGC_DCHECK(base);
+  // We may have random compressed pointers on stack (e.g. due to inlined
+  // collections). These could be present in both halfwords.
+  const uint32_t compressed_low =
+      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address));
+  callback(CompressedPointer::Decompress(compressed_low, base));
+  const uint32_t compressed_high = static_cast<uint32_t>(
+      reinterpret_cast<uintptr_t>(address) >> (sizeof(uint32_t) * CHAR_BIT));
+  callback(CompressedPointer::Decompress(compressed_high, base));
+  // Iterate possible intermediate values, see `Decompress()`. The intermediate
+  // value of decompressing is a 64-bit value where 35 bits are the offset. We
+  // don't assume sign extension is stored and recover that part.
+  //
+  // Note that this case conveniently also recovers the full pointer.
+  static constexpr uintptr_t kBitForIntermediateValue =
+      (sizeof(uint32_t) * CHAR_BIT) + api_constants::kPointerCompressionShift;
+  static constexpr uintptr_t kSignExtensionMask =
+      ~((uintptr_t{1} << kBitForIntermediateValue) - 1);
+  const uintptr_t intermediate_sign_extended =
+      reinterpret_cast<uintptr_t>(address) | kSignExtensionMask;
+  callback(reinterpret_cast<void*>(intermediate_sign_extended & base));
+}
+
 #endif  // defined(CPPGC_POINTER_COMPRESSION)
 
 class V8_TRIVIAL_ABI RawPointer final {
  public:
+  struct AtomicInitializerTag {};
+
   using IntegralType = uintptr_t;
   static constexpr auto kWriteBarrierSlotType =
       WriteBarrierSlotType::kUncompressed;
 
   V8_INLINE RawPointer() : ptr_(nullptr) {}
+  V8_INLINE explicit RawPointer(const void* ptr, AtomicInitializerTag) {
+    StoreAtomic(ptr);
+  }
   V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
 
   V8_INLINE const void* Load() const { return ptr_; }
@@ -237,6 +279,13 @@ class V8_TRIVIAL_ABI RawPointer final {
     return a.ptr_ >= b.ptr_;
   }
 
+  template <typename Callback>
+  static V8_INLINE void VisitPossiblePointers(const void* address,
+                                              Callback callback) {
+    // Pass along the full pointer.
+    return callback(const_cast<void*>(address));
+  }
+
  private:
   // All constructors initialize `ptr_`. Do not add a default value here as it
   // results in a non-atomic write on some builds, even when the atomic version
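
For context: the compression scheme above, reduced to its arithmetic, assuming `kPointerCompressionShift == 1` and a cage-aligned base as in api-constants.h (sketch, not the cppgc implementation):

#include <cstdint>

constexpr unsigned kShift = 1;  // api_constants::kPointerCompressionShift

uint32_t Compress(uintptr_t ptr) {
  // Drop the shift bit and truncate; cage alignment guarantees the MSB is
  // set for normal in-cage pointers.
  return static_cast<uint32_t>(ptr >> kShift);
}

void* Decompress(uint32_t compressed, uintptr_t cage_base) {
  // Sign-extend so normal pointers carry 1s in the base part, shift back,
  // then AND in the base. nullptr (0) has no 1s in the base part and passes
  // through unchanged.
  const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(compressed))
                        << kShift;
  return reinterpret_cast<void*>(static_cast<uintptr_t>(mask) & cage_base);
}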
data/vendor/v8/include/cppgc/internal/name-trait.h CHANGED
@@ -121,7 +121,11 @@ class NameTrait final : public NameTraitBase {
 #undef PRETTY_FUNCTION_VALUE
 
 #else   // !CPPGC_SUPPORTS_OBJECT_NAMES
-    return {NameProvider::kHiddenName, true};
+    // We wanted to use a class name but were unable to provide one due to
+    // compiler limitations or build configuration. As such, return the hidden
+    // name with name_was_hidden=false, which will cause this object to be
+    // visible in the snapshot.
+    return {NameProvider::kHiddenName, false};
 #endif  // !CPPGC_SUPPORTS_OBJECT_NAMES
   }
 };
data/vendor/v8/include/cppgc/internal/persistent-node.h CHANGED
@@ -18,6 +18,7 @@ namespace internal {
 
 class CrossThreadPersistentRegion;
 class FatalOutOfMemoryHandler;
+class HeapBase;
 class RootVisitor;
 
 // PersistentNode represents a variant of two states:
@@ -133,10 +134,14 @@ class V8_EXPORT PersistentRegionBase {
 };
 
 // Variant of PersistentRegionBase that checks whether the allocation and
-// freeing happens only on the thread that created the region.
+// freeing happens only on the thread that created the heap.
 class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
  public:
-  explicit PersistentRegion(const FatalOutOfMemoryHandler&);
+  V8_INLINE PersistentRegion(const HeapBase& heap,
+                             const FatalOutOfMemoryHandler& oom_handler)
+      : PersistentRegionBase(oom_handler), heap_(heap) {
+    CPPGC_DCHECK(IsCreationThread());
+  }
   // Clears Persistent fields to avoid stale pointers after heap teardown.
   ~PersistentRegion() = default;
 
@@ -161,7 +166,7 @@ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
  private:
   bool IsCreationThread();
 
-  int creation_thread_id_;
+  const HeapBase& heap_;
 };
 
 // CrossThreadPersistent uses PersistentRegionBase but protects it using this
data/vendor/v8/include/cppgc/internal/pointer-policies.h CHANGED
@@ -28,13 +28,19 @@ class WeakMemberTag;
 class UntracedMemberTag;
 
 struct DijkstraWriteBarrierPolicy {
-  V8_INLINE static void InitializingBarrier(const void*, const void*) {
   // Since in initializing writes the source object is always white, having no
   // barrier doesn't break the tri-color invariant.
-  }
+  V8_INLINE static void InitializingBarrier(const void*, const void*) {}
+  V8_INLINE static void InitializingBarrier(const void*, RawPointer storage) {
+  }
+#if defined(CPPGC_POINTER_COMPRESSION)
+  V8_INLINE static void InitializingBarrier(const void*,
+                                            CompressedPointer storage) {}
+#endif
 
-  template <WriteBarrierSlotType SlotType>
-  V8_INLINE static void AssigningBarrier(const void* slot, const void* value) {
+  template <WriteBarrierSlotType SlotType>
+  V8_INLINE static void AssigningBarrier(const void* slot,
+                                         const void* value) {
 #ifdef CPPGC_SLIM_WRITE_BARRIER
     if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
       WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);
@@ -44,7 +50,7 @@ struct DijkstraWriteBarrierPolicy {
     WriteBarrier::GetWriteBarrierType(slot, value, params);
     WriteBarrier(type, params, slot, value);
 #endif  // !CPPGC_SLIM_WRITE_BARRIER
-  }
+  }
 
   template <WriteBarrierSlotType SlotType>
   V8_INLINE static void AssigningBarrier(const void* slot, RawPointer storage) {
@@ -101,6 +107,11 @@ struct DijkstraWriteBarrierPolicy {
 
 struct NoWriteBarrierPolicy {
   V8_INLINE static void InitializingBarrier(const void*, const void*) {}
+  V8_INLINE static void InitializingBarrier(const void*, RawPointer storage) {}
+#if defined(CPPGC_POINTER_COMPRESSION)
+  V8_INLINE static void InitializingBarrier(const void*,
+                                            CompressedPointer storage) {}
+#endif
   template <WriteBarrierSlotType>
   V8_INLINE static void AssigningBarrier(const void*, const void*) {}
   template <WriteBarrierSlotType, typename MemberStorage>
@@ -119,10 +130,29 @@ template <bool kCheckOffHeapAssignments>
 class V8_EXPORT SameThreadEnabledCheckingPolicy
     : private SameThreadEnabledCheckingPolicyBase {
  protected:
+  template <typename T>
+  V8_INLINE void CheckPointer(RawPointer raw_pointer) {
+    if (raw_pointer.IsCleared() || raw_pointer.IsSentinel()) {
+      return;
+    }
+    CheckPointersImplTrampoline<T>::Call(
+        this, static_cast<const T*>(raw_pointer.Load()));
+  }
+#if defined(CPPGC_POINTER_COMPRESSION)
+  template <typename T>
+  V8_INLINE void CheckPointer(CompressedPointer compressed_pointer) {
+    if (compressed_pointer.IsCleared() || compressed_pointer.IsSentinel()) {
+      return;
+    }
+    CheckPointersImplTrampoline<T>::Call(
+        this, static_cast<const T*>(compressed_pointer.Load()));
+  }
+#endif
   template <typename T>
   void CheckPointer(const T* ptr) {
-    if (!ptr || (kSentinelPointer == ptr)) return;
-
+    if (!ptr || (kSentinelPointer == ptr)) {
+      return;
+    }
     CheckPointersImplTrampoline<T>::Call(this, ptr);
   }
 
@@ -145,20 +175,27 @@ class V8_EXPORT SameThreadEnabledCheckingPolicy
 
 class DisabledCheckingPolicy {
  protected:
-  V8_INLINE void CheckPointer(const void*) {}
+  template <typename T>
+  V8_INLINE void CheckPointer(T*) {}
+  template <typename T>
+  V8_INLINE void CheckPointer(RawPointer) {}
+#if defined(CPPGC_POINTER_COMPRESSION)
+  template <typename T>
+  V8_INLINE void CheckPointer(CompressedPointer) {}
+#endif
 };
 
-#ifdef DEBUG
+#ifdef CPPGC_ENABLE_SLOW_API_CHECKS
 // Off-heap members are not connected to the object graph and thus cannot
 // resurrect dead objects.
 using DefaultMemberCheckingPolicy =
     SameThreadEnabledCheckingPolicy<false /* kCheckOffHeapAssignments*/>;
 using DefaultPersistentCheckingPolicy =
     SameThreadEnabledCheckingPolicy<true /* kCheckOffHeapAssignments*/>;
-#else   // !DEBUG
+#else   // !CPPGC_ENABLE_SLOW_API_CHECKS
 using DefaultMemberCheckingPolicy = DisabledCheckingPolicy;
 using DefaultPersistentCheckingPolicy = DisabledCheckingPolicy;
-#endif  // !DEBUG
+#endif  // !CPPGC_ENABLE_SLOW_API_CHECKS
 // For CT(W)P neither marking information (for value), nor object start bitmap
 // (for slot) are guaranteed to be present because there's no synchronization
 // between heaps after marking.
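
For context: the split between initializing and assigning barriers encodes the tri-color argument quoted above. A stubbed sketch of the two paths (`MemberLike` and `ShadeGray` are hypothetical stand-ins for Member and the WriteBarrier machinery):

template <typename T>
class MemberLike {
 public:
  // Initializing write: the owning object was just allocated and is still
  // white (unmarked), so skipping the barrier cannot create a black->white
  // edge and the tri-color invariant holds.
  explicit MemberLike(T* initial) : ptr_(initial) {}

  // Assigning write: the owner may already be black (fully scanned), so the
  // new target is shaded, which is what AssigningBarrier delegates to
  // WriteBarrier::CombinedWriteBarrierSlow for.
  MemberLike& operator=(T* value) {
    ShadeGray(value);
    ptr_ = value;
    return *this;
  }

 private:
  static void ShadeGray(T*) {}  // stub; the real barrier marks the value gray
  T* ptr_;
};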