libv8-node 16.20.0.0-aarch64-linux → 17.9.1.0-aarch64-linux

Files changed (70)
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/location.rb +1 -1
  3. data/ext/libv8-node/paths.rb +1 -1
  4. data/lib/libv8/node/version.rb +3 -3
  5. data/vendor/v8/aarch64-linux/libv8/obj/libv8_monolith.a +0 -0
  6. data/vendor/v8/include/cppgc/allocation.h +11 -5
  7. data/vendor/v8/include/cppgc/cross-thread-persistent.h +78 -26
  8. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +2 -2
  9. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +2 -0
  10. data/vendor/v8/include/cppgc/internal/gc-info.h +90 -10
  11. data/vendor/v8/include/cppgc/internal/name-trait.h +11 -0
  12. data/vendor/v8/include/cppgc/internal/persistent-node.h +44 -12
  13. data/vendor/v8/include/cppgc/internal/pointer-policies.h +22 -11
  14. data/vendor/v8/include/cppgc/internal/write-barrier.h +9 -1
  15. data/vendor/v8/include/cppgc/persistent.h +1 -1
  16. data/vendor/v8/include/cppgc/prefinalizer.h +1 -1
  17. data/vendor/v8/include/v8-array-buffer.h +433 -0
  18. data/vendor/v8/include/v8-callbacks.h +377 -0
  19. data/vendor/v8/include/v8-container.h +129 -0
  20. data/vendor/v8/include/v8-context.h +418 -0
  21. data/vendor/v8/include/v8-cppgc.h +5 -2
  22. data/vendor/v8/include/v8-data.h +65 -0
  23. data/vendor/v8/include/v8-date.h +43 -0
  24. data/vendor/v8/include/v8-debug.h +151 -0
  25. data/vendor/v8/include/v8-embedder-heap.h +238 -0
  26. data/vendor/v8/include/v8-exception.h +224 -0
  27. data/vendor/v8/include/v8-extension.h +62 -0
  28. data/vendor/v8/include/v8-external.h +37 -0
  29. data/vendor/v8/include/v8-fast-api-calls.h +63 -11
  30. data/vendor/v8/include/v8-forward.h +81 -0
  31. data/vendor/v8/include/v8-function-callback.h +475 -0
  32. data/vendor/v8/include/v8-function.h +122 -0
  33. data/vendor/v8/include/v8-initialization.h +282 -0
  34. data/vendor/v8/include/v8-inspector.h +11 -21
  35. data/vendor/v8/include/v8-internal.h +85 -22
  36. data/vendor/v8/include/v8-isolate.h +1662 -0
  37. data/vendor/v8/include/v8-json.h +47 -0
  38. data/vendor/v8/include/v8-local-handle.h +459 -0
  39. data/vendor/v8/include/v8-locker.h +148 -0
  40. data/vendor/v8/include/v8-maybe.h +137 -0
  41. data/vendor/v8/include/v8-memory-span.h +43 -0
  42. data/vendor/v8/include/v8-message.h +241 -0
  43. data/vendor/v8/include/v8-metrics.h +37 -1
  44. data/vendor/v8/include/v8-microtask-queue.h +152 -0
  45. data/vendor/v8/include/v8-microtask.h +28 -0
  46. data/vendor/v8/include/v8-object.h +770 -0
  47. data/vendor/v8/include/v8-persistent-handle.h +590 -0
  48. data/vendor/v8/include/v8-platform.h +41 -17
  49. data/vendor/v8/include/v8-primitive-object.h +118 -0
  50. data/vendor/v8/include/v8-primitive.h +858 -0
  51. data/vendor/v8/include/v8-profiler.h +10 -11
  52. data/vendor/v8/include/v8-promise.h +174 -0
  53. data/vendor/v8/include/v8-proxy.h +50 -0
  54. data/vendor/v8/include/v8-regexp.h +105 -0
  55. data/vendor/v8/include/v8-script.h +771 -0
  56. data/vendor/v8/include/v8-snapshot.h +198 -0
  57. data/vendor/v8/include/v8-statistics.h +215 -0
  58. data/vendor/v8/include/v8-template.h +1052 -0
  59. data/vendor/v8/include/v8-traced-handle.h +605 -0
  60. data/vendor/v8/include/v8-typed-array.h +282 -0
  61. data/vendor/v8/include/v8-unwinder-state.h +4 -3
  62. data/vendor/v8/include/v8-unwinder.h +129 -0
  63. data/vendor/v8/include/v8-util.h +7 -1
  64. data/vendor/v8/include/v8-value-serializer.h +249 -0
  65. data/vendor/v8/include/v8-value.h +526 -0
  66. data/vendor/v8/include/v8-version.h +3 -3
  67. data/vendor/v8/include/v8-wasm.h +245 -0
  68. data/vendor/v8/include/v8-weak-callback-info.h +73 -0
  69. data/vendor/v8/include/v8.h +41 -12604
  70. metadata +47 -5
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 0a4ca9f3f74a7cdbb4215a85251a387e26f5e9b20f8f1d40359033faaeddb47a
- data.tar.gz: c85a20e9b27616947201e71f8180639a16446984a6849f7d3a7186bda4dfe589
+ metadata.gz: b0a9774d31edc0753dbcc5e2f0405ca98090199187eb4d5208f7fb186e11d5e1
+ data.tar.gz: ba5f096814e984f3f1085d6aa73e043bfb9fd4cd65d4844ac8fee8bdf5417198
  SHA512:
- metadata.gz: 8a95013e7203af5f653bdb5d748497d23db02e44d3c6fb27855eab4fe13f4ac6e60c638eccc4a4744b9a8e065e84941670320748d0d757352457dbf580fe4092
- data.tar.gz: e7f5d9b61fb08e75a36fa7ea11813dbde963903a8387ccc9141b293a41d4e9f860014329482cf2c9e4201e930a1a4da41b0260fb8d1d2933e144b57e9ec4417c
+ metadata.gz: 1ff9d63102cb2e10ecf41905dde25100c749aa2f24896f7f533fecf26c22601fc30cfcbdc9179a6218883b244c128faddd0860800db88b59366ef527dd45bcb2
+ data.tar.gz: ad32644a23d97bb64ba2f36f52cc4ae62db98b088801b107ea85b52f01f1a2bfd478f32d6f260ddf389d684a229cfd62419269aaa5b0fab1ea7f7420b3217824
data/ext/libv8-node/location.rb CHANGED
@@ -45,7 +45,7 @@ module Libv8::Node
  def verify_installation!
  include_paths = Libv8::Node::Paths.include_paths

- unless include_paths.detect { |p| Pathname(p).join('v8.h').exist? } # rubocop:disable Style/IfUnlessModifier
+ unless include_paths.detect { |p| Pathname(p).join('v8.h').exist? }
  raise(HeaderNotFound, "Unable to locate 'v8.h' in the libv8 header paths: #{include_paths.inspect}")
  end

data/ext/libv8-node/paths.rb CHANGED
@@ -20,7 +20,7 @@ module Libv8::Node
  end

  def platform
- Gem::Platform.local.tap { |p| RUBY_PLATFORM =~ /musl/ && p.version.nil? && p.instance_eval { @version = 'musl' } }.to_s.gsub(/-darwin-?\d+/, '-darwin')
+ Gem::Platform.local.tap { |p| RUBY_PLATFORM =~ /musl/ && p.version.nil? and p.instance_eval { @version = "musl" } }.to_s.gsub(/-darwin-?\d+/, '-darwin')
  end

  def config
data/lib/libv8/node/version.rb CHANGED
@@ -1,7 +1,7 @@
  module Libv8; end

  module Libv8::Node
- VERSION = '16.20.0.0'.freeze
- NODE_VERSION = '16.20.0'.freeze
- LIBV8_VERSION = '9.4.146.26'.freeze # from v8/include/v8-version.h
+ VERSION = '17.9.1.0'.freeze
+ NODE_VERSION = '17.9.1'.freeze
+ LIBV8_VERSION = '9.6.180.15'.freeze # from v8/include/v8-version.h
  end
data/vendor/v8/include/cppgc/allocation.h CHANGED
@@ -36,8 +36,13 @@ class V8_EXPORT MakeGarbageCollectedTraitInternal {
  const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(
  reinterpret_cast<const uint8_t*>(payload) -
  api_constants::kFullyConstructedBitFieldOffsetFromPayload)));
- atomic_mutable_bitfield->fetch_or(api_constants::kFullyConstructedBitMask,
- std::memory_order_release);
+ // It's safe to split use load+store here (instead of a read-modify-write
+ // operation), since it's guaranteed that this 16-bit bitfield is only
+ // modified by a single thread. This is cheaper in terms of code bloat (on
+ // ARM) and performance.
+ uint16_t value = atomic_mutable_bitfield->load(std::memory_order_relaxed);
+ value |= api_constants::kFullyConstructedBitMask;
+ atomic_mutable_bitfield->store(value, std::memory_order_release);
  }

  template <typename U, typename CustomSpace>
@@ -202,7 +207,7 @@ struct PostConstructionCallbackTrait {
  * \returns an instance of type T.
  */
  template <typename T, typename... Args>
- T* MakeGarbageCollected(AllocationHandle& handle, Args&&... args) {
+ V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle, Args&&... args) {
  T* object =
  MakeGarbageCollectedTrait<T>::Call(handle, std::forward<Args>(args)...);
  PostConstructionCallbackTrait<T>::Call(object);
@@ -220,8 +225,9 @@ T* MakeGarbageCollected(AllocationHandle& handle, Args&&... args) {
  * \returns an instance of type T.
  */
  template <typename T, typename... Args>
- T* MakeGarbageCollected(AllocationHandle& handle,
- AdditionalBytes additional_bytes, Args&&... args) {
+ V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle,
+ AdditionalBytes additional_bytes,
+ Args&&... args) {
  T* object = MakeGarbageCollectedTrait<T>::Call(handle, additional_bytes,
  std::forward<Args>(args)...);
  PostConstructionCallbackTrait<T>::Call(object);
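
The comment introduced above justifies replacing an atomic fetch_or with a split load/store because only one thread ever mutates the bitfield. A minimal, self-contained sketch of that trade-off (illustrative names only, not V8 code):

```cpp
#include <atomic>
#include <cstdint>

constexpr uint16_t kFullyConstructedBit = 1u << 0;

// Read-modify-write: safe with concurrent writers, but compiles to a heavier
// sequence (e.g. an LDREX/STREX loop on 32-bit ARM).
void SetBitRmw(std::atomic<uint16_t>& bits) {
  bits.fetch_or(kFullyConstructedBit, std::memory_order_release);
}

// Split load + store: valid only because a single thread writes the bitfield;
// readers on other threads pair this with acquire loads.
void SetBitSplit(std::atomic<uint16_t>& bits) {
  uint16_t value = bits.load(std::memory_order_relaxed);
  value |= kFullyConstructedBit;
  bits.store(value, std::memory_order_release);
}
```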
data/vendor/v8/include/cppgc/cross-thread-persistent.h CHANGED
@@ -34,7 +34,35 @@ class CrossThreadPersistentBase : public PersistentBase {
  V8_CLANG_NO_SANITIZE("address")
  void ClearFromGC() const {
  raw_ = nullptr;
- node_ = nullptr;
+ SetNodeSafe(nullptr);
+ }
+
+ // GetNodeSafe() can be used for a thread-safe IsValid() check in a
+ // double-checked locking pattern. See ~BasicCrossThreadPersistent.
+ PersistentNode* GetNodeSafe() const {
+ return reinterpret_cast<std::atomic<PersistentNode*>*>(&node_)->load(
+ std::memory_order_acquire);
+ }
+
+ // The GC writes using SetNodeSafe() while holding the lock.
+ V8_CLANG_NO_SANITIZE("address")
+ void SetNodeSafe(PersistentNode* value) const {
+ #if defined(__has_feature)
+ #if __has_feature(address_sanitizer)
+ #define V8_IS_ASAN 1
+ #endif
+ #endif
+
+ #ifdef V8_IS_ASAN
+ __atomic_store(&node_, &value, __ATOMIC_RELEASE);
+ #else // !V8_IS_ASAN
+ // Non-ASAN builds can use atomics. This also covers MSVC which does not
+ // have the __atomic_store intrinsic.
+ reinterpret_cast<std::atomic<PersistentNode*>*>(&node_)->store(
+ value, std::memory_order_release);
+ #endif // !V8_IS_ASAN
+
+ #undef V8_IS_ASAN
  }
  };

@@ -48,7 +76,31 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  using typename WeaknessPolicy::IsStrongPersistent;
  using PointeeType = T;

- ~BasicCrossThreadPersistent() { Clear(); }
+ ~BasicCrossThreadPersistent() {
+ // This implements fast path for destroying empty/sentinel.
+ //
+ // Simplified version of `AssignUnsafe()` to allow calling without a
+ // complete type `T`. Uses double-checked locking with a simple thread-safe
+ // check for a valid handle based on a node.
+ if (GetNodeSafe()) {
+ PersistentRegionLock guard;
+ const void* old_value = GetValue();
+ // The fast path check (GetNodeSafe()) does not acquire the lock. Recheck
+ // validity while holding the lock to ensure the reference has not been
+ // cleared.
+ if (IsValid(old_value)) {
+ CrossThreadPersistentRegion& region =
+ this->GetPersistentRegion(old_value);
+ region.FreeNode(GetNode());
+ SetNode(nullptr);
+ } else {
+ CPPGC_DCHECK(!GetNode());
+ }
+ }
+ // No need to call SetValue() as the handle is not used anymore. This can
+ // leave behind stale sentinel values but will always destroy the underlying
+ // node.
+ }

  BasicCrossThreadPersistent(
  const SourceLocation& loc = SourceLocation::Current())
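
The new destructor above is a double-checked locking pattern: a lock-free check of the node pointer decides whether the slow path is needed at all, and the state is re-validated once the region lock is held. A hedged sketch of the same shape with stand-in names (region_mutex and node are illustrative, not cppgc API):

```cpp
#include <atomic>
#include <mutex>

struct Node { /* bookkeeping for one persistent handle */ };

std::mutex region_mutex;           // stands in for PersistentRegionLock
std::atomic<Node*> node{nullptr};  // stands in for the handle's node_ slot

void ReleaseHandle() {
  // Fast path: empty/sentinel handles skip the lock entirely.
  if (node.load(std::memory_order_acquire) == nullptr) return;
  std::lock_guard<std::mutex> guard(region_mutex);
  // Recheck under the lock: the GC may have cleared the node concurrently.
  if (Node* n = node.load(std::memory_order_relaxed)) {
    delete n;  // the real code returns the node to the region's free list
    node.store(nullptr, std::memory_order_relaxed);
  }
}
```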
@@ -135,7 +187,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  BasicCrossThreadPersistent& operator=(
  const BasicCrossThreadPersistent& other) {
  PersistentRegionLock guard;
- AssignUnsafe(other.Get());
+ AssignSafe(guard, other.Get());
  return *this;
  }

@@ -147,7 +199,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  OtherLocationPolicy,
  OtherCheckingPolicy>& other) {
  PersistentRegionLock guard;
- AssignUnsafe(other.Get());
+ AssignSafe(guard, other.Get());
  return *this;
  }

@@ -165,8 +217,13 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  return *this;
  }

+ /**
+ * Assigns a raw pointer.
+ *
+ * Note: **Not thread-safe.**
+ */
  BasicCrossThreadPersistent& operator=(T* other) {
- Assign(other);
+ AssignUnsafe(other);
  return *this;
  }

@@ -181,13 +238,24 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  return operator=(member.Get());
  }

+ /**
+ * Assigns a nullptr.
+ *
+ * \returns the handle.
+ */
  BasicCrossThreadPersistent& operator=(std::nullptr_t) {
  Clear();
  return *this;
  }

+ /**
+ * Assigns the sentinel pointer.
+ *
+ * \returns the handle.
+ */
  BasicCrossThreadPersistent& operator=(SentinelPointer s) {
- Assign(s);
+ PersistentRegionLock guard;
+ AssignSafe(guard, s);
  return *this;
  }

@@ -209,24 +277,8 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  * Clears the stored object.
  */
  void Clear() {
- // Simplified version of `Assign()` to allow calling without a complete type
- // `T`.
- const void* old_value = GetValue();
- if (IsValid(old_value)) {
- PersistentRegionLock guard;
- old_value = GetValue();
- // The fast path check (IsValid()) does not acquire the lock. Reload
- // the value to ensure the reference has not been cleared.
- if (IsValid(old_value)) {
- CrossThreadPersistentRegion& region =
- this->GetPersistentRegion(old_value);
- region.FreeNode(GetNode());
- SetNode(nullptr);
- } else {
- CPPGC_DCHECK(!GetNode());
- }
- }
- SetValue(nullptr);
+ PersistentRegionLock guard;
+ AssignSafe(guard, nullptr);
  }

  /**
@@ -302,7 +354,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  v->TraceRoot(*handle, handle->Location());
  }

- void Assign(T* ptr) {
+ void AssignUnsafe(T* ptr) {
  const void* old_value = GetValue();
  if (IsValid(old_value)) {
  PersistentRegionLock guard;
@@ -330,7 +382,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
  this->CheckPointer(ptr);
  }

- void AssignUnsafe(T* ptr) {
+ void AssignSafe(PersistentRegionLock&, T* ptr) {
  PersistentRegionLock::AssertLocked();
  const void* old_value = GetValue();
  if (IsValid(old_value)) {
data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h CHANGED
@@ -53,10 +53,10 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
  #endif // CPPGC_YOUNG_GENERATION

  struct CagedHeapLocalData final {
- explicit CagedHeapLocalData(HeapBase* heap_base) : heap_base(heap_base) {}
+ CagedHeapLocalData(HeapBase&, PageAllocator&);

  bool is_incremental_marking_in_progress = false;
- HeapBase* heap_base = nullptr;
+ HeapBase& heap_base;
  #if defined(CPPGC_YOUNG_GENERATION)
  AgeTable age_table;
  #endif
data/vendor/v8/include/cppgc/internal/finalizer-trait.h CHANGED
@@ -76,6 +76,8 @@ struct FinalizerTrait {
  }

  public:
+ static constexpr bool HasFinalizer() { return kNonTrivialFinalizer; }
+
  // The callback used to finalize an object of type T.
  static constexpr FinalizationCallback kCallback =
  kNonTrivialFinalizer ? Finalize : nullptr;
data/vendor/v8/include/cppgc/internal/gc-info.h CHANGED
@@ -19,11 +19,94 @@ namespace internal {

  using GCInfoIndex = uint16_t;

- // Acquires a new GC info object and returns the index. In addition, also
- // updates `registered_index` atomically.
- V8_EXPORT GCInfoIndex
- EnsureGCInfoIndex(std::atomic<GCInfoIndex>& registered_index,
- FinalizationCallback, TraceCallback, NameCallback, bool);
+ struct V8_EXPORT EnsureGCInfoIndexTrait final {
+ // Acquires a new GC info object and returns the index. In addition, also
+ // updates `registered_index` atomically.
+ template <typename T>
+ V8_INLINE static GCInfoIndex EnsureIndex(
+ std::atomic<GCInfoIndex>& registered_index) {
+ return EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
+ }
+
+ private:
+ template <typename T, bool = std::is_polymorphic<T>::value,
+ bool = FinalizerTrait<T>::HasFinalizer(),
+ bool = NameTrait<T>::HasNonHiddenName()>
+ struct EnsureGCInfoIndexTraitDispatch;
+
+ static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback,
+ FinalizationCallback,
+ NameCallback);
+ static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback,
+ FinalizationCallback);
+ static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback, NameCallback);
+ static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback);
+ static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback,
+ FinalizationCallback,
+
+ NameCallback);
+ static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback,
+ FinalizationCallback);
+ static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback,
+ NameCallback);
+ static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+ TraceCallback);
+ };
+
+ #define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \
+ template <typename T> \
+ struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \
+ T, is_polymorphic, has_finalizer, has_non_hidden_name> { \
+ V8_INLINE GCInfoIndex \
+ operator()(std::atomic<GCInfoIndex>& registered_index) { \
+ return function; \
+ } \
+ };
+
+ // --------------------------------------------------------------------- //
+ // DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function)
+ // --------------------------------------------------------------------- //
+ DISPATCH(true, true, true, //
+ EnsureGCInfoIndexPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ FinalizerTrait<T>::kCallback, //
+ NameTrait<T>::GetName)) //
+ DISPATCH(true, true, false, //
+ EnsureGCInfoIndexPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ FinalizerTrait<T>::kCallback)) //
+ DISPATCH(true, false, true, //
+ EnsureGCInfoIndexPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ NameTrait<T>::GetName)) //
+ DISPATCH(true, false, false, //
+ EnsureGCInfoIndexPolymorphic(registered_index, //
+ TraceTrait<T>::Trace)) //
+ DISPATCH(false, true, true, //
+ EnsureGCInfoIndexNonPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ FinalizerTrait<T>::kCallback, //
+ NameTrait<T>::GetName)) //
+ DISPATCH(false, true, false, //
+ EnsureGCInfoIndexNonPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ FinalizerTrait<T>::kCallback)) //
+ DISPATCH(false, false, true, //
+ EnsureGCInfoIndexNonPolymorphic(registered_index, //
+ TraceTrait<T>::Trace, //
+ NameTrait<T>::GetName)) //
+ DISPATCH(false, false, false, //
+ EnsureGCInfoIndexNonPolymorphic(registered_index, //
+ TraceTrait<T>::Trace)) //
+
+ #undef DISPATCH

  // Fold types based on finalizer behavior. Note that finalizer characteristics
  // align with trace behavior, i.e., destructors are virtual when trace methods
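
EnsureGCInfoIndexTraitDispatch above turns three boolean trait values into template parameters, so the right registration function is chosen at compile time with no runtime branching. A reduced sketch of the same technique (names such as RegisterDispatch are invented for illustration):

```cpp
#include <cstdio>
#include <type_traits>

int RegisterPolymorphic() { return 1; }
int RegisterNonPolymorphic() { return 2; }

// Primary template: the boolean default is computed from the type's traits.
template <typename T, bool = std::is_polymorphic<T>::value>
struct RegisterDispatch;

template <typename T>
struct RegisterDispatch<T, true> {
  int operator()() const { return RegisterPolymorphic(); }
};

template <typename T>
struct RegisterDispatch<T, false> {
  int operator()() const { return RegisterNonPolymorphic(); }
};

template <typename T>
int Register() {
  return RegisterDispatch<T>{}();  // specialization picked at compile time
}

struct Plain {};
struct Virtual { virtual ~Virtual() = default; };

int main() {
  std::printf("%d %d\n", Register<Plain>(), Register<Virtual>());  // "2 1"
}
```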
@@ -57,16 +140,13 @@ struct GCInfoFolding {
  // finalization, and naming.
  template <typename T>
  struct GCInfoTrait final {
- static GCInfoIndex Index() {
+ V8_INLINE static GCInfoIndex Index() {
  static_assert(sizeof(T), "T must be fully defined");
  static std::atomic<GCInfoIndex>
  registered_index; // Uses zero initialization.
  const GCInfoIndex index = registered_index.load(std::memory_order_acquire);
  return index ? index
- : EnsureGCInfoIndex(
- registered_index, FinalizerTrait<T>::kCallback,
- TraceTrait<T>::Trace, NameTrait<T>::GetName,
- std::is_polymorphic<T>::value);
+ : EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
  }
  };

data/vendor/v8/include/cppgc/internal/name-trait.h CHANGED
@@ -6,6 +6,7 @@
  #define INCLUDE_CPPGC_INTERNAL_NAME_TRAIT_H_

  #include <cstddef>
+ #include <type_traits>

  #include "cppgc/name-provider.h"
  #include "v8config.h" // NOLINT(build/include_directory)
@@ -67,6 +68,16 @@ class V8_EXPORT NameTraitBase {
  template <typename T>
  class NameTrait final : public NameTraitBase {
  public:
+ static constexpr bool HasNonHiddenName() {
+ #if CPPGC_SUPPORTS_COMPILE_TIME_TYPENAME
+ return true;
+ #elif CPPGC_SUPPORTS_OBJECT_NAMES
+ return true;
+ #else // !CPPGC_SUPPORTS_OBJECT_NAMES
+ return std::is_base_of<NameProvider, T>::value;
+ #endif // !CPPGC_SUPPORTS_OBJECT_NAMES
+ }
+
  static HeapObjectName GetName(const void* obj) {
  return GetNameFor(static_cast<const T*>(obj));
  }
data/vendor/v8/include/cppgc/internal/persistent-node.h CHANGED
@@ -75,16 +75,16 @@ class PersistentNode final {
  TraceCallback trace_ = nullptr;
  };

- class V8_EXPORT PersistentRegion {
+ class V8_EXPORT PersistentRegionBase {
  using PersistentNodeSlots = std::array<PersistentNode, 256u>;

  public:
- PersistentRegion() = default;
+ PersistentRegionBase() = default;
  // Clears Persistent fields to avoid stale pointers after heap teardown.
- ~PersistentRegion();
+ ~PersistentRegionBase();

- PersistentRegion(const PersistentRegion&) = delete;
- PersistentRegion& operator=(const PersistentRegion&) = delete;
+ PersistentRegionBase(const PersistentRegionBase&) = delete;
+ PersistentRegionBase& operator=(const PersistentRegionBase&) = delete;

  PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
  if (!free_list_head_) {
@@ -126,8 +126,39 @@ class V8_EXPORT PersistentRegion {
  friend class CrossThreadPersistentRegion;
  };

- // CrossThreadPersistent uses PersistentRegion but protects it using this lock
- // when needed.
+ // Variant of PersistentRegionBase that checks whether the allocation and
+ // freeing happens only on the thread that created the region.
+ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
+ public:
+ PersistentRegion();
+ // Clears Persistent fields to avoid stale pointers after heap teardown.
+ ~PersistentRegion() = default;
+
+ PersistentRegion(const PersistentRegion&) = delete;
+ PersistentRegion& operator=(const PersistentRegion&) = delete;
+
+ V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
+ #if V8_ENABLE_CHECKS
+ CheckIsCreationThread();
+ #endif // V8_ENABLE_CHECKS
+ return PersistentRegionBase::AllocateNode(owner, trace);
+ }
+
+ V8_INLINE void FreeNode(PersistentNode* node) {
+ #if V8_ENABLE_CHECKS
+ CheckIsCreationThread();
+ #endif // V8_ENABLE_CHECKS
+ PersistentRegionBase::FreeNode(node);
+ }
+
+ private:
+ void CheckIsCreationThread();
+
+ int creation_thread_id_;
+ };
+
+ // CrossThreadPersistent uses PersistentRegionBase but protects it using this
+ // lock when needed.
  class V8_EXPORT PersistentRegionLock final {
  public:
  PersistentRegionLock();
@@ -136,9 +167,10 @@ class V8_EXPORT PersistentRegionLock final {
  static void AssertLocked();
  };

- // Variant of PersistentRegion that checks whether the PersistentRegionLock is
- // locked.
- class V8_EXPORT CrossThreadPersistentRegion final : protected PersistentRegion {
+ // Variant of PersistentRegionBase that checks whether the PersistentRegionLock
+ // is locked.
+ class V8_EXPORT CrossThreadPersistentRegion final
+ : protected PersistentRegionBase {
  public:
  CrossThreadPersistentRegion() = default;
  // Clears Persistent fields to avoid stale pointers after heap teardown.
@@ -150,12 +182,12 @@ class V8_EXPORT CrossThreadPersistentRegion final : protected PersistentRegion {

  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
  PersistentRegionLock::AssertLocked();
- return PersistentRegion::AllocateNode(owner, trace);
+ return PersistentRegionBase::AllocateNode(owner, trace);
  }

  V8_INLINE void FreeNode(PersistentNode* node) {
  PersistentRegionLock::AssertLocked();
- PersistentRegion::FreeNode(node);
+ PersistentRegionBase::FreeNode(node);
  }

  void Trace(Visitor*);
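
The new PersistentRegion above records the thread that created it and, in checked builds, verifies that nodes are only allocated and freed from that thread. A hedged sketch of that check using the standard library (the real class stores an integer thread id and implements the check out of line; SingleThreadRegion and std::thread::id here are purely illustrative):

```cpp
#include <cassert>
#include <thread>

class SingleThreadRegion {
 public:
  SingleThreadRegion() : creation_thread_id_(std::this_thread::get_id()) {}

  void AllocateNode() {
    CheckIsCreationThread();
    // ... hand out a node from the free list ...
  }

  void FreeNode() {
    CheckIsCreationThread();
    // ... return the node to the free list ...
  }

 private:
  void CheckIsCreationThread() const {
    // Fires when a same-thread Persistent is touched from a foreign thread.
    assert(std::this_thread::get_id() == creation_thread_id_);
  }

  const std::thread::id creation_thread_id_;
};
```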
data/vendor/v8/include/cppgc/internal/pointer-policies.h CHANGED
@@ -51,7 +51,17 @@ struct NoWriteBarrierPolicy {
  static void AssigningBarrier(const void*, const void*) {}
  };

- class V8_EXPORT EnabledCheckingPolicy {
+ class V8_EXPORT SameThreadEnabledCheckingPolicyBase {
+ protected:
+ void CheckPointerImpl(const void* ptr, bool points_to_payload,
+ bool check_off_heap_assignments);
+
+ const HeapBase* heap_ = nullptr;
+ };
+
+ template <bool kCheckOffHeapAssignments>
+ class V8_EXPORT SameThreadEnabledCheckingPolicy
+ : private SameThreadEnabledCheckingPolicyBase {
  protected:
  template <typename T>
  void CheckPointer(const T* ptr) {
@@ -61,23 +71,20 @@ class V8_EXPORT EnabledCheckingPolicy {
  }

  private:
- void CheckPointerImpl(const void* ptr, bool points_to_payload);
-
  template <typename T, bool = IsCompleteV<T>>
  struct CheckPointersImplTrampoline {
- static void Call(EnabledCheckingPolicy* policy, const T* ptr) {
- policy->CheckPointerImpl(ptr, false);
+ static void Call(SameThreadEnabledCheckingPolicy* policy, const T* ptr) {
+ policy->CheckPointerImpl(ptr, false, kCheckOffHeapAssignments);
  }
  };

  template <typename T>
  struct CheckPointersImplTrampoline<T, true> {
- static void Call(EnabledCheckingPolicy* policy, const T* ptr) {
- policy->CheckPointerImpl(ptr, IsGarbageCollectedTypeV<T>);
+ static void Call(SameThreadEnabledCheckingPolicy* policy, const T* ptr) {
+ policy->CheckPointerImpl(ptr, IsGarbageCollectedTypeV<T>,
+ kCheckOffHeapAssignments);
  }
  };
-
- const HeapBase* heap_ = nullptr;
  };

  class DisabledCheckingPolicy {
@@ -86,8 +93,12 @@ class DisabledCheckingPolicy {
  };

  #if V8_ENABLE_CHECKS
- using DefaultMemberCheckingPolicy = EnabledCheckingPolicy;
- using DefaultPersistentCheckingPolicy = EnabledCheckingPolicy;
+ // Off heap members are not connected to object graph and thus cannot ressurect
+ // dead objects.
+ using DefaultMemberCheckingPolicy =
+ SameThreadEnabledCheckingPolicy<false /* kCheckOffHeapAssignments*/>;
+ using DefaultPersistentCheckingPolicy =
+ SameThreadEnabledCheckingPolicy<true /* kCheckOffHeapAssignments*/>;
  #else
  using DefaultMemberCheckingPolicy = DisabledCheckingPolicy;
  using DefaultPersistentCheckingPolicy = DisabledCheckingPolicy;
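
The checking policy is now a class template parameterized on kCheckOffHeapAssignments, and build flags decide which policy the default aliases name. A condensed sketch of this policy-selection pattern (all identifiers below are illustrative, not cppgc API):

```cpp
class NoChecks {
 protected:
  void Check(const void*) {}  // compiles down to nothing
};

template <bool kStrict>
class ThreadChecks {
 protected:
  void Check(const void* ptr) {
    // A real policy would verify heap and thread invariants here; kStrict
    // plays the role of kCheckOffHeapAssignments.
    (void)ptr;
  }
};

#ifdef ENABLE_CHECKS  // stand-in for V8_ENABLE_CHECKS
using DefaultCheckingPolicy = ThreadChecks<true>;
#else
using DefaultCheckingPolicy = NoChecks;
#endif

template <typename T, typename CheckingPolicy = DefaultCheckingPolicy>
class Handle : private CheckingPolicy {
 public:
  explicit Handle(T* ptr) : ptr_(ptr) { this->Check(ptr); }

 private:
  T* ptr_;
};
```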
data/vendor/v8/include/cppgc/internal/write-barrier.h CHANGED
@@ -214,6 +214,11 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
  WriteBarrier::Params& params,
  HeapHandleCallback) {
+ #if !defined(CPPGC_YOUNG_GENERATION)
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+ }
+ #endif // !CPPGC_YOUNG_GENERATION
  bool within_cage = TryGetCagedHeap(slot, value, params);
  if (!within_cage) {
  return WriteBarrier::Type::kNone;
@@ -317,7 +322,10 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
  HeapHandleCallback callback) {
  // The following check covers nullptr as well as sentinel pointer.
  if (object <= static_cast<void*>(kSentinelPointer)) {
- return WriteBarrier::Type::kNone;
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
+ }
+ if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
+ return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
  if (IsMarking(object, &params.heap)) {
  return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
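
Both hunks add the same fast path: if no incremental or concurrent marking is running, the barrier type is kNone and the more expensive cage/heap lookups are skipped. A hedged sketch of that early-out shape (the global flag and names are invented for illustration):

```cpp
#include <atomic>

std::atomic<bool> g_any_marking{false};  // stand-in for the process-wide flag

enum class BarrierType { kNone, kMarking };

BarrierType GetWriteBarrierType(const void* slot, const void* value) {
  // Common case: no marker is active, so no barrier work is needed at all.
  if (!g_any_marking.load(std::memory_order_relaxed)) {
    return BarrierType::kNone;
  }
  // Slow path: resolve which heap the slot belongs to and consult its
  // marking state before deciding on a barrier.
  (void)slot;
  (void)value;
  return BarrierType::kMarking;
}
```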
data/vendor/v8/include/cppgc/persistent.h CHANGED
@@ -45,7 +45,7 @@ class PersistentBase {
  mutable const void* raw_ = nullptr;
  mutable PersistentNode* node_ = nullptr;

- friend class PersistentRegion;
+ friend class PersistentRegionBase;
  };

  // The basic class from which all Persistent classes are generated.
data/vendor/v8/include/cppgc/prefinalizer.h CHANGED
@@ -38,7 +38,7 @@ class PrefinalizerRegistration final {
  "Only garbage collected objects can have prefinalizers"); \
  Class* self = static_cast<Class*>(object); \
  if (liveness_broker.IsHeapObjectAlive(self)) return false; \
- self->Class::PreFinalizer(); \
+ self->PreFinalizer(); \
  return true; \
  } \
  \
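
The one-line change above drops the `Class::` qualification, which changes how the call binds: a qualified member call is resolved statically against that class, while an unqualified call uses ordinary member lookup, including inherited and virtual overriders. A small stand-alone illustration (the types here are invented, not cppgc code):

```cpp
#include <iostream>

struct Base {
  virtual ~Base() = default;
  virtual void PreFinalizer() { std::cout << "Base\n"; }
};

struct Derived : Base {
  void PreFinalizer() override { std::cout << "Derived\n"; }
};

int main() {
  Derived d;
  Base* self = &d;
  self->Base::PreFinalizer();  // qualified: statically bound, prints "Base"
  self->PreFinalizer();        // unqualified: virtual dispatch, prints "Derived"
}
```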