libv8-node 16.10.0.0-x86_64-darwin → 18.8.0.0-x86_64-darwin

Files changed (89)
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/paths.rb +1 -1
  3. data/lib/libv8/node/version.rb +3 -3
  4. data/vendor/v8/include/cppgc/allocation.h +100 -22
  5. data/vendor/v8/include/cppgc/cross-thread-persistent.h +114 -33
  6. data/vendor/v8/include/cppgc/default-platform.h +2 -10
  7. data/vendor/v8/include/cppgc/explicit-management.h +22 -4
  8. data/vendor/v8/include/cppgc/garbage-collected.h +15 -26
  9. data/vendor/v8/include/cppgc/heap-consistency.h +30 -0
  10. data/vendor/v8/include/cppgc/heap-state.h +12 -0
  11. data/vendor/v8/include/cppgc/heap.h +7 -2
  12. data/vendor/v8/include/cppgc/internal/api-constants.h +8 -0
  13. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +25 -14
  14. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +4 -1
  15. data/vendor/v8/include/cppgc/internal/gc-info.h +90 -10
  16. data/vendor/v8/include/cppgc/internal/logging.h +3 -3
  17. data/vendor/v8/include/cppgc/internal/name-trait.h +11 -0
  18. data/vendor/v8/include/cppgc/internal/persistent-node.h +73 -29
  19. data/vendor/v8/include/cppgc/internal/pointer-policies.h +26 -15
  20. data/vendor/v8/include/cppgc/internal/write-barrier.h +62 -23
  21. data/vendor/v8/include/cppgc/liveness-broker.h +4 -1
  22. data/vendor/v8/include/cppgc/member.h +7 -2
  23. data/vendor/v8/include/cppgc/persistent.h +38 -33
  24. data/vendor/v8/include/cppgc/platform.h +4 -1
  25. data/vendor/v8/include/cppgc/prefinalizer.h +35 -12
  26. data/vendor/v8/include/cppgc/testing.h +9 -2
  27. data/vendor/v8/include/cppgc/type-traits.h +6 -13
  28. data/vendor/v8/include/cppgc/visitor.h +9 -7
  29. data/vendor/v8/include/libplatform/libplatform.h +0 -11
  30. data/vendor/v8/include/libplatform/v8-tracing.h +0 -1
  31. data/vendor/v8/include/v8-array-buffer.h +445 -0
  32. data/vendor/v8/include/v8-callbacks.h +397 -0
  33. data/vendor/v8/include/v8-container.h +129 -0
  34. data/vendor/v8/include/v8-context.h +407 -0
  35. data/vendor/v8/include/v8-cppgc.h +21 -128
  36. data/vendor/v8/include/v8-data.h +80 -0
  37. data/vendor/v8/include/v8-date.h +43 -0
  38. data/vendor/v8/include/v8-debug.h +168 -0
  39. data/vendor/v8/include/v8-embedder-heap.h +218 -0
  40. data/vendor/v8/include/v8-embedder-state-scope.h +51 -0
  41. data/vendor/v8/include/v8-exception.h +217 -0
  42. data/vendor/v8/include/v8-extension.h +62 -0
  43. data/vendor/v8/include/v8-external.h +37 -0
  44. data/vendor/v8/include/v8-fast-api-calls.h +172 -24
  45. data/vendor/v8/include/v8-forward.h +81 -0
  46. data/vendor/v8/include/v8-function-callback.h +475 -0
  47. data/vendor/v8/include/v8-function.h +125 -0
  48. data/vendor/v8/include/v8-initialization.h +315 -0
  49. data/vendor/v8/include/v8-inspector.h +56 -28
  50. data/vendor/v8/include/v8-internal.h +217 -55
  51. data/vendor/v8/include/v8-isolate.h +1709 -0
  52. data/vendor/v8/include/v8-json.h +47 -0
  53. data/vendor/v8/include/v8-local-handle.h +455 -0
  54. data/vendor/v8/include/v8-locker.h +149 -0
  55. data/vendor/v8/include/v8-maybe.h +137 -0
  56. data/vendor/v8/include/v8-memory-span.h +43 -0
  57. data/vendor/v8/include/v8-message.h +216 -0
  58. data/vendor/v8/include/v8-metrics.h +69 -16
  59. data/vendor/v8/include/v8-microtask-queue.h +152 -0
  60. data/vendor/v8/include/v8-microtask.h +28 -0
  61. data/vendor/v8/include/v8-object.h +775 -0
  62. data/vendor/v8/include/v8-persistent-handle.h +590 -0
  63. data/vendor/v8/include/v8-platform.h +400 -17
  64. data/vendor/v8/include/v8-primitive-object.h +118 -0
  65. data/vendor/v8/include/v8-primitive.h +866 -0
  66. data/vendor/v8/include/v8-profiler.h +88 -13
  67. data/vendor/v8/include/v8-promise.h +174 -0
  68. data/vendor/v8/include/v8-proxy.h +50 -0
  69. data/vendor/v8/include/v8-regexp.h +105 -0
  70. data/vendor/v8/include/v8-script.h +747 -0
  71. data/vendor/v8/include/v8-snapshot.h +196 -0
  72. data/vendor/v8/include/v8-statistics.h +217 -0
  73. data/vendor/v8/include/v8-template.h +1079 -0
  74. data/vendor/v8/include/v8-traced-handle.h +420 -0
  75. data/vendor/v8/include/v8-typed-array.h +282 -0
  76. data/vendor/v8/include/v8-unwinder-state.h +4 -3
  77. data/vendor/v8/include/v8-unwinder.h +132 -0
  78. data/vendor/v8/include/v8-util.h +7 -1
  79. data/vendor/v8/include/v8-value-serializer-version.h +1 -1
  80. data/vendor/v8/include/v8-value-serializer.h +279 -0
  81. data/vendor/v8/include/v8-value.h +526 -0
  82. data/vendor/v8/include/v8-version.h +4 -4
  83. data/vendor/v8/include/v8-wasm.h +257 -0
  84. data/vendor/v8/include/v8-weak-callback-info.h +87 -0
  85. data/vendor/v8/include/v8.h +41 -12601
  86. data/vendor/v8/include/v8config.h +102 -12
  87. data/vendor/v8/x86_64-darwin/libv8/obj/libv8_monolith.a +0 -0
  88. metadata +47 -5
  89. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +0 -30
data/vendor/v8/include/cppgc/heap-state.h
@@ -38,6 +38,18 @@ class V8_EXPORT HeapState final {
    */
   static bool IsSweeping(const HeapHandle& heap_handle);
 
+  /**
+   * Returns whether the garbage collector is currently sweeping on the thread
+   * owning this heap. This API allows the caller to determine whether it has
+   * been called from a destructor of a managed object. This API is experimental
+   * and may be removed in future.
+   *
+   * \param heap_handle The corresponding heap.
+   * \returns true if the garbage collector is currently sweeping on this
+   *   thread, and false otherwise.
+   */
+  static bool IsSweepingOnOwningThread(const HeapHandle& heap_handle);
+
   /**
    * Returns whether the garbage collector is in the atomic pause, i.e., the
    * mutator is stopped from running. This API is experimental and is expected
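A minimal usage sketch (not part of the diff, and assuming an embedder-provided way to reach the cppgc::HeapHandle): a managed object's destructor can use the new predicate to detect that it is being run by the sweeper and skip work that would touch other managed objects.

    #include "cppgc/garbage-collected.h"
    #include "cppgc/heap-state.h"
    #include "cppgc/visitor.h"

    class Resource final : public cppgc::GarbageCollected<Resource> {
     public:
      ~Resource() {
        // During sweeping, other managed objects may already be dead, so only
        // do self-contained cleanup on that path.
        if (cppgc::subtle::HeapState::IsSweepingOnOwningThread(GetHeapHandle()))
          return;
        // Explicit destruction outside of a GC: the full teardown path is safe.
      }
      void Trace(cppgc::Visitor*) const {}

     private:
      cppgc::HeapHandle& GetHeapHandle() const;  // hypothetical helper, see lead-in
    };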
data/vendor/v8/include/cppgc/heap.h
@@ -68,8 +68,8 @@ class V8_EXPORT Heap {
    */
   kAtomic,
   /**
-   * Incremental marking, i.e. interleave marking is the rest of the
-   * application on the same thread.
+   * Incremental marking interleaves marking with the rest of the application
+   * workload on the same thread.
    */
   kIncremental,
   /**
@@ -86,6 +86,11 @@ class V8_EXPORT Heap {
    * Atomic stop-the-world sweeping. All of sweeping is performed at once.
    */
   kAtomic,
+  /**
+   * Incremental sweeping interleaves sweeping with the rest of the
+   * application workload on the same thread.
+   */
+  kIncremental,
   /**
    * Incremental and concurrent sweeping. Sweeping is split and interleaved
    * with the rest of the application.
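A hedged configuration sketch for the new sweeping mode, assuming the standalone-heap API in cppgc/heap.h of this release (HeapOptions with marking_support and sweeping_support fields); adjust to the embedder's actual setup.

    #include <memory>
    #include <utility>

    #include "cppgc/heap.h"
    #include "cppgc/platform.h"

    std::unique_ptr<cppgc::Heap> CreateHeapWithIncrementalSweeping(
        std::shared_ptr<cppgc::Platform> platform) {
      cppgc::Heap::HeapOptions options = cppgc::Heap::HeapOptions::Default();
      options.marking_support = cppgc::Heap::MarkingType::kIncremental;
      // New enumerator from the hunk above: sweep in steps on the owning
      // thread, without concurrent sweeper tasks.
      options.sweeping_support = cppgc::Heap::SweepingType::kIncremental;
      return cppgc::Heap::Create(std::move(platform), std::move(options));
    }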
data/vendor/v8/include/cppgc/internal/api-constants.h
@@ -39,6 +39,14 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
 constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
 #endif
 
+static constexpr size_t kDefaultAlignment = sizeof(void*);
+
+// Maximum support alignment for a type as in `alignof(T)`.
+static constexpr size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;
+
+// Granularity of heap allocations.
+constexpr size_t kAllocationGranularity = sizeof(void*);
+
 }  // namespace api_constants
 
 }  // namespace internal
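A standalone sanity check of the new constants, assuming a 64-bit target where sizeof(void*) == 8:

    #include <cstddef>

    constexpr std::size_t kDefaultAlignment = sizeof(void*);
    constexpr std::size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;
    constexpr std::size_t kAllocationGranularity = sizeof(void*);

    static_assert(kDefaultAlignment == 8,
                  "word-sized default alignment on a 64-bit target");
    static_assert(kMaxSupportedAlignment == 16,
                  "double-word maximum alignof(T) for managed types");
    static_assert(kAllocationGranularity == 8,
                  "heap allocations are made at word granularity");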
data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h
@@ -6,6 +6,8 @@
 #define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_
 
 #include <array>
+#include <cstddef>
+#include <cstdint>
 
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/logging.h"
@@ -19,32 +21,41 @@ class HeapBase;
 
 #if defined(CPPGC_YOUNG_GENERATION)
 
-// AgeTable contains entries that correspond to 4KB memory regions. Each entry
-// can be in one of three states: kOld, kYoung or kUnknown.
+// AgeTable is the bytemap needed for the fast generation check in the write
+// barrier. AgeTable contains entries that correspond to 512 bytes memory
+// regions (cards). Each entry in the table represents generation of the objects
+// that reside on the corresponding card (young, old or mixed).
 class AgeTable final {
-  static constexpr size_t kGranularityBits = 12;  // 4KiB per byte.
+  static constexpr size_t kRequiredSize = 1 * api_constants::kMB;
+  static constexpr size_t kAllocationGranularity =
+      api_constants::kAllocationGranularity;
 
  public:
-  enum class Age : uint8_t { kOld, kYoung, kUnknown };
+  enum class Age : uint8_t { kOld, kYoung, kMixed };
 
-  static constexpr size_t kEntrySizeInBytes = 1 << kGranularityBits;
+  static constexpr size_t kCardSizeInBytes =
+      (api_constants::kCagedHeapReservationSize / kAllocationGranularity) /
+      kRequiredSize;
 
-  Age& operator[](uintptr_t offset) { return table_[entry(offset)]; }
-  Age operator[](uintptr_t offset) const { return table_[entry(offset)]; }
+  void SetAge(uintptr_t cage_offset, Age age) {
+    table_[card(cage_offset)] = age;
+  }
+  V8_INLINE Age GetAge(uintptr_t cage_offset) const {
+    return table_[card(cage_offset)];
+  }
 
   void Reset(PageAllocator* allocator);
 
  private:
-  static constexpr size_t kAgeTableSize =
-      api_constants::kCagedHeapReservationSize >> kGranularityBits;
-
-  size_t entry(uintptr_t offset) const {
+  V8_INLINE size_t card(uintptr_t offset) const {
+    constexpr size_t kGranularityBits =
+        __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
     const size_t entry = offset >> kGranularityBits;
     CPPGC_DCHECK(table_.size() > entry);
     return entry;
   }
 
-  std::array<Age, kAgeTableSize> table_;
+  std::array<Age, kRequiredSize> table_;
 };
 
 static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
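The card size above follows from plain arithmetic on the constants; a standalone check, assuming the 64-bit build with the 4 GB cage, showing why each one-byte entry now corresponds to a 512-byte card while sizeof(AgeTable) stays at 1 MiB:

    #include <cstddef>
    #include <cstdint>

    constexpr std::size_t kGB = std::size_t{1} << 30;
    constexpr std::size_t kMB = std::size_t{1} << 20;

    constexpr std::size_t kCagedHeapReservationSize = 4 * kGB;     // 4 GB cage
    constexpr std::size_t kAllocationGranularity = sizeof(void*);  // 8 on 64-bit
    constexpr std::size_t kRequiredSize = 1 * kMB;                 // table entries

    constexpr std::size_t kCardSizeInBytes =
        (kCagedHeapReservationSize / kAllocationGranularity) / kRequiredSize;

    static_assert(kCardSizeInBytes == 512,
                  "matches the '512 bytes memory regions (cards)' comment");
    static_assert(kRequiredSize * sizeof(std::uint8_t) == 1 * kMB,
                  "one byte per entry keeps the table at 1 MiB");
    // card(offset) therefore reduces to offset >> __builtin_ctz(512), i.e. offset >> 9.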
@@ -53,10 +64,10 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
 #endif  // CPPGC_YOUNG_GENERATION
 
 struct CagedHeapLocalData final {
-  explicit CagedHeapLocalData(HeapBase* heap_base) : heap_base(heap_base) {}
+  CagedHeapLocalData(HeapBase&, PageAllocator&);
 
   bool is_incremental_marking_in_progress = false;
-  HeapBase* heap_base = nullptr;
+  HeapBase& heap_base;
 #if defined(CPPGC_YOUNG_GENERATION)
   AgeTable age_table;
 #endif
data/vendor/v8/include/cppgc/internal/finalizer-trait.h
@@ -19,7 +19,8 @@ struct HasFinalizeGarbageCollectedObject : std::false_type {};
 
 template <typename T>
 struct HasFinalizeGarbageCollectedObject<
-    T, void_t<decltype(std::declval<T>().FinalizeGarbageCollectedObject())>>
+    T,
+    std::void_t<decltype(std::declval<T>().FinalizeGarbageCollectedObject())>>
     : std::true_type {};
 
 // The FinalizerTraitImpl specifies how to finalize objects.
@@ -76,6 +77,8 @@ struct FinalizerTrait {
   }
 
  public:
+  static constexpr bool HasFinalizer() { return kNonTrivialFinalizer; }
+
   // The callback used to finalize an object of type T.
   static constexpr FinalizationCallback kCallback =
       kNonTrivialFinalizer ? Finalize : nullptr;
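A hedged sketch of what the new accessor reports: HasFinalizer() exposes kNonTrivialFinalizer, so it is false for trivially destructible types and true as soon as a destructor (or FinalizeGarbageCollectedObject) must run; the GCInfo dispatch in the next hunk branches on exactly this value.

    #include "cppgc/internal/finalizer-trait.h"

    struct PodLike {
      int value = 0;
    };

    struct OwnsHandle {
      ~OwnsHandle() { /* releases an external handle */ }
    };

    static_assert(!cppgc::internal::FinalizerTrait<PodLike>::HasFinalizer(),
                  "trivially destructible: no finalization callback registered");
    static_assert(cppgc::internal::FinalizerTrait<OwnsHandle>::HasFinalizer(),
                  "non-trivial destructor: a finalizer callback is registered");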
data/vendor/v8/include/cppgc/internal/gc-info.h
@@ -19,11 +19,94 @@ namespace internal {
 
 using GCInfoIndex = uint16_t;
 
-// Acquires a new GC info object and returns the index. In addition, also
-// updates `registered_index` atomically.
-V8_EXPORT GCInfoIndex
-EnsureGCInfoIndex(std::atomic<GCInfoIndex>& registered_index,
-                  FinalizationCallback, TraceCallback, NameCallback, bool);
+struct V8_EXPORT EnsureGCInfoIndexTrait final {
+  // Acquires a new GC info object and returns the index. In addition, also
+  // updates `registered_index` atomically.
+  template <typename T>
+  V8_INLINE static GCInfoIndex EnsureIndex(
+      std::atomic<GCInfoIndex>& registered_index) {
+    return EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
+  }
+
+ private:
+  template <typename T, bool = std::is_polymorphic<T>::value,
+            bool = FinalizerTrait<T>::HasFinalizer(),
+            bool = NameTrait<T>::HasNonHiddenName()>
+  struct EnsureGCInfoIndexTraitDispatch;
+
+  static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+                                                  TraceCallback,
+                                                  FinalizationCallback,
+                                                  NameCallback);
+  static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+                                                  TraceCallback,
+                                                  FinalizationCallback);
+  static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+                                                  TraceCallback, NameCallback);
+  static GCInfoIndex EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&,
+                                                  TraceCallback);
+  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+                                                     TraceCallback,
+                                                     FinalizationCallback,
+                                                     NameCallback);
+  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+                                                     TraceCallback,
+                                                     FinalizationCallback);
+  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+                                                     TraceCallback,
+                                                     NameCallback);
+  static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
+                                                     TraceCallback);
+};
+
+#define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \
+  template <typename T>                                                        \
+  struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch<               \
+      T, is_polymorphic, has_finalizer, has_non_hidden_name> {                 \
+    V8_INLINE GCInfoIndex                                                      \
+    operator()(std::atomic<GCInfoIndex>& registered_index) {                   \
+      return function;                                                         \
+    }                                                                          \
+  };
+
+// --------------------------------------------------------------------- //
+// DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function)
+// --------------------------------------------------------------------- //
+DISPATCH(true, true, true, //
+         EnsureGCInfoIndexPolymorphic(registered_index, //
+                                      TraceTrait<T>::Trace, //
+                                      FinalizerTrait<T>::kCallback, //
+                                      NameTrait<T>::GetName)) //
+DISPATCH(true, true, false, //
+         EnsureGCInfoIndexPolymorphic(registered_index, //
+                                      TraceTrait<T>::Trace, //
+                                      FinalizerTrait<T>::kCallback)) //
+DISPATCH(true, false, true, //
+         EnsureGCInfoIndexPolymorphic(registered_index, //
+                                      TraceTrait<T>::Trace, //
+                                      NameTrait<T>::GetName)) //
+DISPATCH(true, false, false, //
+         EnsureGCInfoIndexPolymorphic(registered_index, //
+                                      TraceTrait<T>::Trace)) //
+DISPATCH(false, true, true, //
+         EnsureGCInfoIndexNonPolymorphic(registered_index, //
+                                         TraceTrait<T>::Trace, //
+                                         FinalizerTrait<T>::kCallback, //
+                                         NameTrait<T>::GetName)) //
+DISPATCH(false, true, false, //
+         EnsureGCInfoIndexNonPolymorphic(registered_index, //
+                                         TraceTrait<T>::Trace, //
+                                         FinalizerTrait<T>::kCallback)) //
+DISPATCH(false, false, true, //
+         EnsureGCInfoIndexNonPolymorphic(registered_index, //
+                                         TraceTrait<T>::Trace, //
+                                         NameTrait<T>::GetName)) //
+DISPATCH(false, false, false, //
+         EnsureGCInfoIndexNonPolymorphic(registered_index, //
+                                         TraceTrait<T>::Trace)) //
+
+#undef DISPATCH
 
 // Fold types based on finalizer behavior. Note that finalizer characteristics
 // align with trace behavior, i.e., destructors are virtual when trace methods
@@ -57,16 +140,13 @@ struct GCInfoFolding {
 // finalization, and naming.
 template <typename T>
 struct GCInfoTrait final {
-  static GCInfoIndex Index() {
+  V8_INLINE static GCInfoIndex Index() {
     static_assert(sizeof(T), "T must be fully defined");
     static std::atomic<GCInfoIndex>
         registered_index;  // Uses zero initialization.
     const GCInfoIndex index = registered_index.load(std::memory_order_acquire);
     return index ? index
-                 : EnsureGCInfoIndex(
-                       registered_index, FinalizerTrait<T>::kCallback,
-                       TraceTrait<T>::Trace, NameTrait<T>::GetName,
-                       std::is_polymorphic<T>::value);
+                 : EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
   }
 };
 
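A simplified standalone analogue (not the V8 code) of the registration pattern above: a zero-initialized per-type atomic slot, an acquire-load fast path, and a slow path whose overload is picked from the type's traits at compile time. The Register helper is hypothetical and stands in for the EnsureGCInfoIndexPolymorphic/NonPolymorphic family.

    #include <atomic>
    #include <cstdint>
    #include <type_traits>

    using GCInfoIndex = std::uint16_t;

    inline std::atomic<GCInfoIndex> g_next_index{1};

    // Hypothetical stand-in for the exported registration functions.
    inline GCInfoIndex Register(std::atomic<GCInfoIndex>& slot,
                                bool /*needs_finalizer*/, bool /*is_polymorphic*/) {
      GCInfoIndex fresh = g_next_index.fetch_add(1, std::memory_order_relaxed);
      GCInfoIndex expected = 0;
      // Publish with release so the acquire load in Index() sees a valid entry;
      // if another thread registered first, reuse its index.
      if (!slot.compare_exchange_strong(expected, fresh, std::memory_order_release))
        return expected;
      return fresh;
    }

    template <typename T>
    GCInfoIndex Index() {
      static std::atomic<GCInfoIndex> registered_index;  // zero-initialized
      if (GCInfoIndex index = registered_index.load(std::memory_order_acquire))
        return index;  // fast path: already registered
      // Slow path: which callbacks get registered is decided from the traits,
      // mirroring EnsureGCInfoIndexTrait::EnsureIndex<T>() and its DISPATCH table.
      return Register(registered_index,
                      !std::is_trivially_destructible<T>::value,
                      std::is_polymorphic<T>::value);
    }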
data/vendor/v8/include/cppgc/internal/logging.h
@@ -20,18 +20,18 @@ FatalImpl(const char*, const SourceLocation& = SourceLocation::Current());
 template <typename>
 struct EatParams {};
 
-#if DEBUG
+#if defined(DEBUG)
 #define CPPGC_DCHECK_MSG(condition, message)  \
   do {                                        \
     if (V8_UNLIKELY(!(condition))) {          \
       ::cppgc::internal::DCheckImpl(message); \
     }                                         \
   } while (false)
-#else
+#else  // !defined(DEBUG)
 #define CPPGC_DCHECK_MSG(condition, message)                 \
   (static_cast<void>(::cppgc::internal::EatParams<decltype(  \
       static_cast<void>(condition), message)>{}))
-#endif
+#endif  // !defined(DEBUG)
 
 #define CPPGC_DCHECK(condition) CPPGC_DCHECK_MSG(condition, #condition)
 
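A standalone illustration of why the non-DEBUG branch of CPPGC_DCHECK_MSG is side-effect free: the condition and message appear only inside decltype, so they must still name valid entities but are never evaluated.

    #include <cassert>

    template <typename>
    struct EatParams {};

    #define SKETCH_DCHECK_MSG(condition, message)     \
      (static_cast<void>(EatParams<decltype(          \
          static_cast<void>(condition), message)>{}))

    int main() {
      int calls = 0;
      auto expensive_check = [&] { ++calls; return true; };
      SKETCH_DCHECK_MSG(expensive_check(), "never evaluated in release builds");
      assert(calls == 0);  // the lambda never ran; only its type was inspected
      return 0;
    }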
data/vendor/v8/include/cppgc/internal/name-trait.h
@@ -6,6 +6,7 @@
 #define INCLUDE_CPPGC_INTERNAL_NAME_TRAIT_H_
 
 #include <cstddef>
+#include <type_traits>
 
 #include "cppgc/name-provider.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
@@ -67,6 +68,16 @@ class V8_EXPORT NameTraitBase {
 template <typename T>
 class NameTrait final : public NameTraitBase {
  public:
+  static constexpr bool HasNonHiddenName() {
+#if CPPGC_SUPPORTS_COMPILE_TIME_TYPENAME
+    return true;
+#elif CPPGC_SUPPORTS_OBJECT_NAMES
+    return true;
+#else   // !CPPGC_SUPPORTS_OBJECT_NAMES
+    return std::is_base_of<NameProvider, T>::value;
+#endif  // !CPPGC_SUPPORTS_OBJECT_NAMES
+  }
+
   static HeapObjectName GetName(const void* obj) {
     return GetNameFor(static_cast<const T*>(obj));
   }
data/vendor/v8/include/cppgc/internal/persistent-node.h
@@ -20,6 +20,7 @@ class Visitor;
 namespace internal {
 
 class CrossThreadPersistentRegion;
+class FatalOutOfMemoryHandler;
 
 // PersistentNode represents a variant of two states:
 // 1) traceable node with a back pointer to the Persistent object;
@@ -75,26 +76,35 @@ class PersistentNode final {
   TraceCallback trace_ = nullptr;
 };
 
-class V8_EXPORT PersistentRegion final {
+class V8_EXPORT PersistentRegionBase {
   using PersistentNodeSlots = std::array<PersistentNode, 256u>;
 
  public:
-  PersistentRegion() = default;
   // Clears Persistent fields to avoid stale pointers after heap teardown.
-  ~PersistentRegion();
+  ~PersistentRegionBase();
 
-  PersistentRegion(const PersistentRegion&) = delete;
-  PersistentRegion& operator=(const PersistentRegion&) = delete;
+  PersistentRegionBase(const PersistentRegionBase&) = delete;
+  PersistentRegionBase& operator=(const PersistentRegionBase&) = delete;
 
-  PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
-    if (!free_list_head_) {
-      EnsureNodeSlots();
+  void Trace(Visitor*);
+
+  size_t NodesInUse() const;
+
+  void ClearAllUsedNodes();
+
+ protected:
+  explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler);
+
+  PersistentNode* TryAllocateNodeFromFreeList(void* owner,
+                                              TraceCallback trace) {
+    PersistentNode* node = nullptr;
+    if (V8_LIKELY(free_list_head_)) {
+      node = free_list_head_;
+      free_list_head_ = free_list_head_->FreeListNext();
+      CPPGC_DCHECK(!node->IsUsed());
+      node->InitializeAsUsedNode(owner, trace);
+      nodes_in_use_++;
     }
-    PersistentNode* node = free_list_head_;
-    free_list_head_ = free_list_head_->FreeListNext();
-    CPPGC_DCHECK(!node->IsUsed());
-    node->InitializeAsUsedNode(owner, trace);
-    nodes_in_use_++;
     return node;
   }
 
@@ -107,24 +117,57 @@ class V8_EXPORT PersistentRegion final {
     nodes_in_use_--;
   }
 
-  void Trace(Visitor*);
-
-  size_t NodesInUse() const;
+  PersistentNode* RefillFreeListAndAllocateNode(void* owner,
+                                                TraceCallback trace);
 
+ private:
+  template <typename PersistentBaseClass>
   void ClearAllUsedNodes();
 
- private:
-  void EnsureNodeSlots();
+  void RefillFreeList();
 
   std::vector<std::unique_ptr<PersistentNodeSlots>> nodes_;
   PersistentNode* free_list_head_ = nullptr;
   size_t nodes_in_use_ = 0;
+  const FatalOutOfMemoryHandler& oom_handler_;
 
   friend class CrossThreadPersistentRegion;
 };
 
-// CrossThreadPersistent uses PersistentRegion but protects it using this lock
-// when needed.
+// Variant of PersistentRegionBase that checks whether the allocation and
+// freeing happens only on the thread that created the region.
+class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
+ public:
+  explicit PersistentRegion(const FatalOutOfMemoryHandler&);
+  // Clears Persistent fields to avoid stale pointers after heap teardown.
+  ~PersistentRegion() = default;
+
+  PersistentRegion(const PersistentRegion&) = delete;
+  PersistentRegion& operator=(const PersistentRegion&) = delete;
+
+  V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
+    CPPGC_DCHECK(IsCreationThread());
+    auto* node = TryAllocateNodeFromFreeList(owner, trace);
+    if (V8_LIKELY(node)) return node;
+
+    // Slow path allocation allows for checking thread correspondence.
+    CPPGC_CHECK(IsCreationThread());
+    return RefillFreeListAndAllocateNode(owner, trace);
+  }
+
+  V8_INLINE void FreeNode(PersistentNode* node) {
+    CPPGC_DCHECK(IsCreationThread());
+    PersistentRegionBase::FreeNode(node);
+  }
+
+ private:
+  bool IsCreationThread();
+
+  int creation_thread_id_;
+};
+
+// CrossThreadPersistent uses PersistentRegionBase but protects it using this
+// lock when needed.
 class V8_EXPORT PersistentRegionLock final {
  public:
   PersistentRegionLock();
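A standalone sketch (not the V8 code) of the allocation split introduced above: a header-inlined fast path that only pops the free list, and an out-of-line refill path where the heavier work (growing the slab list, plus thread and OOM checks in the real code) is paid for.

    #include <array>
    #include <cassert>
    #include <memory>
    #include <vector>

    struct Node {
      Node* next = nullptr;
      bool used = false;
    };

    class Region {
     public:
      Node* AllocateNode() {
        if (Node* node = TryAllocateNodeFromFreeList()) return node;  // fast path
        return RefillFreeListAndAllocateNode();                       // slow path
      }

      void FreeNode(Node* node) {
        node->used = false;
        node->next = free_list_head_;
        free_list_head_ = node;
      }

     private:
      Node* TryAllocateNodeFromFreeList() {
        if (!free_list_head_) return nullptr;
        Node* node = free_list_head_;
        free_list_head_ = node->next;
        node->used = true;
        return node;
      }

      Node* RefillFreeListAndAllocateNode() {
        // Grow in fixed-size slabs, mirroring PersistentNodeSlots (256 nodes).
        auto slab = std::make_unique<std::array<Node, 256>>();
        for (Node& node : *slab) FreeNode(&node);
        slabs_.push_back(std::move(slab));
        Node* node = TryAllocateNodeFromFreeList();
        assert(node);
        return node;
      }

      std::vector<std::unique_ptr<std::array<Node, 256>>> slabs_;
      Node* free_list_head_ = nullptr;
    };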
@@ -133,11 +176,12 @@ class V8_EXPORT PersistentRegionLock final {
   static void AssertLocked();
 };
 
-// Variant of PersistentRegion that checks whether the PersistentRegionLock is
-// locked.
-class V8_EXPORT CrossThreadPersistentRegion final {
+// Variant of PersistentRegionBase that checks whether the PersistentRegionLock
+// is locked.
+class V8_EXPORT CrossThreadPersistentRegion final
+    : protected PersistentRegionBase {
  public:
-  CrossThreadPersistentRegion() = default;
+  explicit CrossThreadPersistentRegion(const FatalOutOfMemoryHandler&);
   // Clears Persistent fields to avoid stale pointers after heap teardown.
   ~CrossThreadPersistentRegion();
 
@@ -147,12 +191,15 @@ class V8_EXPORT CrossThreadPersistentRegion final {
 
   V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
     PersistentRegionLock::AssertLocked();
-    return persistent_region_.AllocateNode(owner, trace);
+    auto* node = TryAllocateNodeFromFreeList(owner, trace);
+    if (V8_LIKELY(node)) return node;
+
+    return RefillFreeListAndAllocateNode(owner, trace);
   }
 
   V8_INLINE void FreeNode(PersistentNode* node) {
     PersistentRegionLock::AssertLocked();
-    persistent_region_.FreeNode(node);
+    PersistentRegionBase::FreeNode(node);
   }
 
   void Trace(Visitor*);
@@ -160,9 +207,6 @@ class V8_EXPORT CrossThreadPersistentRegion final {
   size_t NodesInUse() const;
 
   void ClearAllUsedNodes();
-
- private:
-  PersistentRegion persistent_region_;
 };
 
 }  // namespace internal
data/vendor/v8/include/cppgc/internal/pointer-policies.h
@@ -51,7 +51,17 @@ struct NoWriteBarrierPolicy {
   static void AssigningBarrier(const void*, const void*) {}
 };
 
-class V8_EXPORT EnabledCheckingPolicy {
+class V8_EXPORT SameThreadEnabledCheckingPolicyBase {
+ protected:
+  void CheckPointerImpl(const void* ptr, bool points_to_payload,
+                        bool check_off_heap_assignments);
+
+  const HeapBase* heap_ = nullptr;
+};
+
+template <bool kCheckOffHeapAssignments>
+class V8_EXPORT SameThreadEnabledCheckingPolicy
+    : private SameThreadEnabledCheckingPolicyBase {
  protected:
   template <typename T>
   void CheckPointer(const T* ptr) {
@@ -61,23 +71,20 @@ class V8_EXPORT EnabledCheckingPolicy {
   }
 
  private:
-  void CheckPointerImpl(const void* ptr, bool points_to_payload);
-
   template <typename T, bool = IsCompleteV<T>>
   struct CheckPointersImplTrampoline {
-    static void Call(EnabledCheckingPolicy* policy, const T* ptr) {
-      policy->CheckPointerImpl(ptr, false);
+    static void Call(SameThreadEnabledCheckingPolicy* policy, const T* ptr) {
+      policy->CheckPointerImpl(ptr, false, kCheckOffHeapAssignments);
     }
   };
 
   template <typename T>
   struct CheckPointersImplTrampoline<T, true> {
-    static void Call(EnabledCheckingPolicy* policy, const T* ptr) {
-      policy->CheckPointerImpl(ptr, IsGarbageCollectedTypeV<T>);
+    static void Call(SameThreadEnabledCheckingPolicy* policy, const T* ptr) {
+      policy->CheckPointerImpl(ptr, IsGarbageCollectedTypeV<T>,
+                               kCheckOffHeapAssignments);
     }
   };
-
-  const HeapBase* heap_ = nullptr;
 };
 
 class DisabledCheckingPolicy {
@@ -85,15 +92,19 @@ class DisabledCheckingPolicy {
   void CheckPointer(const void*) {}
 };
 
-#if V8_ENABLE_CHECKS
-using DefaultMemberCheckingPolicy = EnabledCheckingPolicy;
-using DefaultPersistentCheckingPolicy = EnabledCheckingPolicy;
-#else
+#ifdef DEBUG
+// Off heap members are not connected to object graph and thus cannot ressurect
+// dead objects.
+using DefaultMemberCheckingPolicy =
+    SameThreadEnabledCheckingPolicy<false /* kCheckOffHeapAssignments*/>;
+using DefaultPersistentCheckingPolicy =
+    SameThreadEnabledCheckingPolicy<true /* kCheckOffHeapAssignments*/>;
+#else   // !DEBUG
 using DefaultMemberCheckingPolicy = DisabledCheckingPolicy;
 using DefaultPersistentCheckingPolicy = DisabledCheckingPolicy;
-#endif
+#endif  // !DEBUG
 
 // For CT(W)P neither marking information (for value), nor objectstart bitmap
-// (for slot) are guaranteed to be present because there's no synchonization
+// (for slot) are guaranteed to be present because there's no synchronization
 // between heaps after marking.
 using DefaultCrossThreadPersistentCheckingPolicy = DisabledCheckingPolicy;