libv8-node 17.9.1.0-x86_64-darwin → 18.13.0.0-x86_64-darwin
- checksums.yaml +4 -4
- data/ext/libv8-node/paths.rb +1 -1
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/include/cppgc/allocation.h +88 -17
- data/vendor/v8/include/cppgc/default-platform.h +2 -10
- data/vendor/v8/include/cppgc/explicit-management.h +22 -4
- data/vendor/v8/include/cppgc/garbage-collected.h +15 -26
- data/vendor/v8/include/cppgc/heap-consistency.h +13 -0
- data/vendor/v8/include/cppgc/heap-state.h +12 -0
- data/vendor/v8/include/cppgc/heap.h +7 -2
- data/vendor/v8/include/cppgc/internal/api-constants.h +8 -0
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +23 -12
- data/vendor/v8/include/cppgc/internal/finalizer-trait.h +2 -1
- data/vendor/v8/include/cppgc/internal/logging.h +3 -3
- data/vendor/v8/include/cppgc/internal/persistent-node.h +39 -27
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +4 -4
- data/vendor/v8/include/cppgc/internal/write-barrier.h +26 -32
- data/vendor/v8/include/cppgc/member.h +5 -2
- data/vendor/v8/include/cppgc/persistent.h +30 -31
- data/vendor/v8/include/cppgc/platform.h +3 -1
- data/vendor/v8/include/cppgc/prefinalizer.h +34 -11
- data/vendor/v8/include/cppgc/testing.h +9 -2
- data/vendor/v8/include/cppgc/type-traits.h +6 -13
- data/vendor/v8/include/libplatform/libplatform.h +0 -11
- data/vendor/v8/include/libplatform/v8-tracing.h +0 -1
- data/vendor/v8/include/v8-array-buffer.h +22 -2
- data/vendor/v8/include/v8-callbacks.h +26 -6
- data/vendor/v8/include/v8-context.h +3 -14
- data/vendor/v8/include/v8-cppgc.h +16 -126
- data/vendor/v8/include/v8-data.h +15 -0
- data/vendor/v8/include/v8-debug.h +21 -4
- data/vendor/v8/include/v8-embedder-heap.h +10 -30
- data/vendor/v8/include/v8-embedder-state-scope.h +51 -0
- data/vendor/v8/include/v8-exception.h +0 -7
- data/vendor/v8/include/v8-fast-api-calls.h +82 -31
- data/vendor/v8/include/v8-function.h +3 -0
- data/vendor/v8/include/v8-initialization.h +64 -31
- data/vendor/v8/include/v8-inspector.h +45 -4
- data/vendor/v8/include/v8-internal.h +189 -102
- data/vendor/v8/include/v8-isolate.h +49 -2
- data/vendor/v8/include/v8-local-handle.h +0 -4
- data/vendor/v8/include/v8-locker.h +2 -1
- data/vendor/v8/include/v8-message.h +19 -44
- data/vendor/v8/include/v8-metrics.h +32 -15
- data/vendor/v8/include/v8-object.h +11 -6
- data/vendor/v8/include/v8-platform.h +365 -6
- data/vendor/v8/include/v8-primitive.h +14 -6
- data/vendor/v8/include/v8-profiler.h +78 -2
- data/vendor/v8/include/v8-script.h +27 -51
- data/vendor/v8/include/v8-snapshot.h +0 -2
- data/vendor/v8/include/v8-statistics.h +2 -0
- data/vendor/v8/include/v8-template.h +31 -4
- data/vendor/v8/include/v8-traced-handle.h +39 -224
- data/vendor/v8/include/v8-unwinder.h +10 -7
- data/vendor/v8/include/v8-value-serializer-version.h +1 -1
- data/vendor/v8/include/v8-value-serializer.h +32 -2
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8-wasm.h +13 -1
- data/vendor/v8/include/v8-weak-callback-info.h +20 -6
- data/vendor/v8/include/v8.h +0 -1
- data/vendor/v8/include/v8config.h +56 -11
- data/vendor/v8/x86_64-darwin/libv8/obj/libv8_monolith.a +0 -0
- metadata +4 -4
- data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +0 -30
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ec15997919f02fa3acdf39aa8207e362ce9279c207edb2140516bfd0c6afb624
+  data.tar.gz: 85941eba405f6206720085637432d2ab0c1778a2dce9b4e6ac6ebc74ec6f49b1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 755ca7d6d67a97b9c47a11bc0828444474d8a04d27b843097fdfb5edbcd49c83c4ca5842cfc4b3e2e48e710e243e337ed69448cd42090ab2e5ee34331ebbcf9d
+  data.tar.gz: 41e12d75b0495e3f0dd658b10b7fead6c26e815df0ef2eaf46a69b105365dc3c8cebee03207a181cda343c7635098848ee88d65266e6914fe9c1fb1c5f6ef90a
data/ext/libv8-node/paths.rb CHANGED
@@ -20,7 +20,7 @@ module Libv8::Node
     end

     def platform
-      Gem::Platform.local.tap { |p| RUBY_PLATFORM =~ /musl/ && p.version.nil?
+      Gem::Platform.local.tap { |p| RUBY_PLATFORM =~ /musl/ && p.version.nil? && p.instance_eval { @version = 'musl' } }.to_s.gsub(/-darwin-?\d+/, '-darwin')
     end

     def config
data/lib/libv8/node/version.rb CHANGED
@@ -1,7 +1,7 @@
 module Libv8; end

 module Libv8::Node
-  VERSION = '
-  NODE_VERSION = '
-  LIBV8_VERSION = '
+  VERSION = '18.13.0.0'.freeze
+  NODE_VERSION = '18.13.0'.freeze
+  LIBV8_VERSION = '10.2.154.23'.freeze # from v8/include/v8-version.h
 end
data/vendor/v8/include/cppgc/allocation.h CHANGED
@@ -18,6 +18,23 @@
 #include "cppgc/type-traits.h"
 #include "v8config.h"  // NOLINT(build/include_directory)

+#if defined(__has_attribute)
+#if __has_attribute(assume_aligned)
+#define CPPGC_DEFAULT_ALIGNED \
+  __attribute__((assume_aligned(api_constants::kDefaultAlignment)))
+#define CPPGC_DOUBLE_WORD_ALIGNED \
+  __attribute__((assume_aligned(2 * api_constants::kDefaultAlignment)))
+#endif  // __has_attribute(assume_aligned)
+#endif  // defined(__has_attribute)
+
+#if !defined(CPPGC_DEFAULT_ALIGNED)
+#define CPPGC_DEFAULT_ALIGNED
+#endif
+
+#if !defined(CPPGC_DOUBLE_WORD_ALIGNED)
+#define CPPGC_DOUBLE_WORD_ALIGNED
+#endif
+
 namespace cppgc {

 /**
@@ -27,6 +44,9 @@ class AllocationHandle;

 namespace internal {

+// Similar to C++17 std::align_val_t;
+enum class AlignVal : size_t {};
+
 class V8_EXPORT MakeGarbageCollectedTraitInternal {
  protected:
   static inline void MarkObjectAsFullyConstructed(const void* payload) {
@@ -45,32 +65,72 @@ class V8_EXPORT MakeGarbageCollectedTraitInternal {
     atomic_mutable_bitfield->store(value, std::memory_order_release);
   }

-
-
-
-
+  // Dispatch based on compile-time information.
+  //
+  // Default implementation is for a custom space with >`kDefaultAlignment` byte
+  // alignment.
+  template <typename GCInfoType, typename CustomSpace, size_t alignment>
+  struct AllocationDispatcher final {
+    static void* Invoke(AllocationHandle& handle, size_t size) {
       static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
                     "Custom space must inherit from CustomSpaceBase.");
+      static_assert(
+          !CustomSpace::kSupportsCompaction,
+          "Custom spaces that support compaction do not support allocating "
+          "objects with non-default (i.e. word-sized) alignment.");
       return MakeGarbageCollectedTraitInternal::Allocate(
-          handle, size,
-          CustomSpace::kSpaceIndex);
+          handle, size, static_cast<AlignVal>(alignment),
+          internal::GCInfoTrait<GCInfoType>::Index(), CustomSpace::kSpaceIndex);
+    }
+  };
+
+  // Fast path for regular allocations for the default space with
+  // `kDefaultAlignment` byte alignment.
+  template <typename GCInfoType>
+  struct AllocationDispatcher<GCInfoType, void,
+                              api_constants::kDefaultAlignment>
+      final {
+    static void* Invoke(AllocationHandle& handle, size_t size) {
+      return MakeGarbageCollectedTraitInternal::Allocate(
+          handle, size, internal::GCInfoTrait<GCInfoType>::Index());
     }
   };

-
-
-
-
+  // Default space with >`kDefaultAlignment` byte alignment.
+  template <typename GCInfoType, size_t alignment>
+  struct AllocationDispatcher<GCInfoType, void, alignment> final {
+    static void* Invoke(AllocationHandle& handle, size_t size) {
       return MakeGarbageCollectedTraitInternal::Allocate(
-          handle, size,
+          handle, size, static_cast<AlignVal>(alignment),
+          internal::GCInfoTrait<GCInfoType>::Index());
+    }
+  };
+
+  // Custom space with `kDefaultAlignment` byte alignment.
+  template <typename GCInfoType, typename CustomSpace>
+  struct AllocationDispatcher<GCInfoType, CustomSpace,
+                              api_constants::kDefaultAlignment>
+      final {
+    static void* Invoke(AllocationHandle& handle, size_t size) {
+      static_assert(std::is_base_of<CustomSpaceBase, CustomSpace>::value,
+                    "Custom space must inherit from CustomSpaceBase.");
+      return MakeGarbageCollectedTraitInternal::Allocate(
+          handle, size, internal::GCInfoTrait<GCInfoType>::Index(),
+          CustomSpace::kSpaceIndex);
     }
   };

  private:
-  static void* Allocate(cppgc::AllocationHandle
-
-  static void* Allocate(cppgc::AllocationHandle
-
+  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
+                                              GCInfoIndex);
+  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
+                                                  size_t, AlignVal,
+                                                  GCInfoIndex);
+  static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t,
+                                              GCInfoIndex, CustomSpaceIndex);
+  static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&,
+                                                  size_t, AlignVal, GCInfoIndex,
+                                                  CustomSpaceIndex);

   friend class HeapObjectHeader;
 };
@@ -109,10 +169,18 @@ class MakeGarbageCollectedTraitBase
         std::is_base_of<typename T::ParentMostGarbageCollectedType, T>::value,
         "U of GarbageCollected<U> must be a base of T. Check "
         "GarbageCollected<T> base class inheritance.");
-
+    static constexpr size_t kWantedAlignment =
+        alignof(T) < internal::api_constants::kDefaultAlignment
+            ? internal::api_constants::kDefaultAlignment
+            : alignof(T);
+    static_assert(
+        kWantedAlignment <= internal::api_constants::kMaxSupportedAlignment,
+        "Requested alignment larger than alignof(std::max_align_t) bytes. "
+        "Please file a bug to possibly get this restriction lifted.");
+    return AllocationDispatcher<
         typename internal::GCInfoFolding<
             T, typename T::ParentMostGarbageCollectedType>::ResultType,
-        typename SpaceTrait<T>::Space>::
+        typename SpaceTrait<T>::Space, kWantedAlignment>::Invoke(handle, size);
   }

   /**
@@ -236,4 +304,7 @@ V8_INLINE T* MakeGarbageCollected(AllocationHandle& handle,

 }  // namespace cppgc

+#undef CPPGC_DEFAULT_ALIGNED
+#undef CPPGC_DOUBLE_WORD_ALIGNED
+
 #endif  // INCLUDE_CPPGC_ALLOCATION_H_
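The net effect of the allocation.h changes is that over-aligned types can now be allocated on the cppgc heap. A minimal usage sketch, assuming a 64-bit build where kDefaultAlignment is sizeof(void*); the SimdBox type is purely illustrative and not part of this diff:

#include <cppgc/allocation.h>
#include <cppgc/garbage-collected.h>
#include <cppgc/visitor.h>

// alignof(SimdBox) is 16, which exceeds api_constants::kDefaultAlignment
// (sizeof(void*)), so MakeGarbageCollectedTraitBase now routes the allocation
// through the AlignVal overload of Allocate() instead of the default fast path.
class alignas(16) SimdBox final : public cppgc::GarbageCollected<SimdBox> {
 public:
  void Trace(cppgc::Visitor*) const {}

 private:
  float lanes_[4] = {0, 0, 0, 0};
};

SimdBox* NewBox(cppgc::AllocationHandle& handle) {
  return cppgc::MakeGarbageCollected<SimdBox>(handle);
}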
data/vendor/v8/include/cppgc/default-platform.h CHANGED
@@ -6,7 +6,6 @@
 #define INCLUDE_CPPGC_DEFAULT_PLATFORM_H_

 #include <memory>
-#include <vector>

 #include "cppgc/platform.h"
 #include "libplatform/libplatform.h"
@@ -20,15 +19,6 @@ namespace cppgc {
  */
 class V8_EXPORT DefaultPlatform : public Platform {
  public:
-  /**
-   * Use this method instead of 'cppgc::InitializeProcess' when using
-   * 'cppgc::DefaultPlatform'. 'cppgc::DefaultPlatform::InitializeProcess'
-   * will initialize cppgc and v8 if needed (for non-standalone builds).
-   *
-   * \param platform DefaultPlatform instance used to initialize cppgc/v8.
-   */
-  static void InitializeProcess(DefaultPlatform* platform);
-
   using IdleTaskSupport = v8::platform::IdleTaskSupport;
   explicit DefaultPlatform(
       int thread_pool_size = 0,
@@ -64,6 +54,8 @@ class V8_EXPORT DefaultPlatform : public Platform {
     return v8_platform_->GetTracingController();
   }

+  v8::Platform* GetV8Platform() const { return v8_platform_.get(); }
+
  protected:
   static constexpr v8::Isolate* kNoIsolate = nullptr;

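Two behavioral notes fall out of this diff: DefaultPlatform::InitializeProcess() is gone, and the wrapped v8::Platform is now reachable via GetV8Platform(). A minimal standalone-embedder sketch; the surrounding main() and teardown flow are assumptions about the embedding application, not part of the gem:

#include <memory>

#include <cppgc/default-platform.h>
#include <cppgc/heap.h>
#include <cppgc/platform.h>

int main() {
  auto platform = std::make_shared<cppgc::DefaultPlatform>();

  // Replaces the removed DefaultPlatform::InitializeProcess(platform.get()).
  cppgc::InitializeProcess(platform->GetPageAllocator());

  std::unique_ptr<cppgc::Heap> heap = cppgc::Heap::Create(platform);

  // New accessor: hand the wrapped v8::Platform to code that expects one.
  v8::Platform* v8_platform = platform->GetV8Platform();
  (void)v8_platform;

  heap.reset();
  cppgc::ShutdownProcess();
  return 0;
}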
data/vendor/v8/include/cppgc/explicit-management.h CHANGED
@@ -15,11 +15,27 @@ namespace cppgc {

 class HeapHandle;

+namespace subtle {
+
+template <typename T>
+void FreeUnreferencedObject(HeapHandle& heap_handle, T& object);
+template <typename T>
+bool Resize(T& object, AdditionalBytes additional_bytes);
+
+}  // namespace subtle
+
 namespace internal {

-
-
+class ExplicitManagementImpl final {
+ private:
+  V8_EXPORT static void FreeUnreferencedObject(HeapHandle&, void*);
+  V8_EXPORT static bool Resize(void*, size_t);

+  template <typename T>
+  friend void subtle::FreeUnreferencedObject(HeapHandle&, T&);
+  template <typename T>
+  friend bool subtle::Resize(T&, AdditionalBytes);
+};
 }  // namespace internal

 namespace subtle {
@@ -45,7 +61,8 @@ template <typename T>
 void FreeUnreferencedObject(HeapHandle& heap_handle, T& object) {
   static_assert(IsGarbageCollectedTypeV<T>,
                 "Object must be of type GarbageCollected.");
-  internal::FreeUnreferencedObject(heap_handle,
+  internal::ExplicitManagementImpl::FreeUnreferencedObject(heap_handle,
+                                                           &object);
 }

 /**
@@ -73,7 +90,8 @@ template <typename T>
 bool Resize(T& object, AdditionalBytes additional_bytes) {
   static_assert(IsGarbageCollectedTypeV<T>,
                 "Object must be of type GarbageCollected.");
-  return internal::Resize(
+  return internal::ExplicitManagementImpl::Resize(
+      &object, sizeof(T) + additional_bytes.value);
 }

 }  // namespace subtle
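The public subtle API keeps its shape; only the internal dispatch moves into ExplicitManagementImpl. A caller-side sketch, where the Buffer type and Shrink() helper are illustrative assumptions:

#include <cppgc/allocation.h>
#include <cppgc/explicit-management.h>
#include <cppgc/garbage-collected.h>
#include <cppgc/visitor.h>

class Buffer final : public cppgc::GarbageCollected<Buffer> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

void Shrink(cppgc::HeapHandle& heap_handle, Buffer& buffer) {
  // Give back any trailing bytes previously requested via AdditionalBytes;
  // returns false when the object cannot be resized in place.
  cppgc::subtle::Resize(buffer, cppgc::AdditionalBytes(0));

  // Reclaim the object eagerly instead of waiting for the next collection.
  // The caller guarantees that no other references to `buffer` remain.
  cppgc::subtle::FreeUnreferencedObject(heap_handle, buffer);
}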
data/vendor/v8/include/cppgc/garbage-collected.h CHANGED
@@ -5,8 +5,6 @@
 #ifndef INCLUDE_CPPGC_GARBAGE_COLLECTED_H_
 #define INCLUDE_CPPGC_GARBAGE_COLLECTED_H_

-#include <type_traits>
-
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/platform.h"
 #include "cppgc/trace-trait.h"
@@ -16,28 +14,6 @@ namespace cppgc {

 class Visitor;

-namespace internal {
-
-class GarbageCollectedBase {
- public:
-  // Must use MakeGarbageCollected.
-  void* operator new(size_t) = delete;
-  void* operator new[](size_t) = delete;
-  // The garbage collector is taking care of reclaiming the object. Also,
-  // virtual destructor requires an unambiguous, accessible 'operator delete'.
-  void operator delete(void*) {
-#ifdef V8_ENABLE_CHECKS
-    internal::Abort();
-#endif  // V8_ENABLE_CHECKS
-  }
-  void operator delete[](void*) = delete;
-
- protected:
-  GarbageCollectedBase() = default;
-};
-
-}  // namespace internal
-
 /**
  * Base class for managed objects. Only descendent types of `GarbageCollected`
  * can be constructed using `MakeGarbageCollected()`. Must be inherited from as
@@ -74,11 +50,24 @@ class GarbageCollectedBase {
  * \endcode
  */
 template <typename T>
-class GarbageCollected
+class GarbageCollected {
  public:
   using IsGarbageCollectedTypeMarker = void;
   using ParentMostGarbageCollectedType = T;

+  // Must use MakeGarbageCollected.
+  void* operator new(size_t) = delete;
+  void* operator new[](size_t) = delete;
+  // The garbage collector is taking care of reclaiming the object. Also,
+  // virtual destructor requires an unambiguous, accessible 'operator delete'.
+  void operator delete(void*) {
+#ifdef V8_ENABLE_CHECKS
+    internal::Fatal(
+        "Manually deleting a garbage collected object is not allowed");
+#endif  // V8_ENABLE_CHECKS
+  }
+  void operator delete[](void*) = delete;
+
  protected:
   GarbageCollected() = default;
 };
@@ -101,7 +90,7 @@ class GarbageCollected : public internal::GarbageCollectedBase {
  * };
  * \endcode
  */
-class GarbageCollectedMixin
+class GarbageCollectedMixin {
  public:
   using IsGarbageCollectedMixinTypeMarker = void;

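With GarbageCollectedBase folded away, the new/delete restrictions now live directly on GarbageCollected<T>. A short sketch of the unchanged user-facing pattern; Node and Mixin are illustrative types, not part of the diff:

#include <cppgc/allocation.h>
#include <cppgc/garbage-collected.h>
#include <cppgc/member.h>
#include <cppgc/visitor.h>

class Mixin : public cppgc::GarbageCollectedMixin {
 public:
  void Trace(cppgc::Visitor*) const override {}
};

class Node final : public cppgc::GarbageCollected<Node>, public Mixin {
 public:
  void Trace(cppgc::Visitor* visitor) const override { visitor->Trace(next_); }

 private:
  cppgc::Member<Node> next_;
};

Node* NewNode(cppgc::AllocationHandle& handle) {
  // `new Node` is still rejected (operator new is deleted on GarbageCollected),
  // and `delete node` now trips the checked operator delete defined there.
  return cppgc::MakeGarbageCollected<Node>(handle);
}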
data/vendor/v8/include/cppgc/heap-consistency.h CHANGED
@@ -149,6 +149,19 @@ class HeapConsistency final {
     internal::WriteBarrier::GenerationalBarrier(params, slot);
   }

+  /**
+   * Generational barrier for source object that may contain outgoing pointers
+   * to objects in young generation.
+   *
+   * \param params The parameters retrieved from `GetWriteBarrierType()`.
+   * \param inner_pointer Pointer to the source object.
+   */
+  static V8_INLINE void GenerationalBarrierForSourceObject(
+      const WriteBarrierParams& params, const void* inner_pointer) {
+    internal::WriteBarrier::GenerationalBarrierForSourceObject(params,
+                                                               inner_pointer);
+  }
+
  private:
   HeapConsistency() = delete;
 };
data/vendor/v8/include/cppgc/heap-state.h CHANGED
@@ -38,6 +38,18 @@ class V8_EXPORT HeapState final {
    */
   static bool IsSweeping(const HeapHandle& heap_handle);

+  /*
+   * Returns whether the garbage collector is currently sweeping on the thread
+   * owning this heap. This API allows the caller to determine whether it has
+   * been called from a destructor of a managed object. This API is experimental
+   * and may be removed in future.
+   *
+   * \param heap_handle The corresponding heap.
+   * \returns true if the garbage collector is currently sweeping on this
+   * thread, and false otherwise.
+   */
+  static bool IsSweepingOnOwningThread(const HeapHandle& heap_handle);
+
   /**
    * Returns whether the garbage collector is in the atomic pause, i.e., the
    * mutator is stopped from running. This API is experimental and is expected
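The new query is aimed at finalization code that needs to know whether it is running as part of sweeping. A hedged sketch; keeping the HeapHandle by reference inside the object is an assumption of the example, not something the header prescribes:

#include <cppgc/garbage-collected.h>
#include <cppgc/heap-state.h>
#include <cppgc/visitor.h>

class Resource final : public cppgc::GarbageCollected<Resource> {
 public:
  explicit Resource(cppgc::HeapHandle& heap_handle) : heap_handle_(heap_handle) {}

  ~Resource() {
    if (cppgc::subtle::HeapState::IsSweepingOnOwningThread(heap_handle_)) {
      // Running inside the sweeper: neighboring managed objects may already
      // have been reclaimed, so do not touch them from here.
    }
  }

  void Trace(cppgc::Visitor*) const {}

 private:
  cppgc::HeapHandle& heap_handle_;
};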
data/vendor/v8/include/cppgc/heap.h CHANGED
@@ -68,8 +68,8 @@ class V8_EXPORT Heap {
      */
     kAtomic,
     /**
-     * Incremental marking
-     *
+     * Incremental marking interleaves marking with the rest of the application
+     * workload on the same thread.
      */
     kIncremental,
     /**
@@ -86,6 +86,11 @@
      * Atomic stop-the-world sweeping. All of sweeping is performed at once.
      */
     kAtomic,
+    /**
+     * Incremental sweeping interleaves sweeping with the rest of the
+     * application workload on the same thread.
+     */
+    kIncremental,
     /**
      * Incremental and concurrent sweeping. Sweeping is split and interleaved
      * with the rest of the application.
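The new SweepingType::kIncremental value restricts sweeping to the mutator thread without making it atomic. A sketch of selecting it at heap-creation time; the HeapOptions field names (sweeping_support in particular) are taken from this heap.h and should be verified against the header before use:

#include <memory>

#include <cppgc/default-platform.h>
#include <cppgc/heap.h>

std::unique_ptr<cppgc::Heap> CreateIncrementallySweptHeap(
    std::shared_ptr<cppgc::DefaultPlatform> platform) {
  cppgc::Heap::HeapOptions options = cppgc::Heap::HeapOptions::Default();
  // Sweep incrementally on the owning thread only; no concurrent sweeper tasks.
  options.sweeping_support = cppgc::Heap::SweepingType::kIncremental;
  return cppgc::Heap::Create(std::move(platform), std::move(options));
}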
data/vendor/v8/include/cppgc/internal/api-constants.h CHANGED
@@ -39,6 +39,14 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
 constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
 #endif

+static constexpr size_t kDefaultAlignment = sizeof(void*);
+
+// Maximum support alignment for a type as in `alignof(T)`.
+static constexpr size_t kMaxSupportedAlignment = 2 * kDefaultAlignment;
+
+// Granularity of heap allocations.
+constexpr size_t kAllocationGranularity = sizeof(void*);
+
 }  // namespace api_constants

 }  // namespace internal
data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h CHANGED
@@ -6,6 +6,8 @@
 #define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_

 #include <array>
+#include <cstddef>
+#include <cstdint>

 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/logging.h"
@@ -19,32 +21,41 @@ class HeapBase;

 #if defined(CPPGC_YOUNG_GENERATION)

-// AgeTable
-//
+// AgeTable is the bytemap needed for the fast generation check in the write
+// barrier. AgeTable contains entries that correspond to 512 bytes memory
+// regions (cards). Each entry in the table represents generation of the objects
+// that reside on the corresponding card (young, old or mixed).
 class AgeTable final {
-  static constexpr size_t
+  static constexpr size_t kRequiredSize = 1 * api_constants::kMB;
+  static constexpr size_t kAllocationGranularity =
+      api_constants::kAllocationGranularity;

  public:
-  enum class Age : uint8_t { kOld, kYoung,
+  enum class Age : uint8_t { kOld, kYoung, kMixed };

-  static constexpr size_t
+  static constexpr size_t kCardSizeInBytes =
+      (api_constants::kCagedHeapReservationSize / kAllocationGranularity) /
+      kRequiredSize;

-
-
+  void SetAge(uintptr_t cage_offset, Age age) {
+    table_[card(cage_offset)] = age;
+  }
+  V8_INLINE Age GetAge(uintptr_t cage_offset) const {
+    return table_[card(cage_offset)];
+  }

   void Reset(PageAllocator* allocator);

  private:
-
-
-
-  size_t entry(uintptr_t offset) const {
+  V8_INLINE size_t card(uintptr_t offset) const {
+    constexpr size_t kGranularityBits =
+        __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
     const size_t entry = offset >> kGranularityBits;
     CPPGC_DCHECK(table_.size() > entry);
     return entry;
   }

-  std::array<Age,
+  std::array<Age, kRequiredSize> table_;
 };

 static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
data/vendor/v8/include/cppgc/internal/finalizer-trait.h CHANGED
@@ -19,7 +19,8 @@ struct HasFinalizeGarbageCollectedObject : std::false_type {};

 template <typename T>
 struct HasFinalizeGarbageCollectedObject<
-    T,
+    T,
+    std::void_t<decltype(std::declval<T>().FinalizeGarbageCollectedObject())>>
     : std::true_type {};

 // The FinalizerTraitImpl specifies how to finalize objects.
data/vendor/v8/include/cppgc/internal/logging.h CHANGED
@@ -20,18 +20,18 @@ FatalImpl(const char*, const SourceLocation& = SourceLocation::Current());
 template <typename>
 struct EatParams {};

-#if DEBUG
+#if defined(DEBUG)
 #define CPPGC_DCHECK_MSG(condition, message)  \
   do {                                        \
     if (V8_UNLIKELY(!(condition))) {          \
       ::cppgc::internal::DCheckImpl(message); \
     }                                         \
   } while (false)
-#else
+#else  // !defined(DEBUG)
 #define CPPGC_DCHECK_MSG(condition, message)                \
   (static_cast<void>(::cppgc::internal::EatParams<decltype( \
       static_cast<void>(condition), message)>{}))
-#endif
+#endif  // !defined(DEBUG)

 #define CPPGC_DCHECK(condition) CPPGC_DCHECK_MSG(condition, #condition)

data/vendor/v8/include/cppgc/internal/persistent-node.h CHANGED
@@ -20,6 +20,7 @@ class Visitor;
 namespace internal {

 class CrossThreadPersistentRegion;
+class FatalOutOfMemoryHandler;

 // PersistentNode represents a variant of two states:
 // 1) traceable node with a back pointer to the Persistent object;
@@ -79,22 +80,31 @@ class V8_EXPORT PersistentRegionBase {
   using PersistentNodeSlots = std::array<PersistentNode, 256u>;

  public:
-  PersistentRegionBase() = default;
   // Clears Persistent fields to avoid stale pointers after heap teardown.
   ~PersistentRegionBase();

   PersistentRegionBase(const PersistentRegionBase&) = delete;
   PersistentRegionBase& operator=(const PersistentRegionBase&) = delete;

-
-
-
+  void Trace(Visitor*);
+
+  size_t NodesInUse() const;
+
+  void ClearAllUsedNodes();
+
+ protected:
+  explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler);
+
+  PersistentNode* TryAllocateNodeFromFreeList(void* owner,
+                                              TraceCallback trace) {
+    PersistentNode* node = nullptr;
+    if (V8_LIKELY(free_list_head_)) {
+      node = free_list_head_;
+      free_list_head_ = free_list_head_->FreeListNext();
+      CPPGC_DCHECK(!node->IsUsed());
+      node->InitializeAsUsedNode(owner, trace);
+      nodes_in_use_++;
     }
-    PersistentNode* node = free_list_head_;
-    free_list_head_ = free_list_head_->FreeListNext();
-    CPPGC_DCHECK(!node->IsUsed());
-    node->InitializeAsUsedNode(owner, trace);
-    nodes_in_use_++;
     return node;
   }

@@ -107,21 +117,19 @@ class V8_EXPORT PersistentRegionBase {
     nodes_in_use_--;
   }

-
-
-  size_t NodesInUse() const;
-
-  void ClearAllUsedNodes();
+  PersistentNode* RefillFreeListAndAllocateNode(void* owner,
+                                                TraceCallback trace);

  private:
-  void EnsureNodeSlots();
-
   template <typename PersistentBaseClass>
   void ClearAllUsedNodes();

+  void RefillFreeList();
+
   std::vector<std::unique_ptr<PersistentNodeSlots>> nodes_;
   PersistentNode* free_list_head_ = nullptr;
   size_t nodes_in_use_ = 0;
+  const FatalOutOfMemoryHandler& oom_handler_;

   friend class CrossThreadPersistentRegion;
 };
@@ -130,7 +138,7 @@ class V8_EXPORT PersistentRegionBase {
 // freeing happens only on the thread that created the region.
 class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
  public:
-  PersistentRegion();
+  explicit PersistentRegion(const FatalOutOfMemoryHandler&);
   // Clears Persistent fields to avoid stale pointers after heap teardown.
   ~PersistentRegion() = default;

@@ -138,21 +146,22 @@ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
   PersistentRegion& operator=(const PersistentRegion&) = delete;

   V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
-
-
-
-
+    CPPGC_DCHECK(IsCreationThread());
+    auto* node = TryAllocateNodeFromFreeList(owner, trace);
+    if (V8_LIKELY(node)) return node;
+
+    // Slow path allocation allows for checking thread correspondence.
+    CPPGC_CHECK(IsCreationThread());
+    return RefillFreeListAndAllocateNode(owner, trace);
   }

   V8_INLINE void FreeNode(PersistentNode* node) {
-
-    CheckIsCreationThread();
-#endif  // V8_ENABLE_CHECKS
+    CPPGC_DCHECK(IsCreationThread());
     PersistentRegionBase::FreeNode(node);
   }

  private:
-
+  bool IsCreationThread();

   int creation_thread_id_;
 };
@@ -172,7 +181,7 @@ class V8_EXPORT PersistentRegionLock final {
 class V8_EXPORT CrossThreadPersistentRegion final
     : protected PersistentRegionBase {
  public:
-  CrossThreadPersistentRegion()
+  explicit CrossThreadPersistentRegion(const FatalOutOfMemoryHandler&);
   // Clears Persistent fields to avoid stale pointers after heap teardown.
   ~CrossThreadPersistentRegion();

@@ -182,7 +191,10 @@ class V8_EXPORT CrossThreadPersistentRegion final

   V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) {
     PersistentRegionLock::AssertLocked();
-
+    auto* node = TryAllocateNodeFromFreeList(owner, trace);
+    if (V8_LIKELY(node)) return node;
+
+    return RefillFreeListAndAllocateNode(owner, trace);
   }

   V8_INLINE void FreeNode(PersistentNode* node) {