libv8-node 19.9.0.0-arm64-darwin → 20.12.1.0-arm64-darwin
- checksums.yaml +4 -4
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/arm64-darwin/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/cross-thread-persistent.h +4 -2
- data/vendor/v8/include/cppgc/heap-consistency.h +2 -2
- data/vendor/v8/include/cppgc/heap-handle.h +5 -0
- data/vendor/v8/include/cppgc/internal/api-constants.h +4 -1
- data/vendor/v8/include/cppgc/internal/gc-info.h +35 -33
- data/vendor/v8/include/cppgc/internal/member-storage.h +19 -7
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +38 -2
- data/vendor/v8/include/cppgc/internal/write-barrier.h +15 -5
- data/vendor/v8/include/cppgc/macros.h +10 -1
- data/vendor/v8/include/cppgc/member.h +167 -129
- data/vendor/v8/include/cppgc/persistent.h +22 -15
- data/vendor/v8/include/cppgc/platform.h +6 -4
- data/vendor/v8/include/cppgc/type-traits.h +4 -3
- data/vendor/v8/include/cppgc/visitor.h +16 -1
- data/vendor/v8/include/libplatform/v8-tracing.h +2 -2
- data/vendor/v8/include/v8-array-buffer.h +59 -0
- data/vendor/v8/include/v8-callbacks.h +14 -1
- data/vendor/v8/include/v8-context.h +50 -3
- data/vendor/v8/include/v8-cppgc.h +15 -0
- data/vendor/v8/include/v8-data.h +1 -1
- data/vendor/v8/include/v8-embedder-heap.h +0 -169
- data/vendor/v8/include/v8-fast-api-calls.h +7 -3
- data/vendor/v8/include/v8-function-callback.h +69 -42
- data/vendor/v8/include/v8-function.h +1 -0
- data/vendor/v8/include/v8-inspector.h +20 -5
- data/vendor/v8/include/v8-internal.h +242 -150
- data/vendor/v8/include/v8-isolate.h +30 -40
- data/vendor/v8/include/v8-local-handle.h +81 -48
- data/vendor/v8/include/v8-metrics.h +28 -2
- data/vendor/v8/include/v8-microtask-queue.h +5 -0
- data/vendor/v8/include/v8-object.h +38 -3
- data/vendor/v8/include/v8-persistent-handle.h +25 -16
- data/vendor/v8/include/v8-platform.h +79 -10
- data/vendor/v8/include/v8-primitive.h +19 -12
- data/vendor/v8/include/v8-profiler.h +59 -31
- data/vendor/v8/include/v8-script.h +32 -5
- data/vendor/v8/include/v8-snapshot.h +4 -8
- data/vendor/v8/include/v8-template.h +3 -1
- data/vendor/v8/include/v8-traced-handle.h +22 -28
- data/vendor/v8/include/v8-util.h +9 -3
- data/vendor/v8/include/v8-value.h +31 -4
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8-wasm.h +2 -1
- data/vendor/v8/include/v8config.h +73 -2
- metadata +3 -3
data/vendor/v8/include/cppgc/platform.h CHANGED
@@ -33,8 +33,9 @@ class V8_EXPORT Platform {
   virtual ~Platform() = default;
 
   /**
-   *
-   * support structures.
+   * \returns the allocator used by cppgc to allocate its heap and various
+   * support structures. Returning nullptr results in using the `PageAllocator`
+   * provided by `cppgc::InitializeProcess()` instead.
    */
   virtual PageAllocator* GetPageAllocator() = 0;
 
@@ -133,9 +134,10 @@ class V8_EXPORT Platform {
  * Can be called multiple times when paired with `ShutdownProcess()`.
  *
  * \param page_allocator The allocator used for maintaining meta data. Must stay
- * always alive and not change between multiple calls to InitializeProcess.
+ * always alive and not change between multiple calls to InitializeProcess. If
+ * no allocator is provided, a default internal version will be used.
  */
-V8_EXPORT void InitializeProcess(PageAllocator* page_allocator);
+V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr);
 
 /**
  * Must be called after destroying the last used heap. Some process-global
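With the defaulted parameter above, embedders can initialize cppgc without supplying their own page allocator. A minimal sketch, not part of the gem, assuming cppgc's DefaultPlatform from libplatform:

#include <memory>

#include "cppgc/default-platform.h"
#include "cppgc/heap.h"
#include "cppgc/platform.h"

int main() {
  auto platform = std::make_shared<cppgc::DefaultPlatform>();
  // Previously an explicit allocator was required:
  //   cppgc::InitializeProcess(platform->GetPageAllocator());
  // With the defaulted parameter, cppgc's internal allocator is used instead.
  cppgc::InitializeProcess();
  std::unique_ptr<cppgc::Heap> heap = cppgc::Heap::Create(platform);
  heap.reset();
  cppgc::ShutdownProcess();
  return 0;
}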
data/vendor/v8/include/cppgc/type-traits.h CHANGED
@@ -16,7 +16,7 @@ class Visitor;
 
 namespace internal {
 template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
-          typename CheckingPolicy>
+          typename CheckingPolicy, typename StorageType>
 class BasicMember;
 struct DijkstraWriteBarrierPolicy;
 struct NoWriteBarrierPolicy;
@@ -126,9 +126,10 @@ template <typename BasicMemberCandidate, typename WeaknessTag,
           typename WriteBarrierPolicy>
 struct IsSubclassOfBasicMemberTemplate {
  private:
-  template <typename T, typename CheckingPolicy>
+  template <typename T, typename CheckingPolicy, typename StorageType>
   static std::true_type SubclassCheck(
-      BasicMember<T, WeaknessTag, WriteBarrierPolicy, CheckingPolicy
+      BasicMember<T, WeaknessTag, WriteBarrierPolicy, CheckingPolicy,
+                  StorageType>*);
   static std::false_type SubclassCheck(...);
 
  public:
data/vendor/v8/include/cppgc/visitor.h CHANGED
@@ -99,6 +99,20 @@ class V8_EXPORT Visitor {
                        &HandleWeak<WeakMember<T>>, &weak_member);
   }
 
+#if defined(CPPGC_POINTER_COMPRESSION)
+  /**
+   * Trace method for UncompressedMember.
+   *
+   * \param member UncompressedMember reference retaining an object.
+   */
+  template <typename T>
+  void Trace(const subtle::UncompressedMember<T>& member) {
+    const T* value = member.GetRawAtomic();
+    CPPGC_DCHECK(value != kSentinelPointer);
+    TraceImpl(value);
+  }
+#endif  // defined(CPPGC_POINTER_COMPRESSION)
+
   /**
    * Trace method for inlined objects that are not allocated themselves but
    * otherwise follow managed heap layout and have a Trace() method.
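For context, a hypothetical cppgc class that stores a subtle::UncompressedMember field and reports it through the Trace overload added above might look like the following sketch. The class and field names are illustrative and the overload is only dispatched to when CPPGC_POINTER_COMPRESSION is defined; this is not code from the diff.

#include "cppgc/garbage-collected.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

// Illustrative only: a node that keeps one field uncompressed, e.g. because
// the raw pointer is handed to non-cppgc code frequently.
class Node : public cppgc::GarbageCollected<Node> {
 public:
  // Under pointer compression, the visitor dispatches to the
  // UncompressedMember overload shown in the hunk above.
  void Trace(cppgc::Visitor* visitor) const { visitor->Trace(next_); }

 private:
  cppgc::subtle::UncompressedMember<Node> next_;
};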
@@ -229,7 +243,8 @@ class V8_EXPORT Visitor {
   }
 
   /**
-   * Trace method for retaining containers weakly.
+   * Trace method for retaining containers weakly. Note that weak containers
+   * should emit write barriers.
    *
    * \param object reference to the container.
    * \param callback to be invoked.
data/vendor/v8/include/libplatform/v8-tracing.h CHANGED
@@ -282,12 +282,12 @@ class V8_PLATFORM_EXPORT TracingController
       const char* name, uint64_t handle) override;
 
   static const char* GetCategoryGroupName(const uint8_t* category_enabled_flag);
-#endif  // !defined(V8_USE_PERFETTO)
 
   void AddTraceStateObserver(
       v8::TracingController::TraceStateObserver* observer) override;
   void RemoveTraceStateObserver(
       v8::TracingController::TraceStateObserver* observer) override;
+#endif  // !defined(V8_USE_PERFETTO)
 
   void StartTracing(TraceConfig* trace_config);
   void StopTracing();
@@ -307,7 +307,6 @@ class V8_PLATFORM_EXPORT TracingController
   std::unique_ptr<base::Mutex> mutex_;
   std::unique_ptr<TraceConfig> trace_config_;
   std::atomic_bool recording_{false};
-  std::unordered_set<v8::TracingController::TraceStateObserver*> observers_;
 
 #if defined(V8_USE_PERFETTO)
   std::ostream* output_stream_ = nullptr;
@@ -316,6 +315,7 @@ class V8_PLATFORM_EXPORT TracingController
   TraceEventListener* listener_for_testing_ = nullptr;
   std::unique_ptr<perfetto::TracingSession> tracing_session_;
 #else  // !defined(V8_USE_PERFETTO)
+  std::unordered_set<v8::TracingController::TraceStateObserver*> observers_;
   std::unique_ptr<TraceBuffer> trace_buffer_;
 #endif  // !defined(V8_USE_PERFETTO)
 
data/vendor/v8/include/v8-array-buffer.h CHANGED
@@ -53,12 +53,28 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
    */
   size_t ByteLength() const;
 
+  /**
+   * The maximum length (in bytes) that this backing store may grow to.
+   *
+   * If this backing store was created for a resizable ArrayBuffer or a growable
+   * SharedArrayBuffer, it is >= ByteLength(). Otherwise it is ==
+   * ByteLength().
+   */
+  size_t MaxByteLength() const;
+
   /**
    * Indicates whether the backing store was created for an ArrayBuffer or
    * a SharedArrayBuffer.
    */
   bool IsShared() const;
 
+  /**
+   * Indicates whether the backing store was created for a resizable ArrayBuffer
+   * or a growable SharedArrayBuffer, and thus may be resized by user JavaScript
+   * code.
+   */
+  bool IsResizableByUserJavaScript() const;
+
   /**
    * Prevent implicit instantiation of operator delete with size_t argument.
    * The size_t argument would be incorrect because ptr points to the
@@ -189,6 +205,11 @@ class V8_EXPORT ArrayBuffer : public Object {
    */
   size_t ByteLength() const;
 
+  /**
+   * Maximum length in bytes.
+   */
+  size_t MaxByteLength() const;
+
   /**
    * Create a new ArrayBuffer. Allocate |byte_length| bytes.
    * Allocated memory will be owned by a created ArrayBuffer and
@@ -235,6 +256,21 @@ class V8_EXPORT ArrayBuffer : public Object {
       void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
       void* deleter_data);
 
+  /**
+   * Returns a new resizable standalone BackingStore that is allocated using the
+   * array buffer allocator of the isolate. The result can be later passed to
+   * ArrayBuffer::New.
+   *
+   * |byte_length| must be <= |max_byte_length|.
+   *
+   * This function is usable without an isolate. Unlike |NewBackingStore| calls
+   * with an isolate, GCs cannot be triggered, and there are no
+   * retries. Allocation failure will cause the function to crash with an
+   * out-of-memory error.
+   */
+  static std::unique_ptr<BackingStore> NewResizableBackingStore(
+      size_t byte_length, size_t max_byte_length);
+
   /**
    * Returns true if this ArrayBuffer may be detached.
    */
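A hedged usage sketch for the resizable-buffer additions above; the function name, the sizes, and the assumption of an active HandleScope are illustrative and not part of the gem:

#include <memory>

#include "v8-array-buffer.h"
#include "v8-isolate.h"

v8::Local<v8::ArrayBuffer> MakeResizableBuffer(v8::Isolate* isolate) {
  // Assumes the caller has an active HandleScope and entered context.
  // Allocate 1 KiB now; allow JavaScript to grow the buffer up to 1 MiB.
  std::shared_ptr<v8::BackingStore> store =
      v8::ArrayBuffer::NewResizableBackingStore(/*byte_length=*/1024,
                                                /*max_byte_length=*/1 << 20);
  v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(isolate, store);
  // Per the new accessors: buffer->ByteLength() starts at 1024 and
  // buffer->MaxByteLength() reports 1 << 20.
  return buffer;
}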
@@ -251,8 +287,26 @@ class V8_EXPORT ArrayBuffer : public Object {
    * preventing JavaScript from ever accessing underlying backing store.
    * ArrayBuffer should have been externalized and must be detachable.
    */
+  V8_DEPRECATE_SOON(
+      "Use the version which takes a key parameter (passing a null handle is "
+      "ok).")
   void Detach();
 
+  /**
+   * Detaches this ArrayBuffer and all its views (typed arrays).
+   * Detaching sets the byte length of the buffer and all typed arrays to zero,
+   * preventing JavaScript from ever accessing underlying backing store.
+   * ArrayBuffer should have been externalized and must be detachable. Returns
+   * Nothing if the key didn't pass the [[ArrayBufferDetachKey]] check,
+   * Just(true) otherwise.
+   */
+  V8_WARN_UNUSED_RESULT Maybe<bool> Detach(v8::Local<v8::Value> key);
+
+  /**
+   * Sets the ArrayBufferDetachKey.
+   */
+  void SetDetachKey(v8::Local<v8::Value> key);
+
   /**
    * Get a shared pointer to the backing store of this array buffer. This
    * pointer coordinates the lifetime management of the internal storage
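A small sketch of the new detach-key flow; the helper below is assumed embedder-side code, not part of the diff. The key set via SetDetachKey must be passed back to Detach, and a failed [[ArrayBufferDetachKey]] check yields Nothing.

#include "v8-array-buffer.h"
#include "v8-isolate.h"
#include "v8-maybe.h"
#include "v8-primitive.h"

bool GuardedDetach(v8::Isolate* isolate, v8::Local<v8::ArrayBuffer> buffer) {
  // Assumes an active HandleScope and that |buffer| is detachable.
  v8::Local<v8::Symbol> key = v8::Symbol::New(isolate);
  buffer->SetDetachKey(key);
  // Detaching with the matching key succeeds; a mismatched key fails the
  // [[ArrayBufferDetachKey]] check and returns Nothing.
  v8::Maybe<bool> result = buffer->Detach(key);
  return result.IsJust();
}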
@@ -374,6 +428,11 @@ class V8_EXPORT SharedArrayBuffer : public Object {
    */
   size_t ByteLength() const;
 
+  /**
+   * Maximum length in bytes.
+   */
+  size_t MaxByteLength() const;
+
   /**
    * Create a new SharedArrayBuffer. Allocate |byte_length| bytes.
    * Allocated memory will be owned by a created SharedArrayBuffer and
data/vendor/v8/include/v8-callbacks.h CHANGED
@@ -7,6 +7,7 @@
 
 #include <stddef.h>
 
+#include <functional>
 #include <string>
 
 #include "cppgc/common.h"
@@ -237,7 +238,8 @@ using LogEventCallback = void (*)(const char* name,
 enum class CrashKeyId {
   kIsolateAddress,
   kReadonlySpaceFirstPageAddress,
-  kMapSpaceFirstPageAddress,
+  kMapSpaceFirstPageAddress V8_ENUM_DEPRECATE_SOON("Map space got removed"),
+  kOldSpaceFirstPageAddress,
   kCodeRangeBaseAddress,
   kCodeSpaceFirstPageAddress,
   kDumpType,
@@ -327,6 +329,10 @@ using WasmSimdEnabledCallback = bool (*)(Local<Context> context);
 // --- Callback for checking if WebAssembly exceptions are enabled ---
 using WasmExceptionsEnabledCallback = bool (*)(Local<Context> context);
 
+// --- Callback for checking if WebAssembly GC is enabled ---
+// If the callback returns true, it will also enable Wasm stringrefs.
+using WasmGCEnabledCallback = bool (*)(Local<Context> context);
+
 // --- Callback for checking if the SharedArrayBuffer constructor is enabled ---
 using SharedArrayBufferConstructorEnabledCallback =
     bool (*)(Local<Context> context);
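A sketch of an embedder callback matching the new WasmGCEnabledCallback typedef. The registration call is an assumption: it presumes an Isolate::SetWasmGCEnabledCallback setter mirroring the other Wasm feature callbacks, which is not shown in this diff.

#include "v8-callbacks.h"
#include "v8-context.h"
#include "v8-isolate.h"

// Decide per context whether Wasm GC (and, per the comment above, stringrefs)
// should be enabled; a real embedder might consult data stored on |context|.
static bool WasmGCEnabled(v8::Local<v8::Context> context) { return true; }

void InstallWasmGCPolicy(v8::Isolate* isolate) {
  // Assumption: the setter name mirrors SetWasmSimdEnabledCallback and friends.
  isolate->SetWasmGCEnabledCallback(WasmGCEnabled);
}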
@@ -367,6 +373,13 @@ using HostImportModuleDynamicallyCallback = MaybeLocal<Promise> (*)(
     Local<Value> resource_name, Local<String> specifier,
     Local<FixedArray> import_assertions);
 
+/**
+ * Callback for requesting a compile hint for a function from the embedder. The
+ * first parameter is the position of the function in source code and the second
+ * parameter is embedder data to be passed back.
+ */
+using CompileHintCallback = bool (*)(int, void*);
+
 /**
  * HostInitializeImportMetaObjectCallback is called the first time import.meta
  * is accessed for a module. Subsequent access will reuse the same value.
data/vendor/v8/include/v8-context.h CHANGED
@@ -7,8 +7,11 @@
 
 #include <stdint.h>
 
+#include <vector>
+
 #include "v8-data.h"          // NOLINT(build/include_directory)
 #include "v8-local-handle.h"  // NOLINT(build/include_directory)
+#include "v8-maybe.h"         // NOLINT(build/include_directory)
 #include "v8-snapshot.h"      // NOLINT(build/include_directory)
 #include "v8config.h"         // NOLINT(build/include_directory)
 
@@ -163,12 +166,51 @@ class V8_EXPORT Context : public Data {
    */
   void Exit();
 
+  /**
+   * Delegate to help with Deep freezing embedder-specific objects (such as
+   * JSApiObjects) that can not be frozen natively.
+   */
+  class DeepFreezeDelegate {
+   public:
+    /**
+     * Performs embedder-specific operations to freeze the provided embedder
+     * object. The provided object *will* be frozen by DeepFreeze after this
+     * function returns, so only embedder-specific objects need to be frozen.
+     * This function *may not* create new JS objects or perform JS allocations.
+     * Any v8 objects reachable from the provided embedder object that should
+     * also be considered for freezing should be added to the children_out
+     * parameter. Returns true if the operation completed successfully.
+     */
+    virtual bool FreezeEmbedderObjectAndGetChildren(
+        Local<Object> obj, std::vector<Local<Object>>& children_out) = 0;
+  };
+
+  /**
+   * Attempts to recursively freeze all objects reachable from this context.
+   * Some objects (generators, iterators, non-const closures) can not be frozen
+   * and will cause this method to throw an error. An optional delegate can be
+   * provided to help freeze embedder-specific objects.
+   *
+   * Freezing occurs in two steps:
+   * 1. "Marking" where we iterate through all objects reachable by this
+   *    context, accumulating a list of objects that need to be frozen and
+   *    looking for objects that can't be frozen. This step is separated because
+   *    it is more efficient when we can assume there is no garbage collection.
+   * 2. "Freezing" where we go through the list of objects and freezing them.
+   *    This effectively requires copying them so it may trigger garbage
+   *    collection.
+   */
+  Maybe<void> DeepFreeze(DeepFreezeDelegate* delegate = nullptr);
+
   /** Returns the isolate associated with a current context. */
   Isolate* GetIsolate();
 
   /** Returns the microtask queue associated with a current context. */
   MicrotaskQueue* GetMicrotaskQueue();
 
+  /** Sets the microtask queue associated with the current context. */
+  void SetMicrotaskQueue(MicrotaskQueue* queue);
+
   /**
    * The field at kDebugIdIndex used to be reserved for the inspector.
    * It now serves no purpose.
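An illustrative sketch of calling the new DeepFreeze API declared above; the helper function and its error handling are assumptions, not code from the gem.

#include "v8-context.h"
#include "v8-exception.h"
#include "v8-maybe.h"

// Attempts to freeze everything reachable from |context|; returns false if
// something unfreezable (e.g. a generator) caused DeepFreeze to throw.
bool TryDeepFreeze(v8::Local<v8::Context> context) {
  v8::Isolate* isolate = context->GetIsolate();
  v8::TryCatch try_catch(isolate);
  // No DeepFreezeDelegate: acceptable when the context holds no embedder
  // objects that need custom freezing.
  v8::Maybe<void> result = context->DeepFreeze(nullptr);
  return !result.IsNothing() && !try_catch.HasCaught();
}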
@@ -362,13 +404,18 @@ Local<Value> Context::GetEmbedderData(int index) {
 #ifdef V8_COMPRESS_POINTERS
   // We read the full pointer value and then decompress it in order to avoid
   // dealing with potential endiannes issues.
-  value =
-      I::DecompressTaggedAnyField(embedder_data, static_cast<uint32_t>(value));
+  value = I::DecompressTaggedField(embedder_data, static_cast<uint32_t>(value));
 #endif
+
+#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+  return Local<Value>(reinterpret_cast<Value*>(value));
+#else
   internal::Isolate* isolate = internal::IsolateFromNeverReadOnlySpaceObject(
       *reinterpret_cast<A*>(this));
   A* result = HandleScope::CreateHandle(isolate, value);
   return Local<Value>(reinterpret_cast<Value*>(result));
+#endif
+
 #else
   return SlowGetEmbedderData(index);
 #endif
@@ -378,7 +425,7 @@ void* Context::GetAlignedPointerFromEmbedderData(int index) {
 #if !defined(V8_ENABLE_CHECKS)
   using A = internal::Address;
   using I = internal::Internals;
-  A ctx =
+  A ctx = internal::ValueHelper::ValueAsAddress(this);
   A embedder_data =
       I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset);
   int value_offset = I::kEmbedderDataArrayHeaderSize +
data/vendor/v8/include/v8-cppgc.h CHANGED
@@ -77,6 +77,12 @@ struct WrapperDescriptor final {
 };
 
 struct V8_EXPORT CppHeapCreateParams {
+  CppHeapCreateParams(
+      std::vector<std::unique_ptr<cppgc::CustomSpaceBase>> custom_spaces,
+      WrapperDescriptor wrapper_descriptor)
+      : custom_spaces(std::move(custom_spaces)),
+        wrapper_descriptor(wrapper_descriptor) {}
+
   CppHeapCreateParams(const CppHeapCreateParams&) = delete;
   CppHeapCreateParams& operator=(const CppHeapCreateParams&) = delete;
 
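A sketch of creating a CppHeap via the new CppHeapCreateParams constructor; the wrapper-descriptor field indices and embedder id below are placeholder assumptions, not values from the diff.

#include <memory>
#include <vector>

#include "v8-cppgc.h"
#include "v8-platform.h"

std::unique_ptr<v8::CppHeap> CreateCppHeap(v8::Platform* platform) {
  // Placeholder layout: internal field 0 holds the type, field 1 the instance
  // pointer; 0xDEAD is an arbitrary embedder id chosen for this sketch.
  v8::WrapperDescriptor descriptor(
      /*wrappable_type_index=*/0,
      /*wrappable_instance_index=*/1,
      /*embedder_id_for_garbage_collected=*/0xDEAD);
  v8::CppHeapCreateParams params({}, descriptor);  // no custom spaces
  return v8::CppHeap::Create(platform, params);
}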
@@ -98,6 +104,10 @@ struct V8_EXPORT CppHeapCreateParams {
 
 /**
  * A heap for allocating managed C++ objects.
+ *
+ * Similar to v8::Isolate, the heap may only be accessed from one thread at a
+ * time. The heap may be used from different threads using the
+ * v8::Locker/v8::Unlocker APIs which is different from generic Oilpan.
  */
 class V8_EXPORT CppHeap {
  public:
@@ -167,6 +177,11 @@ class V8_EXPORT CppHeap {
   void CollectGarbageInYoungGenerationForTesting(
       cppgc::EmbedderStackState stack_state);
 
+  /**
+   * \returns the wrapper descriptor of this CppHeap.
+   */
+  v8::WrapperDescriptor wrapper_descriptor() const;
+
  private:
   CppHeap() = default;
 
data/vendor/v8/include/v8-embedder-heap.h CHANGED
@@ -5,27 +5,14 @@
 #ifndef INCLUDE_V8_EMBEDDER_HEAP_H_
 #define INCLUDE_V8_EMBEDDER_HEAP_H_
 
-#include <stddef.h>
-#include <stdint.h>
-
-#include <utility>
-#include <vector>
-
-#include "cppgc/common.h"
-#include "v8-local-handle.h"   // NOLINT(build/include_directory)
 #include "v8-traced-handle.h"  // NOLINT(build/include_directory)
 #include "v8config.h"          // NOLINT(build/include_directory)
 
 namespace v8 {
 
-class Data;
 class Isolate;
 class Value;
 
-namespace internal {
-class LocalEmbedderHeapTracer;
-}  // namespace internal
-
 /**
  * Handler for embedder roots on non-unified heap garbage collections.
  */
@@ -62,162 +49,6 @@ class V8_EXPORT EmbedderRootsHandler {
   virtual void ResetRoot(const v8::TracedReference<v8::Value>& handle) = 0;
 };
 
-/**
- * Interface for tracing through the embedder heap. During a V8 garbage
- * collection, V8 collects hidden fields of all potential wrappers, and at the
- * end of its marking phase iterates the collection and asks the embedder to
- * trace through its heap and use reporter to report each JavaScript object
- * reachable from any of the given wrappers.
- */
-class V8_EXPORT
-// GCC doesn't like combining __attribute__(()) with [[deprecated]].
-#ifdef __clang__
-    V8_DEPRECATE_SOON("Use CppHeap when working with v8::TracedReference.")
-#endif  // __clang__
-    EmbedderHeapTracer {
- public:
-  using EmbedderStackState = cppgc::EmbedderStackState;
-
-  enum TraceFlags : uint64_t {
-    kNoFlags = 0,
-    kReduceMemory = 1 << 0,
-    kForced = 1 << 2,
-  };
-
-  /**
-   * Interface for iterating through |TracedReference| handles.
-   */
-  class V8_EXPORT TracedGlobalHandleVisitor {
-   public:
-    virtual ~TracedGlobalHandleVisitor() = default;
-    virtual void VisitTracedReference(const TracedReference<Value>& handle) {}
-  };
-
-  /**
-   * Summary of a garbage collection cycle. See |TraceEpilogue| on how the
-   * summary is reported.
-   */
-  struct TraceSummary {
-    /**
-     * Time spent managing the retained memory in milliseconds. This can e.g.
-     * include the time tracing through objects in the embedder.
-     */
-    double time = 0.0;
-
-    /**
-     * Memory retained by the embedder through the |EmbedderHeapTracer|
-     * mechanism in bytes.
-     */
-    size_t allocated_size = 0;
-  };
-
-  virtual ~EmbedderHeapTracer() = default;
-
-  /**
-   * Iterates all |TracedReference| handles created for the |v8::Isolate| the
-   * tracer is attached to.
-   */
-  void IterateTracedGlobalHandles(TracedGlobalHandleVisitor* visitor);
-
-  /**
-   * Called by the embedder to set the start of the stack which is e.g. used by
-   * V8 to determine whether handles are used from stack or heap.
-   */
-  void SetStackStart(void* stack_start);
-
-  /**
-   * Called by v8 to register internal fields of found wrappers.
-   *
-   * The embedder is expected to store them somewhere and trace reachable
-   * wrappers from them when called through |AdvanceTracing|.
-   */
-  virtual void RegisterV8References(
-      const std::vector<std::pair<void*, void*>>& embedder_fields) = 0;
-
-  void RegisterEmbedderReference(const BasicTracedReference<v8::Data>& ref);
-
-  /**
-   * Called at the beginning of a GC cycle.
-   */
-  virtual void TracePrologue(TraceFlags flags) {}
-
-  /**
-   * Called to advance tracing in the embedder.
-   *
-   * The embedder is expected to trace its heap starting from wrappers reported
-   * by RegisterV8References method, and report back all reachable wrappers.
-   * Furthermore, the embedder is expected to stop tracing by the given
-   * deadline. A deadline of infinity means that tracing should be finished.
-   *
-   * Returns |true| if tracing is done, and false otherwise.
-   */
-  virtual bool AdvanceTracing(double deadline_in_ms) = 0;
-
-  /*
-   * Returns true if there no more tracing work to be done (see AdvanceTracing)
-   * and false otherwise.
-   */
-  virtual bool IsTracingDone() = 0;
-
-  /**
-   * Called at the end of a GC cycle.
-   *
-   * Note that allocation is *not* allowed within |TraceEpilogue|. Can be
-   * overriden to fill a |TraceSummary| that is used by V8 to schedule future
-   * garbage collections.
-   */
-  virtual void TraceEpilogue(TraceSummary* trace_summary) {}
-
-  /**
-   * Called upon entering the final marking pause. No more incremental marking
-   * steps will follow this call.
-   */
-  virtual void EnterFinalPause(EmbedderStackState stack_state) = 0;
-
-  /*
-   * Called by the embedder to request immediate finalization of the currently
-   * running tracing phase that has been started with TracePrologue and not
-   * yet finished with TraceEpilogue.
-   *
-   * Will be a noop when currently not in tracing.
-   *
-   * This is an experimental feature.
-   */
-  void FinalizeTracing();
-
-  /**
-   * See documentation on EmbedderRootsHandler.
-   */
-  virtual bool IsRootForNonTracingGC(
-      const v8::TracedReference<v8::Value>& handle);
-
-  /**
-   * See documentation on EmbedderRootsHandler.
-   */
-  virtual void ResetHandleInNonTracingGC(
-      const v8::TracedReference<v8::Value>& handle);
-
-  /*
-   * Called by the embedder to signal newly allocated or freed memory. Not bound
-   * to tracing phases. Embedders should trade off when increments are reported
-   * as V8 may consult global heuristics on whether to trigger garbage
-   * collection on this change.
-   */
-  void IncreaseAllocatedSize(size_t bytes);
-  void DecreaseAllocatedSize(size_t bytes);
-
-  /*
-   * Returns the v8::Isolate this tracer is attached too and |nullptr| if it
-   * is not attached to any v8::Isolate.
-   */
-  v8::Isolate* isolate() const { return v8_isolate_; }
-
- protected:
-  v8::Isolate* v8_isolate_ = nullptr;
-
-  friend class internal::LocalEmbedderHeapTracer;
-};
-
 }  // namespace v8
 
 #endif  // INCLUDE_V8_EMBEDDER_HEAP_H_
data/vendor/v8/include/v8-fast-api-calls.h CHANGED
@@ -247,6 +247,7 @@ class CTypeInfo {
     kUint64,
     kFloat32,
     kFloat64,
+    kPointer,
     kV8Value,
     kSeqOneByteString,
     kApiObject,  // This will be deprecated once all users have
@@ -435,6 +436,7 @@ struct AnyCType {
   uint64_t uint64_value;
   float float_value;
   double double_value;
+  void* pointer_value;
   Local<Object> object_value;
   Local<Array> sequence_value;
   const FastApiTypedArray<uint8_t>* uint8_ta_value;
@@ -620,6 +622,7 @@ class CFunctionInfoImpl : public CFunctionInfo {
             kReturnType == CTypeInfo::Type::kUint32 ||
             kReturnType == CTypeInfo::Type::kFloat32 ||
             kReturnType == CTypeInfo::Type::kFloat64 ||
+            kReturnType == CTypeInfo::Type::kPointer ||
             kReturnType == CTypeInfo::Type::kAny,
         "64-bit int, string and api object values are not currently "
         "supported return types.");
@@ -658,13 +661,14 @@ struct CTypeInfoTraits {};
 
 #define PRIMITIVE_C_TYPES(V) \
   V(bool, kBool)             \
+  V(uint8_t, kUint8)         \
   V(int32_t, kInt32)         \
   V(uint32_t, kUint32)       \
   V(int64_t, kInt64)         \
   V(uint64_t, kUint64)       \
   V(float, kFloat32)         \
   V(double, kFloat64)        \
-  V(
+  V(void*, kPointer)
 
 // Same as above, but includes deprecated types for compatibility.
 #define ALL_C_TYPES(V) \
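With kPointer wired into the primitive C types above, a fast API callback can take or return raw void* values. A hedged sketch; the callback and its eventual attachment to a FunctionTemplate are illustrative, and only v8::CFunction::Make is taken from the existing fast-call API:

#include "v8-fast-api-calls.h"

// Illustrative fast-path callback: receives the receiver object plus an
// opaque pointer previously handed out to JavaScript, and returns another.
static void* FastSwapHandle(v8::Local<v8::Object> receiver, void* handle) {
  return handle;  // placeholder logic for the sketch
}

// The CFunction would then be registered as the fast overload of a slow
// callback on a FunctionTemplate (registration not shown here).
static const v8::CFunction kFastSwapHandle =
    v8::CFunction::Make(FastSwapHandle);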
@@ -698,13 +702,13 @@ PRIMITIVE_C_TYPES(DEFINE_TYPE_INFO_TRAITS)
 };
 
 #define TYPED_ARRAY_C_TYPES(V) \
+  V(uint8_t, kUint8)           \
   V(int32_t, kInt32)           \
   V(uint32_t, kUint32)         \
   V(int64_t, kInt64)           \
   V(uint64_t, kUint64)         \
   V(float, kFloat32)           \
-  V(double, kFloat64)
-  V(uint8_t, kUint8)
+  V(double, kFloat64)
 
 TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA)
 