libv8-node 15.14.0.1-aarch64-linux → 16.10.0.0-aarch64-linux
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/ext/libv8-node/location.rb +1 -1
- data/ext/libv8-node/paths.rb +1 -1
- data/lib/libv8/node/version.rb +3 -3
- data/vendor/v8/{out.gn → aarch64-linux}/libv8/obj/libv8_monolith.a +0 -0
- data/vendor/v8/include/cppgc/allocation.h +104 -45
- data/vendor/v8/include/cppgc/common.h +9 -6
- data/vendor/v8/include/cppgc/cross-thread-persistent.h +384 -0
- data/vendor/v8/include/cppgc/custom-space.h +37 -2
- data/vendor/v8/include/cppgc/default-platform.h +47 -48
- data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
- data/vendor/v8/include/cppgc/explicit-management.h +82 -0
- data/vendor/v8/include/cppgc/garbage-collected.h +4 -3
- data/vendor/v8/include/cppgc/heap-consistency.h +236 -0
- data/vendor/v8/include/cppgc/heap-state.h +70 -0
- data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
- data/vendor/v8/include/cppgc/heap.h +68 -6
- data/vendor/v8/include/cppgc/internal/api-constants.h +3 -3
- data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +2 -1
- data/vendor/v8/include/cppgc/internal/compiler-specific.h +2 -2
- data/vendor/v8/include/cppgc/internal/gc-info.h +44 -13
- data/vendor/v8/include/cppgc/internal/name-trait.h +111 -0
- data/vendor/v8/include/cppgc/internal/persistent-node.h +57 -1
- data/vendor/v8/include/cppgc/internal/pointer-policies.h +69 -28
- data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +1 -1
- data/vendor/v8/include/cppgc/internal/write-barrier.h +353 -35
- data/vendor/v8/include/cppgc/liveness-broker.h +7 -1
- data/vendor/v8/include/cppgc/macros.h +2 -0
- data/vendor/v8/include/cppgc/member.h +85 -25
- data/vendor/v8/include/cppgc/name-provider.h +65 -0
- data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
- data/vendor/v8/include/cppgc/persistent.h +33 -9
- data/vendor/v8/include/cppgc/platform.h +48 -25
- data/vendor/v8/include/cppgc/prefinalizer.h +1 -1
- data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
- data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
- data/vendor/v8/include/cppgc/source-location.h +2 -1
- data/vendor/v8/include/cppgc/testing.h +99 -0
- data/vendor/v8/include/cppgc/trace-trait.h +8 -3
- data/vendor/v8/include/cppgc/type-traits.h +157 -19
- data/vendor/v8/include/cppgc/visitor.h +187 -23
- data/vendor/v8/include/libplatform/libplatform.h +11 -0
- data/vendor/v8/include/libplatform/v8-tracing.h +2 -0
- data/vendor/v8/include/v8-cppgc.h +258 -159
- data/vendor/v8/include/v8-fast-api-calls.h +562 -159
- data/vendor/v8/include/v8-inspector.h +23 -2
- data/vendor/v8/include/v8-internal.h +99 -27
- data/vendor/v8/include/v8-metrics.h +77 -8
- data/vendor/v8/include/v8-platform.h +47 -22
- data/vendor/v8/include/v8-profiler.h +75 -11
- data/vendor/v8/include/v8-unwinder-state.h +30 -0
- data/vendor/v8/include/v8-util.h +1 -1
- data/vendor/v8/include/v8-version.h +4 -4
- data/vendor/v8/include/v8.h +1192 -642
- data/vendor/v8/include/v8config.h +40 -9
- metadata +17 -5
- data/vendor/v8/include/cppgc/internal/process-heap.h +0 -34
data/vendor/v8/include/v8-inspector.h

```diff
@@ -105,7 +105,9 @@ class V8_EXPORT V8StackTrace {
   virtual StringView topSourceURL() const = 0;
   virtual int topLineNumber() const = 0;
   virtual int topColumnNumber() const = 0;
-  virtual
+  virtual int topScriptId() const = 0;
+  V8_DEPRECATE_SOON("Use V8::StackTrace::topScriptId() instead.")
+  int topScriptIdAsInteger() const { return topScriptId(); }
   virtual StringView topFunctionName() const = 0;

   virtual ~V8StackTrace() = default;
```
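A minimal migration sketch for the deprecation above; `GetTopScriptId` is an illustrative helper, not part of the inspector API:

```cpp
#include "v8-inspector.h"

// Prefer the new virtual; the deprecated topScriptIdAsInteger() is now just a
// forwarding wrapper around it.
int GetTopScriptId(const v8_inspector::V8StackTrace& trace) {
  return trace.topScriptId();
}
```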
```diff
@@ -129,6 +131,10 @@ class V8_EXPORT V8InspectorSession {
     virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
     virtual ~Inspectable() = default;
   };
+  class V8_EXPORT CommandLineAPIScope {
+   public:
+    virtual ~CommandLineAPIScope() = default;
+  };
   virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;

   // Dispatching protocol messages.
```
```diff
@@ -138,6 +144,9 @@ class V8_EXPORT V8InspectorSession {
   virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
   supportedDomains() = 0;

+  virtual std::unique_ptr<V8InspectorSession::CommandLineAPIScope>
+  initializeCommandLineAPIScope(int executionContextId) = 0;
+
   // Debugger actions.
   virtual void schedulePauseOnNextStatement(StringView breakReason,
                                             StringView breakDetails) = 0;
```
```diff
@@ -161,7 +170,7 @@ class V8_EXPORT V8InspectorSession {
       v8::Local<v8::Context>*,
       std::unique_ptr<StringBuffer>* objectGroup) = 0;
   virtual void releaseObjectGroup(StringView) = 0;
-  virtual void triggerPreciseCoverageDeltaUpdate(StringView
+  virtual void triggerPreciseCoverageDeltaUpdate(StringView occasion) = 0;
 };

 class V8_EXPORT V8InspectorClient {
```
```diff
@@ -181,6 +190,10 @@ class V8_EXPORT V8InspectorClient {
   virtual std::unique_ptr<StringBuffer> valueSubtype(v8::Local<v8::Value>) {
     return nullptr;
   }
+  virtual std::unique_ptr<StringBuffer> descriptionForValueSubtype(
+      v8::Local<v8::Context>, v8::Local<v8::Value>) {
+    return nullptr;
+  }
   virtual bool formatAccessorsAsProperties(v8::Local<v8::Value>) {
     return false;
   }
```
```diff
@@ -224,6 +237,10 @@ class V8_EXPORT V8InspectorClient {
                                             const StringView& resourceName) {
     return nullptr;
   }
+
+  // The caller would defer to generating a random 64 bit integer if
+  // this method returns 0.
+  virtual int64_t generateUniqueId() { return 0; }
 };

 // These stack trace ids are intended to be passed between debuggers and be
```
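A hedged sketch of how an embedder might use the new `generateUniqueId()` hook above; `MyInspectorClient` is an illustrative name, and returning 0 (the default) keeps V8's random-id fallback:

```cpp
#include <atomic>
#include <cstdint>
#include "v8-inspector.h"

// Hypothetical embedder client that hands out deterministic ids instead of
// relying on the random fallback used when generateUniqueId() returns 0.
class MyInspectorClient : public v8_inspector::V8InspectorClient {
 public:
  int64_t generateUniqueId() override {
    static std::atomic<int64_t> next_id{1};
    return next_id.fetch_add(1, std::memory_order_relaxed);
  }
};
```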
```diff
@@ -284,6 +301,10 @@ class V8_EXPORT V8Inspector {
                                 int scriptId) = 0;
   virtual void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
                                 StringView message) = 0;
+  virtual bool associateExceptionData(v8::Local<v8::Context>,
+                                      v8::Local<v8::Value> exception,
+                                      v8::Local<v8::Name> key,
+                                      v8::Local<v8::Value> value) = 0;

   // Connection.
   class V8_EXPORT Channel {
```
data/vendor/v8/include/v8-internal.h

```diff
@@ -40,6 +40,13 @@ const int kWeakHeapObjectTag = 3;
 const int kHeapObjectTagSize = 2;
 const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

+// Tag information for forwarding pointers stored in object headers.
+// 0b00 at the lowest 2 bits in the header indicates that the map word is a
+// forwarding pointer.
+const int kForwardingTag = 0;
+const int kForwardingTagSize = 2;
+const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
+
 // Tag information for Smi.
 const int kSmiTag = 0;
 const int kSmiTagSize = 1;
```
```diff
@@ -120,6 +127,29 @@ constexpr bool HeapSandboxIsEnabled() {

 using ExternalPointer_t = Address;

+// If the heap sandbox is enabled, these tag values will be ORed with the
+// external pointers in the external pointer table to prevent use of pointers of
+// the wrong type. When a pointer is loaded, it is ANDed with the inverse of the
+// expected type's tag. The tags are constructed in a way that guarantees that a
+// failed type check will result in one or more of the top bits of the pointer
+// to be set, rendering the pointer inaccessible. This construction allows
+// performing the type check and removing GC marking bits from the pointer at
+// the same time.
+enum ExternalPointerTag : uint64_t {
+  kExternalPointerNullTag = 0x0000000000000000,
+  kArrayBufferBackingStoreTag = 0x00ff000000000000,     // 0b000000011111111
+  kTypedArrayExternalPointerTag = 0x017f000000000000,   // 0b000000101111111
+  kDataViewDataPointerTag = 0x01bf000000000000,         // 0b000000110111111
+  kExternalStringResourceTag = 0x01df000000000000,      // 0b000000111011111
+  kExternalStringResourceDataTag = 0x01ef000000000000,  // 0b000000111101111
+  kForeignForeignAddressTag = 0x01f7000000000000,       // 0b000000111110111
+  kNativeContextMicrotaskQueueTag = 0x01fb000000000000, // 0b000000111111011
+  kEmbedderDataSlotPayloadTag = 0x01fd000000000000,     // 0b000000111111101
+  kCodeEntryPointTag = 0x01fe000000000000,              // 0b000000111111110
+};
+
+constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;
+
 #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
 using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
 #else
```
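A compile-time sketch of the tagging scheme described in the comment above (not V8's actual code; `Encode`/`Decode` and the sample address are illustrative): the tag is ORed in when the pointer is stored and the inverse mask is ANDed in when it is loaded, so a mismatched tag leaves at least one of the top 16 bits set.

```cpp
#include <cstdint>

constexpr uint64_t kArrayBufferBackingStoreTag   = 0x00ff000000000000;
constexpr uint64_t kTypedArrayExternalPointerTag = 0x017f000000000000;
constexpr uint64_t kExternalPointerTagMask       = 0xffff000000000000;

constexpr uint64_t Encode(uint64_t raw, uint64_t tag) { return raw | tag; }
constexpr uint64_t Decode(uint64_t encoded, uint64_t expected_tag) {
  return encoded & ~expected_tag;
}

constexpr uint64_t kPtr = 0x0000123456789ab0;  // arbitrary untagged address

// Decoding with the matching tag recovers the original pointer bits.
static_assert(Decode(Encode(kPtr, kArrayBufferBackingStoreTag),
                     kArrayBufferBackingStoreTag) == kPtr, "round trip");

// Decoding with the wrong tag leaves high bits set, so the resulting address
// is non-canonical and any dereference faults.
static_assert((Decode(Encode(kPtr, kArrayBufferBackingStoreTag),
                      kTypedArrayExternalPointerTag) &
               kExternalPointerTagMask) != 0, "type confusion is caught");
```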
```diff
@@ -140,6 +170,11 @@ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
          kSmiTag;
 }

+// Converts encoded external pointer to address.
+V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate,
+                                            ExternalPointer_t pointer,
+                                            ExternalPointerTag tag);
+
 // {obj} must be the raw tagged pointer representation of a HeapObject
 // that's guaranteed to never be in ReadOnlySpace.
 V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
```
```diff
@@ -155,6 +190,14 @@ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
  * depend on functions and constants defined here.
  */
 class Internals {
+#ifdef V8_MAP_PACKING
+  V8_INLINE static constexpr internal::Address UnpackMapWord(
+      internal::Address mapword) {
+    // TODO(wenyuzhao): Clear header metadata.
+    return mapword ^ kMapWordXorMask;
+  }
+#endif
+
  public:
   // These values match non-compiler-dependent values defined within
   // the implementation of v8.
```
```diff
@@ -168,6 +211,9 @@ class Internals {
   static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
   static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
   static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
+#ifdef V8_HEAP_SANDBOX
+  static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize;
+#endif
   static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
   static const int kFullStringRepresentationMask = 0x0f;
   static const int kStringEncodingMask = 0x8;
```
```diff
@@ -182,11 +228,19 @@ class Internals {
       kNumIsolateDataSlots * kApiSystemPointerSize;
   static const int kIsolateFastCCallCallerPcOffset =
       kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
-  static const int
+  static const int kIsolateFastApiCallTargetOffset =
       kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
+  static const int kIsolateStackGuardOffset =
+      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
   static const int kIsolateRootsOffset =
       kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

+  static const int kExternalPointerTableBufferOffset = 0;
+  static const int kExternalPointerTableLengthOffset =
+      kExternalPointerTableBufferOffset + kApiSystemPointerSize;
+  static const int kExternalPointerTableCapacityOffset =
+      kExternalPointerTableLengthOffset + kApiInt32Size;
+
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
   static const int kNullValueRootIndex = 6;
```
```diff
@@ -220,6 +274,17 @@ class Internals {
   // incremental GC once the external memory reaches this limit.
   static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

+#ifdef V8_MAP_PACKING
+  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
+  // The lowest two bits of mapwords are always `0b10`
+  static const uintptr_t kMapWordSignature = 0b10;
+  // XORing a (non-compressed) map with this mask ensures that the two
+  // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
+  // although real Smis have all lower 32 bits unset. We only rely on these
+  // values passing as Smis in very few places.
+  static const int kMapWordXorMask = 0b11;
+#endif
+
   V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
   V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
 #ifdef V8_ENABLE_CHECKS
```
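A small sketch of the map-word packing round trip implied by `kMapWordXorMask` and `kMapWordSignature` above, assuming the usual 0b01 heap-object tag on map pointers; the `Pack`/`Unpack` names and sample value are illustrative only.

```cpp
#include <cstdint>

constexpr uintptr_t kMapWordXorMask   = 0b11;
constexpr uintptr_t kMapWordSignature = 0b10;

constexpr uintptr_t Pack(uintptr_t map)    { return map ^ kMapWordXorMask; }
constexpr uintptr_t Unpack(uintptr_t word) { return word ^ kMapWordXorMask; }

// A tagged HeapObject pointer ends in 0b01, so packing flips the low bits to
// the 0b10 signature, and a second XOR restores the original value.
constexpr uintptr_t kSomeTaggedMap = 0x2a31;
static_assert((kSomeTaggedMap & 0b11) == 0b01, "tagged pointer");
static_assert((Pack(kSomeTaggedMap) & 0b11) == kMapWordSignature, "signature");
static_assert(Unpack(Pack(kSomeTaggedMap)) == kSomeTaggedMap, "round trip");
```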
```diff
@@ -246,6 +311,9 @@ class Internals {
   V8_INLINE static int GetInstanceType(const internal::Address obj) {
     typedef internal::Address A;
     A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
+#ifdef V8_MAP_PACKING
+    map = UnpackMapWord(map);
+#endif
     return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
   }

```
```diff
@@ -325,8 +393,9 @@ class Internals {
       internal::Address heap_object_ptr, int offset) {
 #ifdef V8_COMPRESS_POINTERS
     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
-    internal::Address
-
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
 #else
     return ReadRawField<internal::Address>(heap_object_ptr, offset);
 #endif
```
```diff
@@ -352,45 +421,47 @@ class Internals {
 #endif
   }

+  V8_INLINE static Address DecodeExternalPointer(
+      const Isolate* isolate, ExternalPointer_t encoded_pointer,
+      ExternalPointerTag tag) {
+#ifdef V8_HEAP_SANDBOX
+    return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
+#else
+    return encoded_pointer;
+#endif
+  }
+
   V8_INLINE static internal::Address ReadExternalPointerField(
-      internal::Isolate* isolate, internal::Address heap_object_ptr,
-
-    internal::Address value = ReadRawField<Address>(heap_object_ptr, offset);
+      internal::Isolate* isolate, internal::Address heap_object_ptr, int offset,
+      ExternalPointerTag tag) {
 #ifdef V8_HEAP_SANDBOX
+    internal::ExternalPointer_t encoded_value =
+        ReadRawField<uint32_t>(heap_object_ptr, offset);
     // We currently have to treat zero as nullptr in embedder slots.
-
+    return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag)
+                         : 0;
+#else
+    return ReadRawField<Address>(heap_object_ptr, offset);
 #endif
-    return value;
   }

 #ifdef V8_COMPRESS_POINTERS
   // See v8:7703 or src/ptr-compr.* for details about pointer compression.
-  static constexpr size_t
-  static constexpr size_t
-
-  // See v8:10391 for details about V8 heap sandbox.
-  static constexpr uint32_t kExternalPointerSalt =
-      0x7fffffff & ~static_cast<uint32_t>(kHeapObjectTagMask);
+  static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
+  static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;

-  V8_INLINE static internal::Address
+  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
       internal::Address addr) {
-    return addr & -static_cast<intptr_t>(
+    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
   }

   V8_INLINE static internal::Address DecompressTaggedAnyField(
       internal::Address heap_object_ptr, uint32_t value) {
-    internal::Address
-
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
   }

-  V8_INLINE static Address DecodeExternalPointer(
-      const Isolate* isolate, ExternalPointer_t encoded_pointer) {
-#ifndef V8_HEAP_SANDBOX
-    return encoded_pointer;
-#else
-    return encoded_pointer ^ kExternalPointerSalt;
-#endif
-  }
 #endif  // V8_COMPRESS_POINTERS
 };

```
```diff
@@ -414,7 +485,8 @@ void CastCheck<false>::Perform(T* data) {}

 template <class T>
 V8_INLINE void PerformCastCheck(T* data) {
-  CastCheck<std::is_base_of<Data, T>::value
+  CastCheck<std::is_base_of<Data, T>::value &&
+            !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
 }

 // A base class for backing stores, which is needed due to vagaries of
```
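The added condition disables the cast check when the destination type is `v8::Data` itself. A hypothetical mirror of that compile-time predicate, assuming `v8::Value` derives from `v8::Data` as in these headers:

```cpp
#include <type_traits>
#include "v8.h"

template <class T>
constexpr bool WouldCheck() {
  return std::is_base_of<v8::Data, T>::value &&
         !std::is_same<v8::Data, std::remove_cv_t<T>>::value;
}

static_assert(WouldCheck<v8::Value>(), "proper subclasses of Data are checked");
static_assert(!WouldCheck<v8::Data>(), "casting to Data itself skips the check");
```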
data/vendor/v8/include/v8-metrics.h

```diff
@@ -10,13 +10,74 @@
 namespace v8 {
 namespace metrics {

+struct GarbageCollectionPhases {
+  int64_t compact_wall_clock_duration_in_us = -1;
+  int64_t mark_wall_clock_duration_in_us = -1;
+  int64_t sweep_wall_clock_duration_in_us = -1;
+  int64_t weak_wall_clock_duration_in_us = -1;
+};
+
+struct GarbageCollectionSizes {
+  int64_t bytes_before = -1;
+  int64_t bytes_after = -1;
+  int64_t bytes_freed = -1;
+};
+
+struct GarbageCollectionFullCycle {
+  GarbageCollectionPhases total;
+  GarbageCollectionPhases total_cpp;
+  GarbageCollectionPhases main_thread;
+  GarbageCollectionPhases main_thread_cpp;
+  GarbageCollectionPhases main_thread_atomic;
+  GarbageCollectionPhases main_thread_atomic_cpp;
+  GarbageCollectionPhases main_thread_incremental;
+  GarbageCollectionPhases main_thread_incremental_cpp;
+  GarbageCollectionSizes objects;
+  GarbageCollectionSizes objects_cpp;
+  GarbageCollectionSizes memory;
+  GarbageCollectionSizes memory_cpp;
+  double collection_rate_in_percent;
+  double collection_rate_cpp_in_percent;
+  double efficiency_in_bytes_per_us;
+  double efficiency_cpp_in_bytes_per_us;
+  double main_thread_efficiency_in_bytes_per_us;
+  double main_thread_efficiency_cpp_in_bytes_per_us;
+};
+
+struct GarbageCollectionFullMainThreadIncrementalMark {
+  int64_t wall_clock_duration_in_us = -1;
+  int64_t cpp_wall_clock_duration_in_us = -1;
+};
+
+struct GarbageCollectionFullMainThreadBatchedIncrementalMark {
+  std::vector<GarbageCollectionFullMainThreadIncrementalMark> events;
+};
+
+struct GarbageCollectionFullMainThreadIncrementalSweep {
+  int64_t wall_clock_duration_in_us = -1;
+  int64_t cpp_wall_clock_duration_in_us = -1;
+};
+
+struct GarbageCollectionFullMainThreadBatchedIncrementalSweep {
+  std::vector<GarbageCollectionFullMainThreadIncrementalSweep> events;
+};
+
+struct GarbageCollectionYoungCycle {
+  int64_t total_wall_clock_duration_in_us = -1;
+  int64_t main_thread_wall_clock_duration_in_us = -1;
+  double collection_rate_in_percent;
+  double efficiency_in_bytes_per_us;
+  double main_thread_efficiency_in_bytes_per_us;
+};
+
 struct WasmModuleDecoded {
   bool async = false;
   bool streamed = false;
   bool success = false;
   size_t module_size_in_bytes = 0;
   size_t function_count = 0;
-  int64_t
+  int64_t wall_clock_duration_in_us = -1;
+  int64_t cpu_duration_in_us = -1;
 };

 struct WasmModuleCompiled {
```
```diff
@@ -28,30 +89,38 @@ struct WasmModuleCompiled {
   bool success = false;
   size_t code_size_in_bytes = 0;
   size_t liftoff_bailout_count = 0;
-  int64_t
+  int64_t wall_clock_duration_in_us = -1;
+  int64_t cpu_duration_in_us = -1;
 };

 struct WasmModuleInstantiated {
   bool async = false;
   bool success = false;
   size_t imported_function_count = 0;
-  int64_t
+  int64_t wall_clock_duration_in_us = -1;
 };

 struct WasmModuleTieredUp {
   bool lazy = false;
   size_t code_size_in_bytes = 0;
-  int64_t
+  int64_t wall_clock_duration_in_us = -1;
+  int64_t cpu_duration_in_us = -1;
 };

 struct WasmModulesPerIsolate {
   size_t count = 0;
 };

-#define V8_MAIN_THREAD_METRICS_EVENTS(V)
-  V(
-  V(
-  V(
+#define V8_MAIN_THREAD_METRICS_EVENTS(V)                    \
+  V(GarbageCollectionFullCycle)                             \
+  V(GarbageCollectionFullMainThreadIncrementalMark)         \
+  V(GarbageCollectionFullMainThreadBatchedIncrementalMark)  \
+  V(GarbageCollectionFullMainThreadIncrementalSweep)        \
+  V(GarbageCollectionFullMainThreadBatchedIncrementalSweep) \
+  V(GarbageCollectionYoungCycle)                            \
+  V(WasmModuleDecoded)                                      \
+  V(WasmModuleCompiled)                                     \
+  V(WasmModuleInstantiated)                                 \
   V(WasmModuleTieredUp)

 #define V8_THREAD_SAFE_METRICS_EVENTS(V) V(WasmModulesPerIsolate)
```
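The rewritten X-macro above now lists every main-thread event type. A hedged sketch of one way an embedder could consume it; the `MyMetricsObserver`/`Handle` names are illustrative, and only the macro and the event structs come from the header:

```cpp
#include "v8-metrics.h"

class MyMetricsObserver {
 public:
  // Expands to one declaration per event type listed in the macro, e.g.
  //   void Handle(const v8::metrics::GarbageCollectionFullCycle& event);
#define DECLARE_HANDLER(EventType) \
  void Handle(const v8::metrics::EventType& event);
  V8_MAIN_THREAD_METRICS_EVENTS(DECLARE_HANDLER)
#undef DECLARE_HANDLER
};
```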
data/vendor/v8/include/v8-platform.h

```diff
@@ -175,9 +175,15 @@ class JobDelegate {
    * Returns a task_id unique among threads currently running this job, such
    * that GetTaskId() < worker count. To achieve this, the same task_id may be
    * reused by a different thread after a worker_task returns.
+   */
+  virtual uint8_t GetTaskId() = 0;
+
+  /**
+   * Returns true if the current task is called from the thread currently
+   * running JobHandle::Join().
    * TODO(etiennep): Make pure virtual once custom embedders implement it.
    */
-  virtual
+  virtual bool IsJoiningThread() const { return false; }
 };

 /**
```
```diff
@@ -210,17 +216,43 @@ class JobHandle {
    */
   virtual void Cancel() = 0;

+  /*
+   * Forces all existing workers to yield ASAP but doesn't wait for them.
+   * Warning, this is dangerous if the Job's callback is bound to or has access
+   * to state which may be deleted after this call.
+   * TODO(etiennep): Cleanup once implemented by all embedders.
+   */
+  virtual void CancelAndDetach() { Cancel(); }
+
   /**
-   * Returns true if there's
-   * TODO(etiennep): Make pure virtual once custom embedders implement it.
+   * Returns true if there's any work pending or any worker running.
    */
-  virtual bool
+  virtual bool IsActive() = 0;
+
+  // TODO(etiennep): Clean up once all overrides are removed.
+  V8_DEPRECATED("Use !IsActive() instead.")
+  virtual bool IsCompleted() { return !IsActive(); }

   /**
    * Returns true if associated with a Job and other methods may be called.
-   * Returns false after Join() or Cancel() was called.
+   * Returns false after Join() or Cancel() was called. This may return true
+   * even if no workers are running and IsCompleted() returns true
    */
-  virtual bool
+  virtual bool IsValid() = 0;
+
+  // TODO(etiennep): Clean up once all overrides are removed.
+  V8_DEPRECATED("Use IsValid() instead.")
+  virtual bool IsRunning() { return IsValid(); }
+
+  /**
+   * Returns true if job priority can be changed.
+   */
+  virtual bool UpdatePriorityEnabled() const { return false; }
+
+  /**
+   * Update this Job's priority.
+   */
+  virtual void UpdatePriority(TaskPriority new_priority) {}
 };

 /**
```
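A rough usage sketch for the revised JobHandle surface above, assuming the embedder already has a `v8::Platform*` whose `PostJob()` returns a `JobHandle` (that method is not part of this hunk); `RunAndWait` is an illustrative helper:

```cpp
#include <memory>
#include <utility>
#include "v8-platform.h"

void RunAndWait(v8::Platform* platform, std::unique_ptr<v8::JobTask> task) {
  std::unique_ptr<v8::JobHandle> handle =
      platform->PostJob(v8::TaskPriority::kUserVisible, std::move(task));
  // New in this version: priority can be adjusted while the job is running.
  if (handle->UpdatePriorityEnabled())
    handle->UpdatePriority(v8::TaskPriority::kBestEffort);
  // IsValid()/IsActive() replace the deprecated IsRunning()/IsCompleted().
  if (handle->IsValid()) handle->Join();  // contribute on this thread and wait
}
```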
```diff
@@ -233,23 +265,17 @@ class JobTask {
   virtual void Run(JobDelegate* delegate) = 0;

   /**
-   * Controls the maximum number of threads calling Run() concurrently
-   *
-   *
-   *
+   * Controls the maximum number of threads calling Run() concurrently, given
+   * the number of threads currently assigned to this job and executing Run().
+   * Run() is only invoked if the number of threads previously running Run() was
+   * less than the value returned. Since GetMaxConcurrency() is a leaf function,
+   * it must not call back any JobHandle methods.
    */
-  virtual size_t GetMaxConcurrency() const = 0;
+  virtual size_t GetMaxConcurrency(size_t worker_count) const = 0;

-
-
-
-   * must include local work items not visible globaly by other workers.
-   * TODO(etiennep): Replace the version above by this once custom embedders are
-   * migrated.
-   */
-  size_t GetMaxConcurrency(size_t worker_count) const {
-    return GetMaxConcurrency();
-  }
+  // TODO(1114823): Clean up once all overrides are removed.
+  V8_DEPRECATED("Use the version that takes |worker_count|.")
+  virtual size_t GetMaxConcurrency() const { return 0; }
 };

 /**
```
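A hedged end-to-end sketch of a `v8::JobTask` written against the new signatures in the two hunks above: `GetMaxConcurrency(worker_count)` reports how much work remains, and `JobDelegate::GetTaskId()` (always below the worker count) indexes a per-worker accumulator. Everything except the `v8::JobTask`/`JobDelegate` API is illustrative.

```cpp
#include <algorithm>
#include <array>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include "v8-platform.h"

class SumJob : public v8::JobTask {
 public:
  void Run(v8::JobDelegate* delegate) override {
    const uint8_t slot = delegate->GetTaskId();  // unique among running workers
    while (!delegate->ShouldYield()) {
      const size_t item = next_.fetch_add(1, std::memory_order_relaxed);
      if (item >= kItems) return;
      partial_[slot] += item;  // per-task-id slot, no extra locking needed
    }
  }

  size_t GetMaxConcurrency(size_t /*worker_count*/) const override {
    const size_t claimed = next_.load(std::memory_order_relaxed);
    const size_t remaining = claimed < kItems ? kItems - claimed : 0;
    return std::min(remaining, partial_.size());  // 0 once all items are claimed
  }

 private:
  static constexpr size_t kItems = 1 << 20;
  std::atomic<size_t> next_{0};
  std::array<uint64_t, 8> partial_{};
};
```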
```diff
@@ -382,7 +408,6 @@ class PageAllocator {
     kNoAccess,
     kRead,
     kReadWrite,
-    // TODO(hpayer): Remove this flag. Memory should never be rwx.
     kReadWriteExecute,
     kReadExecute,
     // Set this when reserving memory that will later require kReadWriteExecute
```
|