libv8-node 15.5.1.0.beta1-aarch64-linux-musl → 16.17.0.0-aarch64-linux-musl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (57) hide show
  1. checksums.yaml +4 -4
  2. data/ext/libv8-node/location.rb +1 -1
  3. data/ext/libv8-node/paths.rb +5 -1
  4. data/lib/libv8/node/version.rb +3 -3
  5. data/vendor/v8/{out.gn → aarch64-linux-musl}/libv8/obj/libv8_monolith.a +0 -0
  6. data/vendor/v8/include/cppgc/allocation.h +105 -45
  7. data/vendor/v8/include/cppgc/common.h +9 -6
  8. data/vendor/v8/include/cppgc/cross-thread-persistent.h +413 -0
  9. data/vendor/v8/include/cppgc/custom-space.h +37 -2
  10. data/vendor/v8/include/cppgc/default-platform.h +47 -48
  11. data/vendor/v8/include/cppgc/ephemeron-pair.h +30 -0
  12. data/vendor/v8/include/cppgc/explicit-management.h +82 -0
  13. data/vendor/v8/include/cppgc/garbage-collected.h +4 -3
  14. data/vendor/v8/include/cppgc/heap-consistency.h +253 -0
  15. data/vendor/v8/include/cppgc/heap-state.h +70 -0
  16. data/vendor/v8/include/cppgc/heap-statistics.h +120 -0
  17. data/vendor/v8/include/cppgc/heap.h +68 -6
  18. data/vendor/v8/include/cppgc/internal/api-constants.h +3 -3
  19. data/vendor/v8/include/cppgc/internal/caged-heap-local-data.h +2 -1
  20. data/vendor/v8/include/cppgc/internal/compiler-specific.h +2 -2
  21. data/vendor/v8/include/cppgc/internal/gc-info.h +44 -13
  22. data/vendor/v8/include/cppgc/internal/name-trait.h +111 -0
  23. data/vendor/v8/include/cppgc/internal/persistent-node.h +58 -2
  24. data/vendor/v8/include/cppgc/internal/pointer-policies.h +69 -28
  25. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +1 -1
  26. data/vendor/v8/include/cppgc/internal/write-barrier.h +390 -35
  27. data/vendor/v8/include/cppgc/liveness-broker.h +11 -2
  28. data/vendor/v8/include/cppgc/macros.h +2 -0
  29. data/vendor/v8/include/cppgc/member.h +87 -25
  30. data/vendor/v8/include/cppgc/name-provider.h +65 -0
  31. data/vendor/v8/include/cppgc/object-size-trait.h +58 -0
  32. data/vendor/v8/include/cppgc/persistent.h +40 -10
  33. data/vendor/v8/include/cppgc/platform.h +49 -25
  34. data/vendor/v8/include/cppgc/prefinalizer.h +1 -1
  35. data/vendor/v8/include/cppgc/process-heap-statistics.h +36 -0
  36. data/vendor/v8/include/cppgc/sentinel-pointer.h +32 -0
  37. data/vendor/v8/include/cppgc/source-location.h +2 -1
  38. data/vendor/v8/include/cppgc/testing.h +99 -0
  39. data/vendor/v8/include/cppgc/trace-trait.h +8 -3
  40. data/vendor/v8/include/cppgc/type-traits.h +157 -19
  41. data/vendor/v8/include/cppgc/visitor.h +194 -28
  42. data/vendor/v8/include/libplatform/libplatform.h +11 -0
  43. data/vendor/v8/include/libplatform/v8-tracing.h +2 -0
  44. data/vendor/v8/include/v8-cppgc.h +258 -159
  45. data/vendor/v8/include/v8-fast-api-calls.h +603 -155
  46. data/vendor/v8/include/v8-inspector.h +22 -4
  47. data/vendor/v8/include/v8-internal.h +111 -27
  48. data/vendor/v8/include/v8-metrics.h +77 -8
  49. data/vendor/v8/include/v8-platform.h +47 -22
  50. data/vendor/v8/include/v8-profiler.h +75 -11
  51. data/vendor/v8/include/v8-unwinder-state.h +30 -0
  52. data/vendor/v8/include/v8-util.h +1 -1
  53. data/vendor/v8/include/v8-version.h +4 -4
  54. data/vendor/v8/include/v8.h +1196 -642
  55. data/vendor/v8/include/v8config.h +87 -11
  56. metadata +20 -8
  57. data/vendor/v8/include/cppgc/internal/process-heap.h +0 -34
@@ -105,7 +105,9 @@ class V8_EXPORT V8StackTrace {
105
105
  virtual StringView topSourceURL() const = 0;
106
106
  virtual int topLineNumber() const = 0;
107
107
  virtual int topColumnNumber() const = 0;
108
- virtual StringView topScriptId() const = 0;
108
+ virtual int topScriptId() const = 0;
109
+ V8_DEPRECATE_SOON("Use V8::StackTrace::topScriptId() instead.")
110
+ int topScriptIdAsInteger() const { return topScriptId(); }
109
111
  virtual StringView topFunctionName() const = 0;
110
112
 
111
113
  virtual ~V8StackTrace() = default;
@@ -129,6 +131,10 @@ class V8_EXPORT V8InspectorSession {
129
131
  virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
130
132
  virtual ~Inspectable() = default;
131
133
  };
134
+ class V8_EXPORT CommandLineAPIScope {
135
+ public:
136
+ virtual ~CommandLineAPIScope() = default;
137
+ };
132
138
  virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;
133
139
 
134
140
  // Dispatching protocol messages.
@@ -138,6 +144,9 @@ class V8_EXPORT V8InspectorSession {
138
144
  virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
139
145
  supportedDomains() = 0;
140
146
 
147
+ virtual std::unique_ptr<V8InspectorSession::CommandLineAPIScope>
148
+ initializeCommandLineAPIScope(int executionContextId) = 0;
149
+
141
150
  // Debugger actions.
142
151
  virtual void schedulePauseOnNextStatement(StringView breakReason,
143
152
  StringView breakDetails) = 0;
@@ -161,7 +170,7 @@ class V8_EXPORT V8InspectorSession {
161
170
  v8::Local<v8::Context>*,
162
171
  std::unique_ptr<StringBuffer>* objectGroup) = 0;
163
172
  virtual void releaseObjectGroup(StringView) = 0;
164
- virtual void triggerPreciseCoverageDeltaUpdate(StringView occassion) = 0;
173
+ virtual void triggerPreciseCoverageDeltaUpdate(StringView occasion) = 0;
165
174
  };
166
175
 
167
176
  class V8_EXPORT V8InspectorClient {
@@ -181,8 +190,9 @@ class V8_EXPORT V8InspectorClient {
181
190
  virtual std::unique_ptr<StringBuffer> valueSubtype(v8::Local<v8::Value>) {
182
191
  return nullptr;
183
192
  }
184
- virtual bool formatAccessorsAsProperties(v8::Local<v8::Value>) {
185
- return false;
193
+ virtual std::unique_ptr<StringBuffer> descriptionForValueSubtype(
194
+ v8::Local<v8::Context>, v8::Local<v8::Value>) {
195
+ return nullptr;
186
196
  }
187
197
  virtual bool isInspectableHeapObject(v8::Local<v8::Object>) { return true; }
188
198
 
@@ -224,6 +234,10 @@ class V8_EXPORT V8InspectorClient {
224
234
  const StringView& resourceName) {
225
235
  return nullptr;
226
236
  }
237
+
238
+ // The caller would defer to generating a random 64 bit integer if
239
+ // this method returns 0.
240
+ virtual int64_t generateUniqueId() { return 0; }
227
241
  };
228
242
 
229
243
  // These stack trace ids are intended to be passed between debuggers and be
@@ -284,6 +298,10 @@ class V8_EXPORT V8Inspector {
284
298
  int scriptId) = 0;
285
299
  virtual void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
286
300
  StringView message) = 0;
301
+ virtual bool associateExceptionData(v8::Local<v8::Context>,
302
+ v8::Local<v8::Value> exception,
303
+ v8::Local<v8::Name> key,
304
+ v8::Local<v8::Value> value) = 0;
287
305
 
288
306
  // Connection.
289
307
  class V8_EXPORT Channel {
@@ -15,9 +15,12 @@
15
15
 
16
16
  namespace v8 {
17
17
 
18
+ class Array;
18
19
  class Context;
19
20
  class Data;
20
21
  class Isolate;
22
+ template <typename T>
23
+ class Local;
21
24
 
22
25
  namespace internal {
23
26
 
@@ -40,6 +43,13 @@ const int kWeakHeapObjectTag = 3;
40
43
  const int kHeapObjectTagSize = 2;
41
44
  const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
42
45
 
46
+ // Tag information for forwarding pointers stored in object headers.
47
+ // 0b00 at the lowest 2 bits in the header indicates that the map word is a
48
+ // forwarding pointer.
49
+ const int kForwardingTag = 0;
50
+ const int kForwardingTagSize = 2;
51
+ const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
52
+
43
53
  // Tag information for Smi.
44
54
  const int kSmiTag = 0;
45
55
  const int kSmiTagSize = 1;
@@ -120,6 +130,29 @@ constexpr bool HeapSandboxIsEnabled() {
120
130
 
121
131
  using ExternalPointer_t = Address;
122
132
 
133
+ // If the heap sandbox is enabled, these tag values will be ORed with the
134
+ // external pointers in the external pointer table to prevent use of pointers of
135
+ // the wrong type. When a pointer is loaded, it is ANDed with the inverse of the
136
+ // expected type's tag. The tags are constructed in a way that guarantees that a
137
+ // failed type check will result in one or more of the top bits of the pointer
138
+ to be set, rendering the pointer inaccessible. This construction allows
139
+ // performing the type check and removing GC marking bits from the pointer at
140
+ // the same time.
141
+ enum ExternalPointerTag : uint64_t {
142
+ kExternalPointerNullTag = 0x0000000000000000,
143
+ kArrayBufferBackingStoreTag = 0x00ff000000000000, // 0b000000011111111
144
+ kTypedArrayExternalPointerTag = 0x017f000000000000, // 0b000000101111111
145
+ kDataViewDataPointerTag = 0x01bf000000000000, // 0b000000110111111
146
+ kExternalStringResourceTag = 0x01df000000000000, // 0b000000111011111
147
+ kExternalStringResourceDataTag = 0x01ef000000000000, // 0b000000111101111
148
+ kForeignForeignAddressTag = 0x01f7000000000000, // 0b000000111110111
149
+ kNativeContextMicrotaskQueueTag = 0x01fb000000000000, // 0b000000111111011
150
+ kEmbedderDataSlotPayloadTag = 0x01fd000000000000, // 0b000000111111101
151
+ kCodeEntryPointTag = 0x01fe000000000000, // 0b000000111111110
152
+ };
153
+
154
+ constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;
155
+
123
156
  #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
124
157
  using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
125
158
  #else
@@ -140,6 +173,11 @@ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
140
173
  kSmiTag;
141
174
  }
142
175
 
176
+ // Converts encoded external pointer to address.
177
+ V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate,
178
+ ExternalPointer_t pointer,
179
+ ExternalPointerTag tag);
180
+
143
181
  // {obj} must be the raw tagged pointer representation of a HeapObject
144
182
  // that's guaranteed to never be in ReadOnlySpace.
145
183
  V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
@@ -155,6 +193,14 @@ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
155
193
  * depend on functions and constants defined here.
156
194
  */
157
195
  class Internals {
196
+ #ifdef V8_MAP_PACKING
197
+ V8_INLINE static constexpr internal::Address UnpackMapWord(
198
+ internal::Address mapword) {
199
+ // TODO(wenyuzhao): Clear header metadata.
200
+ return mapword ^ kMapWordXorMask;
201
+ }
202
+ #endif
203
+
158
204
  public:
159
205
  // These values match non-compiler-dependent values defined within
160
206
  // the implementation of v8.
@@ -168,6 +214,9 @@ class Internals {
168
214
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
169
215
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
170
216
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
217
+ #ifdef V8_HEAP_SANDBOX
218
+ static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize;
219
+ #endif
171
220
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
172
221
  static const int kFullStringRepresentationMask = 0x0f;
173
222
  static const int kStringEncodingMask = 0x8;
@@ -182,11 +231,19 @@ class Internals {
182
231
  kNumIsolateDataSlots * kApiSystemPointerSize;
183
232
  static const int kIsolateFastCCallCallerPcOffset =
184
233
  kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
185
- static const int kIsolateStackGuardOffset =
234
+ static const int kIsolateFastApiCallTargetOffset =
186
235
  kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
236
+ static const int kIsolateStackGuardOffset =
237
+ kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
187
238
  static const int kIsolateRootsOffset =
188
239
  kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
189
240
 
241
+ static const int kExternalPointerTableBufferOffset = 0;
242
+ static const int kExternalPointerTableLengthOffset =
243
+ kExternalPointerTableBufferOffset + kApiSystemPointerSize;
244
+ static const int kExternalPointerTableCapacityOffset =
245
+ kExternalPointerTableLengthOffset + kApiInt32Size;
246
+
190
247
  static const int kUndefinedValueRootIndex = 4;
191
248
  static const int kTheHoleValueRootIndex = 5;
192
249
  static const int kNullValueRootIndex = 6;
@@ -220,6 +277,17 @@ class Internals {
220
277
  // incremental GC once the external memory reaches this limit.
221
278
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
222
279
 
280
+ #ifdef V8_MAP_PACKING
281
+ static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
282
+ // The lowest two bits of mapwords are always `0b10`
283
+ static const uintptr_t kMapWordSignature = 0b10;
284
+ // XORing a (non-compressed) map with this mask ensures that the two
285
+ // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
286
+ // although real Smis have all lower 32 bits unset. We only rely on these
287
+ // values passing as Smis in very few places.
288
+ static const int kMapWordXorMask = 0b11;
289
+ #endif
290
+
223
291
  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
224
292
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
225
293
  #ifdef V8_ENABLE_CHECKS
@@ -246,6 +314,9 @@ class Internals {
246
314
  V8_INLINE static int GetInstanceType(const internal::Address obj) {
247
315
  typedef internal::Address A;
248
316
  A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
317
+ #ifdef V8_MAP_PACKING
318
+ map = UnpackMapWord(map);
319
+ #endif
249
320
  return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
250
321
  }
251
322
 
@@ -325,8 +396,9 @@ class Internals {
325
396
  internal::Address heap_object_ptr, int offset) {
326
397
  #ifdef V8_COMPRESS_POINTERS
327
398
  uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
328
- internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
329
- return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
399
+ internal::Address base =
400
+ GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
401
+ return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
330
402
  #else
331
403
  return ReadRawField<internal::Address>(heap_object_ptr, offset);
332
404
  #endif
@@ -352,45 +424,47 @@ class Internals {
352
424
  #endif
353
425
  }
354
426
 
427
+ V8_INLINE static Address DecodeExternalPointer(
428
+ const Isolate* isolate, ExternalPointer_t encoded_pointer,
429
+ ExternalPointerTag tag) {
430
+ #ifdef V8_HEAP_SANDBOX
431
+ return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
432
+ #else
433
+ return encoded_pointer;
434
+ #endif
435
+ }
436
+
355
437
  V8_INLINE static internal::Address ReadExternalPointerField(
356
- internal::Isolate* isolate, internal::Address heap_object_ptr,
357
- int offset) {
358
- internal::Address value = ReadRawField<Address>(heap_object_ptr, offset);
438
+ internal::Isolate* isolate, internal::Address heap_object_ptr, int offset,
439
+ ExternalPointerTag tag) {
359
440
  #ifdef V8_HEAP_SANDBOX
441
+ internal::ExternalPointer_t encoded_value =
442
+ ReadRawField<uint32_t>(heap_object_ptr, offset);
360
443
  // We currently have to treat zero as nullptr in embedder slots.
361
- if (value) value = DecodeExternalPointer(isolate, value);
444
+ return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag)
445
+ : 0;
446
+ #else
447
+ return ReadRawField<Address>(heap_object_ptr, offset);
362
448
  #endif
363
- return value;
364
449
  }
365
450
 
366
451
  #ifdef V8_COMPRESS_POINTERS
367
452
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
368
- static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
369
- static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
370
-
371
- // See v8:10391 for details about V8 heap sandbox.
372
- static constexpr uint32_t kExternalPointerSalt =
373
- 0x7fffffff & ~static_cast<uint32_t>(kHeapObjectTagMask);
453
+ static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
454
+ static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
374
455
 
375
- V8_INLINE static internal::Address GetRootFromOnHeapAddress(
456
+ V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
376
457
  internal::Address addr) {
377
- return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
458
+ return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
378
459
  }
379
460
 
380
461
  V8_INLINE static internal::Address DecompressTaggedAnyField(
381
462
  internal::Address heap_object_ptr, uint32_t value) {
382
- internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
383
- return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
463
+ internal::Address base =
464
+ GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
465
+ return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
384
466
  }
385
467
 
386
- V8_INLINE static Address DecodeExternalPointer(
387
- const Isolate* isolate, ExternalPointer_t encoded_pointer) {
388
- #ifndef V8_HEAP_SANDBOX
389
- return encoded_pointer;
390
- #else
391
- return encoded_pointer ^ kExternalPointerSalt;
392
- #endif
393
- }
394
468
  #endif // V8_COMPRESS_POINTERS
395
469
  };
396
470
 
@@ -414,7 +488,8 @@ void CastCheck<false>::Perform(T* data) {}
414
488
 
415
489
  template <class T>
416
490
  V8_INLINE void PerformCastCheck(T* data) {
417
- CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
491
+ CastCheck<std::is_base_of<Data, T>::value &&
492
+ !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
418
493
  }
419
494
 
420
495
  // A base class for backing stores, which is needed due to vagaries of
@@ -422,6 +497,15 @@ V8_INLINE void PerformCastCheck(T* data) {
422
497
  class BackingStoreBase {};
423
498
 
424
499
  } // namespace internal
500
+
501
+ V8_EXPORT bool CopyAndConvertArrayToCppBufferInt32(Local<Array> src,
502
+ int32_t* dst,
503
+ uint32_t max_length);
504
+
505
+ V8_EXPORT bool CopyAndConvertArrayToCppBufferFloat64(Local<Array> src,
506
+ double* dst,
507
+ uint32_t max_length);
508
+
425
509
  } // namespace v8
426
510
 
427
511
  #endif // INCLUDE_V8_INTERNAL_H_
@@ -10,13 +10,74 @@
10
10
  namespace v8 {
11
11
  namespace metrics {
12
12
 
13
+ struct GarbageCollectionPhases {
14
+ int64_t compact_wall_clock_duration_in_us = -1;
15
+ int64_t mark_wall_clock_duration_in_us = -1;
16
+ int64_t sweep_wall_clock_duration_in_us = -1;
17
+ int64_t weak_wall_clock_duration_in_us = -1;
18
+ };
19
+
20
+ struct GarbageCollectionSizes {
21
+ int64_t bytes_before = -1;
22
+ int64_t bytes_after = -1;
23
+ int64_t bytes_freed = -1;
24
+ };
25
+
26
+ struct GarbageCollectionFullCycle {
27
+ GarbageCollectionPhases total;
28
+ GarbageCollectionPhases total_cpp;
29
+ GarbageCollectionPhases main_thread;
30
+ GarbageCollectionPhases main_thread_cpp;
31
+ GarbageCollectionPhases main_thread_atomic;
32
+ GarbageCollectionPhases main_thread_atomic_cpp;
33
+ GarbageCollectionPhases main_thread_incremental;
34
+ GarbageCollectionPhases main_thread_incremental_cpp;
35
+ GarbageCollectionSizes objects;
36
+ GarbageCollectionSizes objects_cpp;
37
+ GarbageCollectionSizes memory;
38
+ GarbageCollectionSizes memory_cpp;
39
+ double collection_rate_in_percent;
40
+ double collection_rate_cpp_in_percent;
41
+ double efficiency_in_bytes_per_us;
42
+ double efficiency_cpp_in_bytes_per_us;
43
+ double main_thread_efficiency_in_bytes_per_us;
44
+ double main_thread_efficiency_cpp_in_bytes_per_us;
45
+ };
46
+
47
+ struct GarbageCollectionFullMainThreadIncrementalMark {
48
+ int64_t wall_clock_duration_in_us = -1;
49
+ int64_t cpp_wall_clock_duration_in_us = -1;
50
+ };
51
+
52
+ struct GarbageCollectionFullMainThreadBatchedIncrementalMark {
53
+ std::vector<GarbageCollectionFullMainThreadIncrementalMark> events;
54
+ };
55
+
56
+ struct GarbageCollectionFullMainThreadIncrementalSweep {
57
+ int64_t wall_clock_duration_in_us = -1;
58
+ int64_t cpp_wall_clock_duration_in_us = -1;
59
+ };
60
+
61
+ struct GarbageCollectionFullMainThreadBatchedIncrementalSweep {
62
+ std::vector<GarbageCollectionFullMainThreadIncrementalSweep> events;
63
+ };
64
+
65
+ struct GarbageCollectionYoungCycle {
66
+ int64_t total_wall_clock_duration_in_us = -1;
67
+ int64_t main_thread_wall_clock_duration_in_us = -1;
68
+ double collection_rate_in_percent;
69
+ double efficiency_in_bytes_per_us;
70
+ double main_thread_efficiency_in_bytes_per_us;
71
+ };
72
+
13
73
  struct WasmModuleDecoded {
14
74
  bool async = false;
15
75
  bool streamed = false;
16
76
  bool success = false;
17
77
  size_t module_size_in_bytes = 0;
18
78
  size_t function_count = 0;
19
- int64_t wall_clock_time_in_us = 0;
79
+ int64_t wall_clock_duration_in_us = -1;
80
+ int64_t cpu_duration_in_us = -1;
20
81
  };
21
82
 
22
83
  struct WasmModuleCompiled {
@@ -28,30 +89,38 @@ struct WasmModuleCompiled {
28
89
  bool success = false;
29
90
  size_t code_size_in_bytes = 0;
30
91
  size_t liftoff_bailout_count = 0;
31
- int64_t wall_clock_time_in_us = 0;
92
+ int64_t wall_clock_duration_in_us = -1;
93
+ int64_t cpu_duration_in_us = -1;
32
94
  };
33
95
 
34
96
  struct WasmModuleInstantiated {
35
97
  bool async = false;
36
98
  bool success = false;
37
99
  size_t imported_function_count = 0;
38
- int64_t wall_clock_time_in_us = 0;
100
+ int64_t wall_clock_duration_in_us = -1;
39
101
  };
40
102
 
41
103
  struct WasmModuleTieredUp {
42
104
  bool lazy = false;
43
105
  size_t code_size_in_bytes = 0;
44
- int64_t wall_clock_time_in_us = 0;
106
+ int64_t wall_clock_duration_in_us = -1;
107
+ int64_t cpu_duration_in_us = -1;
45
108
  };
46
109
 
47
110
  struct WasmModulesPerIsolate {
48
111
  size_t count = 0;
49
112
  };
50
113
 
51
- #define V8_MAIN_THREAD_METRICS_EVENTS(V) \
52
- V(WasmModuleDecoded) \
53
- V(WasmModuleCompiled) \
54
- V(WasmModuleInstantiated) \
114
+ #define V8_MAIN_THREAD_METRICS_EVENTS(V) \
115
+ V(GarbageCollectionFullCycle) \
116
+ V(GarbageCollectionFullMainThreadIncrementalMark) \
117
+ V(GarbageCollectionFullMainThreadBatchedIncrementalMark) \
118
+ V(GarbageCollectionFullMainThreadIncrementalSweep) \
119
+ V(GarbageCollectionFullMainThreadBatchedIncrementalSweep) \
120
+ V(GarbageCollectionYoungCycle) \
121
+ V(WasmModuleDecoded) \
122
+ V(WasmModuleCompiled) \
123
+ V(WasmModuleInstantiated) \
55
124
  V(WasmModuleTieredUp)
56
125
 
57
126
  #define V8_THREAD_SAFE_METRICS_EVENTS(V) V(WasmModulesPerIsolate)
@@ -175,9 +175,15 @@ class JobDelegate {
175
175
  * Returns a task_id unique among threads currently running this job, such
176
176
  * that GetTaskId() < worker count. To achieve this, the same task_id may be
177
177
  * reused by a different thread after a worker_task returns.
178
+ */
179
+ virtual uint8_t GetTaskId() = 0;
180
+
181
+ /**
182
+ * Returns true if the current task is called from the thread currently
183
+ * running JobHandle::Join().
178
184
  * TODO(etiennep): Make pure virtual once custom embedders implement it.
179
185
  */
180
- virtual uint8_t GetTaskId() { return 0; }
186
+ virtual bool IsJoiningThread() const { return false; }
181
187
  };
182
188
 
183
189
  /**
@@ -210,17 +216,43 @@ class JobHandle {
210
216
  */
211
217
  virtual void Cancel() = 0;
212
218
 
219
+ /*
220
+ * Forces all existing workers to yield ASAP but doesn’t wait for them.
221
+ * Warning, this is dangerous if the Job's callback is bound to or has access
222
+ * to state which may be deleted after this call.
223
+ * TODO(etiennep): Cleanup once implemented by all embedders.
224
+ */
225
+ virtual void CancelAndDetach() { Cancel(); }
226
+
213
227
  /**
214
- * Returns true if there's no work pending and no worker running.
215
- * TODO(etiennep): Make pure virtual once custom embedders implement it.
228
+ * Returns true if there's any work pending or any worker running.
216
229
  */
217
- virtual bool IsCompleted() { return true; }
230
+ virtual bool IsActive() = 0;
231
+
232
+ // TODO(etiennep): Clean up once all overrides are removed.
233
+ V8_DEPRECATED("Use !IsActive() instead.")
234
+ virtual bool IsCompleted() { return !IsActive(); }
218
235
 
219
236
  /**
220
237
  * Returns true if associated with a Job and other methods may be called.
221
- * Returns false after Join() or Cancel() was called.
238
+ * Returns false after Join() or Cancel() was called. This may return true
239
+ * even if no workers are running and IsCompleted() returns true
222
240
  */
223
- virtual bool IsRunning() = 0;
241
+ virtual bool IsValid() = 0;
242
+
243
+ // TODO(etiennep): Clean up once all overrides are removed.
244
+ V8_DEPRECATED("Use IsValid() instead.")
245
+ virtual bool IsRunning() { return IsValid(); }
246
+
247
+ /**
248
+ * Returns true if job priority can be changed.
249
+ */
250
+ virtual bool UpdatePriorityEnabled() const { return false; }
251
+
252
+ /**
253
+ * Update this Job's priority.
254
+ */
255
+ virtual void UpdatePriority(TaskPriority new_priority) {}
224
256
  };
225
257
 
226
258
  /**
@@ -233,23 +265,17 @@ class JobTask {
233
265
  virtual void Run(JobDelegate* delegate) = 0;
234
266
 
235
267
  /**
236
- * Controls the maximum number of threads calling Run() concurrently. Run() is
237
- * only invoked if the number of threads previously running Run() was less
238
- * than the value returned. Since GetMaxConcurrency() is a leaf function, it
239
- * must not call back any JobHandle methods.
268
+ * Controls the maximum number of threads calling Run() concurrently, given
269
+ * the number of threads currently assigned to this job and executing Run().
270
+ * Run() is only invoked if the number of threads previously running Run() was
271
+ * less than the value returned. Since GetMaxConcurrency() is a leaf function,
272
+ * it must not call back any JobHandle methods.
240
273
  */
241
- virtual size_t GetMaxConcurrency() const = 0;
274
+ virtual size_t GetMaxConcurrency(size_t worker_count) const = 0;
242
275
 
243
- /*
244
- * Meant to replace the version above, given the number of threads currently
245
- * assigned to this job and executing Run(). This is useful when the result
246
- * must include local work items not visible globaly by other workers.
247
- * TODO(etiennep): Replace the version above by this once custom embedders are
248
- * migrated.
249
- */
250
- size_t GetMaxConcurrency(size_t worker_count) const {
251
- return GetMaxConcurrency();
252
- }
276
+ // TODO(1114823): Clean up once all overrides are removed.
277
+ V8_DEPRECATED("Use the version that takes |worker_count|.")
278
+ virtual size_t GetMaxConcurrency() const { return 0; }
253
279
  };
254
280
 
255
281
  /**
@@ -382,7 +408,6 @@ class PageAllocator {
382
408
  kNoAccess,
383
409
  kRead,
384
410
  kReadWrite,
385
- // TODO(hpayer): Remove this flag. Memory should never be rwx.
386
411
  kReadWriteExecute,
387
412
  kReadExecute,
388
413
  // Set this when reserving memory that will later require kReadWriteExecute