libv8 6.2.414.42.1-universal-darwin-16 → 8.4.255.0-universal-darwin-16

Files changed (59)
  1. checksums.yaml +5 -5
  2. data/ext/libv8/paths.rb +5 -18
  3. data/lib/libv8/version.rb +1 -1
  4. data/vendor/v8/include/cppgc/allocation.h +124 -0
  5. data/vendor/v8/include/cppgc/garbage-collected.h +192 -0
  6. data/vendor/v8/include/cppgc/heap.h +50 -0
  7. data/vendor/v8/include/cppgc/internal/accessors.h +26 -0
  8. data/vendor/v8/include/cppgc/internal/api-constants.h +44 -0
  9. data/vendor/v8/include/cppgc/internal/compiler-specific.h +26 -0
  10. data/vendor/v8/include/cppgc/internal/finalizer-trait.h +90 -0
  11. data/vendor/v8/include/cppgc/internal/gc-info.h +43 -0
  12. data/vendor/v8/include/cppgc/internal/logging.h +50 -0
  13. data/vendor/v8/include/cppgc/internal/persistent-node.h +109 -0
  14. data/vendor/v8/include/cppgc/internal/pointer-policies.h +133 -0
  15. data/vendor/v8/include/cppgc/internal/prefinalizer-handler.h +31 -0
  16. data/vendor/v8/include/cppgc/liveness-broker.h +50 -0
  17. data/vendor/v8/include/cppgc/macros.h +26 -0
  18. data/vendor/v8/include/cppgc/member.h +206 -0
  19. data/vendor/v8/include/cppgc/persistent.h +304 -0
  20. data/vendor/v8/include/cppgc/platform.h +31 -0
  21. data/vendor/v8/include/cppgc/prefinalizer.h +54 -0
  22. data/vendor/v8/include/cppgc/source-location.h +59 -0
  23. data/vendor/v8/include/cppgc/trace-trait.h +67 -0
  24. data/vendor/v8/include/cppgc/type-traits.h +109 -0
  25. data/vendor/v8/include/cppgc/visitor.h +137 -0
  26. data/vendor/v8/include/libplatform/libplatform.h +15 -13
  27. data/vendor/v8/include/libplatform/v8-tracing.h +69 -21
  28. data/vendor/v8/include/v8-fast-api-calls.h +412 -0
  29. data/vendor/v8/include/v8-inspector-protocol.h +4 -4
  30. data/vendor/v8/include/v8-inspector.h +95 -35
  31. data/vendor/v8/include/v8-internal.h +389 -0
  32. data/vendor/v8/include/v8-platform.h +388 -52
  33. data/vendor/v8/include/v8-profiler.h +363 -213
  34. data/vendor/v8/include/v8-util.h +33 -36
  35. data/vendor/v8/include/v8-version-string.h +10 -5
  36. data/vendor/v8/include/v8-version.h +4 -4
  37. data/vendor/v8/include/v8-wasm-trap-handler-posix.h +31 -0
  38. data/vendor/v8/include/v8-wasm-trap-handler-win.h +28 -0
  39. data/vendor/v8/include/v8.h +3566 -1925
  40. data/vendor/v8/include/v8config.h +162 -114
  41. data/vendor/v8/out.gn/libv8/obj/libv8_libbase.a +0 -0
  42. data/vendor/v8/out.gn/libv8/obj/libv8_libplatform.a +0 -0
  43. data/vendor/v8/out.gn/libv8/obj/libv8_monolith.a +0 -0
  44. data/vendor/v8/out.gn/libv8/obj/third_party/icu/libicui18n.a +0 -0
  45. data/vendor/v8/out.gn/libv8/obj/third_party/icu/libicuuc.a +0 -0
  46. data/vendor/v8/out.gn/libv8/obj/third_party/zlib/google/libcompression_utils_portable.a +0 -0
  47. data/vendor/v8/out.gn/libv8/obj/third_party/zlib/libchrome_zlib.a +0 -0
  48. metadata +51 -30
  49. data/ext/libv8/arch.rb +0 -20
  50. data/vendor/v8/include/v8-debug.h +0 -255
  51. data/vendor/v8/include/v8-testing.h +0 -48
  52. data/vendor/v8/out/x64.release/libv8_base.a +0 -0
  53. data/vendor/v8/out/x64.release/libv8_builtins_generators.a +0 -0
  54. data/vendor/v8/out/x64.release/libv8_builtins_setup.a +0 -0
  55. data/vendor/v8/out/x64.release/libv8_libbase.a +0 -0
  56. data/vendor/v8/out/x64.release/libv8_libplatform.a +0 -0
  57. data/vendor/v8/out/x64.release/libv8_libsampler.a +0 -0
  58. data/vendor/v8/out/x64.release/libv8_nosnapshot.a +0 -0
  59. data/vendor/v8/out/x64.release/libv8_snapshot.a +0 -0
data/vendor/v8/include/v8-inspector-protocol.h
@@ -5,9 +5,9 @@
  #ifndef V8_V8_INSPECTOR_PROTOCOL_H_
  #define V8_V8_INSPECTOR_PROTOCOL_H_
 
- #include "inspector/Debugger.h" // NOLINT(build/include)
- #include "inspector/Runtime.h" // NOLINT(build/include)
- #include "inspector/Schema.h" // NOLINT(build/include)
- #include "v8-inspector.h" // NOLINT(build/include)
+ #include "inspector/Debugger.h" // NOLINT(build/include_directory)
+ #include "inspector/Runtime.h" // NOLINT(build/include_directory)
+ #include "inspector/Schema.h" // NOLINT(build/include_directory)
+ #include "v8-inspector.h" // NOLINT(build/include_directory)
 
  #endif // V8_V8_INSPECTOR_PROTOCOL_H_
data/vendor/v8/include/v8-inspector.h
@@ -9,8 +9,9 @@
  #include <cctype>
 
  #include <memory>
+ #include <unordered_map>
 
- #include "v8.h" // NOLINT(build/include)
+ #include "v8.h" // NOLINT(build/include_directory)
 
  namespace v8_inspector {
 
@@ -24,6 +25,7 @@ namespace Runtime {
  namespace API {
  class RemoteObject;
  class StackTrace;
+ class StackTraceId;
  }
  }
  namespace Schema {
@@ -62,16 +64,16 @@ class V8_EXPORT StringView {
 
  class V8_EXPORT StringBuffer {
  public:
- virtual ~StringBuffer() {}
- virtual const StringView& string() = 0;
+ virtual ~StringBuffer() = default;
+ virtual StringView string() const = 0;
  // This method copies contents.
- static std::unique_ptr<StringBuffer> create(const StringView&);
+ static std::unique_ptr<StringBuffer> create(StringView);
  };
 
  class V8_EXPORT V8ContextInfo {
  public:
  V8ContextInfo(v8::Local<v8::Context> context, int contextGroupId,
- const StringView& humanReadableName)
+ StringView humanReadableName)
  : context(context),
  contextGroupId(contextGroupId),
  humanReadableName(humanReadableName),
@@ -87,7 +89,6 @@ class V8_EXPORT V8ContextInfo {
 
  static int executionContextId(v8::Local<v8::Context> context);
 
- private:
  // Disallow copying and allocating this one.
  enum NotNullTagEnum { NotNullLiteral };
  void* operator new(size_t) = delete;
@@ -99,6 +100,7 @@ class V8_EXPORT V8ContextInfo {
 
  class V8_EXPORT V8StackTrace {
  public:
+ virtual StringView firstNonEmptySourceURL() const = 0;
  virtual bool isEmpty() const = 0;
  virtual StringView topSourceURL() const = 0;
  virtual int topLineNumber() const = 0;
@@ -106,9 +108,11 @@ class V8_EXPORT V8StackTrace {
  virtual StringView topScriptId() const = 0;
  virtual StringView topFunctionName() const = 0;
 
- virtual ~V8StackTrace() {}
+ virtual ~V8StackTrace() = default;
  virtual std::unique_ptr<protocol::Runtime::API::StackTrace>
  buildInspectorObject() const = 0;
+ virtual std::unique_ptr<protocol::Runtime::API::StackTrace>
+ buildInspectorObject(int maxAsyncDepth) const = 0;
  virtual std::unique_ptr<StringBuffer> toString() const = 0;
 
  // Safe to pass between threads, drops async chain.
@@ -117,50 +121,52 @@ class V8_EXPORT V8StackTrace {
 
  class V8_EXPORT V8InspectorSession {
  public:
- virtual ~V8InspectorSession() {}
+ virtual ~V8InspectorSession() = default;
 
  // Cross-context inspectable values (DOM nodes in different worlds, etc.).
  class V8_EXPORT Inspectable {
  public:
  virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
- virtual ~Inspectable() {}
+ virtual ~Inspectable() = default;
  };
  virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;
 
  // Dispatching protocol messages.
- static bool canDispatchMethod(const StringView& method);
- virtual void dispatchProtocolMessage(const StringView& message) = 0;
- virtual std::unique_ptr<StringBuffer> stateJSON() = 0;
+ static bool canDispatchMethod(StringView method);
+ virtual void dispatchProtocolMessage(StringView message) = 0;
+ virtual std::vector<uint8_t> state() = 0;
  virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
  supportedDomains() = 0;
 
  // Debugger actions.
- virtual void schedulePauseOnNextStatement(const StringView& breakReason,
- const StringView& breakDetails) = 0;
+ virtual void schedulePauseOnNextStatement(StringView breakReason,
+ StringView breakDetails) = 0;
  virtual void cancelPauseOnNextStatement() = 0;
- virtual void breakProgram(const StringView& breakReason,
- const StringView& breakDetails) = 0;
+ virtual void breakProgram(StringView breakReason,
+ StringView breakDetails) = 0;
  virtual void setSkipAllPauses(bool) = 0;
- virtual void resume() = 0;
+ virtual void resume(bool setTerminateOnResume = false) = 0;
  virtual void stepOver() = 0;
  virtual std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>>
- searchInTextByLines(const StringView& text, const StringView& query,
- bool caseSensitive, bool isRegex) = 0;
+ searchInTextByLines(StringView text, StringView query, bool caseSensitive,
+ bool isRegex) = 0;
 
  // Remote objects.
  virtual std::unique_ptr<protocol::Runtime::API::RemoteObject> wrapObject(
- v8::Local<v8::Context>, v8::Local<v8::Value>,
- const StringView& groupName) = 0;
+ v8::Local<v8::Context>, v8::Local<v8::Value>, StringView groupName,
+ bool generatePreview) = 0;
+
  virtual bool unwrapObject(std::unique_ptr<StringBuffer>* error,
- const StringView& objectId, v8::Local<v8::Value>*,
+ StringView objectId, v8::Local<v8::Value>*,
  v8::Local<v8::Context>*,
  std::unique_ptr<StringBuffer>* objectGroup) = 0;
- virtual void releaseObjectGroup(const StringView&) = 0;
+ virtual void releaseObjectGroup(StringView) = 0;
+ virtual void triggerPreciseCoverageDeltaUpdate(StringView occassion) = 0;
  };
 
  class V8_EXPORT V8InspectorClient {
  public:
- virtual ~V8InspectorClient() {}
+ virtual ~V8InspectorClient() = default;
 
  virtual void runMessageLoopOnPause(int contextGroupId) {}
  virtual void quitMessageLoopOnPause() {}
@@ -211,55 +217,109 @@ class V8_EXPORT V8InspectorClient {
  // TODO(dgozman): this was added to support service worker shadow page. We
  // should not connect at all.
  virtual bool canExecuteScripts(int contextGroupId) { return true; }
+
+ virtual void maxAsyncCallStackDepthChanged(int depth) {}
+
+ virtual std::unique_ptr<StringBuffer> resourceNameToUrl(
+ const StringView& resourceName) {
+ return nullptr;
+ }
+ };
+
+ // These stack trace ids are intended to be passed between debuggers and be
+ // resolved later. This allows to track cross-debugger calls and step between
+ // them if a single client connects to multiple debuggers.
+ struct V8_EXPORT V8StackTraceId {
+ uintptr_t id;
+ std::pair<int64_t, int64_t> debugger_id;
+ bool should_pause = false;
+
+ V8StackTraceId();
+ V8StackTraceId(const V8StackTraceId&) = default;
+ V8StackTraceId(uintptr_t id, const std::pair<int64_t, int64_t> debugger_id);
+ V8StackTraceId(uintptr_t id, const std::pair<int64_t, int64_t> debugger_id,
+ bool should_pause);
+ explicit V8StackTraceId(StringView);
+ V8StackTraceId& operator=(const V8StackTraceId&) = default;
+ V8StackTraceId& operator=(V8StackTraceId&&) noexcept = default;
+ ~V8StackTraceId() = default;
+
+ bool IsInvalid() const;
+ std::unique_ptr<StringBuffer> ToString();
  };
 
  class V8_EXPORT V8Inspector {
  public:
  static std::unique_ptr<V8Inspector> create(v8::Isolate*, V8InspectorClient*);
- virtual ~V8Inspector() {}
+ virtual ~V8Inspector() = default;
 
  // Contexts instrumentation.
  virtual void contextCreated(const V8ContextInfo&) = 0;
  virtual void contextDestroyed(v8::Local<v8::Context>) = 0;
  virtual void resetContextGroup(int contextGroupId) = 0;
+ virtual v8::MaybeLocal<v8::Context> contextById(int contextId) = 0;
 
  // Various instrumentation.
  virtual void idleStarted() = 0;
  virtual void idleFinished() = 0;
 
  // Async stack traces instrumentation.
- virtual void asyncTaskScheduled(const StringView& taskName, void* task,
+ virtual void asyncTaskScheduled(StringView taskName, void* task,
  bool recurring) = 0;
  virtual void asyncTaskCanceled(void* task) = 0;
  virtual void asyncTaskStarted(void* task) = 0;
  virtual void asyncTaskFinished(void* task) = 0;
  virtual void allAsyncTasksCanceled() = 0;
 
+ virtual V8StackTraceId storeCurrentStackTrace(StringView description) = 0;
+ virtual void externalAsyncTaskStarted(const V8StackTraceId& parent) = 0;
+ virtual void externalAsyncTaskFinished(const V8StackTraceId& parent) = 0;
+
  // Exceptions instrumentation.
- virtual unsigned exceptionThrown(
- v8::Local<v8::Context>, const StringView& message,
- v8::Local<v8::Value> exception, const StringView& detailedMessage,
- const StringView& url, unsigned lineNumber, unsigned columnNumber,
- std::unique_ptr<V8StackTrace>, int scriptId) = 0;
+ virtual unsigned exceptionThrown(v8::Local<v8::Context>, StringView message,
+ v8::Local<v8::Value> exception,
+ StringView detailedMessage, StringView url,
+ unsigned lineNumber, unsigned columnNumber,
+ std::unique_ptr<V8StackTrace>,
+ int scriptId) = 0;
  virtual void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
- const StringView& message) = 0;
+ StringView message) = 0;
 
  // Connection.
  class V8_EXPORT Channel {
  public:
- virtual ~Channel() {}
+ virtual ~Channel() = default;
  virtual void sendResponse(int callId,
  std::unique_ptr<StringBuffer> message) = 0;
  virtual void sendNotification(std::unique_ptr<StringBuffer> message) = 0;
  virtual void flushProtocolNotifications() = 0;
  };
- virtual std::unique_ptr<V8InspectorSession> connect(
- int contextGroupId, Channel*, const StringView& state) = 0;
+ virtual std::unique_ptr<V8InspectorSession> connect(int contextGroupId,
+ Channel*,
+ StringView state) = 0;
 
  // API methods.
  virtual std::unique_ptr<V8StackTrace> createStackTrace(
  v8::Local<v8::StackTrace>) = 0;
  virtual std::unique_ptr<V8StackTrace> captureStackTrace(bool fullStack) = 0;
+
+ // Performance counters.
+ class V8_EXPORT Counters : public std::enable_shared_from_this<Counters> {
+ public:
+ explicit Counters(v8::Isolate* isolate);
+ ~Counters();
+ const std::unordered_map<std::string, int>& getCountersMap() const {
+ return m_countersMap;
+ }
+
+ private:
+ static int* getCounterPtr(const char* name);
+
+ v8::Isolate* m_isolate;
+ std::unordered_map<std::string, int> m_countersMap;
+ };
+
+ virtual std::shared_ptr<Counters> enableCounters() = 0;
  };
 
  } // namespace v8_inspector
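
The v8-inspector.h hunks above switch most entry points from const StringView& to by-value StringView, replace stateJSON() with a byte-vector state(), and move connect() to the three-argument (int, Channel*, StringView) form. The sketch below shows how an embedder might call the updated 8.4 API. It is illustrative only and not part of the gem: NullChannel and toStringView are made-up helper names, and the isolate, context, and client are assumed to be set up elsewhere by the embedder.

// Sketch only: wiring a V8InspectorSession against the 8.4 v8-inspector.h API.
#include <memory>
#include <string>
#include <vector>
#include "v8-inspector.h"

namespace {

// Illustrative no-op channel; a real embedder forwards these to its frontend.
class NullChannel : public v8_inspector::V8Inspector::Channel {
 public:
  void sendResponse(int /*callId*/,
                    std::unique_ptr<v8_inspector::StringBuffer> /*message*/) override {}
  void sendNotification(std::unique_ptr<v8_inspector::StringBuffer> /*message*/) override {}
  void flushProtocolNotifications() override {}
};

// Helper (not part of V8): view an 8-bit std::string as a StringView.
v8_inspector::StringView toStringView(const std::string& s) {
  return v8_inspector::StringView(
      reinterpret_cast<const uint8_t*>(s.data()), s.size());
}

}  // namespace

void AttachInspector(v8::Isolate* isolate, v8::Local<v8::Context> context,
                     v8_inspector::V8InspectorClient* client) {
  auto inspector = v8_inspector::V8Inspector::create(isolate, client);

  // Register the context; humanReadableName is now taken by value.
  const std::string name = "embedder context";
  inspector->contextCreated(v8_inspector::V8ContextInfo(
      context, /*contextGroupId=*/1, toStringView(name)));

  // connect() now takes StringView state instead of const StringView&.
  NullChannel channel;
  std::unique_ptr<v8_inspector::V8InspectorSession> session =
      inspector->connect(/*contextGroupId=*/1, &channel,
                         /*state=*/v8_inspector::StringView());

  // Protocol messages are also passed by value; session state is now
  // returned as a byte vector via state() rather than via stateJSON().
  const std::string msg = R"({"id":1,"method":"Runtime.enable"})";
  session->dispatchProtocolMessage(toStringView(msg));
  std::vector<uint8_t> saved_state = session->state();
  (void)saved_state;
}

Passing StringView by value is cheap (it is just a pointer plus a length), which is presumably why the 8.x header drops the const references throughout.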
data/vendor/v8/include/v8-internal.h
@@ -0,0 +1,389 @@
+ // Copyright 2018 the V8 project authors. All rights reserved.
+ // Use of this source code is governed by a BSD-style license that can be
+ // found in the LICENSE file.
+
+ #ifndef INCLUDE_V8_INTERNAL_H_
+ #define INCLUDE_V8_INTERNAL_H_
+
+ #include <stddef.h>
+ #include <stdint.h>
+ #include <string.h>
+ #include <type_traits>
+
+ #include "v8-version.h" // NOLINT(build/include_directory)
+ #include "v8config.h" // NOLINT(build/include_directory)
+
+ namespace v8 {
+
+ class Context;
+ class Data;
+ class Isolate;
+
+ namespace internal {
+
+ class Isolate;
+
+ typedef uintptr_t Address;
+ static const Address kNullAddress = 0;
+
+ /**
+ * Configuration of tagging scheme.
+ */
+ const int kApiSystemPointerSize = sizeof(void*);
+ const int kApiDoubleSize = sizeof(double);
+ const int kApiInt32Size = sizeof(int32_t);
+ const int kApiInt64Size = sizeof(int64_t);
+
+ // Tag information for HeapObject.
+ const int kHeapObjectTag = 1;
+ const int kWeakHeapObjectTag = 3;
+ const int kHeapObjectTagSize = 2;
+ const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
+
+ // Tag information for Smi.
+ const int kSmiTag = 0;
+ const int kSmiTagSize = 1;
+ const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
+
+ template <size_t tagged_ptr_size>
+ struct SmiTagging;
+
+ constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
+ constexpr uintptr_t kUintptrAllBitsSet =
+ static_cast<uintptr_t>(kIntptrAllBitsSet);
+
+ // Smi constants for systems where tagged pointer is a 32-bit value.
+ template <>
+ struct SmiTagging<4> {
+ enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
+
+ static constexpr intptr_t kSmiMinValue =
+ static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
+ static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
+
+ V8_INLINE static int SmiToInt(const internal::Address value) {
+ int shift_bits = kSmiTagSize + kSmiShiftSize;
+ // Truncate and shift down (requires >> to be sign extending).
+ return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
+ }
+ V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+ // Is value in range [kSmiMinValue, kSmiMaxValue].
+ // Use unsigned operations in order to avoid undefined behaviour in case of
+ // signed integer overflow.
+ return (static_cast<uintptr_t>(value) -
+ static_cast<uintptr_t>(kSmiMinValue)) <=
+ (static_cast<uintptr_t>(kSmiMaxValue) -
+ static_cast<uintptr_t>(kSmiMinValue));
+ }
+ };
+
+ // Smi constants for systems where tagged pointer is a 64-bit value.
+ template <>
+ struct SmiTagging<8> {
+ enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
+
+ static constexpr intptr_t kSmiMinValue =
+ static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
+ static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
+
+ V8_INLINE static int SmiToInt(const internal::Address value) {
+ int shift_bits = kSmiTagSize + kSmiShiftSize;
+ // Shift down and throw away top 32 bits.
+ return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
+ }
+ V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+ // To be representable as a long smi, the value must be a 32-bit integer.
+ return (value == static_cast<int32_t>(value));
+ }
+ };
+
+ #ifdef V8_COMPRESS_POINTERS
+ static_assert(
+ kApiSystemPointerSize == kApiInt64Size,
+ "Pointer compression can be enabled only for 64-bit architectures");
+ const int kApiTaggedSize = kApiInt32Size;
+ #else
+ const int kApiTaggedSize = kApiSystemPointerSize;
+ #endif
+
+ constexpr bool PointerCompressionIsEnabled() {
+ return kApiTaggedSize != kApiSystemPointerSize;
+ }
+
+ #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
+ using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
+ #else
+ using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
+ #endif
+
+ // TODO(ishell): Consinder adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
+ // since it's used much more often than the inividual constants.
+ const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
+ const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
+ const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
+ const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
+ constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
+ constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
+
+ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
+ return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
+ kSmiTag;
+ }
+
+ /**
+ * This class exports constants and functionality from within v8 that
+ * is necessary to implement inline functions in the v8 api. Don't
+ * depend on functions and constants defined here.
+ */
+ class Internals {
+ public:
+ // These values match non-compiler-dependent values defined within
+ // the implementation of v8.
+ static const int kHeapObjectMapOffset = 0;
+ static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
+ static const int kStringResourceOffset =
+ 1 * kApiTaggedSize + 2 * kApiInt32Size;
+
+ static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
+ static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
+ static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
+ static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
+ static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
+ static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
+ static const int kFullStringRepresentationMask = 0x0f;
+ static const int kStringEncodingMask = 0x8;
+ static const int kExternalTwoByteRepresentationTag = 0x02;
+ static const int kExternalOneByteRepresentationTag = 0x0a;
+
+ static const uint32_t kNumIsolateDataSlots = 4;
+
+ // IsolateData layout guarantees.
+ static const int kIsolateEmbedderDataOffset = 0;
+ static const int kExternalMemoryOffset =
+ kNumIsolateDataSlots * kApiSystemPointerSize;
+ static const int kExternalMemoryLimitOffset =
+ kExternalMemoryOffset + kApiInt64Size;
+ static const int kExternalMemoryLowSinceMarkCompactOffset =
+ kExternalMemoryLimitOffset + kApiInt64Size;
+ static const int kIsolateFastCCallCallerFpOffset =
+ kExternalMemoryLowSinceMarkCompactOffset + kApiInt64Size;
+ static const int kIsolateFastCCallCallerPcOffset =
+ kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
+ static const int kIsolateStackGuardOffset =
+ kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
+ static const int kIsolateRootsOffset =
+ kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
+
+ static const int kUndefinedValueRootIndex = 4;
+ static const int kTheHoleValueRootIndex = 5;
+ static const int kNullValueRootIndex = 6;
+ static const int kTrueValueRootIndex = 7;
+ static const int kFalseValueRootIndex = 8;
+ static const int kEmptyStringRootIndex = 9;
+
+ static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
+ static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
+ static const int kNodeStateMask = 0x7;
+ static const int kNodeStateIsWeakValue = 2;
+ static const int kNodeStateIsPendingValue = 3;
+
+ static const int kFirstNonstringType = 0x40;
+ static const int kOddballType = 0x43;
+ static const int kForeignType = 0x46;
+ static const int kJSSpecialApiObjectType = 0x410;
+ static const int kJSApiObjectType = 0x420;
+ static const int kJSObjectType = 0x421;
+
+ static const int kUndefinedOddballKind = 5;
+ static const int kNullOddballKind = 3;
+
+ // Constants used by PropertyCallbackInfo to check if we should throw when an
+ // error occurs.
+ static const int kThrowOnError = 0;
+ static const int kDontThrow = 1;
+ static const int kInferShouldThrowMode = 2;
+
+ // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
+ // incremental GC once the external memory reaches this limit.
+ static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
+
+ V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
+ V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
+ #ifdef V8_ENABLE_CHECKS
+ CheckInitializedImpl(isolate);
+ #endif
+ }
+
+ V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
+ return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
+ }
+
+ V8_INLINE static int SmiValue(const internal::Address value) {
+ return PlatformSmiTagging::SmiToInt(value);
+ }
+
+ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
+ return internal::IntToSmi(value);
+ }
+
+ V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
+ return PlatformSmiTagging::IsValidSmi(value);
+ }
+
+ V8_INLINE static int GetInstanceType(const internal::Address obj) {
+ typedef internal::Address A;
+ A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
+ return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
+ }
+
+ V8_INLINE static int GetOddballKind(const internal::Address obj) {
+ return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
+ }
+
+ V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
+ int representation = (instance_type & kFullStringRepresentationMask);
+ return representation == kExternalTwoByteRepresentationTag;
+ }
+
+ V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
+ return *addr & static_cast<uint8_t>(1U << shift);
+ }
+
+ V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
+ int shift) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
+ uint8_t mask = static_cast<uint8_t>(1U << shift);
+ *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
+ }
+
+ V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
+ return *addr & kNodeStateMask;
+ }
+
+ V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
+ *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
+ }
+
+ V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
+ void* data) {
+ internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
+ kIsolateEmbedderDataOffset +
+ slot * kApiSystemPointerSize;
+ *reinterpret_cast<void**>(addr) = data;
+ }
+
+ V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
+ uint32_t slot) {
+ internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
+ kIsolateEmbedderDataOffset +
+ slot * kApiSystemPointerSize;
+ return *reinterpret_cast<void* const*>(addr);
+ }
+
+ V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
+ internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
+ kIsolateRootsOffset +
+ index * kApiSystemPointerSize;
+ return reinterpret_cast<internal::Address*>(addr);
+ }
+
+ template <typename T>
+ V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
+ int offset) {
+ internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
+ #ifdef V8_COMPRESS_POINTERS
+ if (sizeof(T) > kApiTaggedSize) {
+ // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
+ // fields (external pointers, doubles and BigInt data) are only
+ // kTaggedSize aligned so we have to use unaligned pointer friendly way of
+ // accessing them in order to avoid undefined behavior in C++ code.
+ T r;
+ memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
+ return r;
+ }
+ #endif
+ return *reinterpret_cast<const T*>(addr);
+ }
+
+ V8_INLINE static internal::Address ReadTaggedPointerField(
+ internal::Address heap_object_ptr, int offset) {
+ #ifdef V8_COMPRESS_POINTERS
+ uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
+ internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
+ return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+ #else
+ return ReadRawField<internal::Address>(heap_object_ptr, offset);
+ #endif
+ }
+
+ V8_INLINE static internal::Address ReadTaggedSignedField(
+ internal::Address heap_object_ptr, int offset) {
+ #ifdef V8_COMPRESS_POINTERS
+ uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
+ return static_cast<internal::Address>(static_cast<uintptr_t>(value));
+ #else
+ return ReadRawField<internal::Address>(heap_object_ptr, offset);
+ #endif
+ }
+
+ #ifdef V8_COMPRESS_POINTERS
+ // See v8:7703 or src/ptr-compr.* for details about pointer compression.
+ static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
+ static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
+
+ V8_INLINE static internal::Address GetRootFromOnHeapAddress(
+ internal::Address addr) {
+ return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
+ }
+
+ V8_INLINE static internal::Address DecompressTaggedAnyField(
+ internal::Address heap_object_ptr, uint32_t value) {
+ internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
+ return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+ }
+ #endif // V8_COMPRESS_POINTERS
+ };
+
+ // Only perform cast check for types derived from v8::Data since
+ // other types do not implement the Cast method.
+ template <bool PerformCheck>
+ struct CastCheck {
+ template <class T>
+ static void Perform(T* data);
+ };
+
+ template <>
+ template <class T>
+ void CastCheck<true>::Perform(T* data) {
+ T::Cast(data);
+ }
+
+ template <>
+ template <class T>
+ void CastCheck<false>::Perform(T* data) {}
+
+ template <class T>
+ V8_INLINE void PerformCastCheck(T* data) {
+ CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
+ }
+
+ // {obj} must be the raw tagged pointer representation of a HeapObject
+ // that's guaranteed to never be in ReadOnlySpace.
+ V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
+
+ // Returns if we need to throw when an error occurs. This infers the language
+ // mode based on the current context and the closure. This returns true if the
+ // language mode is strict.
+ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
+
+ // A base class for backing stores, which is needed due to vagaries of
+ // how static casts work with std::shared_ptr.
+ class BackingStoreBase {};
+
+ } // namespace internal
+ } // namespace v8
+
+ #endif // INCLUDE_V8_INTERNAL_H_
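
The new v8-internal.h defines the Smi tagging scheme the inline API relies on: small integers (Smis) are stored directly in a tagged word with tag bit 0, while heap object pointers carry tag 1 in their low two bits. The standalone sketch below mirrors the 64-bit SmiTagging<8> arithmetic from the hunk above (shift size 31, value size 32). It does not include the header and is not part of the gem; TaggedWord stands in for internal::Address, and uint64_t is used so the example stays well-defined on any host.

// Standalone illustration of the SmiTagging<8> scheme from v8-internal.h.
// A Smi is the integer shifted left by kSmiTagSize + kSmiShiftSize
// (1 + 31 = 32 bits) with tag bit 0; heap object pointers carry tag 1.
#include <cassert>
#include <cstdint>

using TaggedWord = uint64_t;  // stands in for internal::Address on 64-bit

constexpr int kSmiTag = 0;
constexpr int kSmiTagSize = 1;
constexpr int kSmiShiftSize = 31;  // SmiTagging<8>
constexpr int kHeapObjectTag = 1;
constexpr int kHeapObjectTagSize = 2;
constexpr TaggedWord kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

constexpr TaggedWord IntToSmi(int value) {
  return (static_cast<TaggedWord>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}

constexpr int SmiToInt(TaggedWord value) {
  // Sign-extending shift down, throwing away the top 32 bits.
  return static_cast<int>(static_cast<int64_t>(value) >>
                          (kSmiTagSize + kSmiShiftSize));
}

constexpr bool HasHeapObjectTag(TaggedWord value) {
  return (value & kHeapObjectTagMask) == kHeapObjectTag;
}

int main() {
  // Tagging and untagging round-trips the original integer, including
  // negative values thanks to the arithmetic right shift.
  assert(SmiToInt(IntToSmi(42)) == 42);
  assert(SmiToInt(IntToSmi(-7)) == -7);
  // The low bits alone distinguish a Smi from a heap object pointer.
  assert(!HasHeapObjectTag(IntToSmi(42)));
  return 0;
}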