libv8 6.3.292.48.0beta1 → 6.3.292.48.0beta2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 61c3bf8a873f670cef6d2846cb09c8e6e8837160
-  data.tar.gz: 3ab377edad709714dfc61f7be3f37f3801ce1b3d
+  metadata.gz: c2c675f05ea4892e6a7f0bd9ec6de233cea27e28
+  data.tar.gz: d09ddbb4549efc47781b8baeab370ae209776377
 SHA512:
-  metadata.gz: 8645f9ffc99b2b8226880f664f088e76f49038138ed73984d546f88fb2a4bb07f659eb7540aa2694442cc6ea7327fbf39283c86d4c459ef44dd87bbecdc86fb6
-  data.tar.gz: 3fdcabaa1d2e7bebd1439a79a79fd0e52ffeebda196fd5b595895314e3128e0ae43a49b1fa17602ffdc1274db1df0eac5a4300f15a777aeaaafc122da70fdc6c
+  metadata.gz: f690ce0e6e410ea38ee9af90abe163a7a239fc079d0758af7d54aa15ff4e902f28d808a4ac299c02f8e25e8fc7de80904d0c8aa06c4e9a94d85bfff2985f33e8
+  data.tar.gz: eca47f78e0897e0c2212fcf5fa8acf392369ee1c7ba0fe724bf03fe72af72462bd7dd6f199c654c8bd64e868c5daf12afc368d1dc9d0c70cb1e7ef6dafdaa4e9
data/CHANGELOG.md CHANGED
@@ -1,6 +1,7 @@
 ### Unreleased
 
 * Update upstream v8 version to 6.3.292.48
+* Add a fix for https://bugs.chromium.org/p/v8/issues/detail?id=6933
 
 ### v6.2.414.42.0, v6.2.414.42.1 - 2017-10-25
 
data/lib/libv8/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Libv8
-  VERSION = "6.3.292.48.0beta1"
+  VERSION = "6.3.292.48.0beta2"
 end
data/patches/0001-Build-a-standalone-static-library.patch CHANGED
@@ -1,4 +1,4 @@
-From cbe264bb5c231d06f64b48413f3f268ca490e00a Mon Sep 17 00:00:00 2001
+From a018c0c78b9d40c9d9787fe969530e40fca5d38a Mon Sep 17 00:00:00 2001
 From: Petko Bordjukov <bordjukov@gmail.com>
 Date: Mon, 25 Jul 2016 00:00:19 +0300
 Subject: [PATCH 1/5] Build a standalone static library
@@ -22,5 +22,5 @@ index 63930d8aef..d0d00d3f67 100644
       'defines!': [
         'DEBUG',
 --
-2.14.3
+2.15.1
 
data/patches/0002-Don-t-compile-unnecessary-stuff.patch CHANGED
@@ -1,4 +1,4 @@
-From 96075b7eeeef64063186d743d1fa36e6de494a6e Mon Sep 17 00:00:00 2001
+From 9ec36a77bb241ee4532553809b1ec7281ea998b1 Mon Sep 17 00:00:00 2001
 From: Petko Bordjukov <bordjukov@gmail.com>
 Date: Fri, 28 Jul 2017 11:11:08 +0300
 Subject: [PATCH 2/5] Don't compile unnecessary stuff
@@ -9,7 +9,7 @@ Subject: [PATCH 2/5] Don't compile unnecessary stuff
  2 files changed, 2 insertions(+), 40 deletions(-)
 
 diff --git a/Makefile b/Makefile
-index eb146ac244..902832af48 100644
+index 167ebf8c08..d9d0b9b306 100644
 --- a/Makefile
 +++ b/Makefile
 @@ -268,14 +268,9 @@ ANDROID_ARCHES = android_ia32 android_x64 android_arm android_arm64 \
@@ -81,5 +81,5 @@ index bc9d9650eb..96820a0ecc 100644
     }
   ]
 --
-2.14.3
+2.15.1
 
data/patches/0003-Use-the-fPIC-flag-for-the-static-library.patch CHANGED
@@ -1,4 +1,4 @@
-From 97bb118756e30cea60216d3e74ba352d76001637 Mon Sep 17 00:00:00 2001
+From 68eadedb9728ee6ce634d646835ef0aab2d23be6 Mon Sep 17 00:00:00 2001
 From: Petko Bordjukov <bordjukov@gmail.com>
 Date: Mon, 25 Jul 2016 00:05:47 +0300
 Subject: [PATCH 3/5] Use the -fPIC flag for the static library
@@ -21,5 +21,5 @@ index d0d00d3f67..f01bd48596 100644
       }],
       [ 'clang==0 and coverage==1', {
 --
-2.14.3
+2.15.1
 
data/patches/0004-Do-not-embed-debug-symbols-in-macOS-libraries.patch CHANGED
@@ -1,4 +1,4 @@
-From b06fb098489c375a4499fb6a62c92a32a0b282d8 Mon Sep 17 00:00:00 2001
+From 943282d83340d51a7749f59708e5dca5bbe96099 Mon Sep 17 00:00:00 2001
 From: Jb Aviat <jb@sqreen.io>
 Date: Mon, 27 Feb 2017 11:14:42 +0200
 Subject: [PATCH 4/5] Do not embed debug symbols in macOS libraries
@@ -21,5 +21,5 @@ index f01bd48596..25ca6b8606 100644
       'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
       'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
 --
-2.14.3
+2.15.1
 
data/patches/0005-Remove-TryInstallOptimizedCode.patch ADDED
@@ -0,0 +1,321 @@
+From 723ed88782dbe9462aba5711a70be628d694bfda Mon Sep 17 00:00:00 2001
+From: Leszek Swirski <leszeks@chromium.org>
+Date: Tue, 17 Oct 2017 17:20:15 +0100
+Subject: [PATCH 5/5] Remove TryInstallOptimizedCode
+
+Removes the interrupt check and runtime call to TryInstallOptimizedCode
+from the optimization marker checks (i.e. CompileLazy and
+InterpreterEntryTrampoline). Instead, we rely on the other interrupt
+sources (in particular stack checks at function entries and loop
+headers) to install optimized code for us.
+
+This will hopefully not cause regressions, as we have plenty of other
+interrupt checks, but it may delay optimized code execution for
+some function by one function call.
+
+Bug: v8:6933
+Change-Id: Ieadfff7ae2078d2a84085294158ad9a706eb9c64
+Reviewed-on: https://chromium-review.googlesource.com/723475
+Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
+Commit-Queue: Leszek Swirski <leszeks@chromium.org>
+Cr-Commit-Position: refs/heads/master@{#48667}
+---
+ src/builtins/arm/builtins-arm.cc       | 13 +++----------
+ src/builtins/arm64/builtins-arm64.cc   | 13 +++----------
+ src/builtins/ia32/builtins-ia32.cc     | 15 +++------------
+ src/builtins/mips/builtins-mips.cc     | 13 +++----------
+ src/builtins/mips64/builtins-mips64.cc | 13 +++----------
+ src/builtins/ppc/builtins-ppc.cc       | 14 +++-----------
+ src/builtins/s390/builtins-s390.cc     | 13 +++----------
+ src/builtins/x64/builtins-x64.cc       | 13 +++----------
+ src/runtime/runtime-compiler.cc        | 21 ---------------------
+ src/runtime/runtime.h                  |  1 -
+ 10 files changed, 24 insertions(+), 105 deletions(-)
+
+diff --git a/src/builtins/arm/builtins-arm.cc b/src/builtins/arm/builtins-arm.cc
+index bf359d69e9..e8fa690660 100644
+--- a/src/builtins/arm/builtins-arm.cc
++++ b/src/builtins/arm/builtins-arm.cc
+@@ -782,22 +782,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ cmp(
+           optimized_code_entry,
+           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
+       __ Assert(eq, kExpectedOptimizationSentinel);
+     }
+-    // Checking whether the queued function is ready for install is
+-    // optional, since we come across interrupts and stack checks elsewhere.
+-    // However, not checking may delay installing ready functions, and
+-    // always checking would be quite expensive. A good compromise is to
+-    // first check against stack limit as a cue for an interrupt signal.
+-    __ LoadRoot(scratch2, Heap::kStackLimitRootIndex);
+-    __ cmp(sp, Operand(scratch2));
+-    __ b(hs, &fallthrough);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ jmp(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
+index b1d5d32b9a..7aaa2d0003 100644
+--- a/src/builtins/arm64/builtins-arm64.cc
++++ b/src/builtins/arm64/builtins-arm64.cc
+@@ -788,22 +788,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ Cmp(
+           optimized_code_entry,
+           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
+       __ Assert(eq, kExpectedOptimizationSentinel);
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
+-    __ B(hs, &fallthrough);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ B(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/ia32/builtins-ia32.cc b/src/builtins/ia32/builtins-ia32.cc
+index ee15025520..a689c3131d 100644
+--- a/src/builtins/ia32/builtins-ia32.cc
++++ b/src/builtins/ia32/builtins-ia32.cc
+@@ -698,24 +698,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ cmp(
+           optimized_code_entry,
+           Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
+       __ Assert(equal, kExpectedOptimizationSentinel);
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    ExternalReference stack_limit =
+-        ExternalReference::address_of_stack_limit(masm->isolate());
+-    __ cmp(esp, Operand::StaticVariable(stack_limit));
+-    __ j(above_equal, &fallthrough);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ jmp(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/mips/builtins-mips.cc b/src/builtins/mips/builtins-mips.cc
+index e8f846c10a..4835fb0b1b 100644
+--- a/src/builtins/mips/builtins-mips.cc
++++ b/src/builtins/mips/builtins-mips.cc
+@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ Assert(
+           eq, kExpectedOptimizationSentinel, optimized_code_entry,
+           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ LoadRoot(at, Heap::kStackLimitRootIndex);
+-    __ Branch(&fallthrough, hs, sp, Operand(at));
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ jmp(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/mips64/builtins-mips64.cc b/src/builtins/mips64/builtins-mips64.cc
+index f62750b061..2584444f1f 100644
+--- a/src/builtins/mips64/builtins-mips64.cc
++++ b/src/builtins/mips64/builtins-mips64.cc
+@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ Assert(
+           eq, kExpectedOptimizationSentinel, optimized_code_entry,
+           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
+-    __ Branch(&fallthrough, hs, sp, Operand(t0));
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ jmp(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/ppc/builtins-ppc.cc b/src/builtins/ppc/builtins-ppc.cc
+index 3ed3eb686d..c242be5cf8 100644
+--- a/src/builtins/ppc/builtins-ppc.cc
++++ b/src/builtins/ppc/builtins-ppc.cc
+@@ -780,23 +780,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ CmpSmiLiteral(
+           optimized_code_entry,
+           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
+       __ Assert(eq, kExpectedOptimizationSentinel);
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+-    __ cmpl(sp, ip);
+-    __ bge(&fallthrough);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ b(&fallthrough);
+   }
+ }
+
+diff --git a/src/builtins/s390/builtins-s390.cc b/src/builtins/s390/builtins-s390.cc
+index e9ef390c69..aa9e62f217 100644
+--- a/src/builtins/s390/builtins-s390.cc
++++ b/src/builtins/s390/builtins-s390.cc
+@@ -783,22 +783,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ CmpSmiLiteral(
+           optimized_code_entry,
+           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
+       __ Assert(eq, kExpectedOptimizationSentinel);
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
+-    __ bge(&fallthrough, Label::kNear);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ b(&fallthrough, Label::kNear);
+   }
+ }
+
+diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
+index 713475cd34..81c92681d5 100644
+--- a/src/builtins/x64/builtins-x64.cc
++++ b/src/builtins/x64/builtins-x64.cc
+@@ -781,21 +781,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
+                                 Runtime::kCompileOptimized_Concurrent);
+
+   {
+-    // Otherwise, the marker is InOptimizationQueue.
++    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
++    // that an interrupt will eventually update the slot with optimized code.
+     if (FLAG_debug_code) {
+       __ SmiCompare(optimized_code_entry,
+                     Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
+       __ Assert(equal, kExpectedOptimizationSentinel);
+     }
+-
+-    // Checking whether the queued function is ready for install is optional,
+-    // since we come across interrupts and stack checks elsewhere. However,
+-    // not checking may delay installing ready functions, and always checking
+-    // would be quite expensive. A good compromise is to first check against
+-    // stack limit as a cue for an interrupt signal.
+-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+-    __ j(above_equal, &fallthrough);
+-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
++    __ jmp(&fallthrough);
+   }
+ }
+
+diff --git a/src/runtime/runtime-compiler.cc b/src/runtime/runtime-compiler.cc
+index 1cc00f5b7e..b445037d08 100644
+--- a/src/runtime/runtime-compiler.cc
++++ b/src/runtime/runtime-compiler.cc
+@@ -302,27 +302,6 @@ RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
+   return NULL;
+ }
+
+-
+-RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
+-  HandleScope scope(isolate);
+-  DCHECK_EQ(1, args.length());
+-  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+-
+-  // First check if this is a real stack overflow.
+-  StackLimitCheck check(isolate);
+-  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
+-    return isolate->StackOverflow();
+-  }
+-
+-  // Only try to install optimized functions if the interrupt was InstallCode.
+-  if (isolate->stack_guard()->CheckAndClearInstallCode()) {
+-    isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
+-  }
+-
+-  return (function->IsOptimized()) ? function->code()
+-                                   : function->shared()->code();
+-}
+-
+ static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
+                                  Handle<SharedFunctionInfo> outer_info,
+                                  LanguageMode language_mode,
+diff --git a/src/runtime/runtime.h b/src/runtime/runtime.h
+index e7084a8cca..a11d274d25 100644
+--- a/src/runtime/runtime.h
++++ b/src/runtime/runtime.h
+@@ -120,7 +120,6 @@ namespace internal {
+   F(NotifyStubFailure, 0, 1) \
+   F(NotifyDeoptimized, 0, 1) \
+   F(CompileForOnStackReplacement, 1, 1) \
+-  F(TryInstallOptimizedCode, 1, 1) \
+   F(ResolvePossiblyDirectEval, 6, 1) \
+   F(InstantiateAsmJs, 4, 1)
+
+--
+2.15.1
+
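
For context on what patch 5 changes: when a function's optimization marker is InOptimizationQueue, the old builtins probed the stack limit and, on an interrupt cue, called the Runtime_TryInstallOptimizedCode entry to install concurrently compiled code eagerly; the new builtins simply jump to the fallthrough path and let the regular stack checks at function entries and loop headers install the code on a later interrupt. Below is a minimal standalone C++ sketch of the two control flows; the types and helpers are illustrative stand-ins, not V8's actual API.

#include <iostream>

// Illustrative stand-ins for V8 internals; names are hypothetical.
enum class OptimizationMarker { kNone, kInOptimizationQueue };

struct Function {
  OptimizationMarker marker = OptimizationMarker::kNone;
  bool optimized_code_ready = false;  // set by the concurrent compiler
  bool running_optimized = false;
};

// Models the stack-limit probe the old code used as an interrupt cue.
bool StackLimitSignalsInterrupt() { return true; }

void InstallOptimizedCode(Function& f) {
  if (f.optimized_code_ready) f.running_optimized = true;
}

// Old behavior: on entering a queued function, probe the stack limit and
// eagerly install optimized code (the Runtime_TryInstallOptimizedCode path).
void EnterFunctionOld(Function& f) {
  if (f.marker == OptimizationMarker::kInOptimizationQueue &&
      StackLimitSignalsInterrupt()) {
    InstallOptimizedCode(f);
  }
}

// New behavior: fall through and keep running the existing code; a later
// interrupt check will update the slot with optimized code.
void EnterFunctionNew(Function&) {}

// Models the stack checks at function entries and loop headers that the
// patch now relies on exclusively.
void PeriodicStackCheck(Function& f) { InstallOptimizedCode(f); }

int main() {
  Function f{OptimizationMarker::kInOptimizationQueue, true, false};
  EnterFunctionNew(f);  // this call still runs unoptimized
  std::cout << "optimized after entry: " << f.running_optimized << "\n";
  PeriodicStackCheck(f);  // the next interrupt installs the code
  std::cout << "optimized after check: " << f.running_optimized << "\n";
}

As the commit message notes, the only cost of the new scheme is that a queued function may run unoptimized for one extra call before a periodic check installs its code.
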
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: libv8
 version: !ruby/object:Gem::Version
-  version: 6.3.292.48.0beta1
+  version: 6.3.292.48.0beta2
 platform: ruby
 authors:
 - Charles Lowell
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-12-18 00:00:00.000000000 Z
+date: 2017-12-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -89,6 +89,7 @@ files:
 - patches/0002-Don-t-compile-unnecessary-stuff.patch
 - patches/0003-Use-the-fPIC-flag-for-the-static-library.patch
 - patches/0004-Do-not-embed-debug-symbols-in-macOS-libraries.patch
+- patches/0005-Remove-TryInstallOptimizedCode.patch
 - patches/mingw-generate-makefiles.sh
 - scaleway.png
 - spec/compiler/apple_llvm_spec.rb