cast_off 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README +578 -0
- data/README.en +256 -0
- data/bin/CastOff +145 -0
- data/cast_off.gemspec +25 -0
- data/ext/cast_off/cast_off.c.rb +1386 -0
- data/ext/cast_off/cast_off.h +24 -0
- data/ext/cast_off/depend +70 -0
- data/ext/cast_off/extconf.rb +19 -0
- data/ext/cast_off/generated_c_include/inline_api.h +507 -0
- data/ext/cast_off/generated_c_include/iter_api.h +595 -0
- data/ext/cast_off/generated_c_include/unbox_api.h.rb +76 -0
- data/ext/cast_off/generated_c_include/vm_api.h +751 -0
- data/ext/cast_off/ruby_source/atomic.h +56 -0
- data/ext/cast_off/ruby_source/constant.h +34 -0
- data/ext/cast_off/ruby_source/debug.h +41 -0
- data/ext/cast_off/ruby_source/eval_intern.h +234 -0
- data/ext/cast_off/ruby_source/gc.h +98 -0
- data/ext/cast_off/ruby_source/id.h +175 -0
- data/ext/cast_off/ruby_source/insns.inc +179 -0
- data/ext/cast_off/ruby_source/insns_info.inc +695 -0
- data/ext/cast_off/ruby_source/internal.h +227 -0
- data/ext/cast_off/ruby_source/iseq.h +125 -0
- data/ext/cast_off/ruby_source/manual_update.h +135 -0
- data/ext/cast_off/ruby_source/method.h +105 -0
- data/ext/cast_off/ruby_source/node.h +503 -0
- data/ext/cast_off/ruby_source/thread_pthread.h +51 -0
- data/ext/cast_off/ruby_source/thread_win32.h +40 -0
- data/ext/cast_off/ruby_source/vm_core.h +756 -0
- data/ext/cast_off/ruby_source/vm_exec.h +184 -0
- data/ext/cast_off/ruby_source/vm_insnhelper.c +1748 -0
- data/ext/cast_off/ruby_source/vm_insnhelper.h +220 -0
- data/ext/cast_off/ruby_source/vm_opts.h +51 -0
- data/lib/cast_off.rb +15 -0
- data/lib/cast_off/compile.rb +629 -0
- data/lib/cast_off/compile/basicblock.rb +144 -0
- data/lib/cast_off/compile/cfg.rb +391 -0
- data/lib/cast_off/compile/code_manager.rb +284 -0
- data/lib/cast_off/compile/configuration.rb +2368 -0
- data/lib/cast_off/compile/dependency.rb +240 -0
- data/lib/cast_off/compile/information.rb +775 -0
- data/lib/cast_off/compile/instruction.rb +446 -0
- data/lib/cast_off/compile/ir/call_ir.rb +2348 -0
- data/lib/cast_off/compile/ir/guard_ir.rb +423 -0
- data/lib/cast_off/compile/ir/jump_ir.rb +223 -0
- data/lib/cast_off/compile/ir/operand.rb +934 -0
- data/lib/cast_off/compile/ir/param_ir.rb +98 -0
- data/lib/cast_off/compile/ir/return_ir.rb +92 -0
- data/lib/cast_off/compile/ir/simple_ir.rb +808 -0
- data/lib/cast_off/compile/ir/sub_ir.rb +212 -0
- data/lib/cast_off/compile/iseq.rb +454 -0
- data/lib/cast_off/compile/method_information.rb +1384 -0
- data/lib/cast_off/compile/namespace/namespace.rb +556 -0
- data/lib/cast_off/compile/namespace/uuid.rb +323 -0
- data/lib/cast_off/compile/stack.rb +65 -0
- data/lib/cast_off/compile/translator.rb +1562 -0
- data/lib/cast_off/suggestion.rb +98 -0
- data/lib/cast_off/util.rb +58 -0
- metadata +107 -0
@@ -0,0 +1,323 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
# Copyright(c) 2005 URABE, Shyouhei.
|
3
|
+
#
|
4
|
+
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
5
|
+
# of this code, to deal in the code without restriction, including without
|
6
|
+
# limitation the rights to use, copy, modify, merge, publish, distribute,
|
7
|
+
# sublicense, and/or sell copies of the code, and to permit persons to whom the
|
8
|
+
# code is furnished to do so, subject to the following conditions:
|
9
|
+
#
|
10
|
+
# The above copyright notice and this permission notice shall be
|
11
|
+
# included in all copies or substantial portions of the code.
|
12
|
+
#
|
13
|
+
# THE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
14
|
+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
15
|
+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
16
|
+
# AUTHOR OR COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
17
|
+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
18
|
+
# OUT OF OR IN CONNECTION WITH THE CODE OR THE USE OR OTHER DEALINGS IN THE
|
19
|
+
# CODE.
|
20
|
+
|
21
|
+
%w[
|
22
|
+
digest/md5
|
23
|
+
digest/sha1
|
24
|
+
tmpdir
|
25
|
+
].each do |f|
|
26
|
+
require f
|
27
|
+
end
|
28
|
+
|
29
|
+
# Pure ruby UUID generator, which is compatible with RFC4122
|
30
|
+
class UUID
|
31
|
+
# UUID epoch is 15th Oct. 1582
|
32
|
+
UNIXEpoch = 0x01B21DD213814000 # in 100-nanoseconds resolution
|
33
|
+
|
34
|
+
private
|
35
|
+
|
36
|
+
# Builds a UUID from its 16-byte binary representation and freezes the
# resulting object.  The byte string is folded big-endian into one
# 128-bit integer kept in @num.
def initialize str
  bytes = str.unpack "C*"
  @num = bytes.reduce { |acc, byte| (acc << 8) | byte }
  @num.freeze
  self.freeze
end
|
44
|
+
|
45
|
+
public
|
46
|
+
|
47
|
+
# Serializes the internal 128-bit integer back into its 16-octet
# binary form (big-endian / network byte order).
def raw_bytes
  octets = []
  rest = @num
  16.times do
    rest, low = rest.divmod 256
    octets.unshift low
  end
  octets.pack "C*"
end
|
58
|
+
|
59
|
+
class << self
|
60
|
+
STATE_FILE = 'ruby-uuid'
|
61
|
+
|
62
|
+
private
|
63
|
+
|
64
|
+
alias create new # :nodoc:
|
65
|
+
private :create
|
66
|
+
|
67
|
+
# Stamps the RFC 4122 version number +ver+ (low 4 bits) into octet 6
# and the variant bits (10xx_xxxx) into octet 8 of the raw UUID
# string +str+, returning the mutated string.
def mask ver, str # :nodoc:
  version = ver & 0x0F
  octet6 = (str[6].ord & 0x0F) | (version << 4)
  str[6] = octet6.chr
  octet8 = (str[8].ord & 0x3F) | 0x80
  str[8] = octet8.chr
  str
end
|
79
|
+
|
80
|
+
# A pseudo-random unsigned 32-bit integer.
def prand # :nodoc:
  rand 2**32
end
|
83
|
+
|
84
|
+
# Restores the persisted [clock, mac] pair from the state file +fp+,
# always reading from the beginning of the file.
def read_state fp # :nodoc:
  fp.rewind
  Marshal.load(fp.read)
end
|
88
|
+
|
89
|
+
# Persists the [clock, mac] pair into the state file +fp+, overwriting
# any previous contents from the beginning.  force_encoding only
# relabels the marshal bytes; it does not transcode them.
def write_state fp, c, m # :nodoc:
  fp.rewind
  payload = Marshal.dump([c, m]).dup.force_encoding('UTF-8')
  fp.write payload
end
|
94
|
+
|
95
|
+
public
|
96
|
+
|
97
|
+
# UUID generation using SHA1. Recommended over create_md5.
|
98
|
+
# Namespace object is another UUID, some of them are pre-defined below.
|
99
|
+
# Generates a version-5 (SHA-1, name-based) UUID for +str+ under the
# given +namespace+ UUID.  Recommended over new_md5.
def new_sha1 str, namespace
  digest = Digest::SHA1.new
  digest << namespace.raw_bytes
  digest << str
  create(mask(5, digest.digest[0..15]))
end
|
107
|
+
|
108
|
+
# UUID generation using MD5 (for backward compat.)
|
109
|
+
# Generates a version-3 (MD5, name-based) UUID for +str+ under the
# given +namespace+ UUID.  Kept for backward compatibility; prefer
# new_sha1 for new code.
def new_md5 str, namespace
  md5 = Digest::MD5.new
  md5.update namespace.raw_bytes
  md5.update str
  sum = md5.digest
  # Take exactly the first 16 octets, matching new_sha1.  The previous
  # range [0..16] asked for 17 bytes and only behaved correctly
  # because an MD5 digest happens to be exactly 16 bytes long.
  raw = mask 3, sum[0..15]
  create raw
end
|
117
|
+
|
118
|
+
# UUID generation using random-number generator. From it's random
|
119
|
+
# nature, there's no warranty that the created ID is really universally
|
120
|
+
# unique.
|
121
|
+
# Generates a version-4 UUID from pseudo-random bits.  Uniqueness is
# only probabilistic -- there is no hard guarantee.
def new_random
  bits = Array.new(4) { prand }.pack "N4"
  create(mask(4, bits))
end
|
126
|
+
|
127
|
+
# create the "version 1" UUID with current system clock, current UTC
|
128
|
+
# timestamp, and the IEEE 802 address (so-called MAC address).
|
129
|
+
#
|
130
|
+
# Speed notice: it's slow. It writes some data into hard drive on every
|
131
|
+
# invocation. If you want to speed this up, try remounting tmpdir with a
|
132
|
+
# memory based filesystem (such as tmpfs). STILL slow? then no way but
|
133
|
+
# rewrite it with c :)
|
134
|
+
# Creates a "version 1" (time-based) UUID from the system clock, the
# given timestamp, and an IEEE 802 (MAC) address.
#
# clock::    optional override for the 14-bit clock sequence.
# time::     a Time, a Float (seconds since epoch), or an Integer
#            count of 100ns intervals (UNIXEpoch is added to it).
# mac_addr:: optional 6-byte node override.
#
# State (clock sequence + pseudo MAC) lives in Dir.tmpdir/STATE_FILE,
# guarded by an exclusive flock, and is rewritten on every call --
# which is why this method is slow.
def new clock=nil, time=Time.now, mac_addr=nil
  c = t = m = nil
  Dir.chdir Dir.tmpdir do
    unless FileTest.exist? STATE_FILE then
      # Generate a pseudo MAC address because we have no pure-ruby way
      # to know the MAC address of the NIC this system uses. Note
      # that cheating with pseudo addresses here is completely legal:
      # see Section 4.5 of RFC4122 for details.
      sha1 = Digest::SHA1.new
      256.times do
        r = [prand].pack "N"
        sha1.update r
      end
      str = sha1.digest
      # Pick 6 consecutive digest bytes at a random offset (SHA-1 is
      # 40 bytes of hex / 20 bytes raw; 34 keeps the window in range).
      r = rand 34 # 40-6
      node = str[r, 6] || str
      node = node.bytes.to_a
      node[0] |= 0x01 # multicast bit
      node = node.pack "C*"
      # Random initial clock sequence (18 bits drawn; masked to 14 below).
      k = rand 0x40000
      open STATE_FILE, 'w' do |fp|
        fp.flock IO::LOCK_EX
        write_state fp, k, node
        fp.chmod 0o777 # must be world writable
      end
    end
    # Increment the persisted clock sequence under an exclusive lock so
    # concurrent generators never reuse a (timestamp, clock) pair.
    open STATE_FILE, 'r+' do |fp|
      fp.flock IO::LOCK_EX
      c, m = read_state fp
      c += 1 # important; increment here
      write_state fp, c, m
    end
  end
  c = clock & 0b11_1111_1111_1111 if clock
  m = mac_addr if mac_addr
  time = Time.at time if time.is_a? Float
  case time
  when Time
    # Convert to 100ns units since the UUID epoch (15 Oct 1582).
    t = time.to_i * 10_000_000 + time.tv_usec * 10 + UNIXEpoch
  when Integer
    t = time + UNIXEpoch
  else
    raise TypeError, "cannot convert ``#{time}'' into Time."
  end

  # Split the 60-bit timestamp and 14-bit clock sequence into the
  # RFC 4122 wire fields, stamping version 1 and the variant bits.
  tl = t & 0xFFFF_FFFF
  tm = t >> 32
  tm = tm & 0xFFFF
  th = t >> 48
  th = th & 0b0000_1111_1111_1111
  th = th | 0b0001_0000_0000_0000
  cl = c & 0b0000_0000_1111_1111
  ch = c & 0b0011_1111_0000_0000
  ch = ch >> 8
  ch = ch | 0b1000_0000
  pack tl, tm, th, ch, cl, m
end
|
191
|
+
|
192
|
+
# A simple GUID parser: just ignores unknown characters and convert
|
193
|
+
# hexadecimal dump into 16-octet object.
|
194
|
+
# A lenient GUID parser: strips an optional "urn:uuid:" prefix,
# discards every non-hexadecimal character, and builds the UUID from
# the first 32 hex digits.
def parse obj
  hex = obj.to_s.sub(%r/\Aurn:uuid:/, '').gsub(%r/[^0-9A-Fa-f]/, '')
  create([hex[0..31]].pack('H*'))
end
|
200
|
+
|
201
|
+
# The 'primitive constructor' of this class
|
202
|
+
# Note UUID.pack(uuid.unpack) == uuid
|
203
|
+
# The 'primitive constructor': assembles a UUID from its six wire
# fields (time_low, time_mid, time_hi, clock_hi, clock_low, node).
# Invariant: UUID.pack(uuid.unpack) == uuid
def pack tl, tm, th, ch, cl, n
  create([tl, tm, th, ch, cl, n].pack("NnnCCa6"))
end
|
207
|
+
end
|
208
|
+
|
209
|
+
# The 'primitive deconstructor', or the dual to pack.
|
210
|
+
# Note UUID.pack(uuid.unpack) == uuid
|
211
|
+
# The 'primitive deconstructor', the dual of UUID.pack: splits the
# UUID back into its six wire fields.
# Invariant: UUID.pack(uuid.unpack) == uuid
def unpack
  raw_bytes.unpack("NnnCCa6")
end
|
214
|
+
|
215
|
+
# The timestamp of this UUID.
|
216
|
+
# Throws RangeError if that time exceeds the UNIX time range
|
217
|
+
# Reassembles the 60-bit timestamp from the three time fields and
# converts it back to a Time (raises if outside the UNIX time range).
def time
  tl, tm, th_raw = unpack
  hundred_ns = ((th_raw & 0x0FFF) << 48) + (tm << 32) + tl - UNIXEpoch
  sec, rem = hundred_ns.divmod 10_000_000
  Time.at sec, rem / 10
end
|
231
|
+
|
232
|
+
# The version of this UUID
|
233
|
+
# The RFC 4122 version number, stored in the high nibble of the
# time_hi_and_version field.
def version
  (unpack[2] >> 12) & 0b1111
end
|
237
|
+
|
238
|
+
# The clock sequence of this UUID
|
239
|
+
# The 14-bit clock sequence of this UUID.
#
# Octet 8 (clock_seq_hi_and_reserved) carries the variant in its top
# two bits; the remaining SIX bits are the high part of the clock
# sequence (RFC 4122, section 4.1.2) -- matching UUID.new, which
# packs `c & 0b0011_1111_0000_0000` into it.  The previous mask
# 0b0001_1111 kept only five bits and silently dropped bit 13.
def clock
  a = unpack
  ch = a[3] & 0b0011_1111
  cl = a[4]
  c = cl
  c += ch << 8
  c
end
|
247
|
+
|
248
|
+
# The IEEE 802 address in a hexadecimal format
|
249
|
+
# The node (IEEE 802 / MAC address) field as 12 lowercase hex digits.
def node
  unpack[5].bytes.map { |b| '%02x' % b }.join
end
|
253
|
+
alias mac_address node
|
254
|
+
alias ieee802 node
|
255
|
+
|
256
|
+
# Generate the string representation (a.k.a GUID) of this UUID
|
257
|
+
# The canonical string (GUID) representation, e.g.
# "6ba7b810-9dad-11d1-80b4-00c04fd430c8".
def to_s
  fields = unpack
  fields[-1] = mac_address
  "%08x-%04x-%04x-%02x%02x-%s" % fields
end
|
262
|
+
alias guid to_s
|
263
|
+
|
264
|
+
# Convert into a RFC4122-conforming URN representation
|
265
|
+
# This UUID as an RFC 4122 conforming URN ("urn:uuid:...").
def to_uri
  "urn:uuid:#{self}"
end
|
268
|
+
alias urn to_uri
|
269
|
+
alias inspect to_uri
|
270
|
+
|
271
|
+
# Convert into 128-bit unsigned integer
|
272
|
+
# Typically a Bignum instance, but can be a Fixnum.
|
273
|
+
# This UUID as a 128-bit unsigned integer (Integer; historically a
# Bignum or Fixnum).
def to_int
  @num
end
|
276
|
+
alias to_i to_int
|
277
|
+
|
278
|
+
# Two UUIDs are said to be equal if and only if their (byte-order
|
279
|
+
# canonicalized) integer representations are equivalent. Refer to RFC4122 for
|
280
|
+
# details.
|
281
|
+
# Equality: two UUIDs are equal iff their 128-bit integer values are
# equal (the RFC 4122 comparison rule).
def == other
  to_i == other.to_i
end
|
284
|
+
alias eql? ==
|
285
|
+
|
286
|
+
# Two identical UUIDs should have same hash
|
287
|
+
# Hash value; identical UUIDs hash identically (delegates to to_i,
# keeping hash consistent with eql?).
def hash
  to_i
end
|
290
|
+
|
291
|
+
include Comparable
|
292
|
+
# UUIDs are comparable (don't know what benefits are there, though).
|
293
|
+
# Ordering for Comparable: compares the canonical string forms.
def <=> other
  to_s <=> other.to_s
end
|
296
|
+
|
297
|
+
# shortcut
|
298
|
+
# Shortcut: derive a v5 UUID for +str+ using self as the namespace.
def new_sha1 str
  self.class.new_sha1 str, self
end
|
301
|
+
|
302
|
+
# shortcut too
|
303
|
+
# Shortcut: derive a v3 UUID for +str+ using self as the namespace.
def new_md5 str
  self.class.new_md5 str, self
end
|
306
|
+
|
307
|
+
# Pre-defined UUID Namespaces described in RFC4122 Appendix C.
|
308
|
+
NameSpace_DNS = parse "6ba7b810-9dad-11d1-80b4-00c04fd430c8"
|
309
|
+
NameSpace_URL = parse "6ba7b811-9dad-11d1-80b4-00c04fd430c8"
|
310
|
+
NameSpace_OID = parse "6ba7b812-9dad-11d1-80b4-00c04fd430c8"
|
311
|
+
NameSpace_X500 = parse "6ba7b814-9dad-11d1-80b4-00c04fd430c8"
|
312
|
+
|
313
|
+
# The Nil UUID in RFC4122 Section 4.1.7
|
314
|
+
Nil = parse "00000000-0000-0000-0000-000000000000"
|
315
|
+
end
|
316
|
+
|
317
|
+
module Kernel
  module_function

  # Conversion-style shortcut, in the spirit of Kernel#Integer:
  # UUID("...") parses a GUID string into a UUID instance.
  def UUID str
    UUID.parse str
  end
end
|
323
|
+
|
@@ -0,0 +1,65 @@
|
|
1
|
+
module CastOff::Compiler
|
2
|
+
class Translator::CFG
|
3
|
+
class BasicBlock
|
4
|
+
|
5
|
+
# Records the operand-stack depth at entry to this basic block.
def in_depth=(depth)
  @in_depth = depth
end
|
8
|
+
|
9
|
+
# Stack depth at block entry.  Reading it before validate_stack has
# assigned it is a compiler bug.
def in_depth()
  bug() if !@in_depth
  @in_depth
end
|
13
|
+
|
14
|
+
# Stack depth at block exit: entry depth plus the net stack effect of
# every instruction in the block.
def out_depth()
  bug() if !@in_depth
  @in_depth + stackincrease()
end
|
18
|
+
|
19
|
+
# Returns the stack depth just before +insn+ executes within this
# block; +insn+ must belong to the block.
def find_insn_stack_depth(insn)
  bug() unless @insns.include?(insn)
  current = in_depth()
  @insns.each do |candidate|
    return current if candidate == insn
    current += candidate.stack_usage()
  end
  bug()
end
|
28
|
+
|
29
|
+
private
|
30
|
+
|
31
|
+
# Net change in operand-stack depth produced by executing every
# instruction in this block, in order.
def stackincrease()
  @insns.sum { |insn| insn.stack_usage }
end
|
34
|
+
end
|
35
|
+
|
36
|
+
# Locates the basic block containing +insn+ and returns the stack
# depth just before it executes, or nil when no block contains it.
def find_insn_stack_depth(insn)
  # Renamed the block parameter: the original `{|b| ...}` shadowed the
  # outer local `b` being assigned and emitted a shadowing warning.
  block = @blocks.find { |blk| blk.insns.include?(insn) }
  block ? block.find_insn_stack_depth(insn) : nil
end
|
40
|
+
|
41
|
+
private
|
42
|
+
|
43
|
+
# Assigns an entry stack depth to every basic block by breadth-first
# search from the entry block (depth 0), then asserts that every edge
# is consistent: a block's exit depth must equal each successor's
# entry depth.  bug()s out if the queue drains before every block is
# visited (disconnected CFG) or on any depth mismatch.
def validate_stack()
  # Breadth first search
  @blocks[0].in_depth = 0
  achieved = {}
  vertex = nil
  queue = [@blocks[0]]
  while achieved.size() != @blocks.size()
    vertex = queue.shift()
    # Empty queue with unvisited blocks remaining => unreachable block.
    bug() unless vertex
    achieved[vertex] = true
    depth = vertex.out_depth
    vertex.next.each do |b|
      # Propagate this block's exit depth to each not-yet-queued successor.
      if !queue.include?(b) && !achieved[b]
        b.in_depth = depth
        queue << b
      end
    end
  end
  # Final consistency check across every edge of the CFG.
  @blocks.each{|b0| bug() if b0.next.find{|b1| b0.out_depth != b1.in_depth}}
end
|
63
|
+
end
|
64
|
+
end
|
65
|
+
|
@@ -0,0 +1,1562 @@
|
|
1
|
+
# coding=utf-8
|
2
|
+
|
3
|
+
module CastOff
|
4
|
+
module Compiler
|
5
|
+
class Translator
|
6
|
+
include CastOff::Util
|
7
|
+
include CastOff::Compiler::Instruction
|
8
|
+
include Converter
|
9
|
+
|
10
|
+
Template = ERB.new(<<-'end', 0, '%-', 'io')
|
11
|
+
#include <ruby.h>
|
12
|
+
#include <ruby/encoding.h>
|
13
|
+
|
14
|
+
#include "vm_opts.h"
|
15
|
+
|
16
|
+
#include "vm_core.h"
|
17
|
+
#include "eval_intern.h"
|
18
|
+
#include "iseq.h"
|
19
|
+
#include "gc.h"
|
20
|
+
#include <ruby/vm.h>
|
21
|
+
|
22
|
+
/* need for CHECK_STACK_OVERFLOW and vm_push_frame */
|
23
|
+
#undef GET_VM
|
24
|
+
#define GET_VM() th->vm
|
25
|
+
|
26
|
+
#include "vm_insnhelper.h"
|
27
|
+
#include "vm_insnhelper.c"
|
28
|
+
#define USE_INSN_STACK_INCREASE 1
|
29
|
+
#include "insns_info.inc"
|
30
|
+
|
31
|
+
#include "manual_update.h"
|
32
|
+
|
33
|
+
static VALUE rb_mCastOff;
|
34
|
+
static VALUE rb_eCastOffExecutionError;
|
35
|
+
static VALUE rb_mCastOffCompiler;
|
36
|
+
static VALUE rb_cCastOffSingletonClass;
|
37
|
+
static VALUE rb_cCastOffConfiguration;
|
38
|
+
static VALUE rb_cCastOffClassWrapper;
|
39
|
+
static VALUE rb_cCastOffMethodWrapper;
|
40
|
+
|
41
|
+
#include "vm_api.h"
|
42
|
+
#include "iter_api.h"
|
43
|
+
#include "unbox_api.h"
|
44
|
+
%if @configuration.inject_guard?
|
45
|
+
#define INJECT_GUARD 1
|
46
|
+
%end
|
47
|
+
%if @configuration.array_conservative?
|
48
|
+
#define ARRAY_CONSERVATIVE 1
|
49
|
+
%end
|
50
|
+
#include "inline_api.h"
|
51
|
+
|
52
|
+
/* FIXME */
|
53
|
+
#undef RUBY_VM_CHECK_INTS
|
54
|
+
#define RUBY_VM_CHECK_INTS(th)
|
55
|
+
|
56
|
+
/* Odd, This macro is not in any header files above... */
|
57
|
+
/* #define hide_obj(obj) do {OBJ_FREEZE(obj); RBASIC(obj)->klass = 0;} while (0) */
|
58
|
+
#define hide_obj(obj)
|
59
|
+
|
60
|
+
static VALUE sampling_table_val = Qnil;
|
61
|
+
static st_table *sampling_table = NULL;
|
62
|
+
|
63
|
+
static void register_sampling_table(VALUE hash)
|
64
|
+
{
|
65
|
+
sampling_table_val = hash;
|
66
|
+
rb_gc_register_mark_object(hash);
|
67
|
+
sampling_table = RHASH_TBL(hash);
|
68
|
+
}
|
69
|
+
|
70
|
+
static VALUE cast_off_register_sampling_table_<%= signiture() %>(VALUE dummy, VALUE hash)
|
71
|
+
{
|
72
|
+
register_sampling_table(hash);
|
73
|
+
return Qnil;
|
74
|
+
}
|
75
|
+
|
76
|
+
static void sampling_variable(VALUE val, VALUE sym)
|
77
|
+
{
|
78
|
+
/* :variable => [klass0, klass1, ...] */
|
79
|
+
VALUE klass = rb_class_of(val);
|
80
|
+
VALUE hashval;
|
81
|
+
st_table *hash;
|
82
|
+
|
83
|
+
if (!st_lookup(sampling_table, (st_data_t)sym, (st_data_t*)&hashval)) {
|
84
|
+
hashval = rb_hash_new();
|
85
|
+
st_insert(sampling_table, (st_data_t)sym, (st_data_t)hashval);
|
86
|
+
}
|
87
|
+
hash = RHASH_TBL(hashval);
|
88
|
+
|
89
|
+
if (FL_TEST(klass, FL_SINGLETON)) {
|
90
|
+
klass = rb_cCastOffSingletonClass;
|
91
|
+
}
|
92
|
+
|
93
|
+
if (!st_lookup(hash, (st_data_t)klass, 0)) {
|
94
|
+
st_insert(hash, (st_data_t)klass, (st_data_t)Qtrue);
|
95
|
+
}
|
96
|
+
|
97
|
+
return;
|
98
|
+
}
|
99
|
+
|
100
|
+
static void __sampling_poscall(VALUE val, VALUE method_klass, VALUE method_id)
|
101
|
+
{
|
102
|
+
VALUE klass;
|
103
|
+
VALUE method_id_hashval, hashval;
|
104
|
+
st_table *method_id_hash, *hash;
|
105
|
+
|
106
|
+
if (FL_TEST(method_klass, FL_SINGLETON)) {
|
107
|
+
method_klass = rb_cCastOffSingletonClass;
|
108
|
+
}
|
109
|
+
|
110
|
+
klass = rb_class_of(val);
|
111
|
+
|
112
|
+
if (!st_lookup(sampling_table, (st_data_t)method_klass, (st_data_t*)&method_id_hashval)) {
|
113
|
+
method_id_hashval = rb_hash_new();
|
114
|
+
st_insert(sampling_table, (st_data_t)method_klass, (st_data_t)method_id_hashval);
|
115
|
+
}
|
116
|
+
method_id_hash = RHASH_TBL(method_id_hashval);
|
117
|
+
if (!st_lookup(method_id_hash, (st_data_t)method_id, (st_data_t*)&hashval)) {
|
118
|
+
hashval = rb_hash_new();
|
119
|
+
st_insert(method_id_hash, (st_data_t)method_id, (st_data_t)hashval);
|
120
|
+
}
|
121
|
+
hash = RHASH_TBL(hashval);
|
122
|
+
|
123
|
+
if (FL_TEST(klass, FL_SINGLETON)) {
|
124
|
+
klass = rb_cCastOffSingletonClass;
|
125
|
+
}
|
126
|
+
|
127
|
+
if (!st_lookup(hash, (st_data_t)klass, 0)) {
|
128
|
+
st_insert(hash, (st_data_t)klass, (st_data_t)Qtrue);
|
129
|
+
}
|
130
|
+
|
131
|
+
return;
|
132
|
+
}
|
133
|
+
|
134
|
+
static void sampling_poscall(VALUE val, VALUE recv, VALUE method_id)
|
135
|
+
{
|
136
|
+
__sampling_poscall(val, rb_class_of(recv), method_id);
|
137
|
+
}
|
138
|
+
|
139
|
+
%@namespace.each_static_decls do |decl|
|
140
|
+
<%= decl %>
|
141
|
+
%end
|
142
|
+
|
143
|
+
%@fptr.each do |(k, fps)|
|
144
|
+
% kids, mid, singleton, convention, argc = k
|
145
|
+
% mid = allocate_id(mid)
|
146
|
+
% fps.each do |fp|
|
147
|
+
|
148
|
+
static VALUE (*<%= fp %>)(ANYARGS);
|
149
|
+
static VALUE (*<%= function_pointer_wrapper_fptr(fp) %>)(ANYARGS);
|
150
|
+
|
151
|
+
% case convention
|
152
|
+
% when -3 # -1
|
153
|
+
% args = Array.new(argc)
|
154
|
+
% i = 0; args.map!{|a| a = "arg#{i}"; i += 1; a}
|
155
|
+
% args_d = args.empty? ? '' : ", VALUE #{args.join(', VALUE ')}"
|
156
|
+
% args_c = args.empty? ? '' : ", #{args.join(', ')}"
|
157
|
+
static VALUE <%= function_pointer_wrapper_func(fp) %>(VALUE recv, ID id, int argc<%= args_d %>)
|
158
|
+
{
|
159
|
+
return <%= function_pointer_wrapper_fptr(fp) %>(recv<%= args_c %>);
|
160
|
+
}
|
161
|
+
|
162
|
+
static VALUE <%= function_pointer_wrapper_func_complex(fp) %>(VALUE recv, ID id, int argc<%= args_d %>)
|
163
|
+
{
|
164
|
+
VALUE argv[<%= [argc, 1].max %>];
|
165
|
+
|
166
|
+
% argc.times do |i|
|
167
|
+
argv[<%= i %>] = arg<%= i %>;
|
168
|
+
% end
|
169
|
+
if (argc != <%= argc %>) {
|
170
|
+
rb_bug("<%= function_pointer_wrapper_func(fp) %>: should not be reached");
|
171
|
+
}
|
172
|
+
|
173
|
+
return <%= function_pointer_wrapper_fptr(fp) %>(<%= argc %>, argv, recv);
|
174
|
+
}
|
175
|
+
% when -2
|
176
|
+
static VALUE <%= function_pointer_wrapper_func(fp) %>(VALUE recv, VALUE ary)
|
177
|
+
{
|
178
|
+
return <%= function_pointer_wrapper_fptr(fp) %>(recv, <%= mid %>, RARRAY_LEN(ary), RARRAY_PTR(ary));
|
179
|
+
}
|
180
|
+
% when -1
|
181
|
+
static VALUE <%= function_pointer_wrapper_func(fp) %>(int argc, VALUE *argv, VALUE recv)
|
182
|
+
{
|
183
|
+
return <%= function_pointer_wrapper_fptr(fp) %>(recv, <%= mid %>, argc, argv);
|
184
|
+
}
|
185
|
+
% when 0..15
|
186
|
+
% args = Array.new(convention)
|
187
|
+
% i = 0; args.map!{|a| a = "arg#{i}"; i += 1; a}
|
188
|
+
% args_d = args.empty? ? '' : ", VALUE #{args.join(', VALUE ')}"
|
189
|
+
% args_c = args.empty? ? '' : ", #{args.join(', ')}"
|
190
|
+
static VALUE <%= function_pointer_wrapper_func(fp) %>(VALUE recv<%= args_d %>)
|
191
|
+
{
|
192
|
+
return <%= function_pointer_wrapper_fptr(fp) %>(recv, <%= mid %>, <%= convention %><%= args_c %>);
|
193
|
+
}
|
194
|
+
% else
|
195
|
+
% bug()
|
196
|
+
% end
|
197
|
+
% end
|
198
|
+
%end
|
199
|
+
|
200
|
+
%@ic.each do |(k, v)|
|
201
|
+
static struct iseq_inline_cache_entry <%= v %>;
|
202
|
+
%end
|
203
|
+
|
204
|
+
%queue = [@root_iseq]
|
205
|
+
static rb_iseq_t *<%= @root_iseq %> = NULL;
|
206
|
+
%until queue.empty?
|
207
|
+
% entry = queue.pop()
|
208
|
+
% entry.children.each do |(pc, child)|
|
209
|
+
static rb_iseq_t *<%= child %> = NULL;
|
210
|
+
% bug() if queue.include?(child)
|
211
|
+
% queue << child
|
212
|
+
% end
|
213
|
+
%end
|
214
|
+
|
215
|
+
static rb_iseq_t *cast_off_orig_iseq = NULL;
|
216
|
+
static VALUE cast_off_register_iseq_<%= signiture() %>(VALUE dummy, VALUE iseqval)
|
217
|
+
{
|
218
|
+
rb_iseq_t *iseq = DATA_PTR(iseqval);
|
219
|
+
VALUE insn;
|
220
|
+
|
221
|
+
rb_gc_register_mark_object(iseqval);
|
222
|
+
cast_off_orig_iseq = iseq;
|
223
|
+
|
224
|
+
%queue = [@root_iseq]
|
225
|
+
<%= @root_iseq %> = cast_off_orig_iseq;
|
226
|
+
%until queue.empty?
|
227
|
+
% entry = queue.pop()
|
228
|
+
% entry.children.each do |(pc, child)|
|
229
|
+
insn = <%= entry %>->iseq[<%= pc %>];
|
230
|
+
if (insn != BIN(send)) {
|
231
|
+
rb_bug("should not be reached (0), pc = %d", <%= pc %>);
|
232
|
+
}
|
233
|
+
<%= child %> = (rb_iseq_t*)<%= entry %>->iseq[<%= pc %> + 3];
|
234
|
+
if (rb_class_of(<%= child %>->self) != rb_cISeq) {
|
235
|
+
rb_bug("should not be reached (1)");
|
236
|
+
}
|
237
|
+
% bug() if queue.include?(child)
|
238
|
+
% queue << child
|
239
|
+
% end
|
240
|
+
%end
|
241
|
+
return Qnil;
|
242
|
+
}
|
243
|
+
|
244
|
+
%@declare_constants.each do |(key, value)|
|
245
|
+
static VALUE <%= key %> = Qundef;
|
246
|
+
%end
|
247
|
+
|
248
|
+
static VALUE cast_off_prefetch_constants_<%= signiture() %>(VALUE self, VALUE binding)
|
249
|
+
{
|
250
|
+
%@prefetch_constants.each do |(key, value)|
|
251
|
+
% path, singleton_p = value
|
252
|
+
<%= key %> = rb_funcall(self, rb_intern("eval"), 2, rb_str_new2("<%= path %>"), binding);
|
253
|
+
% if singleton_p
|
254
|
+
<%= key %> = rb_class_of(<%= key %>);
|
255
|
+
% end
|
256
|
+
%end
|
257
|
+
|
258
|
+
return Qnil;
|
259
|
+
}
|
260
|
+
|
261
|
+
static void *fbind = NULL;
|
262
|
+
static void *feval = NULL;
|
263
|
+
static VALUE cast_off_initialize_fptr_<%= signiture() %>(VALUE dummy)
|
264
|
+
{
|
265
|
+
rb_method_entry_t *me;
|
266
|
+
VALUE klass;
|
267
|
+
VALUE (*fptr)(ANYARGS);
|
268
|
+
|
269
|
+
me = search_method(rb_mKernel, rb_intern("binding"));
|
270
|
+
should_be_cfunc(me);
|
271
|
+
fbind = me->def->body.cfunc.func;
|
272
|
+
|
273
|
+
me = search_method(rb_mKernel, rb_intern("eval"));
|
274
|
+
should_be_cfunc(me);
|
275
|
+
feval = me->def->body.cfunc.func;
|
276
|
+
|
277
|
+
%@fptr.each do |(k, v)|
|
278
|
+
% kids, mid, singleton, convention, argc = k
|
279
|
+
% mid = allocate_id(mid)
|
280
|
+
% fps = v
|
281
|
+
klass = rb_cObject;
|
282
|
+
% kids.each do |kid|
|
283
|
+
klass = rb_const_get(klass, rb_intern("<%= kid %>"));
|
284
|
+
% end
|
285
|
+
% if singleton
|
286
|
+
should_be_singleton(klass);
|
287
|
+
me = search_method(rb_class_of(klass), <%= mid %>);
|
288
|
+
% else
|
289
|
+
me = search_method(klass, <%= mid %>);
|
290
|
+
% end
|
291
|
+
fptr = c_function_pointer(me);
|
292
|
+
if (fptr && should_be_call_directly_p(fptr)) {
|
293
|
+
int argc = c_function_argc(me);
|
294
|
+
if (fptr == fbind) {
|
295
|
+
rb_raise(rb_eCastOffExecutionError, "should not use binding in compilation target of CastOff");
|
296
|
+
}
|
297
|
+
if (fptr == feval) {
|
298
|
+
rb_raise(rb_eCastOffExecutionError, "should not use eval in compilation target of CastOff");
|
299
|
+
}
|
300
|
+
if (argc == <%= convention %>) {
|
301
|
+
% fps.each do |fp|
|
302
|
+
<%= fp %> = fptr;
|
303
|
+
% end
|
304
|
+
} else {
|
305
|
+
% case convention
|
306
|
+
% when -3
|
307
|
+
if (0 <= argc && argc <= 15) {
|
308
|
+
% fps.each do |fp|
|
309
|
+
<%= function_pointer_wrapper_fptr(fp) %> = fptr;
|
310
|
+
<%= fp %> = <%= function_pointer_wrapper_func(fp) %>;
|
311
|
+
% end
|
312
|
+
} else if (argc == -1) {
|
313
|
+
% fps.each do |fp|
|
314
|
+
<%= function_pointer_wrapper_fptr(fp) %> = fptr;
|
315
|
+
<%= fp %> = <%= function_pointer_wrapper_func_complex(fp) %>;
|
316
|
+
% end
|
317
|
+
} else if (argc == -2) {
|
318
|
+
% fps.each do |fp|
|
319
|
+
<%= function_pointer_wrapper_fptr(fp) %> = fptr;
|
320
|
+
<%= fp %> = (void*)rb_funcall;
|
321
|
+
% end
|
322
|
+
} else {
|
323
|
+
rb_raise(rb_eCastOffExecutionError, "unexpected method(0)");
|
324
|
+
}
|
325
|
+
% when -1, -2
|
326
|
+
% fps.each do |fp|
|
327
|
+
<%= function_pointer_wrapper_fptr(fp) %> = (void*)rb_funcall2;
|
328
|
+
<%= fp %> = <%= function_pointer_wrapper_func(fp) %>;
|
329
|
+
% end
|
330
|
+
% when 0..15
|
331
|
+
% fps.each do |fp|
|
332
|
+
<%= function_pointer_wrapper_fptr(fp) %> = (void*)rb_funcall;
|
333
|
+
<%= fp %> = <%= function_pointer_wrapper_func(fp) %>;
|
334
|
+
% end
|
335
|
+
% else
|
336
|
+
% bug("convention = #{convention}")
|
337
|
+
% end
|
338
|
+
}
|
339
|
+
} else {
|
340
|
+
% case convention
|
341
|
+
% when -3 # rb_funcall
|
342
|
+
% fps.each do |fp|
|
343
|
+
<%= fp %> = (void*)rb_funcall;
|
344
|
+
% end
|
345
|
+
% when -1, -2
|
346
|
+
% fps.each do |fp|
|
347
|
+
<%= function_pointer_wrapper_fptr(fp) %> = (void*)rb_funcall2;
|
348
|
+
<%= fp %> = <%= function_pointer_wrapper_func(fp) %>;
|
349
|
+
% end
|
350
|
+
% when 0..15 # cfunc
|
351
|
+
% fps.each do |fp|
|
352
|
+
<%= function_pointer_wrapper_fptr(fp) %> = (void*)rb_funcall;
|
353
|
+
<%= fp %> = <%= function_pointer_wrapper_func(fp) %>;
|
354
|
+
% end
|
355
|
+
% else
|
356
|
+
% bug("convention = #{convention}")
|
357
|
+
% end
|
358
|
+
}
|
359
|
+
%end
|
360
|
+
return Qnil;
|
361
|
+
}
|
362
|
+
|
363
|
+
static inline int empty_method_table_p(VALUE klass)
|
364
|
+
{
|
365
|
+
st_table *mtbl = RCLASS_M_TBL(klass);
|
366
|
+
|
367
|
+
if (!mtbl) rb_bug("empty_method_table_p: shoult not be reached");
|
368
|
+
return mtbl->num_entries == 0;
|
369
|
+
}
|
370
|
+
|
371
|
+
%@throw_exception_functions.each do |(func, name)|
|
372
|
+
<%= func.gsub(/<THROW_EXCEPTION_FUNCTION_NAME>/, name) %>
|
373
|
+
%end
|
374
|
+
|
375
|
+
%@class_check_functions.each do |(func, name)|
|
376
|
+
<%= func.gsub(/<CLASS_CHECK_FUNCTION_NAME>/, name) %>
|
377
|
+
%end
|
378
|
+
|
379
|
+
%if !inline_block?
|
380
|
+
static inline void expand_dframe(rb_thread_t *th, long size, rb_iseq_t *iseq, int root_p)
|
381
|
+
{
|
382
|
+
rb_control_frame_t *cfp = th->cfp;
|
383
|
+
VALUE *sp = cfp->sp;
|
384
|
+
VALUE *dfp = cfp->dfp;
|
385
|
+
int i;
|
386
|
+
|
387
|
+
if ((void *)(sp + size + 2) >= (void *)cfp) {
|
388
|
+
rb_exc_raise(sysstack_error);
|
389
|
+
}
|
390
|
+
|
391
|
+
for (i = 0; i < size; i++) {
|
392
|
+
*sp++ = Qnil;
|
393
|
+
}
|
394
|
+
*sp++ = dfp[-1]; /* cref */
|
395
|
+
*sp = dfp[0]; /* specval */
|
396
|
+
|
397
|
+
if (root_p) {
|
398
|
+
cfp->lfp = sp;
|
399
|
+
}
|
400
|
+
|
401
|
+
cfp->dfp = sp;
|
402
|
+
cfp->sp = sp + 1;
|
403
|
+
cfp->bp = sp + 1;
|
404
|
+
cfp->iseq = iseq;
|
405
|
+
}
|
406
|
+
|
407
|
+
static rb_thread_t *current_thread()
|
408
|
+
{
|
409
|
+
VALUE thval = rb_thread_current();
|
410
|
+
rb_thread_t * th = DATA_PTR(thval);
|
411
|
+
|
412
|
+
return th;
|
413
|
+
}
|
414
|
+
|
415
|
+
static VALUE get_self(rb_thread_t *th)
|
416
|
+
{
|
417
|
+
return th->cfp->self;
|
418
|
+
}
|
419
|
+
|
420
|
+
static inline VALUE* fetch_dfp(rb_thread_t *th, int level)
|
421
|
+
{
|
422
|
+
VALUE *dfp;
|
423
|
+
int i;
|
424
|
+
|
425
|
+
dfp = th->cfp->dfp;
|
426
|
+
for (i = 0; i < level; i++) {
|
427
|
+
dfp = GET_PREV_DFP(dfp);
|
428
|
+
}
|
429
|
+
return dfp;
|
430
|
+
}
|
431
|
+
|
432
|
+
static inline int cast_off_lambda_p(VALUE arg, int argc, VALUE *argv)
|
433
|
+
{
|
434
|
+
VALUE *ptr;
|
435
|
+
int i;
|
436
|
+
|
437
|
+
if (rb_class_of(arg) != rb_cArray) {
|
438
|
+
return 0;
|
439
|
+
}
|
440
|
+
|
441
|
+
ptr = RARRAY_PTR(arg);
|
442
|
+
for (i = 0; i < argc; i++) {
|
443
|
+
if (ptr[i] != argv[i]) {
|
444
|
+
return 0;
|
445
|
+
}
|
446
|
+
}
|
447
|
+
|
448
|
+
return 1;
|
449
|
+
}
|
450
|
+
|
451
|
+
%if false # for instance_exec, instance_eval, ...
|
452
|
+
static inline void check_cref(rb_thread_t *th)
|
453
|
+
{
|
454
|
+
rb_control_frame_t *cfp = th->cfp;
|
455
|
+
rb_iseq_t *iseq = cfp->iseq;
|
456
|
+
VALUE *lfp = cfp->lfp;
|
457
|
+
VALUE *dfp = cfp->dfp;
|
458
|
+
NODE *cref;
|
459
|
+
|
460
|
+
while (1) {
|
461
|
+
if (lfp == dfp) {
|
462
|
+
if (!RUBY_VM_NORMAL_ISEQ_P(iseq)) {
|
463
|
+
cref = NULL;
|
464
|
+
break;
|
465
|
+
} else {
|
466
|
+
cref = iseq->cref_stack;
|
467
|
+
break;
|
468
|
+
}
|
469
|
+
} else if (dfp[-1] != Qnil) {
|
470
|
+
cref = (NODE *)dfp[-1];
|
471
|
+
break;
|
472
|
+
}
|
473
|
+
dfp = GET_PREV_DFP(dfp);
|
474
|
+
}
|
475
|
+
|
476
|
+
if (cref && cref->flags & NODE_FL_CREF_PUSHED_BY_EVAL) {
|
477
|
+
rb_raise(rb_eCastOffExecutionError, "Currently, CastOff cannot handle constant reference with object(e.g. reciever of BasicObject#instance_exec) context.");
|
478
|
+
}
|
479
|
+
}
|
480
|
+
%end
|
481
|
+
|
482
|
+
static void cast_off_set_block(rb_block_t *block)
|
483
|
+
{
|
484
|
+
VALUE thval = rb_thread_current();
|
485
|
+
rb_thread_t * th = DATA_PTR(thval);
|
486
|
+
|
487
|
+
th->passed_block = block;
|
488
|
+
}
|
489
|
+
|
490
|
+
% @root_iseq.iterate_all_iseq do |iseq|
|
491
|
+
% next if iseq.root?
|
492
|
+
<%= iseq.declare_ifunc_node() %>;
|
493
|
+
<%= iseq.declare_block_generator() %>;
|
494
|
+
<%= iseq.declare_ifunc() %>;
|
495
|
+
% end
|
496
|
+
|
497
|
+
% @root_iseq.iterate_all_iseq do |iseq|
|
498
|
+
/* iseq is <%= iseq %> */
|
499
|
+
% next if iseq.root?
|
500
|
+
<%= iseq.define_ifunc_node_generator() %>
|
501
|
+
<%= iseq.define_block_generator() %>
|
502
|
+
<%= iseq.define_ifunc() %>
|
503
|
+
% end
|
504
|
+
%end
|
505
|
+
|
506
|
+
static VALUE cast_off_register_ifunc_<%= signiture() %>(VALUE dummy)
|
507
|
+
{
|
508
|
+
%if !inline_block?
|
509
|
+
% @root_iseq.iterate_all_iseq do |iseq|
|
510
|
+
% next if iseq.root?
|
511
|
+
<%= iseq.ifunc_node_generator() %>();
|
512
|
+
% end
|
513
|
+
%end
|
514
|
+
return Qnil;
|
515
|
+
}
|
516
|
+
|
517
|
+
%if @mid
|
518
|
+
% if @complex_call
|
519
|
+
static VALUE <%= this_function_name() %>(int argc, VALUE *argv, VALUE self)
|
520
|
+
% else
|
521
|
+
static VALUE <%= this_function_name() %>(VALUE self<%= @arg_size > 0 ? ", #{arguments.join(", ")}" : "" %>)
|
522
|
+
% end
|
523
|
+
%else
|
524
|
+
static VALUE <%= this_function_name() %>(VALUE dummy, VALUE self)
|
525
|
+
%end
|
526
|
+
{
|
527
|
+
%if @configuration.enable_trace?
|
528
|
+
#ifdef CAST_OFF_ENABLE_TRACE
|
529
|
+
/* VALUE thval = rb_thread_current(); */
|
530
|
+
/* rb_thread_t *th = DATA_PTR(thval); */
|
531
|
+
/* VALUE trace_recv, trace_klass; */
|
532
|
+
#endif
|
533
|
+
%end
|
534
|
+
/* decl variables */
|
535
|
+
VALUE cast_off_argv[<%= @root_iseq.all_argv_size() %>];
|
536
|
+
VALUE cast_off_tmp;
|
537
|
+
rb_thread_t *th;
|
538
|
+
%if inline_block?
|
539
|
+
VALUE thval;
|
540
|
+
VALUE specval;
|
541
|
+
VALUE *lfp, *dfp;
|
542
|
+
%else
|
543
|
+
<%= @root_iseq.declare_dfp %>
|
544
|
+
%end
|
545
|
+
%if use_fast_ivar?
|
546
|
+
static VALUE __klass = Qundef;
|
547
|
+
VALUE *iv_table_ptr = NULL;
|
548
|
+
%end
|
549
|
+
%@ivar_index.each do |(iv_id, iv_var)|
|
550
|
+
static int <%= iv_var %>_cache;
|
551
|
+
int <%= iv_var %>;
|
552
|
+
%end
|
553
|
+
%if inline_block?
|
554
|
+
<%= @loopkey.map{|(k, v)| v.decl? ? " #{v.decl};" : nil}.compact.join("\n") %>
|
555
|
+
<%= (@root_iseq.all_local_variable_declarations - arguments).map{|v| " #{v};"}.join("\n") %>
|
556
|
+
%else
|
557
|
+
<%= (@root_iseq.own_local_variable_declarations - arguments).map{|v| " #{v};"}.join("\n") %>
|
558
|
+
%end
|
559
|
+
%if @complex_call
|
560
|
+
% arguments.each do |arg|
|
561
|
+
<%= arg %> = Qnil;
|
562
|
+
% end
|
563
|
+
%end
|
564
|
+
|
565
|
+
%if !inline_block?
|
566
|
+
th = current_thread();
|
567
|
+
expand_dframe(th, <%= @root_iseq.lvars.size %>, <%= @root_iseq %>, 1);
|
568
|
+
<%= @root_iseq.update_dfp() %>
|
569
|
+
%end
|
570
|
+
|
571
|
+
%if inline_block?
|
572
|
+
% inits = @root_iseq.all_initializations_for_guards()
|
573
|
+
%else
|
574
|
+
% inits = @root_iseq.own_initializations_for_guards()
|
575
|
+
%end
|
576
|
+
%bug() if inits.uniq!
|
577
|
+
<%= inits.join("\n") %>
|
578
|
+
|
579
|
+
%if use_fast_ivar?
|
580
|
+
if (UNLIKELY(TYPE(self) != T_OBJECT)) rb_bug("should not be reached"); /* FIXME should be check compile time */
|
581
|
+
if (UNLIKELY((RBASIC(self)->klass) != __klass)) {
|
582
|
+
/* iv index cache miss */
|
583
|
+
struct st_table *iv_index_tbl = cast_off_get_iv_index_tbl(self);
|
584
|
+
% @ivar_index.each do |(iv_id, iv_var)|
|
585
|
+
<%= iv_var %>_cache = cast_off_get_iv_index(iv_index_tbl, <%= iv_id %>);
|
586
|
+
% end
|
587
|
+
__klass = RBASIC(self)->klass;
|
588
|
+
}
|
589
|
+
% @ivar_index.each do |(iv_id, iv_var)|
|
590
|
+
<%= iv_var %> = <%= iv_var %>_cache;
|
591
|
+
% end
|
592
|
+
iv_table_ptr = cast_off_get_iv_table_ptr(self);
|
593
|
+
%end
|
594
|
+
|
595
|
+
<%= @root_iseq.enclose_begin %>
|
596
|
+
|
597
|
+
/* body */
|
598
|
+
%if inline_block?
|
599
|
+
<%= @root_iseq.all_c_function_body() %>
|
600
|
+
%else
|
601
|
+
<%= @root_iseq.own_c_function_body() %>
|
602
|
+
%end
|
603
|
+
|
604
|
+
%iterator = inline_block? ? :iterate_all_guards : :iterate_own_guards
|
605
|
+
%@root_iseq.__send__(iterator) do |code, insns|
|
606
|
+
{
|
607
|
+
long pc;
|
608
|
+
<%= @root_iseq.enclose_end_deoptimize %>
|
609
|
+
%code_label = "deoptimize_#{code.hash.to_s.gsub(/-/, "_")}"
|
610
|
+
% insns.uniq.each do |insn|
|
611
|
+
<%= insn.guard_label %>:
|
612
|
+
pc = <%= insn.pc %>;
|
613
|
+
goto <%= code_label %>;
|
614
|
+
% end
|
615
|
+
<%= code_label %>:
|
616
|
+
%if @mid
|
617
|
+
/* override this method (if this method has not redefined yet) */
|
618
|
+
/* rb_define_method(...); */
|
619
|
+
%end
|
620
|
+
<%= code %>
|
621
|
+
}
|
622
|
+
%end
|
623
|
+
<%= @root_iseq.enclose_end %>
|
624
|
+
}
|
625
|
+
|
626
|
+
%['', '_singleton'].each do |str|
|
627
|
+
static VALUE cast_off_register<%= str %>_method_<%= signiture() %>(VALUE dummy, VALUE self)
|
628
|
+
{
|
629
|
+
% if @complex_call
|
630
|
+
rb_define<%= str %>_method(self, "<%= @mid %>", <%= this_function_name() %>, -1);
|
631
|
+
% else
|
632
|
+
rb_define<%= str %>_method(self, "<%= @mid %>", <%= this_function_name() %>, <%= @arg_size %>);
|
633
|
+
% end
|
634
|
+
return Qnil;
|
635
|
+
}
|
636
|
+
%end
|
637
|
+
|
638
|
+
static VALUE cast_off_generate_proc_<%= signiture() %>(VALUE self, VALUE source_procval)
|
639
|
+
{
|
640
|
+
rb_proc_t *source_procptr = DATA_PTR(source_procval);
|
641
|
+
return rb_proc_new(<%= this_function_name() %>, source_procptr->block.self);
|
642
|
+
}
|
643
|
+
|
644
|
+
void Init_<%= signiture() %>(void)
|
645
|
+
{
|
646
|
+
%@namespace.each_nonstatic_decls do |decl|
|
647
|
+
<%= decl %>
|
648
|
+
%end
|
649
|
+
%@namespace.each do |nam|
|
650
|
+
% if /\Astatic VALUE\b/.match nam.declaration
|
651
|
+
rb_gc_register_address(&<%= nam.name %>);
|
652
|
+
% end
|
653
|
+
%end
|
654
|
+
%@namespace.each_initializers do |init|
|
655
|
+
<%= init %>
|
656
|
+
%end
|
657
|
+
/* finish up */
|
658
|
+
#define reg(n) \
|
659
|
+
rb_gc_register_mark_object(n); \
|
660
|
+
switch(BUILTIN_TYPE(n)) { \
|
661
|
+
case T_STRING: \
|
662
|
+
case T_ARRAY: \
|
663
|
+
hide_obj(n); \
|
664
|
+
break; \
|
665
|
+
}
|
666
|
+
#define bye(n) \
|
667
|
+
n = Qundef
|
668
|
+
|
669
|
+
%@namespace.each do |i|
|
670
|
+
% if /\bVALUE\b/.match i.declaration
|
671
|
+
% if /\Astatic\b/.match i.declaration
|
672
|
+
reg(<%= i.name %>);
|
673
|
+
% else
|
674
|
+
bye(<%= i.name %>);
|
675
|
+
% end
|
676
|
+
% end
|
677
|
+
%end
|
678
|
+
#undef reg
|
679
|
+
|
680
|
+
%@ic.each do |(k, v)|
|
681
|
+
MEMZERO(&<%= v %>, struct iseq_inline_cache_entry, 1);
|
682
|
+
%end
|
683
|
+
|
684
|
+
rb_mCastOff = rb_const_get(rb_cObject, rb_intern("CastOff"));
|
685
|
+
rb_eCastOffExecutionError = rb_const_get(rb_mCastOff, rb_intern("ExecutionError"));
|
686
|
+
rb_mCastOffCompiler = rb_const_get(rb_mCastOff, rb_intern("Compiler"));
|
687
|
+
rb_cCastOffSingletonClass = rb_const_get(rb_mCastOffCompiler, rb_intern("SingletonClass"));
|
688
|
+
rb_cCastOffConfiguration = rb_const_get(rb_mCastOffCompiler, rb_intern("Configuration"));
|
689
|
+
rb_cCastOffClassWrapper = rb_const_get(rb_mCastOffCompiler, rb_intern("ClassWrapper"));
|
690
|
+
rb_cCastOffMethodWrapper = rb_const_get(rb_mCastOffCompiler, rb_intern("MethodWrapper"));
|
691
|
+
|
692
|
+
%if !@mid
|
693
|
+
rb_define_method(rb_mCastOffCompiler, "<%= signiture() %>", <%= this_function_name() %>, 1);
|
694
|
+
%end
|
695
|
+
% ['', '_singleton'].each do |str|
|
696
|
+
rb_define_method(rb_mCastOffCompiler, "register<%= str %>_method_<%= signiture() %>", cast_off_register<%= str %>_method_<%= signiture() %>, 1);
|
697
|
+
% end
|
698
|
+
rb_define_method(rb_mCastOffCompiler, "register_iseq_<%= signiture() %>", cast_off_register_iseq_<%= signiture() %>, 1);
|
699
|
+
rb_define_method(rb_mCastOffCompiler, "register_ifunc_<%= signiture() %>", cast_off_register_ifunc_<%= signiture() %>, 0);
|
700
|
+
rb_define_method(rb_mCastOffCompiler, "register_sampling_table_<%= signiture() %>", cast_off_register_sampling_table_<%= signiture() %>, 1);
|
701
|
+
rb_define_method(rb_mCastOffCompiler, "initialize_fptr_<%= signiture() %>", cast_off_initialize_fptr_<%= signiture() %>, 0);
|
702
|
+
rb_define_method(rb_mCastOffCompiler, "prefetch_constants_<%= signiture() %>", cast_off_prefetch_constants_<%= signiture() %>, 1);
|
703
|
+
}
|
704
|
+
end
|
705
|
+
|
706
|
+
attr_reader :reciever_class, :loopkey, :mid, :configuration, :dependency, :root_iseq
|
707
|
+
|
708
|
+
def initialize(root, config, mid, is_proc, block_inlining, suggestion, dependency, manager)
|
709
|
+
ary = root.to_a
|
710
|
+
@configuration = config
|
711
|
+
@suggestion = suggestion
|
712
|
+
@dependency = dependency
|
713
|
+
@manager = manager
|
714
|
+
@block_inlining = block_inlining
|
715
|
+
format_check(ary)
|
716
|
+
@root_iseq = Iseq.new(root, nil, 0, nil)
|
717
|
+
@mid = mid
|
718
|
+
if execute?
|
719
|
+
# CastOff.execute
|
720
|
+
bug() unless @root_iseq.itype == :block
|
721
|
+
bug() if @mid
|
722
|
+
else
|
723
|
+
# CastOff.compile, CastOff.compile_singleton_method
|
724
|
+
bug() unless @root_iseq.itype == :method
|
725
|
+
bug() unless @mid
|
726
|
+
end
|
727
|
+
@arg_size = @root_iseq.args.arg_size
|
728
|
+
raise(UnsupportedError.new("Currently, CastOff.execute cannot handle arguments")) if execute? && @arg_size > 0
|
729
|
+
raise(UnsupportedError.new("Currently, CastOff.execute does not support deoptimization")) if execute? && @configuration.deoptimize?
|
730
|
+
bug() if is_proc
|
731
|
+
@reciever_class = @configuration.class_of_variable(:self)
|
732
|
+
@lvars, @ivars, args, body = prepare(ary, @configuration)
|
733
|
+
bug() unless @lvars.empty? || @lvars[0].is_a?(Array)
|
734
|
+
|
735
|
+
initialize_ivar_for_code_generation()
|
736
|
+
initialize_ivar_for_suggestion()
|
737
|
+
@cfg = CFG.new(body)
|
738
|
+
@cfg.gen_ir(self)
|
739
|
+
end
|
740
|
+
|
741
|
+
def signiture()
|
742
|
+
@manager.signiture
|
743
|
+
end
|
744
|
+
|
745
|
+
def target_name()
|
746
|
+
@root_iseq.to_name()
|
747
|
+
end
|
748
|
+
|
749
|
+
def to_c()
|
750
|
+
@cfg.to_c()
|
751
|
+
arguments = @lvars.slice(0, @arg_size).map{|l| "VALUE local#{l[1]}_#{l[0]}"} # FIXME
|
752
|
+
Template.trigger(binding)
|
753
|
+
end
|
754
|
+
|
755
|
+
# for code generation
|
756
|
+
def initialize_ivar_for_code_generation()
|
757
|
+
@namespace = Namespace.new()
|
758
|
+
@fptr = {}
|
759
|
+
@ic = {}
|
760
|
+
@declare_constants = {}
|
761
|
+
@class_check_functions = {}
|
762
|
+
@throw_exception_functions = {}
|
763
|
+
@prefetch_constants = {}
|
764
|
+
@ivar_index = {}
|
765
|
+
@loopkey = {}
|
766
|
+
end
|
767
|
+
|
768
|
+
def return_value_class(c, m)
|
769
|
+
@configuration.return_value_class(c, m)
|
770
|
+
end
|
771
|
+
|
772
|
+
def this_function_name()
|
773
|
+
bug() unless signiture()
|
774
|
+
"cast_off_#{signiture()}"
|
775
|
+
end
|
776
|
+
|
777
|
+
def re_compilation()
|
778
|
+
if @configuration.force_inline_block?
|
779
|
+
raise(UnsupportedError.new(<<-EOS))
|
780
|
+
|
781
|
+
Currently, CastOff cannot inline block in #{@root_iseq.name}.
|
782
|
+
Source file is #{@root_iseq.source_file}.
|
783
|
+
Source line is #{@root_iseq.source_line}.
|
784
|
+
EOS
|
785
|
+
end
|
786
|
+
if inline_block?
|
787
|
+
raise(ReCompilation.new(''))
|
788
|
+
else
|
789
|
+
bug()
|
790
|
+
end
|
791
|
+
end
|
792
|
+
|
793
|
+
def unsupported_or_re_compilation(msg)
|
794
|
+
if inline_block?
|
795
|
+
re_compilation()
|
796
|
+
else
|
797
|
+
raise(UnsupportedError.new(msg))
|
798
|
+
end
|
799
|
+
end
|
800
|
+
|
801
|
+
def allocate_name(name)
|
802
|
+
@namespace.new(name).to_s
|
803
|
+
end
|
804
|
+
|
805
|
+
STRMAX = 509
|
806
|
+
def allocate_id(val)
|
807
|
+
case val
|
808
|
+
when Symbol
|
809
|
+
robject2csource(val, @namespace, STRMAX).name
|
810
|
+
when String
|
811
|
+
robject2csource(val.intern, @namespace, STRMAX).name
|
812
|
+
when Class
|
813
|
+
robject2csource(val.to_s.intern, @namespace, STRMAX).name
|
814
|
+
else
|
815
|
+
bug()
|
816
|
+
end
|
817
|
+
end
|
818
|
+
|
819
|
+
def allocate_object(val)
|
820
|
+
case val
|
821
|
+
when Fixnum
|
822
|
+
"LONG2FIX(#{val})"
|
823
|
+
when Symbol
|
824
|
+
name = robject2csource(val, @namespace, STRMAX) # generate ID
|
825
|
+
newname = @namespace.new('symop_' + val.to_s)
|
826
|
+
newname.depends(name)
|
827
|
+
newname.declaration = 'static VALUE'
|
828
|
+
newname.definition = "#{newname.declaration} #{newname.name} = Qundef;"
|
829
|
+
newname.initialization = "#{newname.name} = #{name.expression};" # get Symbol from ID
|
830
|
+
newname.expression = nil
|
831
|
+
newname.to_s
|
832
|
+
else
|
833
|
+
robject2csource(val, @namespace, STRMAX).to_s
|
834
|
+
end
|
835
|
+
end
|
836
|
+
|
837
|
+
def function_pointer_wrapper_func(fp)
|
838
|
+
"#{fp}_funcall_wrapper"
|
839
|
+
end
|
840
|
+
|
841
|
+
def function_pointer_wrapper_func_complex(fp)
|
842
|
+
"#{fp}_funcall_wrapper_complex"
|
843
|
+
end
|
844
|
+
|
845
|
+
def function_pointer_wrapper_fptr(fp)
|
846
|
+
"#{fp}_funcall_wrapper_fptr"
|
847
|
+
end
|
848
|
+
|
849
|
+
def allocate_function_pointer(klass, mid, convention, argc)
|
850
|
+
bug() unless klass.is_a?(ClassWrapper)
|
851
|
+
fptr = "fptr_#{klass}_#{@namespace.new(mid).name}"
|
852
|
+
fptr.gsub!(/:/, '_')
|
853
|
+
ids = klass.to_s.split("::")
|
854
|
+
ids.each{|k| bug() if k == ''}
|
855
|
+
key = [ids, mid, klass.singleton?, convention, argc]
|
856
|
+
entry = @fptr[key] || []
|
857
|
+
fptr.concat("_#{argc}_#{entry.size}")
|
858
|
+
entry << fptr
|
859
|
+
@fptr[key] = entry
|
860
|
+
fptr
|
861
|
+
end
|
862
|
+
|
863
|
+
def inline_block?
|
864
|
+
@block_inlining
|
865
|
+
end
|
866
|
+
|
867
|
+
def complex_call?
|
868
|
+
@complex_call
|
869
|
+
end
|
870
|
+
|
871
|
+
def use_fast_ivar?
|
872
|
+
return false # FIXME T_OBJECT であるという指定があったときのみ true にする
|
873
|
+
@ivars.size > ROBJECT_EMBED_LEN_MAX
|
874
|
+
end
|
875
|
+
|
876
|
+
def get_ivar_index(iv_id, iv_var)
|
877
|
+
@ivar_index[iv_id] ||= "iv_index_#{iv_var}"
|
878
|
+
end
|
879
|
+
|
880
|
+
def get_ic(name)
|
881
|
+
@ic[name] ||= "ic_#{name}"
|
882
|
+
end
|
883
|
+
|
884
|
+
def declare_constant(var)
|
885
|
+
@declare_constants[var] = true
|
886
|
+
end
|
887
|
+
|
888
|
+
def declare_class_check_function(func)
|
889
|
+
unless name = @class_check_functions[func]
|
890
|
+
idx = @class_check_functions.size()
|
891
|
+
name = "class_check_#{idx}"
|
892
|
+
@class_check_functions[func] = name
|
893
|
+
end
|
894
|
+
name
|
895
|
+
end
|
896
|
+
|
897
|
+
def declare_throw_exception_function(func)
|
898
|
+
unless name = @throw_exception_functions[func]
|
899
|
+
idx = @throw_exception_functions.size()
|
900
|
+
name = "throw_exception_#{idx}"
|
901
|
+
@throw_exception_functions[func] = name
|
902
|
+
end
|
903
|
+
name
|
904
|
+
end
|
905
|
+
|
906
|
+
def prefetch_constant(var, path, singleton_p)
|
907
|
+
if @prefetch_constants[var]
|
908
|
+
bug() unless @prefetch_constants[var] == [path, singleton_p]
|
909
|
+
else
|
910
|
+
@prefetch_constants[var] = [path, singleton_p]
|
911
|
+
end
|
912
|
+
declare_constant(var)
|
913
|
+
end
|
914
|
+
|
915
|
+
C_CLASS_MAP = {
|
916
|
+
ClassWrapper.new(Fixnum, true) => :rb_cFixnum,
|
917
|
+
ClassWrapper.new(Bignum, true) => :rb_cBignum,
|
918
|
+
ClassWrapper.new(String, true) => :rb_cString,
|
919
|
+
ClassWrapper.new(Array, true) => :rb_cArray,
|
920
|
+
ClassWrapper.new(Hash, true) => :rb_cHash,
|
921
|
+
ClassWrapper.new(Float, true) => :rb_cFloat,
|
922
|
+
ClassWrapper.new(Object, true) => :rb_cObject,
|
923
|
+
ClassWrapper.new(IO, true) => :rb_cIO,
|
924
|
+
ClassWrapper.new(Module, true) => :rb_cModule,
|
925
|
+
ClassWrapper.new(Proc, true) => :rb_cProc,
|
926
|
+
ClassWrapper.new(RubyVM, true) => :rb_cRubyVM,
|
927
|
+
#ClassWrapper.new(Env, true) => :rb_cEnv,
|
928
|
+
ClassWrapper.new(Time, true) => :rb_cTime,
|
929
|
+
ClassWrapper.new(Symbol, true) => :rb_cSymbol,
|
930
|
+
ClassWrapper.new(Mutex, true) => :rb_cMutex,
|
931
|
+
ClassWrapper.new(Thread, true) => :rb_cThread,
|
932
|
+
ClassWrapper.new(Struct, true) => :rb_cStruct,
|
933
|
+
#ClassWrapper.new(Match, true) => :rb_cMatch,
|
934
|
+
ClassWrapper.new(Regexp, true) => :rb_cRegexp,
|
935
|
+
ClassWrapper.new(Rational, true) => :rb_cRational,
|
936
|
+
ClassWrapper.new(Range, true) => :rb_cRange,
|
937
|
+
ClassWrapper.new(NilClass, true) => :rb_cNilClass,
|
938
|
+
ClassWrapper.new(Random, true) => :rb_cRandom,
|
939
|
+
ClassWrapper.new(Numeric, true) => :rb_cNumeric,
|
940
|
+
ClassWrapper.new(Integer, true) => :rb_cInteger,
|
941
|
+
ClassWrapper.new(Binding, true) => :rb_cBinding,
|
942
|
+
ClassWrapper.new(Method, true) => :rb_cMethod,
|
943
|
+
ClassWrapper.new(File, true) => :rb_cFile,
|
944
|
+
ClassWrapper.new(FalseClass, true) => :rb_cFalseClass,
|
945
|
+
ClassWrapper.new(TrueClass, true) => :rb_cTrueClass,
|
946
|
+
ClassWrapper.new(Class, true) => :rb_cClass,
|
947
|
+
ClassWrapper.new(Encoding, true) => :rb_cEncoding,
|
948
|
+
ClassWrapper.new(Complex, true) => :rb_cComplex,
|
949
|
+
ClassWrapper.new(Dir, true) => :rb_cDir,
|
950
|
+
#ClassWrapper.new(Stat, true) => :rb_cStat,
|
951
|
+
ClassWrapper.new(Enumerator, true) => :rb_cEnumerator,
|
952
|
+
ClassWrapper.new(Fiber, true) => :rb_cFiber,
|
953
|
+
ClassWrapper.new(Data, true) => :rb_cData,
|
954
|
+
#ClassWrapper.new(Generator, true) => :rb_cGenerator,
|
955
|
+
#ClassWrapper.new(Continuation, true) => :rb_cContinuation,
|
956
|
+
#ClassWrapper.new(ISeq, true) => :rb_cISeq,
|
957
|
+
#ClassWrapper.new(UnboundMethod, true) => :rb_cUnboundMethod,
|
958
|
+
#ClassWrapper.new(BasicObject, true) => :rb_cBasicObject,
|
959
|
+
#ClassWrapper.new(ARGF, true) => :rb_cARGF,
|
960
|
+
#ClassWrapper.new(Yielder, true) => :rb_cYielder,
|
961
|
+
#ClassWrapper.new(NameErrorMesg, true) => :rb_cNameErrorMesg,
|
962
|
+
#ClassWrapper.new(Barrier, true) => :rb_cBarrier,
|
963
|
+
}
|
964
|
+
|
965
|
+
def get_c_classname(klass)
|
966
|
+
bug() unless klass.is_a?(ClassWrapper)
|
967
|
+
name = C_CLASS_MAP[klass]
|
968
|
+
if name
|
969
|
+
return name
|
970
|
+
else
|
971
|
+
if @configuration.has_binding?
|
972
|
+
begin
|
973
|
+
path = "::#{klass.to_s}"
|
974
|
+
rescue CompileError
|
975
|
+
return nil
|
976
|
+
end
|
977
|
+
return nil unless /^[\w:]+$/.match(path)
|
978
|
+
if klass.singleton?
|
979
|
+
name = allocate_name("singleton_class_#{path}")
|
980
|
+
singleton_p = true
|
981
|
+
else
|
982
|
+
name = allocate_name("class_#{path}")
|
983
|
+
singleton_p = false
|
984
|
+
end
|
985
|
+
prefetch_constant(name, path, singleton_p)
|
986
|
+
return name
|
987
|
+
else
|
988
|
+
return nil
|
989
|
+
end
|
990
|
+
end
|
991
|
+
bug()
|
992
|
+
end
|
993
|
+
|
994
|
+
# for suggestion
|
995
|
+
MSG_TYPE_NOT_RESOLVED = "Reciever not resolved."
|
996
|
+
COL_TYPE_NOT_RESOLVED = ["<Reciever>", "<Method>", "<Line>", "<Source>"]
|
997
|
+
MSG_LITERAL_DUPLICATED = "These literals have duplicated."
|
998
|
+
COL_LITERAL_DUPLICATED = ["<Literal>", "<Reason>", "<Line>", "<Source>"]
|
999
|
+
MSG_INLINEAPI_NOT_USED = "Some inline apis have not used."
|
1000
|
+
COL_INLINEAPI_NOT_USED = ["<Method>", "<Reason>", "<Line>", "<Source>"]
|
1001
|
+
|
1002
|
+
SUGGESTION_TABLE = [
|
1003
|
+
[MSG_TYPE_NOT_RESOLVED, COL_TYPE_NOT_RESOLVED, :@type_suggestion],
|
1004
|
+
[MSG_LITERAL_DUPLICATED, COL_LITERAL_DUPLICATED, :@literal_suggestion],
|
1005
|
+
[MSG_INLINEAPI_NOT_USED, COL_INLINEAPI_NOT_USED, :@inlineapi_suggestion],
|
1006
|
+
]
|
1007
|
+
|
1008
|
+
SUGGESTION_TABLE.each do |(msg, col, ivar)|
|
1009
|
+
name = ivar.slice(1, ivar.size - 1)
|
1010
|
+
eval(<<-EOS, binding)
|
1011
|
+
def add_#{name}(msg)
|
1012
|
+
#{ivar} << msg
|
1013
|
+
end
|
1014
|
+
EOS
|
1015
|
+
end
|
1016
|
+
|
1017
|
+
def initialize_ivar_for_suggestion()
|
1018
|
+
SUGGESTION_TABLE.each{|(msg, col, ivar)| instance_variable_set(ivar, [])}
|
1019
|
+
end
|
1020
|
+
|
1021
|
+
def suggest()
|
1022
|
+
return unless @configuration.development?
|
1023
|
+
SUGGESTION_TABLE.each do |(msg, col, ivar)|
|
1024
|
+
val = instance_variable_get(ivar)
|
1025
|
+
bug() unless val.instance_of?(Array)
|
1026
|
+
next if val.empty?
|
1027
|
+
@suggestion.add_suggestion(msg, col, val)
|
1028
|
+
end
|
1029
|
+
end
|
1030
|
+
|
1031
|
+
private
|
1032
|
+
|
1033
|
+
def execute?
|
1034
|
+
!@mid
|
1035
|
+
end
|
1036
|
+
|
1037
|
+
def gen_embed_label(label, loop_id)
|
1038
|
+
(label.to_s + "_#{loop_id}").intern
|
1039
|
+
end
|
1040
|
+
|
1041
|
+
def embed(p_lvars, p_args, p_excs, p_body, index, iseq_ary, loop_id)
|
1042
|
+
ary = iseq_ary[10..13]
|
1043
|
+
c_lvars, c_args, c_excs, c_body = *ary
|
1044
|
+
|
1045
|
+
# iseq
|
1046
|
+
c_iseq = nil
|
1047
|
+
c_depth = nil
|
1048
|
+
c_body.each do |v|
|
1049
|
+
case v when InsnInfo
|
1050
|
+
c_iseq = v.iseq
|
1051
|
+
c_depth = v.depth
|
1052
|
+
break
|
1053
|
+
end
|
1054
|
+
end
|
1055
|
+
bug() unless c_iseq && c_depth
|
1056
|
+
|
1057
|
+
# lvars
|
1058
|
+
bug() unless (p_lvars & c_lvars).empty?
|
1059
|
+
lvars = p_lvars + c_lvars
|
1060
|
+
|
1061
|
+
# args
|
1062
|
+
re_compilation() if c_iseq.args.block? && inline_block?
|
1063
|
+
re_compilation() if c_iseq.args.opt? && inline_block?
|
1064
|
+
c_args = c_lvars.slice(0, c_iseq.args.arg_size)
|
1065
|
+
|
1066
|
+
# excs
|
1067
|
+
excs = p_excs + c_excs.map do |(t, i, s, e, c, sp)|
|
1068
|
+
# type, iseq, start, end, cont, sp
|
1069
|
+
# rename labels(start, end, cont)
|
1070
|
+
s = gen_embed_label(s, loop_id)
|
1071
|
+
e = gen_embed_label(e, loop_id)
|
1072
|
+
c = gen_embed_label(c, loop_id)
|
1073
|
+
[t, i, s, e, c, sp]
|
1074
|
+
end
|
1075
|
+
|
1076
|
+
# body
|
1077
|
+
bug() unless p_body[index].get_iseq() == iseq_ary
|
1078
|
+
b = []
|
1079
|
+
insn = p_body[index]
|
1080
|
+
# CastOff supports send instruction only
|
1081
|
+
# CastOff doesn't support :defineclass, :invokesuper, :putiseq
|
1082
|
+
bug() unless insn.op == :send
|
1083
|
+
loop_label = gen_embed_label(:loop, loop_id)
|
1084
|
+
cont_label = gen_embed_label(:cont, loop_id)
|
1085
|
+
bug() unless insn.depth
|
1086
|
+
prep = InsnInfo.new([:cast_off_prep, loop_id, c_args, insn], insn.iseq, insn.pc, insn.line, false, insn.depth)
|
1087
|
+
b << prep
|
1088
|
+
b << InsnInfo.new([:cast_off_enter_block, loop_label], insn.iseq, -1, -1, true, prep.depth + prep.stack_usage())
|
1089
|
+
b << cont_label
|
1090
|
+
bug() unless c_depth + 1 == prep.depth + prep.stack_usage()
|
1091
|
+
b << InsnInfo.new([:cast_off_cont, loop_id, c_args, insn], c_iseq, -1, -1, true, c_depth)
|
1092
|
+
if c_iseq.args.opt?
|
1093
|
+
bug() if inline_block?
|
1094
|
+
c_args_opts = c_iseq.args.opts.map{|l| gen_embed_label(l, loop_id) }
|
1095
|
+
b << InsnInfo.new([:cast_off_fetch_args, nil], c_iseq, -1, -1, true, c_depth)
|
1096
|
+
b << InsnInfo.new([:cast_off_handle_optional_args, c_args_opts, c_iseq.args.argc, false], c_iseq, -1, -1, true, c_depth + 1)
|
1097
|
+
end
|
1098
|
+
|
1099
|
+
is_break = false
|
1100
|
+
break_label = gen_embed_label(:break, loop_id)
|
1101
|
+
c_body.each do |v|
|
1102
|
+
case v
|
1103
|
+
when InsnInfo
|
1104
|
+
bug() unless v.support?
|
1105
|
+
if label = v.get_label()
|
1106
|
+
v = v.dup()
|
1107
|
+
v.set_label(gen_embed_label(label, loop_id))
|
1108
|
+
end
|
1109
|
+
case v.op
|
1110
|
+
when :leave
|
1111
|
+
# leave => jump
|
1112
|
+
b << InsnInfo.new([:cast_off_leave_block, loop_label], c_iseq, -1, -1, true, v.depth)
|
1113
|
+
when :throw
|
1114
|
+
type, state, flag, level = v.get_throw_info()
|
1115
|
+
bug() unless flag == 0
|
1116
|
+
case type
|
1117
|
+
when :return
|
1118
|
+
# nothing to do
|
1119
|
+
b << v
|
1120
|
+
if !inline_block?
|
1121
|
+
if @mid
|
1122
|
+
@root_iseq.catch_exception(:return, nil, nil, nil)
|
1123
|
+
else
|
1124
|
+
raise(UnsupportedError, "Currently, CastOff.execute doesn't support return statement when block inlining is disabled")
|
1125
|
+
end
|
1126
|
+
end
|
1127
|
+
when :break
|
1128
|
+
is_break = true
|
1129
|
+
cfg = CFG.new(c_body)
|
1130
|
+
stack_depth = cfg.find_insn_stack_depth(v)
|
1131
|
+
bug() unless stack_depth
|
1132
|
+
bug() unless stack_depth > 0
|
1133
|
+
num = stack_depth - 1
|
1134
|
+
if num > 0
|
1135
|
+
b << InsnInfo.new([:setn, num], c_iseq, -1, -1, true, v.depth)
|
1136
|
+
num.times do |i|
|
1137
|
+
b << InsnInfo.new([:pop], c_iseq, -1, -1, true, v.depth - i)
|
1138
|
+
end
|
1139
|
+
end
|
1140
|
+
bug() unless c_iseq.parent_pc == insn.pc + insn.size
|
1141
|
+
b << InsnInfo.new([:cast_off_break_block, break_label, v.argv[0], c_iseq.parent_pc], c_iseq, -1, -1, true, c_depth + 1)
|
1142
|
+
insn.iseq.catch_exception(:break, c_iseq.parent_pc, break_label, insn.depth + insn.stack_usage()) if !inline_block?
|
1143
|
+
else
|
1144
|
+
bug()
|
1145
|
+
end
|
1146
|
+
else
|
1147
|
+
b << v
|
1148
|
+
end
|
1149
|
+
when Symbol
|
1150
|
+
b << gen_embed_label(v, loop_id)
|
1151
|
+
else
|
1152
|
+
bug()
|
1153
|
+
end
|
1154
|
+
end
|
1155
|
+
b << loop_label
|
1156
|
+
b << InsnInfo.new([:cast_off_loop, loop_id, c_args, insn], insn.iseq, -1, -1, true, c_depth + 1)
|
1157
|
+
b << InsnInfo.new([:cast_off_continue_loop, cont_label], insn.iseq, -1, -1, true, c_depth + 1)
|
1158
|
+
b << InsnInfo.new([:cast_off_finl, loop_id, c_args, insn], insn.iseq, -1, -1, true, c_depth)
|
1159
|
+
b << break_label if is_break
|
1160
|
+
body = p_body.slice(0, index) + b + p_body.slice(index + 1, p_body.size - (index + 1))
|
1161
|
+
|
1162
|
+
[lvars, p_args, excs, body]
|
1163
|
+
end
|
1164
|
+
|
1165
|
+
def get_var_index(locals, index)
|
1166
|
+
locals.size - (index - 2) - 1
|
1167
|
+
end
|
1168
|
+
|
1169
|
+
def prepare_local_variable(ary, configuration, lvars_table, depth, varid, current_iseq)
|
1170
|
+
misc = ary[4]
|
1171
|
+
lvars_size = misc[:local_size] - 1
|
1172
|
+
bug() unless lvars_size.is_a?(Integer)
|
1173
|
+
ary = ary[10..13]
|
1174
|
+
lvars, args, dummy, body = *ary
|
1175
|
+
op_idx = -(lvars_size + 1) # lvar を参照するための get/setlocal, get/setdynamic オペランド, dfp/lfp からの index
|
1176
|
+
lvars.map! do |l|
|
1177
|
+
var = [l, varid, op_idx, depth, configuration.class_of_variable(l)]
|
1178
|
+
varid += 1
|
1179
|
+
op_idx += 1
|
1180
|
+
var
|
1181
|
+
end
|
1182
|
+
if lvars.size < lvars_size
|
1183
|
+
#bug() unless depth > 0
|
1184
|
+
#def pma1((a), &b) end <= depth == 0 but lvars_size == 4, lvars = [:a, :b]
|
1185
|
+
|
1186
|
+
# for block
|
1187
|
+
(lvars_size - lvars.size).times do
|
1188
|
+
lvars << [:__lvar, varid, op_idx, depth, nil]
|
1189
|
+
varid += 1
|
1190
|
+
op_idx += 1
|
1191
|
+
end
|
1192
|
+
end
|
1193
|
+
bug() unless op_idx == -1
|
1194
|
+
current_iseq.set_local_variables(lvars)
|
1195
|
+
lvars_table << lvars
|
1196
|
+
body.each do |v|
|
1197
|
+
case v when InsnInfo
|
1198
|
+
bug() unless v.support?
|
1199
|
+
if iseq_ary = v.get_iseq()
|
1200
|
+
child_iseq = current_iseq.children[v.pc]
|
1201
|
+
bug() unless child_iseq.is_a?(Iseq)
|
1202
|
+
varid = prepare_local_variable(iseq_ary, configuration, lvars_table.dup, depth + 1, varid, child_iseq)
|
1203
|
+
end
|
1204
|
+
case v.op
|
1205
|
+
when :getdynamic, :setdynamic, :getlocal, :setlocal
|
1206
|
+
case v.op
|
1207
|
+
when :getdynamic
|
1208
|
+
idx, lv = *v.argv
|
1209
|
+
if inline_block?
|
1210
|
+
insn = [:cast_off_getlvar]
|
1211
|
+
else
|
1212
|
+
insn = [:cast_off_getdvar]
|
1213
|
+
end
|
1214
|
+
when :setdynamic
|
1215
|
+
idx, lv = *v.argv
|
1216
|
+
if inline_block?
|
1217
|
+
insn = [:cast_off_setlvar]
|
1218
|
+
else
|
1219
|
+
insn = [:cast_off_setdvar]
|
1220
|
+
end
|
1221
|
+
when :getlocal
|
1222
|
+
idx = v.argv[0]
|
1223
|
+
lv = depth # set/getlocal uses lfp
|
1224
|
+
if inline_block?
|
1225
|
+
insn = [:cast_off_getlvar]
|
1226
|
+
else
|
1227
|
+
insn = [:cast_off_getdvar]
|
1228
|
+
end
|
1229
|
+
when :setlocal
|
1230
|
+
idx = v.argv[0]
|
1231
|
+
lv = depth # set/getlocal uses lfp
|
1232
|
+
if inline_block?
|
1233
|
+
insn = [:cast_off_setlvar]
|
1234
|
+
else
|
1235
|
+
insn = [:cast_off_setdvar]
|
1236
|
+
end
|
1237
|
+
else
|
1238
|
+
bug()
|
1239
|
+
end
|
1240
|
+
raise(UnsupportedError.new(<<-EOS)) if 0 > depth - lv
|
1241
|
+
Unsupported operation(#{v.source}).
|
1242
|
+
Currently, CastOff doesn't support variables defined in an outer block.
|
1243
|
+
EOS
|
1244
|
+
var_index = get_var_index(lvars_table[depth - lv], idx)
|
1245
|
+
bug() unless 0 <= var_index && var_index <= lvars_table[depth - lv].size()
|
1246
|
+
lvar = lvars_table[depth - lv][var_index]
|
1247
|
+
bug() unless lvar
|
1248
|
+
bug() unless (depth - lv) == lvar[3] # name, id, op_idx, depth, types
|
1249
|
+
insn.concat(lvar)
|
1250
|
+
bug() unless insn.size() == 6
|
1251
|
+
v.update(insn)
|
1252
|
+
when :getinstancevariable, :setinstancevariable
|
1253
|
+
id, ic = *v.argv
|
1254
|
+
case v.op
|
1255
|
+
when :getinstancevariable
|
1256
|
+
insn = [:cast_off_getivar]
|
1257
|
+
when :setinstancevariable
|
1258
|
+
insn = [:cast_off_setivar]
|
1259
|
+
else
|
1260
|
+
bug()
|
1261
|
+
end
|
1262
|
+
ivar = [id, configuration.class_of_variable(id)]
|
1263
|
+
insn.concat(ivar)
|
1264
|
+
bug() if insn.size != 3
|
1265
|
+
v.update(insn)
|
1266
|
+
when :getclassvariable, :setclassvariable
|
1267
|
+
id = v.argv[0]
|
1268
|
+
case v.op
|
1269
|
+
when :getclassvariable
|
1270
|
+
insn = [:cast_off_getcvar]
|
1271
|
+
when :setclassvariable
|
1272
|
+
insn = [:cast_off_setcvar]
|
1273
|
+
else
|
1274
|
+
bug()
|
1275
|
+
end
|
1276
|
+
cvar = [id, configuration.class_of_variable(id)]
|
1277
|
+
insn.concat(cvar)
|
1278
|
+
bug() if insn.size != 3
|
1279
|
+
v.update(insn)
|
1280
|
+
when :getglobal, :setglobal
|
1281
|
+
gentry = v.argv[0]
|
1282
|
+
case v.op
|
1283
|
+
when :getglobal
|
1284
|
+
insn = [:cast_off_getgvar]
|
1285
|
+
when :setglobal
|
1286
|
+
insn = [:cast_off_setgvar]
|
1287
|
+
else
|
1288
|
+
bug()
|
1289
|
+
end
|
1290
|
+
gvar = [gentry, configuration.class_of_variable(gentry)]
|
1291
|
+
insn.concat(gvar)
|
1292
|
+
bug() if insn.size != 3
|
1293
|
+
v.update(insn)
|
1294
|
+
end
|
1295
|
+
end
|
1296
|
+
end
|
1297
|
+
varid
|
1298
|
+
end
|
1299
|
+
|
1300
|
+
def prepare_constant(body)
|
1301
|
+
nb = []
|
1302
|
+
pre = nil
|
1303
|
+
buf = nil
|
1304
|
+
body.each do |v|
|
1305
|
+
case v
|
1306
|
+
when InsnInfo
|
1307
|
+
case v.op
|
1308
|
+
when :getconstant
|
1309
|
+
bug() unless pre
|
1310
|
+
preop = pre.op
|
1311
|
+
if buf
|
1312
|
+
bug() unless preop == :getconstant
|
1313
|
+
else
|
1314
|
+
case preop
|
1315
|
+
when :putobject
|
1316
|
+
bug() unless pre.argv[0] == Object
|
1317
|
+
flag = true
|
1318
|
+
when :putnil
|
1319
|
+
flag = false
|
1320
|
+
else
|
1321
|
+
raise(UnsupportedError.new(<<-EOS))
|
1322
|
+
|
1323
|
+
Currently, CastOff cannot handle this constant reference.
|
1324
|
+
--- source code ---
|
1325
|
+
#{v.source}
|
1326
|
+
EOS
|
1327
|
+
end
|
1328
|
+
nb << InsnInfo.new([:pop], v.iseq, -1, -1, true, v.depth) # pop Object or nil
|
1329
|
+
n_insn = InsnInfo.new([:cast_off_getconst, flag], v.iseq, -1, -1, true, v.depth - 1)
|
1330
|
+
nb << n_insn
|
1331
|
+
buf = n_insn.argv
|
1332
|
+
end
|
1333
|
+
buf.concat(v.argv) # append id
|
1334
|
+
else
|
1335
|
+
buf = nil
|
1336
|
+
nb << v
|
1337
|
+
end
|
1338
|
+
pre = v
|
1339
|
+
else
|
1340
|
+
pre = nil
|
1341
|
+
nb << v
|
1342
|
+
end
|
1343
|
+
end
|
1344
|
+
nb.each do |v|
|
1345
|
+
case v when InsnInfo
|
1346
|
+
bug() if v.op == :getconstant
|
1347
|
+
end
|
1348
|
+
end
|
1349
|
+
nb
|
1350
|
+
end
|
1351
|
+
|
1352
|
+
def prepare_branch_instruction_and_line_no(ary)
|
1353
|
+
nb = []
|
1354
|
+
body = ary[13]
|
1355
|
+
body.each do |v|
|
1356
|
+
case v
|
1357
|
+
when InsnInfo # instruction
|
1358
|
+
bug() unless v.support?
|
1359
|
+
if iseq_ary = v.get_iseq()
|
1360
|
+
prepare_branch_instruction_and_line_no(iseq_ary)
|
1361
|
+
end
|
1362
|
+
if !v.ignore?
|
1363
|
+
case v.op
|
1364
|
+
when :getinlinecache
|
1365
|
+
nb << InsnInfo.new([:putnil], v.iseq, -1, -1, true, v.depth)
|
1366
|
+
when :opt_case_dispatch
|
1367
|
+
nb << InsnInfo.new([:pop], v.iseq, -1, -1, true, v.depth)
|
1368
|
+
else
|
1369
|
+
nb << v
|
1370
|
+
end
|
1371
|
+
end
|
1372
|
+
when Symbol # label
|
1373
|
+
nb << v
|
1374
|
+
when Integer # line
|
1375
|
+
# ignore
|
1376
|
+
else
|
1377
|
+
raise(CompileError, 'wrong format iseq')
|
1378
|
+
end
|
1379
|
+
end
|
1380
|
+
ary[13] = nb
|
1381
|
+
end
|
1382
|
+
|
1383
|
+
# Rewrites :throw instructions in the flattened body.
#
# Only method-return throws (type :return, flag 0) are expected here;
# when compiling an inlined block (and not executing), such a throw is
# lowered to a plain :leave.  A :break throw reaching this point is a
# bug — breaks are handled before this pass.  All other entries pass
# through unchanged.  Returns the rewritten body.
def prepare_throw_instruction(body)
  body.map do |entry|
    next entry unless entry.is_a?(InsnInfo)
    next entry unless entry.op == :throw
    type, _state, flag, _level = entry.get_throw_info()
    bug() unless flag == 0
    case type
    when :return
      if !execute? && inline_block?
        InsnInfo.new([:leave], entry.iseq, -1, -1, true, entry.depth)
      else
        entry
      end
    when :break
      bug() # should not be reached
    else
      bug()
    end
  end
end
|
1412
|
+
|
1413
|
+
# Converts the raw dumped-iseq body (ary[13]) from plain Arrays/Symbols/
# Integers into annotated InsnInfo objects, attaches stack depths via a
# CFG, and recursively wires up child iseqs attached to :send instructions.
#
# ary          -- an iseq in SimpleDataFormat array layout (ary[12] is the
#                 exception table, ary[13] the instruction body)
# current_iseq -- the Iseq wrapper this body belongs to
# current_depth-- stack depth at entry to this iseq; added to each
#                 instruction's CFG-computed depth
#
# Raises UnsupportedError for rescue/ensure entries or unsupported
# instructions.  Mutates ary[13] in place; no meaningful return value.
def annotate_instruction(ary, current_iseq, current_depth)
  bug() unless current_iseq.is_a?(Iseq)
  excs = ary[12]
  body = ary[13]

  excs.each do |(t, i, s, e, c, sp)|
    # type, iseq, start, end, cont, sp
    case t when :rescue, :ensure
      # CastOff doesn't support rescue and ensure.
      raise(UnsupportedError, "Currently, CastOff cannot handle #{t}")
    end
  end

  # First pass: wrap each raw instruction Array in an InsnInfo carrying
  # its program counter and source line; drop bare line-number entries.
  pc = 0
  line = -1
  body.map! do |v|
    case v
    when Array # raw instruction
      bug() if pc < 0
      insn = InsnInfo.new(v, current_iseq, pc, line)
      raise(UnsupportedError, insn.get_unsupport_message()) unless insn.support?
      pc += v.size() # operands count toward the pc, as in the dump format
      insn
    when Symbol # label of the form :label_<pc>; resync pc from its name
      pc = /label_/.match(v).post_match.to_i
      v
    when Integer # line number entry; remembered, then removed via compact!
      line = v
      nil
    end
  end
  body.compact!()

  # Second pass: compute per-instruction stack depth from the CFG and
  # offset it by the depth at which this iseq itself runs.
  cfg = CFG.new(body)
  body.each do |v|
    next unless v.instance_of?(InsnInfo)
    d = cfg.find_insn_stack_depth(v)
    v.set_stack_depth(d + current_depth) if d
  end

  # Third pass: recurse into block iseqs attached to :send instructions,
  # registering each child on the current Iseq.
  body.each do |v|
    next unless v.instance_of?(InsnInfo)
    if iseq_ary = v.get_iseq()
      bug() unless v.op == :send
      # Child runs at the depth left on the stack just below the send result.
      child_iseq = Iseq.new(get_child_iseq(current_iseq.iseq, v.pc), current_iseq, v.depth + v.stack_usage - 1, v.pc + v.size)
      current_iseq.add(child_iseq, v.pc)
      annotate_instruction(iseq_ary, child_iseq, v.depth + v.stack_usage - 1)
    end
  end
end
|
1463
|
+
|
1464
|
+
# Full preparation pipeline for a dumped iseq before code generation.
#
# Annotates instructions, renames/collects local variables, normalizes
# branches, embeds child iseqs, rewrites throw/constant instructions,
# collects instance variables, and prepends argument-handling and
# variable-declaration pseudo-instructions.
#
# Returns [lvars, ivars, args, body].
def prepare(ary, configuration)
  annotate_instruction(ary, @root_iseq, 0)
  prepare_local_variable(ary, configuration, [], 0, 0, @root_iseq)
  prepare_branch_instruction_and_line_no(ary)
  # SimpleDataFormat slots 10..13: local vars, arg spec, exception table, body.
  ary = ary[10..13]
  lvars, args, excs, body = *ary
  # Fixed-point loop: embedding a child iseq rewrites body, which may
  # expose further child iseqs; restart the scan until none remain.
  loop_id = 0
  continue = true
  while continue
    continue = false
    body.each_with_index do |v, index|
      case v when InsnInfo
        bug() unless v.support?
        if iseq_ary = v.get_iseq()
          lvars, args, excs, body = embed(lvars, args, excs, body, index, iseq_ary, loop_id)
          continue = true
          break
        end
      end
    end
    loop_id += 1
  end

  body = prepare_throw_instruction(body)
  body = prepare_constant(body)

  # Collect every instance variable touched by the rewritten ivar ops.
  ivars = []
  body.each do |v|
    case v when InsnInfo
      case v.op
      when :cast_off_getivar, :cast_off_setivar
        ivars << v.argv[0]
      end
    end
  end
  ivars.uniq!

  # A "complex call" is any signature beyond plain required args:
  # optional args, a rest arg, a block arg, or post args.
  opts = @root_iseq.args.opt? ? @root_iseq.args.opts : false
  rest_index = @root_iseq.args.rest? ? @root_iseq.args.rest_index : false
  block_argument_p = @root_iseq.args.block?
  post_len = @root_iseq.args.post? ? @root_iseq.args.post_len : false
  @complex_call = opts || rest_index || block_argument_p || post_len
  if @complex_call
    block = block_argument_p ? '&' : ''
    post = post_len ? post_len : 0
    rest = rest_index ? '*' : ''
    # opts holds one extra entry (the "all defaults applied" label),
    # hence size - 1 — TODO confirm against the iseq dump format.
    opt = opts ? opts.size() - 1 : 0
    must = @root_iseq.args.argc
    if opts
      body.unshift(InsnInfo.new([:cast_off_handle_optional_args, opts, must, rest_index], @root_iseq, -1, -1, true, 1))
    else
      # No optional args: fetch_args leaves a value that must be dropped.
      body.unshift(InsnInfo.new([:pop], @root_iseq, -1, -1, true, 1))
    end
    # fetch_args gets the full signature description plus the argument lvars.
    body.unshift(InsnInfo.new([:cast_off_fetch_args, [must, opt, rest, post, block, lvars.slice(0, @arg_size)]], @root_iseq, -1, -1, true, 0))
  end

  # Prepend one declaration pseudo-instruction per local variable; the
  # first @arg_size lvars are arguments, the rest plain locals.
  decl = []
  lvars.each_with_index do |l, index|
    if index < @arg_size
      bug() if execute?
      op = :cast_off_decl_arg
    else
      op = :cast_off_decl_var
    end
    decl.push(InsnInfo.new([op] + l, @root_iseq, -1, -1, true, 0))
  end
  # reverse so declarations end up in original lvar order at the front.
  body = decl.reverse + body

  [lvars, ivars, args, body]
end
|
1534
|
+
|
1535
|
+
# Validates the header of a dumped iseq array and returns its iseq type.
#
# Accepts only YARV SimpleDataFormat version 1.2, format type 1, with an
# iseq type of :block or :method.  Raises CompileError with a dump of the
# offending header fields otherwise.
def format_check(ary)
  magic, major, minor, ftype = ary.values_at(0, 1, 2, 3)
  itype = ary[9] # iseq type

  valid = magic == 'YARVInstructionSequence/SimpleDataFormat' &&
          major == 1 &&
          minor == 2 &&
          ftype == 1 &&
          [:block, :method].include?(itype)

  unless valid
    raise(CompileError, <<-EOS)
wrong format iseq
magic: #{magic}
major: #{major}
minor: #{minor}
ftype: #{ftype}
itype: #{itype}
    EOS
  end

  itype
end
|
1559
|
+
end
|
1560
|
+
end
|
1561
|
+
end
|
1562
|
+
|