ytljit 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/ext/code_alloc.c +39 -17
- data/ext/memory.c +170 -22
- data/ext/ytljit.c +57 -2
- data/ext/ytljit.h +15 -2
- data/lib/ytljit/asm.rb +21 -0
- data/lib/ytljit/asmext_x64.rb +1 -1
- data/lib/ytljit/asmutil.rb +1 -0
- data/lib/ytljit/instruction.rb +6 -0
- data/lib/ytljit/instruction_ia.rb +31 -2
- data/lib/ytljit/util.rb +5 -4
- data/lib/ytljit/vm.rb +771 -182
- data/lib/ytljit/vm_codegen.rb +76 -25
- data/lib/ytljit/vm_cruby_obj.rb +34 -13
- data/lib/ytljit/vm_inline_method.rb +154 -32
- data/lib/ytljit/vm_sendnode.rb +597 -112
- data/lib/ytljit/vm_trans.rb +148 -35
- data/lib/ytljit/vm_type.rb +5 -1
- data/lib/ytljit/vm_type_gen.rb +224 -51
- data/lib/ytljit.rb +5 -0
- data/test/test_assemble2.rb +35 -5
- metadata +3 -3
data/lib/ytljit/vm_codegen.rb
CHANGED
@@ -79,14 +79,18 @@
       @modified_local_var = []
       @modified_instance_var = Hash.new
       @yield_node = []
+
+      # Options from user
+      @options = {}
     end

     attr :top_node
     attr_accessor :modified_local_var
     attr_accessor :modified_instance_var
     attr_accessor :yield_node
+    attr_accessor :options

-    def
+    def marge_local_var(lvlist)
       res = nil
       lvlist.each do |lvs|
         if res then
@@ -112,6 +116,8 @@
       @current_method = [tnode]
       @convergent = false
       @visited_top_node = {}
+      # Options from user
+      @options = {}
     end

     def to_signature(offset = -1, cache = {})
@@ -128,6 +134,11 @@
       if sigc.size == 1 then
         return sigc[0]
       else
+        p offset.signature_cache
+        p i
+        p offset.debug_info
+        p @current_method.map {|e| e.debug_info}
+        p @current_method.map {|e| e.class}
         raise "I can't type inference..."
       end
     end
@@ -142,11 +153,14 @@
       end

       if rsig = cache[cursignode] then
+        rsig = rsig.map {|e| e.copy_type}
         return rsig
       end

       if curmethod.is_a?(Node::ClassTopNode) then
+        # Can't pass block when entering a class definition
         rsig = to_signature_aux(cursignode, offset, cache)
+        rsig = rsig.map {|e| e.copy_type}
         cache[cursignode] = rsig
         rsig

@@ -154,6 +168,7 @@
       prevsig = to_signature(offset - 1, cache)
       rsig = to_signature_aux2(curmethod, cursignode,
                                prevsig, offset, cache)
+      rsig = rsig.map {|e| e.copy_type}
       cache[cursignode] = rsig
       rsig

@@ -185,13 +200,15 @@
         res.push ele.type
       end

-
+      ynode = mt.yield_node[0]
+      if ynode then
        yargs = ynode.arguments
-       push_signature(
+       push_signature(yargs, ynode.frame_info.parent)
        ysig = to_signature_aux3(yargs, -1, cache)
        args[1].type = nil
-       args[1].decide_type_once(ysig)
-       res
+       res[1] = args[1].decide_type_once(ysig)
+       # p res
+       # p res[1]
        pop_signature
      end

@@ -203,14 +220,18 @@
       return res
     end

-
-
-
-
-
-
-
-
+      node = @current_method[offset]
+      if node.is_a?(Node::ClassTopNode) then
+        node.signature_cache[0]
+      else
+        cursignode2 = @current_method_signature_node[offset]
+        sig = to_signature_aux3(cursignode2, offset - 1, cache)
+        res = cursignode.map { |enode|
+          enode.decide_type_once(sig)
+        }
+        cache[cursignode] = res
+        res
+      end
     end

     def push_signature(signode, method)
@@ -227,6 +248,7 @@
     attr :current_method_signature_node
     attr_accessor :convergent
     attr_accessor :visited_top_node
+    attr_accessor :options
   end

   class CompileContext
@@ -515,7 +537,7 @@
       asm.with_retry do
         asm.mov(SPR, BPR)
         asm.pop(BPR)
-        asm.pop(THEPR)
+        asm.pop(THEPR) if @is_escape != :export_object
         asm.mov(SPR, BPR)
         asm.pop(BPR)
       end
@@ -559,14 +581,38 @@

     def gen_alloca(context, siz)
       asm = context.assembler
-
-
-
+      case siz
+      when Integer
+        add = lambda {
+          address_of("ytl_arena_alloca")
+        }
+        alloca = OpVarMemAddress.new(add)
+        asm.with_retry do
+          asm.mov(FUNC_ARG[0], THEPR)
+          asm.mov(TMPR, siz)
+          asm.mov(FUNC_ARG[1], TMPR)
+        end
+        context = gen_call(context, alloca, 2)
+        asm.with_retry do
+          asm.mov(THEPR, RETR)
+        end
+      else
+        raise "Not implemented yet variable alloca"
       end
       context.ret_reg = THEPR
       context
     end

+    def gen_save_thepr(context)
+      casm = context.assembler
+      arenaaddr = context.top_node.get_arena_address
+      casm.with_retry do
+        casm.mov(TMPR, arenaaddr)
+        casm.mov(INDIRECT_TMPR, THEPR)
+      end
+      context
+    end
+
     def gen_call(context, fnc, numarg, slf = nil)
       casm = context.assembler

@@ -619,12 +665,14 @@
     include AbsArch
     include CommonCodeGen

-    def gen_make_argv(context)
+    def gen_make_argv(context, rarg = nil, argcomphook = nil)
       casm = context.assembler
-      rarg
+      if rarg == nil then
+        rarg = @arguments[3..-1]
+      end
+      cursig = context.to_signature

       # make argv
-      casm = context.assembler
       argbyte = rarg.size * AsmType::MACHINE_WORD.size
       casm.with_retry do
         casm.sub(SPR, argbyte)
@@ -632,11 +680,15 @@
       context.cpustack_pushn(argbyte)

       rarg.each_with_index do |arg, i|
-
-
-
+        rtype = nil
+        if argcomphook then
+          rtype = argcomphook.call(context, arg, i)
+        else
+          context = arg.compile(context)
+          context.ret_node.decide_type_once(cursig)
+          rtype = context.ret_node.type
+        end
         context = rtype.gen_boxing(context)
-        casm = context.assembler
         dst = OpIndirect.new(SPR, i * AsmType::MACHINE_WORD.size)
         if context.ret_reg.is_a?(OpRegistor) or
            context.ret_reg.is_a?(OpImmidiate32) or
@@ -666,7 +718,6 @@
       context = yield(context, rarg)

       # adjust stack
-      casm = context.assembler
       casm.with_retry do
         casm.add(SPR, argbyte)
       end
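Note on the gen_make_argv change above: the argument list and the per-argument compile step are now pluggable, so callers may pass an explicit rarg and an argcomphook callback instead of relying on @arguments[3..-1] and the default compile/boxing path. The standalone Ruby sketch below only illustrates that optional-hook shape; build_argv and its sample data are hypothetical and not part of ytljit.

# Illustrative sketch only (not ytljit code): an optional per-element hook
# with a default path, mirroring gen_make_argv(context, rarg, argcomphook).
def build_argv(args, explicit = nil, hook = nil)
  list = explicit || args[3..-1]          # default source, like @arguments[3..-1]
  list.each_with_index.map do |arg, i|
    if hook
      hook.call(arg, i)                   # caller-supplied per-argument step
    else
      arg.to_s                            # stand-in for the default compile path
    end
  end
end

p build_argv([:m, :blk, :self, 1, 2, 3])             # => ["1", "2", "3"]
p build_argv(nil, [10, 20], lambda {|a, i| a + i })  # => [10, 21]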
data/lib/ytljit/vm_cruby_obj.rb
CHANGED
@@ -3,25 +3,33 @@ module YTLJit
   module Node
     class CRubyInstanceVarRefNode<InstanceVarRefNode
       include TypeListWithoutSignature
+      include CommonCodeGen

-      def initialize(parent, name)
+      def initialize(parent, name, mnode)
        super
        @current_frame_info = search_frame_info
      end

      def compile_main(context)
        slfoff = @current_frame_info.offset_arg(2, BPR)
-
-
+       mivl = @class_top.end_nodes[0].modified_instance_var.keys
+       off = mivl.index(@name)
+       addr = lambda {
+         address_of("ytl_ivar_get_boxing")
+       }
+       ivarget = OpVarMemAddress.new(addr)
        context.start_arg_reg
        asm = context.assembler
        asm.with_retry do
          asm.mov(FUNC_ARG[0], slfoff)
-         asm.mov(FUNC_ARG[1],
+         asm.mov(FUNC_ARG[1], off)
+       end
+       context = gen_save_thepr(context)
+       asm.with_retry do
          asm.call_with_arg(ivarget, 2)
        end
+
        context.end_arg_reg
-
        context.ret_reg = RETR
        context.ret_node = self
        decide_type_once(context.to_signature)
@@ -34,19 +42,26 @@ module YTLJit

     class CRubyInstanceVarAssignNode<InstanceVarAssignNode
       include TypeListWithoutSignature
+      include CommonCodeGen

-      def initialize(parent, name, val)
+      def initialize(parent, name, mnode, val)
        super
        @current_frame_info = search_frame_info
      end

      def compile_main(context)
        slfoff = @current_frame_info.offset_arg(2, BPR)
-
-
+       mivl = @class_top.end_nodes[0].modified_instance_var.keys
+       off = mivl.index(@name)
+       addr = lambda {
+         address_of("ytl_ivar_set_boxing")
+       }
+       ivarset = OpVarMemAddress.new(addr)
        context = @val.compile(context)
        rtype = @val.decide_type_once(context.to_signature)
-
+       if @val.is_escape != true then
+         context = rtype.gen_boxing(context)
+       end

        context.start_arg_reg
        asm = context.assembler
@@ -54,8 +69,11 @@ module YTLJit
          asm.push(TMPR2)
          asm.mov(TMPR2, context.ret_reg)
          asm.mov(FUNC_ARG[0], slfoff)
-         asm.mov(FUNC_ARG[1],
+         asm.mov(FUNC_ARG[1], off)
          asm.mov(FUNC_ARG[2], TMPR2)
+       end
+       context = gen_save_thepr(context)
+       asm.with_retry do
          asm.call_with_arg(ivarset, 3)
          asm.pop(TMPR2)
        end
@@ -74,7 +92,8 @@ module YTLJit
      def visit_getinstancevariable(code, ins, context)
        context.macro_method = false
        curnode = context.current_node
-
+       mnode = context.current_method_node
+       node = CRubyInstanceVarRefNode.new(curnode, ins[1], mnode)
        node.debug_info = context.debug_info
        context.expstack.push node
      end
@@ -83,10 +102,12 @@ module YTLJit
        context.macro_method = false
        val = context.expstack.pop
        curnode = context.current_node
-
+       mnode = context.current_method_node
+       node = CRubyInstanceVarAssignNode.new(curnode, ins[1], mnode, val)
        node.debug_info = context.debug_info
        if context.expstack[-1] == val then
-
+         ivr = CRubyInstanceVarRefNode.new(curnode, ins[1], mnode)
+         context.expstack[-1] = ivr
        end
        curnode.body = node
        context.current_node = node
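Note on the instance-variable nodes above: both the ref and assign nodes derive the slot index the same way, taking the ordered key list of modified_instance_var as the object layout and using Array#index to turn an ivar name into the integer offset passed to ytl_ivar_get_boxing / ytl_ivar_set_boxing. The standalone sketch below only illustrates that lookup; the hash contents are made up for the example.

# Illustrative sketch only: name-to-slot lookup as used by the ivar nodes.
modified_instance_var = {:@a => true, :@b => true, :@c => true}  # example layout

mivl = modified_instance_var.keys   # ordered ivar layout: [:@a, :@b, :@c]
off  = mivl.index(:@b)              # slot index handed to the C helper
p off                               # => 1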
data/lib/ytljit/vm_inline_method.rb
CHANGED
@@ -2,6 +2,7 @@ module YTLJit
   module VM
     module ArithmeticOperationUtil
       include AbsArch
+=begin
       def decide_type_core_local(tlist, sig, local_cache = {})
         tlist = tlist.select {|e| e.class != RubyType::DefaultType0 }
         if tlist.size < 2 then
@@ -29,6 +30,63 @@ module YTLJit

         @type
       end
+=end
+
+      def gen_inst_with_conversion(context, dst, inst)
+        asm = context.assembler
+        src = context.ret_reg
+        if dst.is_a?(OpRegXMM) then
+          # Float
+          if src.is_a?(OpRegistor) and
+             !src.is_a?(OpRegXMM) then
+            asm.with_retry do
+              asm.cvtsi2sd(XMM0, src)
+            end
+            context.end_using_reg(src)
+            asm.with_retry do
+              asm.send(inst, dst, XMM0)
+            end
+          elsif src.using(dst) then
+            asm.with_retry do
+              asm.mov(XMM0, src)
+            end
+            context.end_using_reg(src)
+            asm.with_retry do
+              asm.send(inst, dst, XMM0)
+            end
+          elsif src.is_a?(OpImmidiateMachineWord) then
+            asm.with_retry do
+              asm.mov(TMPR, src)
+            end
+            context.end_using_reg(src)
+            asm.with_retry do
+              asm.cvtsi2sd(XMM0, TMPR)
+              asm.send(inst, dst, XMM0)
+            end
+          else
+            asm.with_retry do
+              asm.send(inst, dst, src)
+            end
+            context.end_using_reg(src)
+          end
+        else
+          # Fixnum
+          if src.using(dst) then
+            asm.with_retry do
+              asm.mov(TMPR, src)
+            end
+            context.end_using_reg(src)
+            asm.with_retry do
+              asm.send(inst, dst, TMPR)
+            end
+          else
+            asm.with_retry do
+              asm.send(inst, dst, src)
+            end
+            context.end_using_reg(src)
+          end
+        end
+      end

       def gen_arithmetic_operation(context, inst, tempreg, resreg)
         context.start_using_reg(tempreg)
@@ -36,21 +94,7 @@
         context.ret_node.type = nil
         rtype = context.ret_node.decide_type_once(context.to_signature)
         context = rtype.gen_unboxing(context)
-
-        if context.ret_reg.using(tempreg) then
-          asm.with_retry do
-            asm.mov(TMPR, context.ret_reg)
-          end
-          context.end_using_reg(context.ret_reg)
-          asm.with_retry do
-            asm.mov(tempreg, TMPR)
-          end
-        else
-          asm.with_retry do
-            asm.mov(tempreg, context.ret_reg)
-          end
-          context.end_using_reg(context.ret_reg)
-        end
+        gen_inst_with_conversion(context, tempreg, :mov)
         context.set_reg_content(tempreg, context.ret_node)

         # @argunents[1] is block
@@ -61,21 +105,14 @@
         context.ret_node.type = nil
         rtype = context.ret_node.decide_type_once(context.to_signature)
         context = rtype.gen_unboxing(context)
-
-        asm = context.assembler
+
         if block_given? then
           yield(context)
         else
+          # default code
+          gen_inst_with_conversion(context, tempreg, inst)
+          asm = context.assembler
           asm.with_retry do
-            # default code
-            if context.ret_reg.using(tempreg) then
-              asm.mov(TMPR, context.ret_reg)
-              context.end_using_reg(context.ret_reg)
-              asm.send(inst, tempreg, TMPR)
-            else
-              asm.send(inst, tempreg, context.ret_reg)
-              context.end_using_reg(context.ret_reg)
-            end
             asm.mov(resreg, tempreg)
           end
         end
@@ -87,7 +124,7 @@

         decide_type_once(context.to_signature)
         if @type.boxed then
-          context = @type.gen_boxing(context)
+          context = @type.to_unbox.gen_boxing(context)
         end

         context
@@ -96,14 +133,13 @@

     module CompareOperationUtil
       def gen_compare_operation(context, cinst, sinst,
-                                tempreg, tempreg2, resreg)
+                                tempreg, tempreg2, resreg, dounbox = true)
         context.start_using_reg(tempreg)
         asm = context.assembler
         asm.with_retry do
           asm.mov(tempreg, context.ret_reg)
         end
         context.set_reg_content(tempreg, context.ret_node)
-        context.set_reg_content(tempreg, context.ret_node)

         # @arguments[1] is block
         # @arguments[2] is self
@@ -112,16 +148,20 @@
         context = aele.compile(context)
         context.ret_node.type = nil
         rtype = context.ret_node.decide_type_once(context.to_signature)
-
+        if dounbox then
+          context = rtype.gen_unboxing(context)
+        end

         asm = context.assembler
         asm.with_retry do
           if context.ret_reg != tempreg2 then
             asm.mov(tempreg2, context.ret_reg)
           end
+          context.set_reg_content(tempreg2, context.ret_node)
           asm.send(cinst, tempreg2, tempreg)
           asm.send(sinst, resreg)
           asm.add(resreg, resreg)
+          context.set_reg_content(resreg, context.ret_node)
         end
         context.end_using_reg(tempreg)

@@ -129,10 +169,92 @@
         context.ret_reg = resreg

         decide_type_once(context.to_signature)
-
-
+
+        context
+      end
+    end
+
+    module UnboxedArrayUtil
+      include AbsArch
+      def gen_ref_element(context, slf, idx)
+        context.start_using_reg(TMPR2)
+        context = slf.compile(context)
+        asm = context.assembler
+        asm.with_retry do
+          asm.mov(TMPR2, context.ret_reg)
+        end
+        if idx.is_a?(Fixnum) then
+          idxval = idx
+        else
+          context = idx.compile(context)
+          idxval = context.ret_reg
         end
+        asm.with_retry do
+          if idxval.is_a?(Fixnum) then
+            asm.mov(TMPR, idxval * 8)
+          elsif idxval.is_a?(OpImmidiate)
+            asm.mov(TMPR, idxval.value * 8)
+          else
+            if idxval != TMPR then
+              asm.mov(TMPR, idxval)
+            end
+            asm.add(TMPR, TMPR) # * 2
+            asm.add(TMPR, TMPR) # * 4
+            asm.add(TMPR, TMPR) # * 8
+          end
+          asm.add(TMPR2, TMPR)
+          asm.mov(RETR, INDIRECT_TMPR2)
+        end
+
+        context.end_using_reg(TMPR2)
+        context.ret_reg = RETR
+        context.ret_node = self
+
+        context
+      end

+      def gen_set_element(context, slf, idx, val)
+        context.start_using_reg(TMPR2)
+
+        asm = context.assembler
+        if slf then
+          context = slf.compile(context)
+          asm.with_retry do
+            asm.mov(TMPR2, context.ret_reg)
+          end
+        end
+        if idx.is_a?(Fixnum) then
+          idxval = idx
+        else
+          context = idx.compile(context)
+          idxval = context.ret_reg
+        end
+
+        asm.with_retry do
+          if idxval.is_a?(Fixnum) then
+            asm.mov(TMPR, idxval * 8)
+          elsif idxval.is_a?(OpImmidiate)
+            asm.mov(TMPR, idxval.value * 8)
+          else
+            asm.mov(TMPR, idxval)
+            asm.add(TMPR, TMPR) # * 2
+            asm.add(TMPR, TMPR) # * 4
+            asm.add(TMPR, TMPR) # * 8
+          end
+          asm.add(TMPR2, TMPR)
+        end
+        context = val.compile(context)
+
+        asm.with_retry do
+          if context.ret_reg != RETR then
+            asm.mov(RETR, context.ret_reg)
+          end
+          asm.mov(INDIRECT_TMPR2, RETR)
+        end
+
+        context.end_using_reg(TMPR2)
+        context.ret_reg = RETR
+        context.ret_node = self
         context
       end
     end
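Note on gen_ref_element / gen_set_element above: a dynamic index is scaled to a byte offset without a multiply instruction, since doubling TMPR three times equals multiplying by 8, the machine word size assumed here. A plain-Ruby check of that identity:

# Illustrative check only: three doublings equal a multiply by 8,
# mirroring the three asm.add(TMPR, TMPR) steps in the diff.
idx = 5
tmp = idx
3.times { tmp += tmp }
p tmp        # => 40
p idx * 8    # => 40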