free 0.1.0-i386-mswin32 → 0.1.2-i386-mswin32
- data/HISTORY +2 -0
- data/README.md +2 -2
- data/ext/free/extconf.rb +1 -0
- data/ext/free/free.c +1 -1
- data/ext/free/ruby_headers/debug.h +36 -0
- data/ext/free/ruby_headers/dln.h +41 -0
- data/ext/free/ruby_headers/eval_intern.h +232 -0
- data/ext/free/ruby_headers/gc.h +77 -0
- data/ext/free/ruby_headers/id.h +173 -0
- data/ext/free/ruby_headers/iseq.h +104 -0
- data/ext/free/ruby_headers/method.h +103 -0
- data/ext/free/ruby_headers/node.h +483 -0
- data/ext/free/ruby_headers/regenc.h +211 -0
- data/ext/free/ruby_headers/regint.h +841 -0
- data/ext/free/ruby_headers/regparse.h +354 -0
- data/ext/free/ruby_headers/thread_pthread.h +27 -0
- data/ext/free/ruby_headers/thread_win32.h +33 -0
- data/ext/free/ruby_headers/timev.h +21 -0
- data/ext/free/ruby_headers/transcode_data.h +109 -0
- data/ext/free/ruby_headers/version.h +63 -0
- data/ext/free/ruby_headers/vm_core.h +703 -0
- data/ext/free/ruby_headers/vm_exec.h +184 -0
- data/ext/free/ruby_headers/vm_insnhelper.h +208 -0
- data/ext/free/ruby_headers/vm_opts.h +51 -0
- data/lib/1.8/free.so +0 -0
- data/lib/1.9/free.so +0 -0
- data/lib/free/version.rb +1 -1
- metadata +26 -3
data/ext/free/ruby_headers/vm_exec.h ADDED
@@ -0,0 +1,184 @@
+/**********************************************************************
+
+  vm.h -
+
+  $Author$
+  created at: 04/01/01 16:56:59 JST
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+#ifndef RUBY_VM_EXEC_H
+#define RUBY_VM_EXEC_H
+
+typedef long OFFSET;
+typedef unsigned long lindex_t;
+typedef unsigned long dindex_t;
+typedef rb_num_t GENTRY;
+typedef rb_iseq_t *ISEQ;
+
+#ifdef COLLECT_USAGE_ANALYSIS
+#define USAGE_ANALYSIS_INSN(insn) vm_analysis_insn(insn)
+#define USAGE_ANALYSIS_OPERAND(insn, n, op) vm_analysis_operand(insn, n, (VALUE)op)
+#define USAGE_ANALYSIS_REGISTER(reg, s) vm_analysis_register(reg, s)
+#else
+#define USAGE_ANALYSIS_INSN(insn) /* none */
+#define USAGE_ANALYSIS_OPERAND(insn, n, op) /* none */
+#define USAGE_ANALYSIS_REGISTER(reg, s) /* none */
+#endif
+
+#ifdef __GCC__
+/* TODO: machine dependent prefetch instruction */
+#define PREFETCH(pc)
+#else
+#define PREFETCH(pc)
+#endif
+
+#if VMDEBUG > 0
+#define debugs printf
+#define DEBUG_ENTER_INSN(insn) \
+  debug_print_pre(th, GET_CFP());
+
+#if OPT_STACK_CACHING
+#define SC_REGS() , reg_a, reg_b
+#else
+#define SC_REGS()
+#endif
+
+#define DEBUG_END_INSN() \
+  debug_print_post(th, GET_CFP() SC_REGS());
+
+#else
+
+#define debugs
+#define DEBUG_ENTER_INSN(insn)
+#define DEBUG_END_INSN()
+#endif
+
+#define throwdebug if(0)printf
+/* #define throwdebug printf */
+
+/************************************************/
+#if DISPATCH_XXX
+error !
+/************************************************/
+#elif OPT_CALL_THREADED_CODE
+
+#define LABEL(x) insn_func_##x
+#define ELABEL(x)
+#define LABEL_PTR(x) &LABEL(x)
+
+#define INSN_ENTRY(insn) \
+  static rb_control_frame_t * \
+  FUNC_FASTCALL(LABEL(insn))(rb_thread_t *th, rb_control_frame_t *reg_cfp) {
+
+#define END_INSN(insn) return reg_cfp;}
+
+#define NEXT_INSN() return reg_cfp;
+
+/************************************************/
+#elif OPT_TOKEN_THREADED_CODE || OPT_DIRECT_THREADED_CODE
+/* threaded code with gcc */
+
+#define LABEL(x) INSN_LABEL_##x
+#define ELABEL(x) INSN_ELABEL_##x
+#define LABEL_PTR(x) &&LABEL(x)
+
+#define INSN_ENTRY_SIG(insn)
+
+
+#define INSN_DISPATCH_SIG(insn)
+
+#define INSN_ENTRY(insn) \
+  LABEL(insn): \
+  INSN_ENTRY_SIG(insn); \
+
+/* dispather */
+#if __GNUC__ && (__i386__ || __x86_64__) && __GNUC__ == 3
+#define DISPATCH_ARCH_DEPEND_WAY(addr) \
+  asm volatile("jmp *%0;\t# -- inseted by vm.h\t[length = 2]" : : "r" (addr))
+
+#else
+#define DISPATCH_ARCH_DEPEND_WAY(addr) \
+  /* do nothing */
+
+#endif
+
+
+/**********************************/
+#if OPT_DIRECT_THREADED_CODE
+
+/* for GCC 3.4.x */
+#define TC_DISPATCH(insn) \
+  INSN_DISPATCH_SIG(insn); \
+  goto *GET_CURRENT_INSN(); \
+  ;
+
+#else
+/* token threade code */
+
+#define TC_DISPATCH(insn) \
+  DISPATCH_ARCH_DEPEND_WAY(insns_address_table[GET_CURRENT_INSN()]); \
+  INSN_DISPATCH_SIG(insn); \
+  goto *insns_address_table[GET_CURRENT_INSN()]; \
+  rb_bug("tc error");
+
+
+#endif /* DISPATCH_DIRECT_THREADED_CODE */
+
+#define END_INSN(insn) \
+  DEBUG_END_INSN(); \
+  TC_DISPATCH(insn); \
+
+#define INSN_DISPATCH() \
+  TC_DISPATCH(__START__) \
+  {
+
+#define END_INSNS_DISPATCH() \
+  rb_bug("unknown insn: %ld", GET_CURRENT_INSN()); \
+  } /* end of while loop */ \
+
+#define NEXT_INSN() TC_DISPATCH(__NEXT_INSN__)
+
+/************************************************/
+#else /* no threaded code */
+/* most common method */
+
+#define INSN_ENTRY(insn) \
+case BIN(insn):
+
+#define END_INSN(insn) \
+  DEBUG_END_INSN(); \
+  break;
+
+
+#define INSN_DISPATCH() \
+  while(1){ \
+    switch(GET_CURRENT_INSN()){
+
+#define END_INSNS_DISPATCH() \
+      default: \
+        SDR(); \
+        rb_bug("unknown insn: %ld", GET_CURRENT_INSN()); \
+    } /* end of switch */ \
+  } /* end of while loop */ \
+
+#define NEXT_INSN() goto first
+
+#endif
+
+#define VM_SP_CNT(th, sp) ((sp) - (th)->stack)
+
+#if OPT_CALL_THREADED_CODE
+#define THROW_EXCEPTION(exc) do { \
+    th->errinfo = (VALUE)(exc); \
+    return 0; \
+} while (0)
+#else
+#define THROW_EXCEPTION(exc) return (VALUE)(exc)
+#endif
+
+#define SCREG(r) (reg_##r)
+
+#endif /* RUBY_VM_EXEC_H */
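vm_exec.h above only defines the dispatch plumbing; which branch actually gets compiled depends on the OPT_*_THREADED_CODE settings in vm_opts.h further down. The standalone sketch below is not part of the gem (the opcodes, run_switch and run_threaded are invented for illustration); it contrasts the two strategies on a toy three-instruction VM: the portable switch loop that INSN_DISPATCH()/END_INSNS_DISPATCH() expand to, and the GCC computed-goto dispatch used when OPT_DIRECT_THREADED_CODE is on.

/* Minimal sketch (not from the gem) of the two dispatch styles vm_exec.h
 * chooses between: a plain switch loop (the "no threaded code" branch,
 * INSN_ENTRY == case BIN(insn)) and GCC computed-goto dispatch (the
 * OPT_DIRECT_THREADED_CODE branch, goto *GET_CURRENT_INSN()).
 * Opcode names and the toy stack are invented for illustration. */
#include <stdio.h>

enum { OP_PUSH, OP_ADD, OP_HALT };

static long run_switch(const long *pc)
{
    long stack[16], *sp = stack;
    while (1) {                        /* INSN_DISPATCH() */
        switch (*pc) {                 /* GET_CURRENT_INSN() */
          case OP_PUSH: *sp++ = pc[1]; pc += 2; break;  /* operand follows opcode */
          case OP_ADD:  sp[-2] += sp[-1]; --sp; pc += 1; break;
          case OP_HALT: return sp[-1];
          default: return -1;          /* END_INSNS_DISPATCH(): unknown insn */
        }
    }
}

#if defined(__GNUC__)
/* Direct threading: the "program" stores label addresses instead of opcode
 * numbers, so every instruction body ends with one indirect jump, which is
 * exactly what TC_DISPATCH() does. */
static long run_threaded(void)
{
    static void *label_for[] = { &&L_push, &&L_add, &&L_halt };
    void *code[] = { label_for[OP_PUSH], (void *)2L,
                     label_for[OP_PUSH], (void *)3L,
                     label_for[OP_ADD],
                     label_for[OP_HALT] };
    void **pc = code;
    long stack[16], *sp = stack;

    goto **pc;                                          /* INSN_DISPATCH() */
  L_push: *sp++ = (long)pc[1]; pc += 2; goto **pc;      /* END_INSN -> TC_DISPATCH */
  L_add:  sp[-2] += sp[-1]; --sp; pc += 1; goto **pc;
  L_halt: return sp[-1];
}
#endif

int main(void)
{
    const long prog[] = { OP_PUSH, 2, OP_PUSH, 3, OP_ADD, OP_HALT };
    printf("switch dispatch:   %ld\n", run_switch(prog));
#if defined(__GNUC__)
    printf("threaded dispatch: %ld\n", run_threaded());
#endif
    return 0;
}

Built with gcc (the computed-goto branch needs the GNU labels-as-values extension), both loops print 5; the threaded variant trades the central switch for one indirect jump at the end of each instruction body.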
data/ext/free/ruby_headers/vm_insnhelper.h ADDED
@@ -0,0 +1,208 @@
+/**********************************************************************
+
+  insnhelper.h - helper macros to implement each instructions
+
+  $Author$
+  created at: 04/01/01 15:50:34 JST
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+#ifndef RUBY_INSNHELPER_H
+#define RUBY_INSNHELPER_H
+
+/**
+ * VM Debug Level
+ *
+ * debug level:
+ *  0: no debug output
+ *  1: show instruction name
+ *  2: show stack frame when control stack frame is changed
+ *  3: show stack status
+ *  4: show register
+ *  5:
+ * 10: gc check
+ */
+
+#ifndef VMDEBUG
+#define VMDEBUG 0
+#endif
+
+#if 0
+#undef VMDEBUG
+#define VMDEBUG 3
+#endif
+
+enum {
+  BOP_PLUS,
+  BOP_MINUS,
+  BOP_MULT,
+  BOP_DIV,
+  BOP_MOD,
+  BOP_EQ,
+  BOP_EQQ,
+  BOP_LT,
+  BOP_LE,
+  BOP_LTLT,
+  BOP_AREF,
+  BOP_ASET,
+  BOP_LENGTH,
+  BOP_SIZE,
+  BOP_SUCC,
+  BOP_GT,
+  BOP_GE,
+  BOP_NOT,
+  BOP_NEQ,
+
+  BOP_LAST_
+};
+
+extern char ruby_vm_redefined_flag[BOP_LAST_];
+extern VALUE ruby_vm_const_missing_count;
+
+
+/**********************************************************/
+/* deal with stack */
+/**********************************************************/
+
+#define PUSH(x) (SET_SV(x), INC_SP(1))
+#define TOPN(n) (*(GET_SP()-(n)-1))
+#define POPN(n) (DEC_SP(n))
+#define POP() (DEC_SP(1))
+#define STACK_ADDR_FROM_TOP(n) (GET_SP()-(n))
+
+#define GET_TOS() (tos) /* dummy */
+
+/**********************************************************/
+/* deal with registers */
+/**********************************************************/
+
+#define REG_CFP (reg_cfp)
+#define REG_PC (REG_CFP->pc)
+#define REG_SP (REG_CFP->sp)
+#define REG_LFP (REG_CFP->lfp)
+#define REG_DFP (REG_CFP->dfp)
+
+#define RESTORE_REGS() do { \
+  REG_CFP = th->cfp; \
+} while (0)
+
+#define REG_A reg_a
+#define REG_B reg_b
+
+#ifdef COLLECT_USAGE_ANALYSIS
+#define USAGE_ANALYSIS_REGISTER_HELPER(a, b, v) \
+  (USAGE_ANALYSIS_REGISTER(a, b), (v))
+#else
+#define USAGE_ANALYSIS_REGISTER_HELPER(a, b, v) (v)
+#endif
+
+/* PC */
+#define GET_PC() (USAGE_ANALYSIS_REGISTER_HELPER(0, 0, REG_PC))
+#define SET_PC(x) (REG_PC = (USAGE_ANALYSIS_REGISTER_HELPER(0, 1, x)))
+#define GET_CURRENT_INSN() (*GET_PC())
+#define GET_OPERAND(n) (GET_PC()[(n)])
+#define ADD_PC(n) (SET_PC(REG_PC + (n)))
+
+#define GET_PC_COUNT() (REG_PC - GET_ISEQ()->iseq_encoded)
+#define JUMP(dst) (REG_PC += (dst))
+
+/* FP */
+#define GET_CFP() (USAGE_ANALYSIS_REGISTER_HELPER(2, 0, REG_CFP))
+#define GET_LFP() (USAGE_ANALYSIS_REGISTER_HELPER(3, 0, REG_LFP))
+#define SET_LFP(x) (REG_LFP = (USAGE_ANALYSIS_REGISTER_HELPER(3, 1, (x))))
+#define GET_DFP() (USAGE_ANALYSIS_REGISTER_HELPER(4, 0, REG_DFP))
+#define SET_DFP(x) (REG_DFP = (USAGE_ANALYSIS_REGISTER_HELPER(4, 1, (x))))
+
+/* SP */
+#define GET_SP() (USAGE_ANALYSIS_REGISTER_HELPER(1, 0, REG_SP))
+#define SET_SP(x) (REG_SP = (USAGE_ANALYSIS_REGISTER_HELPER(1, 1, (x))))
+#define INC_SP(x) (REG_SP += (USAGE_ANALYSIS_REGISTER_HELPER(1, 1, (x))))
+#define DEC_SP(x) (REG_SP -= (USAGE_ANALYSIS_REGISTER_HELPER(1, 1, (x))))
+#define SET_SV(x) (*GET_SP() = (x))
+  /* set current stack value as x */
+
+#define GET_SP_COUNT() (REG_SP - th->stack)
+
+/* instruction sequence C struct */
+#define GET_ISEQ() (GET_CFP()->iseq)
+
+/**********************************************************/
+/* deal with variables */
+/**********************************************************/
+
+#define GET_PREV_DFP(dfp) ((VALUE *)((dfp)[0] & ~0x03))
+
+#define GET_GLOBAL(entry) rb_gvar_get((struct rb_global_entry*)entry)
+#define SET_GLOBAL(entry, val) rb_gvar_set((struct rb_global_entry*)entry, val)
+
+#define GET_CONST_INLINE_CACHE(dst) ((IC) * (GET_PC() + (dst) + 2))
+
+/**********************************************************/
+/* deal with values */
+/**********************************************************/
+
+#define GET_SELF() (USAGE_ANALYSIS_REGISTER_HELPER(5, 0, GET_CFP()->self))
+
+/**********************************************************/
+/* deal with control flow 2: method/iterator */
+/**********************************************************/
+
+#define COPY_CREF(c1, c2) do { \
+  NODE *__tmp_c2 = (c2); \
+  c1->nd_clss = __tmp_c2->nd_clss; \
+  c1->nd_visi = __tmp_c2->nd_visi;\
+  c1->nd_next = __tmp_c2->nd_next; \
+} while (0)
+
+#define CALL_METHOD(num, blockptr, flag, id, me, recv) do { \
+  VALUE v = vm_call_method(th, GET_CFP(), num, blockptr, flag, id, me, recv); \
+  if (v == Qundef) { \
+    RESTORE_REGS(); \
+    NEXT_INSN(); \
+  } \
+  else { \
+    val = v; \
+  } \
+} while (0)
+
+#define GET_BLOCK_PTR() \
+  ((rb_block_t *)(GC_GUARDED_PTR_REF(GET_LFP()[0] & \
+                                     ((GET_LFP()[0] & 0x02) - 0x02))))
+
+/**********************************************************/
+/* deal with control flow 3: exception */
+/**********************************************************/
+
+
+/**********************************************************/
+/* others */
+/**********************************************************/
+
+/* optimize insn */
+#define FIXNUM_2_P(a, b) ((a) & (b) & 1)
+#define BASIC_OP_UNREDEFINED_P(op) (LIKELY(ruby_vm_redefined_flag[op] == 0))
+#define HEAP_CLASS_OF(obj) RBASIC(obj)->klass
+
+#ifndef USE_IC_FOR_SPECIALIZED_METHOD
+#define USE_IC_FOR_SPECIALIZED_METHOD 1
+#endif
+
+#if USE_IC_FOR_SPECIALIZED_METHOD
+
+#define CALL_SIMPLE_METHOD(num, id, recv) do { \
+  VALUE klass = CLASS_OF(recv); \
+  CALL_METHOD(num, 0, 0, id, vm_method_search(id, klass, ic), recv); \
+} while (0)
+
+#else
+
+#define CALL_SIMPLE_METHOD(num, id, recv) do { \
+  VALUE klass = CLASS_OF(recv); \
+  CALL_METHOD(num, 0, 0, id, rb_method_entry(klass, id), recv); \
+} while (0)
+
+#endif
+
+#endif /* RUBY_INSNHELPER_H */
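The stack and register macros in vm_insnhelper.h are what the generated instruction bodies are written in terms of, and the "optimize insn" block near the end carries the fast arithmetic paths. The short sketch below is not from the gem (LONG2FIX/FIX2LONG are simplified copies of MRI's Fixnum tagging macros, and fake_obj is an invented stand-in for a heap pointer); it shows why FIXNUM_2_P(a, b) can test both operands with a single AND.

/* Standalone sketch (not part of the gem) of the tagging trick behind
 * FIXNUM_2_P(a, b) ((a) & (b) & 1): an MRI Fixnum VALUE is the integer
 * shifted left once with the low bit set, so ANDing two VALUEs and masking
 * bit 0 confirms both operands are Fixnums in one test. */
#include <stdio.h>

typedef unsigned long VALUE;                  /* same width MRI uses for VALUE */

#define LONG2FIX(i)  (((VALUE)(i) << 1) | 1)  /* Fixnum tag: low bit = 1 */
#define FIX2LONG(v)  ((long)(v) >> 1)
#define FIXNUM_2_P(a, b) ((a) & (b) & 1)      /* are both values tagged? */

int main(void)
{
    VALUE a = LONG2FIX(20), b = LONG2FIX(22);
    VALUE fake_obj = 0x7f00a8;                /* heap pointers are word-aligned: bit 0 == 0 */

    if (FIXNUM_2_P(a, b))                     /* fast path an opt_plus-style insn would take */
        printf("fixnum fast path: %ld\n", FIX2LONG(a) + FIX2LONG(b));

    printf("fixnum + object -> %s\n",
           FIXNUM_2_P(a, fake_obj) ? "fast path" : "full method call");
    return 0;
}

In the real VM this check is paired with BASIC_OP_UNREDEFINED_P, so the inline path is taken only while Integer#+ and friends have not been redefined.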
data/ext/free/ruby_headers/vm_opts.h ADDED
@@ -0,0 +1,51 @@
+/*-*-c-*-*/
+/**********************************************************************
+
+  vm_opts.h - VM optimize option
+
+  $Author$
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+
+#ifndef RUBY_VM_OPTS_H
+#define RUBY_VM_OPTS_H
+
+/* Compile options.
+ * You can change these options at runtime by VM::CompileOption.
+ * Following definitions are default values.
+ */
+
+#define OPT_TRACE_INSTRUCTION        1
+#define OPT_TAILCALL_OPTIMIZATION    0
+#define OPT_PEEPHOLE_OPTIMIZATION    1
+#define OPT_SPECIALISED_INSTRUCTION  1
+#define OPT_INLINE_CONST_CACHE       1
+
+
+/* Build Options.
+ * You can't change these options at runtime.
+ */
+
+/* C compiler depend */
+#define OPT_DIRECT_THREADED_CODE     1
+#define OPT_TOKEN_THREADED_CODE      0
+#define OPT_CALL_THREADED_CODE       0
+
+/* VM running option */
+#define OPT_CHECKED_RUN              1
+#define OPT_INLINE_METHOD_CACHE      1
+#define OPT_BLOCKINLINING            0
+
+/* architecture independent, affects generated code */
+#define OPT_OPERANDS_UNIFICATION     0
+#define OPT_INSTRUCTIONS_UNIFICATION 0
+#define OPT_UNIFY_ALL_COMBINATION    0
+#define OPT_STACK_CACHING            0
+
+/* misc */
+#define SUPPORT_JOKE                 0
+
+#endif /* RUBY_VM_OPTS_H */
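vm_opts.h is the knob file the other two headers read: the compile options can be overridden per instruction sequence at runtime (the comment points at VM::CompileOption), while the build options, including the threaded-code choice consumed by vm_exec.h, are fixed when the extension is compiled. A quick way to see what the defaults select is a sketch like the one below; it is not shipped with the gem, and the relative include path is an assumption about where these headers sit under ext/free.

/* Sketch (not from the gem): report which dispatch strategy the defaults
 * in vm_opts.h would make vm_exec.h pick. */
#include <stdio.h>
#include "ruby_headers/vm_opts.h"   /* assumed path relative to ext/free */

int main(void)
{
#if OPT_CALL_THREADED_CODE
    const char *dispatch = "call threaded code";
#elif OPT_TOKEN_THREADED_CODE || OPT_DIRECT_THREADED_CODE
    const char *dispatch = "token/direct threaded code (gcc computed goto)";
#else
    const char *dispatch = "plain switch dispatch";
#endif
    printf("dispatch: %s\n", dispatch);
    printf("tailcall optimization default: %d\n", OPT_TAILCALL_OPTIMIZATION);
    printf("stack caching: %d\n", OPT_STACK_CACHING);
    return 0;
}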
data/lib/1.8/free.so CHANGED
Binary file

data/lib/1.9/free.so CHANGED
Binary file