debase-ruby_core_source 0.9.8 → 0.9.9
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -1
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/addr2line.h +21 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ccan/build_assert/build_assert.h +40 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ccan/check_type/check_type.h +63 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ccan/container_of/container_of.h +142 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ccan/list/list.h +773 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ccan/str/str.h +16 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/constant.h +50 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/dln.h +51 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/encindex.h +67 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/eval_intern.h +304 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/gc.h +114 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/id.h +211 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/id_table.h +30 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/insns.inc +110 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/insns_info.inc +776 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/internal.h +1407 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/iseq.h +249 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/known_errors.inc +746 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/method.h +213 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/node.h +520 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/node_name.inc +208 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/opt_sc.inc +758 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/optinsn.inc +83 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/optunifs.inc +127 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/parse.h +185 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/probes_helper.h +43 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/regenc.h +237 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/regint.h +987 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/regparse.h +367 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/revision.h +1 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/ruby_atomic.h +233 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/siphash.h +48 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/symbol.h +108 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/thread_pthread.h +54 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/thread_win32.h +36 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/timev.h +42 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/transcode_data.h +139 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/version.h +55 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm.inc +3414 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_call_iseq_optimized.inc +212 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_core.h +1240 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_debug.h +37 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_exec.h +182 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_insnhelper.h +227 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_opts.h +57 -0
- data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vmtc.inc +108 -0
- data/lib/debase/ruby_core_source/version.rb +1 -1
- metadata +48 -2
data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_debug.h
@@ -0,0 +1,37 @@
+/**********************************************************************
+
+  vm_debug.h - YARV Debug function interface
+
+  $Author: nobu $
+  created at: 04/08/25 02:33:49 JST
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+#ifndef RUBY_DEBUG_H
+#define RUBY_DEBUG_H
+
+#include "ruby/ruby.h"
+#include "node.h"
+
+RUBY_SYMBOL_EXPORT_BEGIN
+
+#define dpv(h,v) ruby_debug_print_value(-1, 0, (h), (v))
+#define dp(v)    ruby_debug_print_value(-1, 0, "", (v))
+#define dpi(i)   ruby_debug_print_id(-1, 0, "", (i))
+#define dpn(n)   ruby_debug_print_node(-1, 0, "", (n))
+
+#define bp() ruby_debug_breakpoint()
+
+VALUE ruby_debug_print_value(int level, int debug_level, const char *header, VALUE v);
+ID ruby_debug_print_id(int level, int debug_level, const char *header, ID id);
+NODE *ruby_debug_print_node(int level, int debug_level, const char *header, const NODE *node);
+int ruby_debug_print_indent(int level, int debug_level, int indent_level);
+void ruby_debug_breakpoint(void);
+void ruby_debug_gc_check_func(void);
+void ruby_set_debug_option(const char *str);
+
+RUBY_SYMBOL_EXPORT_END
+
+#endif /* RUBY_DEBUG_H */
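The macros above are thin wrappers around the exported ruby_debug_print_* functions, meant to be dropped into CRuby C code while hacking on the VM. A minimal sketch of how they could be used, assuming a file that includes vm_debug.h; the surrounding function and its arguments are hypothetical, not part of this header:

/* Hypothetical debugging helper inside CRuby source that includes vm_debug.h;
 * `obj` and `mid` are assumed to be in scope at the call site. */
static void
inspect_call_site(VALUE obj, ID mid)
{
    dpv("receiver", obj);  /* expands to ruby_debug_print_value(-1, 0, "receiver", obj) */
    dpi(mid);              /* expands to ruby_debug_print_id(-1, 0, "", mid) */
    bp();                  /* expands to ruby_debug_breakpoint(), a convenient gdb breakpoint target */
}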
data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_exec.h
@@ -0,0 +1,182 @@
+/**********************************************************************
+
+  vm.h -
+
+  $Author: ko1 $
+  created at: 04/01/01 16:56:59 JST
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+#ifndef RUBY_VM_EXEC_H
+#define RUBY_VM_EXEC_H
+
+typedef long OFFSET;
+typedef unsigned long lindex_t;
+typedef VALUE GENTRY;
+typedef rb_iseq_t *ISEQ;
+
+#ifdef __GCC__
+/* TODO: machine dependent prefetch instruction */
+#define PREFETCH(pc)
+#else
+#define PREFETCH(pc)
+#endif
+
+#if VMDEBUG > 0
+#define debugs printf
+#define DEBUG_ENTER_INSN(insn) \
+  rb_vmdebug_debug_print_pre(th, GET_CFP(),GET_PC());
+
+#if OPT_STACK_CACHING
+#define SC_REGS() , reg_a, reg_b
+#else
+#define SC_REGS()
+#endif
+
+#define DEBUG_END_INSN() \
+  rb_vmdebug_debug_print_post(th, GET_CFP() SC_REGS());
+
+#else
+
+#define debugs
+#define DEBUG_ENTER_INSN(insn)
+#define DEBUG_END_INSN()
+#endif
+
+#define throwdebug if(0)printf
+/* #define throwdebug printf */
+
+/************************************************/
+#if defined(DISPATCH_XXX)
+error !
+/************************************************/
+#elif OPT_CALL_THREADED_CODE
+
+#define LABEL(x)  insn_func_##x
+#define ELABEL(x)
+#define LABEL_PTR(x) &LABEL(x)
+
+#define INSN_ENTRY(insn) \
+static rb_control_frame_t * \
+  FUNC_FASTCALL(LABEL(insn))(rb_thread_t *th, rb_control_frame_t *reg_cfp) {
+
+#define END_INSN(insn) return reg_cfp;}
+
+#define NEXT_INSN() return reg_cfp;
+
+/************************************************/
+#elif OPT_TOKEN_THREADED_CODE || OPT_DIRECT_THREADED_CODE
+/* threaded code with gcc */
+
+#define LABEL(x)  INSN_LABEL_##x
+#define ELABEL(x) INSN_ELABEL_##x
+#define LABEL_PTR(x) &&LABEL(x)
+
+#define INSN_ENTRY_SIG(insn)
+
+
+#define INSN_DISPATCH_SIG(insn)
+
+#define INSN_ENTRY(insn) \
+  LABEL(insn): \
+  INSN_ENTRY_SIG(insn); \
+
+/* dispatcher */
+#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) && __GNUC__ == 3
+#define DISPATCH_ARCH_DEPEND_WAY(addr) \
+  __asm__ __volatile__("jmp *%0;\t# -- inserted by vm.h\t[length = 2]" : : "r" (addr))
+
+#else
+#define DISPATCH_ARCH_DEPEND_WAY(addr) \
+  /* do nothing */
+
+#endif
+
+
+/**********************************/
+#if OPT_DIRECT_THREADED_CODE
+
+/* for GCC 3.4.x */
+#define TC_DISPATCH(insn) \
+  INSN_DISPATCH_SIG(insn); \
+  goto *(void const *)GET_CURRENT_INSN(); \
+  ;
+
+#else
+/* token threaded code */
+
+#define TC_DISPATCH(insn) \
+  DISPATCH_ARCH_DEPEND_WAY(insns_address_table[GET_CURRENT_INSN()]); \
+  INSN_DISPATCH_SIG(insn); \
+  goto *insns_address_table[GET_CURRENT_INSN()]; \
+  rb_bug("tc error");
+
+
+#endif /* DISPATCH_DIRECT_THREADED_CODE */
+
+#define END_INSN(insn) \
+  DEBUG_END_INSN(); \
+  TC_DISPATCH(insn);
+
+#define INSN_DISPATCH() \
+  TC_DISPATCH(__START__) \
+  {
+
+#define END_INSNS_DISPATCH() \
+  rb_bug("unknown insn: %"PRIdVALUE, GET_CURRENT_INSN()); \
+  } /* end of while loop */ \
+
+#define NEXT_INSN() TC_DISPATCH(__NEXT_INSN__)
+
+/************************************************/
+#else /* no threaded code */
+/* most common method */
+
+#define INSN_ENTRY(insn) \
+  case BIN(insn):
+
+#define END_INSN(insn) \
+  DEBUG_END_INSN(); \
+  break;
+
+
+#define INSN_DISPATCH() \
+  while (1) { \
+    switch (GET_CURRENT_INSN()) {
+
+#define END_INSNS_DISPATCH() \
+      default: \
+        SDR(); \
+        rb_bug("unknown insn: %ld", GET_CURRENT_INSN()); \
+      } /* end of switch */ \
+    } /* end of while loop */ \
+
+#define NEXT_INSN() goto first
+
+#endif
+
+#define VM_SP_CNT(th, sp) ((sp) - (th)->stack)
+
+#if OPT_CALL_THREADED_CODE
+#define THROW_EXCEPTION(exc) do { \
+    th->errinfo = (VALUE)(exc); \
+    return 0; \
+} while (0)
+#else
+#define THROW_EXCEPTION(exc) return (VALUE)(exc)
+#endif
+
+#define SCREG(r) (reg_##r)
+
+#define VM_DEBUG_STACKOVERFLOW 0
+
+#if VM_DEBUG_STACKOVERFLOW
+#define CHECK_VM_STACK_OVERFLOW_FOR_INSN(cfp, margin) \
+    WHEN_VM_STACK_OVERFLOWED(cfp, (cfp)->sp, margin) vm_stack_overflow_for_insn()
+#else
+#define CHECK_VM_STACK_OVERFLOW_FOR_INSN(cfp, margin)
+#endif
+
+#endif /* RUBY_VM_EXEC_H */
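vm_exec.h only selects a dispatch strategy; the instruction bodies themselves come from the generated vm.inc. The following self-contained toy (not CRuby code; the opcodes and bytecode program are invented for illustration) contrasts the two strategies the header switches between: the portable switch-in-a-loop of the "no threaded code" branch and the computed-goto dispatch behind TC_DISPATCH, which needs GCC/Clang label-as-value support.

/* Toy stack machine showing switch dispatch vs. computed-goto dispatch. */
#include <stdio.h>

enum { OP_PUSH1, OP_ADD, OP_PRINT, OP_HALT };

static void
run_switch(const int *pc)
{
    int stack[16], *sp = stack;
    while (1) {                        /* cf. INSN_DISPATCH() */
        switch (*pc++) {               /* cf. GET_CURRENT_INSN() */
          case OP_PUSH1: *sp++ = 1;              break;  /* cf. END_INSN: break */
          case OP_ADD:   sp--; sp[-1] += sp[0];  break;
          case OP_PRINT: printf("%d\n", sp[-1]); break;
          case OP_HALT:  return;
        }
    }
}

static void
run_threaded(const int *pc)
{
    /* label-address table, same idea as insns_address_table */
    static void *const table[] = { &&L_PUSH1, &&L_ADD, &&L_PRINT, &&L_HALT };
    int stack[16], *sp = stack;
#define DISPATCH() goto *table[*pc++]  /* cf. TC_DISPATCH() */
    DISPATCH();
  L_PUSH1: *sp++ = 1;              DISPATCH();
  L_ADD:   sp--; sp[-1] += sp[0];  DISPATCH();
  L_PRINT: printf("%d\n", sp[-1]); DISPATCH();
  L_HALT:  return;
#undef DISPATCH
}

int
main(void)
{
    const int prog[] = { OP_PUSH1, OP_PUSH1, OP_ADD, OP_PRINT, OP_HALT };
    run_switch(prog);    /* prints 2 */
    run_threaded(prog);  /* prints 2; requires GCC/Clang computed goto */
    return 0;
}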
data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_insnhelper.h
@@ -0,0 +1,227 @@
+/**********************************************************************
+
+  insnhelper.h - helper macros to implement each instructions
+
+  $Author: ko1 $
+  created at: 04/01/01 15:50:34 JST
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+#ifndef RUBY_INSNHELPER_H
+#define RUBY_INSNHELPER_H
+
+extern VALUE ruby_vm_const_missing_count;
+
+#if VM_COLLECT_USAGE_DETAILS
+#define COLLECT_USAGE_INSN(insn) vm_collect_usage_insn(insn)
+#define COLLECT_USAGE_OPERAND(insn, n, op) vm_collect_usage_operand((insn), (n), ((VALUE)(op)))
+
+#define COLLECT_USAGE_REGISTER(reg, s) vm_collect_usage_register((reg), (s))
+#else
+#define COLLECT_USAGE_INSN(insn) /* none */
+#define COLLECT_USAGE_OPERAND(insn, n, op) /* none */
+#define COLLECT_USAGE_REGISTER(reg, s) /* none */
+#endif
+
+/**********************************************************/
+/* deal with stack */
+/**********************************************************/
+
+#define PUSH(x) (SET_SV(x), INC_SP(1))
+#define TOPN(n) (*(GET_SP()-(n)-1))
+#define POPN(n) (DEC_SP(n))
+#define POP() (DEC_SP(1))
+#define STACK_ADDR_FROM_TOP(n) (GET_SP()-(n))
+
+#define GET_TOS() (tos) /* dummy */
+
+/**********************************************************/
+/* deal with registers */
+/**********************************************************/
+
+#define REG_CFP (reg_cfp)
+#define REG_PC (REG_CFP->pc)
+#define REG_SP (REG_CFP->sp)
+#define REG_EP (REG_CFP->ep)
+
+#define RESTORE_REGS() do { \
+    REG_CFP = th->cfp; \
+} while (0)
+
+#define REG_A reg_a
+#define REG_B reg_b
+
+enum vm_regan_regtype {
+    VM_REGAN_PC = 0,
+    VM_REGAN_SP = 1,
+    VM_REGAN_EP = 2,
+    VM_REGAN_CFP = 3,
+    VM_REGAN_SELF = 4,
+    VM_REGAN_ISEQ = 5,
+};
+enum vm_regan_acttype {
+    VM_REGAN_ACT_GET = 0,
+    VM_REGAN_ACT_SET = 1,
+};
+
+#if VM_COLLECT_USAGE_DETAILS
+#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) \
+  (COLLECT_USAGE_REGISTER((VM_REGAN_##a), (VM_REGAN_ACT_##b)), (v))
+#else
+#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) (v)
+#endif
+
+/* PC */
+#define GET_PC() (COLLECT_USAGE_REGISTER_HELPER(PC, GET, REG_PC))
+#define SET_PC(x) (REG_PC = (COLLECT_USAGE_REGISTER_HELPER(PC, SET, (x))))
+#define GET_CURRENT_INSN() (*GET_PC())
+#define GET_OPERAND(n) (GET_PC()[(n)])
+#define ADD_PC(n) (SET_PC(REG_PC + (n)))
+#define JUMP(dst) (REG_PC += (dst))
+
+/* frame pointer, environment pointer */
+#define GET_CFP() (COLLECT_USAGE_REGISTER_HELPER(CFP, GET, REG_CFP))
+#define GET_EP() (COLLECT_USAGE_REGISTER_HELPER(EP, GET, REG_EP))
+#define SET_EP(x) (REG_EP = (COLLECT_USAGE_REGISTER_HELPER(EP, SET, (x))))
+#define GET_LEP() (VM_EP_LEP(GET_EP()))
+
+/* SP */
+#define GET_SP() (COLLECT_USAGE_REGISTER_HELPER(SP, GET, REG_SP))
+#define SET_SP(x) (REG_SP = (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
+#define INC_SP(x) (REG_SP += (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
+#define DEC_SP(x) (REG_SP -= (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
+#define SET_SV(x) (*GET_SP() = (x))
+/* set current stack value as x */
+
+#define GET_SP_COUNT() (REG_SP - th->stack)
+
+/* instruction sequence C struct */
+#define GET_ISEQ() (GET_CFP()->iseq)
+
+/**********************************************************/
+/* deal with variables */
+/**********************************************************/
+
+#define GET_PREV_EP(ep) ((VALUE *)((ep)[0] & ~0x03))
+
+#define GET_GLOBAL(entry) rb_gvar_get((struct rb_global_entry*)(entry))
+#define SET_GLOBAL(entry, val) rb_gvar_set((struct rb_global_entry*)(entry), (val))
+
+#define GET_CONST_INLINE_CACHE(dst) ((IC) * (GET_PC() + (dst) + 2))
+
+/**********************************************************/
+/* deal with values */
+/**********************************************************/
+
+#define GET_SELF() (COLLECT_USAGE_REGISTER_HELPER(SELF, GET, GET_CFP()->self))
+
+/**********************************************************/
+/* deal with control flow 2: method/iterator */
+/**********************************************************/
+
+#define CALL_METHOD(calling, ci, cc) do { \
+    VALUE v = (*(cc)->call)(th, GET_CFP(), (calling), (ci), (cc)); \
+    if (v == Qundef) { \
+        RESTORE_REGS(); \
+        NEXT_INSN(); \
+    } \
+    else { \
+        val = v; \
+    } \
+} while (0)
+
+/* set fastpath when cached method is *NOT* protected
+ * because inline method cache does not care about receiver.
+ */
+
+#ifndef OPT_CALL_FASTPATH
+#define OPT_CALL_FASTPATH 1
+#endif
+
+#if OPT_CALL_FASTPATH
+#define CI_SET_FASTPATH(cc, func, enabled) do { \
+    if (LIKELY(enabled)) ((cc)->call = (func)); \
+} while (0)
+#else
+#define CI_SET_FASTPATH(ci, func, enabled) /* do nothing */
+#endif
+
+#define GET_BLOCK_PTR() ((rb_block_t *)(GC_GUARDED_PTR_REF(GET_LEP()[0])))
+
+/**********************************************************/
+/* deal with control flow 3: exception */
+/**********************************************************/
+
+
+/**********************************************************/
+/* others */
+/**********************************************************/
+
+/* optimize insn */
+#define FIXNUM_2_P(a, b) ((a) & (b) & 1)
+#if USE_FLONUM
+#define FLONUM_2_P(a, b) (((((a)^2) | ((b)^2)) & 3) == 0) /* (FLONUM_P(a) && FLONUM_P(b)) */
+#else
+#define FLONUM_2_P(a, b) 0
+#endif
+
+#ifndef USE_IC_FOR_SPECIALIZED_METHOD
+#define USE_IC_FOR_SPECIALIZED_METHOD 1
+#endif
+
+#define CALL_SIMPLE_METHOD(recv_) do { \
+    struct rb_calling_info calling; \
+    calling.blockptr = NULL; \
+    calling.argc = ci->orig_argc; \
+    vm_search_method(ci, cc, calling.recv = (recv_)); \
+    CALL_METHOD(&calling, ci, cc); \
+} while (0)
+
+#define NEXT_CLASS_SERIAL() (++ruby_vm_class_serial)
+#define GET_GLOBAL_METHOD_STATE() (ruby_vm_global_method_state)
+#define INC_GLOBAL_METHOD_STATE() (++ruby_vm_global_method_state)
+#define GET_GLOBAL_CONSTANT_STATE() (ruby_vm_global_constant_state)
+#define INC_GLOBAL_CONSTANT_STATE() (++ruby_vm_global_constant_state)
+
+static VALUE make_no_method_exception(VALUE exc, const char *format,
+                                      VALUE obj, int argc, const VALUE *argv);
+
+static inline struct vm_throw_data *
+THROW_DATA_NEW(VALUE val, rb_control_frame_t *cf, VALUE st)
+{
+    return (struct vm_throw_data *)rb_imemo_new(imemo_throw_data, val, (VALUE)cf, st, 0);
+}
+
+static inline void
+THROW_DATA_CATCH_FRAME_SET(struct vm_throw_data *obj, const rb_control_frame_t *cfp)
+{
+    obj->catch_frame = cfp;
+}
+
+static inline void
+THROW_DATA_STATE_SET(struct vm_throw_data *obj, int st)
+{
+    obj->throw_state = (VALUE)st;
+}
+
+static inline VALUE
+THROW_DATA_VAL(const struct vm_throw_data *obj)
+{
+    return obj->throw_obj;
+}
+
+static inline const rb_control_frame_t *
+THROW_DATA_CATCH_FRAME(const struct vm_throw_data *obj)
+{
+    return obj->catch_frame;
+}
+
+static int
+THROW_DATA_STATE(const struct vm_throw_data *obj)
+{
+    return (int)obj->throw_state;
+}
+
+#endif /* RUBY_INSNHELPER_H */
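FIXNUM_2_P and FLONUM_2_P above are pure tag-bit tests: CRuby stores a Fixnum as (n << 1) | 1, so ANDing two operands with 1 tells the optimized arithmetic instructions whether both are immediates, and a flonum carries the bit pattern 10 in its low two bits. A self-contained toy reproducing the two checks (not CRuby code; all TOY_* names are invented for the demo):

#include <stdio.h>
#include <stdint.h>

typedef uintptr_t VALUE;

#define TOY_INT2FIX(n)       (((VALUE)(n) << 1) | 1)        /* Fixnum tagging */
#define TOY_FIXNUM_2_P(a, b) ((a) & (b) & 1)                /* cf. FIXNUM_2_P */
#define TOY_FLONUM_2_P(a, b) (((((a)^2) | ((b)^2)) & 3) == 0) /* cf. FLONUM_2_P */

int
main(void)
{
    VALUE three = TOY_INT2FIX(3);   /* ...0111: tagged Fixnum */
    VALUE four  = TOY_INT2FIX(4);   /* ...1001: tagged Fixnum */
    VALUE ptr   = (VALUE)&three;    /* aligned pointer: low bits are 00 */

    printf("%d\n", (int)TOY_FIXNUM_2_P(three, four)); /* 1: both operands are Fixnums */
    printf("%d\n", (int)TOY_FIXNUM_2_P(three, ptr));  /* 0: one operand is a heap object */
    printf("%d\n", (int)TOY_FLONUM_2_P(three, four)); /* 0: neither carries the flonum tag */
    return 0;
}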
data/lib/debase/ruby_core_source/ruby-2.3.4-p301/vm_opts.h
@@ -0,0 +1,57 @@
+/*-*-c-*-*/
+/**********************************************************************
+
+  vm_opts.h - VM optimize option
+
+  $Author: nobu $
+
+  Copyright (C) 2004-2007 Koichi Sasada
+
+**********************************************************************/
+
+
+#ifndef RUBY_VM_OPTS_H
+#define RUBY_VM_OPTS_H
+
+/* Compile options.
+ * You can change these options at runtime by VM::CompileOption.
+ * Following definitions are default values.
+ */
+
+#define OPT_TRACE_INSTRUCTION 1
+#define OPT_TAILCALL_OPTIMIZATION 0
+#define OPT_PEEPHOLE_OPTIMIZATION 1
+#define OPT_SPECIALISED_INSTRUCTION 1
+#define OPT_INLINE_CONST_CACHE 1
+#define OPT_FROZEN_STRING_LITERAL 0
+#define OPT_DEBUG_FROZEN_STRING_LITERAL 0
+
+/* Build Options.
+ * You can't change these options at runtime.
+ */
+
+/* C compiler dependent */
+#define OPT_DIRECT_THREADED_CODE 1
+#define OPT_TOKEN_THREADED_CODE 0
+#define OPT_CALL_THREADED_CODE 0
+
+/* VM running option */
+#define OPT_CHECKED_RUN 1
+#define OPT_INLINE_METHOD_CACHE 1
+#define OPT_GLOBAL_METHOD_CACHE 1
+#define OPT_BLOCKINLINING 0
+
+/* architecture independent, affects generated code */
+#define OPT_OPERANDS_UNIFICATION 1
+#define OPT_INSTRUCTIONS_UNIFICATION 0
+#define OPT_UNIFY_ALL_COMBINATION 0
+#define OPT_STACK_CACHING 0
+
+/* misc */
+#define SUPPORT_JOKE 0
+
+#ifndef VM_COLLECT_USAGE_DETAILS
+#define VM_COLLECT_USAGE_DETAILS 0
+#endif
+
+#endif /* RUBY_VM_OPTS_H */
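The `#ifndef VM_COLLECT_USAGE_DETAILS` block at the end of vm_opts.h shows the pattern these option headers rely on: a macro gets a default unless the build predefines it, and dependent code is compiled in or out accordingly. A minimal sketch of that pattern under toy names (not CRuby; override at build time with something like -DTOY_COLLECT_DETAILS=1):

#include <stdio.h>

#ifndef TOY_COLLECT_DETAILS        /* cf. VM_COLLECT_USAGE_DETAILS: default unless predefined */
#define TOY_COLLECT_DETAILS 0
#endif

#if TOY_COLLECT_DETAILS
#define COLLECT(insn) printf("insn %d\n", (insn))
#else
#define COLLECT(insn)              /* compiled out, zero runtime overhead */
#endif

int
main(void)
{
    COLLECT(42);                               /* no-op in the default build */
    printf("details: %d\n", TOY_COLLECT_DETAILS);
    return 0;
}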