vm_insnhelper.h — from Ruby 3.2.2p53 (2023-03-30, revision e51014f9c05aa65cbf203442d37fef7c12390015)
1#ifndef RUBY_INSNHELPER_H
2#define RUBY_INSNHELPER_H
3/**********************************************************************
4
5 insnhelper.h - helper macros to implement each instructions
6
7 $Author$
8 created at: 04/01/01 15:50:34 JST
9
10 Copyright (C) 2004-2007 Koichi Sasada
11
12**********************************************************************/
13
MJIT_SYMBOL_EXPORT_BEGIN

/* VM-wide counters/serials, exported so the JITs can reference them.
 * Update sites live elsewhere; from the names: const_missing hits,
 * constant-cache invalidations/misses, and the global class-variable
 * state serial (see GET/INC_GLOBAL_CVAR_STATE below) — confirm in vm.c. */
RUBY_EXTERN VALUE ruby_vm_const_missing_count;
RUBY_EXTERN rb_serial_t ruby_vm_constant_cache_invalidations;
RUBY_EXTERN rb_serial_t ruby_vm_constant_cache_misses;
RUBY_EXTERN rb_serial_t ruby_vm_global_cvar_state;

MJIT_SYMBOL_EXPORT_END

/* Usage-collection hooks for instructions, operands and registers.
 * Full collectors are wired in only under VM_COLLECT_USAGE_DETAILS;
 * under YJIT_STATS only instruction usage is recorded; otherwise the
 * hooks expand to nothing and cost nothing at runtime. */
#if VM_COLLECT_USAGE_DETAILS
#define COLLECT_USAGE_INSN(insn) vm_collect_usage_insn(insn)
#define COLLECT_USAGE_OPERAND(insn, n, op) vm_collect_usage_operand((insn), (n), ((VALUE)(op)))

#define COLLECT_USAGE_REGISTER(reg, s) vm_collect_usage_register((reg), (s))
#elif YJIT_STATS
/* for --yjit-stats */
#define COLLECT_USAGE_INSN(insn) rb_yjit_collect_vm_usage_insn(insn)
#define COLLECT_USAGE_OPERAND(insn, n, op) /* none */
#define COLLECT_USAGE_REGISTER(reg, s) /* none */
#else
#define COLLECT_USAGE_INSN(insn) /* none */
#define COLLECT_USAGE_OPERAND(insn, n, op) /* none */
#define COLLECT_USAGE_REGISTER(reg, s) /* none */
#endif
38
/**********************************************************/
/* deal with stack */
/**********************************************************/

/* Value-stack helpers, built on the SP accessors defined below. */
#define PUSH(x) (SET_SV(x), INC_SP(1))        /* push x onto the stack */
#define TOPN(n) (*(GET_SP()-(n)-1))           /* n-th entry from the top (0 = topmost) */
#define POPN(n) (DEC_SP(n))                   /* discard n entries */
#define POP() (DEC_SP(1))                     /* discard the top entry */
#define STACK_ADDR_FROM_TOP(n) (GET_SP()-(n)) /* address of the slot n below SP */

/**********************************************************/
/* deal with registers */
/**********************************************************/

/* The VM "registers" are fields of the current control frame; `reg_cfp`
 * is expected to be in scope at every expansion site. */
#define VM_REG_CFP (reg_cfp)        /* current control frame pointer */
#define VM_REG_PC (VM_REG_CFP->pc)  /* program counter */
#define VM_REG_SP (VM_REG_CFP->sp)  /* value-stack pointer */
#define VM_REG_EP (VM_REG_CFP->ep)  /* environment pointer */

/* Re-load the cached CFP from the execution context `ec` after a call
 * that may have pushed or popped frames. */
#define RESTORE_REGS() do { \
    VM_REG_CFP = ec->cfp; \
} while (0)
61
#if VM_COLLECT_USAGE_DETAILS
/* Register identifiers passed to vm_collect_usage_register(). */
enum vm_regan_regtype {
    VM_REGAN_PC = 0,
    VM_REGAN_SP = 1,
    VM_REGAN_EP = 2,
    VM_REGAN_CFP = 3,
    VM_REGAN_SELF = 4,
    VM_REGAN_ISEQ = 5
};
/* Whether the recorded access is a read (GET) or a write (SET). */
enum vm_regan_acttype {
    VM_REGAN_ACT_GET = 0,
    VM_REGAN_ACT_SET = 1
};

/* Record a register access via COLLECT_USAGE_REGISTER, then yield the
 * value `v` unchanged (comma expression). */
#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) \
  (COLLECT_USAGE_REGISTER((VM_REGAN_##a), (VM_REGAN_ACT_##b)), (v))
#else
#define COLLECT_USAGE_REGISTER_HELPER(a, b, v) (v)
#endif

/* PC */
#define GET_PC() (COLLECT_USAGE_REGISTER_HELPER(PC, GET, VM_REG_PC))
#define SET_PC(x) (VM_REG_PC = (COLLECT_USAGE_REGISTER_HELPER(PC, SET, (x))))
#define GET_CURRENT_INSN() (*GET_PC())        /* opcode word at PC */
#define GET_OPERAND(n) (GET_PC()[(n)])        /* n-th operand word after PC */
#define ADD_PC(n) (SET_PC(VM_REG_PC + (n)))   /* advance PC by n words */
#define JUMP(dst) (SET_PC(VM_REG_PC + (dst))) /* relative jump */

/* frame pointer, environment pointer */
#define GET_CFP() (COLLECT_USAGE_REGISTER_HELPER(CFP, GET, VM_REG_CFP))
#define GET_EP() (COLLECT_USAGE_REGISTER_HELPER(EP, GET, VM_REG_EP))
#define SET_EP(x) (VM_REG_EP = (COLLECT_USAGE_REGISTER_HELPER(EP, SET, (x))))
/* local EP, obtained by walking the env chain via VM_EP_LEP */
#define GET_LEP() (VM_EP_LEP(GET_EP()))

/* SP */
#define GET_SP() (COLLECT_USAGE_REGISTER_HELPER(SP, GET, VM_REG_SP))
#define SET_SP(x) (VM_REG_SP = (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
#define INC_SP(x) (VM_REG_SP += (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
#define DEC_SP(x) (VM_REG_SP -= (COLLECT_USAGE_REGISTER_HELPER(SP, SET, (x))))
/* also performs the Ractor ownership check on the stored value */
#define SET_SV(x) (*GET_SP() = rb_ractor_confirm_belonging(x))
  /* set current stack value as x */

/* instruction sequence C struct */
#define GET_ISEQ() (GET_CFP()->iseq)
106
/**********************************************************/
/* deal with variables */
/**********************************************************/

/* Follow an environment's special-value slot to the previous EP.
 * The low two tag bits of the stored specval are masked off. */
#define GET_PREV_EP(ep) ((VALUE *)((ep)[VM_ENV_DATA_INDEX_SPECVAL] & ~0x03))

/**********************************************************/
/* deal with values */
/**********************************************************/

/* `self` of the current control frame. */
#define GET_SELF() (COLLECT_USAGE_REGISTER_HELPER(SELF, GET, GET_CFP()->self))

/**********************************************************/
/* deal with control flow 2: method/iterator */
/**********************************************************/

/* set fastpath when cached method is *NOT* protected
 * because inline method cache does not care about receiver.
 */
126
127static inline void
128CC_SET_FASTPATH(const struct rb_callcache *cc, vm_call_handler func, bool enabled)
129{
130 if (LIKELY(enabled)) {
131 vm_cc_call_set(cc, func);
132 }
133}
134
/* Block handler stored in the special-value slot of the local EP. */
#define GET_BLOCK_HANDLER() (GET_LEP()[VM_ENV_DATA_INDEX_SPECVAL])

/**********************************************************/
/* deal with control flow 3: exception */
/**********************************************************/


/**********************************************************/
/* deal with stack canary */
/**********************************************************/

/* In VM_CHECK_MODE builds, SETUP_CANARY plants vm_stack_canary in the
 * next stack slot (when `cond` holds; otherwise it clears the slot),
 * and CHECK_CANARY later verifies the canary is intact, reporting a
 * clobbered slot via rb_vm_canary_is_found_dead().
 * NOTE: SETUP_CANARY declares a local `canary`, so both macros must be
 * expanded in the same scope, SETUP before CHECK. */
#if VM_CHECK_MODE > 0
#define SETUP_CANARY(cond) \
    VALUE *canary = 0; \
    if (cond) { \
        canary = GET_SP(); \
        SET_SV(vm_stack_canary); \
    } \
    else {\
        SET_SV(Qfalse); /* cleanup */ \
    }
#define CHECK_CANARY(cond, insn) \
    if (cond) { \
        if (*canary == vm_stack_canary) { \
            *canary = Qfalse; /* cleanup */ \
        } \
        else { \
            rb_vm_canary_is_found_dead(insn, *canary); \
        } \
    }
#else
/* Release builds: keep the if/else shape so surrounding syntax parses. */
#define SETUP_CANARY(cond) if (cond) {} else {}
#define CHECK_CANARY(cond, insn) if (cond) {(void)(insn);}
#endif
169
/**********************************************************/
/* others */
/**********************************************************/

#ifndef MJIT_HEADER
/* Re-dispatch the current specialized instruction as a plain
 * opt_send_without_block: adjust PC by the width difference between the
 * current instruction and opt_send_without_block, then jump to the
 * original handler.  `leaf`, INSN_ATTR and DISPATCH_ORIGINAL_INSN are
 * presumably provided by the instruction-body expansion site — confirm
 * against vm.inc / insns.def. */
#define CALL_SIMPLE_METHOD() do { \
    rb_snum_t x = leaf ? INSN_ATTR(width) : 0; \
    rb_snum_t y = attr_width_opt_send_without_block(0); \
    rb_snum_t z = x - y; \
    ADD_PC(z); \
    DISPATCH_ORIGINAL_INSN(opt_send_without_block); \
} while (0)
#endif

/* Global class-variable state serial (see extern above); incrementing
 * it invalidates cached class-variable lookups. */
#define GET_GLOBAL_CVAR_STATE() (ruby_vm_global_cvar_state)
#define INC_GLOBAL_CVAR_STATE() (++ruby_vm_global_cvar_state)
186
187static inline struct vm_throw_data *
188THROW_DATA_NEW(VALUE val, const rb_control_frame_t *cf, int st)
189{
190 struct vm_throw_data *obj = (struct vm_throw_data *)rb_imemo_new(imemo_throw_data, val, (VALUE)cf, 0, 0);
191 obj->throw_state = st;
192 return obj;
193}
194
195static inline VALUE
196THROW_DATA_VAL(const struct vm_throw_data *obj)
197{
198 VM_ASSERT(THROW_DATA_P(obj));
199 return obj->throw_obj;
200}
201
202static inline const rb_control_frame_t *
203THROW_DATA_CATCH_FRAME(const struct vm_throw_data *obj)
204{
205 VM_ASSERT(THROW_DATA_P(obj));
206 return obj->catch_frame;
207}
208
209static inline int
210THROW_DATA_STATE(const struct vm_throw_data *obj)
211{
212 VM_ASSERT(THROW_DATA_P(obj));
213 return obj->throw_state;
214}
215
216static inline int
217THROW_DATA_CONSUMED_P(const struct vm_throw_data *obj)
218{
219 VM_ASSERT(THROW_DATA_P(obj));
220 return obj->flags & THROW_DATA_CONSUMED;
221}
222
/* Record `cfp` as the catch frame of this throw-data object. */
static inline void
THROW_DATA_CATCH_FRAME_SET(struct vm_throw_data *obj, const rb_control_frame_t *cfp)
{
    VM_ASSERT(THROW_DATA_P(obj));
    obj->catch_frame = cfp;
}
229
/* Overwrite the throw state (TAG_* value) of this throw-data object. */
static inline void
THROW_DATA_STATE_SET(struct vm_throw_data *obj, int st)
{
    VM_ASSERT(THROW_DATA_P(obj));
    obj->throw_state = st;
}
236
237static inline void
238THROW_DATA_CONSUMED_SET(struct vm_throw_data *obj)
239{
240 if (THROW_DATA_P(obj) &&
241 THROW_DATA_STATE(obj) == TAG_BREAK) {
242 obj->flags |= THROW_DATA_CONSUMED;
243 }
244}
245
/* Call-info flag tests: what kind of arguments does this call site pass? */
#define IS_ARGS_SPLAT(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT)     /* positional splat (*args) */
#define IS_ARGS_KEYWORD(ci) (vm_ci_flag(ci) & VM_CALL_KWARG)        /* literal keywords (k: v) */
#define IS_ARGS_KW_SPLAT(ci) (vm_ci_flag(ci) & VM_CALL_KW_SPLAT)    /* keyword splat (**kw) */
#define IS_ARGS_KW_OR_KW_SPLAT(ci) (vm_ci_flag(ci) & (VM_CALL_KWARG | VM_CALL_KW_SPLAT))
#define IS_ARGS_KW_SPLAT_MUT(ci) (vm_ci_flag(ci) & VM_CALL_KW_SPLAT_MUT) /* mutable **kw */
251
252static inline bool
253vm_call_cacheable(const struct rb_callinfo *ci, const struct rb_callcache *cc)
254{
255 return (vm_ci_flag(ci) & VM_CALL_FCALL) ||
256 METHOD_ENTRY_VISI(vm_cc_cme(cc)) != METHOD_VISI_PROTECTED;
257}
258/* If this returns true, an optimized function returned by `vm_call_iseq_setup_func`
259 can be used as a fastpath. */
260static inline bool
261vm_call_iseq_optimizable_p(const struct rb_callinfo *ci, const struct rb_callcache *cc)
262{
263 return !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) && vm_call_cacheable(ci, cc);
264}
265
266#endif /* RUBY_INSNHELPER_H */
Cross-references from the generated documentation:
- RUBY_EXTERN — declaration of externally visible global variables (dllexport.h:47)
- THROW_DATA — throw-data imemo object (imemo.h:62)
- VALUE — uintptr_t; the type that represents a Ruby object (value.h:40)