Ruby 3.2.1p31 (2023-02-08 revision 31819e82c88c6f8ecfaeb162519bfa26a14b21fd)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#include "constant.h"
16#include "debug_counter.h"
17#include "internal.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
26#include "variable.h"
27
28/* finish iseq array */
29#include "insns.inc"
30#ifndef MJIT_HEADER
31#include "insns_info.inc"
32#endif
33
34extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
35extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
36extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
37extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
38 int argc, const VALUE *argv, int priv);
39
40#ifndef MJIT_HEADER
41static const struct rb_callcache vm_empty_cc;
42static const struct rb_callcache vm_empty_cc_for_super;
43#endif
44
45/* control stack frame */
46
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
48
/* Allocate a fresh copy of a pre-built "special" exception object
 * (e.g. the shared stack-overflow exception) so callers can attach
 * per-occurrence state (such as a backtrace) without mutating the
 * template stored in the VM. */
MJIT_STATIC VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    /* rb_class_real skips singleton/iclass wrappers of exc's class */
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
56
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the pre-allocated SystemStackError on `ec`.  When `setup` is
 * true, duplicate the shared exception object and attach a backtrace;
 * when false, raise the bare template (used when even building a
 * backtrace may be unsafe).  Does not return. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
72
NORETURN(static void vm_stackoverflow(void));
#ifdef MJIT_HEADER
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
#endif

/* Convenience wrapper: raise SystemStackError (with backtrace) on the
 * current execution context. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
83
NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Handle a detected machine-stack overflow on `ec`.
 * - During GC this is unrecoverable: abort via rb_bug.
 * - `crit` (critical): raise the "fatal" variant without building a
 *   backtrace, since we may have very little stack left.
 * - Otherwise raise SystemStackError; a backtrace is only attempted
 *   when an alternate signal stack is available (USE_SIGALTSTACK),
 *   because backtrace construction itself needs stack space. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
102
103
104#if VM_CHECK_MODE > 0
/* Debug-mode check: is `klass` a class usable as a callable method
 * entry's defined_class?  Under VM_CHECK_MODE >= 2 also accepts
 * modules / module-backed iclasses and verifies the ancestry chain
 * reaches BasicObject; otherwise only checks non-NULL. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        /* only iclasses wrapping a module count; otherwise check ancestry */
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through */
      case T_MODULE:
        return TRUE;
    }
    /* walk the superclass chain; a real class must reach BasicObject */
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
129
130static int
131callable_method_entry_p(const rb_callable_method_entry_t *cme)
132{
133 if (cme == NULL) {
134 return TRUE;
135 }
136 else {
137 VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
138
139 if (callable_class_p(cme->defined_class)) {
140 return TRUE;
141 }
142 else {
143 return FALSE;
144 }
145 }
146}
147
/* Debug-mode frame-shape validator, invoked from vm_check_frame for a
 * single frame-magic kind.  Verifies that the specval / cref_or_me
 * slots and the iseq match what that frame kind requires:
 *   req_block — frame must be LOCAL (specval is a block handler)
 *   req_me    — cref_or_me must be a method entry (imemo_ment)
 *   req_cref  — cref_or_me must be a cref (imemo_cref)
 *   is_cframe — frame must NOT have a normal iseq (C-level frame)
 * Any violation is a VM bug and aborts via rb_bug. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry a method entry */
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                /* lambda/ifunc frames are allowed to carry a ment instead */
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) //argument error
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
207
/* Debug-mode entry point: dispatch on the frame magic and apply the
 * per-kind requirements table below (BLK = needs block handler,
 * ME = needs method entry, CREF = needs cref, CFRAME = C-level frame).
 * Unknown magics abort via rb_bug. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
238
239static VALUE vm_stack_canary; /* Initialized later */
240static bool vm_stack_canary_was_born = false;
241
242#ifndef MJIT_HEADER
/* Detect instructions that clobber the VM stack canary.  If sp[0]
 * still holds the canary value after an instruction ran, that
 * instruction wrote past its declared stack usage; report which
 * instruction and iseq did it, then abort. */
MJIT_FUNC_EXPORTED void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch?  */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    /* identify the offending instruction from the original (decoded) iseq */
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run from a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
290#endif
291#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
292
293#else
294#define vm_check_canary(ec, sp)
295#define vm_check_frame(a, b, c, d)
296#endif /* VM_CHECK_MODE > 0 */
297
298#if USE_DEBUG_COUNTER
/* Debug-counter bookkeeping for frame pushes: records the Ruby/C
 * transition kind (R2R, R2C, C2R, C2C) relative to the previous frame,
 * and a per-frame-magic push counter.  Compiled only when
 * USE_DEBUG_COUNTER is enabled. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
344#else
345#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
346#endif
347
348STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
349STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
350STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
351
/* Push a new control frame onto ec's VM stack.
 * Lays out the value stack for the new frame:
 *   [locals (local_size slots, Qnil-filled)] [cref_or_me] [specval] [type]
 * so that ep points at the `type` slot (ep[-2]/ep[-1]/ep[0] hold
 * cref_or_me/specval/flags respectively — see the STATIC_ASSERTs above),
 * then initializes *cfp and makes it current.  The statement order here
 * is load-bearing: the stack slots must be written before ec->cfp is
 * published. */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
        .__bp__ = sp, /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
407
/* Pop the current control frame WITHOUT checking pending interrupts
 * (contrast with vm_pop_frame below, which runs RUBY_VM_CHECK_INTS). */
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}
418
419/* return TRUE if the frame is finished */
/* Pop `cfp` and return nonzero iff it carried VM_FRAME_FLAG_FINISH
 * (i.e. the interpreter loop for this invocation is done).  The flags
 * are read from ep before the frame is discarded. */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}
433
/* Public wrapper: pop the current frame (interrupt-checking variant). */
MJIT_STATIC void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
439
// it pushes pseudo-frame with fname filename.
// A dummy iseq (allocated in a GC-managed tmpbuf, returned to the
// caller to keep it alive) carries only the path, so backtraces during
// file loading show `fname`.  Caller must retain the returned tmpbuf
// until the frame is popped.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    /* one allocation holds both the iseq struct and its body */
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, //const rb_iseq_t *iseq,
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
                  ec->cfp->self, // VALUE self,
                  VM_BLOCK_HANDLER_NONE, // VALUE specval,
                  Qfalse, // VALUE cref_or_me,
                  NULL, // const VALUE *pc,
                  ec->cfp->sp, // VALUE *sp,
                  0, // int local_size,
                  0); // int stack_max

    return tmpbuf;
}
467
468/* method dispatch */
469static inline VALUE
470rb_arity_error_new(int argc, int min, int max)
471{
472 VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
473 if (min == max) {
474 /* max is not needed */
475 }
476 else if (max == UNLIMITED_ARGUMENTS) {
477 rb_str_cat_cstr(err_mess, "+");
478 }
479 else {
480 rb_str_catf(err_mess, "..%d", max);
481 }
482 rb_str_cat_cstr(err_mess, ")");
483 return rb_exc_new3(rb_eArgError, err_mess);
484}
485
/* Raise an ArgumentError for an arity mismatch (does not return). */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
491
492/* lvar */
493
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Slow path for writing into an escaped (heap-allocated) env: the env
 * object must be re-remembered for the GC write barrier before the raw
 * slot write, after which the WB_REQUIRED flag can be cleared. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
505
/* Write local variable slot ep[index] = v.  Fast path is a plain
 * stack write; envs flagged WB_REQUIRED (escaped to the heap) take the
 * write-barrier-aware slow path. */
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}
517
/* Convert a block handler to a Proc object (or nil when no block was
 * given).  iseq/ifunc handlers are materialized via rb_vm_make_proc;
 * symbols via Symbol#to_proc; proc handlers are returned as-is. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
538
539/* svar */
540
541#if VM_CHECK_MODE > 0
/* Debug-mode check: the ep[ME_CREF] slot may legitimately hold an
 * svar, cref, or method entry imemo; anything else aborts. */
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
558#endif
559
/* Fetch the special-variable (svar) slot for a local ep.  For the
 * root lep (or a NULL lep) the per-context root_svar is used instead
 * of the env slot.  The returned value may be Qfalse (no svar yet), a
 * cref, or a ment — callers must check imemo_type. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
576
/* Store `svar` into the slot read by lep_svar, using the env write
 * path (write-barrier aware) or RB_OBJ_WRITE for the root slot. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
589
/* Read a special variable: $_ (VM_SVAR_LASTLINE), $~ (VM_SVAR_BACKREF),
 * or an "extra" svar stored in the overflow array (key >=
 * VM_SVAR_EXTRA_START).  Returns nil when no svar exists yet. */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    /* slot may hold a cref/ment instead of an svar — treat as "no svar" */
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
614
/* Allocate an empty svar imemo; `obj` preserves whatever previously
 * occupied the slot (a cref or Qfalse) as the svar's cref_or_me. */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
620
/* Write a special variable (see lep_svar_get for the key scheme),
 * lazily creating the svar — and its overflow array for "extra" keys —
 * on first write. */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        /* wrap the existing slot content (cref or Qfalse) in a new svar */
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
647
/* Implements getspecial: fetch a special variable.
 *   type == 0          — plain svar lookup by `key`.
 *   type & 1           — named backref: (type >> 1) is the character
 *                        ('&', '`', '\'', '+') selecting $&, $`, $', $+.
 *   otherwise          — numbered backref: (type >> 1) is the group
 *                        index for $1, $2, ...                        */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
683
684PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
686check_method_entry(VALUE obj, int can_be_svar)
687{
688 if (obj == Qfalse) return NULL;
689
690#if VM_CHECK_MODE > 0
691 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
692#endif
693
694 switch (imemo_type(obj)) {
695 case imemo_ment:
696 return (rb_callable_method_entry_t *)obj;
697 case imemo_cref:
698 return NULL;
699 case imemo_svar:
700 if (can_be_svar) {
701 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
702 }
703 default:
704#if VM_CHECK_MODE > 0
705 rb_bug("check_method_entry: svar should not be there:");
706#endif
707 return NULL;
708 }
709}
710
711MJIT_STATIC const rb_callable_method_entry_t *
712rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
713{
714 const VALUE *ep = cfp->ep;
716
717 while (!VM_ENV_LOCAL_P(ep)) {
718 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
719 ep = VM_ENV_PREV_EP(ep);
720 }
721
722 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
723}
724
725static const rb_iseq_t *
726method_entry_iseqptr(const rb_callable_method_entry_t *me)
727{
728 switch (me->def->type) {
729 case VM_METHOD_TYPE_ISEQ:
730 return me->def->body.iseq.iseqptr;
731 default:
732 return NULL;
733 }
734}
735
736static rb_cref_t *
737method_entry_cref(const rb_callable_method_entry_t *me)
738{
739 switch (me->def->type) {
740 case VM_METHOD_TYPE_ISEQ:
741 return me->def->body.iseq.cref;
742 default:
743 return NULL;
744 }
745}
746
747#if VM_CHECK_MODE == 0
748PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
749#endif
750static rb_cref_t *
751check_cref(VALUE obj, int can_be_svar)
752{
753 if (obj == Qfalse) return NULL;
754
755#if VM_CHECK_MODE > 0
756 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
757#endif
758
759 switch (imemo_type(obj)) {
760 case imemo_ment:
761 return method_entry_cref((rb_callable_method_entry_t *)obj);
762 case imemo_cref:
763 return (rb_cref_t *)obj;
764 case imemo_svar:
765 if (can_be_svar) {
766 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
767 }
768 default:
769#if VM_CHECK_MODE > 0
770 rb_bug("check_method_entry: svar should not be there:");
771#endif
772 return NULL;
773 }
774}
775
/* Resolve the cref for `ep` by walking the env chain to the local env;
 * only the local env's slot may be an svar.  May return NULL. */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
788
789static int
790is_cref(const VALUE v, int can_be_svar)
791{
792 if (RB_TYPE_P(v, T_IMEMO)) {
793 switch (imemo_type(v)) {
794 case imemo_cref:
795 return TRUE;
796 case imemo_svar:
797 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
798 default:
799 break;
800 }
801 }
802 return FALSE;
803}
804
/* Does the env chain for `ep` carry its cref directly (as opposed to
 * only via a method entry)?  Used to decide whether cref duplication
 * (vm_cref_replace_with_duplicated_cref) is applicable. */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
814
/* If *vptr holds a cref (directly, or inside an svar when can_be_svar),
 * replace it in place with a duplicate and return the duplicate.
 * `parent` is the GC owner of the slot (the env or svar object) so the
 * write barrier fires; when the env is still on the stack (parent == 0)
 * a raw write suffices.  Returns NULL when no cref was found here. */
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}
846
/* Walk the env chain for `ep`, duplicate the first cref found, install
 * the duplicate in place of the original, and return it.  Requires
 * that the chain carries a cref directly (vm_env_cref_by_cref).  The
 * env object is passed as the GC owner only when it has escaped to the
 * heap (VM_ENV_ESCAPED_P); stack envs need no write barrier. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
868
869static rb_cref_t *
870vm_get_cref(const VALUE *ep)
871{
872 rb_cref_t *cref = vm_env_cref(ep);
873
874 if (cref != NULL) {
875 return cref;
876 }
877 else {
878 rb_bug("vm_get_cref: unreachable");
879 }
880}
881
/* Public wrapper around vm_get_cref. */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}
887
/* Cref of the nearest Ruby-level frame of `ec`, or NULL when there is
 * no Ruby-level frame at all. */
static rb_cref_t *
vm_ec_cref(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (cfp == NULL) {
        return NULL;
    }
    return vm_get_cref(cfp->ep);
}
898
/* Return the cref usable as a constant-cache key for `ep`, or NULL if
 * the lexical scope contains no singleton or cloned class anywhere in
 * the chain (in which case the cache needs no cref qualification). */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
916
/* Rebuild the cref chain starting at `cref`, substituting `new_klass`
 * for the first occurrence of `old_klass`; the rebuilt (copied) chain
 * is linked into *new_cref_ptr.  Entries after the substitution point
 * are shared with the original chain (vm_cref_new_use_prev reuses the
 * remaining links). */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
935
/* Create a new cref for entering `klass` (class/module body, eval,
 * etc.), chained onto the cref of `ep` — or, when ep is NULL, onto the
 * cref of the Ruby-level caller frame of `ec`. */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
954
/* The innermost lexical class/module for `ep` (cbase) — the target of
 * def, undef, alias, etc. */
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}
962
963static inline VALUE
964vm_get_const_base(const VALUE *ep)
965{
966 const rb_cref_t *cref = vm_get_cref(ep);
967
968 while (cref) {
969 if (!CREF_PUSHED_BY_EVAL(cref)) {
970 return CREF_CLASS_FOR_DEFINITION(cref);
971 }
972 cref = CREF_NEXT(cref);
973 }
974
975 return Qundef;
976}
977
/* Raise TypeError unless `klass` can act as a constant namespace
 * (i.e. is a Class or Module). */
static inline void
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}
985
/* Warn when a definition would land on a refinement module's origin
 * (the outer class/module) rather than the refinement itself. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
993
/* Identity today: `cfp` is unused and `klass` is returned as-is.  Kept
 * as a hook point (and for call-site symmetry) where an iclass
 * translation could be applied. */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
999
/* Core constant lookup.
 * When orig_klass is nil and allow_nil is set, resolve `id` lexically:
 * walk the cref chain from the current frame, checking each non-eval
 * scope's constant table (handling autoload in place), then fall back
 * to an ancestry search from the root scope (or self's class).
 * Otherwise look up `id` under orig_klass as a qualified (public-only)
 * constant reference.
 * When is_defined is nonzero, only report existence (for `defined?`)
 * instead of returning the value. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        /* lexical walk stops before the outermost scope (handled below) */
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        /* undef value means an autoload entry; `am` guards
                         * against retrying the same class forever */
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
1083
/* Public wrapper for vm_get_ev_const (value lookup, not `defined?`). */
VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
1089
/* Resolve a constant path A::B::C given as an idNULL-prefixed /
 * zero-terminated ID array: a leading idNULL means an absolute path
 * (::A), starting from Object; only the first segment may resolve
 * lexically (allow_nil). */
static inline VALUE
vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
{
    VALUE val = Qnil;
    int idx = 0;
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
    return val;
}
1108
1109
/* Find the class used for class-variable access from `cref`: skip
 * eval-pushed, singleton, and nil scopes outward.  Raises when the
 * search ends at toplevel (if top_level_raise) or yields no class. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
1135
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
/* Record a resolved ivar (shape_id, index) pair in the appropriate
 * inline cache: the call cache for attr_reader/attr_writer sites
 * (is_attr), or the instruction's IVC otherwise. */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
1147
1148#define ractor_incidental_shareable_p(cond, val) \
1149 (!(cond) || rb_ractor_shareable_p(val))
1150#define ractor_object_incidental_shareable_p(obj, val) \
1151 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1152
1153#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1154
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
/* Read instance variable `id` from `obj` through the shape-based inline
 * caches.  `is_attr` selects which cache is consulted/refilled: the call
 * cache `cc` (attr_reader path) or the iseq inline cache `ic`
 * (getinstancevariable).  Returns the ivar value, or Qnil when unset. */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE * ivar_list;

    if (SPECIAL_CONST_P(obj)) {
        /* special constants never carry instance variables */
        return Qnil;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    /* locate the ivar storage array and the object's current shape id */
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_IVPTR(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                // For two reasons we can only use the fast path on the main
                // ractor.
                // First, only the main ractor is allowed to set ivars on classes
                // and modules. So we can skip locking.
                // Second, other ractors need to check the shareability of the
                // values returned from the class ivars.
                goto general_path;
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif

            break;
        }
      default:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_ivtbl *ivtbl;
            /* NOTE(review): return value ignored — FL_EXIVAR appears to
             * imply the generic ivar table exists; confirm
             * rb_gen_ivtbl_get cannot fail here. */
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->ivptr;
        }
        else {
            /* no extended ivars at all */
            return Qnil;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        /* cache hit: index is valid for this shape */
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return Qnil;
        }

        val = ivar_list[index];
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { // cache miss case
#if RUBY_DEBUG
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
#endif

        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            /* too-complex objects store ivars in an id table, not an array */
            if (!rb_id_table_lookup(ROBJECT_IV_HASH(obj), id, &val)) {
                val = Qnil;
            }
        }
        else {
            if (rb_shape_get_iv_index(shape, id, &index)) {
                // This fills in the cache with the shared cache object.
                // "ent" is the shared cache object
                fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);

                // We fetched the ivar list above
                val = ivar_list[index];
                RUBY_ASSERT(!UNDEF_P(val));
            }
            else {
                /* ivar not present for this shape: cache the miss too */
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = Qnil;
            }
        }

    }

    RUBY_ASSERT(!UNDEF_P(val));

    return val;

general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    /* uncached fallback: rb_attr_get does not warn on unset ivars */
    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
1302
1303static void
1304populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
1305{
1306 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1307
1308 // Cache population code
1309 if (is_attr) {
1310 vm_cc_attr_index_set(cc, index, next_shape_id);
1311 }
1312 else {
1313 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1314 }
1315}
1316
1317ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
1318NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
1319NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
1320
1321static VALUE
1322vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
1323{
1324#if OPT_IC_FOR_IVAR
1325 switch (BUILTIN_TYPE(obj)) {
1326 case T_OBJECT:
1327 {
1329
1330 attr_index_t index = rb_obj_ivar_set(obj, id, val);
1331
1332 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1333
1334 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1335 populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
1336 }
1337
1338 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1339 return val;
1340 }
1341 case T_CLASS:
1342 case T_MODULE:
1343 break;
1344 default:
1345 {
1346 rb_ivar_set(obj, id, val);
1347 shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
1348 rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
1349 attr_index_t index;
1350
1351 if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
1352 if (index >= MAX_IVARS) {
1353 rb_raise(rb_eArgError, "too many instance variables");
1354 }
1355
1356 populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
1357 }
1358 else {
1359 rb_bug("didn't find the id\n");
1360 }
1361
1362 return val;
1363 }
1364 }
1365#endif
1366 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1367 return rb_ivar_set(obj, id, val);
1368}
1369
/* Out-of-line entry for the ivar-write slow path when driven by the
 * iseq inline cache (setinstancevariable). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}

/* Out-of-line entry for the ivar-write slow path when driven by the
 * attr_writer call cache. */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
1381
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
/* Cached ivar write for objects that keep their ivars in the generic
 * (side-table) storage, i.e. non-T_OBJECT types.  Succeeds when the
 * cached shape either matches exactly (overwrite) or is the parent of a
 * cached transition for `id` (new ivar).  Returns `val` on success,
 * Qundef to send the caller to the slow path. */
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    // Cache hit case
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

        // Just get the IV table
        rb_gen_ivtbl_get(obj, 0, &ivtbl);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        /* cached transition: writing `id` moves obj from the parent shape
         * to dest_shape */
        rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
        shape_id_t source_shape_id = dest_shape->parent_id;

        if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
            /* grow the generic ivar table so slot `index` exists */
            ivtbl = rb_ensure_generic_iv_list_size(obj, index + 1);
#if SHAPE_IN_BASIC_FLAGS
            RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
            ivtbl->shape_id = dest_shape_id;
#endif
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    VALUE *ptr = ivtbl->ivptr;

    /* write barrier: obj may be old, val may be young */
    RB_OBJ_WRITE(obj, &ptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
1429
1430static inline VALUE
1431vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1432{
1433#if OPT_IC_FOR_IVAR
1434 switch (BUILTIN_TYPE(obj)) {
1435 case T_OBJECT:
1436 {
1437 VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
1438
1439 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1440 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1441
1442 if (LIKELY(shape_id == dest_shape_id)) {
1443 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1444 VM_ASSERT(!rb_ractor_shareable_p(obj));
1445 }
1446 else if (dest_shape_id != INVALID_SHAPE_ID) {
1447 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1448 shape_id_t source_shape_id = dest_shape->parent_id;
1449
1450 if (shape_id == source_shape_id && dest_shape->edge_name == id) {
1451 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1452
1453 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1454
1455 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
1456 RUBY_ASSERT(index < dest_shape->capacity);
1457 }
1458 else {
1459 break;
1460 }
1461 }
1462 else {
1463 break;
1464 }
1465
1466 VALUE *ptr = ROBJECT_IVPTR(obj);
1467
1468 RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
1469 RB_OBJ_WRITE(obj, &ptr[index], val);
1470
1471 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1472 return val;
1473 }
1474 break;
1475 case T_CLASS:
1476 case T_MODULE:
1477 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1478 default:
1479 break;
1480 }
1481
1482 return Qundef;
1483#endif /* OPT_IC_FOR_IVAR */
1484}
1485
/* After resolving class variable `id` starting from `klass`, refresh the
 * inline cache `ic` with the class that actually defines the cvar and
 * the current global cvar generation.  Returns the cvar's value.
 * rb_cvar_find() raises NameError if the cvar is undefined. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    /* normalize an include-wrapper back to the real module/class */
    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;
    /* stamp with the current generation; any later cvar mutation bumps
     * the global state and invalidates this entry */
    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();

    ic->entry = ent;
    /* write-barrier: the iseq now references the cached class */
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);

    return cvar_value;
}
1514
/* Read class variable `id` in the current lexical scope.  The inline
 * cache short-circuits the cref walk when its generation matches and we
 * are on the main Ractor; otherwise resolve the class from the cref and
 * refresh the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;

    /* main-Ractor-only: cached reads skip the shareability checks that
     * other Ractors must go through */
    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    cref = vm_get_cref(GET_EP());
    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, ic);
}
1534
/* Exported wrapper so JIT-compiled code can call the inlined
 * vm_getclassvariable(). */
VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}
1540
1541static inline void
1542vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
1543{
1544 const rb_cref_t *cref;
1545
1546 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
1547 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1548
1549 rb_class_ivar_set(ic->entry->class_value, id, val);
1550 return;
1551 }
1552
1553 cref = vm_get_cref(GET_EP());
1554 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1555
1556 rb_cvar_set(klass, id, val);
1557
1558 update_classvariable_cache(iseq, klass, id, ic);
1559}
1560
/* Exported wrapper so JIT-compiled code can call the inlined
 * vm_setclassvariable(). */
void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
1566
/* getinstancevariable instruction body: read `id` from `obj` using the
 * iseq inline cache (no attr call cache on this path). */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);
}
1572
1573static inline void
1574vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
1575{
1576 if (RB_SPECIAL_CONST_P(obj)) {
1578 return;
1579 }
1580
1581 shape_id_t dest_shape_id;
1582 attr_index_t index;
1583 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1584
1585 if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
1586 switch (BUILTIN_TYPE(obj)) {
1587 case T_OBJECT:
1588 case T_CLASS:
1589 case T_MODULE:
1590 break;
1591 default:
1592 if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
1593 return;
1594 }
1595 }
1596 vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
1597 }
1598}
1599
/* Exported wrapper so JIT-compiled code can call the inlined
 * vm_setinstancevariable(). */
void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}
1605
1606static VALUE
1607vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
1608{
1609 /* continue throw */
1610
1611 if (FIXNUM_P(err)) {
1612 ec->tag->state = FIX2INT(err);
1613 }
1614 else if (SYMBOL_P(err)) {
1615 ec->tag->state = TAG_THROW;
1616 }
1617 else if (THROW_DATA_P(err)) {
1618 ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
1619 }
1620 else {
1621 ec->tag->state = TAG_RAISE;
1622 }
1623 return err;
1624}
1625
/* Begin a throw (break/retry/return/...): locate the control frame that
 * should catch `state`, validate that the jump target still exists on
 * the stack (raising LocalJumpError otherwise), and build the
 * THROW_DATA that the VM's exception machinery will unwind with. */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        /* climb out of non-block iseqs to find the block this break
         * belongs to */
        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            /* search the live frames for one whose catch table has a
             * CATCH_TYPE_BREAK entry covering the current pc */
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            /* the defining frame is gone: break has nowhere to land */
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        /* walk live frames looking for the method (or lambda) frame this
         * return should unwind to */
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL:
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
1803
1804static VALUE
1805vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1806 rb_num_t throw_state, VALUE throwobj)
1807{
1808 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1809 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1810
1811 if (state != 0) {
1812 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1813 }
1814 else {
1815 return vm_throw_continue(ec, throwobj);
1816 }
1817}
1818
/* expandarray instruction body: push `num` elements of `ary` (converted
 * via to_ary when needed; a non-array counts as a 1-element array) onto
 * the VM stack starting at sp-1.  flag & 0x01: additionally push a splat
 * array with the remainder; flag & 0x02: "post" order (used for
 * trailing/post arguments), filling from the array's tail. */
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* not convertible: treat the original object as [obj] */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            /* not enough elements: pad with nil first */
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            /* leading remainder becomes the splat array */
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                /* ran out of elements: fill the rest with nil */
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    /* keep `ary` (and thus `ptr`) alive across the raw pointer reads */
    RB_GC_GUARD(ary);
}
1884
1885static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
1886
1887static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
1888
1889static struct rb_class_cc_entries *
1890vm_ccs_create(VALUE klass, const rb_callable_method_entry_t *cme)
1891{
1892 struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
1893#if VM_CHECK_MODE > 0
1894 ccs->debug_sig = ~(VALUE)ccs;
1895#endif
1896 ccs->capa = 0;
1897 ccs->len = 0;
1898 RB_OBJ_WRITE(klass, &ccs->cme, cme);
1899 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
1900 ccs->entries = NULL;
1901 return ccs;
1902}
1903
1904static void
1905vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
1906{
1907 if (! vm_cc_markable(cc)) {
1908 return;
1909 }
1910 else if (! vm_ci_markable(ci)) {
1911 return;
1912 }
1913
1914 if (UNLIKELY(ccs->len == ccs->capa)) {
1915 if (ccs->capa == 0) {
1916 ccs->capa = 1;
1917 ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
1918 }
1919 else {
1920 ccs->capa *= 2;
1921 REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
1922 }
1923 }
1924 VM_ASSERT(ccs->len < ccs->capa);
1925
1926 const int pos = ccs->len++;
1927 RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
1928 RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);
1929
1930 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
1931 // for tuning
1932 // vm_mtbl_dump(klass, 0);
1933 }
1934}
1935
1936#if VM_CHECK_MODE > 0
/* Debug helper (VM_CHECK_MODE only): print a ccs and each of its
 * (ci, cc) entries to stderr. */
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);
    }
}
1946
/* Debug helper (VM_CHECK_MODE only): assert the structural invariants of
 * a ccs — every entry's ci targets `mid` and every cc belongs to `klass`
 * and to the ccs's cme.  Always returns TRUE (assertion-driven). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo  *ci = ccs->entries[i].ci;
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
    }
    return TRUE;
}
1965#endif
1966
1967#ifndef MJIT_HEADER
1968
1969static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
1970
/* Look up (or create) the call cache for calling `ci` on `klass`.
 * Consults the per-class cc table first; on miss, resolves the callable
 * method entry, creates a new cc, and stores it for reuse.  Returns the
 * shared empty cc when the method is undefined/not found.  Caller must
 * hold the VM lock (see rb_vm_search_method_slowpath). */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                /* stale entry list: drop it and fall through to re-resolve */
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo  *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        /* valid ccs exists but had no matching ci: reuse its cme */
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cme);
            rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
        }
    }

    /* may substitute a specialized cme for calls with matching arity */
    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
2067
/* Method lookup slow path: resolve the call cache for `ci` on `klass`
 * under the VM lock.  Exported for JIT-compiled code. */
MJIT_FUNC_EXPORTED const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
2090#endif
2091
/* Inline-cache miss handler: run the global slow-path lookup, store the
 * result back into the call data's inline cache (with a write barrier
 * against `cd_owner`, usually the iseq), and account the kind of miss in
 * the debug counters. */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc =
#ifdef MJIT_HEADER
        rb_vm_empty_cc();
#else
        &vm_empty_cc;
#endif
    /* the empty cc is a static singleton; no barrier needed for it */
    if (cd_owner && cc != empty_cc) RB_OBJ_WRITTEN(cd_owner, Qundef, cc);

#if USE_DEBUG_COUNTER
    if (old_cc == empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
2138
2139#ifndef MJIT_HEADER
2140ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
2141#endif
/* Method lookup fast path: use the call data's inline-cached cc when it
 * is still valid for `klass` (class match + cme not invalidated);
 * otherwise fall back to the slow path. */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
2167
/* Resolve the call cache for calling cd->ci on `recv`'s class. */
static const struct rb_callcache *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    VALUE klass = CLASS_OF(recv);
    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

    return vm_search_method_fastpath(cd_owner, cd, klass);
}
2177
/* Function-pointer type used to compare C method implementations.  With
 * transparent_union support, any of the fixed-arity cfunc signatures can
 * be passed without a cast while comparisons use the `anyargs` member;
 * otherwise it degrades to a plain ANYARGS pointer. */
#if __has_attribute(transparent_union)
typedef union {
    VALUE (*anyargs)(ANYARGS);
    VALUE (*f00)(VALUE);
    VALUE (*f01)(VALUE, VALUE);
    VALUE (*f02)(VALUE, VALUE, VALUE);
    VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
    VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f08)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f09)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
} __attribute__((__transparent_union__)) cfunc_type;
#else
typedef VALUE (*cfunc_type)(ANYARGS);
#endif
2202
2203static inline int
2204check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
2205{
2206 if (! me) {
2207 return false;
2208 }
2209 else {
2210 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2211 VM_ASSERT(callable_method_entry_p(me));
2212 VM_ASSERT(me->def);
2213 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2214 return false;
2215 }
2216 else {
2217#if __has_attribute(transparent_union)
2218 return me->def->body.cfunc.func == func.anyargs;
2219#else
2220 return me->def->body.cfunc.func == func;
2221#endif
2222 }
2223 }
2224}
2225
/* True iff calling cd->ci on `recv` would dispatch to the C function
 * `func` (i.e. the method has not been redefined). */
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(vm_cc_cme(cc), func);
}
2233
/* True when `==` (BOP_EQ) has not been redefined for basic type `t`. */
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2235
2236static inline bool
2237FIXNUM_2_P(VALUE a, VALUE b)
2238{
2239 /* FIXNUM_P(a) && FIXNUM_P(b)
2240 * == ((a & 1) && (b & 1))
2241 * == a & b & 1 */
2242 SIGNED_VALUE x = a;
2243 SIGNED_VALUE y = b;
2244 SIGNED_VALUE z = x & y & 1;
2245 return z == 1;
2246}
2247
/* True iff both operands are Flonums (immediate floats), tested
 * branchlessly; always false when flonums are disabled. */
static inline bool
FLONUM_2_P(VALUE a, VALUE b)
{
#if USE_FLONUM
    /* FLONUM_P(a) && FLONUM_P(b)
     * == ((a & 3) == 2) && ((b & 3) == 2)
     * == ! ((a ^ 2) | (b ^ 2) & 3)
     */
    SIGNED_VALUE x = a;
    SIGNED_VALUE y = b;
    SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
    return !z;
#else
    return false;
#endif
}
2264
/* Type-specialized fast paths for `==`: identity comparison for
 * Fixnum/Flonum/static-Symbol pairs, value comparison for Float and
 * String — each used only while `==` is unredefined for that class.
 * Returns Qundef when no fast path applies (caller must dispatch). */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* other special constants: no fast path */
        //
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        /* old MSVC: NaN == NaN evaluated incorrectly; check explicitly */
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
2308
2309static VALUE
2310opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
2311{
2312 VM_ASSERT(cd_owner != NULL);
2313
2314 VALUE val = opt_equality_specialized(recv, obj);
2315 if (!UNDEF_P(val)) return val;
2316
2317 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2318 return Qundef;
2319 }
2320 else {
2321 return RBOOL(recv == obj);
2322 }
2323}
2324
2325#undef EQ_UNREDEFINED_P
2326
2327#ifndef MJIT_HEADER
2328
2329static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2330NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2331
2332static VALUE
2333opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
2334{
2335 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2336
2337 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2338 return RBOOL(recv == obj);
2339 }
2340 else {
2341 return Qundef;
2342 }
2343}
2344
2345static VALUE
2346opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2347{
2348 VALUE val = opt_equality_specialized(recv, obj);
2349 if (!UNDEF_P(val)) {
2350 return val;
2351 }
2352 else {
2353 return opt_equality_by_mid_slowpath(recv, obj, mid);
2354 }
2355}
2356
/* Optimized rb_equal(): returns Qundef when not foldable, meaning the
 * caller must dispatch #== for real. */
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}
2362
/* Optimized #eql? check: returns Qundef when not foldable, meaning the
 * caller must dispatch #eql? for real. */
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
2368
2369#endif // MJIT_HEADER
2370
2371extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2372extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2373
/* Implements the checkmatch instruction.
 *   WHEN:   the pattern itself is returned (its truthiness decides).
 *   RESCUE: the pattern must be a class/module, then proceeds like CASE.
 *   CASE:   dispatches `pattern === target`, honoring refinements. */
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
2392
2393
#if MSC_VERSION_BEFORE(1300)
/* Old MSVC compared NaN incorrectly; force any NaN comparison to Qfalse. */
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b) /* do nothing */
#endif
2399
2400static inline VALUE
2401double_cmp_lt(double a, double b)
2402{
2403 CHECK_CMP_NAN(a, b);
2404 return RBOOL(a < b);
2405}
2406
2407static inline VALUE
2408double_cmp_le(double a, double b)
2409{
2410 CHECK_CMP_NAN(a, b);
2411 return RBOOL(a <= b);
2412}
2413
2414static inline VALUE
2415double_cmp_gt(double a, double b)
2416{
2417 CHECK_CMP_NAN(a, b);
2418 return RBOOL(a > b);
2419}
2420
2421static inline VALUE
2422double_cmp_ge(double a, double b)
2423{
2424 CHECK_CMP_NAN(a, b);
2425 return RBOOL(a >= b);
2426}
2427
/* Return the frame's base pointer (start of the local-variable area).
 * The recomputation from the previous frame's sp is disabled (#if 0);
 * the cached cfp->__bp__ is returned instead. */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
#if 0 // we may optimize and use this once we confirm it does not spoil performance on JIT.
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                    (long)(cfp->bp_check - GET_EC()->vm_stack),
                    (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
#else
    return cfp->__bp__;
#endif
}
2457
2458/* method call processes with call_info */
2459
2460#include "vm_args.c"
2461
2462static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2463ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2464static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2465static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2466static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2467static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2468static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2469
2470static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2471
/* ccf handler: tail-call into an ISeq starting at pc 0 (no optional
 * parameter prologue to skip). */
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}
2479
2480static VALUE
2481vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2482{
2483 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2484
2485 const struct rb_callcache *cc = calling->cc;
2486 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2487 int param = ISEQ_BODY(iseq)->param.size;
2488 int local = ISEQ_BODY(iseq)->local_table_size;
2489 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2490}
2491
2492MJIT_STATIC bool
2493rb_simple_iseq_p(const rb_iseq_t *iseq)
2494{
2495 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2496 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2497 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2498 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2499 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2500 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2501 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2502}
2503
2504MJIT_FUNC_EXPORTED bool
2505rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2506{
2507 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2508 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2509 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2510 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2511 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2512 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2513 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2514}
2515
2516MJIT_FUNC_EXPORTED bool
2517rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2518{
2519 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2520 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2521 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2522 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2523 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2524 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2525}
2526
// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
MJIT_STATIC bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    /* splat args and keyword(-splat) args both require caller-side setup */
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}
2533
2534
/* Normalize the caller's arguments on the VM stack before callee setup:
 * expand a splat array, convert ruby2_keywords/kwarg forms into a keyword
 * hash, and defensively dup keyword hashes that the callee may mutate.
 * After this runs, some vm_ci_flag(ci) bits no longer describe the stack. */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        VALUE final_hash;
        /* This expands the rest argument to the stack.
         * So, vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT is now inconsistent.
         */
        vm_caller_setup_arg_splat(cfp, calling);
        /* a trailing ruby2_keywords-flagged hash in the splat becomes a
         * keyword splat (dup'ed so the callee can take ownership) */
        if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
            calling->argc > 0 &&
            RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&
            (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
            *(cfp->sp - 1) = rb_hash_dup(final_hash);
            calling->kw_splat = 1;
        }
    }
    if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
        if (IS_ARGS_KEYWORD(ci)) {
            /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
             * by creating a keyword hash.
             * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
             */
            vm_caller_setup_arg_kw(cfp, calling, ci);
        }
        else {
            VALUE keyword_hash = cfp->sp[-1];
            if (!RB_TYPE_P(keyword_hash, T_HASH)) {
                /* Convert a non-hash keyword splat to a new hash */
                cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
            }
            else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
                /* Convert a hash keyword splat to a new hash unless
                 * a mutable keyword splat was passed.
                 */
                cfp->sp[-1] = rb_hash_dup(keyword_hash);
            }
        }
    }
}
2577
/* Drop a trailing empty keyword-splat hash from the stack so the callee
 * sees no keyword argument at all (adjusts argc and kw_splat to match). */
static inline void
CALLER_REMOVE_EMPTY_KW_SPLAT(struct rb_control_frame_struct *restrict cfp,
                             struct rb_calling_info *restrict calling,
                             const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(calling->kw_splat)) {
        /* This removes the last Hash object if it is empty.
         * So, vm_ci_flag(ci) & VM_CALL_KW_SPLAT is now inconsistent.
         */
        if (RHASH_EMPTY_P(cfp->sp[-1])) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
    }
}
2594
/* Debug-only histogram of how many optional parameters call sites supply;
 * disabled by default. */
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    /* NOTE(review): loops to OPT_HIST_MAX-1, so the overflow bucket
     * opt_hist[OPT_HIST_MAX] is never printed — confirm intent. */
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
2610
/* ccf handler: normal call into an ISeq with only optional positional
 * parameters.  `opt` counts how many optionals were supplied; execution
 * starts at opt_table[opt] and the unsupplied slots (delta) are excluded
 * from the frame's parameter area. */
static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    /* NOTE(review): condition tests opt_pc but indexes by opt — confirm */
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}
2638
/* ccf handler: tail-call into an ISeq with only optional positional
 * parameters; starts execution at opt_table[opt]. */
static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    /* NOTE(review): condition tests opt_pc but indexes by opt — confirm */
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
2662
2663static void
2664args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2665 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2666 VALUE *const locals);
2667
/* ccf handler: callee takes only keyword parameters and the call site
 * passes literal keyword arguments (VM_CALL_KWARG).  Copies the supplied
 * keyword values out of the stack, fills the callee's keyword locals, and
 * pushes a normal frame at pc 0. */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    /* keyword locals live just before the kw-specified bitmap slot */
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    /* copy kw values aside: args_setup_kw_parameters consumes them */
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2694
/* ccf handler: callee takes only keyword parameters but the call site
 * passes none — fill every keyword local with its default value and push
 * a normal frame at pc 0. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    //   nobody check this value, but it should be cleared because it can
    //   points invalid VALUE (T_NONE objects, raw pointer and so on).

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2723
/* Arrange the caller's arguments for an ISeq method call and pick the
 * fastest reusable call handler for this call site.
 *
 * Returns the pc offset at which execution of the callee should start
 * (non-zero only when skipping an optional-parameter prologue).  Simple
 * parameter shapes are handled inline and cache a specialized handler
 * via CC_SET_FASTPATH; everything else falls back to
 * setup_parameters_complex. */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            /* plain positional parameters: exact arity match required */
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }

            VM_ASSERT(ci == calling->ci);
            VM_ASSERT(cc == calling->cc);
            CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            /* optional positional parameters only */
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            /* keyword parameters only */
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                /* literal keyword arguments at the call site */
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
2820
2821static VALUE
2822vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2823{
2824 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2825
2826 const struct rb_callcache *cc = calling->cc;
2827 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2828 const int param_size = ISEQ_BODY(iseq)->param.size;
2829 const int local_size = ISEQ_BODY(iseq)->local_table_size;
2830 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2831 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2832}
2833
2834static inline VALUE
2835vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
2836 int opt_pc, int param_size, int local_size)
2837{
2838 const struct rb_callinfo *ci = calling->ci;
2839 const struct rb_callcache *cc = calling->cc;
2840
2841 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2842 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2843 }
2844 else {
2845 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2846 }
2847}
2848
/* Push a METHOD frame for `me`, starting execution at opt_pc.  The
 * arguments already sit on the caller's stack; the new frame's sp is
 * placed just past the parameter slots and the caller's sp is wound back
 * below the receiver. */
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
2865
/* Perform a tail call: pop the current frame, then re-push receiver and
 * arguments at the freed stack position and push the callee's frame in
 * its place (propagating the FINISH flag).  A block captured from the
 * frame being popped is re-anchored to the previous frame first so it
 * survives the pop. */
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
{
    const struct rb_callcache *cc = calling->cc;
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        /* the block handler points into the frame we are about to pop:
         * move the captured block to the previous frame */
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
2914
2915static void
2916ractor_unsafe_check(void)
2917{
2918 if (!rb_ractor_main_p()) {
2919 rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
2920 }
2921}
2922
/* cfunc invoker, arity -2: f(recv, args_as_array); Ractor-unsafe. */
static VALUE
call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}
2930
/* cfunc invoker, arity -1: f(argc, argv, recv); Ractor-unsafe. */
static VALUE
call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}
2938
/* cfunc invoker, arity 0: f(recv); Ractor-unsafe. */
static VALUE
call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}
2946
/* cfunc invoker, arity 1; Ractor-unsafe. */
static VALUE
call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}
2954
/* cfunc invoker, arity 2; Ractor-unsafe. */
static VALUE
call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}
2962
/* cfunc invoker, arity 3; Ractor-unsafe. */
static VALUE
call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}
2970
/* cfunc invoker, arity 4; Ractor-unsafe. */
static VALUE
call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}
2978
/* cfunc invoker, arity 5; Ractor-unsafe. */
static VALUE
call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}
2986
2987static VALUE
2988call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2989{
2990 ractor_unsafe_check();
2992 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2993}
2994
2995static VALUE
2996call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2997{
2998 ractor_unsafe_check();
3000 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3001}
3002
3003static VALUE
3004call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3005{
3006 ractor_unsafe_check();
3008 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3009}
3010
3011static VALUE
3012call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3013{
3014 ractor_unsafe_check();
3016 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3017}
3018
3019static VALUE
3020call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3021{
3022 ractor_unsafe_check();
3024 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3025}
3026
3027static VALUE
3028call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3029{
3030 ractor_unsafe_check();
3032 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3033}
3034
3035static VALUE
3036call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3037{
3038 ractor_unsafe_check();
3040 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3041}
3042
3043static VALUE
3044call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3045{
3046 ractor_unsafe_check();
3048 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3049}
3050
3051static VALUE
3052call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3053{
3054 ractor_unsafe_check();
3056 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3057}
3058
3059static VALUE
3060call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3061{
3062 ractor_unsafe_check();
3064 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3065}
3066
/* Ractor-safe cfunc invoker, arity -2: f(recv, args_as_array). */
static VALUE
ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}
3073
/* Ractor-safe cfunc invoker, arity -1: f(argc, argv, recv). */
static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}
3080
/* Ractor-safe cfunc invoker, arity 0. */
static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}
3087
/* Ractor-safe cfunc invoker, arity 1. */
static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}
3094
/* Ractor-safe cfunc invoker, arity 2. */
static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}
3101
/* Ractor-safe cfunc invoker, arity 3. */
static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}
3108
/* Ractor-safe cfunc invoker, arity 4. */
static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}
3115
/* Ractor-safe cfunc invoker, arity 5. */
static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}
3122
3123static VALUE
3124ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3125{
3127 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3128}
3129
3130static VALUE
3131ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3132{
3134 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3135}
3136
3137static VALUE
3138ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3139{
3141 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3142}
3143
3144static VALUE
3145ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3146{
3148 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3149}
3150
3151static VALUE
3152ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3153{
3155 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3156}
3157
3158static VALUE
3159ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3160{
3162 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3163}
3164
3165static VALUE
3166ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3167{
3169 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3170}
3171
3172static VALUE
3173ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3174{
3176 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3177}
3178
3179static VALUE
3180ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3181{
3183 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3184}
3185
3186static VALUE
3187ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3188{
3190 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3191}
3192
3193static inline int
3194vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
3195{
3196 const int ov_flags = RAISED_STACKOVERFLOW;
3197 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3198 if (rb_ec_raised_p(ec, ov_flags)) {
3199 rb_ec_raised_reset(ec, ov_flags);
3200 return TRUE;
3201 }
3202 return FALSE;
3203}
3204
/* Abort with rb_bug when a cfunc left the control-frame stack unbalanced. */
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3208
/* Return the cfunc body of a callable method entry.
 * Valid only for CFUNC/NOTIMPLEMENTED entries; when
 * VM_DEBUG_VERIFY_METHOD_CACHE is enabled, every other method type raises
 * a fatal rb_bug() to catch stale method-cache entries. */
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    /* the def may be unaligned inside the imemo; take an unaligned pointer */
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
3236
/* Invoke a C-implemented method: fire c-call hooks, push a CFUNC frame,
 * call the function through its arity-specific invoker, verify frame
 * consistency, pop the frame, and fire c-return hooks.  Returns the
 * method's result. */
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
    int len = cfunc->argc; /* declared arity; negative means no fixed arity check */

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    int argc = calling->argc;
    int orig_argc = argc;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    if (len >= 0) rb_check_arity(argc, len, len);

    /* pop receiver + args off the caller's stack; argv starts at sp + 1 */
    reg_cfp->sp -= orig_argc + 1;
    val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
3279
/* Slow-path entry for cfunc calls: normalize caller-side arguments
 * (splat / empty kw-splat), install vm_call_cfunc_with_frame as the
 * fastpath when the call shape is simple, then perform the call. */
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
}
3291
/* Attribute-reader fast path: pop the receiver from the stack and read the
 * instance variable named by the method definition, using the shape index
 * cached in cc. */
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1; /* pop the receiver; readers take no arguments */
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
    return ivar;
}
3301
/* Attribute-writer fast path: pop value and receiver, then set the ivar
 * using the shape index/dest-shape cached in cc.  Falls back to the
 * generic-ivar path for non-T_OBJECT/T_CLASS/T_MODULE receivers, and to
 * the slowpath when the cached shape misses (vm_setivar returned Qundef). */
static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2; /* pop receiver + the single argument */
    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            {
                /* objects without embedded ivars use the generic ivar table */
                res = vm_setivar_default(obj, id, val, dest_shape_id, index);
                if (!UNDEF_P(res)) {
                    return res;
                }
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}
3331
/* Call-handler wrapper for attribute writers; delegates to
 * vm_call_attrset_direct() with the receiver from calling. */
static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}
3337
3338bool
3339rb_vm_call_ivar_attrset_p(const vm_call_handler ch)
3340{
3341 return (ch == vm_call_ivar || ch == vm_call_attrset);
3342}
3343
/* Invoke a bmethod (method defined via define_method from a Proc).
 * Rejects cross-Ractor invocation of an unshareable Proc, then invokes the
 * Proc body through rb_vm_invoke_bmethod(). */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    /* an unshareable Proc may only run in the Ractor that defined it */
    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
3364
3365static VALUE
3366vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3367{
3368 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3369
3370 VALUE *argv;
3371 int argc;
3372 const struct rb_callinfo *ci = calling->ci;
3373
3374 CALLER_SETUP_ARG(cfp, calling, ci);
3375 argc = calling->argc;
3376 argv = ALLOCA_N(VALUE, argc);
3377 MEMCPY(argv, cfp->sp - argc, VALUE, argc);
3378 cfp->sp += - argc - 1;
3379
3380 return vm_call_bmethod_body(ec, calling, argv);
3381}
3382
3383MJIT_FUNC_EXPORTED VALUE
3384rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
3385{
3386 VALUE klass = current_class;
3387
3388 /* for prepended Module, then start from cover class */
3389 if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3390 RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
3391 klass = RBASIC_CLASS(klass);
3392 }
3393
3394 while (RTEST(klass)) {
3395 VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3396 if (owner == target_owner) {
3397 return klass;
3398 }
3399 klass = RCLASS_SUPER(klass);
3400 }
3401
3402 return current_class; /* maybe module function */
3403}
3404
3405static const rb_callable_method_entry_t *
3406aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3407{
3408 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3409 const rb_callable_method_entry_t *cme;
3410
3411 if (orig_me->defined_class == 0) {
3412 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3413 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3414 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3415
3416 if (me->def->alias_count + me->def->complemented_count == 0) {
3417 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3418 }
3419 else {
3421 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3422 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3423 }
3424 }
3425 else {
3426 cme = (const rb_callable_method_entry_t *)orig_me;
3427 }
3428
3429 VM_ASSERT(callable_method_entry_p(cme));
3430 return cme;
3431}
3432
3434rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3435{
3436 return aliased_callable_method_entry(me);
3437}
3438
/* Call handler for aliased methods: swap in an on-stack call cache that
 * points at the resolved original method entry, then re-dispatch by type. */
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
3449
3450static enum method_missing_reason
3451ci_missing_reason(const struct rb_callinfo *ci)
3452{
3453 enum method_missing_reason stat = MISSING_NOENTRY;
3454 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3455 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3456 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3457 return stat;
3458}
3459
3460static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
3461
/* Dispatch a call whose method name arrives as a Symbol/String (e.g. via
 * send).  If the name is not an interned ID, rewrites the call into
 * method_missing by re-inserting the name on the stack; otherwise builds
 * on-stack ci/cc for the resolved method and dispatches with visibility
 * checks (FCALL skips them). */
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);
    /* Also assumes CALLER_SETUP_ARG is already done. */

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol); /* 0 if the name was never interned */
    flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        /* E.g. when argc == 2
         *
         *   |      |        |      | TOPN
         *   |      |        +------+
         *   |      |  +---> | arg1 |    0
         *   +------+  |     +------+
         *   | arg1 | -+ +-> | arg0 |    1
         *   +------+    |   +------+
         *   | arg0 | ---+   | sym  |    2
         *   +------+        +------+
         *   | recv |        | recv |    3
         * --+------+--------+------+------
         */
        /* shift args up by one and put the method name back on the stack */
        int i = argc;
        CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
        INC_SP(1);
        MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
        argc = ++calling->argc;

        if (rb_method_basic_definition_p(klass, idMethodMissing)) {
            /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
            TOPN(i) = symbol;
            int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
            const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
            VALUE exc = rb_make_no_method_exception(
                rb_eNoMethodError, 0, recv, argc, argv, priv);

            rb_exc_raise(exc);
        }
        else {
            TOPN(i) = rb_str_intern(symbol);
        }
    }

    calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        /* function-style call: no visibility check needed */
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
3546
/* Optimized handler for Kernel#send: take the method-name argument off the
 * stack, shift the remaining arguments down by one slot, and re-dispatch
 * through vm_call_symbol() as an FCALL (send bypasses visibility). */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    int i;
    VALUE sym;

    CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }
    else {
        sym = TOPN(i); /* bottom-most argument is the method name */
        /* E.g. when i == 2
         *
         *   |      |        |      | TOPN
         *   +------+        |      |
         *   | arg1 | ---+   |      |    0
         *   +------+    |   +------+
         *   | arg0 | -+ +-> | arg1 |    1
         *   +------+  |     +------+
         *   | sym  |  +---> | arg0 |    2
         *   +------+        +------+
         *   | recv |        | recv |    3
         * --+------+--------+------+------
         */
        /* shift arguments */
        if (i > 0) {
            MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
        }
        calling->argc -= 1;
        DEC_SP(1);

        return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
    }
}
3587
/* Rewrite the current call into a method_missing call: shift the arguments
 * up one slot, insert the original method name as a Symbol at argv[0],
 * record the reason on the EC, and dispatch idMethodMissing on the
 * receiver's class (without refinements). */
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
    argc = calling->argc + 1; /* +1 for the method-name symbol */

    unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
    calling->argc = argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;
    calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
3618
/* Call handler that forwards to vm_call_method_missing_body() using the
 * reason cached in the call cache. */
static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
}
3624
3625static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
/* Handle a ZSUPER method (visibility-changed stub): look the method up
 * again starting from the superclass of klass, unwrap a refined entry to
 * its original, and re-dispatch with an on-stack call cache. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        /* bypass the refinement wrapper; super should not re-enter it */
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
3644
3645static inline VALUE
3646find_refinement(VALUE refinements, VALUE klass)
3647{
3648 if (NIL_P(refinements)) {
3649 return Qnil;
3650 }
3651 return rb_hash_lookup(refinements, klass);
3652}
3653
PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
/* Starting from cfp, find the control frame of the method that lexically
 * encloses it: for a block frame, walk outward until the frame running the
 * block's local (method-level) iseq is found.  Returns the original cfp if
 * the walk runs off the stack (orphan block). */
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
3673
3674static const rb_callable_method_entry_t *
3675refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
3676{
3677 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
3678 const rb_callable_method_entry_t *cme;
3679
3680 if (orig_me->defined_class == 0) {
3681 cme = NULL;
3683 }
3684 else {
3685 cme = (const rb_callable_method_entry_t *)orig_me;
3686 }
3687
3688 VM_ASSERT(callable_method_entry_p(cme));
3689
3690 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3691 cme = NULL;
3692 }
3693
3694 return cme;
3695}
3696
/* Search the active refinements (walking the cref chain) for a method that
 * refines the cached method entry.  On a super call, skips the refinement
 * currently executing to avoid infinite recursion.  Falls back to the
 * original method (or the superclass lookup) when no refinement applies;
 * returns NULL when the method does not exist at all. */
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* don't re-enter the refinement we are super-ing out of */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        /* no original entry: resume lookup from the superclass */
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
3743
3744static VALUE
3745vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3746{
3747 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
3748 search_refined_method(ec, cfp, calling));
3749
3750 if (vm_cc_cme(ref_cc)) {
3751 calling->cc= ref_cc;
3752 return vm_call_method(ec, cfp, calling);
3753 }
3754 else {
3755 return vm_call_method_nome(ec, cfp, calling);
3756 }
3757}
3758
3759static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
3760
3761NOINLINE(static VALUE
3762 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3763 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
3764
/* Invoke a block for Proc#call-style dispatch: drop the receiver slot from
 * the stack (blocks do not take self as an argument) and hand off to
 * vm_invoke_block() as a non-lambda invocation. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
3777
3778static VALUE
3779vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3780{
3781 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3782
3783 const struct rb_callinfo *ci = calling->ci;
3784 VALUE procval = calling->recv;
3785 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3786}
3787
/* Optimized handler for calling the method's block parameter.  If
 * Proc#call has not been redefined, invoke the block handler directly;
 * otherwise materialize the Proc and dispatch a regular method call. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
3805
/* Core of the optimized Struct member reader: fetch the member at the
 * index recorded in the optimized method definition. */
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}
3818
/* Call handler for the optimized Struct member reader: read the member,
 * then pop the receiver from the stack. */
static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1; /* pop the receiver */
    return ret;
}
3828
/* Core of the optimized Struct member writer: frozen-check the struct,
 * store val at the index recorded in the optimized method definition, and
 * return the stored value. */
static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}
3845
/* Call handler for the optimized Struct member writer: take the value from
 * the stack top, perform the write, then pop receiver + value. */
static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2; /* pop receiver and argument */
    return ret;
}
3855
3856NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3857 const struct rb_callinfo *ci, const struct rb_callcache *cc));
3858
/* Dispatch an OPTIMIZED method entry (send / Proc#call / block call /
 * Struct accessors) to its specialized handler, installing that handler as
 * the fastpath when the call shape permits. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0); /* readers take no arguments */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aref(ec, cfp, calling);

      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1); /* writers take exactly one */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aset(ec, cfp, calling);
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
3890
/* Evaluate the attribute-access expression `func` into `var`, wrapping it
 * with C_CALL/C_RETURN event hooks when call tracing is enabled; when not
 * tracing, run `nohook` (typically a fastpath installation) instead. */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
3903
/* Dispatch a call according to the method entry's definition type (iseq,
 * cfunc, attr reader/writer, bmethod, alias, optimized, zsuper, refined,
 * ...), installing the type-specific handler as the call-cache fastpath
 * where safe. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        rb_check_arity(calling->argc, 1, 1);

        /* call shapes that defeat the attrset fastpath */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* unmarkable cc: rebuild an equivalent on-stack cache so the
             * attr index can be stored without GC write barriers */
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    ((VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
4000
4001NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
4002
/* Handle a call with no method entry: raise NoMethodError directly when
 * method_missing itself is missing, otherwise rewrite the call into a
 * method_missing invocation. */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        /* STACK_ADDR_FROM_TOP needs reg_cfp in scope */
        rb_control_frame_t *reg_cfp = cfp;
        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        /* vm_raise_method_missing is NORETURN (declared above) */
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
4019
4020/* Protected method calls and super invocations need to check that the receiver
4021 * (self for super) inherits the module on which the method is defined.
4022 * In the case of refinements, it should consider the original class not the
4023 * refinement.
4024 */
4025static VALUE
4026vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
4027{
4028 VALUE defined_class = me->defined_class;
4029 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4030 return NIL_P(refined_class) ? defined_class : refined_class;
4031}
4032
/* Top-level method dispatch with visibility checks: public calls go
 * straight to type dispatch; private calls require FCALL; protected calls
 * require the caller's self to be a kind of the defining class (an
 * on-stack copy of the cache is used there so the visibility-bypassing
 * result is not cached globally).  Missing entries fall through to
 * vm_call_method_nome(). */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4083
/* Generic (non-fastpath) call handler: counts the call, then runs the full
 * visibility-checking dispatch. */
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}
4090
/* Reset a call cache's handler back to the generic path (e.g. after the
 * cached fastpath became invalid).  Must not be the shared empty cache. */
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    /* call_ is declared const; cast it away for this in-place reset */
    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
4099
/* Call handler used for super dispatch.  Behaves like vm_call_general, but
 * must remain a distinct function: search_refined_method() compares the
 * handler address against vm_call_super_method. */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
4114
4115/* super */
4116
/* Compute where a normal super call resumes lookup: unwrap a refinement
 * iclass to the refinement module itself, skip origin iclasses, and return
 * the superclass of the result. */
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}
4128
4129NORETURN(static void vm_super_outside(void));
4130
/* Raise NoMethodError for `super` used where no method frame exists
 * (declared NORETURN above). */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
4136
/* Return the shared empty call cache used for super calls.  The MJIT
 * header cannot reference the file-local static, so it goes through the
 * exported accessor instead. */
static const struct rb_callcache *
empty_cc_for_super(void)
{
#ifdef MJIT_HEADER
    return rb_vm_empty_cc_for_super();
#else
    return &vm_empty_cc_for_super;
#endif
}
4146
4147static const struct rb_callcache *
4148vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
4149{
4150 VALUE current_defined_class;
4151 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
4152
4153 if (!me) {
4154 vm_super_outside();
4155 }
4156
4157 current_defined_class = vm_defined_class_for_protected_call(me);
4158
4159 if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
4160 reg_cfp->iseq != method_entry_iseqptr(me) &&
4161 !rb_obj_is_kind_of(recv, current_defined_class)) {
4162 VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
4163 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4164
4165 if (m) { /* not bound UnboundMethod */
4167 "self has wrong type to call super in this context: "
4168 "%"PRIsVALUE" (expected %"PRIsVALUE")",
4169 rb_obj_class(recv), m);
4170 }
4171 }
4172
4173 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4175 "implicit argument passing of super from method defined"
4176 " by define_method() is not supported."
4177 " Specify all arguments explicitly.");
4178 }
4179
4180 ID mid = me->def->original_id;
4181
4182 // update iseq. really? (TODO)
4183 cd->ci = vm_ci_new_runtime(mid,
4184 vm_ci_flag(cd->ci),
4185 vm_ci_argc(cd->ci),
4186 vm_ci_kwarg(cd->ci));
4187
4188 RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);
4189
4190 const struct rb_callcache *cc;
4191
4192 VALUE klass = vm_search_normal_superclass(me->defined_class);
4193
4194 if (!klass) {
4195 /* bound instance method of module */
4196 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
4197 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4198 }
4199 else {
4200 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
4201 const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);
4202
4203 // define_method can cache for different method id
4204 if (cached_cme == NULL) {
4205 // empty_cc_for_super is not markable object
4206 cd->cc = empty_cc_for_super();
4207 }
4208 else if (cached_cme->called_id != mid) {
4209 const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
4210 if (cme) {
4211 cc = vm_cc_new(klass, cme, vm_call_super_method);
4212 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4213 }
4214 else {
4215 cd->cc = cc = empty_cc_for_super();
4216 }
4217 }
4218 else {
4219 switch (cached_cme->def->type) {
4220 // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
4221 case VM_METHOD_TYPE_REFINED:
4222 // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
4223 case VM_METHOD_TYPE_ATTRSET:
4224 case VM_METHOD_TYPE_IVAR:
4225 vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
4226 break;
4227 default:
4228 break; // use fastpath
4229 }
4230 }
4231 }
4232
4233 VM_ASSERT((vm_cc_cme(cc), true));
4234
4235 return cc;
4236}
4237
4238/* yield */
4239
/* Return the proc's is_lambda flag, or 0 when no proc is given
 * (procval == Qfalse / 0 is treated as "not a lambda"). */
static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    else {
        return 0;
    }
}
4253
4254static VALUE
4255vm_yield_with_cfunc(rb_execution_context_t *ec,
4256 const struct rb_captured_block *captured,
4257 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
4259{
4260 int is_lambda = FALSE; /* TODO */
4261 VALUE val, arg, blockarg;
4262 int frame_flag;
4263 const struct vm_ifunc *ifunc = captured->code.ifunc;
4264
4265 if (is_lambda) {
4266 arg = rb_ary_new4(argc, argv);
4267 }
4268 else if (argc == 0) {
4269 arg = Qnil;
4270 }
4271 else {
4272 arg = argv[0];
4273 }
4274
4275 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4276
4277 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4278 if (kw_splat) {
4279 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4280 }
4281
4282 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
4283 frame_flag,
4284 self,
4285 VM_GUARDED_PREV_EP(captured->ep),
4286 (VALUE)me,
4287 0, ec->cfp->sp, 0, 0);
4288 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
4289 rb_vm_pop_frame(ec);
4290
4291 return val;
4292}
4293
/* Yield to a Symbol block (&:sym): delegate to rb_sym_proc_call, which
 * sends the symbol's method to argv[0] with the remaining args. */
static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
4299
/* Splat `ary` into argv for a block's lead parameters: copies up to
 * lead_num elements (fewer if the array is shorter) and returns the
 * number actually copied, which becomes the new argc. */
static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}
4314
/* Try to convert the single block argument argv[0] to an Array (for
 * auto-splatting).  Returns the array, or Qnil when argv[0] has no
 * to_ary conversion.  The #if 0 branch documents that rb_check_array_type
 * must not have replaced argv[0] in place. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
4327
/* Arrange the arguments on the stack for invoking a block/lambda iseq.
 *
 * Simple iseqs (only lead parameters) are handled inline: a lone array
 * argument is auto-splatted for plain blocks, missing args are padded
 * with nil and extra args truncated (block semantics), while lambdas
 * get a strict arity check.  Anything more complex is delegated to
 * setup_parameters_complex().  Returns the opt_pc offset (0 here). */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        /* auto-splat a single array argument onto the lead parameters */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
4369
/* Build a temporary rb_calling_info / on-stack callinfo for a yield and
 * delegate argument setup to vm_callee_setup_block_arg. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int kw_splat, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = kw_splat;
    calling->recv = Qundef;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
4384
4385/* ruby iseq -> ruby block */
4386
/* Invoke an iseq-backed block: set up the arguments in place at the
 * base of the argument region (rsp), then push a BLOCK frame.  Returns
 * Qundef to tell the caller a new frame was pushed rather than a value
 * being produced directly. */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
4410
/* Invoke a Symbol block handler: the first argument becomes the
 * receiver and the symbol names the method to send to it.  Raises
 * ArgumentError when no receiver argument is available. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    if (calling->argc < 1) {
        rb_raise(rb_eArgError, "no receiver given");
    }
    else {
        VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
        CALLER_SETUP_ARG(reg_cfp, calling, ci);
        /* pop the receiver off the stack; remaining TOS entries are the args */
        calling->recv = TOPN(--calling->argc);
        return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
    }
}
4426
/* Invoke a C-function (ifunc) block handler: normalize caller args,
 * yield through vm_yield_with_cfunc, then pop the arguments. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
4442
/* Convert a Proc object into the corresponding block handler for its
 * underlying block representation (iseq / ifunc / symbol / proc). */
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}
4461
/* Invoke a Proc block handler: unwrap any chain of proc handlers
 * (tracking the lambda-ness of each) down to a concrete handler, then
 * re-dispatch through vm_invoke_block. */
static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
4475
/* Dispatch block invocation by block-handler type to the matching
 * vm_invoke_*_block implementation. */
static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
4495
/* Create a Proc whose body is `blockiseq`, captured against the current
 * Ruby-level frame's self and environment. */
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}
4512
/* Execute a `once` iseq by wrapping it in a Proc and calling it with no
 * arguments (used by the `once` instruction via rb_ensure). */
static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}
4519
/* Ensure-handler for the `once` instruction: clear the running-thread
 * marker in the inline storage so another thread may retry. */
static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}
4527
4528/* defined insn */
4529
4530static bool
4531check_respond_to_missing(VALUE obj, VALUE v)
4532{
4533 VALUE args[2];
4534 VALUE r;
4535
4536 args[0] = obj; args[1] = Qfalse;
4537 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
4538 if (!UNDEF_P(r) && RTEST(r)) {
4539 return true;
4540 }
4541 else {
4542 return false;
4543 }
4544}
4545
/* Core of the `defined?` keyword: for each operand kind, report whether
 * the given entity exists without raising.  `obj` typically carries the
 * symbol/ID being tested and `v` the receiver/base value evaluated by
 * the compiled prelude. */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fallthrough: protected and self is kind_of defined class */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            /* no method entry: defer to respond_to_missing? */
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:{
        /* $~/$1-style back-references: defined iff the value is non-nil */
        return vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil;
        break;
      }
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
4628
/* Non-static wrapper over vm_defined for callers outside this file
 * (e.g. JIT-generated code). */
bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
4634
4635static const VALUE *
4636vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
4637{
4638 rb_num_t i;
4639 const VALUE *ep = reg_ep;
4640 for (i = 0; i < lv; i++) {
4641 ep = GET_PREV_EP(ep);
4642 }
4643 return ep;
4644}
4645
/* Implement `putspecialobject`: return the VM frozen core, the cref
 * base (cbase), or the constant-definition base for the given type. */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
4661
/* Implement `concatarray`: coerce both operands with to_a (wrapping
 * non-convertible values in a one-element array) and concatenate.
 * ary1 is duplicated when the coercion returned it unchanged, so the
 * original array operand is never mutated. */
static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        tmp1 = rb_ary_new3(1, ary1);
    }

    if (NIL_P(tmp2)) {
        tmp2 = rb_ary_new3(1, ary2);
    }

    if (tmp1 == ary1) {
        tmp1 = rb_ary_dup(ary1);
    }
    return rb_ary_concat(tmp1, tmp2);
}
4682
// YJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}
4690
/* Implement `splatarray`: coerce `ary` with to_a (wrapping in a new
 * one-element array if not convertible).  With a truthy `flag` the
 * result is duplicated so the caller owns a mutable copy. */
static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}
4705
// YJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
4713
/* Implement `checkmatch` (case/when and rescue matching).  With
 * VM_CHECKMATCH_ARRAY set, `pattern` is an array and each element is
 * matched against `target`, returning the first truthy result (or
 * Qfalse).  Otherwise a single match of the selected type is done. */
static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}
4737
/* Implement `checkkeyword`: report whether the keyword argument at
 * `idx` was NOT supplied by the caller (Qtrue means "unspecified", so
 * the default expression must be evaluated).  The kw_bits local is a
 * Fixnum bitmap for few keywords, or a Hash for many. */
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
4754
4755static void
4756vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
4757{
4758 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4759 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4760 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4761 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4762
4763 switch (flag) {
4764 case RUBY_EVENT_CALL:
4765 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4766 return;
4767 case RUBY_EVENT_C_CALL:
4768 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4769 return;
4770 case RUBY_EVENT_RETURN:
4771 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4772 return;
4774 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4775 return;
4776 }
4777 }
4778}
4779
/* Look up constant `id` directly under `cbase` for class/module
 * (re)definition.  Returns 0 when the constant is not defined there;
 * scoped definitions (`class Foo::Bar`) use the public-only lookup. */
static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    else {
        return rb_const_get_at(cbase, id);
    }
}
4793
4794static VALUE
4795vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
4796{
4797 if (!RB_TYPE_P(klass, T_CLASS)) {
4798 return 0;
4799 }
4800 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4801 VALUE tmp = rb_class_real(RCLASS_SUPER(klass));
4802
4803 if (tmp != super) {
4805 "superclass mismatch for class %"PRIsVALUE"",
4806 rb_id2str(id));
4807 }
4808 else {
4809 return klass;
4810 }
4811 }
4812 else {
4813 return klass;
4814 }
4815}
4816
4817static VALUE
4818vm_check_if_module(ID id, VALUE mod)
4819{
4820 if (!RB_TYPE_P(mod, T_MODULE)) {
4821 return 0;
4822 }
4823 else {
4824 return mod;
4825 }
4826}
4827
/* Bind a freshly created class/module `c` to constant `id` under
 * `cbase`, setting its class path first.  Returns c. */
static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}
4835
4836static VALUE
4837vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4838{
4839 /* new class declaration */
4840 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4841 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
4843 rb_class_inherited(s, c);
4844 return c;
4845}
4846
/* Create and register a brand-new module for a `module` definition. */
static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
4853
4854NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
4855static void
4856unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
4857{
4858 VALUE name = rb_id2str(id);
4859 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
4860 name, type);
4861 VALUE location = rb_const_source_location_at(cbase, id);
4862 if (!NIL_P(location)) {
4863 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
4864 " previous definition of %"PRIsVALUE" was here",
4865 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
4866 }
4868}
4869
4870static VALUE
4871vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4872{
4873 VALUE klass;
4874
4875 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
4877 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
4878 rb_obj_class(super));
4879 }
4880
4881 vm_check_if_namespace(cbase);
4882
4883 /* find klass */
4884 rb_autoload_load(cbase, id);
4885 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
4886 if (!vm_check_if_class(id, flags, super, klass))
4887 unmatched_redefinition("class", cbase, id, klass);
4888 return klass;
4889 }
4890 else {
4891 return vm_declare_class(id, flags, cbase, super);
4892 }
4893}
4894
/* Implement the `defineclass` instruction's module branch: reopen a
 * matching existing module or declare a new one; raise via
 * unmatched_redefinition when the constant exists but is not a module. */
static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}
4910
/* Top-level dispatch for the `defineclass` instruction: route to class,
 * singleton-class, or module handling based on the encoded type. */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
4936
/* Current default method visibility for `def` in this scope; public
 * when the frame has no cref of its own. */
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}
4949
/* Whether `module_function` mode is active in the current scope (so a
 * `def` also defines a singleton method on the module). */
static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}
4962
/* Implement `definemethod`/`definesmethod`: add the iseq as a method
 * named `id` on the appropriate class (singleton class of `obj` for
 * `def obj.m`, otherwise the cref's definition class), honoring scope
 * visibility and `module_function`. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on number of ivar sets that are in the initialize method
    if (id == rb_intern("initialize") && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {

        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    /* module_function: also define a public singleton method on the module */
    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
4995
/* Implement `invokeblock` (yield): raise LocalJumpError when no block
 * is given, otherwise dispatch to the frame's block handler. */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
5011
/* Method-explorer selection for vm_sendish(): in the MJIT header the
 * explorer is passed as a function pointer (better for JIT codegen),
 * while the interpreter uses an enum so the switch in vm_sendish can be
 * inlined (see the comment inside vm_sendish). */
#ifdef MJIT_HEADER
static const struct rb_callcache *
vm_search_method_wrap(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);
}

static const struct rb_callcache *
vm_search_invokeblock(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    /* A dummy, unmarkable callcache whose handler performs yield. */
    static const struct rb_callcache cc = {
        .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
        .klass = 0,
        .cme_  = 0,
        .call_ = vm_invokeblock_i,
        .aux_  = {0},
    };
    return &cc;
}

# define mexp_search_method vm_search_method_wrap
# define mexp_search_super vm_search_super_method
# define mexp_search_invokeblock vm_search_invokeblock
#else
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};
#endif
5042
/* Common implementation of send-like instructions (send, opt_send,
 * invokesuper, invokeblock).  Looks up the callcache via the selected
 * method explorer, invokes the call handler, and — when the handler
 * pushed a Ruby frame (returned Qundef) — continues execution via
 * jit_exec/vm_exec.  Returns the call's result VALUE. */
static
#ifndef MJIT_HEADER
inline
#endif
VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
#ifdef MJIT_HEADER
    const struct rb_callcache *(*method_explorer)(const struct rb_control_frame_struct *cfp, struct rb_call_data *cd, VALUE recv)
#else
    enum method_explorer_type method_explorer
#endif
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .ci = ci,
    };

// The enum-based branch and inlining are faster in VM, but function pointers without inlining are faster in JIT.
#ifdef MJIT_HEADER
    calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
#else
    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        calling.ci = cd->ci;  // TODO: does it safe?
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
#endif

    if (!UNDEF_P(val)) {
        return val; /* CFUNC normal return */
    }
    else {
        RESTORE_REGS(); /* CFP pushed in cc->call() */
    }

#ifdef MJIT_HEADER
    /* When calling ISeq which may catch an exception from JIT-ed
       code, we should not call jit_exec directly to prevent the
       caller frame from being canceled. That's because the caller
       frame may have stack values in the local variables and the
       cancelling the caller frame will purge them. But directly
       calling jit_exec is faster... */
    if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, true);
    }
    else if (UNDEF_P(val = jit_exec(ec))) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, false);
    }
    else {
        return val;
    }
#else
    /* When calling from VM, longjmp in the callee won't purge any
       JIT-ed caller frames. So it's safe to directly call jit_exec. */
    return jit_exec(ec);
#endif
}
5124
5125/* object.c */
5126VALUE rb_nil_to_s(VALUE);
5127VALUE rb_true_to_s(VALUE);
5128VALUE rb_false_to_s(VALUE);
5129/* numeric.c */
5130VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
5131VALUE rb_fix_to_s(VALUE);
5132/* variable.c */
5133VALUE rb_mod_to_s(VALUE);
5135
/* Fast path for the `objtostring` instruction (string interpolation):
 * when the receiver's to_s is the known default C implementation for
 * its type, produce the string directly without a method call.
 * Returns Qundef to fall back to a normal to_s dispatch. */
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    int type = TYPE(recv);
    if (type == T_STRING) {
        return recv;
    }

    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (type) {
      case T_SYMBOL:
        if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_name(recv);
            if (NIL_P(val)) {
                val = rb_mod_to_s(recv);
            }
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
5191
/* Fast path for opt_str_freeze/opt_str_uminus: when the relevant String
 * method (`bop`) has not been redefined, return the precomputed frozen
 * string as-is; Qundef falls back to a real method call.  `bop`/`id`
 * identify the operation being guarded. */
static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        return str;
    }
    else {
        return Qundef;
    }
}
5202
5203/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
5204#define id_cmp idCmp
5205
/* Fast path for `[a, b, ...].max` (opt_newarray_max): when Array#max is
 * unredefined, scan the `num` stack values with the optimized <=>
 * without materializing the array; otherwise build the array and send
 * :max honoring refinements. */
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}
5229
/* Non-static wrapper for JIT-generated code. */
VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}
5235
/* Fast path for `[a, b, ...].min` (opt_newarray_min); mirror image of
 * vm_opt_newarray_max above. */
static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}
5259
/* Exported entry point for JIT-compiled code; forwards to the static
 * interpreter implementation above. */
VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}
5265
5266#undef id_cmp
5267
5268#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5269
/* Register inline cache `ic` as depending on the constant name `id`.
 * GET_VM()->constant_cache maps each ID to an st_table used as a set of
 * ICs, so caches can later be found (and presumably invalidated) when a
 * constant with that name changes.  Mutates VM-global state; callers run
 * this under the VM lock (see vm_ic_track_const_chain). */
static void
vm_track_constant_cache(ID id, void *ic)
{
    struct rb_id_table *const_cache = GET_VM()->constant_cache;
    VALUE lookup_result;
    st_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (st_table *)lookup_result;
    }
    else {
        /* first cache registered for this name: lazily create its set */
        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* the table is used as a set; the Qtrue value is a placeholder */
    st_insert(ics, (st_data_t) ic, (st_data_t) Qtrue);
}
5287
5288static void
5289vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
5290{
5291 RB_VM_LOCK_ENTER();
5292
5293 for (int i = 0; segments[i]; i++) {
5294 ID id = segments[i];
5295 if (id == idNULL) continue;
5296 vm_track_constant_cache(id, ic);
5297 }
5298
5299 RB_VM_LOCK_LEAVE();
5300}
5301
// For MJIT inlining
/* Decide whether a filled constant-cache payload may be reused:
 *  - ractor safety: the cached value is usable only if it was marked
 *    shareable at fill time, or we are on the main ractor;
 *  - scope: if a cref was recorded with the cache it must match the
 *    current frame's cref; ic_cref == NULL means the lookup did not
 *    depend on the lexical scope. */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}
5314
/* Check a concrete cache entry against the current frame; thin wrapper
 * over vm_inlined_ic_hit_p that unpacks the imemo fields. */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}
5321
// YJIT needs this function to never allocate and never raise
/* Public (JIT-callable) constant-cache check: false when the cache is
 * empty, otherwise defers to vm_ic_hit_p. */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}
5328
/* Fill constant inline cache `ic` with `val` after a constant lookup at
 * `pc` within `iseq`.  If the lookup went through const_missing (counter
 * set elsewhere on that path), do NOT cache the result -- caching would
 * bypass the const_missing hook on the next hit -- and clear the entry
 * instead. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);  /* write barrier: ice -> val */
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    /* shareable values may be reused from any ractor (vm_inlined_ic_hit_p) */
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);  /* write barrier: iseq -> ice */

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    /* notify the JITs so compiled code for this site can be updated */
    rb_yjit_constant_ic_update(iseq, ic, pos);
    rb_mjit_constant_ic_update(iseq, ic, pos);
}
5349
5350static VALUE
5351vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5352{
5353 rb_thread_t *th = rb_ec_thread_ptr(ec);
5354 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5355
5356 again:
5357 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5358 return is->once.value;
5359 }
5360 else if (is->once.running_thread == NULL) {
5361 VALUE val;
5362 is->once.running_thread = th;
5363 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
5364 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
5365 /* is->once.running_thread is cleared by vm_once_clear() */
5366 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
5367 return val;
5368 }
5369 else if (is->once.running_thread == th) {
5370 /* recursive once */
5371 return vm_once_exec((VALUE)iseq);
5372 }
5373 else {
5374 /* waiting for finish */
5375 RUBY_VM_CHECK_INTS(ec);
5377 goto again;
5378 }
5379}
5380
/* Implementation of the opt_case_dispatch instruction: look `key` up in
 * the compile-time CDHASH and return the branch offset stored there, or
 * `else_offset` when absent.  Returns 0 to make the interpreter fall back
 * to sequential `when` tests, e.g. when #=== may have been redefined for
 * any of the eligible classes or `key` is not an eligible type. */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special const: nil/true/false/Fixnum/static Symbol/Flonum */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                /* integral, finite Floats are normalized to Integer keys so
                 * e.g. `when 1` matches 1.0, mirroring Integer#== */
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                /* offsets are stored as Fixnums in the CDHASH */
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
5415
5416NORETURN(static void
5417 vm_stack_consistency_error(const rb_execution_context_t *ec,
5418 const rb_control_frame_t *,
5419 const VALUE *));
5420static void
5421vm_stack_consistency_error(const rb_execution_context_t *ec,
5422 const rb_control_frame_t *cfp,
5423 const VALUE *bp)
5424{
5425 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5426 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5427 static const char stack_consistency_error[] =
5428 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5429#if defined RUBY_DEVEL
5430 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
5431 rb_str_cat_cstr(mesg, "\n");
5432 rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
5434#else
5435 rb_bug(stack_consistency_error, nsp, nbp);
5436#endif
5437}
5438
5439static VALUE
5440vm_opt_plus(VALUE recv, VALUE obj)
5441{
5442 if (FIXNUM_2_P(recv, obj) &&
5443 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5444 return rb_fix_plus_fix(recv, obj);
5445 }
5446 else if (FLONUM_2_P(recv, obj) &&
5447 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5448 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
5449 }
5450 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5451 return Qundef;
5452 }
5453 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5454 RBASIC_CLASS(obj) == rb_cFloat &&
5455 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5456 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
5457 }
5458 else if (RBASIC_CLASS(recv) == rb_cString &&
5459 RBASIC_CLASS(obj) == rb_cString &&
5460 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5461 return rb_str_opt_plus(recv, obj);
5462 }
5463 else if (RBASIC_CLASS(recv) == rb_cArray &&
5464 RBASIC_CLASS(obj) == rb_cArray &&
5465 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5466 return rb_ary_plus(recv, obj);
5467 }
5468 else {
5469 return Qundef;
5470 }
5471}
5472
5473static VALUE
5474vm_opt_minus(VALUE recv, VALUE obj)
5475{
5476 if (FIXNUM_2_P(recv, obj) &&
5477 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5478 return rb_fix_minus_fix(recv, obj);
5479 }
5480 else if (FLONUM_2_P(recv, obj) &&
5481 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5482 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
5483 }
5484 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5485 return Qundef;
5486 }
5487 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5488 RBASIC_CLASS(obj) == rb_cFloat &&
5489 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5490 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
5491 }
5492 else {
5493 return Qundef;
5494 }
5495}
5496
5497static VALUE
5498vm_opt_mult(VALUE recv, VALUE obj)
5499{
5500 if (FIXNUM_2_P(recv, obj) &&
5501 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5502 return rb_fix_mul_fix(recv, obj);
5503 }
5504 else if (FLONUM_2_P(recv, obj) &&
5505 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5506 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
5507 }
5508 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5509 return Qundef;
5510 }
5511 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5512 RBASIC_CLASS(obj) == rb_cFloat &&
5513 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5514 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
5515 }
5516 else {
5517 return Qundef;
5518 }
5519}
5520
5521static VALUE
5522vm_opt_div(VALUE recv, VALUE obj)
5523{
5524 if (FIXNUM_2_P(recv, obj) &&
5525 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5526 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
5527 }
5528 else if (FLONUM_2_P(recv, obj) &&
5529 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5530 return rb_flo_div_flo(recv, obj);
5531 }
5532 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5533 return Qundef;
5534 }
5535 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5536 RBASIC_CLASS(obj) == rb_cFloat &&
5537 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5538 return rb_flo_div_flo(recv, obj);
5539 }
5540 else {
5541 return Qundef;
5542 }
5543}
5544
5545static VALUE
5546vm_opt_mod(VALUE recv, VALUE obj)
5547{
5548 if (FIXNUM_2_P(recv, obj) &&
5549 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5550 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
5551 }
5552 else if (FLONUM_2_P(recv, obj) &&
5553 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5554 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
5555 }
5556 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5557 return Qundef;
5558 }
5559 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5560 RBASIC_CLASS(obj) == rb_cFloat &&
5561 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5562 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
5563 }
5564 else {
5565 return Qundef;
5566 }
5567}
5568
5569static VALUE
5570vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
5571{
5572 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5573 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5574
5575 if (!UNDEF_P(val)) {
5576 return RBOOL(!RTEST(val));
5577 }
5578 }
5579
5580 return Qundef;
5581}
5582
5583static VALUE
5584vm_opt_lt(VALUE recv, VALUE obj)
5585{
5586 if (FIXNUM_2_P(recv, obj) &&
5587 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5588 return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
5589 }
5590 else if (FLONUM_2_P(recv, obj) &&
5591 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5592 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
5593 }
5594 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5595 return Qundef;
5596 }
5597 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5598 RBASIC_CLASS(obj) == rb_cFloat &&
5599 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5600 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5601 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
5602 }
5603 else {
5604 return Qundef;
5605 }
5606}
5607
5608static VALUE
5609vm_opt_le(VALUE recv, VALUE obj)
5610{
5611 if (FIXNUM_2_P(recv, obj) &&
5612 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5613 return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
5614 }
5615 else if (FLONUM_2_P(recv, obj) &&
5616 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5617 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
5618 }
5619 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5620 return Qundef;
5621 }
5622 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5623 RBASIC_CLASS(obj) == rb_cFloat &&
5624 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5625 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5626 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
5627 }
5628 else {
5629 return Qundef;
5630 }
5631}
5632
5633static VALUE
5634vm_opt_gt(VALUE recv, VALUE obj)
5635{
5636 if (FIXNUM_2_P(recv, obj) &&
5637 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5638 return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
5639 }
5640 else if (FLONUM_2_P(recv, obj) &&
5641 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5642 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5643 }
5644 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5645 return Qundef;
5646 }
5647 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5648 RBASIC_CLASS(obj) == rb_cFloat &&
5649 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5650 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5651 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5652 }
5653 else {
5654 return Qundef;
5655 }
5656}
5657
5658static VALUE
5659vm_opt_ge(VALUE recv, VALUE obj)
5660{
5661 if (FIXNUM_2_P(recv, obj) &&
5662 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5663 return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
5664 }
5665 else if (FLONUM_2_P(recv, obj) &&
5666 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5667 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5668 }
5669 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5670 return Qundef;
5671 }
5672 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5673 RBASIC_CLASS(obj) == rb_cFloat &&
5674 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5675 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5676 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5677 }
5678 else {
5679 return Qundef;
5680 }
5681}
5682
5683
5684static VALUE
5685vm_opt_ltlt(VALUE recv, VALUE obj)
5686{
5687 if (SPECIAL_CONST_P(recv)) {
5688 return Qundef;
5689 }
5690 else if (RBASIC_CLASS(recv) == rb_cString &&
5691 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5692 if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
5693 return rb_str_buf_append(recv, obj);
5694 }
5695 else {
5696 return rb_str_concat(recv, obj);
5697 }
5698 }
5699 else if (RBASIC_CLASS(recv) == rb_cArray &&
5700 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5701 return rb_ary_push(recv, obj);
5702 }
5703 else {
5704 return Qundef;
5705 }
5706}
5707
5708static VALUE
5709vm_opt_and(VALUE recv, VALUE obj)
5710{
5711 // If recv and obj are both fixnums, then the bottom tag bit
5712 // will be 1 on both. 1 & 1 == 1, so the result value will also
5713 // be a fixnum. If either side is *not* a fixnum, then the tag bit
5714 // will be 0, and we return Qundef.
5715 VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);
5716
5717 if (FIXNUM_P(ret) &&
5718 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5719 return ret;
5720 }
5721 else {
5722 return Qundef;
5723 }
5724}
5725
5726static VALUE
5727vm_opt_or(VALUE recv, VALUE obj)
5728{
5729 if (FIXNUM_2_P(recv, obj) &&
5730 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5731 return recv | obj;
5732 }
5733 else {
5734 return Qundef;
5735 }
5736}
5737
5738static VALUE
5739vm_opt_aref(VALUE recv, VALUE obj)
5740{
5741 if (SPECIAL_CONST_P(recv)) {
5742 if (FIXNUM_2_P(recv, obj) &&
5743 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5744 return rb_fix_aref(recv, obj);
5745 }
5746 return Qundef;
5747 }
5748 else if (RBASIC_CLASS(recv) == rb_cArray &&
5749 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5750 if (FIXNUM_P(obj)) {
5751 return rb_ary_entry_internal(recv, FIX2LONG(obj));
5752 }
5753 else {
5754 return rb_ary_aref1(recv, obj);
5755 }
5756 }
5757 else if (RBASIC_CLASS(recv) == rb_cHash &&
5758 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5759 return rb_hash_aref(recv, obj);
5760 }
5761 else {
5762 return Qundef;
5763 }
5764}
5765
5766static VALUE
5767vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
5768{
5769 if (SPECIAL_CONST_P(recv)) {
5770 return Qundef;
5771 }
5772 else if (RBASIC_CLASS(recv) == rb_cArray &&
5773 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5774 FIXNUM_P(obj)) {
5775 rb_ary_store(recv, FIX2LONG(obj), set);
5776 return set;
5777 }
5778 else if (RBASIC_CLASS(recv) == rb_cHash &&
5779 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5780 rb_hash_aset(recv, obj, set);
5781 return set;
5782 }
5783 else {
5784 return Qundef;
5785 }
5786}
5787
5788static VALUE
5789vm_opt_aref_with(VALUE recv, VALUE key)
5790{
5791 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5792 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5793 rb_hash_compare_by_id_p(recv) == Qfalse &&
5794 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
5795 return rb_hash_aref(recv, key);
5796 }
5797 else {
5798 return Qundef;
5799 }
5800}
5801
5802static VALUE
5803vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
5804{
5805 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5806 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5807 rb_hash_compare_by_id_p(recv) == Qfalse) {
5808 return rb_hash_aset(recv, key, val);
5809 }
5810 else {
5811 return Qundef;
5812 }
5813}
5814
5815static VALUE
5816vm_opt_length(VALUE recv, int bop)
5817{
5818 if (SPECIAL_CONST_P(recv)) {
5819 return Qundef;
5820 }
5821 else if (RBASIC_CLASS(recv) == rb_cString &&
5822 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5823 if (bop == BOP_EMPTY_P) {
5824 return LONG2NUM(RSTRING_LEN(recv));
5825 }
5826 else {
5827 return rb_str_length(recv);
5828 }
5829 }
5830 else if (RBASIC_CLASS(recv) == rb_cArray &&
5831 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5832 return LONG2NUM(RARRAY_LEN(recv));
5833 }
5834 else if (RBASIC_CLASS(recv) == rb_cHash &&
5835 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5836 return INT2FIX(RHASH_SIZE(recv));
5837 }
5838 else {
5839 return Qundef;
5840 }
5841}
5842
5843static VALUE
5844vm_opt_empty_p(VALUE recv)
5845{
5846 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5847 case Qundef: return Qundef;
5848 case INT2FIX(0): return Qtrue;
5849 default: return Qfalse;
5850 }
5851}
5852
5853VALUE rb_false(VALUE obj);
5854
5855static VALUE
5856vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5857{
5858 if (NIL_P(recv) &&
5859 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5860 return Qtrue;
5861 }
5862 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5863 return Qfalse;
5864 }
5865 else {
5866 return Qundef;
5867 }
5868}
5869
/* Compute x.succ for a Fixnum VALUE without untagging, handling the two
 * edge cases of the tagged representation.  The worked examples in the
 * comments below assume 32-bit VALUEs. */
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1)
         * `-1.succ` is of course 0. */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
         * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
         * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
         * == lx*2 + ly*2 + 1
         * == (lx*2+1) + (ly*2+1) - 1
         * == x + y - 1
         *
         * Here, if we put y := INT2FIX(1):
         *
         * == x + INT2FIX(1) - 1
         * == x + 2 .
         */
        return x + 2;
    }
}
5897
5898static VALUE
5899vm_opt_succ(VALUE recv)
5900{
5901 if (FIXNUM_P(recv) &&
5902 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5903 return fix_succ(recv);
5904 }
5905 else if (SPECIAL_CONST_P(recv)) {
5906 return Qundef;
5907 }
5908 else if (RBASIC_CLASS(recv) == rb_cString &&
5909 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5910 return rb_str_succ(recv);
5911 }
5912 else {
5913 return Qundef;
5914 }
5915}
5916
5917static VALUE
5918vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5919{
5920 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5921 return RBOOL(!RTEST(recv));
5922 }
5923 else {
5924 return Qundef;
5925 }
5926}
5927
5928static VALUE
5929vm_opt_regexpmatch2(VALUE recv, VALUE obj)
5930{
5931 if (SPECIAL_CONST_P(recv)) {
5932 return Qundef;
5933 }
5934 else if (RBASIC_CLASS(recv) == rb_cString &&
5935 CLASS_OF(obj) == rb_cRegexp &&
5936 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5937 return rb_reg_match(obj, recv);
5938 }
5939 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
5940 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5941 return rb_reg_match(recv, obj);
5942 }
5943 else {
5944 return Qundef;
5945 }
5946}
5947
5948rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
5949
5950NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
5951
/* Fire one trace event (`target_event`, which must be a single bit masked
 * by `pc_events`) against the global hooks and then the local hooks.
 * `local_hooks_ptr` is a pointer-to-pointer because running the global
 * hooks can allocate or free the local hook list; it is re-read after the
 * global hooks run (see comment below). */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    /* exactly one event bit may be fired per call */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    // Load here since global hook above can add and free local hooks
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
5981
5982// Return true if given cc has cfunc which is NOT handled by opt_send_without_block.
5983bool
5984rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
5985{
5986 switch (insn) {
5987 case BIN(opt_eq):
5988 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
5989 case BIN(opt_nil_p):
5990 return check_cfunc(vm_cc_cme(cc), rb_false);
5991 case BIN(opt_not):
5992 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
5993 default:
5994 return false;
5995 }
5996}
5997
5998#define VM_TRACE_HOOK(target_event, val) do { \
5999 if ((pc_events & (target_event)) & enabled_flags) { \
6000 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6001 } \
6002} while (0)
6003
6004static void
6005vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6006{
6007 const VALUE *pc = reg_cfp->pc;
6008 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6009 rb_event_flag_t global_events = enabled_flags;
6010
6011 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6012 return;
6013 }
6014 else {
6015 const rb_iseq_t *iseq = reg_cfp->iseq;
6016 VALUE iseq_val = (VALUE)iseq;
6017 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6018 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
6019 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6020 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6021 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6022 rb_hook_list_t *bmethod_local_hooks = NULL;
6023 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6024 rb_event_flag_t bmethod_local_events = 0;
6025 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6026 enabled_flags |= iseq_local_events;
6027
6028 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6029
6030 if (bmethod_frame) {
6031 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6032 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6033 bmethod_local_hooks = me->def->body.bmethod.hooks;
6034 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6035 if (bmethod_local_hooks) {
6036 bmethod_local_events = bmethod_local_hooks->events;
6037 }
6038 }
6039
6040
6041 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6042#if 0
6043 /* disable trace */
6044 /* TODO: incomplete */
6045 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6046#else
6047 /* do not disable trace because of performance problem
6048 * (re-enable overhead)
6049 */
6050#endif
6051 return;
6052 }
6053 else if (ec->trace_arg != NULL) {
6054 /* already tracing */
6055 return;
6056 }
6057 else {
6058 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6059 /* Note, not considering iseq local events here since the same
6060 * iseq could be used in multiple bmethods. */
6061 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6062
6063 if (0) {
6064 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6065 (int)pos,
6066 (int)pc_events,
6067 RSTRING_PTR(rb_iseq_path(iseq)),
6068 (int)rb_iseq_line_no(iseq, pos),
6069 RSTRING_PTR(rb_iseq_label(iseq)));
6070 }
6071 VM_ASSERT(reg_cfp->pc == pc);
6072 VM_ASSERT(pc_events != 0);
6073
6074 /* check traces */
6075 if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
6076 /* b_call instruction running as a method. Fire call event. */
6077 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
6078 }
6080 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
6081 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6082 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6083 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
6084 if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
6085 /* b_return instruction running as a method. Fire return event. */
6086 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
6087 }
6088
6089 // Pin the iseq since `local_hooks_ptr` points inside the iseq's slot on the GC heap.
6090 // We need the pointer to stay valid in case compaction happens in a trace hook.
6091 //
6092 // Similar treatment is unnecessary for `bmethod_local_hooks_ptr` since
6093 // storage for `rb_method_definition_t` is not on the GC heap.
6094 RB_GC_GUARD(iseq_val);
6095 }
6096 }
6097}
6098#undef VM_TRACE_HOOK
6099
6100#if VM_CHECK_MODE > 0
6101NORETURN( NOINLINE( COLDFUNC
6102void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6103
/* Initialize the stack canary with random bytes; instructions then assert
 * the canary slot is untouched to catch stack over-/under-writes. */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; // valid VALUE (Fixnum)

    vm_stack_canary_was_born = true;
    /* n is the fill_random_bytes status; 0 means success */
    VM_ASSERT(n == 0);
}
6114
6115#ifndef MJIT_HEADER
/* Report (via rb_bug) that the stack canary was overwritten while
 * executing instruction `i`, including the inspected clobbering value. */
MJIT_FUNC_EXPORTED void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}
6127#endif
6128
6129#else
void Init_vm_stack_canary(void) { /* canary checks compiled out when VM_CHECK_MODE == 0 */ }
6131#endif
6132
6133
6134/* a part of the following code is generated by this ruby script:
6135
613616.times{|i|
6137 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
6138 typedef_args.prepend(", ") if i != 0
6139 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
6140 call_args.prepend(", ") if i != 0
6141 puts %Q{
6142static VALUE
6143builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6144{
6145 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
6146 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
6147}}
6148}
6149
6150puts
6151puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
615216.times{|i|
6153 puts " builtin_invoker#{i},"
6154}
6155puts "};"
6156*/
6157
/* builtin_invoker0..15 cast the opaque `funcptr` to a function taking
 * (ec, self) plus N leading VALUE arguments and invoke it with
 * argv[0..N-1].  These bodies are machine-generated by the Ruby script in
 * the comment above; keep them in sync with it rather than hand-editing. */
static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}
6262
6263static VALUE
6264builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6265{
6266 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
6267 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6268}
6269
6270typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
6271
6272static builtin_invoker
6273lookup_builtin_invoker(int argc)
6274{
6275 static const builtin_invoker invokers[] = {
6276 builtin_invoker0,
6277 builtin_invoker1,
6278 builtin_invoker2,
6279 builtin_invoker3,
6280 builtin_invoker4,
6281 builtin_invoker5,
6282 builtin_invoker6,
6283 builtin_invoker7,
6284 builtin_invoker8,
6285 builtin_invoker9,
6286 builtin_invoker10,
6287 builtin_invoker11,
6288 builtin_invoker12,
6289 builtin_invoker13,
6290 builtin_invoker14,
6291 builtin_invoker15,
6292 };
6293
6294 return invokers[argc];
6295}
6296
6297static inline VALUE
6298invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6299{
6300 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p; // Verify an assumption of `Primitive.attr! 'inline'`
6301 SETUP_CANARY(canary_p);
6302 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6303 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6304 return ret;
6305}
6306
6307static VALUE
6308vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6309{
6310 return invoke_bf(ec, cfp, bf, argv);
6311}
6312
6313static VALUE
6314vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
6315{
6316 if (0) { // debug print
6317 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
6318 for (int i=0; i<bf->argc; i++) {
6319 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6320 }
6321 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6322 }
6323
6324 if (bf->argc == 0) {
6325 return invoke_bf(ec, cfp, bf, NULL);
6326 }
6327 else {
6328 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6329 return invoke_bf(ec, cfp, bf, argv);
6330 }
6331}
6332
6333// for __builtin_inline!()
6334
6335VALUE
6336rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
6337{
6338 const rb_control_frame_t *cfp = ec->cfp;
6339 return cfp->ep[index];
6340}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition: assert.h:177
#define RUBY_EVENT_END
Encountered the end of a class clause.
Definition: event.h:36
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition: event.h:39
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition: event.h:52
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition: event.h:35
#define RUBY_EVENT_LINE
Encountered a new line.
Definition: event.h:34
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition: event.h:38
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition: event.h:40
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
Definition: event.h:51
uint32_t rb_event_flag_t
Represents event(s).
Definition: event.h:103
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition: event.h:37
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition: class.c:2201
VALUE rb_module_new(void)
Creates a new, anonymous module.
Definition: class.c:979
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
Definition: class.c:879
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
Definition: class.c:858
#define TYPE(_)
Old name of rb_type.
Definition: value_type.h:107
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
Definition: fl_type.h:58
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
Definition: fl_type.h:67
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition: memory.h:397
#define ALLOC
Old name of RB_ALLOC.
Definition: memory.h:394
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition: double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition: value_type.h:78
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition: long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition: value_type.h:72
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition: value_type.h:64
#define T_IMEMO
Old name of RUBY_T_IMEMO.
Definition: value_type.h:67
#define ID2SYM
Old name of RB_ID2SYM.
Definition: symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition: value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
Definition: value_type.h:79
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
Definition: value_type.h:63
#define SYM2ID
Old name of RB_SYM2ID.
Definition: symbol.h:45
#define CLASS_OF
Old name of rb_class_of.
Definition: globals.h:203
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition: array.h:653
#define FIXABLE
Old name of RB_FIXABLE.
Definition: fixnum.h:25
#define LONG2FIX
Old name of RB_INT2FIX.
Definition: long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition: int.h:41
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition: value_type.h:70
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition: assume.h:27
#define FIX2ULONG
Old name of RB_FIX2ULONG.
Definition: long.h:47
#define T_TRUE
Old name of RUBY_T_TRUE.
Definition: value_type.h:81
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition: value_type.h:66
#define T_HASH
Old name of RUBY_T_HASH.
Definition: value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition: memory.h:393
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition: fl_type.h:140
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition: array.h:652
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition: long.h:50
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition: error.h:38
#define T_FALSE
Old name of RUBY_T_FALSE.
Definition: value_type.h:61
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition: long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition: value_type.h:56
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition: value_type.h:75
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition: value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition: double.h:29
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition: value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition: value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition: fl_type.h:139
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
Definition: fl_type.h:70
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition: fl_type.h:138
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition: value_type.h:88
void rb_notimplement(void)
Definition: error.c:3191
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
Definition: error.c:3148
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition: eval.c:684
void rb_bug(const char *fmt,...)
Interpreter panic switch.
Definition: error.c:794
VALUE rb_eTypeError
TypeError exception.
Definition: error.c:1091
VALUE rb_eFatal
fatal exception.
Definition: error.c:1087
VALUE rb_eNoMethodError
NoMethodError exception.
Definition: error.c:1099
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition: eval.c:697
VALUE rb_eRuntimeError
RuntimeError exception.
Definition: error.c:1089
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
Definition: error.c:411
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
Definition: error.c:3496
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition: error.c:1142
VALUE rb_eArgError
ArgumentError exception.
Definition: error.c:1092
VALUE rb_ensure(VALUE(*b_proc)(VALUE), VALUE data1, VALUE(*e_proc)(VALUE), VALUE data2)
An equivalent to ensure clause.
Definition: eval.c:993
VALUE rb_cClass
Class class.
Definition: object.c:54
VALUE rb_cArray
Array class.
Definition: array.c:40
VALUE rb_cObject
Documented in include/ruby/internal/globals.h.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
Definition: object.c:1939
VALUE rb_cRegexp
Regexp class.
Definition: re.c:2544
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
Definition: object.c:1194
VALUE rb_cHash
Hash class.
Definition: hash.c:94
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition: object.c:190
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition: object.c:600
VALUE rb_cBasicObject
BasicObject class.
Definition: object.c:50
VALUE rb_cModule
Module class.
Definition: object.c:53
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
Definition: object.c:180
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition: object.c:787
VALUE rb_cFloat
Float class.
Definition: numeric.c:191
VALUE rb_cProc
Proc class.
Definition: proc.c:52
VALUE rb_cString
String class.
Definition: string.c:79
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition: rgengc.h:232
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition: rgengc.h:220
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition: error.h:35
#define rb_check_frozen
Just another name of rb_check_frozen.
Definition: error.h:264
#define rb_check_frozen_internal(obj)
Definition: error.h:247
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition: proc.c:1027
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
Definition: re.c:1886
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
Definition: re.c:3590
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
Definition: re.c:1861
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
Definition: re.c:1943
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
Definition: re.c:1910
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
Definition: re.c:1960
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition: string.c:3323
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
Definition: string.c:11532
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
Definition: string.c:4793
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition: string.c:3291
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition: string.c:3423
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
Definition: string.h:1656
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
Definition: string.c:2163
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
Definition: symbol.c:844
void rb_thread_schedule(void)
Tries to switch to another thread.
Definition: thread.c:1467
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition: variable.c:2883
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
Definition: variable.c:1223
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition: variable.c:1593
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
Definition: variable.c:3664
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
Definition: variable.c:3718
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition: variable.c:1215
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
Definition: variable.c:3333
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
Definition: variable.c:2718
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition: variable.c:134
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition: variable.c:2889
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
Definition: variable.c:228
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
Definition: variable.c:1610
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition: variable.c:3197
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
Definition: variable.c:3740
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition: variable.c:185
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
Definition: variable.c:3191
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition: vm_eval.c:665
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition: vm_method.c:1165
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition: vm_method.c:1699
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition: symbol.c:1084
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition: symbol.c:942
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
Definition: symbol.c:959
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
Definition: ractor.h:249
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition: ractor.h:235
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
Definition: sprintf.c:1219
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
Definition: sprintf.c:1242
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition: memory.h:366
#define ALLOCA_N(type, n)
Definition: memory.h:286
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition: memory.h:161
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
Definition: memory.h:378
VALUE type(ANYARGS)
ANYARGS-ed function type.
Definition: cxxanyargs.hpp:56
#define RARRAY_LEN
Just another name of rb_array_len.
Definition: rarray.h:68
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
Definition: rarray.h:70
#define RARRAY_AREF(a, i)
Definition: rarray.h:583
#define RBASIC(obj)
Convenient casting macro.
Definition: rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition: rclass.h:44
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition: rhash.h:82
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition: rhash.h:92
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
Definition: rstring.h:95
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition: scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition: stdarg.h:64
Definition: hash.h:43
Definition: iseq.h:263
Definition: vm_core.h:247
Definition: vm_core.h:281
Definition: vm_core.h:276
Definition: method.h:62
Definition: constant.h:33
CREF (Class REFerence)
Definition: method.h:44
Definition: class.h:29
Definition: method.h:54
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition: method.h:134
rb_cref_t * cref
class reference, should be marked
Definition: method.h:135
Definition: shape.h:42
Definition: st.h:79
IFUNC (Internal FUNCtion)
Definition: imemo.h:84
SVAR (Special VARiable)
Definition: imemo.h:53
const VALUE cref_or_me
class reference or rb_method_entry_t
Definition: imemo.h:55
THROW_DATA.
Definition: imemo.h:62
Definition: vm_core.h:285
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
Definition: value.h:63
uintptr_t VALUE
Type that represents a Ruby object.
Definition: value.h:40
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition: value.h:52