11#include "ruby/internal/config.h"
16#include "debug_counter.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
31#include "insns_info.inc"
38 int argc,
const VALUE *argv,
int priv);
50ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73NORETURN(
static void vm_stackoverflow(
void));
75NOINLINE(
static COLDFUNC
void vm_stackoverflow(
void));
81 ec_stack_overflow(GET_EC(), TRUE);
89 rb_bug(
"system stack overflow during GC. Faulty native extension?");
92 ec->raised_flag = RAISED_STACKOVERFLOW;
93 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
94 EC_JUMP_TAG(ec, TAG_RAISE);
97 ec_stack_overflow(ec, TRUE);
99 ec_stack_overflow(ec, FALSE);
106callable_class_p(
VALUE klass)
108#if VM_CHECK_MODE >= 2
109 if (!klass)
return FALSE;
110 switch (RB_BUILTIN_TYPE(klass)) {
137 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
139 if (callable_class_p(cme->defined_class)) {
149vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
151 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
152 enum imemo_type cref_or_me_type = imemo_env;
154 if (RB_TYPE_P(cref_or_me,
T_IMEMO)) {
155 cref_or_me_type = imemo_type(cref_or_me);
157 if (
type & VM_FRAME_FLAG_BMETHOD) {
161 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
162 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
164 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
165 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
169 if (cref_or_me_type != imemo_ment) {
170 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
174 if (req_cref && cref_or_me_type != imemo_cref) {
175 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
178 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
179 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
183 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
189 if (cref_or_me_type == imemo_ment) {
192 if (!callable_method_entry_p(me)) {
193 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
197 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
198 VM_ASSERT(iseq == NULL ||
199 RBASIC_CLASS((
VALUE)iseq) == 0 ||
200 RUBY_VM_NORMAL_ISEQ_P(iseq)
204 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
214 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
217#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
219 vm_check_frame_detail(type, req_block, req_me, req_cref, \
220 specval, cref_or_me, is_cframe, iseq); \
222 switch (given_magic) {
224 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
226 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
227 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
230 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
231 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
232 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
234 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
239static VALUE vm_stack_canary;
240static bool vm_stack_canary_was_born =
false;
243MJIT_FUNC_EXPORTED
void
249 if (! LIKELY(vm_stack_canary_was_born)) {
252 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
256 else if (! (iseq = GET_ISEQ())) {
259 else if (LIKELY(sp[0] != vm_stack_canary)) {
268 const VALUE *orig = rb_iseq_original_iseq(iseq);
269 const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
270 const ptrdiff_t pos = GET_PC() - encoded;
271 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
272 const char *name = insn_name(insn);
273 const VALUE iseqw = rb_iseqw_new(iseq);
275 const char *stri = rb_str_to_cstr(inspection);
276 const VALUE disasm = rb_iseq_disasm(iseq);
277 const char *strd = rb_str_to_cstr(disasm);
283 "We are killing the stack canary set by %s, "
284 "at %s@pc=%"PRIdPTR
"\n"
285 "watch out the C stack trace.\n"
287 name, stri, pos, strd);
291#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
294#define vm_check_canary(ec, sp)
295#define vm_check_frame(a, b, c, d)
300vm_push_frame_debug_counter_inc(
307 RB_DEBUG_COUNTER_INC(frame_push);
309 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
310 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
311 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
314 RB_DEBUG_COUNTER_INC(frame_R2R);
317 RB_DEBUG_COUNTER_INC(frame_R2C);
322 RB_DEBUG_COUNTER_INC(frame_C2R);
325 RB_DEBUG_COUNTER_INC(frame_C2C);
330 switch (
type & VM_FRAME_MAGIC_MASK) {
331 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
332 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
333 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
334 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
335 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
336 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
337 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
338 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
339 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
345#define vm_push_frame_debug_counter_inc(ec, cfp, t)
348STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
349STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
350STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
366 vm_check_frame(
type, specval, cref_or_me, iseq);
367 VM_ASSERT(local_size >= 0);
370 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
371 vm_check_canary(ec, sp);
376 for (
int i=0; i < local_size; i++) {
405 vm_push_frame_debug_counter_inc(ec, cfp,
type);
413 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
414 if (VMDEBUG == 2) SDR();
416 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
423 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
425 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
426 if (VMDEBUG == 2) SDR();
428 RUBY_VM_CHECK_INTS(ec);
429 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
431 return flags & VM_FRAME_FLAG_FINISH;
437 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
444 VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
446 rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);
450 dmy_iseq->body = dmy_body;
451 dmy_body->type = ISEQ_TYPE_TOP;
452 dmy_body->location.pathobj = fname;
456 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
458 VM_BLOCK_HANDLER_NONE,
470rb_arity_error_new(
int argc,
int min,
int max)
472 VALUE err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
487rb_error_arity(
int argc,
int min,
int max)
494NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
497vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
500 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
501 VM_FORCE_WRITE(&ep[index], v);
502 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
503 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
507vm_env_write(
const VALUE *ep,
int index,
VALUE v)
509 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
510 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
511 VM_STACK_ENV_WRITE(ep, index, v);
514 vm_env_write_slowpath(ep, index, v);
521 if (block_handler == VM_BLOCK_HANDLER_NONE) {
525 switch (vm_block_handler_type(block_handler)) {
526 case block_handler_type_iseq:
527 case block_handler_type_ifunc:
528 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
529 case block_handler_type_symbol:
530 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
531 case block_handler_type_proc:
532 return VM_BH_TO_PROC(block_handler);
534 VM_UNREACHABLE(rb_vm_bh_to_procval);
543vm_svar_valid_p(
VALUE svar)
546 switch (imemo_type(svar)) {
555 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
565 if (lep && (ec == NULL || ec->root_lep != lep)) {
566 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
569 svar = ec->root_svar;
572 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
580 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
582 if (lep && (ec == NULL || ec->root_lep != lep)) {
583 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
586 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
593 const struct vm_svar *svar = lep_svar(ec, lep);
598 case VM_SVAR_LASTLINE:
599 return svar->lastline;
600 case VM_SVAR_BACKREF:
601 return svar->backref;
603 const VALUE ary = svar->others;
609 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
624 struct vm_svar *svar = lep_svar(ec, lep);
627 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
631 case VM_SVAR_LASTLINE:
634 case VM_SVAR_BACKREF:
638 VALUE ary = svar->others;
643 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
654 val = lep_svar_get(ec, lep, key);
657 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
674 rb_bug(
"unexpected back-ref");
686check_method_entry(
VALUE obj,
int can_be_svar)
688 if (obj ==
Qfalse)
return NULL;
691 if (!RB_TYPE_P(obj,
T_IMEMO))
rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
694 switch (imemo_type(obj)) {
705 rb_bug(
"check_method_entry: svar should not be there:");
714 const VALUE *ep = cfp->ep;
717 while (!VM_ENV_LOCAL_P(ep)) {
718 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
719 ep = VM_ENV_PREV_EP(ep);
722 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
728 switch (me->def->type) {
729 case VM_METHOD_TYPE_ISEQ:
730 return me->def->body.iseq.
iseqptr;
739 switch (me->def->type) {
740 case VM_METHOD_TYPE_ISEQ:
741 return me->def->body.iseq.
cref;
747#if VM_CHECK_MODE == 0
751check_cref(
VALUE obj,
int can_be_svar)
753 if (obj ==
Qfalse)
return NULL;
756 if (!RB_TYPE_P(obj,
T_IMEMO))
rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
759 switch (imemo_type(obj)) {
770 rb_bug(
"check_method_entry: svar should not be there:");
777vm_env_cref(
const VALUE *ep)
781 while (!VM_ENV_LOCAL_P(ep)) {
782 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
783 ep = VM_ENV_PREV_EP(ep);
786 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
790is_cref(
const VALUE v,
int can_be_svar)
793 switch (imemo_type(v)) {
806vm_env_cref_by_cref(
const VALUE *ep)
808 while (!VM_ENV_LOCAL_P(ep)) {
809 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
810 ep = VM_ENV_PREV_EP(ep);
812 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
816cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
818 const VALUE v = *vptr;
822 switch (imemo_type(v)) {
825 new_cref = vm_cref_dup(cref);
830 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
835 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
839 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
848vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
850 if (vm_env_cref_by_cref(ep)) {
854 while (!VM_ENV_LOCAL_P(ep)) {
855 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
856 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
859 ep = VM_ENV_PREV_EP(ep);
861 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
862 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
865 rb_bug(
"vm_cref_dup: unreachable");
870vm_get_cref(
const VALUE *ep)
878 rb_bug(
"vm_get_cref: unreachable");
883rb_vm_get_cref(
const VALUE *ep)
885 return vm_get_cref(ep);
896 return vm_get_cref(cfp->ep);
900vm_get_const_key_cref(
const VALUE *ep)
907 FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
910 cref = CREF_NEXT(cref);
923 if (CREF_CLASS(cref) == old_klass) {
924 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
925 *new_cref_ptr = new_cref;
928 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
929 cref = CREF_NEXT(cref);
930 *new_cref_ptr = new_cref;
931 new_cref_ptr = &new_cref->next;
933 *new_cref_ptr = NULL;
942 prev_cref = vm_env_cref(ep);
948 prev_cref = vm_env_cref(cfp->ep);
952 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
956vm_get_cbase(
const VALUE *ep)
960 return CREF_CLASS_FOR_DEFINITION(cref);
964vm_get_const_base(
const VALUE *ep)
969 if (!CREF_PUSHED_BY_EVAL(cref)) {
970 return CREF_CLASS_FOR_DEFINITION(cref);
972 cref = CREF_NEXT(cref);
979vm_check_if_namespace(
VALUE klass)
987vm_ensure_not_refinement_module(
VALUE self)
989 if (RB_TYPE_P(self,
T_MODULE) &&
FL_TEST(self, RMODULE_IS_REFINEMENT)) {
990 rb_warn(
"not defined at the refinement, but at the outer class/module");
1006 if (
NIL_P(orig_klass) && allow_nil) {
1008 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1012 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1013 root_cref = CREF_NEXT(root_cref);
1016 while (cref && CREF_NEXT(cref)) {
1017 if (CREF_PUSHED_BY_EVAL(cref)) {
1021 klass = CREF_CLASS(cref);
1023 cref = CREF_NEXT(cref);
1025 if (!
NIL_P(klass)) {
1029 if ((ce = rb_const_lookup(klass,
id))) {
1030 rb_const_warn_if_deprecated(ce, klass,
id);
1033 if (am == klass)
break;
1035 if (is_defined)
return 1;
1036 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1038 goto search_continue;
1045 if (UNLIKELY(!rb_ractor_main_p())) {
1048 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass),
rb_id2name(
id));
1059 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1060 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1074 vm_check_if_namespace(orig_klass);
1076 return rb_public_const_defined_from(orig_klass,
id);
1079 return rb_public_const_get_from(orig_klass,
id);
1087 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1095 int allow_nil = TRUE;
1096 if (segments[0] == idNULL) {
1101 while (segments[idx]) {
1102 ID id = segments[idx++];
1103 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1116 rb_bug(
"vm_get_cvar_base: no cref");
1119 while (CREF_NEXT(cref) &&
1121 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1122 cref = CREF_NEXT(cref);
1124 if (top_level_raise && !CREF_NEXT(cref)) {
1128 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1136ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1138fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1141 vm_cc_attr_index_set(cc, index, shape_id);
1144 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1148#define ractor_incidental_shareable_p(cond, val) \
1149 (!(cond) || rb_ractor_shareable_p(val))
1150#define ractor_object_incidental_shareable_p(obj, val) \
1151 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1153#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1161 shape_id_t shape_id;
1168#if SHAPE_IN_BASIC_FLAGS
1169 shape_id = RBASIC_SHAPE_ID(obj);
1174 ivar_list = ROBJECT_IVPTR(obj);
1177#if !SHAPE_IN_BASIC_FLAGS
1178 shape_id = ROBJECT_SHAPE_ID(obj);
1184 if (UNLIKELY(!rb_ractor_main_p())) {
1194 ivar_list = RCLASS_IVPTR(obj);
1196#if !SHAPE_IN_BASIC_FLAGS
1197 shape_id = RCLASS_SHAPE_ID(obj);
1205 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1206#if !SHAPE_IN_BASIC_FLAGS
1207 shape_id = ivtbl->shape_id;
1209 ivar_list = ivtbl->ivptr;
1216 shape_id_t cached_id;
1220 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1223 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1226 if (LIKELY(cached_id == shape_id)) {
1227 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1229 if (index == ATTR_INDEX_NOT_SET) {
1233 val = ivar_list[index];
1239 if (cached_id != INVALID_SHAPE_ID) {
1240 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1243 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1247 if (cached_id != INVALID_SHAPE_ID) {
1248 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1251 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1256 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1258 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1259 if (!rb_id_table_lookup(ROBJECT_IV_HASH(obj),
id, &val)) {
1264 if (rb_shape_get_iv_index(shape,
id, &index)) {
1267 fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);
1270 val = ivar_list[index];
1275 vm_cc_attr_index_initialize(cc, shape_id);
1278 vm_ic_attr_index_initialize(ic, shape_id);
1293 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1304populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1306 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1310 vm_cc_attr_index_set(cc, index, next_shape_id);
1313 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1330 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1332 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1334 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1335 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1338 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1347 shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
1348 rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
1351 if (rb_shape_get_iv_index(next_shape,
id, &index)) {
1352 if (index >= MAX_IVARS) {
1356 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1359 rb_bug(
"didn't find the id\n");
1366 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1373 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1379 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1382NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1384vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1386#if SHAPE_IN_BASIC_FLAGS
1387 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1389 shape_id_t shape_id = rb_generic_shape_id(obj);
1395 if (shape_id == dest_shape_id) {
1396 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1399 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1401 else if (dest_shape_id != INVALID_SHAPE_ID) {
1402 rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1403 shape_id_t source_shape_id = dest_shape->parent_id;
1405 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && dest_shape->type == SHAPE_IVAR) {
1406 ivtbl = rb_ensure_generic_iv_list_size(obj, index + 1);
1407#if SHAPE_IN_BASIC_FLAGS
1408 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1410 ivtbl->shape_id = dest_shape_id;
1421 VALUE *ptr = ivtbl->ivptr;
1425 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1431vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1439 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1440 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1442 if (LIKELY(shape_id == dest_shape_id)) {
1443 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1446 else if (dest_shape_id != INVALID_SHAPE_ID) {
1447 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1448 shape_id_t source_shape_id = dest_shape->parent_id;
1450 if (shape_id == source_shape_id && dest_shape->edge_name ==
id) {
1451 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1453 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1455 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1466 VALUE *ptr = ROBJECT_IVPTR(obj);
1471 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1477 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1489 VALUE defined_class = 0;
1492 if (RB_TYPE_P(defined_class,
T_ICLASS)) {
1493 defined_class =
RBASIC(defined_class)->klass;
1496 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1498 rb_bug(
"the cvc table should be set");
1502 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1503 rb_bug(
"should have cvar cache entry");
1507 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1520 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && LIKELY(rb_ractor_main_p())) {
1521 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1523 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1529 cref = vm_get_cref(GET_EP());
1530 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1532 return update_classvariable_cache(iseq, klass,
id, ic);
1538 return vm_getclassvariable(iseq, cfp,
id, ic);
1546 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
1547 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1549 rb_class_ivar_set(ic->entry->class_value,
id, val);
1553 cref = vm_get_cref(GET_EP());
1554 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1558 update_classvariable_cache(iseq, klass,
id, ic);
1564 vm_setclassvariable(iseq, cfp,
id, val, ic);
1570 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE);
1576 if (RB_SPECIAL_CONST_P(obj)) {
1581 shape_id_t dest_shape_id;
1583 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1585 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1592 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1596 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1603 vm_setinstancevariable(iseq, obj,
id, val, ic);
1612 ec->tag->state =
FIX2INT(err);
1615 ec->tag->state = TAG_THROW;
1617 else if (THROW_DATA_P(err)) {
1618 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1621 ec->tag->state = TAG_RAISE;
1628 const int flag,
const VALUE throwobj)
1636 else if (state == TAG_BREAK) {
1638 const VALUE *ep = GET_EP();
1639 const rb_iseq_t *base_iseq = GET_ISEQ();
1640 escape_cfp = reg_cfp;
1642 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1643 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1644 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1645 ep = escape_cfp->ep;
1646 base_iseq = escape_cfp->iseq;
1649 ep = VM_ENV_PREV_EP(ep);
1650 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1651 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1652 VM_ASSERT(escape_cfp->iseq == base_iseq);
1656 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1662 ep = VM_ENV_PREV_EP(ep);
1664 while (escape_cfp < eocfp) {
1665 if (escape_cfp->ep == ep) {
1666 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1667 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1672 for (i=0; i < ct->size; i++) {
1674 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1676 if (entry->type == CATCH_TYPE_BREAK &&
1677 entry->iseq == base_iseq &&
1678 entry->start < epc && entry->end >= epc) {
1679 if (entry->cont == epc) {
1688 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1693 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1696 else if (state == TAG_RETRY) {
1697 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1699 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1701 else if (state == TAG_RETURN) {
1702 const VALUE *current_ep = GET_EP();
1703 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1704 int in_class_frame = 0;
1706 escape_cfp = reg_cfp;
1709 while (!VM_ENV_LOCAL_P(ep)) {
1710 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1713 ep = VM_ENV_PREV_EP(ep);
1717 while (escape_cfp < eocfp) {
1718 const VALUE *lep = VM_CF_LEP(escape_cfp);
1724 if (lep == target_lep &&
1725 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1726 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1731 if (lep == target_lep) {
1732 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1734 if (in_class_frame) {
1739 const VALUE *tep = current_ep;
1741 while (target_lep != tep) {
1742 if (escape_cfp->ep == tep) {
1744 if (tep == target_ep) {
1748 goto unexpected_return;
1751 tep = VM_ENV_PREV_EP(tep);
1755 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1756 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1758 case ISEQ_TYPE_MAIN:
1760 if (in_class_frame)
goto unexpected_return;
1761 if (target_ep == NULL) {
1765 goto unexpected_return;
1769 case ISEQ_TYPE_EVAL:
1770 case ISEQ_TYPE_CLASS:
1779 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1780 if (target_ep == NULL) {
1784 goto unexpected_return;
1788 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1791 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1797 rb_bug(
"isns(throw): unsupported throw type");
1800 ec->tag->state = state;
1801 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1806 rb_num_t throw_state,
VALUE throwobj)
1808 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1809 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1812 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1815 return vm_throw_continue(ec, throwobj);
1820vm_expandarray(
VALUE *sp,
VALUE ary, rb_num_t num,
int flag)
1822 int is_splat = flag & 0x01;
1823 rb_num_t space_size = num + is_splat;
1824 VALUE *base = sp - 1;
1827 const VALUE obj = ary;
1829 if (!RB_TYPE_P(ary,
T_ARRAY) &&
NIL_P(ary = rb_check_array_type(ary))) {
1839 if (space_size == 0) {
1842 else if (flag & 0x02) {
1847 for (i=0; i<num-len; i++) {
1851 for (j=0; i<num; i++, j++) {
1852 VALUE v = ptr[len - j - 1];
1862 VALUE *bptr = &base[space_size - 1];
1864 for (i=0; i<num; i++) {
1866 for (; i<num; i++) {
1875 *bptr = rb_ary_new();
1893#if VM_CHECK_MODE > 0
1894 ccs->debug_sig = ~(
VALUE)ccs;
1900 ccs->entries = NULL;
1907 if (! vm_cc_markable(cc)) {
1910 else if (! vm_ci_markable(ci)) {
1914 if (UNLIKELY(ccs->len == ccs->capa)) {
1915 if (ccs->capa == 0) {
1917 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
1921 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
1924 VM_ASSERT(ccs->len < ccs->capa);
1926 const int pos = ccs->len++;
1930 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
1936#if VM_CHECK_MODE > 0
1940 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
1941 for (
int i=0; i<ccs->len; i++) {
1942 vm_ci_dump(ccs->entries[i].ci);
1943 rp(ccs->entries[i].cc);
1950 VM_ASSERT(vm_ccs_p(ccs));
1951 VM_ASSERT(ccs->len <= ccs->capa);
1953 for (
int i=0; i<ccs->len; i++) {
1954 const struct rb_callinfo *ci = ccs->entries[i].ci;
1957 VM_ASSERT(vm_ci_p(ci));
1958 VM_ASSERT(vm_ci_mid(ci) == mid);
1959 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
1960 VM_ASSERT(vm_cc_class_check(cc, klass));
1961 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
1974 const ID mid = vm_ci_mid(ci);
1975 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
1980 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
1982 const int ccs_len = ccs->len;
1984 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
1985 rb_vm_ccs_free(ccs);
1986 rb_id_table_delete(cc_tbl, mid);
1990 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
1992 for (
int i=0; i<ccs_len; i++) {
1993 const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
1994 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
1996 VM_ASSERT(vm_ci_p(ccs_ci));
1997 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2000 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2002 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2003 VM_ASSERT(ccs_cc->klass == klass);
2004 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2013 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2016 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2022 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2024 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2027 cme = rb_callable_method_entry(klass, mid);
2030 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2034 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2035 return &vm_empty_cc;
2038 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2043 VM_ASSERT(cc_tbl != NULL);
2045 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2051 ccs = vm_ccs_create(klass, cme);
2052 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2056 cme = check_overloaded_cme(cme, ci);
2058 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
2059 vm_ccs_push(klass, ccs, ci, cc);
2061 VM_ASSERT(vm_cc_cme(cc) != NULL);
2062 VM_ASSERT(cme->called_id == mid);
2063 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2077 cc = vm_search_cc(klass, ci);
2080 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2081 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2082 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2083 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2084 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2095#if USE_DEBUG_COUNTER
2099 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2101#if OPT_INLINE_METHOD_CACHE
2112#if USE_DEBUG_COUNTER
2113 if (old_cc == empty_cc) {
2115 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2117 else if (old_cc == cc) {
2118 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2120 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2121 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2123 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2124 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2125 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2128 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2133 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2134 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2147#if OPT_INLINE_METHOD_CACHE
2148 if (LIKELY(vm_cc_class_check(cc, klass))) {
2149 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2150 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2151 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2152 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2153 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2154 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2158 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2161 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2165 return vm_search_method_slowpath0(cd_owner, cd, klass);
2172 VM_ASSERT(klass !=
Qfalse);
2175 return vm_search_method_fastpath(cd_owner, cd, klass);
2178#if __has_attribute(transparent_union)
2181 VALUE (*f00)(VALUE);
2182 VALUE (*f01)(VALUE, VALUE);
2183 VALUE (*f02)(VALUE, VALUE, VALUE);
2184 VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
2185 VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
2186 VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2187 VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2188 VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2189 VALUE (*f08)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2190 VALUE (*f09)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2191 VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2192 VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2193 VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2194 VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2195 VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2196 VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2197 VALUE (*fm1)(int,
union { VALUE *x;
const VALUE *y; }
__attribute__((__transparent_union__)), VALUE);
2210 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2211 VM_ASSERT(callable_method_entry_p(me));
2213 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2217#if __has_attribute(transparent_union)
2218 return me->def->body.cfunc.func == func.anyargs;
2220 return me->def->body.cfunc.func == func;
2229 VM_ASSERT(iseq != NULL);
2231 return check_cfunc(vm_cc_cme(cc), func);
2234#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2266opt_equality_specialized(
VALUE recv,
VALUE obj)
2268 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2269 goto compare_by_identity;
2271 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2272 goto compare_by_identity;
2275 goto compare_by_identity;
2280 else if (RBASIC_CLASS(recv) ==
rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
2284#if MSC_VERSION_BEFORE(1300)
2288 else if (isnan(b)) {
2293 return RBOOL(a == b);
2295 else if (RBASIC_CLASS(recv) ==
rb_cString && EQ_UNREDEFINED_P(STRING)) {
2299 else if (RB_TYPE_P(obj,
T_STRING)) {
2300 return rb_str_eql_internal(obj, recv);
2305 compare_by_identity:
2306 return RBOOL(recv == obj);
2312 VM_ASSERT(cd_owner != NULL);
2314 VALUE val = opt_equality_specialized(recv, obj);
2315 if (!UNDEF_P(val))
return val;
2317 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2321 return RBOOL(recv == obj);
2325#undef EQ_UNREDEFINED_P
2330NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2333opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2335 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2337 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2338 return RBOOL(recv == obj);
2348 VALUE val = opt_equality_specialized(recv, obj);
2349 if (!UNDEF_P(val)) {
2353 return opt_equality_by_mid_slowpath(recv, obj, mid);
2360 return opt_equality_by_mid(obj1, obj2, idEq);
2366 return opt_equality_by_mid(obj1, obj2, idEqlP);
2378 case VM_CHECKMATCH_TYPE_WHEN:
2380 case VM_CHECKMATCH_TYPE_RESCUE:
2385 case VM_CHECKMATCH_TYPE_CASE: {
2386 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2389 rb_bug(
"check_match: unreachable");
2394#if MSC_VERSION_BEFORE(1300)
2395#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2397#define CHECK_CMP_NAN(a, b)
2401double_cmp_lt(
double a,
double b)
2403 CHECK_CMP_NAN(a, b);
2404 return RBOOL(a < b);
2408double_cmp_le(
double a,
double b)
2410 CHECK_CMP_NAN(a, b);
2411 return RBOOL(a <= b);
2415double_cmp_gt(
double a,
double b)
2417 CHECK_CMP_NAN(a, b);
2418 return RBOOL(a > b);
2422double_cmp_ge(
double a,
double b)
2424 CHECK_CMP_NAN(a, b);
2425 return RBOOL(a >= b);
2428static inline VALUE *
2434 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2435 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2436 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
2440#if VM_DEBUG_BP_CHECK
2441 if (bp != cfp->bp_check) {
2442 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2443 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2444 (
long)(bp - GET_EC()->vm_stack));
2445 rb_bug(
"vm_base_ptr: unreachable");
2470static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2475 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2477 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2483 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2486 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2487 int param = ISEQ_BODY(iseq)->param.size;
2488 int local = ISEQ_BODY(iseq)->local_table_size;
2489 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2495 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2496 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2497 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2498 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2499 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2500 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2501 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2504MJIT_FUNC_EXPORTED
bool
2505rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2507 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2508 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2509 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2510 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2511 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2512 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2513 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2516MJIT_FUNC_EXPORTED
bool
2517rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2519 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2520 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2521 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2522 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2523 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2524 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2529rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
2531 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
2540 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2545 vm_caller_setup_arg_splat(cfp, calling);
2546 if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
2547 calling->argc > 0 &&
2548 RB_TYPE_P((final_hash = *(cfp->sp - 1)),
T_HASH) &&
2549 (((
struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2550 *(cfp->sp - 1) = rb_hash_dup(final_hash);
2551 calling->kw_splat = 1;
2554 if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
2555 if (IS_ARGS_KEYWORD(ci)) {
2560 vm_caller_setup_arg_kw(cfp, calling, ci);
2563 VALUE keyword_hash = cfp->sp[-1];
2564 if (!RB_TYPE_P(keyword_hash,
T_HASH)) {
2566 cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
2568 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2572 cfp->sp[-1] = rb_hash_dup(keyword_hash);
2583 if (UNLIKELY(calling->kw_splat)) {
2590 calling->kw_splat = 0;
2595#define USE_OPT_HIST 0
2598#define OPT_HIST_MAX 64
2599static int opt_hist[OPT_HIST_MAX+1];
2603opt_hist_show_results_at_exit(
void)
2605 for (
int i=0; i<OPT_HIST_MAX; i++) {
2606 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2616 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2617 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2618 const int opt = calling->argc - lead_num;
2619 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2620 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2621 const int param = ISEQ_BODY(iseq)->param.size;
2622 const int local = ISEQ_BODY(iseq)->local_table_size;
2623 const int delta = opt_num - opt;
2625 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2628 if (opt_pc < OPT_HIST_MAX) {
2632 opt_hist[OPT_HIST_MAX]++;
2636 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2644 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2645 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2646 const int opt = calling->argc - lead_num;
2647 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2649 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2652 if (opt_pc < OPT_HIST_MAX) {
2656 opt_hist[OPT_HIST_MAX]++;
2660 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2665 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2666 VALUE *
const locals);
2675 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2676 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2678 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2679 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2681 const int ci_kw_len = kw_arg->keyword_len;
2682 const VALUE *
const ci_keywords = kw_arg->keywords;
2683 VALUE *argv = cfp->sp - calling->argc;
2684 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2685 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2687 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2688 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2690 int param = ISEQ_BODY(iseq)->param.size;
2691 int local = ISEQ_BODY(iseq)->local_table_size;
2692 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2699 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
2702 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2703 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2705 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2706 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2707 VALUE *
const argv = cfp->sp - calling->argc;
2708 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2711 for (i=0; i<kw_param->num; i++) {
2712 klocals[i] = kw_param->default_values[i];
2719 int param = ISEQ_BODY(iseq)->param.size;
2720 int local = ISEQ_BODY(iseq)->local_table_size;
2721 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2726 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
2730 bool cacheable_ci = vm_ci_markable(ci);
2732 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
2733 if (LIKELY(rb_simple_iseq_p(iseq))) {
2735 CALLER_SETUP_ARG(cfp, calling, ci);
2736 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2738 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
2739 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
2742 VM_ASSERT(ci == calling->ci);
2743 VM_ASSERT(cc == calling->cc);
2744 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
2747 else if (rb_iseq_only_optparam_p(iseq)) {
2749 CALLER_SETUP_ARG(cfp, calling, ci);
2750 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
2752 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2753 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2754 const int argc = calling->argc;
2755 const int opt = argc - lead_num;
2757 if (opt < 0 || opt > opt_num) {
2758 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2761 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2762 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2763 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2764 cacheable_ci && vm_call_cacheable(ci, cc));
2767 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
2768 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2769 cacheable_ci && vm_call_cacheable(ci, cc));
2773 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
2774 for (
int i=argc; i<lead_num + opt_num; i++) {
2777 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
2779 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
2780 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2781 const int argc = calling->argc;
2782 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2784 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
2787 if (argc - kw_arg->keyword_len == lead_num) {
2788 const int ci_kw_len = kw_arg->keyword_len;
2789 const VALUE *
const ci_keywords = kw_arg->keywords;
2791 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2793 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2794 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2796 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
2797 cacheable_ci && vm_call_cacheable(ci, cc));
2802 else if (argc == lead_num) {
2804 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2805 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
2807 if (klocals[kw_param->num] ==
INT2FIX(0)) {
2809 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
2810 cacheable_ci && vm_call_cacheable(ci, cc));
2818 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
2824 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2827 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2828 const int param_size = ISEQ_BODY(iseq)->param.size;
2829 const int local_size = ISEQ_BODY(iseq)->local_table_size;
2830 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2831 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2836 int opt_pc,
int param_size,
int local_size)
2841 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2842 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2845 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2851 int opt_pc,
int param_size,
int local_size)
2853 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2854 VALUE *argv = cfp->sp - calling->argc;
2855 VALUE *sp = argv + param_size;
2856 cfp->sp = argv - 1 ;
2858 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
2859 calling->block_handler, (
VALUE)me,
2860 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2861 local_size - param_size,
2862 ISEQ_BODY(iseq)->stack_max);
2871 VALUE *argv = cfp->sp - calling->argc;
2873 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2874 VALUE *src_argv = argv;
2875 VALUE *sp_orig, *sp;
2876 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
2878 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
2879 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
2880 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
2881 dst_captured->code.val = src_captured->code.val;
2882 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
2883 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
2886 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
2890 vm_pop_frame(ec, cfp, cfp->ep);
2893 sp_orig = sp = cfp->sp;
2896 sp[0] = calling->recv;
2900 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
2901 *sp++ = src_argv[i];
2904 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
2905 calling->recv, calling->block_handler, (
VALUE)me,
2906 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2907 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
2908 ISEQ_BODY(iseq)->stack_max);
2916ractor_unsafe_check(
void)
2918 if (!rb_ractor_main_p()) {
2919 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
2926 ractor_unsafe_check();
2934 ractor_unsafe_check();
2936 return (*f)(argc, argv, recv);
2942 ractor_unsafe_check();
2950 ractor_unsafe_check();
2952 return (*f)(recv, argv[0]);
2958 ractor_unsafe_check();
2960 return (*f)(recv, argv[0], argv[1]);
2966 ractor_unsafe_check();
2968 return (*f)(recv, argv[0], argv[1], argv[2]);
2974 ractor_unsafe_check();
2976 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2982 ractor_unsafe_check();
2983 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2984 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
2990 ractor_unsafe_check();
2991 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
2992 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2998 ractor_unsafe_check();
2999 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3000 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3006 ractor_unsafe_check();
3007 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3008 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3014 ractor_unsafe_check();
3015 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3016 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3022 ractor_unsafe_check();
3023 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3024 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3030 ractor_unsafe_check();
3031 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3032 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3038 ractor_unsafe_check();
3039 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3040 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3046 ractor_unsafe_check();
3047 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3048 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3054 ractor_unsafe_check();
3055 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3056 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3062 ractor_unsafe_check();
3063 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3064 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3078 return (*f)(argc, argv, recv);
3092 return (*f)(recv, argv[0]);
3099 return (*f)(recv, argv[0], argv[1]);
3106 return (*f)(recv, argv[0], argv[1], argv[2]);
3113 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3119 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3120 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3126 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3127 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3133 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3134 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3140 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3141 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3147 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3148 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3154 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3155 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3161 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3162 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3168 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3169 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3175 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3176 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3182 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3183 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3189 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3190 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3196 const int ov_flags = RAISED_STACKOVERFLOW;
3197 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3198 if (rb_ec_raised_p(ec, ov_flags)) {
3199 rb_ec_raised_reset(ec, ov_flags);
3205#define CHECK_CFP_CONSISTENCY(func) \
3206 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3207 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3213#if VM_DEBUG_VERIFY_METHOD_CACHE
3214 switch (me->def->type) {
3215 case VM_METHOD_TYPE_CFUNC:
3216 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3218# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3220 METHOD_BUG(ATTRSET);
3222 METHOD_BUG(BMETHOD);
3225 METHOD_BUG(OPTIMIZED);
3226 METHOD_BUG(MISSING);
3227 METHOD_BUG(REFINED);
3231 rb_bug(
"wrong method type: %d", me->def->type);
3234 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3240 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3246 int len = cfunc->argc;
3248 VALUE recv = calling->recv;
3249 VALUE block_handler = calling->block_handler;
3250 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3251 int argc = calling->argc;
3252 int orig_argc = argc;
3254 if (UNLIKELY(calling->kw_splat)) {
3255 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3258 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3261 vm_push_frame(ec, NULL, frame_type, recv,
3262 block_handler, (
VALUE)me,
3263 0, ec->cfp->sp, 0, 0);
3265 if (len >= 0) rb_check_arity(argc, len, len);
3267 reg_cfp->sp -= orig_argc + 1;
3268 val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);
3270 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3272 rb_vm_pop_frame(ec);
3274 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3275 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3284 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3286 CALLER_SETUP_ARG(reg_cfp, calling, ci);
3287 CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
3288 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
3289 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3296 RB_DEBUG_COUNTER_INC(ccf_ivar);
3298 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
3305 RB_DEBUG_COUNTER_INC(ccf_attrset);
3306 VALUE val = *(cfp->sp - 1);
3308 attr_index_t index = vm_cc_attr_index(cc);
3309 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3310 ID id = vm_cc_cme(cc)->def->body.attr.id;
3312 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
3321 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
3322 if (!UNDEF_P(res)) {
3327 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
3335 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3339rb_vm_call_ivar_attrset_p(
const vm_call_handler ch)
3341 return (ch == vm_call_ivar || ch == vm_call_attrset);
3351 VALUE procv = cme->def->body.bmethod.proc;
3354 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3359 GetProcPtr(procv, proc);
3360 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
3368 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3374 CALLER_SETUP_ARG(cfp, calling, ci);
3375 argc = calling->argc;
3378 cfp->sp += - argc - 1;
3380 return vm_call_bmethod_body(ec, calling, argv);
3383MJIT_FUNC_EXPORTED
VALUE
3384rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
3386 VALUE klass = current_class;
3389 if (RB_TYPE_P(klass,
T_ICLASS) &&
FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3390 RB_TYPE_P(RBASIC_CLASS(klass),
T_CLASS)) {
3391 klass = RBASIC_CLASS(klass);
3394 while (
RTEST(klass)) {
3395 VALUE owner = RB_TYPE_P(klass,
T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3396 if (owner == target_owner) {
3402 return current_class;
3411 if (orig_me->defined_class == 0) {
3412 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3413 VM_ASSERT(RB_TYPE_P(orig_me->owner,
T_MODULE));
3414 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3416 if (me->def->alias_count + me->def->complemented_count == 0) {
3417 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3421 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3429 VM_ASSERT(callable_method_entry_p(cme));
3436 return aliased_callable_method_entry(me);
3442 calling->cc = &VM_CC_ON_STACK(
Qundef,
3445 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
3447 return vm_call_method_each_type(ec, cfp, calling);
3450static enum method_missing_reason
3453 enum method_missing_reason
stat = MISSING_NOENTRY;
3454 if (vm_ci_flag(ci) & VM_CALL_VCALL)
stat |= MISSING_VCALL;
3455 if (vm_ci_flag(ci) & VM_CALL_FCALL)
stat |= MISSING_FCALL;
3456 if (vm_ci_flag(ci) & VM_CALL_SUPER)
stat |= MISSING_SUPER;
3466 ASSUME(calling->argc >= 0);
3469 enum method_missing_reason missing_reason = MISSING_NOENTRY;
3470 int argc = calling->argc;
3471 VALUE recv = calling->recv;
3474 flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3476 if (UNLIKELY(! mid)) {
3477 mid = idMethodMissing;
3478 missing_reason = ci_missing_reason(ci);
3479 ec->method_missing_reason = missing_reason;
3495 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3498 argc = ++calling->argc;
3500 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
3503 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3504 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
3505 VALUE exc = rb_make_no_method_exception(
3515 calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
3516 calling->cc = &VM_CC_ON_STACK(klass,
3518 { .method_missing_reason = missing_reason },
3519 rb_callable_method_entry_with_refinements(klass, mid, NULL));
3521 if (flags & VM_CALL_FCALL) {
3522 return vm_call_method(ec, reg_cfp, calling);
3526 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
3528 if (vm_cc_cme(cc) != NULL) {
3529 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
3530 case METHOD_VISI_PUBLIC:
3531 return vm_call_method_each_type(ec, reg_cfp, calling);
3532 case METHOD_VISI_PRIVATE:
3533 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
3535 case METHOD_VISI_PROTECTED:
3536 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
3539 VM_UNREACHABLE(vm_call_method);
3541 return vm_call_method_missing(ec, reg_cfp, calling);
3544 return vm_call_method_nome(ec, reg_cfp, calling);
3550 RB_DEBUG_COUNTER_INC(ccf_opt_send);
3555 CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);
3557 i = calling->argc - 1;
3559 if (calling->argc == 0) {
3584 return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
3590 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
3592 RB_DEBUG_COUNTER_INC(ccf_method_missing);
3594 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
3597 CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
3598 argc = calling->argc + 1;
3600 unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
3601 calling->argc = argc;
3604 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3605 vm_check_canary(ec, reg_cfp->sp);
3609 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
3612 ec->method_missing_reason = reason;
3613 calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
3614 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
3615 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
3616 return vm_call_method(ec, reg_cfp, calling);
3622 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3633 return vm_call_method_nome(ec, cfp, calling);
3635 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
3636 cme->def->body.refined.orig_me) {
3637 cme = refined_method_callable_without_refinement(cme);
3640 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
3642 return vm_call_method_each_type(ec, cfp, calling);
3646find_refinement(
VALUE refinements,
VALUE klass)
3648 if (
NIL_P(refinements)) {
3651 return rb_hash_lookup(refinements, klass);
3660 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
3661 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
3664 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
3665 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
3669 }
while (cfp->iseq != local_iseq);
3680 if (orig_me->defined_class == 0) {
3688 VM_ASSERT(callable_method_entry_p(cme));
3690 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3700 ID mid = vm_ci_mid(calling->ci);
3701 const rb_cref_t *cref = vm_get_cref(cfp->ep);
3705 for (; cref; cref = CREF_NEXT(cref)) {
3706 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
3707 if (
NIL_P(refinement))
continue;
3710 rb_callable_method_entry(refinement, mid);
3713 if (vm_cc_call(cc) == vm_call_super_method) {
3716 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
3721 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
3722 cme->def != ref_me->def) {
3725 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
3734 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
3735 return refined_method_callable_without_refinement(vm_cc_cme(cc));
3748 search_refined_method(ec, cfp, calling));
3750 if (vm_cc_cme(ref_cc)) {
3751 calling->cc= ref_cc;
3752 return vm_call_method(ec, cfp, calling);
3755 return vm_call_method_nome(ec, cfp, calling);
3761NOINLINE(
static VALUE
3769 int argc = calling->argc;
3772 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
3775 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
3781 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3784 VALUE procval = calling->recv;
3785 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3791 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
3793 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
3796 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
3797 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
3800 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
3801 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
3802 return vm_call_general(ec, reg_cfp, calling);
3809 VALUE recv = calling->recv;
3811 VM_ASSERT(RB_TYPE_P(recv,
T_STRUCT));
3812 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3813 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
3815 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3816 return internal_RSTRUCT_GET(recv, off);
3822 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3824 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3832 VALUE recv = calling->recv;
3834 VM_ASSERT(RB_TYPE_P(recv,
T_STRUCT));
3835 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
3836 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
3840 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
3841 internal_RSTRUCT_SET(recv, off, val);
3849 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3851 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3863 switch (vm_cc_cme(cc)->def->body.optimized.type) {
3864 case OPTIMIZED_METHOD_TYPE_SEND:
3865 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
3866 return vm_call_opt_send(ec, cfp, calling);
3867 case OPTIMIZED_METHOD_TYPE_CALL:
3868 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
3869 return vm_call_opt_call(ec, cfp, calling);
3870 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
3871 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
3872 return vm_call_opt_block_call(ec, cfp, calling);
3873 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
3874 CALLER_SETUP_ARG(cfp, calling, ci);
3875 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3876 rb_check_arity(calling->argc, 0, 0);
3877 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3878 return vm_call_opt_struct_aref(ec, cfp, calling);
3880 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
3881 CALLER_SETUP_ARG(cfp, calling, ci);
3882 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3883 rb_check_arity(calling->argc, 1, 1);
3884 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
3885 return vm_call_opt_struct_aset(ec, cfp, calling);
3887 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
3891#define VM_CALL_METHOD_ATTR(var, func, nohook) \
3892 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
3893 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
3894 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
3896 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
3897 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
3912 switch (cme->def->type) {
3913 case VM_METHOD_TYPE_ISEQ:
3914 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
3915 return vm_call_iseq_setup(ec, cfp, calling);
3917 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3918 case VM_METHOD_TYPE_CFUNC:
3919 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
3920 return vm_call_cfunc(ec, cfp, calling);
3922 case VM_METHOD_TYPE_ATTRSET:
3923 CALLER_SETUP_ARG(cfp, calling, ci);
3924 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3926 rb_check_arity(calling->argc, 1, 1);
3928 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
3930 if (vm_cc_markable(cc)) {
3931 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3932 VM_CALL_METHOD_ATTR(v,
3933 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3934 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3940 VM_CALLCACHE_UNMARKABLE |
3941 ((
VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
3942 VM_CALLCACHE_ON_STACK,
3948 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
3953 VM_CALL_METHOD_ATTR(v,
3954 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
3955 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
3959 case VM_METHOD_TYPE_IVAR:
3960 CALLER_SETUP_ARG(cfp, calling, ci);
3961 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
3962 rb_check_arity(calling->argc, 0, 0);
3963 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
3964 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
3965 VM_CALL_METHOD_ATTR(v,
3966 vm_call_ivar(ec, cfp, calling),
3967 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
3970 case VM_METHOD_TYPE_MISSING:
3971 vm_cc_method_missing_reason_set(cc, 0);
3972 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
3973 return vm_call_method_missing(ec, cfp, calling);
3975 case VM_METHOD_TYPE_BMETHOD:
3976 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
3977 return vm_call_bmethod(ec, cfp, calling);
3979 case VM_METHOD_TYPE_ALIAS:
3980 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
3981 return vm_call_alias(ec, cfp, calling);
3983 case VM_METHOD_TYPE_OPTIMIZED:
3984 return vm_call_optimized(ec, cfp, calling, ci, cc);
3986 case VM_METHOD_TYPE_UNDEF:
3989 case VM_METHOD_TYPE_ZSUPER:
3990 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
3992 case VM_METHOD_TYPE_REFINED:
3995 return vm_call_refined(ec, cfp, calling);
3998 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4008 const int stat = ci_missing_reason(ci);
4010 if (vm_ci_mid(ci) == idMethodMissing) {
4012 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4013 vm_raise_method_missing(ec, calling->argc, argv, calling->recv,
stat);
4016 return vm_call_method_missing_body(ec, cfp, calling, ci,
stat);
4028 VALUE defined_class = me->defined_class;
4029 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4030 return NIL_P(refined_class) ? defined_class : refined_class;
4039 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4041 if (vm_cc_cme(cc) != NULL) {
4042 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4043 case METHOD_VISI_PUBLIC:
4044 return vm_call_method_each_type(ec, cfp, calling);
4046 case METHOD_VISI_PRIVATE:
4047 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4048 enum method_missing_reason
stat = MISSING_PRIVATE;
4049 if (vm_ci_flag(ci) & VM_CALL_VCALL)
stat |= MISSING_VCALL;
4051 vm_cc_method_missing_reason_set(cc,
stat);
4052 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4053 return vm_call_method_missing(ec, cfp, calling);
4055 return vm_call_method_each_type(ec, cfp, calling);
4057 case METHOD_VISI_PROTECTED:
4058 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4059 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4061 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4062 return vm_call_method_missing(ec, cfp, calling);
4066 VM_ASSERT(vm_cc_cme(cc) != NULL);
4069 calling->cc = &cc_on_stack;
4070 return vm_call_method_each_type(ec, cfp, calling);
4073 return vm_call_method_each_type(ec, cfp, calling);
4080 return vm_call_method_nome(ec, cfp, calling);
4087 RB_DEBUG_COUNTER_INC(ccf_general);
4088 return vm_call_method(ec, reg_cfp, calling);
4094 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4095 VM_ASSERT(cc != vm_cc_empty());
4097 *(vm_call_handler *)&cc->call_ = vm_call_general;
4103 RB_DEBUG_COUNTER_INC(ccf_super_method);
4108 if (ec == NULL)
rb_bug(
"unreachable");
4111 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4112 return vm_call_method(ec, reg_cfp, calling);
4118vm_search_normal_superclass(
VALUE klass)
4123 klass =
RBASIC(klass)->klass;
4125 klass = RCLASS_ORIGIN(klass);
4129NORETURN(
static void vm_super_outside(
void));
4132vm_super_outside(
void)
4138empty_cc_for_super(
void)
4141 return rb_vm_empty_cc_for_super();
4143 return &vm_empty_cc_for_super;
4150 VALUE current_defined_class;
4157 current_defined_class = vm_defined_class_for_protected_call(me);
4160 reg_cfp->iseq != method_entry_iseqptr(me) &&
4163 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4167 "self has wrong type to call super in this context: "
4168 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
4173 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4175 "implicit argument passing of super from method defined"
4176 " by define_method() is not supported."
4177 " Specify all arguments explicitly.");
4180 ID mid = me->def->original_id;
4183 cd->ci = vm_ci_new_runtime(mid,
4186 vm_ci_kwarg(cd->ci));
4192 VALUE klass = vm_search_normal_superclass(me->defined_class);
4196 cc = vm_cc_new(klass, NULL, vm_call_method_missing);
4200 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
4204 if (cached_cme == NULL) {
4206 cd->cc = empty_cc_for_super();
4208 else if (cached_cme->called_id != mid) {
4211 cc = vm_cc_new(klass, cme, vm_call_super_method);
4215 cd->cc = cc = empty_cc_for_super();
4219 switch (cached_cme->def->type) {
4221 case VM_METHOD_TYPE_REFINED:
4223 case VM_METHOD_TYPE_ATTRSET:
4224 case VM_METHOD_TYPE_IVAR:
4225 vm_cc_call_set(cc, vm_call_super_method);
4233 VM_ASSERT((vm_cc_cme(cc),
true));
4241block_proc_is_lambda(
const VALUE procval)
4246 GetProcPtr(procval, proc);
4247 return proc->is_lambda;
4257 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
4260 int is_lambda = FALSE;
4261 VALUE val, arg, blockarg;
4263 const struct vm_ifunc *ifunc = captured->code.ifunc;
4268 else if (argc == 0) {
4275 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4277 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4279 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4282 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
4285 VM_GUARDED_PREV_EP(captured->ep),
4287 0, ec->cfp->sp, 0, 0);
4288 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
4289 rb_vm_pop_frame(ec);
4297 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
4306 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4308 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4316vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
4318 VALUE ary, arg0 = argv[0];
4319 ary = rb_check_array_type(arg0);
4323 VM_ASSERT(argv[0] == arg0);
4331 if (rb_simple_iseq_p(iseq)) {
4335 CALLER_SETUP_ARG(cfp, calling, ci);
4336 CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
4338 if (arg_setup_type == arg_setup_block &&
4339 calling->argc == 1 &&
4340 ISEQ_BODY(iseq)->param.flags.has_lead &&
4341 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
4342 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4343 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4346 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
4347 if (arg_setup_type == arg_setup_block) {
4348 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
4350 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4351 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
4352 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4354 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
4355 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4359 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
4366 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4375 calling = &calling_entry;
4376 calling->argc = argc;
4377 calling->block_handler = block_handler;
4378 calling->kw_splat = kw_splat;
4380 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);
4382 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4390 bool is_lambda,
VALUE block_handler)
4393 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4394 const int arg_size = ISEQ_BODY(iseq)->param.size;
4395 VALUE *
const rsp = GET_SP() - calling->argc;
4396 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);
4400 vm_push_frame(ec, iseq,
4401 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4403 VM_GUARDED_PREV_EP(captured->ep), 0,
4404 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4406 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
4414 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4416 if (calling->argc < 1) {
4420 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4421 CALLER_SETUP_ARG(reg_cfp, calling, ci);
4422 calling->recv = TOPN(--calling->argc);
4423 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
4430 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4435 CALLER_SETUP_ARG(ec->cfp, calling, ci);
4436 CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
4437 argc = calling->argc;
4438 val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
4444vm_proc_to_block_handler(
VALUE procval)
4446 const struct rb_block *block = vm_proc_block(procval);
4448 switch (vm_block_type(block)) {
4449 case block_type_iseq:
4450 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4451 case block_type_ifunc:
4452 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4453 case block_type_symbol:
4454 return VM_BH_FROM_SYMBOL(block->as.symbol);
4455 case block_type_proc:
4456 return VM_BH_FROM_PROC(block->as.proc);
4458 VM_UNREACHABLE(vm_yield_with_proc);
4465 bool is_lambda,
VALUE block_handler)
4467 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4468 VALUE proc = VM_BH_TO_PROC(block_handler);
4469 is_lambda = block_proc_is_lambda(proc);
4470 block_handler = vm_proc_to_block_handler(proc);
4473 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4479 bool is_lambda,
VALUE block_handler)
4483 bool is_lambda,
VALUE block_handler);
4485 switch (vm_block_handler_type(block_handler)) {
4486 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
4487 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
4488 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
4489 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
4490 default:
rb_bug(
"vm_invoke_block: unreachable");
4493 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4497vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
4504 rb_bug(
"vm_make_proc_with_iseq: unreachable");
4507 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
4508 captured->code.iseq = blockiseq;
4510 return rb_vm_make_proc(ec, captured,
rb_cProc);
4514vm_once_exec(
VALUE iseq)
4521vm_once_clear(
VALUE data)
4524 is->once.running_thread = NULL;
4536 args[0] = obj; args[1] =
Qfalse;
4538 if (!UNDEF_P(r) &&
RTEST(r)) {
4550 enum defined_type
type = (
enum defined_type)op_type;
4557 return rb_gvar_defined(
SYM2ID(obj));
4559 case DEFINED_CVAR: {
4560 const rb_cref_t *cref = vm_get_cref(GET_EP());
4561 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
4566 case DEFINED_CONST_FROM: {
4567 bool allow_nil =
type == DEFINED_CONST;
4569 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
4574 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
4576 case DEFINED_METHOD:{
4581 switch (METHOD_ENTRY_VISI(me)) {
4582 case METHOD_VISI_PRIVATE:
4584 case METHOD_VISI_PROTECTED:
4588 case METHOD_VISI_PUBLIC:
4592 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
4596 return check_respond_to_missing(obj, v);
4601 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
4605 case DEFINED_ZSUPER:
4610 VALUE klass = vm_search_normal_superclass(me->defined_class);
4611 ID id = me->def->original_id;
4622 rb_bug(
"unimplemented defined? type (VM)");
4632 return vm_defined(ec, reg_cfp, op_type, obj, v);
4636vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
4639 const VALUE *ep = reg_ep;
4640 for (i = 0; i < lv; i++) {
4641 ep = GET_PREV_EP(ep);
4647vm_get_special_object(
const VALUE *
const reg_ep,
4648 enum vm_special_object_type
type)
4651 case VM_SPECIAL_OBJECT_VMCORE:
4652 return rb_mRubyVMFrozenCore;
4653 case VM_SPECIAL_OBJECT_CBASE:
4654 return vm_get_cbase(reg_ep);
4655 case VM_SPECIAL_OBJECT_CONST_BASE:
4656 return vm_get_const_base(reg_ep);
4658 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
4665 const VALUE ary2 = ary2st;
4666 VALUE tmp1 = rb_check_to_array(ary1);
4667 VALUE tmp2 = rb_check_to_array(ary2);
4678 tmp1 = rb_ary_dup(ary1);
4680 return rb_ary_concat(tmp1, tmp2);
4688 return vm_concat_array(ary1, ary2st);
4694 VALUE tmp = rb_check_to_array(ary);
4698 else if (
RTEST(flag)) {
4699 return rb_ary_dup(tmp);
4711 return vm_splat_array(flag, ary);
4717 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4719 if (flag & VM_CHECKMATCH_ARRAY) {
4723 for (i = 0; i < n; i++) {
4725 VALUE c = check_match(ec, v, target,
type);
4734 return check_match(ec, pattern, target,
type);
4739vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
4741 const VALUE kw_bits = *(ep - bits);
4744 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
4745 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4749 VM_ASSERT(RB_TYPE_P(kw_bits,
T_HASH));
4758 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4759 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4760 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4761 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4765 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4768 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4771 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4774 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4781vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
4786 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4787 return rb_public_const_get_at(cbase,
id);
4795vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
4797 if (!RB_TYPE_P(klass,
T_CLASS)) {
4800 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
4805 "superclass mismatch for class %"PRIsVALUE
"",
4818vm_check_if_module(
ID id,
VALUE mod)
4837vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4840 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super :
rb_cObject;
4848vm_declare_module(
ID id,
VALUE cbase)
4854NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
4858 VALUE name = rb_id2str(
id);
4861 VALUE location = rb_const_source_location_at(cbase,
id);
4862 if (!
NIL_P(location)) {
4863 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
4864 " previous definition of %"PRIsVALUE
" was here",
4865 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
4871vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
4875 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super,
T_CLASS)) {
4877 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
4881 vm_check_if_namespace(cbase);
4885 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
4886 if (!vm_check_if_class(
id, flags, super, klass))
4887 unmatched_redefinition(
"class", cbase,
id, klass);
4891 return vm_declare_class(
id, flags, cbase, super);
4896vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
4900 vm_check_if_namespace(cbase);
4901 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
4902 if (!vm_check_if_module(
id, mod))
4903 unmatched_redefinition(
"module", cbase,
id, mod);
4907 return vm_declare_module(
id, cbase);
4912vm_find_or_create_class_by_id(
ID id,
4917 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
4920 case VM_DEFINECLASS_TYPE_CLASS:
4922 return vm_define_class(
id, flags, cbase, super);
4924 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
4928 case VM_DEFINECLASS_TYPE_MODULE:
4930 return vm_define_module(
id, flags, cbase);
4933 rb_bug(
"unknown defineclass type: %d", (
int)
type);
4937static rb_method_visibility_t
4942 if (!vm_env_cref_by_cref(cfp->ep)) {
4943 return METHOD_VISI_PUBLIC;
4946 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4955 if (!vm_env_cref_by_cref(cfp->ep)) {
4959 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4967 rb_method_visibility_t visi;
4972 visi = METHOD_VISI_PUBLIC;
4975 klass = CREF_CLASS_FOR_DEFINITION(cref);
4976 visi = vm_scope_visibility_get(ec);
4983 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
4987 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval);
4990 if (!is_singleton && vm_scope_module_func_check(ec)) {
4992 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5002 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5004 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5005 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5008 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5016 return vm_search_method((
VALUE)reg_cfp->iseq, cd, recv);
5023 .flags =
T_IMEMO | (imemo_callcache <<
FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
5026 .call_ = vm_invokeblock_i,
5032# define mexp_search_method vm_search_method_wrap
5033# define mexp_search_super vm_search_super_method
5034# define mexp_search_invokeblock vm_search_invokeblock
5036enum method_explorer_type {
5038 mexp_search_invokeblock,
5052 VALUE block_handler,
5056 enum method_explorer_type method_explorer
5062 int argc = vm_ci_argc(ci);
5063 VALUE recv = TOPN(argc);
5065 .block_handler = block_handler,
5066 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5074 calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
5075 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5077 switch (method_explorer) {
5078 case mexp_search_method:
5079 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5080 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5082 case mexp_search_super:
5083 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5084 calling.ci = cd->ci;
5085 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5087 case mexp_search_invokeblock:
5088 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5093 if (!UNDEF_P(val)) {
5107 if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
5108 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5109 return vm_exec(ec,
true);
5111 else if (UNDEF_P(val = jit_exec(ec))) {
5112 VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
5113 return vm_exec(ec,
false);
5121 return jit_exec(ec);
5157 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5163 val = rb_mod_to_s(recv);
5169 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5170 return rb_nil_to_s(recv);
5174 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5175 return rb_true_to_s(recv);
5179 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5180 return rb_false_to_s(recv);
5184 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5185 return rb_fix_to_s(recv);
5193vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
5195 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5209 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5214 VALUE result = *ptr;
5215 rb_snum_t i = num - 1;
5217 const VALUE v = *++ptr;
5218 if (OPTIMIZED_CMP(v, result) > 0) {
5233 return vm_opt_newarray_max(ec, num, ptr);
5239 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5244 VALUE result = *ptr;
5245 rb_snum_t i = num - 1;
5247 const VALUE v = *++ptr;
5248 if (OPTIMIZED_CMP(v, result) < 0) {
5263 return vm_opt_newarray_min(ec, num, ptr);
5268#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5271vm_track_constant_cache(
ID id,
void *ic)
5273 struct rb_id_table *const_cache = GET_VM()->constant_cache;
5274 VALUE lookup_result;
5277 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
5281 ics = st_init_numtable();
5282 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
5285 st_insert(ics, (st_data_t) ic, (st_data_t)
Qtrue);
5293 for (
int i = 0; segments[i]; i++) {
5294 ID id = segments[i];
5295 if (
id == idNULL)
continue;
5296 vm_track_constant_cache(
id, ic);
5306 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5307 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5309 return (ic_cref == NULL ||
5310 ic_cref == vm_get_cref(reg_ep));
5318 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
5319 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
5324rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
5326 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
5332 if (ruby_vm_const_missing_count > 0) {
5333 ruby_vm_const_missing_count = 0;
5340 ice->ic_cref = vm_get_const_key_cref(reg_ep);
5345 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
5346 rb_yjit_constant_ic_update(iseq, ic, pos);
5347 rb_mjit_constant_ic_update(iseq, ic, pos);
5357 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5358 return is->once.value;
5360 else if (is->once.running_thread == NULL) {
5362 is->once.running_thread = th;
5366 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
5369 else if (is->once.running_thread == th) {
5371 return vm_once_exec((
VALUE)iseq);
5375 RUBY_VM_CHECK_INTS(ec);
5382vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
5384 switch (OBJ_BUILTIN_TYPE(key)) {
5390 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5391 SYMBOL_REDEFINED_OP_FLAG |
5392 INTEGER_REDEFINED_OP_FLAG |
5393 FLOAT_REDEFINED_OP_FLAG |
5394 NIL_REDEFINED_OP_FLAG |
5395 TRUE_REDEFINED_OP_FLAG |
5396 FALSE_REDEFINED_OP_FLAG |
5397 STRING_REDEFINED_OP_FLAG)) {
5399 if (RB_FLOAT_TYPE_P(key)) {
5401 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5405 if (rb_hash_stlike_lookup(hash, key, &val)) {
5425 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5426 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5427 static const char stack_consistency_error[] =
5428 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
5429#if defined RUBY_DEVEL
5435 rb_bug(stack_consistency_error, nsp, nbp);
5442 if (FIXNUM_2_P(recv, obj) &&
5443 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5444 return rb_fix_plus_fix(recv, obj);
5446 else if (FLONUM_2_P(recv, obj) &&
5447 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5453 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5455 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5460 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5461 return rb_str_opt_plus(recv, obj);
5463 else if (RBASIC_CLASS(recv) ==
rb_cArray &&
5465 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5466 return rb_ary_plus(recv, obj);
5476 if (FIXNUM_2_P(recv, obj) &&
5477 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5478 return rb_fix_minus_fix(recv, obj);
5480 else if (FLONUM_2_P(recv, obj) &&
5481 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5487 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5489 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5500 if (FIXNUM_2_P(recv, obj) &&
5501 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5502 return rb_fix_mul_fix(recv, obj);
5504 else if (FLONUM_2_P(recv, obj) &&
5505 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5511 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5513 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5524 if (FIXNUM_2_P(recv, obj) &&
5525 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5526 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
5528 else if (FLONUM_2_P(recv, obj) &&
5529 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5530 return rb_flo_div_flo(recv, obj);
5535 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5537 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5538 return rb_flo_div_flo(recv, obj);
5548 if (FIXNUM_2_P(recv, obj) &&
5549 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5550 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
5552 else if (FLONUM_2_P(recv, obj) &&
5553 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5559 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5561 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5572 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5573 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5575 if (!UNDEF_P(val)) {
5576 return RBOOL(!
RTEST(val));
5586 if (FIXNUM_2_P(recv, obj) &&
5587 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5590 else if (FLONUM_2_P(recv, obj) &&
5591 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5597 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5599 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5611 if (FIXNUM_2_P(recv, obj) &&
5612 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5615 else if (FLONUM_2_P(recv, obj) &&
5616 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5622 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5624 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5636 if (FIXNUM_2_P(recv, obj) &&
5637 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5640 else if (FLONUM_2_P(recv, obj) &&
5641 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5647 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5649 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5661 if (FIXNUM_2_P(recv, obj) &&
5662 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5665 else if (FLONUM_2_P(recv, obj) &&
5666 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5672 else if (RBASIC_CLASS(recv) ==
rb_cFloat &&
5674 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5691 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5692 if (LIKELY(RB_TYPE_P(obj,
T_STRING))) {
5699 else if (RBASIC_CLASS(recv) ==
rb_cArray &&
5700 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5701 return rb_ary_push(recv, obj);
5718 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5729 if (FIXNUM_2_P(recv, obj) &&
5730 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5742 if (FIXNUM_2_P(recv, obj) &&
5743 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5744 return rb_fix_aref(recv, obj);
5748 else if (RBASIC_CLASS(recv) ==
rb_cArray &&
5749 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5751 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
5754 return rb_ary_aref1(recv, obj);
5757 else if (RBASIC_CLASS(recv) ==
rb_cHash &&
5758 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5759 return rb_hash_aref(recv, obj);
5772 else if (RBASIC_CLASS(recv) ==
rb_cArray &&
5773 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5775 rb_ary_store(recv,
FIX2LONG(obj), set);
5778 else if (RBASIC_CLASS(recv) ==
rb_cHash &&
5779 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5780 rb_hash_aset(recv, obj, set);
5792 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5793 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
5794 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
5795 return rb_hash_aref(recv, key);
5806 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5807 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
5808 return rb_hash_aset(recv, key, val);
5816vm_opt_length(
VALUE recv,
int bop)
5822 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5823 if (bop == BOP_EMPTY_P) {
5824 return LONG2NUM(RSTRING_LEN(recv));
5830 else if (RBASIC_CLASS(recv) ==
rb_cArray &&
5831 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5834 else if (RBASIC_CLASS(recv) ==
rb_cHash &&
5835 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5844vm_opt_empty_p(
VALUE recv)
5846 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5859 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5862 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5878 case RSHIFT(~0UL, 1):
5881 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
5899vm_opt_succ(
VALUE recv)
5902 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5903 return fix_succ(recv);
5909 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5920 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5921 return RBOOL(!
RTEST(recv));
5936 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5940 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5958 VALUE self = GET_SELF();
5960 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
5962 if (event & global_hooks->events) {
5965 vm_dtrace(event, ec);
5966 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
5972 if (local_hooks != NULL) {
5973 if (event & local_hooks->events) {
5976 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
5988 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
5989 case BIN(opt_nil_p):
5990 return check_cfunc(vm_cc_cme(cc), rb_false);
5992 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
5998#define VM_TRACE_HOOK(target_event, val) do { \
5999 if ((pc_events & (target_event)) & enabled_flags) { \
6000 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6007 const VALUE *pc = reg_cfp->pc;
6008 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6011 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6017 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6020 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6021 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6025 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6026 enabled_flags |= iseq_local_events;
6028 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6030 if (bmethod_frame) {
6032 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6033 bmethod_local_hooks = me->def->body.bmethod.hooks;
6034 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6035 if (bmethod_local_hooks) {
6036 bmethod_local_events = bmethod_local_hooks->events;
6041 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6045 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6053 else if (ec->trace_arg != NULL) {
6061 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6064 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
6067 RSTRING_PTR(rb_iseq_path(iseq)),
6068 (
int)rb_iseq_line_no(iseq, pos),
6069 RSTRING_PTR(rb_iseq_label(iseq)));
6071 VM_ASSERT(reg_cfp->pc == pc);
6072 VM_ASSERT(pc_events != 0);
6081 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
6082 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
6100#if VM_CHECK_MODE > 0
6101NORETURN( NOINLINE( COLDFUNC
6102void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
6105Init_vm_stack_canary(
void)
6108 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
6109 vm_stack_canary |= 0x01;
6111 vm_stack_canary_was_born =
true;
6116MJIT_FUNC_EXPORTED
void
6117rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
6121 const char *insn = rb_insns_name(i);
6125 rb_bug(
"dead canary found at %s: %s", insn, str);
6130void Init_vm_stack_canary(
void) { }
6162 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
6169 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
6176 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
6183 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
6190 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
6197 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
6204 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
6211 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
6218 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
6224 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
6225 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
6231 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
6232 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
6238 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
6239 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
6245 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
6246 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
6252 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
6253 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
6259 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
6260 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
6266 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
6267 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6272static builtin_invoker
6273lookup_builtin_invoker(
int argc)
6275 static const builtin_invoker invokers[] = {
6294 return invokers[argc];
6300 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p;
6301 SETUP_CANARY(canary_p);
6302 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6303 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6310 return invoke_bf(ec, cfp, bf, argv);
6317 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
6318 for (
int i=0; i<bf->argc; i++) {
6319 ruby_debug_printf(
":%s ",
rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6321 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6324 if (bf->argc == 0) {
6325 return invoke_bf(ec, cfp, bf, NULL);
6328 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6329 return invoke_bf(ec, cfp, bf, argv);
6339 return cfp->ep[index];
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
void rb_bug(const char *fmt,...)
Interpreter panic switch.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it always reports, regardless of the runtime -W flag.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
VALUE rb_eArgError
ArgumentError exception.
VALUE rb_ensure(VALUE(*b_proc)(VALUE), VALUE data1, VALUE(*e_proc)(VALUE), VALUE data2)
An equivalent to ensure clause.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_cObject
Documented in include/ruby/internal/globals.h.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
#define rb_check_frozen
Just another name of rb_check_frozen.
#define rb_check_frozen_internal(obj)
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
#define RARRAY_LEN
Just another name of rb_array_len.
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
#define RARRAY_AREF(a, i)
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RB_NO_KEYWORDS
Do not pass keywords.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
rb_cref_t * cref
class reference, should be marked
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t VALUE
Type that represents a Ruby object.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.