11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
88 if (crit >= rb_stack_overflow_fatal) {
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
100callable_class_p(
VALUE klass)
102#if VM_CHECK_MODE >= 2
103 if (!klass)
return FALSE;
131 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
133 if (callable_class_p(cme->defined_class)) {
143vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
145 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
146 enum imemo_type cref_or_me_type = imemo_env;
149 cref_or_me_type = imemo_type(cref_or_me);
151 if (
type & VM_FRAME_FLAG_BMETHOD) {
155 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
156 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
158 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
159 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
163 if (cref_or_me_type != imemo_ment) {
164 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
168 if (req_cref && cref_or_me_type != imemo_cref) {
169 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
172 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
173 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
177 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
183 if (cref_or_me_type == imemo_ment) {
186 if (!callable_method_entry_p(me)) {
187 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
191 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
192 VM_ASSERT(iseq == NULL ||
194 RUBY_VM_NORMAL_ISEQ_P(iseq)
198 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
208 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
211#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
213 vm_check_frame_detail(type, req_block, req_me, req_cref, \
214 specval, cref_or_me, is_cframe, iseq); \
216 switch (given_magic) {
218 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
219 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
220 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
221 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
222 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
224 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
226 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
228 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
233static VALUE vm_stack_canary;
234static bool vm_stack_canary_was_born =
false;
241 unsigned int pos = 0;
242 while (pos < ISEQ_BODY(iseq)->iseq_size) {
243 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
244 unsigned int next_pos = pos + insn_len(opcode);
245 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
250 rb_bug(
"failed to find the previous insn");
259 if (! LIKELY(vm_stack_canary_was_born)) {
262 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
266 else if (! (iseq = GET_ISEQ())) {
269 else if (LIKELY(sp[0] != vm_stack_canary)) {
278 const VALUE *orig = rb_iseq_original_iseq(iseq);
279 const VALUE iseqw = rb_iseqw_new(iseq);
281 const char *stri = rb_str_to_cstr(inspection);
282 const VALUE disasm = rb_iseq_disasm(iseq);
283 const char *strd = rb_str_to_cstr(disasm);
284 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
285 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
286 const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
299#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
302#define vm_check_canary(ec, sp)
303#define vm_check_frame(a, b, c, d)
308vm_push_frame_debug_counter_inc(
315 RB_DEBUG_COUNTER_INC(frame_push);
317 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
318 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
319 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
322 RB_DEBUG_COUNTER_INC(frame_R2R);
325 RB_DEBUG_COUNTER_INC(frame_R2C);
330 RB_DEBUG_COUNTER_INC(frame_C2R);
333 RB_DEBUG_COUNTER_INC(frame_C2C);
338 switch (
type & VM_FRAME_MAGIC_MASK) {
339 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
340 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
341 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
342 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
343 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
344 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
345 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
346 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
347 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
350 rb_bug(
"unreachable");
353#define vm_push_frame_debug_counter_inc(ec, cfp, t)
358rb_vm_stack_canary(
void)
361 return vm_stack_canary;
367STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
368STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
369STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
385 vm_check_frame(
type, specval, cref_or_me, iseq);
386 VM_ASSERT(local_size >= 0);
389 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
390 vm_check_canary(ec, sp);
395 for (
int i=0; i < local_size; i++) {
422 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
423 atomic_signal_fence(memory_order_seq_cst);
431 vm_push_frame_debug_counter_inc(ec, cfp,
type);
439 if (VMDEBUG == 2) SDR();
441 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
448 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
450 if (VMDEBUG == 2) SDR();
452 RUBY_VM_CHECK_INTS(ec);
453 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
455 return flags & VM_FRAME_FLAG_FINISH;
461 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
469 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
473 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
475 VM_BLOCK_HANDLER_NONE,
482 return (
VALUE)dmy_iseq;
487rb_arity_error_new(
int argc,
int min,
int max)
489 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
497 rb_str_catf(err_mess,
"..%d", max);
504rb_error_arity(
int argc,
int min,
int max)
511NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
514vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
517 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
518 VM_FORCE_WRITE(&ep[index], v);
519 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
520 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
525vm_env_write(
const VALUE *ep,
int index,
VALUE v)
527 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
528 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
529 VM_STACK_ENV_WRITE(ep, index, v);
532 vm_env_write_slowpath(ep, index, v);
537rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
539 vm_env_write(ep, index, v);
545 if (block_handler == VM_BLOCK_HANDLER_NONE) {
549 switch (vm_block_handler_type(block_handler)) {
550 case block_handler_type_iseq:
551 case block_handler_type_ifunc:
552 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
553 case block_handler_type_symbol:
554 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
555 case block_handler_type_proc:
556 return VM_BH_TO_PROC(block_handler);
558 VM_UNREACHABLE(rb_vm_bh_to_procval);
567vm_svar_valid_p(
VALUE svar)
570 switch (imemo_type(svar)) {
579 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
589 if (lep && (ec == NULL || ec->root_lep != lep)) {
590 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
593 svar = ec->root_svar;
596 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
604 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
606 if (lep && (ec == NULL || ec->root_lep != lep)) {
607 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
610 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
617 const struct vm_svar *svar = lep_svar(ec, lep);
622 case VM_SVAR_LASTLINE:
623 return svar->lastline;
624 case VM_SVAR_BACKREF:
625 return svar->backref;
627 const VALUE ary = svar->others;
642 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
653 struct vm_svar *svar = lep_svar(ec, lep);
656 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
660 case VM_SVAR_LASTLINE:
663 case VM_SVAR_BACKREF:
667 VALUE ary = svar->others;
683 val = lep_svar_get(ec, lep, key);
686 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
703 rb_bug(
"unexpected back-ref");
716 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
726 return rb_reg_last_defined(backref);
728 rb_bug(
"unexpected back-ref");
732 nth = (int)(
type >> 1);
739check_method_entry(
VALUE obj,
int can_be_svar)
741 if (obj ==
Qfalse)
return NULL;
744 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
747 switch (imemo_type(obj)) {
758 rb_bug(
"check_method_entry: svar should not be there:");
767 const VALUE *ep = cfp->ep;
770 while (!VM_ENV_LOCAL_P(ep)) {
771 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
772 ep = VM_ENV_PREV_EP(ep);
775 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
781 switch (me->def->type) {
782 case VM_METHOD_TYPE_ISEQ:
783 return me->def->body.iseq.
iseqptr;
792 switch (me->def->type) {
793 case VM_METHOD_TYPE_ISEQ:
794 return me->def->body.iseq.
cref;
800#if VM_CHECK_MODE == 0
804check_cref(
VALUE obj,
int can_be_svar)
806 if (obj ==
Qfalse)
return NULL;
809 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
812 switch (imemo_type(obj)) {
823 rb_bug(
"check_method_entry: svar should not be there:");
830vm_env_cref(
const VALUE *ep)
834 while (!VM_ENV_LOCAL_P(ep)) {
835 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
836 ep = VM_ENV_PREV_EP(ep);
839 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
843is_cref(
const VALUE v,
int can_be_svar)
846 switch (imemo_type(v)) {
859vm_env_cref_by_cref(
const VALUE *ep)
861 while (!VM_ENV_LOCAL_P(ep)) {
862 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
863 ep = VM_ENV_PREV_EP(ep);
865 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
869cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
871 const VALUE v = *vptr;
875 switch (imemo_type(v)) {
878 new_cref = vm_cref_dup(cref);
883 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
888 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
892 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
901vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
903 if (vm_env_cref_by_cref(ep)) {
907 while (!VM_ENV_LOCAL_P(ep)) {
908 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
909 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
912 ep = VM_ENV_PREV_EP(ep);
914 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
915 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
918 rb_bug(
"vm_cref_dup: unreachable");
923vm_get_cref(
const VALUE *ep)
931 rb_bug(
"vm_get_cref: unreachable");
936rb_vm_get_cref(
const VALUE *ep)
938 return vm_get_cref(ep);
949 return vm_get_cref(cfp->ep);
953vm_get_const_key_cref(
const VALUE *ep)
959 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
960 RCLASS_CLONED_P(CREF_CLASS(cref)) ) {
963 cref = CREF_NEXT(cref);
976 #define ADD_NEW_CREF(new_cref) \
977 if (new_cref_tail) { \
978 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
981 new_cref_head = new_cref; \
983 new_cref_tail = new_cref;
987 if (CREF_CLASS(cref) == old_klass) {
988 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
989 ADD_NEW_CREF(new_cref);
990 return new_cref_head;
992 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
993 cref = CREF_NEXT(cref);
994 ADD_NEW_CREF(new_cref);
1000 return new_cref_head;
1009 prev_cref = vm_env_cref(ep);
1015 prev_cref = vm_env_cref(cfp->ep);
1019 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1023vm_get_cbase(
const VALUE *ep)
1025 const rb_cref_t *cref = vm_get_cref(ep);
1027 return CREF_CLASS_FOR_DEFINITION(cref);
1031vm_get_const_base(
const VALUE *ep)
1033 const rb_cref_t *cref = vm_get_cref(ep);
1036 if (!CREF_PUSHED_BY_EVAL(cref)) {
1037 return CREF_CLASS_FOR_DEFINITION(cref);
1039 cref = CREF_NEXT(cref);
1046vm_check_if_namespace(
VALUE klass)
1049 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1054vm_ensure_not_refinement_module(
VALUE self)
1057 rb_warn(
"not defined at the refinement, but at the outer class/module");
1073 if (
NIL_P(orig_klass) && allow_nil) {
1075 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1079 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1080 root_cref = CREF_NEXT(root_cref);
1083 while (cref && CREF_NEXT(cref)) {
1084 if (CREF_PUSHED_BY_EVAL(cref)) {
1088 klass = CREF_CLASS(cref);
1090 cref = CREF_NEXT(cref);
1092 if (!
NIL_P(klass)) {
1096 if ((ce = rb_const_lookup(klass,
id))) {
1097 rb_const_warn_if_deprecated(ce, klass,
id);
1100 if (am == klass)
break;
1102 if (is_defined)
return 1;
1103 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1105 goto search_continue;
1112 if (UNLIKELY(!rb_ractor_main_p())) {
1114 rb_raise(rb_eRactorIsolationError,
1115 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1126 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1127 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1141 vm_check_if_namespace(orig_klass);
1143 return rb_public_const_defined_from(orig_klass,
id);
1146 return rb_public_const_get_from(orig_klass,
id);
1154 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1162 int allow_nil = TRUE;
1163 if (segments[0] == idNULL) {
1168 while (segments[idx]) {
1169 ID id = segments[idx++];
1170 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1183 rb_bug(
"vm_get_cvar_base: no cref");
1186 while (CREF_NEXT(cref) &&
1187 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1188 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1189 cref = CREF_NEXT(cref);
1191 if (top_level_raise && !CREF_NEXT(cref)) {
1195 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1203ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1205fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1208 vm_cc_attr_index_set(cc, index, shape_id);
1211 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1215#define ractor_incidental_shareable_p(cond, val) \
1216 (!(cond) || rb_ractor_shareable_p(val))
1217#define ractor_object_incidental_shareable_p(obj, val) \
1218 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1230 return default_value;
1233 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(obj);
1243 if (UNLIKELY(!rb_ractor_main_p())) {
1251 if (default_value ==
Qundef) {
1259 fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1261 return default_value;
1263 ivar_list = rb_imemo_fields_ptr(fields_obj);
1264 shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
1269 if (rb_obj_exivar_p(obj)) {
1270 VALUE fields_obj = 0;
1271 if (!rb_gen_fields_tbl_get(obj,
id, &fields_obj)) {
1272 return default_value;
1274 ivar_list = rb_imemo_fields_ptr(fields_obj);
1277 return default_value;
1281 shape_id_t cached_id;
1285 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1288 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1291 if (LIKELY(cached_id == shape_id)) {
1294 if (index == ATTR_INDEX_NOT_SET) {
1295 return default_value;
1298 val = ivar_list[index];
1299#if USE_DEBUG_COUNTER
1300 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1303 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1309#if USE_DEBUG_COUNTER
1311 if (cached_id != INVALID_SHAPE_ID) {
1312 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1315 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1319 if (cached_id != INVALID_SHAPE_ID) {
1320 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1323 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1326 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1329 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1333 if (rb_shape_too_complex_p(shape_id)) {
1338 table = rb_imemo_fields_complex_tbl(fields_obj);
1342 table = ROBJECT_FIELDS_HASH(obj);
1347 if (rb_gen_fields_tbl_get(obj, 0, &fields_obj)) {
1348 table = rb_imemo_fields_complex_tbl(fields_obj);
1354 if (!table || !st_lookup(table,
id, &val)) {
1355 val = default_value;
1359 shape_id_t previous_cached_id = cached_id;
1360 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1363 if (cached_id != previous_cached_id) {
1364 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1367 if (index == ATTR_INDEX_NOT_SET) {
1368 val = default_value;
1372 val = ivar_list[index];
1378 vm_cc_attr_index_initialize(cc, shape_id);
1381 vm_ic_attr_index_initialize(ic, shape_id);
1384 val = default_value;
1390 if (!UNDEF_P(default_value)) {
1399 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1402 return rb_attr_get(obj,
id);
1410populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1412 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1416 vm_cc_attr_index_set(cc, index, next_shape_id);
1419 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1431 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1434 rb_check_frozen(obj);
1436 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1438 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1440 if (!rb_shape_too_complex_p(next_shape_id)) {
1441 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1444 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1454 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1460 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1463NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1465vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1467 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1469 VALUE fields_obj = 0;
1472 if (shape_id == dest_shape_id) {
1473 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1475 else if (dest_shape_id != INVALID_SHAPE_ID) {
1476 if (shape_id == RSHAPE_PARENT(dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1477 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1487 rb_gen_fields_tbl_get(obj, 0, &fields_obj);
1489 if (shape_id != dest_shape_id) {
1490 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1493 RB_OBJ_WRITE(obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1495 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1501vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1509 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1510 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1512 if (LIKELY(shape_id == dest_shape_id)) {
1513 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1516 else if (dest_shape_id != INVALID_SHAPE_ID) {
1517 shape_id_t source_shape_id = RSHAPE_PARENT(dest_shape_id);
1519 if (shape_id == source_shape_id && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1520 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1522 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1524 RUBY_ASSERT(rb_shape_get_next_iv_shape(source_shape_id,
id) == dest_shape_id);
1525 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1540 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1541 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1547 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1559 VALUE defined_class = 0;
1563 defined_class =
RBASIC(defined_class)->klass;
1566 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1568 rb_bug(
"the cvc table should be set");
1572 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1573 rb_bug(
"should have cvar cache entry");
1578 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1594 cref = vm_get_cref(GET_EP());
1596 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1597 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1599 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1605 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1607 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1613 return vm_getclassvariable(iseq, cfp,
id, ic);
1620 cref = vm_get_cref(GET_EP());
1622 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1623 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1625 rb_class_ivar_set(ic->entry->class_value,
id, val);
1629 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1633 update_classvariable_cache(iseq, klass,
id, cref, ic);
1639 vm_setclassvariable(iseq, cfp,
id, val, ic);
1645 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1656 shape_id_t dest_shape_id;
1658 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1660 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1667 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1671 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1678 vm_setinstancevariable(iseq, obj,
id, val, ic);
1687 ec->tag->state = RUBY_TAG_FATAL;
1690 ec->tag->state = TAG_THROW;
1692 else if (THROW_DATA_P(err)) {
1693 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1696 ec->tag->state = TAG_RAISE;
1703 const int flag,
const VALUE throwobj)
1711 else if (state == TAG_BREAK) {
1713 const VALUE *ep = GET_EP();
1714 const rb_iseq_t *base_iseq = GET_ISEQ();
1715 escape_cfp = reg_cfp;
1717 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1718 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1719 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1720 ep = escape_cfp->ep;
1721 base_iseq = escape_cfp->iseq;
1724 ep = VM_ENV_PREV_EP(ep);
1725 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1726 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1727 VM_ASSERT(escape_cfp->iseq == base_iseq);
1731 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1737 ep = VM_ENV_PREV_EP(ep);
1739 while (escape_cfp < eocfp) {
1740 if (escape_cfp->ep == ep) {
1741 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1742 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1747 for (i=0; i < ct->size; i++) {
1749 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1751 if (entry->type == CATCH_TYPE_BREAK &&
1752 entry->iseq == base_iseq &&
1753 entry->start < epc && entry->end >= epc) {
1754 if (entry->cont == epc) {
1763 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1768 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1771 else if (state == TAG_RETRY) {
1772 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1774 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1776 else if (state == TAG_RETURN) {
1777 const VALUE *current_ep = GET_EP();
1778 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1779 int in_class_frame = 0;
1781 escape_cfp = reg_cfp;
1784 while (!VM_ENV_LOCAL_P(ep)) {
1785 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1788 ep = VM_ENV_PREV_EP(ep);
1792 while (escape_cfp < eocfp) {
1793 const VALUE *lep = VM_CF_LEP(escape_cfp);
1799 if (lep == target_lep &&
1800 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1801 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1806 if (lep == target_lep) {
1807 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1809 if (in_class_frame) {
1814 const VALUE *tep = current_ep;
1816 while (target_lep != tep) {
1817 if (escape_cfp->ep == tep) {
1819 if (tep == target_ep) {
1823 goto unexpected_return;
1826 tep = VM_ENV_PREV_EP(tep);
1830 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1831 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1833 case ISEQ_TYPE_MAIN:
1835 if (in_class_frame)
goto unexpected_return;
1836 if (target_ep == NULL) {
1840 goto unexpected_return;
1844 case ISEQ_TYPE_EVAL: {
1846 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1847 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1848 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1849 t = ISEQ_BODY(is)->type;
1851 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1854 case ISEQ_TYPE_CLASS:
1863 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1864 if (target_ep == NULL) {
1868 goto unexpected_return;
1872 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1875 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1881 rb_bug(
"isns(throw): unsupported throw type");
1884 ec->tag->state = state;
1885 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1890 rb_num_t throw_state,
VALUE throwobj)
1892 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1893 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1896 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1899 return vm_throw_continue(ec, throwobj);
1906 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1912 int is_splat = flag & 0x01;
1915 const VALUE obj = ary;
1927 if (num + is_splat == 0) {
1930 else if (flag & 0x02) {
1935 for (i = 0; i < num -
len; i++) {
1940 for (j = 0; i < num; i++, j++) {
1962 for (; i < num -
len; i++) {
1966 for (rb_num_t j = 0; i < num; i++, j++) {
1967 *cfp->sp++ = ptr[
len - j - 1];
1971 for (rb_num_t j = 0; j < num; j++) {
1972 *cfp->sp++ = ptr[num - j - 1];
1988#if VM_CHECK_MODE > 0
1989 ccs->debug_sig = ~(
VALUE)ccs;
1995 ccs->entries = NULL;
1997 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2005 if (! vm_cc_markable(cc)) {
2009 if (UNLIKELY(ccs->len == ccs->capa)) {
2010 if (ccs->capa == 0) {
2012 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2016 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2019 VM_ASSERT(ccs->len < ccs->capa);
2021 const int pos = ccs->len++;
2022 ccs->entries[pos].argc = vm_ci_argc(ci);
2023 ccs->entries[pos].flag = vm_ci_flag(ci);
2026 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2032#if VM_CHECK_MODE > 0
2036 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2037 for (
int i=0; i<ccs->len; i++) {
2038 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2039 ccs->entries[i].flag,
2040 ccs->entries[i].argc);
2041 rp(ccs->entries[i].cc);
2048 VM_ASSERT(vm_ccs_p(ccs));
2049 VM_ASSERT(ccs->len <= ccs->capa);
2051 for (
int i=0; i<ccs->len; i++) {
2054 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2055 VM_ASSERT(vm_cc_class_check(cc, klass));
2056 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2057 VM_ASSERT(!vm_cc_super_p(cc));
2058 VM_ASSERT(!vm_cc_refinement_p(cc));
2069 const ID mid = vm_ci_mid(ci);
2070 struct rb_id_table *cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2077 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2079 const int ccs_len = ccs->len;
2081 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2082 rb_vm_ccs_free(ccs);
2083 rb_id_table_delete(cc_tbl, mid);
2087 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2092 unsigned int argc = vm_ci_argc(ci);
2093 unsigned int flag = vm_ci_flag(ci);
2095 for (
int i=0; i<ccs_len; i++) {
2096 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2097 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2098 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2100 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2102 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2103 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2105 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2106 VM_ASSERT(ccs_cc->klass == klass);
2107 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2116 cc_tbl = rb_id_table_create(2);
2117 RCLASS_WRITE_CC_TBL(klass, cc_tbl);
2120 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2126 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2128 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2131 cme = rb_callable_method_entry(klass, mid);
2134 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2138 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2139 return &vm_empty_cc;
2142 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2147 VM_ASSERT(cc_tbl != NULL);
2149 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2155 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2159 cme = rb_check_overloaded_cme(cme, ci);
2161 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2162 vm_ccs_push(klass, ccs, ci, cc);
2164 VM_ASSERT(vm_cc_cme(cc) != NULL);
2165 VM_ASSERT(cme->called_id == mid);
2166 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2179 cc = vm_search_cc(klass, ci);
2182 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2183 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2184 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2185 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2186 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2195#if USE_DEBUG_COUNTER
2199 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2201#if OPT_INLINE_METHOD_CACHE
2205 if (cd_owner && cc != empty_cc) {
2209#if USE_DEBUG_COUNTER
2210 if (!old_cc || old_cc == empty_cc) {
2212 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2214 else if (old_cc == cc) {
2215 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2217 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2218 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2220 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2221 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2222 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2225 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2230 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2231 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2242#if OPT_INLINE_METHOD_CACHE
2243 if (LIKELY(vm_cc_class_check(cc, klass))) {
2244 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2245 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2246 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2247 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2248 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2249 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2253 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2256 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2260 return vm_search_method_slowpath0(cd_owner, cd, klass);
2267 VM_ASSERT(klass !=
Qfalse);
2270 return vm_search_method_fastpath(cd_owner, cd, klass);
2273#if __has_attribute(transparent_union)
2286 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2287 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2288 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2289 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2290 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2291 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2294# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2297# define make_cfunc_type(f) (cfunc_type)(f)
2307 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2308 VM_ASSERT(callable_method_entry_p(me));
2310 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2314#if __has_attribute(transparent_union)
2315 return me->def->body.cfunc.func == func.anyargs;
2317 return me->def->body.cfunc.func == func;
2326 return me && METHOD_ENTRY_BASIC(me);
2332 VM_ASSERT(iseq != NULL);
2334 return check_cfunc(vm_cc_cme(cc), func);
2337#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2338#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2340#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2372opt_equality_specialized(
VALUE recv,
VALUE obj)
2374 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2375 goto compare_by_identity;
2377 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2378 goto compare_by_identity;
2381 goto compare_by_identity;
2390#if MSC_VERSION_BEFORE(1300)
2394 else if (isnan(b)) {
2399 return RBOOL(a == b);
2406 return rb_str_eql_internal(obj, recv);
2411 compare_by_identity:
2412 return RBOOL(recv == obj);
2418 VM_ASSERT(cd_owner != NULL);
2420 VALUE val = opt_equality_specialized(recv, obj);
2421 if (!UNDEF_P(val))
return val;
2423 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2427 return RBOOL(recv == obj);
2431#undef EQ_UNREDEFINED_P
2434NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2437opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2439 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2441 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2442 return RBOOL(recv == obj);
2452 VALUE val = opt_equality_specialized(recv, obj);
2453 if (!UNDEF_P(val)) {
2457 return opt_equality_by_mid_slowpath(recv, obj, mid);
2464 return opt_equality_by_mid(obj1, obj2, idEq);
2470 return opt_equality_by_mid(obj1, obj2, idEqlP);
2480 case VM_CHECKMATCH_TYPE_WHEN:
2482 case VM_CHECKMATCH_TYPE_RESCUE:
2484 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2487 case VM_CHECKMATCH_TYPE_CASE: {
2488 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2491 rb_bug(
"check_match: unreachable");
2496#if MSC_VERSION_BEFORE(1300)
2497#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2499#define CHECK_CMP_NAN(a, b)
2503double_cmp_lt(
double a,
double b)
2505 CHECK_CMP_NAN(a, b);
2506 return RBOOL(a < b);
2510double_cmp_le(
double a,
double b)
2512 CHECK_CMP_NAN(a, b);
2513 return RBOOL(a <= b);
2517double_cmp_gt(
double a,
double b)
2519 CHECK_CMP_NAN(a, b);
2520 return RBOOL(a > b);
2524double_cmp_ge(
double a,
double b)
2526 CHECK_CMP_NAN(a, b);
2527 return RBOOL(a >= b);
2531static inline VALUE *
2536 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2537 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2539 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2540 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2541 int params = ISEQ_BODY(cfp->iseq)->param.size;
2544 bp += vm_ci_argc(ci);
2547 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2551#if VM_DEBUG_BP_CHECK
2552 if (bp != cfp->bp_check) {
2553 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2554 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2555 (
long)(bp - GET_EC()->vm_stack));
2556 rb_bug(
"vm_base_ptr: unreachable");
2569 return vm_base_ptr(cfp);
2584static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2589 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2591 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2597 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2600 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2601 int param = ISEQ_BODY(iseq)->param.size;
2602 int local = ISEQ_BODY(iseq)->local_table_size;
2603 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2609 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2610 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2611 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2612 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2613 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2614 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2615 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2616 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2620rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2622 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2623 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2628 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2629 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2633rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2635 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2638 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2639 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2641 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2644#define ALLOW_HEAP_ARGV (-2)
2645#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2650 vm_check_canary(GET_EC(), cfp->sp);
2656 int argc = calling->argc;
2658 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2662 VALUE *argv = cfp->sp - argc;
2666 cfp->sp -= argc - 1;
2667 cfp->sp[-1] = argv_ary;
2669 calling->heap_argv = argv_ary;
2675 if (max_args >= 0 &&
len + argc > max_args) {
2683 calling->argc +=
len - (max_args - argc + 1);
2684 len = max_args - argc + 1;
2693 calling->heap_argv = 0;
2695 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2697 for (i = 0; i <
len; i++) {
2698 *cfp->sp++ = ptr[i];
2710 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2711 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2712 const VALUE h = rb_hash_new_with_size(kw_len);
2713 VALUE *sp = cfp->sp;
2716 for (i=0; i<kw_len; i++) {
2717 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2721 cfp->sp -= kw_len - 1;
2722 calling->argc -= kw_len - 1;
2723 calling->kw_splat = 1;
2727vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2730 if (keyword_hash !=
Qnil) {
2732 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2735 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2741 keyword_hash = rb_hash_dup(keyword_hash);
2743 return keyword_hash;
2749 const struct rb_callinfo *restrict ci,
int max_args)
2751 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2752 if (IS_ARGS_KW_SPLAT(ci)) {
2754 VM_ASSERT(calling->kw_splat == 1);
2758 VALUE ary = cfp->sp[0];
2759 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2762 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2766 if (UNLIKELY(calling->heap_argv)) {
2768 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2769 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2770 calling->kw_splat = 0;
2778 VM_ASSERT(calling->kw_splat == 1);
2782 calling->kw_splat = 0;
2787 VM_ASSERT(calling->kw_splat == 0);
2791 VALUE ary = cfp->sp[0];
2793 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2798 VALUE last_hash, argv_ary;
2799 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2800 if (!IS_ARGS_KEYWORD(ci) &&
2803 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2808 calling->kw_splat = 1;
2814 if (!IS_ARGS_KEYWORD(ci) &&
2815 calling->argc > 0 &&
2817 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2824 cfp->sp[-1] = rb_hash_dup(last_hash);
2825 calling->kw_splat = 1;
2831 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2833 VM_ASSERT(calling->kw_splat == 1);
2834 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2839 calling->kw_splat = 0;
2845 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2847 VM_ASSERT(calling->kw_splat == 0);
2853 vm_caller_setup_arg_kw(cfp, calling, ci);
2857#define USE_OPT_HIST 0
2860#define OPT_HIST_MAX 64
2861static int opt_hist[OPT_HIST_MAX+1];
2865opt_hist_show_results_at_exit(
void)
2867 for (
int i=0; i<OPT_HIST_MAX; i++) {
2868 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2878 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2879 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2880 const int opt = calling->argc - lead_num;
2881 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2882 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2883 const int param = ISEQ_BODY(iseq)->param.size;
2884 const int local = ISEQ_BODY(iseq)->local_table_size;
2885 const int delta = opt_num - opt;
2887 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2890 if (opt_pc < OPT_HIST_MAX) {
2894 opt_hist[OPT_HIST_MAX]++;
2898 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2906 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2907 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2908 const int opt = calling->argc - lead_num;
2909 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2911 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2914 if (opt_pc < OPT_HIST_MAX) {
2918 opt_hist[OPT_HIST_MAX]++;
2922 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2927 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2928 VALUE *
const locals);
2935 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2936 int param_size = ISEQ_BODY(iseq)->param.size;
2937 int local_size = ISEQ_BODY(iseq)->local_table_size;
2940 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2942 local_size = local_size + vm_ci_argc(calling->cd->ci);
2943 param_size = param_size + vm_ci_argc(calling->cd->ci);
2945 cfp->sp[0] = (
VALUE)calling->cd->ci;
2947 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2957 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2958 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2960 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2961 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2963 const int ci_kw_len = kw_arg->keyword_len;
2964 const VALUE *
const ci_keywords = kw_arg->keywords;
2965 VALUE *argv = cfp->sp - calling->argc;
2966 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2967 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2969 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2970 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
2972 int param = ISEQ_BODY(iseq)->param.size;
2973 int local = ISEQ_BODY(iseq)->local_table_size;
2974 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2981 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2984 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2985 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2987 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2988 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2989 VALUE *
const argv = cfp->sp - calling->argc;
2990 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2993 for (i=0; i<kw_param->num; i++) {
2994 klocals[i] = kw_param->default_values[i];
3001 int param = ISEQ_BODY(iseq)->param.size;
3002 int local = ISEQ_BODY(iseq)->local_table_size;
3003 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3013 cfp->sp -= (calling->argc + 1);
3014 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3015 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3024 set_table *dup_check_table = vm->unused_block_warning_table;
3034 .v = (
VALUE)cme->def,
3038 if (!strict_unused_block) {
3039 key = (st_data_t)cme->def->original_id;
3041 if (set_table_lookup(dup_check_table, key)) {
3051 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3056 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3057 fprintf(stderr,
"key:%p\n", (
void *)key);
3061 if (set_insert(dup_check_table, key)) {
3066 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3068 if (!
NIL_P(m_loc)) {
3069 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3073 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3080 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3085 VM_ASSERT((vm_ci_argc(ci), 1));
3086 VM_ASSERT(vm_cc_cme(cc) != NULL);
3088 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3089 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3090 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3091 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3094 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3095 if (LIKELY(rb_simple_iseq_p(iseq))) {
3097 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3098 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3100 if (calling->argc != lead_num) {
3101 argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
3105 VM_ASSERT(cc == calling->cc);
3107 if (vm_call_iseq_optimizable_p(ci, cc)) {
3108 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3110 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3111 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3112 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3115 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3120 else if (rb_iseq_only_optparam_p(iseq)) {
3123 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3124 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3126 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3127 const int argc = calling->argc;
3128 const int opt = argc - lead_num;
3130 if (opt < 0 || opt > opt_num) {
3131 argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
3134 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3135 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3136 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3137 vm_call_cacheable(ci, cc));
3140 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3141 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3142 vm_call_cacheable(ci, cc));
3146 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3147 for (
int i=argc; i<lead_num + opt_num; i++) {
3150 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3152 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3153 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3154 const int argc = calling->argc;
3155 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3157 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3160 if (argc - kw_arg->keyword_len == lead_num) {
3161 const int ci_kw_len = kw_arg->keyword_len;
3162 const VALUE *
const ci_keywords = kw_arg->keywords;
3164 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3166 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3167 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3169 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3170 vm_call_cacheable(ci, cc));
3175 else if (argc == lead_num) {
3177 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3178 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);
3180 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3182 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3183 vm_call_cacheable(ci, cc));
3209 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3210 bool can_fastpath =
true;
3212 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3214 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3215 ci = vm_ci_new_runtime(
3222 ci = forward_cd->caller_ci;
3224 can_fastpath =
false;
3228 if (!vm_ci_markable(ci)) {
3229 ci = vm_ci_new_runtime(
3234 can_fastpath =
false;
3236 argv[param_size - 1] = (
VALUE)ci;
3237 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3241 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3268 const VALUE * lep = VM_CF_LEP(cfp);
3274 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3279 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3283 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3285 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3286 VALUE * to = cfp->sp - 1;
3290 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3295 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3297 cfp->sp = to + argc;
3316 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3319 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3320 int param_size = ISEQ_BODY(iseq)->param.size;
3321 int local_size = ISEQ_BODY(iseq)->local_table_size;
3323 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3325 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3326 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3332 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3335 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3336 int param_size = ISEQ_BODY(iseq)->param.size;
3337 int local_size = ISEQ_BODY(iseq)->local_table_size;
3339 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3342 local_size = local_size + vm_ci_argc(calling->cd->ci);
3343 param_size = param_size + vm_ci_argc(calling->cd->ci);
3345 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3346 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3351 int opt_pc,
int param_size,
int local_size)
3356 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3357 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3360 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3366 int opt_pc,
int param_size,
int local_size)
3368 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3369 VALUE *argv = cfp->sp - calling->argc;
3370 VALUE *sp = argv + param_size;
3371 cfp->sp = argv - 1 ;
3373 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3374 calling->block_handler, (
VALUE)me,
3375 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3376 local_size - param_size,
3377 ISEQ_BODY(iseq)->stack_max);
3386 VALUE *argv = cfp->sp - calling->argc;
3388 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3389 VALUE *src_argv = argv;
3390 VALUE *sp_orig, *sp;
3391 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3393 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3394 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3395 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3396 dst_captured->code.val = src_captured->code.val;
3397 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3398 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3401 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3405 vm_pop_frame(ec, cfp, cfp->ep);
3408 sp_orig = sp = cfp->sp;
3411 sp[0] = calling->recv;
3415 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3416 *sp++ = src_argv[i];
3419 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3420 calling->recv, calling->block_handler, (
VALUE)me,
3421 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3422 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3423 ISEQ_BODY(iseq)->stack_max);
3431ractor_unsafe_check(
void)
3433 if (!rb_ractor_main_p()) {
3434 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
static VALUE
ractor_unsafe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}

static VALUE
ractor_unsafe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
ractor_unsafe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
ractor_unsafe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
ractor_unsafe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
ractor_unsafe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
ractor_unsafe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
ractor_unsafe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
ractor_unsafe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
ractor_unsafe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
ractor_unsafe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
ractor_unsafe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
ractor_unsafe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
ractor_unsafe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
ractor_unsafe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
ractor_unsafe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
ractor_unsafe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
ractor_unsafe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
/* Ractor-safe variants: identical to the above except that no
 * ractor_unsafe_check() is performed. */

static VALUE
ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}

static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
static inline int
vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
{
    const int ov_flags = RAISED_STACKOVERFLOW;
    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
        return TRUE;
    }
    return FALSE;
}

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
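/*
 * Usage note (hedged): CHECK_CFP_CONSISTENCY is expanded right after a C
 * method returns (see vm_call_cfunc_with_frame_ below). If reg_cfp does not
 * equal ec->cfp + 1 at that point, the callee over- or under-popped control
 * frames, which is reported as a bug unless a stack-overflow unwind is in
 * progress.
 */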
static inline const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
static VALUE
vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                          int argc, VALUE *argv, VALUE *stack_bottom)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
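/*
 * Walk-through (hedged sketch) of the C-method protocol above: push a CFUNC
 * frame (no iseq, no locals), fire c_call hooks, lower sp to the caller's
 * stack bottom, invoke through cfunc->invoker (one of the call_cfunc_*
 * bridges), verify frame consistency, pop the frame, then fire c_return
 * hooks. The invoker indirection is also what lets Ractor-unsafe methods
 * raise before entering func.
 */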
void
rb_vm_push_cfunc_frame(const rb_callable_method_entry_t *cme, int recv_idx)
{
    VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

    rb_execution_context_t *ec = GET_EC();
    VALUE *sp = ec->cfp->sp;
    VALUE recv = *(sp - recv_idx - 1);
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0
    /* clear the stack canary to avoid a false positive */
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
}
bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}

static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    int argc = calling->argc;
    VALUE *stack_bottom = reg_cfp->sp - argc - 1;
    VALUE *argv = &stack_bottom[1];

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
}
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}

static inline VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                         int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for(i = 0; i < argc; i++) {
        *++sp = argv[i];
    }
    reg_cfp->sp = sp + 1;

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
}

static inline VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++;
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}

static inline VALUE
vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE keyword_hash = reg_cfp->sp[-1];

    if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
        return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
    }

    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            /* f(*a) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            /* f(*a, **kw) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
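/*
 * Hedged examples of the fastpath selection above: a call site shaped like
 * `obj.m(*args)` (single bare splat) caches vm_call_cfunc_only_splat, and
 * `obj.m(*args, **kw)` caches vm_call_cfunc_only_splat_kw; every other shape
 * goes through vm_call_cfunc_other, which itself downgrades to plain
 * vm_call_cfunc_with_frame when the site uses no splat or keywords.
 */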
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
    return ivar;
}

static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    attr_index_t index;
    shape_id_t dest_shape_id;
    vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            res = vm_setivar_default(obj, id, val, dest_shape_id, index);
            if (!UNDEF_P(res)) {
                return res;
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}

static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}

static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; /* -1 for the receiver */

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}

static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        argv = RARRAY_PTR(calling->heap_argv);
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        cfp->sp += - argc - 1;
    }

    return vm_call_bmethod_body(ec, calling, argv);
}

static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;
    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    if (vm_block_type(block) == block_type_iseq) {
        CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
}
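/*
 * Hedged note: a bmethod is a method defined via define_method(). When the
 * underlying Proc is iseq-backed, vm_call_iseq_bmethod inlines it as a
 * BLOCK|BMETHOD|LAMBDA frame; otherwise (e.g. an ifunc-backed proc)
 * vm_call_noniseq_bmethod materializes the arguments and falls back to
 * vm_invoke_bmethod.
 */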
VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for prepended Module, start from the origin-covering class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
        RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
        klass = RBASIC_CLASS(klass);
    }

    while (RTEST(klass)) {
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}

static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}

const rb_callable_method_entry_t *
rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    return aliased_callable_method_entry(me);
}

static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
    return stat;
}
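/*
 * Illustrative note (hedged): for a bareword vcall such as `foo`, the call
 * info carries VM_CALL_VCALL | VM_CALL_FCALL, so a failed lookup yields
 * MISSING_NOENTRY | MISSING_VCALL | MISSING_FCALL; the VCALL bit is what
 * lets a NameError be raised instead of a NoMethodError for receiver-less
 * bareword calls.
 */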
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv,
                                                        RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary), priv);
                rb_exc_raise(exc);
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* shift the stack up by one slot to make room for the method name */
            int i = argc;
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            INC_SP(1);
            MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv, argc, argv, priv);
                rb_exc_raise(exc);
            }
            else {
                TOPN(i) = rb_str_intern(symbol);
            }
        }
    }

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
static VALUE
vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    int i;
    VALUE sym;

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* shift arguments: m(:sym, a, b, c) => m(a, b, c) */
    if (i > 0) {
        MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
    }
    calling->argc -= 1;
    DEC_SP(1);

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
}

static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}

static VALUE
vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
}

static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
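/*
 * Hedged example: `obj.send(:m, a)` with a simple argument list takes
 * vm_call_opt_send_simple, which merely tags the original call info with
 * VM_CALL_FCALL before re-dispatching; shapes such as `obj.send(*args)` or
 * keyword-carrying sites need the argument reshuffling in
 * vm_call_opt_send_complex before vm_call_symbol can dispatch to :m.
 */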
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) => method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}

static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
}
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
static inline VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    return rb_hash_lookup(refinements, klass);
}

static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}

static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        return klass ? rb_callable_method_entry(klass, mid) : NULL;
    }
}

static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc = ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4587NOINLINE(
static VALUE
4595 int argc = calling->argc;
4598 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4601 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4607 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4610 VALUE procval = calling->recv;
4611 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4617 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4619 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4622 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4623 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4626 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4627 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4628 return vm_call_general(ec, reg_cfp, calling);
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}

static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;
    return ret;
}

static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}

static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;
    return ret;
}
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
            CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
            return vm_call_iseq_fwd_setup(ec, cfp, calling);
        }
        else {
            CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
            return vm_call_iseq_setup(ec, cfp, calling);
        }

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        /* do not set a fastpath: vm_call_refined assumes cc->call is
         * vm_call_super_method on invokesuper. */
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec,
                                             int argc, const VALUE *argv, VALUE obj, int call_status));

static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv),
                                    RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}

static VALUE
vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
{
    VALUE defined_class = me->defined_class;
    VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
    return NIL_P(refined_class) ? defined_class : refined_class;
}
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to a dedicated on-stack cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
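/*
 * Hedged example: `other.secret` calling a private method takes the
 * METHOD_VISI_PRIVATE arm and is rerouted to method_missing (raising
 * NoMethodError), while an FCALL site (implicit self receiver) is allowed
 * through. Protected methods additionally require the caller's self to be a
 * kind of the defining class, the rb_obj_is_kind_of() check above.
 */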
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}

void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}

static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    /* this check is required to distinguish this from other handlers */
    if (ec == NULL) rb_bug("unreachable");

    /* this can be replaced with a inlined vm_call_method */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}

/* super */

static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}

NORETURN(static void vm_super_outside(void));

static void
vm_super_outside(void)
{
    rb_raise(rb_eRuntimeError, "super called outside of method");
}

static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;

    if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
    }
    else {
        /* update iseq. really? (TODO) */
        cd->ci = vm_ci_new_runtime(mid,
                                   vm_ci_flag(cd->ci),
                                   vm_ci_argc(cd->ci),
                                   vm_ci_kwarg(cd->ci));

        RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);
    }

    const struct rb_callcache *cc;

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
    }
    else {
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        /* define_method can cache for a different method id */
        if (cached_cme == NULL) {
            /* empty_cc_for_super is not a markable object */
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              /* vm_call_refined (search_refined_method) assumes cc->call is
               * vm_call_super_method on invokesuper. */
              case VM_METHOD_TYPE_REFINED:
              /* cc->klass is the superclass of the receiver class; checking
               * cc->klass alone is not enough to invalidate IVC here. */
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); /* invalidate fastpath */
                break;
              default:
                break; /* use fastpath */
            }
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
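/*
 * Hedged note: super lookup starts from the superclass of the *defining*
 * class (vm_search_normal_superclass), not of the receiver's class, and the
 * resulting cache is pinned to vm_call_super_method for the method types
 * above so refinement and method-missing re-dispatch can still recognize a
 * super call site.
 */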
/* yield */

static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    else {
        return 0;
    }
}

/* sketch: accessor for the proc's namespace (assumes the `ns` member used by
 * the namespace feature) */
static inline const rb_namespace_t *
block_proc_namespace(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->ns;
    }
    else {
        return NULL;
    }
}

static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}

VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}

static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}

static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);

    VM_ASSERT(argv[0] == arg0);

    return ary;
}

static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                          const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv,
                          const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}

static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv,
                    int flags, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
/* ruby iseq -> ruby block */

static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
    int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);

    SET_SP(rsp);

    if (calling->proc_ns) {
        frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
    }

    vm_push_frame(ec, iseq,
                  frame_flag,
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}

static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            /* Modify stack to avoid cfp consistency error */
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}

static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}

static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}

static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        if (!calling->proc_ns) {
            calling->proc_ns = block_proc_namespace(proc);
        }
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}

static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
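/*
 * Hedged note: a block handler takes one of four shapes - captured iseq
 * block, C-level ifunc, Symbol (&:sym), or full Proc - and vm_invoke_block
 * fans out on that tag. Proc handlers are unwrapped first by
 * vm_invoke_proc_block, so a Proc wrapping an iseq block still takes the
 * iseq path with its lambda-ness preserved.
 */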
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}

static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}

static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}

/* defined insn */

static bool
check_respond_to_missing(VALUE obj, VALUE v)
{
    VALUE args[2];
    VALUE r;

    args[0] = obj; args[1] = Qfalse;
    r = rb_check_funcall(v, idRespond_to_missing, 2, args);
    if (!UNDEF_P(r) && RTEST(r)) {
        return true;
    }
    else {
        return false;
    }
}

static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
      case DEFINED_METHOD: {
        const rb_method_entry_t *me = rb_method_entry_with_refinements(CLASS_OF(v), SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fall through */
              case METHOD_VISI_PUBLIC:
                return true;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        return false;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        return false;
      case DEFINED_ZSUPER: {
        const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

        if (me) {
            VALUE klass = vm_search_normal_superclass(me->defined_class);
            if (!klass) return false;

            ID id = me->def->original_id;

            return rb_method_boundp(klass, id, 0);
        }
        return false;
      }
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
    }

    return false;
}

bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
static const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    rb_num_t i;
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}

static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}

VALUE
rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
{
    return vm_get_special_object(reg_ep, type);
}

static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        tmp1 = rb_ary_new3(1, ary1);
    }
    if (tmp1 == ary1) {
        tmp1 = rb_ary_dup(ary1);
    }

    if (NIL_P(tmp2)) {
        return rb_ary_push(tmp1, ary2);
    }
    else {
        return rb_ary_concat(tmp1, tmp2);
    }
}

static VALUE
vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    /* ary1 must be a newly created array */
    const VALUE ary2 = ary2st;

    if (NIL_P(ary2)) return ary1;

    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp2)) {
        return rb_ary_push(ary1, ary2);
    }
    else {
        return rb_ary_concat(ary1, tmp2);
    }
}

/* exported for JIT use */
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}

VALUE
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);
}

static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}

VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}

static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}

VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    /* ... */
    return Qtrue;
}
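/*
 * Worked example (hedged): for `def m(k: 1)` the VM keeps a Fixnum bitmap of
 * unspecified keywords in a hidden local. Calling `m()` leaves bit 0 set, so
 * vm_check_keyword(bits, 0, ep) returns Qfalse and the default expression
 * for `k` is evaluated; calling `m(k: 2)` clears bit 0 and the default is
 * skipped.
 */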
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}

static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    else {
        return rb_const_get_at(cbase, id);
    }
}

static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}

static VALUE
vm_check_if_module(ID id, VALUE mod)
{
    if (!RB_TYPE_P(mod, T_MODULE)) {
        return 0;
    }
    else {
        return mod;
    }
}

static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}

static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_define_alloc_func(c, rb_get_alloc_func(c));
    rb_class_inherited(s, c);
    return c;
}

static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}

NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}

static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}

static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}

static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}

static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}

static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    /* set max_iv_count on the class based on the number of ivar sets in #initialize */
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) &&
        (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {
        RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}

static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}

enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};

static inline VALUE
vm_sendish(struct rb_execution_context_struct *ec,
           struct rb_control_frame_struct *reg_cfp,
           struct rb_call_data *cd,
           VALUE block_handler,
           enum method_explorer_type method_explorer)
{
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}
/* sketch: sendish wrappers exported for JIT use; names follow recent CRuby */
VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    }
    return val;
}

VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    return val;
}

VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    }
    return val;
}

VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    return val;
}

/* object.c */
VALUE rb_nil_to_s(VALUE);
VALUE rb_true_to_s(VALUE);
VALUE rb_false_to_s(VALUE);

static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    int type = TYPE(recv);
    if (type == T_STRING) {
        return recv;
    }

    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (type) {
      case T_SYMBOL:
        if (check_method_basic_definition(vm_cc_cme(cc))) {
            /* rb_sym_to_s() allocates a mutable string, but since we are only
             * going to use this string for interpolation, it's fine to use the
             * frozen string. */
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            /* as above, a frozen string suffices for interpolation */
            VALUE val = rb_mod_to_s(recv);
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
static VALUE
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return ary;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return hash;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        return str;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};

        /* duparray */
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        VALUE dupary = rb_ary_resurrect(ary);

        return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    return vm_opt_duparray_include_p(ec, ary, target);
}

static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}

static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}

static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, num, ptr);
}

static VALUE
vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    return vm_opt_newarray_include_p(ec, num, ptr, target);
}

static VALUE
vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
    }
    else {
        /* The opt_newarray_send insn drops the keyword args, so rebuild the
         * buffer keyword hash if one was supplied. */
        VALUE args[2];
        args[0] = fmt;
        int kw_splat = RB_NO_KEYWORDS;
        int argc = 1;

        if (!UNDEF_P(buffer)) {
            args[1] = rb_hash_new_with_size(1);
            rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
            kw_splat = RB_PASS_KEYWORDS;
            argc++;
        }

        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
    }
}

VALUE
rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
}

VALUE
rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
}
static void
vm_track_constant_cache(ID id, void *ic)
{
    rb_vm_t *vm = GET_VM();
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;
    set_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (set_table *)lookup_result;
    }
    else {
        ics = set_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* The insertion below can allocate and may trigger GC, which could free
     * an iseq holding a cache for this constant; if that iseq was the last
     * one, the set table itself would be freed mid-insert. Record the ID
     * being inserted so remove_from_constant_cache skips freeing its table.
     * See [Bug #20921]. */
    vm->inserting_constant_cache_id = id;

    set_insert(ics, (st_data_t)ic);

    vm->inserting_constant_cache_id = (ID)0;
}

static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    RB_VM_LOCKING() {
        for (int i = 0; segments[i]; i++) {
            ID id = segments[i];
            if (id == idNULL) continue;
            vm_track_constant_cache(id, ic);
        }
    }
}

/* for inlining */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || /* no need to check CREF */
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}

static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}

/* YJIT needs this function to never allocate and never raise */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice =
        IMEMO_NEW(struct iseq_inline_constant_cache_entry, imemo_constcache, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
}

VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        /* Undo the PC increment to get the address of this instruction
         * (INSN_ATTR(width) == 2). */
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}

static VALUE
vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
{
    rb_thread_t *th = rb_ec_thread_ptr(ec);
    rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);

  again:
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* is->once.running_thread is cleared by vm_once_clear() */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();
        goto again;
    }
}
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1:
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
      default:
        return 0;
    }
}
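/*
 * Hedged example: `case x; when 1 then ...; when "a" then ...; end` can
 * compile to opt_case_dispatch with a CDHASH mapping 1 and "a" to branch
 * offsets. vm_case_dispatch consults the hash only while === is unredefined
 * for the key classes above, returning the matching offset, else_offset on a
 * miss, or 0 to fall back to the sequential checkmatch branches.
 */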
NORETURN(static void
         vm_stack_consistency_error(const rb_execution_context_t *ec,
                                    const rb_control_frame_t *,
                                    const VALUE *));
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_inspect(rb_vm_backtrace_object()));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString && RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray && RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}

static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    /* If both are Fixnums, the bottom tag bit is 1 on both, and 1 & 1 == 1,
     * so the result is also a Fixnum. */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return (recv & obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_or(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        return (recv | obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref_with(VALUE recv, VALUE key)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse &&
        !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
        return rb_hash_aref(recv, key);
    }
    else {
        return Qundef;
    }
}

VALUE
rb_vm_opt_aref_with(VALUE recv, VALUE key)
{
    return vm_opt_aref_with(recv, key);
}

static VALUE
vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse) {
        return rb_hash_aset(recv, key, val);
    }
    else {
        return Qundef;
    }
}

VALUE
rb_vm_opt_aset_with(VALUE recv, VALUE key, VALUE value)
{
    return vm_opt_aset_with(recv, key, value);
}
static VALUE
vm_opt_length(VALUE recv, int bop)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            return LONG2NUM(RSTRING_LEN(recv));
        }
        else {
            return rb_str_length(recv);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return LONG2NUM(RARRAY_LEN(recv));
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return INT2FIX(RHASH_SIZE(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {
      case Qundef: return Qundef;
      case INT2FIX(0): return Qtrue;
      default: return Qfalse;
    }
}

VALUE rb_false(VALUE obj);

static VALUE
vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (NIL_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        return Qtrue;
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        return Qfalse;
    }
    else {
        return Qundef;
    }
}

static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* INT2FIX(-1).succ is 0 */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* LONG2FIX(FIXNUM_MAX) + 1 overflows into a Bignum */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* with tagged fixnums, x + 2 is LONG2FIX(FIX2LONG(x) + 1) */
        return x + 2;
    }
}

static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    /* reload here since the global hook above can add and free local hooks */
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}

#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    } \
} while (0)

static VALUE
rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
}
7126 const VALUE *pc = reg_cfp->pc;
7127 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7130 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7136 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7139 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7140 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7144 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7145 enabled_flags |= iseq_local_events;
7147 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7149 if (bmethod_frame) {
7151 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7152 bmethod_local_hooks = me->def->body.bmethod.hooks;
7153 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7154 if (bmethod_local_hooks) {
7155 bmethod_local_events = bmethod_local_hooks->events;
7160 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7164 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7172 else if (ec->trace_arg != NULL) {
7180 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7183 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7186 RSTRING_PTR(rb_iseq_path(iseq)),
7187 (int)rb_iseq_line_no(iseq, pos),
7188 RSTRING_PTR(rb_iseq_label(iseq)));
7190 VM_ASSERT(reg_cfp->pc == pc);
7191 VM_ASSERT(pc_events != 0);
7201 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
7202 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
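Both the global and the per-iseq hook paths above gate on a single bitwise AND: each event is one bit of an rb_event_flag_t, a hook list carries the union of events its callbacks want, and VM_TRACE_HOOK fires only when the current instruction's events intersect the enabled set. A reduced sketch of that dispatch, using a hypothetical hook struct rather than rb_hook_list_t:

#include <stdint.h>
#include <stdio.h>

typedef uint32_t event_flag_t;
#define EV_LINE   (1u << 0)
#define EV_CALL   (1u << 1)
#define EV_RETURN (1u << 2)

struct hook { event_flag_t events; void (*fn)(event_flag_t); };

static void trace_line(event_flag_t ev) { printf("event %u fired\n", (unsigned)ev); }

static void
fire_hooks(const struct hook *hooks, int n, event_flag_t ev)
{
    for (int i = 0; i < n; i++) {
        if (hooks[i].events & ev) {        /* one AND decides whether to fire */
            hooks[i].fn(ev);
        }
    }
}

int
main(void)
{
    /* a list wanting lines and returns, fired with a line event */
    struct hook hooks[] = { { EV_LINE | EV_RETURN, trace_line } };
    fire_hooks(hooks, 1, EV_LINE);
    return 0;
}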
7220#if VM_CHECK_MODE > 0
7221 NORETURN(NOINLINE(COLDFUNC
7222 void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
7225 Init_vm_stack_canary(void)
7228 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
7229 vm_stack_canary |= 0x01;
7231 vm_stack_canary_was_born = true;
7236 rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
7240 const char *insn = rb_insns_name(i);
7244 rb_bug("dead canary found at %s: %s", insn, str);
7248 void Init_vm_stack_canary(void) { }
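The canary machinery above guards "leaf" instructions: Init_vm_stack_canary fills a random word and sets its low bit (so the word reads as an immediate Fixnum rather than a heap pointer if the GC ever scans it), the word is planted in the first unused VM stack slot before a leaf instruction runs, and rb_vm_canary_is_found_dead fires if the slot changed afterwards, meaning the instruction used more stack than it declared. A plain-C sketch of the protocol (rand() stands in for the VM's random-byte fill; seeding omitted for brevity):

#include <stdio.h>
#include <stdlib.h>

static unsigned long canary;

static void
canary_init(void)
{
    canary = ((unsigned long)rand() << 1) | 1UL;   /* low bit set, like the VM's */
}

static void
canary_plant(unsigned long *slot) { *slot = canary; }

static void
canary_check(const unsigned long *slot, const char *insn)
{
    if (*slot != canary) {
        /* mirrors rb_bug("dead canary found at %s: ...") */
        fprintf(stderr, "dead canary found at %s\n", insn);
        abort();
    }
}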
7280 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7287 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7294 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7301 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7308 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7315 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7322 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7329 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7336 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7342 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
7343 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7349 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
7350 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7356 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
7357 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7363 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
7364 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7370 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
7371 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7377 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
7378 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7384 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
7385 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7390static builtin_invoker
7391 lookup_builtin_invoker(int argc)
7393 static const builtin_invoker invokers[] = {
7412 return invokers[argc];
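lookup_builtin_invoker is a classic arity-dispatch table: one trampoline per argument count casts the opaque builtin pointer to its exact prototype and spreads argv into positional arguments, so the hot path needs no varargs machinery. The same structure in miniature, a sketch with int-returning functions instead of VALUE and three arities instead of sixteen:

#include <stdio.h>

typedef void (*anyfn)(void);                       /* opaque, like rb_insn_func_t */
typedef int (*invoker_t)(anyfn fn, const int *argv);

static int invoke0(anyfn fn, const int *argv) { (void)argv; return ((int (*)(void))fn)(); }
static int invoke1(anyfn fn, const int *argv) { return ((int (*)(int))fn)(argv[0]); }
static int invoke2(anyfn fn, const int *argv) { return ((int (*)(int, int))fn)(argv[0], argv[1]); }

static invoker_t
lookup_invoker(int argc)
{
    static const invoker_t invokers[] = { invoke0, invoke1, invoke2 };
    return invokers[argc];                         /* argc is fixed when the iseq is compiled */
}

static int add(int a, int b) { return a + b; }

int
main(void)
{
    int argv[] = { 20, 22 };
    printf("%d\n", lookup_invoker(2)((anyfn)add, argv));   /* => 42 */
    return 0;
}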
7418 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7419 SETUP_CANARY(canary_p);
7420 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7421 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7422 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7429 return invoke_bf(ec, cfp, bf, argv);
7436 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
7437 for (int i=0; i<bf->argc; i++) {
7438 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7440 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7441 (void *)(uintptr_t)bf->func_ptr);
7444 if (bf->argc == 0) {
7445 return invoke_bf(ec, cfp, bf, NULL);
7448 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7449 return invoke_bf(ec, cfp, bf, argv);
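The argv computation above relies on the frame layout: a frame's locals sit immediately below its ep, with the last VM_ENV_DATA_SIZE slots holding environment bookkeeping, so a pointer into the locals can be handed to the builtin as a ready-made C array. Worked through with assumed sizes (local_table_size = 4, VM_ENV_DATA_SIZE = 3, start_index = 1):

/*
 *   ep[ 0]  env flags          \
 *   ep[-1]  specval             > VM_ENV_DATA_SIZE (3) bookkeeping slots
 *   ep[-2]  me/cref            /
 *   ep[-3]  local 3
 *   ep[-4]  local 2
 *   ep[-5]  local 1   <- argv = ep - 4 - 3 + 1 + 1
 *   ep[-6]  local 0
 *
 * locals[start_index ..] therefore form a contiguous VALUE array that reaches
 * the builtin without any copying.
 */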
7459 return cfp->ep[index];